Compare commits
6 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
24e606a3fd | ||
|
|
8c0b7f84fc | ||
|
|
5a13faeb5e | ||
|
|
3932176a57 | ||
|
|
57a2179412 | ||
|
|
605b618856 |
@@ -37,4 +37,4 @@ This Blender add-on allows you to import and export meshes and animations to the
|
|||||||
## Why are the mesh normals not accurate when importing a PSK extracted from [UE Viewer](https://www.gildor.org/en/projects/umodel)?
|
## Why are the mesh normals not accurate when importing a PSK extracted from [UE Viewer](https://www.gildor.org/en/projects/umodel)?
|
||||||
If preserving the mesh normals of models is important for your workflow, it is *not recommended* to export PSK files from UE Viewer. This is because UE Viewer makes no attempt to reconstruct the original [smoothing groups](https://en.wikipedia.org/wiki/Smoothing_group). As a result, the normals of imported PSK files will be incorrect when imported into Blender and will need to be manually fixed.
|
If preserving the mesh normals of models is important for your workflow, it is *not recommended* to export PSK files from UE Viewer. This is because UE Viewer makes no attempt to reconstruct the original [smoothing groups](https://en.wikipedia.org/wiki/Smoothing_group). As a result, the normals of imported PSK files will be incorrect when imported into Blender and will need to be manually fixed.
|
||||||
|
|
||||||
As a workaround, it is recommended to export [glTF](https://en.wikipedia.org/wiki/GlTF) meshes out of UE Viewer instead, since the glTF format has support for explicit normals and UE Viewer can correctly preserve the mesh normals on export. Note, however, that the imported glTF armature may have it's bones oriented incorrectly when imported into blender. To mitigate this, you can combine the armature of PSK and the mesh of the glTF for best results.
|
As a workaround, it is recommended to export [glTF](https://en.wikipedia.org/wiki/GlTF) meshes out of UE Viewer instead, since the glTF format has support for explicit normals and UE Viewer can correctly preserve the mesh normals on export. Note, however, that the imported glTF armature may have its bones oriented incorrectly when imported into Blender. To mitigate this, you can combine the armature of PSK and the mesh of the glTF for best results.
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
bl_info = {
|
bl_info = {
|
||||||
"name": "PSK/PSA Importer/Exporter",
|
"name": "PSK/PSA Importer/Exporter",
|
||||||
"author": "Colin Basnett",
|
"author": "Colin Basnett",
|
||||||
"version": (1, 2, 0),
|
"version": (1, 2, 1),
|
||||||
"blender": (2, 80, 0),
|
"blender": (2, 80, 0),
|
||||||
# "location": "File > Export > PSK Export (.psk)",
|
# "location": "File > Export > PSK Export (.psk)",
|
||||||
"description": "PSK/PSA Import/Export (.psk/.psa)",
|
"description": "PSK/PSA Import/Export (.psk/.psa)",
|
||||||
|
|||||||
@@ -102,6 +102,7 @@ class PsaExportOperator(Operator, ExportHelper):
|
|||||||
box.label(text='Bones', icon='BONE_DATA')
|
box.label(text='Bones', icon='BONE_DATA')
|
||||||
bone_filter_mode_items = property_group.bl_rna.properties['bone_filter_mode'].enum_items_static
|
bone_filter_mode_items = property_group.bl_rna.properties['bone_filter_mode'].enum_items_static
|
||||||
row = box.row(align=True)
|
row = box.row(align=True)
|
||||||
|
|
||||||
for item in bone_filter_mode_items:
|
for item in bone_filter_mode_items:
|
||||||
identifier = item.identifier
|
identifier = item.identifier
|
||||||
item_layout = row.row(align=True)
|
item_layout = row.row(align=True)
|
||||||
@@ -114,6 +115,7 @@ class PsaExportOperator(Operator, ExportHelper):
|
|||||||
rows = max(3, min(len(property_group.bone_group_list), 10))
|
rows = max(3, min(len(property_group.bone_group_list), 10))
|
||||||
row.template_list('PSX_UL_BoneGroupList', '', property_group, 'bone_group_list', property_group, 'bone_group_list_index', rows=rows)
|
row.template_list('PSX_UL_BoneGroupList', '', property_group, 'bone_group_list', property_group, 'bone_group_list_index', rows=rows)
|
||||||
|
|
||||||
|
|
||||||
def is_action_for_armature(self, action):
|
def is_action_for_armature(self, action):
|
||||||
if len(action.fcurves) == 0:
|
if len(action.fcurves) == 0:
|
||||||
return False
|
return False
|
||||||
|
|||||||
@@ -10,12 +10,20 @@ from bpy.props import StringProperty, BoolProperty, CollectionProperty, PointerP
|
|||||||
from .reader import PsaReader
|
from .reader import PsaReader
|
||||||
|
|
||||||
|
|
||||||
|
class PsaImportOptions(object):
|
||||||
|
def __init__(self):
|
||||||
|
self.should_clean_keys = True
|
||||||
|
self.should_use_fake_user = False
|
||||||
|
self.should_stash = False
|
||||||
|
self.sequence_names = []
|
||||||
|
|
||||||
|
|
||||||
class PsaImporter(object):
|
class PsaImporter(object):
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def import_psa(self, psa_reader: PsaReader, sequence_names: List[AnyStr], armature_object):
|
def import_psa(self, psa_reader: PsaReader, armature_object, options: PsaImportOptions):
|
||||||
sequences = map(lambda x: psa_reader.sequences[x], sequence_names)
|
sequences = map(lambda x: psa_reader.sequences[x], options.sequence_names)
|
||||||
armature_data = armature_object.data
|
armature_data = armature_object.data
|
||||||
|
|
||||||
class ImportBone(object):
|
class ImportBone(object):
|
||||||
@@ -104,9 +112,11 @@ class PsaImporter(object):
|
|||||||
import_bone.post_quat = import_bone.orig_quat.conjugated()
|
import_bone.post_quat = import_bone.orig_quat.conjugated()
|
||||||
|
|
||||||
# Create and populate the data for new sequences.
|
# Create and populate the data for new sequences.
|
||||||
|
actions = []
|
||||||
for sequence in sequences:
|
for sequence in sequences:
|
||||||
# Add the action.
|
# Add the action.
|
||||||
action = bpy.data.actions.new(name=sequence.name.decode())
|
action = bpy.data.actions.new(name=sequence.name.decode())
|
||||||
|
action.use_fake_user = options.should_use_fake_user
|
||||||
|
|
||||||
# Create f-curves for the rotation and location of each bone.
|
# Create f-curves for the rotation and location of each bone.
|
||||||
for psa_bone_index, armature_bone_index in psa_to_armature_bone_indices.items():
|
for psa_bone_index, armature_bone_index in psa_to_armature_bone_indices.items():
|
||||||
@@ -124,19 +134,30 @@ class PsaImporter(object):
|
|||||||
action.fcurves.new(location_data_path, index=2), # Lz
|
action.fcurves.new(location_data_path, index=2), # Lz
|
||||||
]
|
]
|
||||||
|
|
||||||
# Read the sequence keys from the PSA file.
|
|
||||||
sequence_name = sequence.name.decode('windows-1252')
|
sequence_name = sequence.name.decode('windows-1252')
|
||||||
|
|
||||||
# Read the sequence data matrix from the PSA.
|
# Read the sequence data matrix from the PSA.
|
||||||
sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name)
|
sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name)
|
||||||
keyframe_write_matrix = np.ones(sequence_data_matrix.shape, dtype=np.int8)
|
keyframe_write_matrix = np.ones(sequence_data_matrix.shape, dtype=np.int8)
|
||||||
|
|
||||||
# The first step is to determine the frames at which each bone will write out a keyframe.
|
# Convert the sequence's data from world-space to local-space.
|
||||||
|
for bone_index, import_bone in enumerate(import_bones):
|
||||||
|
if import_bone is None:
|
||||||
|
continue
|
||||||
|
for frame_index in range(sequence.frame_count):
|
||||||
|
# This bone has writeable keyframes for this frame.
|
||||||
|
key_data = sequence_data_matrix[frame_index, bone_index]
|
||||||
|
# Calculate the local-space key data for the bone.
|
||||||
|
sequence_data_matrix[frame_index, bone_index] = calculate_fcurve_data(import_bone, key_data)
|
||||||
|
|
||||||
|
# Clean the keyframe data. This is accomplished by writing zeroes to the write matrix when there is an
|
||||||
|
# insufficiently large change in the data from frame-to-frame.
|
||||||
|
if options.should_clean_keys:
|
||||||
threshold = 0.001
|
threshold = 0.001
|
||||||
for bone_index, import_bone in enumerate(import_bones):
|
for bone_index, import_bone in enumerate(import_bones):
|
||||||
if import_bone is None:
|
if import_bone is None:
|
||||||
continue
|
continue
|
||||||
for fcurve_index, fcurve in enumerate(import_bone.fcurves):
|
for fcurve_index in range(len(import_bone.fcurves)):
|
||||||
# Get all the keyframe data for the bone's f-curve data from the sequence data matrix.
|
# Get all the keyframe data for the bone's f-curve data from the sequence data matrix.
|
||||||
fcurve_frame_data = sequence_data_matrix[:, bone_index, fcurve_index]
|
fcurve_frame_data = sequence_data_matrix[:, bone_index, fcurve_index]
|
||||||
last_written_datum = 0
|
last_written_datum = 0
|
||||||
@@ -145,7 +166,7 @@ class PsaImporter(object):
|
|||||||
if frame_index > 0 and abs(datum - last_written_datum) < threshold:
|
if frame_index > 0 and abs(datum - last_written_datum) < threshold:
|
||||||
keyframe_write_matrix[frame_index, bone_index, fcurve_index] = 0
|
keyframe_write_matrix[frame_index, bone_index, fcurve_index] = 0
|
||||||
else:
|
else:
|
||||||
last_written_datum = fcurve_frame_data[frame_index]
|
last_written_datum = datum
|
||||||
|
|
||||||
# Write the keyframes out!
|
# Write the keyframes out!
|
||||||
for frame_index in range(sequence.frame_count):
|
for frame_index in range(sequence.frame_count):
|
||||||
@@ -156,12 +177,22 @@ class PsaImporter(object):
|
|||||||
if bone_has_writeable_keyframes:
|
if bone_has_writeable_keyframes:
|
||||||
# This bone has writeable keyframes for this frame.
|
# This bone has writeable keyframes for this frame.
|
||||||
key_data = sequence_data_matrix[frame_index, bone_index]
|
key_data = sequence_data_matrix[frame_index, bone_index]
|
||||||
# Calculate the local-space key data for the bone.
|
for fcurve, should_write, datum in zip(import_bone.fcurves, keyframe_write_matrix[frame_index, bone_index], key_data):
|
||||||
fcurve_data = calculate_fcurve_data(import_bone, key_data)
|
|
||||||
for fcurve, should_write, datum in zip(import_bone.fcurves, keyframe_write_matrix[frame_index, bone_index], fcurve_data):
|
|
||||||
if should_write:
|
if should_write:
|
||||||
fcurve.keyframe_points.insert(frame_index, datum, options={'FAST'})
|
fcurve.keyframe_points.insert(frame_index, datum, options={'FAST'})
|
||||||
|
|
||||||
|
actions.append(action)
|
||||||
|
|
||||||
|
# If the user specifies, store the new animations as strips on a non-contributing NLA stack.
|
||||||
|
if options.should_stash:
|
||||||
|
if armature_object.animation_data is None:
|
||||||
|
armature_object.animation_data_create()
|
||||||
|
for action in actions:
|
||||||
|
nla_track = armature_object.animation_data.nla_tracks.new()
|
||||||
|
nla_track.name = action.name
|
||||||
|
nla_track.mute = True
|
||||||
|
nla_track.strips.new(name=action.name, start=0, action=action)
|
||||||
|
|
||||||
|
|
||||||
class PsaImportPsaBoneItem(PropertyGroup):
|
class PsaImportPsaBoneItem(PropertyGroup):
|
||||||
bone_name: StringProperty()
|
bone_name: StringProperty()
|
||||||
@@ -182,7 +213,6 @@ class PsaImportActionListItem(PropertyGroup):
|
|||||||
|
|
||||||
|
|
||||||
def on_psa_file_path_updated(property, context):
|
def on_psa_file_path_updated(property, context):
|
||||||
print('PATH UPDATED')
|
|
||||||
property_group = context.scene.psa_import
|
property_group = context.scene.psa_import
|
||||||
property_group.action_list.clear()
|
property_group.action_list.clear()
|
||||||
property_group.psa_bones.clear()
|
property_group.psa_bones.clear()
|
||||||
@@ -195,33 +225,23 @@ def on_psa_file_path_updated(property, context):
|
|||||||
item.action_name = sequence.name.decode('windows-1252')
|
item.action_name = sequence.name.decode('windows-1252')
|
||||||
item.frame_count = sequence.frame_count
|
item.frame_count = sequence.frame_count
|
||||||
item.is_selected = True
|
item.is_selected = True
|
||||||
|
|
||||||
for psa_bone in psa_reader.bones:
|
for psa_bone in psa_reader.bones:
|
||||||
item = property_group.psa_bones.add()
|
item = property_group.psa_bones.add()
|
||||||
item.bone_name = psa_bone.name
|
item.bone_name = psa_bone.name
|
||||||
except IOError as e:
|
except IOError as e:
|
||||||
print('ERROR READING FILE')
|
|
||||||
print(e)
|
|
||||||
# TODO: set an error somewhere so the user knows the PSA could not be read.
|
# TODO: set an error somewhere so the user knows the PSA could not be read.
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
def on_armature_object_updated(property, context):
|
|
||||||
# TODO: ensure that there are matching bones between the two rigs.
|
|
||||||
property_group = context.scene.psa_import
|
|
||||||
armature_object = property_group.armature_object
|
|
||||||
if armature_object is not None:
|
|
||||||
armature_bone_names = set(map(lambda bone: bone.name, armature_object.data.bones))
|
|
||||||
psa_bone_names = set(map(lambda psa_bone: psa_bone.name, property_group.psa_bones))
|
|
||||||
|
|
||||||
|
|
||||||
class PsaImportPropertyGroup(bpy.types.PropertyGroup):
|
class PsaImportPropertyGroup(bpy.types.PropertyGroup):
|
||||||
psa_file_path: StringProperty(default='', update=on_psa_file_path_updated, name='PSA File Path')
|
psa_file_path: StringProperty(default='', update=on_psa_file_path_updated, name='PSA File Path')
|
||||||
psa_bones: CollectionProperty(type=PsaImportPsaBoneItem)
|
psa_bones: CollectionProperty(type=PsaImportPsaBoneItem)
|
||||||
# armature_object: PointerProperty(name='Object', type=bpy.types.Object, update=on_armature_object_updated)
|
|
||||||
action_list: CollectionProperty(type=PsaImportActionListItem)
|
action_list: CollectionProperty(type=PsaImportActionListItem)
|
||||||
action_list_index: IntProperty(name='', default=0)
|
action_list_index: IntProperty(name='', default=0)
|
||||||
action_filter_name: StringProperty(default='')
|
action_filter_name: StringProperty(default='')
|
||||||
|
should_clean_keys: BoolProperty(default=True, name='Clean Keyframes', description='Exclude unnecessary keyframes from being written to the actions.')
|
||||||
|
should_use_fake_user: BoolProperty(default=True, name='Fake User', description='Assign each imported action a fake user so that the data block is saved even it has no users.')
|
||||||
|
should_stash: BoolProperty(default=False, name='Stash', description='Stash each imported action as a strip on a new non-contributing NLA track')
|
||||||
|
|
||||||
|
|
||||||
class PSA_UL_ImportActionList(UIList):
|
class PSA_UL_ImportActionList(UIList):
|
||||||
@@ -314,12 +334,10 @@ class PSA_PT_ImportPanel(Panel):
|
|||||||
row = layout.row()
|
row = layout.row()
|
||||||
row.prop(property_group, 'psa_file_path', text='')
|
row.prop(property_group, 'psa_file_path', text='')
|
||||||
row.enabled = False
|
row.enabled = False
|
||||||
# row.enabled = property_group.psa_file_path is not ''
|
|
||||||
row = layout.row()
|
row = layout.row()
|
||||||
|
|
||||||
layout.separator()
|
|
||||||
|
|
||||||
row.operator('psa_import.select_file', text='Select PSA File', icon='FILEBROWSER')
|
row.operator('psa_import.select_file', text='Select PSA File', icon='FILEBROWSER')
|
||||||
|
|
||||||
if len(property_group.action_list) > 0:
|
if len(property_group.action_list) > 0:
|
||||||
box = layout.box()
|
box = layout.box()
|
||||||
box.label(text=f'Actions ({len(property_group.action_list)})', icon='ACTION')
|
box.label(text=f'Actions ({len(property_group.action_list)})', icon='ACTION')
|
||||||
@@ -331,7 +349,13 @@ class PSA_PT_ImportPanel(Panel):
|
|||||||
row.operator('psa_import.actions_select_all', text='All')
|
row.operator('psa_import.actions_select_all', text='All')
|
||||||
row.operator('psa_import.actions_deselect_all', text='None')
|
row.operator('psa_import.actions_deselect_all', text='None')
|
||||||
|
|
||||||
layout.separator()
|
row = layout.row()
|
||||||
|
row.prop(property_group, 'should_clean_keys')
|
||||||
|
|
||||||
|
# DATA
|
||||||
|
row = layout.row()
|
||||||
|
row.prop(property_group, 'should_use_fake_user')
|
||||||
|
row.prop(property_group, 'should_stash')
|
||||||
|
|
||||||
layout.operator('psa_import.import', text=f'Import')
|
layout.operator('psa_import.import', text=f'Import')
|
||||||
|
|
||||||
@@ -370,7 +394,12 @@ class PsaImportOperator(Operator):
|
|||||||
property_group = context.scene.psa_import
|
property_group = context.scene.psa_import
|
||||||
psa_reader = PsaReader(property_group.psa_file_path)
|
psa_reader = PsaReader(property_group.psa_file_path)
|
||||||
sequence_names = [x.action_name for x in property_group.action_list if x.is_selected]
|
sequence_names = [x.action_name for x in property_group.action_list if x.is_selected]
|
||||||
PsaImporter().import_psa(psa_reader, sequence_names, context.view_layer.objects.active)
|
options = PsaImportOptions()
|
||||||
|
options.sequence_names = sequence_names
|
||||||
|
options.should_clean_keys = property_group.should_clean_keys
|
||||||
|
options.should_use_fake_user = property_group.should_use_fake_user
|
||||||
|
options.should_stash = property_group.should_stash
|
||||||
|
PsaImporter().import_psa(psa_reader, context.view_layer.objects.active, options)
|
||||||
self.report({'INFO'}, f'Imported {len(sequence_names)} action(s)')
|
self.report({'INFO'}, f'Imported {len(sequence_names)} action(s)')
|
||||||
return {'FINISHED'}
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user