Compare commits
17 Commits
9.0.2
...
psa-collec
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4a9815edc2 | ||
|
|
d66d4499e5 | ||
|
|
41e772e63c | ||
|
|
6dfc02b262 | ||
|
|
13fed0e214 | ||
|
|
4dac4d5115 | ||
|
|
63ee31bb00 | ||
|
|
69fc702393 | ||
|
|
a8fc115b14 | ||
|
|
e50a964dd3 | ||
|
|
bc7ea6472e | ||
|
|
a5eba2b6d9 | ||
|
|
efe845bf4a | ||
|
|
12025452d0 | ||
|
|
f6625d983a | ||
|
|
0a783bb89d | ||
|
|
f3b5ac9daf |
12
AGENTS.md
Normal file
12
AGENTS.md
Normal file
@@ -0,0 +1,12 @@
|
||||
# AGENTS.md
|
||||
|
||||
This is an Blender addon for importing and exporting Unreal Engine PSK (skeletal mesh) and PSX (animation) files.
|
||||
|
||||
# PSK/PSA File Format Notes
|
||||
* PSK and PSA bone hierarchies must have a single root bone. The root bone's `parent_index` is always `0`.
|
||||
* All indices in PSK/PSX files are zero-based.
|
||||
* All string fields in PSK/PSX files use Windows-1252 encoding and are null-terminated if they do not use the full length of the field.
|
||||
* Bone transforms are in parent bone space, except for root bones, which are in world space.
|
||||
|
||||
# Naming Conventions
|
||||
* The `PSX` prefix is used when refer to concepts that are shared between PSK and PSX files.
|
||||
@@ -20,9 +20,10 @@ RUN BLENDER_EXECUTABLE=$(blender-downloader $BLENDER_VERSION --extract --remove-
|
||||
RUN pip install pytest-cov
|
||||
|
||||
# Source the environment variables and install Python dependencies
|
||||
# TODO: would be nice to have these installed in the bash script below.
|
||||
RUN . /etc/environment && \
|
||||
$BLENDER_PYTHON -m ensurepip && \
|
||||
$BLENDER_PYTHON -m pip install pytest pytest-cov psk-psa-py
|
||||
$BLENDER_PYTHON -m pip install pytest pytest-cov psk-psa-py==0.0.4
|
||||
|
||||
# Persist BLENDER_EXECUTABLE as an environment variable
|
||||
RUN echo $(cat /blender_executable_path) > /tmp/blender_executable_path_env && \
|
||||
|
||||
@@ -2,6 +2,7 @@ from bpy.app.handlers import persistent
|
||||
|
||||
from .shared import types as shared_types, helpers as shared_helpers
|
||||
from .shared import dfs as shared_dfs, ui as shared_ui
|
||||
from .shared import operators as shared_operators
|
||||
from .psk import (
|
||||
builder as psk_builder,
|
||||
importer as psk_importer,
|
||||
@@ -28,6 +29,8 @@ from .psa.export import (
|
||||
from .psa.import_ import operators as psa_import_operators
|
||||
from .psa.import_ import ui as psa_import_ui, properties as psa_import_properties
|
||||
|
||||
from .psa import file_handlers as psa_file_handlers
|
||||
|
||||
_needs_reload = 'bpy' in locals()
|
||||
|
||||
if _needs_reload:
|
||||
@@ -37,6 +40,7 @@ if _needs_reload:
|
||||
importlib.reload(shared_types)
|
||||
importlib.reload(shared_dfs)
|
||||
importlib.reload(shared_ui)
|
||||
importlib.reload(shared_operators)
|
||||
|
||||
importlib.reload(psk_builder)
|
||||
importlib.reload(psk_importer)
|
||||
@@ -56,6 +60,7 @@ if _needs_reload:
|
||||
importlib.reload(psa_import_properties)
|
||||
importlib.reload(psa_import_operators)
|
||||
importlib.reload(psa_import_ui)
|
||||
importlib.reload(psa_file_handlers)
|
||||
|
||||
import bpy
|
||||
from bpy.props import PointerProperty
|
||||
@@ -80,6 +85,7 @@ def psa_import_menu_func(self, context):
|
||||
_modules = (
|
||||
shared_types,
|
||||
shared_ui,
|
||||
shared_operators,
|
||||
psk_properties,
|
||||
psk_ui,
|
||||
psk_import_operators,
|
||||
@@ -91,7 +97,8 @@ _modules = (
|
||||
psa_export_ui,
|
||||
psa_import_properties,
|
||||
psa_import_operators,
|
||||
psa_import_ui
|
||||
psa_import_ui,
|
||||
psa_file_handlers,
|
||||
)
|
||||
|
||||
def register():
|
||||
@@ -101,21 +108,23 @@ def register():
|
||||
bpy.types.TOPBAR_MT_file_import.append(psk_import_menu_func)
|
||||
bpy.types.TOPBAR_MT_file_export.append(psa_export_menu_func)
|
||||
bpy.types.TOPBAR_MT_file_import.append(psa_import_menu_func)
|
||||
bpy.types.Material.psk = PointerProperty(type=psk_properties.PSX_PG_material, options={'HIDDEN'})
|
||||
bpy.types.Scene.psx_export = PointerProperty(type=shared_types.PSX_PG_scene_export, options={'HIDDEN'})
|
||||
bpy.types.Scene.psa_import = PointerProperty(type=psa_import_properties.PSA_PG_import, options={'HIDDEN'})
|
||||
bpy.types.Scene.psa_export = PointerProperty(type=psa_export_properties.PSA_PG_export, options={'HIDDEN'})
|
||||
bpy.types.Scene.psk_export = PointerProperty(type=psk_export_properties.PSK_PG_export, options={'HIDDEN'})
|
||||
bpy.types.Action.psa_export = PointerProperty(type=shared_types.PSX_PG_action_export, options={'HIDDEN'})
|
||||
|
||||
setattr(bpy.types.Material, 'psk', PointerProperty(type=psk_properties.PSX_PG_material, options={'HIDDEN'}))
|
||||
setattr(bpy.types.Scene, 'psx_export', PointerProperty(type=shared_types.PSX_PG_scene_export, options={'HIDDEN'}))
|
||||
setattr(bpy.types.Scene, 'psa_import', PointerProperty(type=psa_import_properties.PSA_PG_import, options={'HIDDEN'}))
|
||||
setattr(bpy.types.Scene, 'psa_export', PointerProperty(type=psa_export_properties.PSA_PG_export, options={'HIDDEN'}))
|
||||
setattr(bpy.types.Scene, 'psk_export', PointerProperty(type=psk_export_properties.PSK_PG_export, options={'HIDDEN'}))
|
||||
setattr(bpy.types.Action, 'psa_export', PointerProperty(type=shared_types.PSX_PG_action_export, options={'HIDDEN'}))
|
||||
|
||||
|
||||
def unregister():
|
||||
del bpy.types.Material.psk
|
||||
del bpy.types.Scene.psx_export
|
||||
del bpy.types.Scene.psa_import
|
||||
del bpy.types.Scene.psa_export
|
||||
del bpy.types.Scene.psk_export
|
||||
del bpy.types.Action.psa_export
|
||||
delattr(bpy.types.Material, 'psk')
|
||||
delattr(bpy.types.Scene, 'psx_export')
|
||||
delattr(bpy.types.Scene, 'psa_import')
|
||||
delattr(bpy.types.Scene, 'psa_export')
|
||||
delattr(bpy.types.Scene, 'psk_export')
|
||||
delattr(bpy.types.Action, 'psa_export')
|
||||
|
||||
bpy.types.TOPBAR_MT_file_export.remove(psk_export_menu_func)
|
||||
bpy.types.TOPBAR_MT_file_import.remove(psk_import_menu_func)
|
||||
bpy.types.TOPBAR_MT_file_export.remove(psa_export_menu_func)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
schema_version = "1.0.0"
|
||||
id = "io_scene_psk_psa"
|
||||
version = "9.0.2"
|
||||
version = "9.1.0"
|
||||
name = "Unreal PSK/PSA (.psk/.psa)"
|
||||
tagline = "Import and export PSK and PSA files used in Unreal Engine"
|
||||
maintainer = "Colin Basnett <cmbasnett@gmail.com>"
|
||||
@@ -14,7 +14,7 @@ license = [
|
||||
"SPDX:GPL-3.0-or-later",
|
||||
]
|
||||
wheels = [
|
||||
'./wheels/psk_psa_py-0.0.1-py3-none-any.whl'
|
||||
'./wheels/psk_psa_py-0.0.4-py3-none-any.whl'
|
||||
]
|
||||
|
||||
[build]
|
||||
|
||||
@@ -1,16 +1,16 @@
|
||||
from bpy.types import Action, AnimData, Context, Object, PoseBone
|
||||
|
||||
from psk_psa_py.psa.data import Psa
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
from typing import Tuple
|
||||
from mathutils import Matrix, Quaternion, Vector
|
||||
|
||||
from ..shared.helpers import PsxBoneCollection, create_psx_bones, get_coordinate_system_transform
|
||||
from ..shared.helpers import PsxBoneCollection, convert_bpy_quaternion_to_psx_quaternion, convert_vector_to_vector3, create_psx_bones, get_coordinate_system_transform
|
||||
|
||||
|
||||
class PsaBuildSequence:
|
||||
class NlaState:
|
||||
def __init__(self):
|
||||
self.action: Optional[Action] = None
|
||||
self.action: Action | None = None
|
||||
self.frame_start: int = 0
|
||||
self.frame_end: int = 0
|
||||
|
||||
@@ -22,16 +22,16 @@ class PsaBuildSequence:
|
||||
self.compression_ratio: float = 1.0
|
||||
self.key_quota: int = 0
|
||||
self.fps: float = 30.0
|
||||
self.group: Optional[str] = None
|
||||
self.group: str | None = None
|
||||
|
||||
|
||||
class PsaBuildOptions:
|
||||
def __init__(self):
|
||||
self.armature_objects: List[Object] = []
|
||||
self.animation_data: Optional[AnimData] = None
|
||||
self.sequences: List[PsaBuildSequence] = []
|
||||
self.armature_objects: list[Object] = []
|
||||
self.animation_data: AnimData | None = None
|
||||
self.sequences: list[PsaBuildSequence] = []
|
||||
self.bone_filter_mode: str = 'ALL'
|
||||
self.bone_collection_indices: List[PsxBoneCollection] = []
|
||||
self.bone_collection_indices: list[PsxBoneCollection] = []
|
||||
self.sequence_name_prefix: str = ''
|
||||
self.sequence_name_suffix: str = ''
|
||||
self.scale = 1.0
|
||||
@@ -47,30 +47,79 @@ class PsaBuildOptions:
|
||||
return 'DATA' if self.sequence_source == 'ACTIVE_ACTION' else 'OBJECT'
|
||||
|
||||
|
||||
class PsaExportBone:
|
||||
def __init__(self,
|
||||
pose_bone: PoseBone | None,
|
||||
armature_object: Object | None,
|
||||
scale: Vector):
|
||||
self.pose_bone = pose_bone
|
||||
self.armature_object = armature_object
|
||||
self.scale = scale
|
||||
|
||||
@property
|
||||
def is_armature_root_bone(self) -> bool:
|
||||
return self.pose_bone is not None and self.pose_bone.parent is None
|
||||
|
||||
@property
|
||||
def is_attached_to_armature(self) -> bool:
|
||||
return self.get_attached_armature() is not None
|
||||
|
||||
def get_attached_armature(self) -> tuple[Object, PoseBone] | None:
|
||||
if not self.is_armature_root_bone:
|
||||
return None
|
||||
assert self.armature_object is not None
|
||||
match self.armature_object.parent_type:
|
||||
case 'BONE':
|
||||
parent_bone_name = self.armature_object.parent_bone
|
||||
assert self.armature_object.parent is not None
|
||||
parent_armature_object = self.armature_object.parent
|
||||
assert parent_armature_object.pose is not None
|
||||
parent_pose_bone = parent_armature_object.pose.bones.get(parent_bone_name)
|
||||
if parent_pose_bone is None:
|
||||
return None
|
||||
return (parent_armature_object, parent_pose_bone)
|
||||
case _:
|
||||
return None
|
||||
|
||||
def get_attached_armature_transform(self) -> Matrix:
|
||||
attached_armature, attached_pose_bone = self.get_attached_armature() or (None, None)
|
||||
if attached_armature is None or attached_pose_bone is None:
|
||||
return Matrix.Identity(4)
|
||||
if attached_pose_bone.parent is not None:
|
||||
attached_bone_matrix = attached_pose_bone.parent.matrix.inverted() @ attached_pose_bone.matrix
|
||||
else:
|
||||
attached_bone_matrix = attached_armature.matrix_world @ attached_pose_bone.matrix
|
||||
return attached_bone_matrix
|
||||
|
||||
def _get_pose_bone_location_and_rotation(
|
||||
pose_bone: Optional[PoseBone],
|
||||
armature_object: Optional[Object],
|
||||
pose_bone: PoseBone,
|
||||
armature_object: Object,
|
||||
export_bone: PsaExportBone,
|
||||
export_space: str,
|
||||
scale: Vector,
|
||||
coordinate_system_transform: Matrix,
|
||||
has_false_root_bone: bool,
|
||||
) -> Tuple[Vector, Quaternion]:
|
||||
is_false_root_bone = pose_bone is None and armature_object is None
|
||||
|
||||
if is_false_root_bone:
|
||||
pose_bone_matrix = coordinate_system_transform
|
||||
elif pose_bone is not None and pose_bone.parent is not None:
|
||||
pose_bone_matrix = pose_bone.matrix
|
||||
pose_bone_parent_matrix = pose_bone.parent.matrix
|
||||
pose_bone_matrix = pose_bone_parent_matrix.inverted() @ pose_bone_matrix
|
||||
if pose_bone.parent is not None:
|
||||
pose_bone_matrix = pose_bone.parent.matrix.inverted() @ pose_bone.matrix
|
||||
else:
|
||||
# Root bone
|
||||
if has_false_root_bone:
|
||||
pose_bone_matrix = armature_object.matrix_world @ pose_bone.matrix
|
||||
else:
|
||||
# Get the bone's pose matrix and transform it into the export space.
|
||||
# In the case of an 'ARMATURE' export space, this will be the inverse of armature object's world matrix.
|
||||
# Otherwise, it will be the identity matrix.
|
||||
|
||||
if export_bone.is_attached_to_armature:
|
||||
# Get the world space matrix of both this bone and the bone that we're attached to,
|
||||
# then calculate a matrix relative to the attached bone.
|
||||
world_matrix = armature_object.matrix_world @ pose_bone.matrix
|
||||
assert export_bone.armature_object
|
||||
my_parent = export_bone.armature_object.parent
|
||||
assert my_parent
|
||||
my_parent_bone = export_bone.armature_object.parent_bone
|
||||
assert my_parent.pose
|
||||
parent_pose_bone = my_parent.pose.bones[my_parent_bone]
|
||||
parent_world_matrix = my_parent.matrix_world @ parent_pose_bone.matrix
|
||||
pose_bone_matrix = parent_world_matrix.inverted() @ world_matrix
|
||||
else:
|
||||
match export_space:
|
||||
case 'ARMATURE':
|
||||
pose_bone_matrix = pose_bone.matrix
|
||||
@@ -80,7 +129,6 @@ def _get_pose_bone_location_and_rotation(
|
||||
pose_bone_matrix = Matrix.Identity(4)
|
||||
case _:
|
||||
assert False, f'Invalid export space: {export_space}'
|
||||
|
||||
# The root bone is the only bone that should be transformed by the coordinate system transform, since all
|
||||
# other bones are relative to their parent bones.
|
||||
pose_bone_matrix = coordinate_system_transform @ pose_bone_matrix
|
||||
@@ -88,16 +136,10 @@ def _get_pose_bone_location_and_rotation(
|
||||
location = pose_bone_matrix.to_translation()
|
||||
rotation = pose_bone_matrix.to_quaternion().normalized()
|
||||
|
||||
if pose_bone.parent is not None:
|
||||
# Don't apply scale to the root bone of armatures if we have a false root.
|
||||
if not has_false_root_bone or (pose_bone is None or pose_bone.parent is not None):
|
||||
# TODO: probably remove this?
|
||||
location *= scale
|
||||
|
||||
if has_false_root_bone:
|
||||
is_child_bone = not is_false_root_bone
|
||||
else:
|
||||
is_child_bone = pose_bone.parent is not None
|
||||
|
||||
if is_child_bone:
|
||||
rotation.conjugate()
|
||||
|
||||
return location, rotation
|
||||
@@ -132,7 +174,7 @@ def build_psa(context: Context, options: PsaBuildOptions) -> Psa:
|
||||
# Build list of PSA bones.
|
||||
# Note that the PSA bones are just here to validate the hierarchy.
|
||||
# The bind pose information is not used by the engine.
|
||||
psa.bones = [psx_bone for psx_bone, _ in psx_bone_create_result.bones]
|
||||
psa.bones = [bone.psx_bone for bone in psx_bone_create_result.bones]
|
||||
|
||||
# No bones are going to be exported.
|
||||
if len(psa.bones) == 0:
|
||||
@@ -144,7 +186,7 @@ def build_psa(context: Context, options: PsaBuildOptions) -> Psa:
|
||||
export_sequence.name = export_sequence.name.strip()
|
||||
|
||||
# Save each armature object's current action and frame so that we can restore the state once we are done.
|
||||
saved_armature_object_actions = {o: o.animation_data.action for o in options.armature_objects}
|
||||
saved_armature_object_actions = {o: (o.animation_data.action if o.animation_data else None) for o in options.armature_objects}
|
||||
saved_frame_current = context.scene.frame_current
|
||||
|
||||
# Now build the PSA sequences.
|
||||
@@ -197,29 +239,20 @@ def build_psa(context: Context, options: PsaBuildOptions) -> Psa:
|
||||
|
||||
# Link the action to the animation data and update view layer.
|
||||
for armature_object in options.armature_objects:
|
||||
if armature_object.animation_data:
|
||||
armature_object.animation_data.action = export_sequence.nla_state.action
|
||||
|
||||
assert context.view_layer
|
||||
context.view_layer.update()
|
||||
|
||||
def add_key(location: Vector, rotation: Quaternion):
|
||||
key = Psa.Key()
|
||||
key.location.x = location.x
|
||||
key.location.y = location.y
|
||||
key.location.z = location.z
|
||||
key.rotation.x = rotation.x
|
||||
key.rotation.y = rotation.y
|
||||
key.rotation.z = rotation.z
|
||||
key.rotation.w = rotation.w
|
||||
key.location = convert_vector_to_vector3(location)
|
||||
key.rotation = convert_bpy_quaternion_to_psx_quaternion(rotation)
|
||||
key.time = 1.0 / psa_sequence.fps
|
||||
psa.keys.append(key)
|
||||
|
||||
class PsaExportBone:
|
||||
def __init__(self, pose_bone: Optional[PoseBone], armature_object: Optional[Object], scale: Vector):
|
||||
self.pose_bone = pose_bone
|
||||
self.armature_object = armature_object
|
||||
self.scale = scale
|
||||
|
||||
armature_scales: Dict[Object, Vector] = {}
|
||||
armature_scales: dict[Object, Vector] = {}
|
||||
|
||||
# Extract the scale from the world matrix of the evaluated armature object.
|
||||
for armature_object in options.armature_objects:
|
||||
@@ -231,26 +264,27 @@ def build_psa(context: Context, options: PsaBuildOptions) -> Psa:
|
||||
# Create a list of export pose bones, in the same order as the bones as they appear in the armature.
|
||||
# The object contains the pose bone, the armature object, and a pre-calculated scaling value to apply to the
|
||||
# locations.
|
||||
export_bones: List[PsaExportBone] = []
|
||||
export_bones: list[PsaExportBone] = []
|
||||
|
||||
for psx_bone, armature_object in psx_bone_create_result.bones:
|
||||
if armature_object is None:
|
||||
export_bones.append(PsaExportBone(None, None, Vector((1.0, 1.0, 1.0))))
|
||||
for bone in psx_bone_create_result.bones:
|
||||
if bone.armature_object is None:
|
||||
export_bone = PsaExportBone(None, None, Vector((1.0, 1.0, 1.0)))
|
||||
export_bones.append(export_bone)
|
||||
continue
|
||||
|
||||
assert armature_object.pose
|
||||
pose_bone = armature_object.pose.bones[psx_bone.name.decode('windows-1252')]
|
||||
assert bone.armature_object.pose
|
||||
pose_bone = bone.armature_object.pose.bones[bone.psx_bone.name.decode('windows-1252')]
|
||||
|
||||
export_bones.append(PsaExportBone(pose_bone, armature_object, armature_scales[armature_object]))
|
||||
export_bones.append(PsaExportBone(pose_bone, bone.armature_object, armature_scales[bone.armature_object]))
|
||||
|
||||
match options.sampling_mode:
|
||||
case 'INTERPOLATED':
|
||||
# Used as a store for the last frame's pose bone locations and rotations.
|
||||
last_frame: Optional[int] = None
|
||||
last_frame_bone_poses: List[Tuple[Vector, Quaternion]] = []
|
||||
last_frame: int | None = None
|
||||
last_frame_bone_poses: list[tuple[Vector, Quaternion]] = []
|
||||
|
||||
next_frame: Optional[int] = None
|
||||
next_frame_bone_poses: List[Tuple[Vector, Quaternion]] = []
|
||||
next_frame: int | None = None
|
||||
next_frame_bone_poses: list[tuple[Vector, Quaternion]] = []
|
||||
|
||||
for _ in range(frame_count):
|
||||
if last_frame is None or last_frame != int(frame):
|
||||
@@ -269,10 +303,10 @@ def build_psa(context: Context, options: PsaBuildOptions) -> Psa:
|
||||
location, rotation = _get_pose_bone_location_and_rotation(
|
||||
export_bone.pose_bone,
|
||||
export_bone.armature_object,
|
||||
export_bone,
|
||||
options.export_space,
|
||||
export_bone.scale,
|
||||
coordinate_system_transform=coordinate_system_transform,
|
||||
has_false_root_bone=psx_bone_create_result.has_false_root_bone,
|
||||
coordinate_system_transform=coordinate_system_transform
|
||||
)
|
||||
last_frame_bone_poses.append((location, rotation))
|
||||
|
||||
@@ -292,10 +326,10 @@ def build_psa(context: Context, options: PsaBuildOptions) -> Psa:
|
||||
location, rotation = _get_pose_bone_location_and_rotation(
|
||||
pose_bone=export_bone.pose_bone,
|
||||
armature_object=export_bone.armature_object,
|
||||
export_bone=export_bone,
|
||||
export_space=options.export_space,
|
||||
scale=export_bone.scale,
|
||||
coordinate_system_transform=coordinate_system_transform,
|
||||
has_false_root_bone=psx_bone_create_result.has_false_root_bone,
|
||||
)
|
||||
next_frame_bone_poses.append((location, rotation))
|
||||
|
||||
@@ -319,10 +353,10 @@ def build_psa(context: Context, options: PsaBuildOptions) -> Psa:
|
||||
location, rotation = _get_pose_bone_location_and_rotation(
|
||||
pose_bone=export_bone.pose_bone,
|
||||
armature_object=export_bone.armature_object,
|
||||
export_bone=export_bone,
|
||||
export_space=options.export_space,
|
||||
scale=export_bone.scale,
|
||||
coordinate_system_transform=coordinate_system_transform,
|
||||
has_false_root_bone=psx_bone_create_result.has_false_root_bone,
|
||||
)
|
||||
add_key(location, rotation)
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import re
|
||||
from configparser import ConfigParser
|
||||
from typing import Dict, List
|
||||
|
||||
REMOVE_TRACK_LOCATION = (1 << 0)
|
||||
REMOVE_TRACK_ROTATION = (1 << 1)
|
||||
@@ -8,7 +7,7 @@ REMOVE_TRACK_ROTATION = (1 << 1)
|
||||
|
||||
class PsaConfig:
|
||||
def __init__(self):
|
||||
self.sequence_bone_flags: Dict[str, Dict[int, int]] = dict()
|
||||
self.sequence_bone_flags: dict[str, dict[int, int]] = dict()
|
||||
|
||||
|
||||
def _load_config_file(file_path: str) -> ConfigParser:
|
||||
@@ -48,7 +47,7 @@ def _get_bone_flags_from_value(value: str) -> int:
|
||||
return 0
|
||||
|
||||
|
||||
def read_psa_config(psa_sequence_names: List[str], file_path: str) -> PsaConfig:
|
||||
def read_psa_config(psa_sequence_names: list[str], file_path: str) -> PsaConfig:
|
||||
psa_config = PsaConfig()
|
||||
|
||||
config = _load_config_file(file_path)
|
||||
|
||||
@@ -1,23 +1,27 @@
|
||||
from abc import abstractmethod
|
||||
from collections import Counter
|
||||
from typing import List, Iterable, Dict, Tuple, cast as typing_cast
|
||||
from typing import Iterable, Sequence, Tuple, cast as typing_cast
|
||||
|
||||
import bpy
|
||||
import re
|
||||
from bpy.props import StringProperty
|
||||
from bpy.types import Context, Action, Object, AnimData, TimelineMarker, Operator, Armature
|
||||
from bpy.types import Context, Action, Object, AnimData, TimelineMarker, Operator, Armature, UILayout, Scene, ActionKeyframeStrip
|
||||
from bpy_extras.io_utils import ExportHelper
|
||||
|
||||
from .properties import (
|
||||
PSA_PG_export,
|
||||
PSA_PG_export_action_list_item,
|
||||
PsaExportMixin,
|
||||
PsaExportSequenceMixin,
|
||||
filter_sequences,
|
||||
get_sequences_from_name_and_frame_range,
|
||||
)
|
||||
from .ui import PSA_UL_export_sequences
|
||||
from ..builder import build_psa, PsaBuildSequence, PsaBuildOptions
|
||||
from psk_psa_py.psa.writer import write_psa_to_file
|
||||
from ...shared.helpers import populate_bone_collection_list, get_nla_strips_in_frame_range, PsxBoneCollection
|
||||
from ...shared.helpers import get_collection_export_operator_from_context, get_collection_from_context, get_psk_input_objects_for_collection, populate_bone_collection_list, get_nla_strips_in_frame_range, PsxBoneCollection
|
||||
from ...shared.types import PSX_PG_action_export
|
||||
from ...shared.ui import draw_bone_filter_mode
|
||||
from ...shared.operators import PSK_OT_bone_collection_list_populate, PSK_OT_bone_collection_list_select_all
|
||||
|
||||
|
||||
def get_sequences_propnames_from_source(sequence_source: str) -> Tuple[str, str]:
|
||||
@@ -47,6 +51,8 @@ def is_action_for_object(obj: Object, action: Action):
|
||||
# The nesting here is absolutely bonkers.
|
||||
for layer in action.layers:
|
||||
for strip in layer.strips:
|
||||
if not isinstance(strip, ActionKeyframeStrip):
|
||||
continue
|
||||
for channelbag in strip.channelbags:
|
||||
for fcurve in channelbag.fcurves:
|
||||
match = re.match(r'pose\.bones\[\"([^\"]+)\"](\[\"([^\"]+)\"])?', fcurve.data_path)
|
||||
@@ -59,18 +65,23 @@ def is_action_for_object(obj: Object, action: Action):
|
||||
return False
|
||||
|
||||
|
||||
def update_actions_and_timeline_markers(context: Context, armature_objects: Iterable[Object]):
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
def update_actions_and_timeline_markers(context: Context, armature_objects: Sequence[Object], pg: PsaExportMixin):
|
||||
assert context.scene is not None
|
||||
|
||||
# Clear actions and markers.
|
||||
pg.action_list.clear()
|
||||
pg.marker_list.clear()
|
||||
pg.active_action_list.clear()
|
||||
|
||||
# Get animation data.
|
||||
# TODO: Not sure how to handle this with multiple armatures.
|
||||
animation_data_object = get_animation_data_object(context)
|
||||
animation_data = animation_data_object.animation_data if animation_data_object else None
|
||||
# TODO: this is cleared in the callback, although this should probably be changed.
|
||||
# pg.nla_strip_list.clear()
|
||||
|
||||
assert len(armature_objects) >= 0, 'Must have at least one armature object'
|
||||
|
||||
# TODO: for now, use the first armature object's animation data.
|
||||
# animation_data_object = get_animation_data_object(context, pg)
|
||||
armature_object = armature_objects[0]
|
||||
animation_data = armature_object.animation_data if armature_object else None
|
||||
|
||||
if animation_data is None:
|
||||
return
|
||||
@@ -83,7 +94,7 @@ def update_actions_and_timeline_markers(context: Context, armature_objects: Iter
|
||||
|
||||
for (name, frame_start, frame_end) in get_sequences_from_action(action):
|
||||
item = pg.action_list.add()
|
||||
item.action = action
|
||||
item.action_name = action.name
|
||||
item.name = name
|
||||
item.is_selected = False
|
||||
item.is_pose_marker = False
|
||||
@@ -93,12 +104,10 @@ def update_actions_and_timeline_markers(context: Context, armature_objects: Iter
|
||||
# Pose markers are not guaranteed to be in frame-order, so make sure that they are.
|
||||
pose_markers = sorted(action.pose_markers, key=lambda x: x.frame)
|
||||
for pose_marker_index, pose_marker in enumerate(pose_markers):
|
||||
if pose_marker.name.strip() == '' or pose_marker.name.startswith('#'):
|
||||
continue
|
||||
sequences = get_sequences_from_action_pose_markers(action, pose_markers, pose_marker, pose_marker_index)
|
||||
for (name, frame_start, frame_end) in sequences:
|
||||
item = pg.action_list.add()
|
||||
item.action = action
|
||||
item.action_name = action.name
|
||||
item.name = name
|
||||
item.is_selected = False
|
||||
item.is_pose_marker = True
|
||||
@@ -107,7 +116,7 @@ def update_actions_and_timeline_markers(context: Context, armature_objects: Iter
|
||||
|
||||
# Populate timeline markers list.
|
||||
marker_names = [x.name for x in context.scene.timeline_markers]
|
||||
sequence_frame_ranges = get_timeline_marker_sequence_frame_ranges(animation_data, context, marker_names)
|
||||
sequence_frame_ranges = get_timeline_marker_sequence_frame_ranges(animation_data, context.scene, marker_names)
|
||||
|
||||
for marker_name in marker_names:
|
||||
if marker_name not in sequence_frame_ranges:
|
||||
@@ -124,30 +133,32 @@ def update_actions_and_timeline_markers(context: Context, armature_objects: Iter
|
||||
item.frame_end = frame_end
|
||||
|
||||
# Populate the active action list.
|
||||
for armature_object in context.selected_objects:
|
||||
if armature_object.type != 'ARMATURE':
|
||||
continue
|
||||
action = armature_object.animation_data.action if armature_object.animation_data else None
|
||||
if action is None:
|
||||
for armature_object in armature_objects:
|
||||
active_action = armature_object.animation_data.action if armature_object.animation_data else None
|
||||
if active_action is None:
|
||||
continue
|
||||
sequences = get_sequences_from_action(active_action)
|
||||
for (sequence_name, frame_start, frame_end) in sequences:
|
||||
# TODO: for some reason we aren't doing the sequence name parsing here.
|
||||
item = pg.active_action_list.add()
|
||||
item.name = action.name
|
||||
item.armature_object = armature_object
|
||||
item.action = action
|
||||
item.frame_start = int(item.action.frame_range[0])
|
||||
item.frame_end = int(item.action.frame_range[1])
|
||||
item.name = sequence_name
|
||||
item.armature_object_name = armature_object.name
|
||||
item.action_name = active_action.name
|
||||
item.frame_start = frame_start
|
||||
item.frame_end = frame_end
|
||||
item.is_selected = True
|
||||
|
||||
|
||||
def get_sequence_fps(context: Context, fps_source: str, fps_custom: float, actions: Iterable[Action]) -> float:
|
||||
match fps_source:
|
||||
case 'SCENE':
|
||||
assert context.scene
|
||||
return context.scene.render.fps
|
||||
case 'CUSTOM':
|
||||
return fps_custom
|
||||
case 'ACTION_METADATA':
|
||||
# Get the minimum value of action metadata FPS values.
|
||||
return min([action.psa_export.fps for action in actions])
|
||||
return min([typing_cast(PSX_PG_action_export, getattr(action, 'psa_export')).fps for action in actions])
|
||||
case _:
|
||||
assert False, f'Invalid FPS source: {fps_source}'
|
||||
|
||||
@@ -160,41 +171,25 @@ def get_sequence_compression_ratio(
|
||||
match compression_ratio_source:
|
||||
case 'ACTION_METADATA':
|
||||
# Get the minimum value of action metadata compression ratio values.
|
||||
return min(map(lambda action: action.psa_export.compression_ratio, actions))
|
||||
return min(map(lambda action: typing_cast(PSX_PG_action_export, getattr(action, 'psa_export')).compression_ratio, actions))
|
||||
case 'CUSTOM':
|
||||
return compression_ratio_custom
|
||||
case _:
|
||||
assert False, f'Invalid compression ratio source: {compression_ratio_source}'
|
||||
|
||||
|
||||
def get_animation_data_object(context: Context) -> Object:
|
||||
pg: PSA_PG_export = getattr(context.scene, 'psa_export')
|
||||
|
||||
active_object = context.view_layer.objects.active
|
||||
|
||||
if active_object is None or active_object.type != 'ARMATURE':
|
||||
raise RuntimeError('Active object must be an Armature')
|
||||
|
||||
if pg.sequence_source != 'ACTIONS' and pg.should_override_animation_data:
|
||||
animation_data_object = pg.animation_data_override
|
||||
else:
|
||||
animation_data_object = active_object
|
||||
|
||||
return animation_data_object
|
||||
|
||||
|
||||
def get_timeline_marker_sequence_frame_ranges(
|
||||
animation_data: AnimData,
|
||||
context: Context,
|
||||
marker_names: List[str],
|
||||
) -> Dict:
|
||||
scene: Scene,
|
||||
marker_names: list[str],
|
||||
) -> dict[str, tuple[int, int]]:
|
||||
# Timeline markers need to be sorted so that we can determine the sequence start and end positions.
|
||||
sequence_frame_ranges = dict()
|
||||
sorted_timeline_markers = list(sorted(context.scene.timeline_markers, key=lambda x: x.frame))
|
||||
sequence_frame_ranges: dict[str, tuple[int, int]] = dict()
|
||||
sorted_timeline_markers = list(sorted(scene.timeline_markers, key=lambda x: x.frame))
|
||||
sorted_timeline_marker_names = [x.name for x in sorted_timeline_markers]
|
||||
|
||||
for marker_name in marker_names:
|
||||
marker = context.scene.timeline_markers[marker_name]
|
||||
marker = scene.timeline_markers[marker_name]
|
||||
frame_start = marker.frame
|
||||
# Determine the final frame of the sequence based on the next marker.
|
||||
# If no subsequent marker exists, use the maximum frame_end from all NLA strips.
|
||||
@@ -247,12 +242,14 @@ def get_sequences_from_action(action: Action):
|
||||
|
||||
def get_sequences_from_action_pose_markers(
|
||||
action: Action,
|
||||
pose_markers: List[TimelineMarker],
|
||||
pose_markers: list[TimelineMarker],
|
||||
pose_marker: TimelineMarker,
|
||||
pose_marker_index: int,
|
||||
):
|
||||
frame_start = pose_marker.frame
|
||||
sequence_name = pose_marker.name
|
||||
if pose_marker.name.strip() == '' or pose_marker.name.startswith('#'):
|
||||
return
|
||||
if pose_marker.name.startswith('!'):
|
||||
# If the pose marker name starts with an exclamation mark, only export the first frame.
|
||||
frame_end = frame_start
|
||||
@@ -264,7 +261,7 @@ def get_sequences_from_action_pose_markers(
|
||||
yield from get_sequences_from_name_and_frame_range(sequence_name, frame_start, frame_end)
|
||||
|
||||
|
||||
def get_visible_sequences(pg: PSA_PG_export, sequences) -> List[PSA_PG_export_action_list_item]:
|
||||
def get_visible_sequences(pg: PsaExportMixin, sequences) -> list[PsaExportSequenceMixin]:
|
||||
visible_sequences = []
|
||||
for i, flag in enumerate(filter_sequences(pg, sequences)):
|
||||
if bool(flag & (1 << 30)):
|
||||
@@ -272,10 +269,11 @@ def get_visible_sequences(pg: PSA_PG_export, sequences) -> List[PSA_PG_export_ac
|
||||
return visible_sequences
|
||||
|
||||
|
||||
class PSA_OT_export(Operator, ExportHelper):
|
||||
bl_idname = 'psa.export'
|
||||
|
||||
class PSA_OT_export_collection(Operator, ExportHelper, PsaExportMixin):
|
||||
bl_idname = 'psa.export_collection'
|
||||
bl_label = 'Export'
|
||||
bl_options = {'INTERNAL', 'UNDO'}
|
||||
bl_options = {'INTERNAL'}
|
||||
bl_description = 'Export actions to PSA'
|
||||
filename_ext = '.psa'
|
||||
filter_glob: StringProperty(default='*.psa', options={'HIDDEN'})
|
||||
@@ -285,39 +283,122 @@ class PSA_OT_export(Operator, ExportHelper):
|
||||
maxlen=1024,
|
||||
default='')
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.armature_objects: List[Object] = []
|
||||
def execute(self, context: Context):
|
||||
# TODO: get the armature objects from the collection export operator
|
||||
collection = get_collection_from_context(context)
|
||||
if collection is None:
|
||||
self.report({'ERROR'}, 'No collection found for export')
|
||||
return {'CANCELLED'}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
try:
|
||||
cls._check_context(context)
|
||||
import_objects = get_psk_input_objects_for_collection(collection)
|
||||
except RuntimeError as e:
|
||||
cls.poll_message_set(str(e))
|
||||
return False
|
||||
return True
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||
return {'CANCELLED'}
|
||||
|
||||
def draw(self, context):
|
||||
options = create_psa_export_options(context, import_objects.armature_objects, self)
|
||||
|
||||
if len(options.sequences) == 0:
|
||||
self.report({'ERROR'}, 'No sequences were selected for export')
|
||||
return {'CANCELLED'}
|
||||
|
||||
try:
|
||||
psa = build_psa(context, options)
|
||||
self.report({'INFO'}, f'PSA export successful')
|
||||
except RuntimeError as e:
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||
return {'CANCELLED'}
|
||||
|
||||
write_psa_to_file(psa, self.filepath)
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
def draw(self, context: Context):
|
||||
layout = self.layout
|
||||
assert layout
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
|
||||
assert layout is not None
|
||||
|
||||
flow = layout.grid_flow(row_major=True)
|
||||
flow.use_property_split = True
|
||||
flow.use_property_decorate = False
|
||||
|
||||
# Sequences
|
||||
draw_sequences_panel(layout, self,
|
||||
PSA_OT_export_collection_sequences_select_all.bl_idname,
|
||||
PSA_OT_export_collection_sequences_deselect_all.bl_idname,
|
||||
)
|
||||
|
||||
# Bones
|
||||
bones_header, bones_panel = layout.panel('Bones', default_closed=False)
|
||||
bones_header.label(text='Bones', icon='BONE_DATA')
|
||||
if bones_panel:
|
||||
draw_bone_filter_mode(bones_panel, self, True)
|
||||
|
||||
if self.bone_filter_mode == 'BONE_COLLECTIONS':
|
||||
row = bones_panel.row()
|
||||
rows = max(3, min(len(self.bone_collection_list), 10))
|
||||
row.template_list('PSX_UL_bone_collection_list', '', self, 'bone_collection_list', self, 'bone_collection_list_index', rows=rows)
|
||||
col = row.column(align=True)
|
||||
col.operator(PSK_OT_bone_collection_list_populate.bl_idname, text='', icon='FILE_REFRESH')
|
||||
col.separator()
|
||||
op = col.operator(PSK_OT_bone_collection_list_select_all.bl_idname, text='', icon='CHECKBOX_HLT')
|
||||
op.is_selected = True
|
||||
op = col.operator(PSK_OT_bone_collection_list_select_all.bl_idname, text='', icon='CHECKBOX_DEHLT')
|
||||
op.is_selected = False
|
||||
|
||||
advanced_bones_header, advanced_bones_panel = bones_panel.panel('Advanced', default_closed=True)
|
||||
advanced_bones_header.label(text='Advanced')
|
||||
if advanced_bones_panel:
|
||||
flow = advanced_bones_panel.grid_flow(row_major=True)
|
||||
flow.use_property_split = True
|
||||
flow.use_property_decorate = False
|
||||
flow.prop(self, 'root_bone_name')
|
||||
|
||||
# Transform
|
||||
transform_header, transform_panel = layout.panel('Transform', default_closed=False)
|
||||
transform_header.label(text='Transform', icon='DRIVER_TRANSFORM')
|
||||
if transform_panel:
|
||||
flow = transform_panel.grid_flow(row_major=True)
|
||||
flow.use_property_split = True
|
||||
flow.use_property_decorate = False
|
||||
flow.prop(self, 'export_space')
|
||||
flow.prop(self, 'transform_source')
|
||||
|
||||
flow = transform_panel.grid_flow(row_major=True)
|
||||
flow.use_property_split = True
|
||||
flow.use_property_decorate = False
|
||||
|
||||
match self.transform_source:
|
||||
case 'SCENE':
|
||||
transform_source = getattr(context.scene, 'psx_export')
|
||||
flow.enabled = False
|
||||
case 'CUSTOM':
|
||||
transform_source = self
|
||||
case _:
|
||||
assert False, f'Invalid transform source: {self.transform_source}'
|
||||
|
||||
flow.prop(transform_source, 'scale')
|
||||
flow.prop(transform_source, 'forward_axis')
|
||||
flow.prop(transform_source, 'up_axis')
|
||||
|
||||
|
||||
def draw_sequences_panel(
|
||||
layout: UILayout,
|
||||
pg: PsaExportMixin,
|
||||
sequences_select_all_operator_idname: str,
|
||||
sequences_deselect_all_operator_idname: str,
|
||||
):
|
||||
sequences_header, sequences_panel = layout.panel('Sequences', default_closed=False)
|
||||
sequences_header.label(text='Sequences', icon='ACTION')
|
||||
|
||||
if sequences_panel:
|
||||
sequences_panel.operator(PSA_OT_export_collection_populate_sequences.bl_idname, text='Refresh', icon='FILE_REFRESH')
|
||||
|
||||
flow = sequences_panel.grid_flow()
|
||||
flow.use_property_split = True
|
||||
flow.use_property_decorate = False
|
||||
flow.prop(pg, 'sequence_source', text='Source')
|
||||
|
||||
if pg.sequence_source in {'TIMELINE_MARKERS', 'NLA_TRACK_STRIPS'}:
|
||||
# ANIMDATA SOURCE
|
||||
flow.prop(pg, 'should_override_animation_data')
|
||||
if pg.should_override_animation_data:
|
||||
flow.prop(pg, 'animation_data_override', text=' ')
|
||||
|
||||
if pg.sequence_source == 'NLA_TRACK_STRIPS':
|
||||
flow = sequences_panel.grid_flow()
|
||||
flow.use_property_split = True
|
||||
@@ -327,8 +408,8 @@ class PSA_OT_export(Operator, ExportHelper):
|
||||
# SELECT ALL/NONE
|
||||
row = sequences_panel.row(align=True)
|
||||
row.label(text='Select')
|
||||
row.operator(PSA_OT_export_actions_select_all.bl_idname, text='All', icon='CHECKBOX_HLT')
|
||||
row.operator(PSA_OT_export_actions_deselect_all.bl_idname, text='None', icon='CHECKBOX_DEHLT')
|
||||
row.operator(sequences_select_all_operator_idname, text='All', icon='CHECKBOX_HLT')
|
||||
row.operator(sequences_deselect_all_operator_idname, text='None', icon='CHECKBOX_DEHLT')
|
||||
|
||||
propname, active_propname = get_sequences_propnames_from_source(pg.sequence_source)
|
||||
sequences_panel.template_list(PSA_UL_export_sequences.bl_idname, '', pg, propname, pg, active_propname,
|
||||
@@ -386,6 +467,146 @@ class PSA_OT_export(Operator, ExportHelper):
|
||||
if pg.compression_ratio_source == 'CUSTOM':
|
||||
col.prop(pg, 'compression_ratio_custom', text='')
|
||||
|
||||
|
||||
def create_psa_export_options(context: Context, armature_objects: Sequence[Object], pg: PsaExportMixin) -> PsaBuildOptions:
|
||||
if len(armature_objects) == 0:
|
||||
raise RuntimeError(f'No armatures')
|
||||
|
||||
animation_data = armature_objects[0].animation_data
|
||||
export_sequences: list[PsaBuildSequence] = []
|
||||
|
||||
# TODO: this needs to be changed so that we iterate over all of the armature objects?
|
||||
# do we need to check for primary key? (data vs. object?)
|
||||
|
||||
def get_export_sequence_group(group_source: str, group_custom: str | None, action: Action | None) -> str | None:
|
||||
match group_source:
|
||||
case 'ACTIONS':
|
||||
if action is None:
|
||||
return None
|
||||
action_psa_export = typing_cast(PSX_PG_action_export, getattr(action, 'psa_export'))
|
||||
return action_psa_export.group
|
||||
case 'CUSTOM':
|
||||
return group_custom
|
||||
case _:
|
||||
return None
|
||||
|
||||
match pg.sequence_source:
|
||||
case 'ACTIONS':
|
||||
for action_item in filter(lambda x: x.is_selected, pg.action_list):
|
||||
if action_item.action is None:
|
||||
continue
|
||||
if len(action_item.action.layers) == 0:
|
||||
continue
|
||||
export_sequence = PsaBuildSequence(context.active_object, animation_data)
|
||||
export_sequence.name = action_item.name
|
||||
export_sequence.group = get_export_sequence_group(pg.group_source, pg.group_custom, action_item.action)
|
||||
export_sequence.nla_state.action = action_item.action
|
||||
export_sequence.nla_state.frame_start = action_item.frame_start
|
||||
export_sequence.nla_state.frame_end = action_item.frame_end
|
||||
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [action_item.action])
|
||||
export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, [action_item.action])
|
||||
export_sequence.key_quota = action_item.action.psa_export.key_quota
|
||||
export_sequences.append(export_sequence)
|
||||
case 'TIMELINE_MARKERS':
|
||||
for marker_item in filter(lambda x: x.is_selected, pg.marker_list):
|
||||
nla_strips_actions: list[Action] = []
|
||||
for nla_strip in get_nla_strips_in_frame_range(animation_data, marker_item.frame_start, marker_item.frame_end):
|
||||
if nla_strip.action:
|
||||
nla_strips_actions.append(nla_strip.action)
|
||||
export_sequence = PsaBuildSequence(context.active_object, animation_data)
|
||||
export_sequence.name = marker_item.name
|
||||
export_sequence.group = get_export_sequence_group(pg.group_source, pg.group_custom, next(iter(nla_strips_actions), None))
|
||||
export_sequence.nla_state.frame_start = marker_item.frame_start
|
||||
export_sequence.nla_state.frame_end = marker_item.frame_end
|
||||
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, nla_strips_actions)
|
||||
export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, nla_strips_actions)
|
||||
export_sequences.append(export_sequence)
|
||||
case 'NLA_TRACK_STRIPS':
|
||||
for nla_strip_item in filter(lambda x: x.is_selected, pg.nla_strip_list):
|
||||
if nla_strip_item.action is None:
|
||||
continue
|
||||
export_sequence = PsaBuildSequence(context.active_object, animation_data)
|
||||
export_sequence.name = nla_strip_item.name
|
||||
export_sequence.group = get_export_sequence_group(pg.group_source, pg.group_custom, nla_strip_item.action)
|
||||
export_sequence.nla_state.frame_start = nla_strip_item.frame_start
|
||||
export_sequence.nla_state.frame_end = nla_strip_item.frame_end
|
||||
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [nla_strip_item.action])
|
||||
export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, [nla_strip_item.action])
|
||||
export_sequence.key_quota = nla_strip_item.action.psa_export.key_quota
|
||||
export_sequences.append(export_sequence)
|
||||
case 'ACTIVE_ACTION':
|
||||
for active_action_item in filter(lambda x: x.is_selected, pg.active_action_list):
|
||||
export_sequence = PsaBuildSequence(active_action_item.armature_object, active_action_item.armature_object.animation_data)
|
||||
action = active_action_item.action
|
||||
if action is None:
|
||||
continue
|
||||
export_sequence.name = action.name
|
||||
export_sequence.group = get_export_sequence_group(pg.group_source, pg.group_custom, action)
|
||||
export_sequence.nla_state.action = action
|
||||
export_sequence.nla_state.frame_start = int(action.frame_range[0])
|
||||
export_sequence.nla_state.frame_end = int(action.frame_range[1])
|
||||
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [action])
|
||||
export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, [action])
|
||||
export_sequence.key_quota = action.psa_export.key_quota
|
||||
export_sequences.append(export_sequence)
|
||||
case _:
|
||||
assert False, f'Invalid sequence source: {pg.sequence_source}'
|
||||
|
||||
options = PsaBuildOptions()
|
||||
options.armature_objects = list(armature_objects)
|
||||
options.animation_data = animation_data
|
||||
options.sequences = export_sequences
|
||||
options.bone_filter_mode = pg.bone_filter_mode
|
||||
options.bone_collection_indices = [PsxBoneCollection(x.armature_object_name, x.armature_data_name, x.index) for x in pg.bone_collection_list if x.is_selected]
|
||||
options.sequence_name_prefix = pg.sequence_name_prefix
|
||||
options.sequence_name_suffix = pg.sequence_name_suffix
|
||||
options.sampling_mode = pg.sampling_mode
|
||||
options.export_space = pg.export_space
|
||||
options.scale = pg.scale
|
||||
options.forward_axis = pg.forward_axis
|
||||
options.up_axis = pg.up_axis
|
||||
options.root_bone_name = pg.root_bone_name
|
||||
options.sequence_source = pg.sequence_source
|
||||
|
||||
return options
|
||||
|
||||
|
||||
class PSA_OT_export(Operator, ExportHelper):
|
||||
bl_idname = 'psa.export'
|
||||
bl_label = 'Export'
|
||||
bl_options = {'INTERNAL'}
|
||||
bl_description = 'Export actions to PSA'
|
||||
filename_ext = '.psa'
|
||||
filter_glob: StringProperty(default='*.psa', options={'HIDDEN'})
|
||||
filepath: StringProperty(
|
||||
name='File Path',
|
||||
description='File path used for exporting the PSA file',
|
||||
maxlen=1024,
|
||||
default='')
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.armature_objects: list[Object] = []
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
try:
|
||||
cls._check_context(context)
|
||||
except RuntimeError as e:
|
||||
cls.poll_message_set(str(e))
|
||||
return False
|
||||
return True
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
assert layout
|
||||
pg = typing_cast(PSA_PG_export, getattr(context.scene, 'psa_export'))
|
||||
|
||||
# SEQUENCES
|
||||
draw_sequences_panel(layout, pg,
|
||||
PSA_OT_export_sequences_select_all.bl_idname,
|
||||
PSA_OT_export_sequences_deselect_all.bl_idname)
|
||||
|
||||
# BONES
|
||||
bones_header, bones_panel = layout.panel('Bones', default_closed=False)
|
||||
bones_header.label(text='Bones', icon='BONE_DATA')
|
||||
@@ -405,7 +626,7 @@ class PSA_OT_export(Operator, ExportHelper):
|
||||
rows=rows
|
||||
)
|
||||
|
||||
bones_advanced_header, bones_advanced_panel = layout.panel('Bones Advanced', default_closed=True)
|
||||
bones_advanced_header, bones_advanced_panel = bones_panel.panel('Bones Advanced', default_closed=True)
|
||||
bones_advanced_header.label(text='Advanced')
|
||||
if bones_advanced_panel:
|
||||
flow = bones_advanced_panel.grid_flow()
|
||||
@@ -415,7 +636,7 @@ class PSA_OT_export(Operator, ExportHelper):
|
||||
|
||||
# TRANSFORM
|
||||
transform_header, transform_panel = layout.panel('Advanced', default_closed=False)
|
||||
transform_header.label(text='Transform')
|
||||
transform_header.label(text='Transform', icon='DRIVER_TRANSFORM')
|
||||
|
||||
if transform_panel:
|
||||
flow = transform_panel.grid_flow(row_major=True)
|
||||
@@ -437,8 +658,7 @@ class PSA_OT_export(Operator, ExportHelper):
|
||||
if context.scene.is_nla_tweakmode:
|
||||
raise RuntimeError('Cannot export PSA while in NLA tweak mode')
|
||||
|
||||
|
||||
def invoke(self, context, _event):
|
||||
def invoke(self, context, event):
|
||||
try:
|
||||
self._check_context(context)
|
||||
except RuntimeError as e:
|
||||
@@ -447,6 +667,8 @@ class PSA_OT_export(Operator, ExportHelper):
|
||||
|
||||
pg: PSA_PG_export = getattr(context.scene, 'psa_export')
|
||||
|
||||
assert context.view_layer is not None
|
||||
|
||||
self.armature_objects = [x for x in context.view_layer.objects.selected if x.type == 'ARMATURE']
|
||||
|
||||
for armature_object in self.armature_objects:
|
||||
@@ -455,117 +677,28 @@ class PSA_OT_export(Operator, ExportHelper):
|
||||
if armature_object.animation_data is None:
|
||||
armature_object.animation_data_create()
|
||||
|
||||
update_actions_and_timeline_markers(context, self.armature_objects)
|
||||
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
update_actions_and_timeline_markers(context, self.armature_objects, pg)
|
||||
populate_bone_collection_list(
|
||||
pg.bone_collection_list,
|
||||
self.armature_objects,
|
||||
primary_key='DATA' if pg.sequence_source == 'ACTIVE_ACTION' else 'OBJECT',
|
||||
)
|
||||
|
||||
if context.window_manager is not None:
|
||||
context.window_manager.fileselect_add(self)
|
||||
|
||||
return {'RUNNING_MODAL'}
|
||||
|
||||
def execute(self, context):
|
||||
pg = typing_cast(PSA_PG_export, getattr(context.scene, 'psa_export'))
|
||||
options = create_psa_export_options(context, self.armature_objects, pg)
|
||||
|
||||
# Populate the export sequence list.
|
||||
animation_data_object = get_animation_data_object(context)
|
||||
animation_data = animation_data_object.animation_data
|
||||
|
||||
if animation_data is None:
|
||||
raise RuntimeError(f'No animation data for object \'{animation_data_object.name}\'')
|
||||
|
||||
if context.active_object is None:
|
||||
raise RuntimeError('No active object')
|
||||
|
||||
export_sequences: List[PsaBuildSequence] = []
|
||||
|
||||
def get_export_sequence_group(group_source: str, group_custom: str | None, action: Action | None) -> str | None:
|
||||
match group_source:
|
||||
case 'ACTIONS':
|
||||
return action.psa_export.group if action else None
|
||||
case 'CUSTOM':
|
||||
return group_custom
|
||||
case _:
|
||||
return None
|
||||
|
||||
match pg.sequence_source:
|
||||
case 'ACTIONS':
|
||||
for action_item in filter(lambda x: x.is_selected, pg.action_list):
|
||||
if len(action_item.action.layers) == 0:
|
||||
continue
|
||||
export_sequence = PsaBuildSequence(context.active_object, animation_data)
|
||||
export_sequence.name = action_item.name
|
||||
export_sequence.group = get_export_sequence_group(pg.group_source, pg.group_custom, action_item.action)
|
||||
export_sequence.nla_state.action = action_item.action
|
||||
export_sequence.nla_state.frame_start = action_item.frame_start
|
||||
export_sequence.nla_state.frame_end = action_item.frame_end
|
||||
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [action_item.action])
|
||||
export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, [action_item.action])
|
||||
export_sequence.key_quota = action_item.action.psa_export.key_quota
|
||||
export_sequences.append(export_sequence)
|
||||
case 'TIMELINE_MARKERS':
|
||||
for marker_item in filter(lambda x: x.is_selected, pg.marker_list):
|
||||
nla_strips_actions: List[Action] = []
|
||||
for nla_strip in get_nla_strips_in_frame_range(animation_data, marker_item.frame_start, marker_item.frame_end):
|
||||
if nla_strip.action:
|
||||
nla_strips_actions.append(nla_strip.action)
|
||||
export_sequence = PsaBuildSequence(context.active_object, animation_data)
|
||||
export_sequence.name = marker_item.name
|
||||
export_sequence.group = get_export_sequence_group(pg.group_source, pg.group_custom, next(iter(nla_strips_actions), None))
|
||||
export_sequence.nla_state.frame_start = marker_item.frame_start
|
||||
export_sequence.nla_state.frame_end = marker_item.frame_end
|
||||
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, nla_strips_actions)
|
||||
export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, nla_strips_actions)
|
||||
export_sequences.append(export_sequence)
|
||||
case 'NLA_TRACK_STRIPS':
|
||||
for nla_strip_item in filter(lambda x: x.is_selected, pg.nla_strip_list):
|
||||
export_sequence = PsaBuildSequence(context.active_object, animation_data)
|
||||
export_sequence.name = nla_strip_item.name
|
||||
export_sequence.group = get_export_sequence_group(pg.group_source, pg.group_custom, nla_strip_item.action)
|
||||
export_sequence.nla_state.frame_start = nla_strip_item.frame_start
|
||||
export_sequence.nla_state.frame_end = nla_strip_item.frame_end
|
||||
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [nla_strip_item.action])
|
||||
export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, [nla_strip_item.action])
|
||||
export_sequence.key_quota = nla_strip_item.action.psa_export.key_quota
|
||||
export_sequences.append(export_sequence)
|
||||
case 'ACTIVE_ACTION':
|
||||
for active_action_item in filter(lambda x: x.is_selected, pg.active_action_list):
|
||||
export_sequence = PsaBuildSequence(active_action_item.armature_object, active_action_item.armature_object.animation_data)
|
||||
action = active_action_item.action
|
||||
export_sequence.name = action.name
|
||||
export_sequence.group = get_export_sequence_group(pg.group_source, pg.group_custom, action)
|
||||
export_sequence.nla_state.action = action
|
||||
export_sequence.nla_state.frame_start = int(action.frame_range[0])
|
||||
export_sequence.nla_state.frame_end = int(action.frame_range[1])
|
||||
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [action])
|
||||
export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, [action])
|
||||
export_sequence.key_quota = action.psa_export.key_quota
|
||||
export_sequences.append(export_sequence)
|
||||
case _:
|
||||
assert False, f'Invalid sequence source: {pg.sequence_source}'
|
||||
|
||||
if len(export_sequences) == 0:
|
||||
if len(options.sequences) == 0:
|
||||
self.report({'ERROR'}, 'No sequences were selected for export')
|
||||
return {'CANCELLED'}
|
||||
|
||||
options = PsaBuildOptions()
|
||||
options.armature_objects = self.armature_objects
|
||||
options.animation_data = animation_data
|
||||
options.sequences = export_sequences
|
||||
options.bone_filter_mode = pg.bone_filter_mode
|
||||
options.bone_collection_indices = [PsxBoneCollection(x.armature_object_name, x.armature_data_name, x.index) for x in pg.bone_collection_list if x.is_selected]
|
||||
options.sequence_name_prefix = pg.sequence_name_prefix
|
||||
options.sequence_name_suffix = pg.sequence_name_suffix
|
||||
options.sampling_mode = pg.sampling_mode
|
||||
options.export_space = pg.export_space
|
||||
options.scale = pg.scale
|
||||
options.forward_axis = pg.forward_axis
|
||||
options.up_axis = pg.up_axis
|
||||
options.root_bone_name = pg.root_bone_name
|
||||
options.sequence_source = pg.sequence_source
|
||||
|
||||
try:
|
||||
psa = build_psa(context, options)
|
||||
self.report({'INFO'}, f'PSA export successful')
|
||||
@@ -578,15 +711,15 @@ class PSA_OT_export(Operator, ExportHelper):
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
class PSA_OT_export_actions_select_all(Operator):
|
||||
bl_idname = 'psa.export_actions_select_all'
|
||||
bl_label = 'Select All'
|
||||
bl_description = 'Select all visible sequences'
|
||||
bl_options = {'INTERNAL'}
|
||||
class PsaExportActionsSelectOperator(Operator):
|
||||
@classmethod
|
||||
@abstractmethod
|
||||
def get_psa_export(cls, context: Context) -> PsaExportMixin:
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def get_item_list(cls, context):
|
||||
pg = context.scene.psa_export
|
||||
def get_item_list(cls, context: Context):
|
||||
pg = cls.get_psa_export(context)
|
||||
match pg.sequence_source:
|
||||
case 'ACTIONS':
|
||||
return pg.action_list
|
||||
@@ -599,49 +732,40 @@ class PSA_OT_export_actions_select_all(Operator):
|
||||
case _:
|
||||
assert False, f'Invalid sequence source: {pg.sequence_source}'
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
item_list = cls.get_item_list(context)
|
||||
visible_sequences = get_visible_sequences(pg, item_list)
|
||||
has_unselected_sequences = any(map(lambda item: not item.is_selected, visible_sequences))
|
||||
return has_unselected_sequences
|
||||
|
||||
class PsaExportActionsSelectAllOperator(PsaExportActionsSelectOperator):
|
||||
def execute(self, context):
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
pg = self.__class__.get_psa_export(context)
|
||||
sequences = self.get_item_list(context)
|
||||
for sequence in get_visible_sequences(pg, sequences):
|
||||
sequence.is_selected = True
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
class PSA_OT_export_actions_deselect_all(Operator):
|
||||
bl_idname = 'psa.export_sequences_deselect_all'
|
||||
bl_label = 'Deselect All'
|
||||
bl_description = 'Deselect all visible sequences'
|
||||
class PSA_OT_export_sequences_select_all(PsaExportActionsSelectAllOperator):
|
||||
bl_idname = 'psa.export_actions_select_all'
|
||||
bl_label = 'Select All'
|
||||
bl_description = 'Select all visible sequences'
|
||||
bl_options = {'INTERNAL'}
|
||||
|
||||
@classmethod
|
||||
def get_item_list(cls, context):
|
||||
pg = context.scene.psa_export
|
||||
match pg.sequence_source:
|
||||
case 'ACTIONS':
|
||||
return pg.action_list
|
||||
case 'TIMELINE_MARKERS':
|
||||
return pg.marker_list
|
||||
case 'NLA_TRACK_STRIPS':
|
||||
return pg.nla_strip_list
|
||||
case 'ACTIVE_ACTION':
|
||||
return pg.active_action_list
|
||||
case _:
|
||||
return None
|
||||
def get_psa_export(cls, context: Context) -> PsaExportMixin:
|
||||
return typing_cast(PsaExportMixin, getattr(context.scene, 'psa_export'))
|
||||
|
||||
|
||||
class PSA_OT_export_collection_sequences_select_all(PsaExportActionsSelectAllOperator):
|
||||
bl_idname = 'psa.export_collection_sequences_select_all'
|
||||
bl_label = 'Select All'
|
||||
bl_description = 'Select all visible sequences'
|
||||
bl_options = {'INTERNAL'}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
item_list = cls.get_item_list(context)
|
||||
has_selected_items = any(map(lambda item: item.is_selected, item_list))
|
||||
return len(item_list) > 0 and has_selected_items
|
||||
def get_psa_export(cls, context: Context) -> PsaExportMixin:
|
||||
operator = get_collection_export_operator_from_context(context)
|
||||
operator = typing_cast(PsaExportMixin, operator)
|
||||
return operator
|
||||
|
||||
|
||||
class PsaExportActionsDeselectAllOperator(PsaExportActionsSelectOperator):
|
||||
def execute(self, context):
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
item_list = self.get_item_list(context)
|
||||
@@ -650,6 +774,30 @@ class PSA_OT_export_actions_deselect_all(Operator):
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
class PSA_OT_export_collection_sequences_deselect_all(PsaExportActionsDeselectAllOperator):
|
||||
bl_idname = 'psa.export_collection_sequences_deselect_all'
|
||||
bl_label = 'Deselect All'
|
||||
bl_description = 'Deselect all visible sequences'
|
||||
bl_options = {'INTERNAL'}
|
||||
|
||||
@classmethod
|
||||
def get_psa_export(cls, context: Context) -> PsaExportMixin:
|
||||
operator = get_collection_export_operator_from_context(context)
|
||||
operator = typing_cast(PsaExportMixin, operator)
|
||||
return operator
|
||||
|
||||
|
||||
class PSA_OT_export_sequences_deselect_all(PsaExportActionsDeselectAllOperator):
|
||||
bl_idname = 'psa.export_sequences_deselect_all'
|
||||
bl_label = 'Deselect All'
|
||||
bl_description = 'Deselect all visible sequences'
|
||||
bl_options = {'INTERNAL'}
|
||||
|
||||
@classmethod
|
||||
def get_psa_export(cls, context: Context) -> PsaExportMixin:
|
||||
return typing_cast(PsaExportMixin, getattr(context.scene, 'psa_export'))
|
||||
|
||||
|
||||
class PSA_OT_export_bone_collections_select_all(Operator):
|
||||
bl_idname = 'psa.export_bone_collections_select_all'
|
||||
bl_label = 'Select All'
|
||||
@@ -658,13 +806,13 @@ class PSA_OT_export_bone_collections_select_all(Operator):
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
pg = typing_cast(PSA_PG_export, getattr(context.scene, 'psa_export'))
|
||||
item_list = pg.bone_collection_list
|
||||
has_unselected_items = any(map(lambda action: not action.is_selected, item_list))
|
||||
return len(item_list) > 0 and has_unselected_items
|
||||
|
||||
def execute(self, context):
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
pg = typing_cast(PSA_PG_export, getattr(context.scene, 'psa_export'))
|
||||
for item in pg.bone_collection_list:
|
||||
item.is_selected = True
|
||||
return {'FINISHED'}
|
||||
@@ -678,24 +826,72 @@ class PSA_OT_export_bone_collections_deselect_all(Operator):
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
pg = typing_cast(PSA_PG_export, getattr(context.scene, 'psa_export'))
|
||||
item_list = pg.bone_collection_list
|
||||
has_selected_actions = any(map(lambda action: action.is_selected, item_list))
|
||||
return len(item_list) > 0 and has_selected_actions
|
||||
|
||||
def execute(self, context):
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
pg = typing_cast(PSA_PG_export, getattr(context.scene, 'psa_export'))
|
||||
for action in pg.bone_collection_list:
|
||||
action.is_selected = False
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
class PSA_OT_export_collection_populate_sequences(Operator):
|
||||
bl_idname = 'psa.export_collection_populate_sequences'
|
||||
bl_label = 'Populate Sequences'
|
||||
bl_description = 'Populate the sequences list based on the armatures in the collection'
|
||||
bl_options = {'INTERNAL'}
|
||||
|
||||
def execute(self, context: Context):
|
||||
export_operator = get_collection_export_operator_from_context(context)
|
||||
assert export_operator is not None
|
||||
export_operator = typing_cast(PSA_OT_export_collection, export_operator)
|
||||
collection = get_collection_from_context(context)
|
||||
if collection is None:
|
||||
self.report({'ERROR'}, 'No collection found in context')
|
||||
return {'CANCELLED'}
|
||||
|
||||
try:
|
||||
input_objects = get_psk_input_objects_for_collection(collection)
|
||||
except RuntimeError as e:
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||
return {'CANCELLED'}
|
||||
|
||||
# Keep track of what sequences were selected, then restore the selected status after we have updated the lists.
|
||||
def store_is_selected_for_sequence_list(sequences: Iterable[PsaExportSequenceMixin]) -> dict[int, bool]:
|
||||
return {hash(x): x.is_selected for x in sequences}
|
||||
|
||||
def restore_is_selected_for_sequence_list(sequence_list: Iterable[PsaExportSequenceMixin], is_selected_map: dict[int, bool]):
|
||||
for sequence in sequence_list:
|
||||
sequence.is_selected = is_selected_map.get(hash(sequence), False)
|
||||
|
||||
action_list_is_selected = store_is_selected_for_sequence_list(export_operator.action_list)
|
||||
markers_list_is_selected = store_is_selected_for_sequence_list(export_operator.marker_list)
|
||||
nla_strip_list_is_selected = store_is_selected_for_sequence_list(export_operator.nla_strip_list)
|
||||
active_action_list_is_selected = store_is_selected_for_sequence_list(export_operator.active_action_list)
|
||||
|
||||
update_actions_and_timeline_markers(context, input_objects.armature_objects, export_operator)
|
||||
|
||||
restore_is_selected_for_sequence_list(export_operator.action_list, action_list_is_selected)
|
||||
restore_is_selected_for_sequence_list(export_operator.marker_list, markers_list_is_selected)
|
||||
restore_is_selected_for_sequence_list(export_operator.nla_strip_list, nla_strip_list_is_selected)
|
||||
restore_is_selected_for_sequence_list(export_operator.active_action_list, active_action_list_is_selected)
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
_classes = (
|
||||
PSA_OT_export,
|
||||
PSA_OT_export_actions_select_all,
|
||||
PSA_OT_export_actions_deselect_all,
|
||||
PSA_OT_export_collection,
|
||||
PSA_OT_export_sequences_select_all,
|
||||
PSA_OT_export_sequences_deselect_all,
|
||||
PSA_OT_export_collection_sequences_select_all,
|
||||
PSA_OT_export_collection_sequences_deselect_all,
|
||||
PSA_OT_export_bone_collections_select_all,
|
||||
PSA_OT_export_bone_collections_deselect_all,
|
||||
PSA_OT_export_collection_populate_sequences,
|
||||
)
|
||||
|
||||
from bpy.utils import register_classes_factory
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import re
|
||||
import sys
|
||||
from fnmatch import fnmatch
|
||||
from typing import List, Optional
|
||||
from typing import Sequence
|
||||
import bpy
|
||||
from bpy.props import (
|
||||
BoolProperty,
|
||||
PointerProperty,
|
||||
EnumProperty,
|
||||
FloatProperty,
|
||||
CollectionProperty,
|
||||
@@ -15,52 +15,69 @@ from bpy.types import PropertyGroup, Object, Action, AnimData, Context
|
||||
|
||||
from ...shared.dfs import dfs_view_layer_objects
|
||||
from ...shared.helpers import populate_bone_collection_list
|
||||
from ...shared.types import TransformMixin, ExportSpaceMixin, PsxBoneExportMixin
|
||||
from ...shared.types import TransformMixin, ExportSpaceMixin, PsxBoneExportMixin, TransformSourceMixin
|
||||
|
||||
|
||||
def psa_export_property_group_animation_data_override_poll(_context, obj):
|
||||
return obj.animation_data is not None
|
||||
|
||||
|
||||
class PSA_PG_export_action_list_item(PropertyGroup):
|
||||
action: PointerProperty(type=Action)
|
||||
name: StringProperty()
|
||||
is_selected: BoolProperty(default=True)
|
||||
frame_start: IntProperty(options={'HIDDEN'})
|
||||
frame_end: IntProperty(options={'HIDDEN'})
|
||||
is_pose_marker: BoolProperty(options={'HIDDEN'})
|
||||
group: StringProperty()
|
||||
class PsaExportSequenceMixin(PropertyGroup):
|
||||
name: StringProperty(name='Name')
|
||||
is_selected: BoolProperty(name='Selected', default=True)
|
||||
frame_start: IntProperty(name='Start Frame', options={'HIDDEN'})
|
||||
frame_end: IntProperty(name='End Frame', options={'HIDDEN'})
|
||||
group: StringProperty(name='Group')
|
||||
action_name: StringProperty(name='Action Name',default='', options={'HIDDEN'})
|
||||
armature_object_name: StringProperty(name='Armature Object Name',default='', options={'HIDDEN'})
|
||||
marker_index: IntProperty(name='Marker Index',default=-1, options={'HIDDEN'})
|
||||
is_pose_marker: BoolProperty(name='Is Pose Marker',default=False, options={'HIDDEN'})
|
||||
|
||||
@property
|
||||
def action(self) -> Action | None:
|
||||
"""Get the action associated with this sequence (if any)."""
|
||||
return bpy.data.actions.get(self.action_name) if self.action_name else None
|
||||
|
||||
@property
|
||||
def armature_object(self) -> Object | None:
|
||||
"""Get the armature object associated with this sequence (if any)."""
|
||||
return bpy.data.objects.get(self.armature_object_name) if self.armature_object_name else None
|
||||
|
||||
@property
|
||||
def is_reversed(self) -> bool:
|
||||
"""Check if the sequence is reversed (end frame before start frame)."""
|
||||
return self.frame_end < self.frame_start
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash(self.name)
|
||||
|
||||
|
||||
class PSA_PG_export_active_action_list_item(PropertyGroup):
|
||||
action: PointerProperty(type=Action)
|
||||
name: StringProperty()
|
||||
armature_object: PointerProperty(type=Object)
|
||||
is_selected: BoolProperty(default=True)
|
||||
frame_start: IntProperty(options={'HIDDEN'})
|
||||
frame_end: IntProperty(options={'HIDDEN'})
|
||||
group: StringProperty()
|
||||
|
||||
|
||||
class PSA_PG_export_timeline_markers(PropertyGroup): # TODO: rename this to singular
|
||||
marker_index: IntProperty()
|
||||
name: StringProperty()
|
||||
is_selected: BoolProperty(default=True)
|
||||
frame_start: IntProperty(options={'HIDDEN'})
|
||||
frame_end: IntProperty(options={'HIDDEN'})
|
||||
group: StringProperty()
|
||||
|
||||
|
||||
class PSA_PG_export_nla_strip_list_item(PropertyGroup):
|
||||
name: StringProperty()
|
||||
action: PointerProperty(type=Action)
|
||||
frame_start: FloatProperty()
|
||||
frame_end: FloatProperty()
|
||||
is_selected: BoolProperty(default=True)
|
||||
group: StringProperty()
|
||||
class PSA_PG_export_sequence(PsaExportSequenceMixin):
|
||||
pass
|
||||
|
||||
|
||||
def get_sequences_from_name_and_frame_range(name: str, frame_start: int, frame_end: int):
|
||||
# Check for loop
|
||||
anims: list[tuple[str, int, int]] = []
|
||||
loop_pattern = r'\@(\d+)\:(.+)'
|
||||
loop_match = re.match(loop_pattern, name)
|
||||
if loop_match:
|
||||
frame_count = max(1, int(loop_match.group(1)))
|
||||
sequence_name = loop_match.group(2)
|
||||
iteration = 0
|
||||
frame = frame_start
|
||||
while frame + frame_count <= frame_end:
|
||||
output_name = sequence_name.format(index=iteration)
|
||||
iteration_frame_start = frame
|
||||
iteration_frame_end = frame + frame_count - 1
|
||||
anims.append((output_name, iteration_frame_start, iteration_frame_end))
|
||||
frame += frame_count
|
||||
iteration += 1
|
||||
else:
|
||||
# If not, just treat it as a single animation, but parse for the reverse pattern as well.
|
||||
anims.append((name, frame_start, frame_end))
|
||||
|
||||
for (name, frame_start, frame_end) in anims:
|
||||
reversed_pattern = r'(.+)/(.+)'
|
||||
reversed_match = re.match(reversed_pattern, name)
|
||||
if reversed_match:
|
||||
@@ -83,17 +100,15 @@ def nla_track_update_cb(self: 'PSA_PG_export', context: Context) -> None:
|
||||
nla_track = animation_data.nla_tracks[self.nla_track_index]
|
||||
for nla_strip in nla_track.strips:
|
||||
for sequence_name, frame_start, frame_end in get_sequences_from_name_and_frame_range(nla_strip.name, nla_strip.frame_start, nla_strip.frame_end):
|
||||
strip: PSA_PG_export_nla_strip_list_item = self.nla_strip_list.add()
|
||||
strip.action = nla_strip.action
|
||||
strip: PSA_PG_export_sequence = self.nla_strip_list.add()
|
||||
strip.action_name = nla_strip.action
|
||||
strip.name = sequence_name
|
||||
strip.frame_start = frame_start
|
||||
strip.frame_end = frame_end
|
||||
|
||||
|
||||
def get_animation_data(pg: 'PSA_PG_export', context: Context) -> Optional[AnimData]:
|
||||
def get_animation_data(pg: 'PSA_PG_export', context: Context) -> AnimData | None:
|
||||
animation_data_object = context.object
|
||||
if pg.should_override_animation_data:
|
||||
animation_data_object = pg.animation_data_override
|
||||
return animation_data_object.animation_data if animation_data_object else None
|
||||
|
||||
|
||||
@@ -152,19 +167,7 @@ def sequence_source_update_cb(self: 'PSA_PG_export', context: Context) -> None:
|
||||
primary_key='DATA' if self.sequence_source == 'ACTIVE_ACTION' else 'OBJECT')
|
||||
|
||||
|
||||
class PSA_PG_export(PropertyGroup, TransformMixin, ExportSpaceMixin, PsxBoneExportMixin):
|
||||
should_override_animation_data: BoolProperty(
|
||||
name='Override Animation Data',
|
||||
options=set(),
|
||||
default=False,
|
||||
description='Use the animation data from a different object instead of the selected object',
|
||||
update=animation_data_override_update_cb,
|
||||
)
|
||||
animation_data_override: PointerProperty(
|
||||
type=Object,
|
||||
update=animation_data_override_update_cb,
|
||||
poll=psa_export_property_group_animation_data_override_poll
|
||||
)
|
||||
class PsaExportMixin(PropertyGroup, TransformMixin, ExportSpaceMixin, PsxBoneExportMixin, TransformSourceMixin):
|
||||
sequence_source: EnumProperty(
|
||||
name='Source',
|
||||
options=set(),
|
||||
@@ -194,14 +197,16 @@ class PSA_PG_export(PropertyGroup, TransformMixin, ExportSpaceMixin, PsxBoneExpo
|
||||
items=compression_ratio_source_items,
|
||||
)
|
||||
compression_ratio_custom: FloatProperty(default=1.0, min=0.0, max=1.0, subtype='FACTOR', description='The key sampling ratio of the exported sequence.\n\nA compression ratio of 1.0 will export all frames, while a compression ratio of 0.5 will export half of the frames')
|
||||
action_list: CollectionProperty(type=PSA_PG_export_action_list_item)
|
||||
|
||||
action_list: CollectionProperty(type=PSA_PG_export_sequence)
|
||||
action_list_index: IntProperty(default=0)
|
||||
marker_list: CollectionProperty(type=PSA_PG_export_timeline_markers)
|
||||
marker_list: CollectionProperty(type=PSA_PG_export_sequence)
|
||||
marker_list_index: IntProperty(default=0)
|
||||
nla_strip_list: CollectionProperty(type=PSA_PG_export_nla_strip_list_item)
|
||||
nla_strip_list: CollectionProperty(type=PSA_PG_export_sequence)
|
||||
nla_strip_list_index: IntProperty(default=0)
|
||||
active_action_list: CollectionProperty(type=PSA_PG_export_active_action_list_item)
|
||||
active_action_list: CollectionProperty(type=PSA_PG_export_sequence)
|
||||
active_action_list_index: IntProperty(default=0)
|
||||
|
||||
sequence_name_prefix: StringProperty(name='Prefix', options=set())
|
||||
sequence_name_suffix: StringProperty(name='Suffix', options=set())
|
||||
sequence_filter_name: StringProperty(
|
||||
@@ -250,8 +255,11 @@ class PSA_PG_export(PropertyGroup, TransformMixin, ExportSpaceMixin, PsxBoneExpo
|
||||
description='The group to apply to all exported sequences. Only applicable when Group Source is Custom.'
|
||||
)
|
||||
|
||||
class PSA_PG_export(PsaExportMixin):
|
||||
pass
|
||||
|
||||
def filter_sequences(pg: PSA_PG_export, sequences) -> List[int]:
|
||||
|
||||
def filter_sequences(pg: PsaExportMixin, sequences: Sequence[PsaExportSequenceMixin]) -> list[int]:
|
||||
bitflag_filter_item = 1 << 30
|
||||
flt_flags = [bitflag_filter_item] * len(sequences)
|
||||
|
||||
@@ -268,27 +276,24 @@ def filter_sequences(pg: PSA_PG_export, sequences) -> List[int]:
|
||||
|
||||
if not pg.sequence_filter_asset:
|
||||
for i, sequence in enumerate(sequences):
|
||||
if hasattr(sequence, 'action') and sequence.action is not None and sequence.action.asset_data is not None:
|
||||
if sequence.action is not None and sequence.action.asset_data is not None:
|
||||
flt_flags[i] &= ~bitflag_filter_item
|
||||
|
||||
if not pg.sequence_filter_pose_marker:
|
||||
for i, sequence in enumerate(sequences):
|
||||
if hasattr(sequence, 'is_pose_marker') and sequence.is_pose_marker:
|
||||
if sequence.is_pose_marker:
|
||||
flt_flags[i] &= ~bitflag_filter_item
|
||||
|
||||
if not pg.sequence_filter_reversed:
|
||||
for i, sequence in enumerate(sequences):
|
||||
if sequence.frame_start > sequence.frame_end:
|
||||
if sequence.is_reversed:
|
||||
flt_flags[i] &= ~bitflag_filter_item
|
||||
|
||||
return flt_flags
|
||||
|
||||
|
||||
_classes = (
|
||||
PSA_PG_export_action_list_item,
|
||||
PSA_PG_export_timeline_markers,
|
||||
PSA_PG_export_nla_strip_list_item,
|
||||
PSA_PG_export_active_action_list_item,
|
||||
PSA_PG_export_sequence,
|
||||
PSA_PG_export,
|
||||
)
|
||||
|
||||
|
||||
68
io_scene_psk_psa/psa/export/properties.pyi
Normal file
68
io_scene_psk_psa/psa/export/properties.pyi
Normal file
@@ -0,0 +1,68 @@
|
||||
from bpy.types import PropertyGroup, Object, Action
|
||||
|
||||
from ...shared.types import BpyCollectionProperty, TransformMixin, ExportSpaceMixin, PsxBoneExportMixin, TransformSourceMixin
|
||||
|
||||
class PsaExportSequenceMixin(PropertyGroup):
|
||||
name: str
|
||||
is_selected: bool
|
||||
frame_start: int
|
||||
frame_end: int
|
||||
group: str
|
||||
action_name: str
|
||||
armature_object_name: str
|
||||
marker_index: int
|
||||
is_pose_marker: bool
|
||||
|
||||
@property
|
||||
def action(self) -> Action | None: ...
|
||||
|
||||
@property
|
||||
def armature_object(self) -> Object | None: ...
|
||||
|
||||
@property
|
||||
def is_reversed(self) -> bool: ...
|
||||
|
||||
|
||||
class PSA_PG_export_sequence(PsaExportSequenceMixin):
|
||||
"""Concrete type for PSA export sequences."""
|
||||
pass
|
||||
|
||||
|
||||
class PsaExportMixin(PropertyGroup, TransformMixin, ExportSpaceMixin, PsxBoneExportMixin, TransformSourceMixin):
|
||||
sequence_source: str
|
||||
nla_track: str
|
||||
nla_track_index: int
|
||||
fps_source: str
|
||||
fps_custom: float
|
||||
compression_ratio_source: str
|
||||
compression_ratio_custom: float
|
||||
action_list: BpyCollectionProperty[PSA_PG_export_sequence]
|
||||
action_list_index: int
|
||||
marker_list: BpyCollectionProperty[PSA_PG_export_sequence]
|
||||
marker_list_index: int
|
||||
nla_strip_list: BpyCollectionProperty[PSA_PG_export_sequence]
|
||||
nla_strip_list_index: int
|
||||
active_action_list: BpyCollectionProperty[PSA_PG_export_sequence]
|
||||
active_action_list_index: int
|
||||
sequence_name_prefix: str
|
||||
sequence_name_suffix: str
|
||||
sequence_filter_name: str
|
||||
sequence_use_filter_invert: bool
|
||||
sequence_filter_asset: bool
|
||||
sequence_filter_pose_marker: bool
|
||||
sequence_use_filter_sort_reverse: bool
|
||||
sequence_filter_reversed: bool
|
||||
sampling_mode: str
|
||||
group_source: str
|
||||
group_custom: str
|
||||
|
||||
|
||||
class PSA_PG_export(PsaExportMixin):
|
||||
pass
|
||||
|
||||
|
||||
def get_sequences_from_name_and_frame_range(name: str, frame_start: int, frame_end: int):
|
||||
pass
|
||||
|
||||
def filter_sequences(pg: PsaExportMixin, sequences) -> list[int]:
|
||||
pass
|
||||
@@ -2,7 +2,7 @@ from typing import cast as typing_cast
|
||||
|
||||
from bpy.types import UIList
|
||||
|
||||
from .properties import PSA_PG_export_action_list_item, filter_sequences
|
||||
from .properties import PsaExportSequenceMixin, filter_sequences
|
||||
|
||||
|
||||
class PSA_UL_export_sequences(UIList):
|
||||
@@ -14,7 +14,7 @@ class PSA_UL_export_sequences(UIList):
|
||||
self.use_filter_show = True
|
||||
|
||||
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
|
||||
item = typing_cast(PSA_PG_export_action_list_item, item)
|
||||
item = typing_cast(PsaExportSequenceMixin, item)
|
||||
|
||||
is_pose_marker = hasattr(item, 'is_pose_marker') and item.is_pose_marker
|
||||
layout.prop(item, 'is_selected', icon_only=True, text=item.name)
|
||||
@@ -24,9 +24,9 @@ class PSA_UL_export_sequences(UIList):
|
||||
row = layout.row(align=True)
|
||||
row.alignment = 'RIGHT'
|
||||
|
||||
row.label(text=str(abs(item.frame_end - item.frame_start) + 1), icon='FRAME_PREV' if item.frame_end < item.frame_start else 'KEYFRAME')
|
||||
row.label(text=str(abs(item.frame_end - item.frame_start) + 1), icon='FRAME_PREV' if item.is_reversed else 'KEYFRAME')
|
||||
|
||||
if hasattr(item, 'armature_object') and item.armature_object is not None:
|
||||
if item.armature_object is not None:
|
||||
row.label(text=item.armature_object.name, icon='ARMATURE_DATA')
|
||||
|
||||
# row.label(text=item.action.name, icon='PMARKER' if is_pose_marker else 'ACTION_DATA')
|
||||
|
||||
24
io_scene_psk_psa/psa/file_handlers.py
Normal file
24
io_scene_psk_psa/psa/file_handlers.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from bpy.types import Context
|
||||
from bpy.types import FileHandler
|
||||
|
||||
from .import_.operators import PSA_OT_import_drag_and_drop
|
||||
from .export.operators import PSA_OT_export_collection
|
||||
|
||||
class PSA_FH_file_handler(FileHandler):
|
||||
bl_idname = 'PSA_FH_file_handler'
|
||||
bl_label = 'Unreal PSA'
|
||||
bl_import_operator = PSA_OT_import_drag_and_drop.bl_idname
|
||||
bl_export_operator = PSA_OT_export_collection.bl_idname
|
||||
bl_file_extensions = '.psa'
|
||||
|
||||
@classmethod
|
||||
def poll_drop(cls, context: Context) -> bool:
|
||||
return context.area is not None and context.area.type == 'VIEW_3D'
|
||||
|
||||
|
||||
_classes = (
|
||||
PSA_FH_file_handler,
|
||||
)
|
||||
|
||||
from bpy.utils import register_classes_factory
|
||||
register, unregister = register_classes_factory(_classes)
|
||||
@@ -3,7 +3,7 @@ from pathlib import Path
|
||||
from typing import Iterable
|
||||
|
||||
from bpy.props import CollectionProperty, StringProperty
|
||||
from bpy.types import Context, Event, FileHandler, Object, Operator, OperatorFileListElement
|
||||
from bpy.types import Context, Event, Object, Operator, OperatorFileListElement
|
||||
from bpy_extras.io_utils import ImportHelper
|
||||
|
||||
from .properties import PsaImportMixin, get_visible_sequences
|
||||
@@ -110,7 +110,7 @@ def load_psa_file(context, filepath: str):
|
||||
try:
|
||||
# Read the file and populate the action list.
|
||||
p = os.path.abspath(filepath)
|
||||
psa_reader = PsaReader(p)
|
||||
psa_reader = PsaReader.from_path(p)
|
||||
for sequence in psa_reader.sequences.values():
|
||||
item = pg.sequence_list.add()
|
||||
item.action_name = sequence.name.decode('windows-1252')
|
||||
@@ -142,7 +142,7 @@ class PSA_OT_import_drag_and_drop(Operator, PsaImportMixin):
|
||||
|
||||
for file in self.files:
|
||||
psa_path = str(os.path.join(self.directory, file.name))
|
||||
psa_reader = PsaReader(psa_path)
|
||||
psa_reader = PsaReader.from_path(psa_path)
|
||||
sequence_names = list(psa_reader.sequences.keys())
|
||||
options = psa_import_options_from_property_group(self, sequence_names)
|
||||
|
||||
@@ -188,6 +188,7 @@ def psa_import_options_from_property_group(pg: PsaImportMixin, sequence_names: I
|
||||
options.should_overwrite = pg.should_overwrite
|
||||
options.should_write_metadata = pg.should_write_metadata
|
||||
options.should_write_keyframes = pg.should_write_keyframes
|
||||
options.should_write_scale_keys = pg.should_write_scale_keys
|
||||
options.should_convert_to_samples = pg.should_convert_to_samples
|
||||
options.bone_mapping = BoneMapping(
|
||||
is_case_sensitive=pg.bone_mapping_is_case_sensitive,
|
||||
@@ -215,7 +216,7 @@ def _import_psa(context,
|
||||
except Exception as e:
|
||||
warnings.append(f'Failed to read PSA config file: {e}')
|
||||
|
||||
psa_reader = PsaReader(filepath)
|
||||
psa_reader = PsaReader.from_path(filepath)
|
||||
|
||||
result = import_psa(context, psa_reader, armature_object, options)
|
||||
result.warnings.extend(warnings)
|
||||
@@ -242,7 +243,7 @@ class PSA_OT_import_all(Operator, PsaImportMixin):
|
||||
|
||||
def execute(self, context):
|
||||
sequence_names = []
|
||||
with PsaReader(self.filepath) as psa_reader:
|
||||
with PsaReader.from_path(self.filepath) as psa_reader:
|
||||
sequence_names.extend(psa_reader.sequences.keys())
|
||||
|
||||
options = PsaImportOptions(
|
||||
@@ -376,6 +377,7 @@ class PSA_OT_import(Operator, ImportHelper, PsaImportMixin):
|
||||
col.use_property_decorate = False
|
||||
col.prop(self, 'should_write_keyframes')
|
||||
col.prop(self, 'should_write_metadata')
|
||||
col.prop(self, 'should_write_scale_keys')
|
||||
|
||||
if self.should_write_keyframes:
|
||||
col = col.column(heading='Keyframes')
|
||||
@@ -426,6 +428,7 @@ def draw_psa_import_options_no_panels(layout, pg: PsaImportMixin):
|
||||
col.use_property_decorate = False
|
||||
col.prop(pg, 'should_write_keyframes')
|
||||
col.prop(pg, 'should_write_metadata')
|
||||
col.prop(pg, 'should_write_scale_keys')
|
||||
|
||||
if pg.should_write_keyframes:
|
||||
col = col.column(heading='Keyframes')
|
||||
@@ -452,18 +455,6 @@ def draw_psa_import_options_no_panels(layout, pg: PsaImportMixin):
|
||||
col.prop(pg, 'should_use_config_file')
|
||||
|
||||
|
||||
class PSA_FH_import(FileHandler): # TODO: rename and add handling for PSA export.
|
||||
bl_idname = 'PSA_FH_import'
|
||||
bl_label = 'File handler for Unreal PSA import'
|
||||
bl_import_operator = PSA_OT_import_drag_and_drop.bl_idname
|
||||
# bl_export_operator = 'psa_export.export'
|
||||
bl_file_extensions = '.psa'
|
||||
|
||||
@classmethod
|
||||
def poll_drop(cls, context: Context) -> bool:
|
||||
return context.area is not None and context.area.type == 'VIEW_3D'
|
||||
|
||||
|
||||
_classes = (
|
||||
PSA_OT_import_sequences_select_all,
|
||||
PSA_OT_import_sequences_deselect_all,
|
||||
@@ -471,7 +462,6 @@ _classes = (
|
||||
PSA_OT_import,
|
||||
PSA_OT_import_all,
|
||||
PSA_OT_import_drag_and_drop,
|
||||
PSA_FH_import,
|
||||
)
|
||||
|
||||
from bpy.utils import register_classes_factory
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import re
|
||||
from fnmatch import fnmatch
|
||||
from typing import List
|
||||
|
||||
from bpy.props import (
|
||||
BoolProperty,
|
||||
@@ -67,6 +66,7 @@ class PsaImportMixin:
|
||||
should_write_metadata: BoolProperty(default=True, name='Metadata', options=set(),
|
||||
description='Additional data will be written to the custom properties of the '
|
||||
'Action (e.g., frame rate)')
|
||||
should_write_scale_keys: BoolProperty(default=True, name='Scale Keys', options=set())
|
||||
sequence_filter_name: StringProperty(default='', options={'TEXTEDIT_UPDATE'})
|
||||
sequence_filter_is_selected: BoolProperty(default=False, options=set(), name='Only Show Selected',
|
||||
description='Only show selected sequences')
|
||||
@@ -133,7 +133,7 @@ class PSA_PG_import(PropertyGroup):
|
||||
select_text: PointerProperty(type=Text)
|
||||
|
||||
|
||||
def filter_sequences(pg: PSA_PG_import, sequences) -> List[int]:
|
||||
def filter_sequences(pg: PSA_PG_import, sequences) -> list[int]:
|
||||
bitflag_filter_item = 1 << 30
|
||||
flt_flags = [bitflag_filter_item] * len(sequences)
|
||||
|
||||
@@ -167,7 +167,7 @@ def filter_sequences(pg: PSA_PG_import, sequences) -> List[int]:
|
||||
return flt_flags
|
||||
|
||||
|
||||
def get_visible_sequences(pg: PSA_PG_import, sequences) -> List[PSA_PG_import_action_list_item]:
|
||||
def get_visible_sequences(pg: PSA_PG_import, sequences) -> list[PSA_PG_import_action_list_item]:
|
||||
bitflag_filter_item = 1 << 30
|
||||
visible_sequences = []
|
||||
for i, flag in enumerate(filter_sequences(pg, sequences)):
|
||||
|
||||
60
io_scene_psk_psa/psa/import_/properties.pyi
Normal file
60
io_scene_psk_psa/psa/import_/properties.pyi
Normal file
@@ -0,0 +1,60 @@
|
||||
from bpy.types import PropertyGroup, Text
|
||||
|
||||
from ...shared.types import BpyCollectionProperty
|
||||
|
||||
|
||||
class PSA_PG_import_action_list_item:
|
||||
action_name: str
|
||||
is_selected: bool
|
||||
|
||||
|
||||
class PSA_PG_bone:
|
||||
bone_name: str
|
||||
|
||||
|
||||
class PSA_PG_data(PropertyGroup):
|
||||
bones: BpyCollectionProperty[PSA_PG_bone]
|
||||
sequence_count: int
|
||||
|
||||
class PsaImportMixin:
|
||||
should_use_fake_user: bool
|
||||
should_use_config_file: bool
|
||||
should_stash: bool
|
||||
should_use_action_name_prefix: bool
|
||||
action_name_prefix: str
|
||||
should_overwrite: bool
|
||||
should_write_keyframes: bool
|
||||
should_write_metadata: bool
|
||||
should_write_scale_keys: bool
|
||||
sequence_filter_name: str
|
||||
sequence_filter_is_selected: bool
|
||||
sequence_use_filter_invert: bool
|
||||
sequence_use_filter_regex: bool
|
||||
should_convert_to_samples: bool
|
||||
bone_mapping_is_case_sensitive: bool
|
||||
bone_mapping_should_ignore_trailing_whitespace: bool
|
||||
fps_source: str
|
||||
fps_custom: float
|
||||
compression_ratio_source: str
|
||||
compression_ratio_custom: float
|
||||
translation_scale: float
|
||||
|
||||
class PSA_PG_import:
|
||||
psa_error: str
|
||||
psa: PSA_PG_data
|
||||
sequence_list: BpyCollectionProperty[PSA_PG_import_action_list_item]
|
||||
sequence_list_index: int
|
||||
sequence_filter_name: str
|
||||
sequence_filter_is_selected: bool
|
||||
sequence_use_filter_invert: bool
|
||||
sequence_use_filter_regex: bool
|
||||
select_text: Text | None
|
||||
|
||||
|
||||
|
||||
def filter_sequences(pg: PSA_PG_import, sequences) -> list[int]:
|
||||
pass
|
||||
|
||||
|
||||
def get_visible_sequences(pg: PSA_PG_import, sequences) -> list[PSA_PG_import_action_list_item]:
|
||||
pass
|
||||
@@ -22,9 +22,9 @@ class PSA_UL_sequences_mixin(UIList):
|
||||
sub_row.prop(pg, 'sequence_use_filter_regex', text='', icon='SORTBYEXT')
|
||||
sub_row.prop(pg, 'sequence_filter_is_selected', text='', icon='CHECKBOX_HLT')
|
||||
|
||||
def filter_items(self, context, data, property_):
|
||||
def filter_items(self, context, data, property):
|
||||
pg = getattr(context.scene, 'psa_import')
|
||||
sequences = getattr(data, property_)
|
||||
sequences = getattr(data, property)
|
||||
flt_flags = filter_sequences(pg, sequences)
|
||||
flt_neworder = bpy.types.UI_UL_list.sort_items_by_name(sequences, 'action_name')
|
||||
return flt_flags, flt_neworder
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from typing import Sequence, Iterable, List, Optional, cast as typing_cast
|
||||
from typing import Sequence, Iterable, cast as typing_cast
|
||||
|
||||
import bpy
|
||||
import numpy as np
|
||||
@@ -7,6 +7,8 @@ from bpy.types import Armature, Context, FCurve, Object, Bone, PoseBone
|
||||
from mathutils import Vector, Quaternion
|
||||
from bpy_extras import anim_utils
|
||||
|
||||
from ..shared.types import PSX_PG_action_export
|
||||
|
||||
from .config import PsaConfig, REMOVE_TRACK_LOCATION, REMOVE_TRACK_ROTATION
|
||||
from psk_psa_py.psa.reader import PsaReader
|
||||
from psk_psa_py.shared.data import PsxBone
|
||||
@@ -30,7 +32,7 @@ class PsaImportOptions(object):
|
||||
fps_custom: float = 30.0,
|
||||
fps_source: str = 'SEQUENCE',
|
||||
psa_config: PsaConfig = PsaConfig(),
|
||||
sequence_names: Optional[List[str]] = None,
|
||||
sequence_names: list[str] | None = None,
|
||||
should_convert_to_samples: bool = False,
|
||||
should_overwrite: bool = False,
|
||||
should_stash: bool = False,
|
||||
@@ -38,6 +40,7 @@ class PsaImportOptions(object):
|
||||
should_use_fake_user: bool = False,
|
||||
should_write_keyframes: bool = True,
|
||||
should_write_metadata: bool = True,
|
||||
should_write_scale_keys: bool = True,
|
||||
translation_scale: float = 1.0
|
||||
):
|
||||
self.action_name_prefix = action_name_prefix
|
||||
@@ -53,25 +56,26 @@ class PsaImportOptions(object):
|
||||
self.should_use_fake_user = should_use_fake_user
|
||||
self.should_write_keyframes = should_write_keyframes
|
||||
self.should_write_metadata = should_write_metadata
|
||||
self.should_write_scale_keys = should_write_scale_keys
|
||||
self.translation_scale = translation_scale
|
||||
|
||||
|
||||
class ImportBone(object):
|
||||
def __init__(self, psa_bone: PsxBone):
|
||||
self.psa_bone: PsxBone = psa_bone
|
||||
self.parent: Optional[ImportBone] = None
|
||||
self.armature_bone: Optional[Bone] = None
|
||||
self.pose_bone: Optional[PoseBone] = None
|
||||
self.parent: ImportBone | None = None
|
||||
self.armature_bone: Bone | None = None
|
||||
self.pose_bone: PoseBone | None = None
|
||||
self.original_location: Vector = Vector()
|
||||
self.original_rotation: Quaternion = Quaternion()
|
||||
self.post_rotation: Quaternion = Quaternion()
|
||||
self.fcurves: List[FCurve] = []
|
||||
self.fcurves: list[FCurve] = []
|
||||
|
||||
|
||||
def _calculate_fcurve_data(import_bone: ImportBone, key_data: Sequence[float]):
|
||||
# Convert world-space transforms to local-space transforms.
|
||||
key_rotation = Quaternion(key_data[0:4])
|
||||
key_location = Vector(key_data[4:])
|
||||
key_location = Vector(key_data[4:7])
|
||||
q = import_bone.post_rotation.copy()
|
||||
q.rotate(import_bone.original_rotation)
|
||||
rotation = q
|
||||
@@ -83,15 +87,16 @@ def _calculate_fcurve_data(import_bone: ImportBone, key_data: Sequence[float]):
|
||||
rotation.rotate(q.conjugated())
|
||||
location = key_location - import_bone.original_location
|
||||
location.rotate(import_bone.post_rotation.conjugated())
|
||||
return rotation.w, rotation.x, rotation.y, rotation.z, location.x, location.y, location.z
|
||||
scale = Vector(key_data[7:10])
|
||||
return rotation.w, rotation.x, rotation.y, rotation.z, location.x, location.y, location.z, scale.x, scale.y, scale.z
|
||||
|
||||
|
||||
class PsaImportResult:
|
||||
def __init__(self):
|
||||
self.warnings: List[str] = []
|
||||
self.warnings: list[str] = []
|
||||
|
||||
|
||||
def _get_armature_bone_index_for_psa_bone(psa_bone_name: str, armature_bone_names: List[str], bone_mapping: BoneMapping) -> Optional[int]:
|
||||
def _get_armature_bone_index_for_psa_bone(psa_bone_name: str, armature_bone_names: list[str], bone_mapping: BoneMapping) -> int | None:
|
||||
"""
|
||||
@param psa_bone_name: The name of the PSA bone.
|
||||
@param armature_bone_names: The names of the bones in the armature.
|
||||
@@ -167,6 +172,34 @@ def _resample_sequence_data_matrix(sequence_data_matrix: np.ndarray, frame_step:
|
||||
return resampled_sequence_data_matrix
|
||||
|
||||
|
||||
def _read_sequence_data_matrix(psa_reader: PsaReader, sequence_name: str) -> np.ndarray:
|
||||
"""
|
||||
Reads and returns the data matrix for the given sequence.
|
||||
The order of the data in the third axis is Qw, Qx, Qy, Qz, Lx, Ly, Lz, Sx, Sy, Sz
|
||||
|
||||
@param sequence_name: The name of the sequence.
|
||||
@return: An FxBx10 matrix where F is the number of frames, B is the number of bones.
|
||||
"""
|
||||
sequence = psa_reader.sequences[sequence_name]
|
||||
keys = psa_reader.read_sequence_keys(sequence_name)
|
||||
bone_count = len(psa_reader.bones)
|
||||
matrix_size = sequence.frame_count, bone_count, 10
|
||||
matrix = np.ones(matrix_size)
|
||||
keys_iter = iter(keys)
|
||||
# Populate rotation and location data.
|
||||
for frame_index in range(sequence.frame_count):
|
||||
for bone_index in range(bone_count):
|
||||
matrix[frame_index, bone_index, :7] = list(next(keys_iter).data)
|
||||
# Populate scale data, if it exists.
|
||||
scale_keys = psa_reader.read_sequence_scale_keys(sequence_name)
|
||||
if len(scale_keys) > 0:
|
||||
scale_keys_iter = iter(scale_keys)
|
||||
for frame_index in range(sequence.frame_count):
|
||||
for bone_index in range(bone_count):
|
||||
matrix[frame_index, bone_index, 7:] = list(next(scale_keys_iter).data)
|
||||
return matrix
|
||||
|
||||
|
||||
def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object, options: PsaImportOptions) -> PsaImportResult:
|
||||
|
||||
assert context.window_manager
|
||||
@@ -309,8 +342,10 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
|
||||
pose_bone = import_bone.pose_bone
|
||||
rotation_data_path = pose_bone.path_from_id('rotation_quaternion')
|
||||
location_data_path = pose_bone.path_from_id('location')
|
||||
scale_data_path = pose_bone.path_from_id('scale')
|
||||
add_rotation_fcurves = (bone_track_flags & REMOVE_TRACK_ROTATION) == 0
|
||||
add_location_fcurves = (bone_track_flags & REMOVE_TRACK_LOCATION) == 0
|
||||
add_scale_fcurves = psa_reader.has_scale_keys and options.should_write_scale_keys
|
||||
import_bone.fcurves = [
|
||||
channelbag.fcurves.ensure(rotation_data_path, index=0, group_name=pose_bone.name) if add_rotation_fcurves else None, # Qw
|
||||
channelbag.fcurves.ensure(rotation_data_path, index=1, group_name=pose_bone.name) if add_rotation_fcurves else None, # Qx
|
||||
@@ -319,14 +354,17 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
|
||||
channelbag.fcurves.ensure(location_data_path, index=0, group_name=pose_bone.name) if add_location_fcurves else None, # Lx
|
||||
channelbag.fcurves.ensure(location_data_path, index=1, group_name=pose_bone.name) if add_location_fcurves else None, # Ly
|
||||
channelbag.fcurves.ensure(location_data_path, index=2, group_name=pose_bone.name) if add_location_fcurves else None, # Lz
|
||||
channelbag.fcurves.ensure(scale_data_path, index=0, group_name=pose_bone.name) if add_scale_fcurves else None, # Sx
|
||||
channelbag.fcurves.ensure(scale_data_path, index=1, group_name=pose_bone.name) if add_scale_fcurves else None, # Sy
|
||||
channelbag.fcurves.ensure(scale_data_path, index=2, group_name=pose_bone.name) if add_scale_fcurves else None, # Sz
|
||||
]
|
||||
|
||||
# Read the sequence data matrix from the PSA.
|
||||
sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name)
|
||||
sequence_data_matrix = _read_sequence_data_matrix(psa_reader, sequence_name)
|
||||
|
||||
if options.translation_scale != 1.0:
|
||||
# Scale the translation data.
|
||||
sequence_data_matrix[:, :, 4:] *= options.translation_scale
|
||||
sequence_data_matrix[:, :, 4:7] *= options.translation_scale
|
||||
|
||||
# Convert the sequence's data from world-space to local-space.
|
||||
for bone_index, import_bone in enumerate(import_bones):
|
||||
@@ -364,12 +402,13 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
|
||||
|
||||
if options.should_convert_to_samples:
|
||||
# Bake the curve to samples.
|
||||
for fcurve in action.fcurves:
|
||||
for fcurve in channelbag.fcurves:
|
||||
fcurve.convert_to_samples(start=0, end=sequence.frame_count)
|
||||
|
||||
# Write meta-data.
|
||||
if options.should_write_metadata:
|
||||
action.psa_export.fps = target_fps
|
||||
pg = typing_cast(PSX_PG_action_export, getattr(action, 'psa_export'))
|
||||
pg.fps = target_fps
|
||||
|
||||
action.use_fake_user = options.should_use_fake_user
|
||||
|
||||
|
||||
@@ -1,35 +1,32 @@
|
||||
import bmesh
|
||||
import bpy
|
||||
import numpy as np
|
||||
from bpy.types import Armature, Collection, Context, Depsgraph, Object, ArmatureModifier, Mesh
|
||||
from bpy.types import Armature, Context, Object, Mesh, Material
|
||||
from mathutils import Matrix
|
||||
from typing import Dict, Iterable, List, Optional, Set, cast as typing_cast
|
||||
from typing import Iterable, Sequence, cast as typing_cast
|
||||
from psk_psa_py.shared.data import Vector3
|
||||
from psk_psa_py.psk.data import Psk
|
||||
from .properties import triangle_type_and_bit_flags_to_poly_flags
|
||||
from ..shared.dfs import DfsObject, dfs_collection_objects, dfs_view_layer_objects
|
||||
from ..shared.helpers import (
|
||||
ObjectNode,
|
||||
ObjectTree,
|
||||
PskInputObjects,
|
||||
PsxBoneCollection,
|
||||
convert_string_to_cp1252_bytes,
|
||||
create_psx_bones,
|
||||
get_armatures_for_mesh_objects,
|
||||
get_armature_for_mesh_object,
|
||||
get_coordinate_system_transform,
|
||||
get_materials_for_mesh_objects,
|
||||
)
|
||||
|
||||
|
||||
class PskInputObjects(object):
|
||||
def __init__(self):
|
||||
self.mesh_dfs_objects: List[DfsObject] = []
|
||||
self.armature_objects: Set[Object] = set()
|
||||
|
||||
|
||||
class PskBuildOptions(object):
|
||||
def __init__(self):
|
||||
self.bone_filter_mode = 'ALL'
|
||||
self.bone_collection_indices: List[PsxBoneCollection] = []
|
||||
self.bone_collection_indices: list[PsxBoneCollection] = []
|
||||
self.object_eval_state = 'EVALUATED'
|
||||
self.material_order_mode = 'AUTOMATIC'
|
||||
self.material_name_list: List[str] = []
|
||||
self.material_name_list: list[str] = []
|
||||
self.scale = 1.0
|
||||
self.export_space = 'WORLD'
|
||||
self.forward_axis = 'X'
|
||||
@@ -37,101 +34,51 @@ class PskBuildOptions(object):
|
||||
self.root_bone_name = 'ROOT'
|
||||
|
||||
|
||||
def get_materials_for_mesh_objects(depsgraph: Depsgraph, mesh_objects: Iterable[Object]):
|
||||
yielded_materials = set()
|
||||
for mesh_object in mesh_objects:
|
||||
evaluated_mesh_object = mesh_object.evaluated_get(depsgraph)
|
||||
for i, material_slot in enumerate(evaluated_mesh_object.material_slots):
|
||||
material = material_slot.material
|
||||
if material is None:
|
||||
raise RuntimeError(f'Material slots cannot be empty. ({mesh_object.name}, index {i})')
|
||||
if material not in yielded_materials:
|
||||
yielded_materials.add(material)
|
||||
yield material
|
||||
|
||||
|
||||
def get_mesh_objects_for_collection(collection: Collection) -> Iterable[DfsObject]:
|
||||
return filter(lambda x: x.obj.type == 'MESH', dfs_collection_objects(collection))
|
||||
|
||||
|
||||
def get_mesh_objects_for_context(context: Context) -> Iterable[DfsObject]:
|
||||
if context.view_layer is None:
|
||||
return
|
||||
for dfs_object in dfs_view_layer_objects(context.view_layer):
|
||||
if dfs_object.obj.type == 'MESH' and dfs_object.is_selected:
|
||||
yield dfs_object
|
||||
|
||||
|
||||
def get_armature_for_mesh_object(mesh_object: Object) -> Optional[Object]:
|
||||
if mesh_object.type != 'MESH':
|
||||
return None
|
||||
# Get the first armature modifier with a non-empty armature object.
|
||||
for modifier in filter(lambda x: x.type == 'ARMATURE', mesh_object.modifiers):
|
||||
armature_modifier = typing_cast(ArmatureModifier, modifier)
|
||||
if armature_modifier.object is not None:
|
||||
return armature_modifier.object
|
||||
return None
|
||||
|
||||
|
||||
def _get_psk_input_objects(mesh_dfs_objects: Iterable[DfsObject]) -> PskInputObjects:
|
||||
mesh_dfs_objects = list(mesh_dfs_objects)
|
||||
if len(mesh_dfs_objects) == 0:
|
||||
raise RuntimeError('At least one mesh must be selected')
|
||||
input_objects = PskInputObjects()
|
||||
input_objects.mesh_dfs_objects = mesh_dfs_objects
|
||||
input_objects.armature_objects |= set(get_armatures_for_mesh_objects(map(lambda x: x.obj, mesh_dfs_objects)))
|
||||
return input_objects
|
||||
|
||||
|
||||
def get_psk_input_objects_for_context(context: Context) -> PskInputObjects:
|
||||
mesh_objects = list(get_mesh_objects_for_context(context))
|
||||
return _get_psk_input_objects(mesh_objects)
|
||||
|
||||
|
||||
def get_psk_input_objects_for_collection(collection: Collection) -> PskInputObjects:
|
||||
mesh_objects = get_mesh_objects_for_collection(collection)
|
||||
return _get_psk_input_objects(mesh_objects)
|
||||
|
||||
|
||||
class PskBuildResult(object):
|
||||
def __init__(self, psk: Psk, warnings: list[str]):
|
||||
self.psk: Psk = psk
|
||||
self.warnings: List[str] = warnings
|
||||
self.warnings: list[str] = warnings
|
||||
|
||||
|
||||
def _get_mesh_export_space_matrix(armature_object: Optional[Object], export_space: str) -> Matrix:
|
||||
if armature_object is None:
|
||||
def _get_mesh_export_space_matrix(node: ObjectNode | None, export_space: str) -> Matrix:
|
||||
if node is None:
|
||||
return Matrix.Identity(4)
|
||||
|
||||
armature_object = node.object
|
||||
root_armature_object = node.root.object
|
||||
|
||||
def get_object_space_matrix(obj: Object) -> Matrix:
|
||||
translation, rotation, _ = obj.matrix_world.decompose()
|
||||
# We neutralize the scale here because the scale is already applied to the mesh objects implicitly.
|
||||
return Matrix.Translation(translation) @ rotation.to_matrix().to_4x4()
|
||||
|
||||
armature_space_matrix = get_object_space_matrix(armature_object)
|
||||
root_armature_space_matrix = get_object_space_matrix(root_armature_object)
|
||||
relative_matrix = root_armature_space_matrix @ armature_space_matrix.inverted()
|
||||
|
||||
match export_space:
|
||||
case 'WORLD':
|
||||
return Matrix.Identity(4)
|
||||
case 'ARMATURE':
|
||||
return get_object_space_matrix(armature_object).inverted()
|
||||
return (armature_space_matrix @ relative_matrix).inverted()
|
||||
case 'ROOT':
|
||||
armature_data = typing_cast(Armature, armature_object.data)
|
||||
armature_space_matrix = get_object_space_matrix(armature_object) @ armature_data.bones[0].matrix_local
|
||||
return armature_space_matrix.inverted()
|
||||
root_armature_data = typing_cast(Armature, root_armature_object.data)
|
||||
if len(root_armature_data.bones) == 0:
|
||||
raise RuntimeError(f'Armature {root_armature_data.name} has no bones')
|
||||
return (armature_space_matrix @ relative_matrix @ root_armature_data.bones[0].matrix_local).inverted()
|
||||
case _:
|
||||
assert False, f'Invalid export space: {export_space}'
|
||||
|
||||
|
||||
def _get_material_name_indices(obj: Object, material_names: List[str]) -> Iterable[int]:
|
||||
def _get_material_name_indices(obj: Object, material_names: list[str]) -> Iterable[int]:
|
||||
"""
|
||||
Returns the index of the material in the list of material names.
|
||||
If the material is not found, the index 0 is returned.
|
||||
If the material is not found or the slot is empty, the index of 'None' is returned.
|
||||
"""
|
||||
for material_slot in obj.material_slots:
|
||||
if material_slot.material is None:
|
||||
yield 0
|
||||
else:
|
||||
try:
|
||||
yield material_names.index(material_slot.material.name)
|
||||
material_name = material_slot.material.name if material_slot.material is not None else 'None'
|
||||
yield material_names.index(material_name)
|
||||
except ValueError:
|
||||
yield 0
|
||||
|
||||
@@ -141,6 +88,7 @@ def build_psk(context: Context, input_objects: PskInputObjects, options: PskBuil
|
||||
assert context.window_manager
|
||||
|
||||
armature_objects = list(input_objects.armature_objects)
|
||||
armature_object_tree = ObjectTree(input_objects.armature_objects)
|
||||
|
||||
warnings: list[str] = []
|
||||
psk = Psk()
|
||||
@@ -156,18 +104,37 @@ def build_psk(context: Context, input_objects: PskInputObjects, options: PskBuil
|
||||
bone_collection_indices=options.bone_collection_indices
|
||||
)
|
||||
|
||||
psk.bones = [psx_bone for psx_bone, _ in psx_bone_create_result.bones]
|
||||
psk.bones = [bone.psx_bone for bone in psx_bone_create_result.bones]
|
||||
|
||||
# Materials
|
||||
mesh_objects = [dfs_object.obj for dfs_object in input_objects.mesh_dfs_objects]
|
||||
|
||||
match options.material_order_mode:
|
||||
case 'AUTOMATIC':
|
||||
mesh_objects = [dfs_object.obj for dfs_object in input_objects.mesh_dfs_objects]
|
||||
materials = list(get_materials_for_mesh_objects(context.evaluated_depsgraph_get(), mesh_objects))
|
||||
case 'MANUAL':
|
||||
# The material name list may contain materials that are not on the mesh objects.
|
||||
# Therefore, we can take the material_name_list as gospel and simply use it as a lookup table.
|
||||
# If a look-up fails, replace it with an empty material.
|
||||
materials = [bpy.data.materials.get(x, None) for x in options.material_name_list]
|
||||
|
||||
# Check if any mesh needs a None material (has no slots or empty slots)
|
||||
needs_none_material = False
|
||||
for mesh_object in mesh_objects:
|
||||
evaluated_mesh_object = mesh_object.evaluated_get(context.evaluated_depsgraph_get())
|
||||
if len(evaluated_mesh_object.material_slots) == 0:
|
||||
needs_none_material = True
|
||||
break
|
||||
for material_slot in evaluated_mesh_object.material_slots:
|
||||
if material_slot.material is None:
|
||||
needs_none_material = True
|
||||
break
|
||||
if needs_none_material:
|
||||
break
|
||||
|
||||
# Append None at the end if needed and not already present
|
||||
if needs_none_material and None not in materials:
|
||||
materials.append(None)
|
||||
case _:
|
||||
assert False, f'Invalid material order mode: {options.material_order_mode}'
|
||||
|
||||
@@ -180,11 +147,7 @@ def build_psk(context: Context, input_objects: PskInputObjects, options: PskBuil
|
||||
material.psk.mesh_triangle_bit_flags)
|
||||
psk.materials.append(psk_material)
|
||||
|
||||
# TODO: This wasn't left in a good state. We should detect if we need to add a "default" material.
|
||||
# This can be done by checking if there is an empty material slot on any of the mesh objects, or if there are
|
||||
# no material slots on any of the mesh objects.
|
||||
# If so, it should be added to the end of the list of materials, and its index should mapped to a None value in the
|
||||
# material indices list.
|
||||
# Ensure at least one material exists
|
||||
if len(psk.materials) == 0:
|
||||
# Add a default material if no materials are present.
|
||||
psk_material = Psk.Material()
|
||||
@@ -192,30 +155,32 @@ def build_psk(context: Context, input_objects: PskInputObjects, options: PskBuil
|
||||
psk.materials.append(psk_material)
|
||||
|
||||
context.window_manager.progress_begin(0, len(input_objects.mesh_dfs_objects))
|
||||
|
||||
coordinate_system_matrix = get_coordinate_system_transform(options.forward_axis, options.up_axis)
|
||||
root_armature_object = next(iter(armature_object_tree), None)
|
||||
|
||||
# Calculate the export spaces for the armature objects.
|
||||
# This is used later to transform the mesh object geometry into the export space.
|
||||
armature_mesh_export_space_matrices: Dict[Optional[Object], Matrix] = {None: Matrix.Identity(4)}
|
||||
armature_mesh_export_space_matrices: dict[Object | None, Matrix] = {None: Matrix.Identity(4)}
|
||||
|
||||
if options.export_space == 'ARMATURE':
|
||||
# For meshes without an armature modifier, we need to set the export space to the armature object.
|
||||
armature_mesh_export_space_matrices[None] = _get_mesh_export_space_matrix(next(iter(input_objects.armature_objects), None), options.export_space)
|
||||
for armature_object in armature_objects:
|
||||
armature_mesh_export_space_matrices[armature_object] = _get_mesh_export_space_matrix(armature_object, options.export_space)
|
||||
# For meshes without an armature modifier, we need to set the export space to the first armature object.
|
||||
armature_mesh_export_space_matrices[None] = _get_mesh_export_space_matrix(root_armature_object, options.export_space)
|
||||
|
||||
scale_matrix = Matrix.Scale(options.scale, 4)
|
||||
|
||||
original_armature_object_pose_positions = {a: a.data.pose_position for a in armature_objects}
|
||||
# TODO: also handle the case of multiple roots; dont' just assume we have one!
|
||||
for armature_node in iter(armature_object_tree):
|
||||
armature_mesh_export_space_matrices[armature_node.object] = _get_mesh_export_space_matrix(armature_node, options.export_space)
|
||||
|
||||
# Temporarily force the armature into the rest position.
|
||||
# We will undo this later.
|
||||
# The original pose position setting will be restored at the end.
|
||||
original_armature_object_pose_positions = {a: a.data.pose_position for a in armature_objects}
|
||||
for armature_object in armature_objects:
|
||||
armature_data = typing_cast(Armature, armature_object.data)
|
||||
armature_data.pose_position = 'REST'
|
||||
|
||||
material_names = [m.name if m is not None else 'None' for m in materials]
|
||||
|
||||
scale_matrix = Matrix.Scale(options.scale, 4)
|
||||
|
||||
for object_index, input_mesh_object in enumerate(input_objects.mesh_dfs_objects):
|
||||
obj, matrix_world = input_mesh_object.obj, input_mesh_object.matrix_world
|
||||
armature_object = get_armature_for_mesh_object(obj)
|
||||
@@ -225,8 +190,12 @@ def build_psk(context: Context, input_objects: PskInputObjects, options: PskBuil
|
||||
material_indices = list(_get_material_name_indices(obj, material_names))
|
||||
|
||||
if len(material_indices) == 0:
|
||||
# Add a default material if no materials are present.
|
||||
material_indices = [0]
|
||||
# If the mesh has no material slots, map to the 'None' material index
|
||||
try:
|
||||
none_material_index = material_names.index('None')
|
||||
except ValueError:
|
||||
none_material_index = 0
|
||||
material_indices = [none_material_index]
|
||||
|
||||
# Store the reference to the evaluated object and data so that we can clean them up later.
|
||||
evaluated_mesh_object = None
|
||||
@@ -286,7 +255,7 @@ def build_psk(context: Context, input_objects: PskInputObjects, options: PskBuil
|
||||
case _:
|
||||
assert False, f'Invalid export space: {options.export_space}'
|
||||
|
||||
vertex_transform_matrix = scale_matrix @ coordinate_system_matrix @ mesh_export_space_matrix
|
||||
vertex_transform_matrix = scale_matrix @ coordinate_system_matrix.inverted() @ mesh_export_space_matrix
|
||||
point_transform_matrix = vertex_transform_matrix @ mesh_object.matrix_world
|
||||
|
||||
# Vertices
|
||||
@@ -319,6 +288,7 @@ def build_psk(context: Context, input_objects: PskInputObjects, options: PskBuil
|
||||
for loop_index, loop in enumerate(mesh_data.loops):
|
||||
wedges.append(Psk.Wedge(point_index=loop.vertex_index + vertex_offset, u=0.0, v=0.0))
|
||||
|
||||
|
||||
# Assign material indices to the wedges.
|
||||
for triangle in mesh_data.loop_triangles:
|
||||
for loop_index in triangle.loops:
|
||||
@@ -363,7 +333,7 @@ def build_psk(context: Context, input_objects: PskInputObjects, options: PskBuil
|
||||
|
||||
bone_names = psx_bone_create_result.armature_object_bone_names[armature_object]
|
||||
vertex_group_names = [x.name for x in mesh_object.vertex_groups]
|
||||
vertex_group_bone_indices: Dict[int, int] = dict()
|
||||
vertex_group_bone_indices: dict[int, int] = dict()
|
||||
for vertex_group_index, vertex_group_name in enumerate(vertex_group_names):
|
||||
try:
|
||||
vertex_group_bone_indices[vertex_group_index] = bone_names.index(vertex_group_name) + bone_index_offset
|
||||
|
||||
@@ -1,22 +1,21 @@
|
||||
from pathlib import Path
|
||||
from typing import Iterable, List
|
||||
from typing import Iterable, cast as typing_cast
|
||||
|
||||
import bpy
|
||||
from bpy.props import BoolProperty, StringProperty
|
||||
from bpy.props import StringProperty
|
||||
from bpy.types import Context, Depsgraph, Material, Object, Operator, Scene
|
||||
from bpy_extras.io_utils import ExportHelper
|
||||
|
||||
from .properties import PskExportMixin
|
||||
from .properties import PSK_PG_export, PskExportMixin
|
||||
from ..builder import (
|
||||
PskBuildOptions,
|
||||
build_psk,
|
||||
get_materials_for_mesh_objects,
|
||||
get_psk_input_objects_for_collection,
|
||||
get_psk_input_objects_for_context,
|
||||
)
|
||||
from psk_psa_py.psk.writer import write_psk_to_path
|
||||
from ...shared.helpers import PsxBoneCollection, get_collection_export_operator_from_context, populate_bone_collection_list
|
||||
from ...shared.helpers import PsxBoneCollection, get_collection_export_operator_from_context, get_psk_input_objects_for_collection, populate_bone_collection_list, get_psk_input_objects_for_context
|
||||
from ...shared.ui import draw_bone_filter_mode
|
||||
from ...shared.operators import PSK_OT_bone_collection_list_populate, PSK_OT_bone_collection_list_select_all
|
||||
|
||||
|
||||
def populate_material_name_list(depsgraph: Depsgraph, mesh_objects: Iterable[Object], material_list):
|
||||
@@ -30,55 +29,10 @@ def populate_material_name_list(depsgraph: Depsgraph, mesh_objects: Iterable[Obj
|
||||
material_list.clear()
|
||||
for index, material in enumerate(materials):
|
||||
m = material_list.add()
|
||||
m.material_name = material.name
|
||||
m.material_name = material.name if material is not None else 'None'
|
||||
m.index = index
|
||||
|
||||
|
||||
|
||||
class PSK_OT_bone_collection_list_populate(Operator):
|
||||
bl_idname = 'psk.bone_collection_list_populate'
|
||||
bl_label = 'Populate Bone Collection List'
|
||||
bl_description = 'Populate the bone collection list from the armature that will be used in this collection export'
|
||||
bl_options = {'INTERNAL'}
|
||||
|
||||
def execute(self, context):
|
||||
export_operator = get_collection_export_operator_from_context(context)
|
||||
if export_operator is None:
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context')
|
||||
return {'CANCELLED'}
|
||||
if context.collection is None:
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, 'No active collection')
|
||||
return {'CANCELLED'}
|
||||
try:
|
||||
input_objects = get_psk_input_objects_for_collection(context.collection)
|
||||
except RuntimeError as e:
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||
return {'CANCELLED'}
|
||||
if not input_objects.armature_objects:
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, 'No armature modifiers found on mesh objects')
|
||||
return {'CANCELLED'}
|
||||
populate_bone_collection_list(export_operator.bone_collection_list, input_objects.armature_objects)
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
class PSK_OT_bone_collection_list_select_all(Operator):
|
||||
bl_idname = 'psk.bone_collection_list_select_all'
|
||||
bl_label = 'Select All'
|
||||
bl_description = 'Select all bone collections'
|
||||
bl_options = {'INTERNAL'}
|
||||
|
||||
is_selected: BoolProperty(default=True)
|
||||
|
||||
def execute(self, context):
|
||||
export_operator = get_collection_export_operator_from_context(context)
|
||||
if export_operator is None:
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context')
|
||||
return {'CANCELLED'}
|
||||
for item in export_operator.bone_collection_list:
|
||||
item.is_selected = self.is_selected
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
class PSK_OT_populate_material_name_list(Operator):
|
||||
bl_idname = 'psk.export_populate_material_name_list'
|
||||
bl_label = 'Populate Material Name List'
|
||||
@@ -90,6 +44,7 @@ class PSK_OT_populate_material_name_list(Operator):
|
||||
if export_operator is None:
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context')
|
||||
return {'CANCELLED'}
|
||||
export_operator = typing_cast(PskExportMixin, export_operator)
|
||||
depsgraph = context.evaluated_depsgraph_get()
|
||||
assert context.collection
|
||||
input_objects = get_psk_input_objects_for_collection(context.collection)
|
||||
@@ -124,6 +79,7 @@ class PSK_OT_material_list_name_add(Operator):
|
||||
if export_operator is None:
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context')
|
||||
return {'CANCELLED'}
|
||||
export_operator = typing_cast(PskExportMixin, export_operator)
|
||||
m = export_operator.material_name_list.add()
|
||||
m.material_name = self.name
|
||||
m.index = len(export_operator.material_name_list) - 1
|
||||
@@ -139,11 +95,11 @@ class PSK_OT_material_list_move_up(Operator):
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
pg = getattr(context.scene, 'psk_export')
|
||||
pg = typing_cast(PSK_PG_export, getattr(context.scene, 'psk_export'))
|
||||
return pg.material_name_list_index > 0
|
||||
|
||||
def execute(self, context):
|
||||
pg = getattr(context.scene, 'psk_export')
|
||||
pg = typing_cast(PSK_PG_export, getattr(context.scene, 'psk_export'))
|
||||
pg.material_name_list.move(pg.material_name_list_index, pg.material_name_list_index - 1)
|
||||
pg.material_name_list_index -= 1
|
||||
return {'FINISHED'}
|
||||
@@ -157,11 +113,11 @@ class PSK_OT_material_list_move_down(Operator):
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
pg = getattr(context.scene, 'psk_export')
|
||||
pg = typing_cast(PSK_PG_export, getattr(context.scene, 'psk_export'))
|
||||
return pg.material_name_list_index < len(pg.material_name_list) - 1
|
||||
|
||||
def execute(self, context):
|
||||
pg = getattr(context.scene, 'psk_export')
|
||||
pg = typing_cast(PSK_PG_export, getattr(context.scene, 'psk_export'))
|
||||
pg.material_name_list.move(pg.material_name_list_index, pg.material_name_list_index + 1)
|
||||
pg.material_name_list_index += 1
|
||||
return {'FINISHED'}
|
||||
@@ -178,6 +134,7 @@ class PSK_OT_material_list_name_move_up(Operator):
|
||||
export_operator = get_collection_export_operator_from_context(context)
|
||||
if export_operator is None:
|
||||
return False
|
||||
export_operator = typing_cast(PskExportMixin, export_operator)
|
||||
return export_operator.material_name_list_index > 0
|
||||
|
||||
def execute(self, context):
|
||||
@@ -185,6 +142,7 @@ class PSK_OT_material_list_name_move_up(Operator):
|
||||
if export_operator is None:
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context')
|
||||
return {'CANCELLED'}
|
||||
export_operator = typing_cast(PskExportMixin, export_operator)
|
||||
export_operator.material_name_list.move(export_operator.material_name_list_index, export_operator.material_name_list_index - 1)
|
||||
export_operator.material_name_list_index -= 1
|
||||
return {'FINISHED'}
|
||||
@@ -201,6 +159,7 @@ class PSK_OT_material_list_name_move_down(Operator):
|
||||
export_operator = get_collection_export_operator_from_context(context)
|
||||
if export_operator is None:
|
||||
return False
|
||||
export_operator = typing_cast(PskExportMixin, export_operator)
|
||||
return export_operator.material_name_list_index < len(export_operator.material_name_list) - 1
|
||||
|
||||
def execute(self, context):
|
||||
@@ -208,24 +167,38 @@ class PSK_OT_material_list_name_move_down(Operator):
|
||||
if export_operator is None:
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context')
|
||||
return {'CANCELLED'}
|
||||
export_operator = typing_cast(PskExportMixin, export_operator)
|
||||
export_operator.material_name_list.move(export_operator.material_name_list_index, export_operator.material_name_list_index + 1)
|
||||
export_operator.material_name_list_index += 1
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
def get_sorted_materials_by_names(materials: Iterable[Material], material_names: List[str]) -> List[Material]:
|
||||
def get_sorted_materials_by_names(materials: Iterable[Material | None], material_names: list[str]) -> list[Material | None]:
|
||||
"""
|
||||
Sorts the materials by the order of the material names list. Any materials not in the list will be appended to the
|
||||
end of the list in the order they are found.
|
||||
end of the list in the order they are found. None materials (representing empty material slots) are always
|
||||
appended at the very end.
|
||||
|
||||
@param materials: A list of materials to sort
|
||||
@param materials: A list of materials to sort (can include None)
|
||||
@param material_names: A list of material names to sort by
|
||||
@return: A sorted list of materials
|
||||
@return: A sorted list of materials (with None at the end if present)
|
||||
"""
|
||||
materials = list(materials)
|
||||
has_none = None in materials
|
||||
materials = [m for m in materials if m is not None]
|
||||
|
||||
materials_in_collection = [m for m in materials if m.name in material_names]
|
||||
materials_not_in_collection = [m for m in materials if m.name not in material_names]
|
||||
materials_in_collection = sorted(materials_in_collection, key=lambda x: material_names.index(x.name))
|
||||
return materials_in_collection + materials_not_in_collection
|
||||
|
||||
result: list[Material | None] = []
|
||||
result.extend(materials_in_collection)
|
||||
result.extend(materials_not_in_collection)
|
||||
|
||||
if has_none:
|
||||
result.append(None)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def get_psk_build_options_from_property_group(scene: Scene, pg: PskExportMixin) -> PskBuildOptions:
|
||||
@@ -291,6 +264,9 @@ class PSK_OT_export_collection(Operator, ExportHelper, PskExportMixin):
|
||||
self.report({'WARNING'}, f'PSK export successful with {len(result.warnings)} warnings')
|
||||
else:
|
||||
self.report({'INFO'}, f'PSK export successful')
|
||||
except IOError as e:
|
||||
self.report({'ERROR'}, f'Failed to write PSK file ({filepath}): {e}')
|
||||
return {'CANCELLED'}
|
||||
except RuntimeError as e:
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||
return {'CANCELLED'}
|
||||
@@ -411,7 +387,7 @@ class PSK_OT_export(Operator, ExportHelper):
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||
return {'CANCELLED'}
|
||||
|
||||
pg = getattr(context.scene, 'psk_export')
|
||||
pg = typing_cast(PSK_PG_export, getattr(context.scene, 'psk_export'))
|
||||
|
||||
populate_bone_collection_list(pg.bone_collection_list, input_objects.armature_objects)
|
||||
|
||||
@@ -433,7 +409,7 @@ class PSK_OT_export(Operator, ExportHelper):
|
||||
|
||||
assert layout
|
||||
|
||||
pg = getattr(context.scene, 'psk_export')
|
||||
pg = typing_cast(PSK_PG_export, getattr(context.scene, 'psk_export'))
|
||||
|
||||
# Mesh
|
||||
mesh_header, mesh_panel = layout.panel('Mesh', default_closed=False)
|
||||
@@ -531,6 +507,9 @@ class PSK_OT_export(Operator, ExportHelper):
|
||||
self.report({'WARNING'}, f'PSK export successful with {len(result.warnings)} warnings')
|
||||
else:
|
||||
self.report({'INFO'}, f'PSK export successful')
|
||||
except IOError as e:
|
||||
self.report({'ERROR'}, f'Failed to write PSK file ({self.filepath}): {e}')
|
||||
return {'CANCELLED'}
|
||||
except RuntimeError as e:
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||
return {'CANCELLED'}
|
||||
@@ -543,8 +522,6 @@ _classes = (
|
||||
PSK_OT_material_list_move_down,
|
||||
PSK_OT_export,
|
||||
PSK_OT_export_collection,
|
||||
PSK_OT_bone_collection_list_populate,
|
||||
PSK_OT_bone_collection_list_select_all,
|
||||
PSK_OT_populate_material_name_list,
|
||||
PSK_OT_material_list_name_move_up,
|
||||
PSK_OT_material_list_name_move_down,
|
||||
|
||||
@@ -8,7 +8,7 @@ from bpy.props import (
|
||||
)
|
||||
from bpy.types import Material, PropertyGroup
|
||||
|
||||
from ...shared.types import ExportSpaceMixin, TransformMixin, PsxBoneExportMixin
|
||||
from ...shared.types import ExportSpaceMixin, TransformMixin, PsxBoneExportMixin, TransformSourceMixin
|
||||
|
||||
object_eval_state_items = (
|
||||
('EVALUATED', 'Evaluated', 'Use data from fully evaluated object'),
|
||||
@@ -20,22 +20,13 @@ material_order_mode_items = (
|
||||
('MANUAL', 'Manual', 'Manually arrange the materials'),
|
||||
)
|
||||
|
||||
transform_source_items = (
|
||||
('SCENE', 'Scene', 'Use the scene transform settings'),
|
||||
('CUSTOM', 'Custom', 'Use custom transform settings'),
|
||||
)
|
||||
|
||||
class PSK_PG_material_list_item(PropertyGroup):
|
||||
material: PointerProperty(type=Material)
|
||||
index: IntProperty()
|
||||
|
||||
|
||||
class PSK_PG_material_name_list_item(PropertyGroup):
|
||||
material_name: StringProperty()
|
||||
index: IntProperty()
|
||||
|
||||
|
||||
class PskExportMixin(ExportSpaceMixin, TransformMixin, PsxBoneExportMixin):
|
||||
class PskExportMixin(ExportSpaceMixin, TransformMixin, PsxBoneExportMixin, TransformSourceMixin):
|
||||
object_eval_state: EnumProperty(
|
||||
items=object_eval_state_items,
|
||||
name='Object Evaluation State',
|
||||
@@ -54,11 +45,6 @@ class PskExportMixin(ExportSpaceMixin, TransformMixin, PsxBoneExportMixin):
|
||||
default=False,
|
||||
description='Export VTXNORMS section.'
|
||||
)
|
||||
transform_source: EnumProperty(
|
||||
items=transform_source_items,
|
||||
name='Transform Source',
|
||||
default='SCENE'
|
||||
)
|
||||
|
||||
|
||||
class PSK_PG_export(PropertyGroup, PskExportMixin):
|
||||
@@ -66,7 +52,6 @@ class PSK_PG_export(PropertyGroup, PskExportMixin):
|
||||
|
||||
|
||||
_classes = (
|
||||
PSK_PG_material_list_item,
|
||||
PSK_PG_material_name_list_item,
|
||||
PSK_PG_export,
|
||||
)
|
||||
|
||||
20
io_scene_psk_psa/psk/export/properties.pyi
Normal file
20
io_scene_psk_psa/psk/export/properties.pyi
Normal file
@@ -0,0 +1,20 @@
|
||||
from bpy.types import Material
|
||||
|
||||
from ...shared.types import BpyCollectionProperty, ExportSpaceMixin, TransformMixin, PsxBoneExportMixin, TransformSourceMixin
|
||||
|
||||
|
||||
class PSK_PG_material_name_list_item:
|
||||
material_name: str
|
||||
index: int
|
||||
|
||||
|
||||
class PskExportMixin(ExportSpaceMixin, TransformMixin, PsxBoneExportMixin, TransformSourceMixin):
|
||||
object_eval_state: str
|
||||
material_order_mode: str
|
||||
material_name_list: BpyCollectionProperty[PSK_PG_material_name_list_item]
|
||||
material_name_list_index: int
|
||||
should_export_vertex_normals: bool
|
||||
|
||||
|
||||
class PSK_PG_export(PskExportMixin):
|
||||
pass
|
||||
@@ -1,13 +1,43 @@
|
||||
import bpy
|
||||
from bpy.types import UIList
|
||||
from typing import cast as typing_cast
|
||||
|
||||
from .properties import PSK_PG_material_name_list_item
|
||||
|
||||
|
||||
class PSK_UL_material_names(UIList):
|
||||
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
|
||||
def draw_item(
|
||||
self,
|
||||
context,
|
||||
layout,
|
||||
data,
|
||||
item,
|
||||
icon,
|
||||
active_data,
|
||||
active_property,
|
||||
index,
|
||||
flt_flag
|
||||
):
|
||||
row = layout.row()
|
||||
item = typing_cast(PSK_PG_material_name_list_item, item)
|
||||
material = bpy.data.materials.get(item.material_name, None)
|
||||
icon_value = layout.icon(material) if material else 0
|
||||
row.prop(item, 'material_name', text='', emboss=False, icon_value=icon_value, icon='BLANK1' if icon_value == 0 else 'NONE')
|
||||
|
||||
# If the material is not found by name and the name is not 'None', show a not found icon
|
||||
if item.material_name == 'None':
|
||||
icon = 'NODE_MATERIAL'
|
||||
else:
|
||||
icon = 'NOT_FOUND' if material is None else 'NONE'
|
||||
|
||||
row.prop(item, 'material_name', text='', emboss=False,
|
||||
icon_value=layout.icon(material) if material else 0,
|
||||
icon=icon)
|
||||
|
||||
# Add right-aligned "Not Found" label if material is not found
|
||||
if item.material_name != 'None' and material is None:
|
||||
label_row = row.row()
|
||||
label_row.alignment = 'RIGHT'
|
||||
label_row.enabled = False
|
||||
label_row.label(text='Not Found')
|
||||
|
||||
|
||||
_classes = (
|
||||
|
||||
@@ -163,7 +163,7 @@ class PSK_OT_import_drag_and_drop(Operator, PskImportMixin):
|
||||
|
||||
|
||||
# TODO: move to another file
|
||||
class PSK_FH_import(FileHandler):
|
||||
class PSK_FH_file_handler(FileHandler):
|
||||
bl_idname = 'PSK_FH_import'
|
||||
bl_label = 'Unreal PSK'
|
||||
bl_import_operator = PSK_OT_import_drag_and_drop.bl_idname
|
||||
@@ -178,7 +178,7 @@ class PSK_FH_import(FileHandler):
|
||||
_classes = (
|
||||
PSK_OT_import,
|
||||
PSK_OT_import_drag_and_drop,
|
||||
PSK_FH_import,
|
||||
PSK_FH_file_handler,
|
||||
)
|
||||
|
||||
from bpy.utils import register_classes_factory
|
||||
|
||||
@@ -4,7 +4,7 @@ import numpy as np
|
||||
|
||||
from bpy.types import Context, Object, VertexGroup, ArmatureModifier, FloatColorAttribute
|
||||
from mathutils import Matrix, Quaternion, Vector
|
||||
from typing import List, Optional, cast as typing_cast
|
||||
from typing import cast as typing_cast
|
||||
|
||||
from psk_psa_py.psk.data import Psk
|
||||
from psk_psa_py.shared.data import PsxBone
|
||||
@@ -25,7 +25,7 @@ class PskImportOptions:
|
||||
self.bone_length = 1.0
|
||||
self.should_import_materials = True
|
||||
self.scale = 1.0
|
||||
self.bdk_repository_id = None
|
||||
self.bdk_repository_id: str | None = None
|
||||
|
||||
|
||||
class ImportBone:
|
||||
@@ -35,12 +35,11 @@ class ImportBone:
|
||||
def __init__(self, index: int, psk_bone: PsxBone):
|
||||
self.index: int = index
|
||||
self.psk_bone: PsxBone = psk_bone
|
||||
self.parent: Optional[ImportBone] = None
|
||||
self.parent: ImportBone | None = None
|
||||
self.local_rotation: Quaternion = Quaternion()
|
||||
self.local_translation: Vector = Vector()
|
||||
self.world_rotation_matrix: Matrix = Matrix()
|
||||
self.world_matrix: Matrix = Matrix()
|
||||
self.vertex_group = None
|
||||
self.original_rotation: Quaternion = Quaternion()
|
||||
self.original_location: Vector = Vector()
|
||||
self.post_rotation: Quaternion = Quaternion()
|
||||
@@ -48,9 +47,9 @@ class ImportBone:
|
||||
|
||||
class PskImportResult:
|
||||
def __init__(self):
|
||||
self.warnings: List[str] = []
|
||||
self.armature_object: Optional[Object] = None
|
||||
self.mesh_object: Optional[Object] = None
|
||||
self.warnings: list[str] = []
|
||||
self.armature_object: Object | None = None
|
||||
self.mesh_object: Object | None = None
|
||||
|
||||
@property
|
||||
def root_object(self) -> Object:
|
||||
@@ -83,7 +82,7 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions)
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
import_bones = []
|
||||
import_bones: list[ImportBone] = []
|
||||
|
||||
for bone_index, psk_bone in enumerate(psk.bones):
|
||||
import_bone = ImportBone(bone_index, psk_bone)
|
||||
@@ -107,10 +106,16 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions)
|
||||
bone.world_rotation_matrix = bone.local_rotation.conjugated().to_matrix()
|
||||
bone.world_rotation_matrix.rotate(parent.world_rotation_matrix)
|
||||
|
||||
# Create all the bones up-front.
|
||||
# This allows us to set up the parent-child relationships properly even if the parent bone comes after the child bone in the PSK file.
|
||||
for import_bone in import_bones:
|
||||
bone_name = import_bone.psk_bone.name.decode('utf-8')
|
||||
edit_bone = armature_data.edit_bones.new(bone_name)
|
||||
|
||||
for import_bone in import_bones:
|
||||
bone_name = import_bone.psk_bone.name.decode('utf-8')
|
||||
edit_bone = armature_data.edit_bones[bone_name]
|
||||
|
||||
if import_bone.parent is not None:
|
||||
edit_bone.parent = armature_data.edit_bones[import_bone.psk_bone.parent_index]
|
||||
else:
|
||||
@@ -257,7 +262,7 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions)
|
||||
# Weights
|
||||
# Get a list of all bones that have weights associated with them.
|
||||
vertex_group_bone_indices = set(map(lambda weight: weight.bone_index, psk.weights))
|
||||
vertex_groups: List[Optional[VertexGroup]] = [None] * len(psk.bones)
|
||||
vertex_groups: list[VertexGroup | None] = [None] * len(psk.bones)
|
||||
for bone_index, psk_bone in map(lambda x: (x, psk.bones[x]), vertex_group_bone_indices):
|
||||
vertex_groups[bone_index] = mesh_object.vertex_groups.new(name=psk_bone.name.decode('windows-1252'))
|
||||
|
||||
|
||||
23
io_scene_psk_psa/psk/properties.pyi
Normal file
23
io_scene_psk_psa/psk/properties.pyi
Normal file
@@ -0,0 +1,23 @@
|
||||
class PSX_PG_material:
|
||||
mesh_triangle_type: str
|
||||
mesh_triangle_bit_flags: set[str]
|
||||
|
||||
|
||||
class PskImportMixin:
|
||||
should_import_vertex_colors: bool
|
||||
vertex_color_space: str
|
||||
should_import_vertex_normals: bool
|
||||
should_import_extra_uvs: bool
|
||||
components: str
|
||||
should_import_mesh: bool
|
||||
should_import_materials: bool
|
||||
should_import_armature: bool
|
||||
bone_length: float
|
||||
should_import_shape_keys: bool
|
||||
scale: float
|
||||
bdk_repository_id: str
|
||||
|
||||
|
||||
def triangle_type_and_bit_flags_to_poly_flags(mesh_triangle_type: str, mesh_triangle_bit_flags: set[str]) -> int: ...
|
||||
|
||||
def poly_flags_to_triangle_type_and_bit_flags(poly_flags: int) -> tuple[str, set[str]]: ...
|
||||
@@ -5,7 +5,7 @@ These functions are used to iterate over objects in a collection or view layer i
|
||||
instances. This is useful for exporters that need to traverse the object hierarchy in a predictable order.
|
||||
"""
|
||||
|
||||
from typing import Optional, Set, Iterable, List
|
||||
from typing import Iterable
|
||||
|
||||
from bpy.types import Collection, Object, ViewLayer, LayerCollection
|
||||
from mathutils import Matrix
|
||||
@@ -15,7 +15,7 @@ class DfsObject:
|
||||
"""
|
||||
Represents an object in a depth-first search.
|
||||
"""
|
||||
def __init__(self, obj: Object, instance_objects: List[Object], matrix_world: Matrix):
|
||||
def __init__(self, obj: Object, instance_objects: list[Object], matrix_world: Matrix):
|
||||
self.obj = obj
|
||||
self.instance_objects = instance_objects
|
||||
self.matrix_world = matrix_world
|
||||
@@ -85,9 +85,9 @@ def dfs_collection_objects(collection: Collection, visible_only: bool = False) -
|
||||
|
||||
def _dfs_collection_objects_recursive(
|
||||
collection: Collection,
|
||||
instance_objects: Optional[List[Object]] = None,
|
||||
instance_objects: list[Object] | None = None,
|
||||
matrix_world: Matrix = Matrix.Identity(4),
|
||||
visited: Optional[Set[Object]]=None
|
||||
visited: set[Object] | None = None
|
||||
) -> Iterable[DfsObject]:
|
||||
"""
|
||||
Depth-first search of objects in a collection, including recursing into instances.
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
import bpy
|
||||
from collections import Counter
|
||||
from typing import List, Iterable, Optional, Dict, Tuple, cast as typing_cast
|
||||
from bpy.types import Armature, AnimData, Collection, Context, Object, ArmatureModifier, SpaceProperties
|
||||
from typing import Iterable, cast as typing_cast
|
||||
from bpy.types import Armature, AnimData, Collection, Context, Object, ArmatureModifier, SpaceProperties, PropertyGroup, Material
|
||||
from mathutils import Matrix, Vector, Quaternion as BpyQuaternion
|
||||
from psk_psa_py.shared.data import PsxBone, Vector3, Quaternion
|
||||
from psk_psa_py.shared.data import PsxBone, Quaternion, Vector3
|
||||
|
||||
from ..shared.types import BpyCollectionProperty, PSX_PG_bone_collection_list_item
|
||||
|
||||
|
||||
def rgb_to_srgb(c: float) -> float:
|
||||
@@ -23,7 +25,11 @@ def get_nla_strips_in_frame_range(animation_data: AnimData, frame_min: float, fr
|
||||
yield strip
|
||||
|
||||
|
||||
def populate_bone_collection_list(bone_collection_list, armature_objects: Iterable[Object], primary_key: str = 'OBJECT'):
|
||||
def populate_bone_collection_list(
|
||||
bone_collection_list: BpyCollectionProperty[PSX_PG_bone_collection_list_item],
|
||||
armature_objects: Iterable[Object],
|
||||
primary_key: str = 'OBJECT'
|
||||
):
|
||||
"""
|
||||
Updates the bone collection list.
|
||||
|
||||
@@ -63,37 +69,40 @@ def populate_bone_collection_list(bone_collection_list, armature_objects: Iterab
|
||||
unique_armature_data = set()
|
||||
|
||||
for armature_object in armature_objects:
|
||||
armature = typing_cast(Armature, armature_object.data)
|
||||
armature_data = typing_cast(Armature, armature_object.data)
|
||||
|
||||
if armature is None:
|
||||
if armature_data is None:
|
||||
continue
|
||||
|
||||
if primary_key == 'DATA' and armature_object.data in unique_armature_data:
|
||||
if primary_key == 'DATA':
|
||||
if armature_data in unique_armature_data:
|
||||
# Skip this armature since we have already added an entry for it and we are using the data as the key.
|
||||
continue
|
||||
unique_armature_data.add(armature_data)
|
||||
|
||||
unique_armature_data.add(armature_object.data)
|
||||
unassigned_bone_count = sum(map(lambda bone: 1 if len(bone.collections) == 0 else 0, armature_data.bones))
|
||||
|
||||
if unassigned_bone_count > 0:
|
||||
item = bone_collection_list.add()
|
||||
item.armature_object_name = armature_object.name
|
||||
item.armature_data_name = armature_object.data.name if armature_object.data else ''
|
||||
item.name = 'Unassigned' # TODO: localize
|
||||
item.armature_data_name = armature_data.name if armature_data else ''
|
||||
item.name = 'Unassigned'
|
||||
item.index = -1
|
||||
# Count the number of bones without an assigned bone collection
|
||||
item.count = sum(map(lambda bone: 1 if len(bone.collections) == 0 else 0, armature.bones))
|
||||
item.count = unassigned_bone_count
|
||||
item.is_selected = unassigned_collection_is_selected
|
||||
|
||||
for bone_collection_index, bone_collection in enumerate(armature.collections_all):
|
||||
for bone_collection_index, bone_collection in enumerate(armature_data.collections_all):
|
||||
item = bone_collection_list.add()
|
||||
item.armature_object_name = armature_object.name
|
||||
item.armature_data_name = armature_object.data.name if armature_object.data else ''
|
||||
item.armature_data_name = armature_data.name if armature_data else ''
|
||||
item.name = bone_collection.name
|
||||
item.index = bone_collection_index
|
||||
item.count = len(bone_collection.bones)
|
||||
item.is_selected = bone_collection.name in selected_assigned_collection_names if has_selected_collections else True
|
||||
|
||||
|
||||
def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_collection_indices: Iterable[int]) -> List[str]:
|
||||
def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_collection_indices: Iterable[int]) -> list[str]:
|
||||
"""
|
||||
Returns a sorted list of bone indices that should be exported for the given bone filter mode and bone collections.
|
||||
|
||||
@@ -146,32 +155,8 @@ def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_c
|
||||
# We use the bone names for the return values because the bone name is a more universal way of referencing them.
|
||||
# For example, users of this function may modify bone lists, which would invalidate the indices and require an
|
||||
# index mapping scheme to resolve it. Using strings is more comfy and results in less code downstream.
|
||||
instigator_bone_names = [bones[x[1]].name if x[1] is not None else None for x in bone_indices]
|
||||
bone_names = [bones[x[0]].name for x in bone_indices]
|
||||
|
||||
# Ensure that the hierarchy we are sending back has a single root bone.
|
||||
# TODO: This is only relevant if we are exporting a single armature; how should we reorganize this call?
|
||||
bone_indices = [x[0] for x in bone_indices]
|
||||
root_bones = [bones[bone_index] for bone_index in bone_indices if bones[bone_index].parent is None]
|
||||
if len(root_bones) > 1:
|
||||
# There is more than one root bone.
|
||||
# Print out why each root bone was included by linking it to one of the explicitly included bones.
|
||||
root_bone_names = [bone.name for bone in root_bones]
|
||||
for root_bone_name in root_bone_names:
|
||||
bone_name = root_bone_name
|
||||
while True:
|
||||
# Traverse the instigator chain until the end to find the true instigator bone.
|
||||
# TODO: in future, it would be preferential to have a readout of *all* instigator bones.
|
||||
instigator_bone_name = instigator_bone_names[bone_names.index(bone_name)]
|
||||
if instigator_bone_name is None:
|
||||
print(f'Root bone "{root_bone_name}" was included because {bone_name} was marked for export')
|
||||
break
|
||||
bone_name = instigator_bone_name
|
||||
|
||||
raise RuntimeError('Exported bone hierarchy must have a single root bone.\n'
|
||||
f'The bone hierarchy marked for export has {len(root_bones)} root bones: {root_bone_names}.\n'
|
||||
f'Additional debugging information has been written to the console.')
|
||||
|
||||
return bone_names
|
||||
|
||||
|
||||
@@ -186,121 +171,95 @@ def convert_string_to_cp1252_bytes(string: str) -> bytes:
|
||||
raise RuntimeError(f'The string "{string}" contains characters that cannot be encoded in the Windows-1252 codepage') from e
|
||||
|
||||
|
||||
# TODO: Perhaps export space should just be a transform matrix, since the below is not actually used unless we're using WORLD space.
|
||||
def create_psx_bones_from_blender_bones(
|
||||
bones: List[bpy.types.Bone],
|
||||
export_space: str = 'WORLD',
|
||||
armature_object_matrix_world: Matrix = Matrix.Identity(4),
|
||||
scale = 1.0,
|
||||
forward_axis: str = 'X',
|
||||
up_axis: str = 'Z',
|
||||
root_bone: Optional = None,
|
||||
) -> List[PsxBone]:
|
||||
bones: list[bpy.types.Bone],
|
||||
armature_object_matrix_world: Matrix,
|
||||
) -> list[PsxBone]:
|
||||
"""
|
||||
Creates PSX bones from the given Blender bones.
|
||||
|
||||
scale_matrix = Matrix.Scale(scale, 4)
|
||||
The bones are in world space based on the armature object's world matrix.
|
||||
"""
|
||||
# Apply the scale of the armature object to the bone location.
|
||||
_, _, armature_object_scale = armature_object_matrix_world.decompose()
|
||||
|
||||
coordinate_system_transform = get_coordinate_system_transform(forward_axis, up_axis)
|
||||
coordinate_system_default_rotation = coordinate_system_transform.to_quaternion()
|
||||
|
||||
psx_bones = []
|
||||
psx_bones: list[PsxBone] = []
|
||||
for bone in bones:
|
||||
psx_bone = PsxBone()
|
||||
psx_bone.name = convert_string_to_cp1252_bytes(bone.name)
|
||||
|
||||
if bone.parent is not None:
|
||||
try:
|
||||
parent_index = bones.index(bone.parent)
|
||||
psx_bone.parent_index = parent_index
|
||||
psx_bones[parent_index].children_count += 1
|
||||
except ValueError:
|
||||
psx_bone.parent_index = 0
|
||||
pass
|
||||
|
||||
if bone.parent is not None:
|
||||
# Child bone.
|
||||
rotation = bone.matrix.to_quaternion().conjugated()
|
||||
inverse_parent_rotation = bone.parent.matrix.to_quaternion().inverted()
|
||||
parent_head = inverse_parent_rotation @ bone.parent.head
|
||||
parent_tail = inverse_parent_rotation @ bone.parent.tail
|
||||
location = (parent_tail - parent_head) + bone.head
|
||||
elif bone.parent is None and root_bone is not None:
|
||||
# This is a special case for the root bone when export
|
||||
# Because the root bone and child bones are in different spaces, we need to treat the root bone of this
|
||||
# armature as though it were a child bone.
|
||||
bone_rotation = bone.matrix.to_quaternion().conjugated()
|
||||
local_rotation = armature_object_matrix_world.to_3x3().to_quaternion().conjugated()
|
||||
rotation = bone_rotation @ local_rotation
|
||||
translation, _, scale = armature_object_matrix_world.decompose()
|
||||
# Invert the scale of the armature object matrix.
|
||||
inverse_scale_matrix = Matrix.Identity(4)
|
||||
inverse_scale_matrix[0][0] = 1.0 / scale.x
|
||||
inverse_scale_matrix[1][1] = 1.0 / scale.y
|
||||
inverse_scale_matrix[2][2] = 1.0 / scale.z
|
||||
|
||||
translation = translation @ inverse_scale_matrix
|
||||
location = translation + bone.head
|
||||
else:
|
||||
def get_armature_local_matrix():
|
||||
match export_space:
|
||||
case 'WORLD':
|
||||
return armature_object_matrix_world
|
||||
case 'ARMATURE':
|
||||
return Matrix.Identity(4)
|
||||
case 'ROOT':
|
||||
return bone.matrix.inverted()
|
||||
case _:
|
||||
assert False, f'Invalid export space: {export_space}'
|
||||
|
||||
armature_local_matrix = get_armature_local_matrix()
|
||||
location = armature_local_matrix @ bone.head
|
||||
location = coordinate_system_transform @ location
|
||||
location = armature_object_matrix_world @ bone.head
|
||||
bone_rotation = bone.matrix.to_quaternion().conjugated()
|
||||
local_rotation = armature_local_matrix.to_3x3().to_quaternion().conjugated()
|
||||
rotation = bone_rotation @ local_rotation
|
||||
rotation = bone_rotation @ armature_object_matrix_world.to_3x3().to_quaternion()
|
||||
rotation.conjugate()
|
||||
rotation = coordinate_system_default_rotation @ rotation
|
||||
|
||||
location = scale_matrix @ location
|
||||
|
||||
# If the armature object has been scaled, we need to scale the bone's location to match.
|
||||
_, _, armature_object_scale = armature_object_matrix_world.decompose()
|
||||
location.x *= armature_object_scale.x
|
||||
location.y *= armature_object_scale.y
|
||||
location.z *= armature_object_scale.z
|
||||
|
||||
psx_bone.location.x = location.x
|
||||
psx_bone.location.y = location.y
|
||||
psx_bone.location.z = location.z
|
||||
|
||||
psx_bone.rotation.w = rotation.w
|
||||
psx_bone.rotation.x = rotation.x
|
||||
psx_bone.rotation.y = rotation.y
|
||||
psx_bone.rotation.z = rotation.z
|
||||
# Copy the calculated location and rotation to the bone.
|
||||
psx_bone.location = convert_vector_to_vector3(location)
|
||||
psx_bone.rotation = convert_bpy_quaternion_to_psx_quaternion(rotation)
|
||||
|
||||
psx_bones.append(psx_bone)
|
||||
|
||||
return psx_bones
|
||||
|
||||
|
||||
class PsxBoneResult:
|
||||
def __init__(self, psx_bone: PsxBone, armature_object: Object | None) -> None:
|
||||
self.psx_bone: PsxBone = psx_bone
|
||||
self.armature_object: Object | None = armature_object
|
||||
|
||||
|
||||
class PsxBoneCreateResult:
|
||||
def __init__(self,
|
||||
bones: List[Tuple[PsxBone, Optional[Object]]], # List of tuples of (psx_bone, armature_object)
|
||||
armature_object_root_bone_indices: Dict[Object, int],
|
||||
armature_object_bone_names: Dict[Object, List[str]],
|
||||
bones: list[PsxBoneResult], # List of tuples of (psx_bone, armature_object)
|
||||
armature_object_root_bone_indices: dict[Object, int],
|
||||
armature_object_bone_names: dict[Object, list[str]],
|
||||
):
|
||||
self.bones = bones
|
||||
self.armature_object_root_bone_indices = armature_object_root_bone_indices
|
||||
self.armature_object_bone_names = armature_object_bone_names
|
||||
|
||||
@property
|
||||
def has_false_root_bone(self) -> bool:
|
||||
return len(self.bones) > 0 and self.bones[0][1] is None
|
||||
|
||||
def convert_vector_to_vector3(vector: Vector) -> Vector3:
|
||||
"""
|
||||
Convert a Blender mathutils.Vector to a psk_psa_py Vector3.
|
||||
"""
|
||||
vector3 = Vector3()
|
||||
vector3.x = vector.x
|
||||
vector3.y = vector.y
|
||||
vector3.z = vector.z
|
||||
return vector3
|
||||
|
||||
|
||||
def convert_bpy_quaternion_to_psx_quaternion(other: BpyQuaternion) -> Quaternion:
|
||||
quaternion = Quaternion()
|
||||
quaternion.x = other.x
|
||||
quaternion.y = other.y
|
||||
quaternion.z = other.z
|
||||
quaternion.w = other.w
|
||||
return quaternion
|
||||
def convert_bpy_quaternion_to_psx_quaternion(quaternion: BpyQuaternion) -> Quaternion:
|
||||
"""
|
||||
Convert a Blender mathutils.Quaternion to a psk_psa_py Quaternion.
|
||||
"""
|
||||
psx_quaternion = Quaternion()
|
||||
psx_quaternion.x = quaternion.x
|
||||
psx_quaternion.y = quaternion.y
|
||||
psx_quaternion.z = quaternion.z
|
||||
psx_quaternion.w = quaternion.w
|
||||
return psx_quaternion
|
||||
|
||||
|
||||
class PsxBoneCollection:
|
||||
@@ -313,15 +272,72 @@ class PsxBoneCollection:
|
||||
self.index = index
|
||||
|
||||
|
||||
class ObjectNode:
|
||||
def __init__(self, obj: Object):
|
||||
self.object = obj
|
||||
self.parent: ObjectNode | None = None
|
||||
self.children: list[ObjectNode] = []
|
||||
|
||||
@property
|
||||
def root(self):
|
||||
"""
|
||||
Gets the root in the object hierarchy. This can return itself if this node has no parent.
|
||||
"""
|
||||
n = self
|
||||
while n.parent is not None:
|
||||
n = n.parent
|
||||
return n
|
||||
|
||||
|
||||
class ObjectTree:
|
||||
'''
|
||||
A tree of the armature objects based on their hierarchy.
|
||||
'''
|
||||
def __init__(self, objects: Iterable[Object]):
|
||||
self.root_nodes: list[ObjectNode] = []
|
||||
object_node_map: dict[Object, ObjectNode] = {x: ObjectNode(x) for x in objects}
|
||||
|
||||
for obj, object_node in object_node_map.items():
|
||||
if obj.parent in object_node_map:
|
||||
parent_node = object_node_map[obj.parent]
|
||||
object_node.parent = parent_node
|
||||
parent_node.children.append(object_node)
|
||||
else:
|
||||
self.root_nodes.append(object_node)
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
An depth-first iterator over the armature tree.
|
||||
"""
|
||||
node_stack = [] + self.root_nodes
|
||||
while node_stack:
|
||||
node = node_stack.pop()
|
||||
yield node
|
||||
node_stack = node.children + node_stack
|
||||
|
||||
def objects_iterator(self):
|
||||
for node in self:
|
||||
yield node.object
|
||||
|
||||
def dump(self):
|
||||
# Print out the hierarchy of armature objects for debugging using the root nodes, with indentation to show parent-child relationships.
|
||||
for root_node in self.root_nodes:
|
||||
def print_object_node(node: ObjectNode, indent: int = 0):
|
||||
print(' ' * indent + f'- {node.object.name}')
|
||||
for child_node in node.children:
|
||||
print_object_node(child_node, indent + 2)
|
||||
print_object_node(root_node)
|
||||
|
||||
|
||||
def create_psx_bones(
|
||||
armature_objects: List[Object],
|
||||
armature_objects: list[Object],
|
||||
export_space: str = 'WORLD',
|
||||
root_bone_name: str = 'ROOT',
|
||||
forward_axis: str = 'X',
|
||||
up_axis: str = 'Z',
|
||||
scale: float = 1.0,
|
||||
bone_filter_mode: str = 'ALL',
|
||||
bone_collection_indices: Optional[List[PsxBoneCollection]] = None,
|
||||
bone_collection_indices: list[PsxBoneCollection] | None = None,
|
||||
bone_collection_primary_key: str = 'OBJECT',
|
||||
) -> PsxBoneCreateResult:
|
||||
"""
|
||||
@@ -332,15 +348,16 @@ def create_psx_bones(
|
||||
if bone_collection_indices is None:
|
||||
bone_collection_indices = []
|
||||
|
||||
bones: List[Tuple[PsxBone, Optional[Object]]] = []
|
||||
armature_tree = ObjectTree(armature_objects)
|
||||
|
||||
if export_space != 'WORLD' and len(armature_objects) >= 2:
|
||||
armature_object_names = [armature_object.name for armature_object in armature_objects]
|
||||
raise RuntimeError(f'When exporting multiple armatures, the Export Space must be World.\n' \
|
||||
f'The following armatures are attempting to be exported: {armature_object_names}')
|
||||
if len(armature_tree.root_nodes) >= 2:
|
||||
raise RuntimeError(
|
||||
'Multiple root armature objects were found. '
|
||||
'Only one root armature object is allowed. '
|
||||
'To use multiple armature objects, parent them to one another in a hierarchy using Bone parenting.'
|
||||
)
|
||||
|
||||
coordinate_system_matrix = get_coordinate_system_transform(forward_axis, up_axis)
|
||||
coordinate_system_default_rotation = coordinate_system_matrix.to_quaternion()
|
||||
# TODO: confirm this to be working with non-bone parented armature hierarchies.
|
||||
|
||||
total_bone_count = 0
|
||||
for armature_object in filter(lambda x: x.data is not None, armature_objects):
|
||||
@@ -348,9 +365,9 @@ def create_psx_bones(
|
||||
total_bone_count += len(armature_data.bones)
|
||||
|
||||
# Store the bone names to be exported for each armature object.
|
||||
armature_object_bone_names: Dict[Object, List[str]] = dict()
|
||||
armature_object_bone_names: dict[Object, list[str]] = dict()
|
||||
for armature_object in armature_objects:
|
||||
armature_bone_collection_indices: List[int] = []
|
||||
armature_bone_collection_indices: list[int] = []
|
||||
match bone_collection_primary_key:
|
||||
case 'OBJECT':
|
||||
armature_bone_collection_indices.extend([x.index for x in bone_collection_indices if x.armature_object_name == armature_object.name])
|
||||
@@ -363,50 +380,66 @@ def create_psx_bones(
|
||||
|
||||
# Store the index of the root bone for each armature object.
|
||||
# We will need this later to correctly assign vertex weights.
|
||||
armature_object_root_bone_indices: Dict[Optional[Object], int] = dict()
|
||||
|
||||
if len(armature_objects) == 0 or total_bone_count == 0:
|
||||
# If the mesh has no armature object or no bones, simply assign it a dummy bone at the root to satisfy the
|
||||
# requirement that a PSK file must have at least one bone.
|
||||
psx_bone = PsxBone()
|
||||
psx_bone.name = convert_string_to_cp1252_bytes(root_bone_name)
|
||||
psx_bone.flags = 0
|
||||
psx_bone.children_count = 0
|
||||
psx_bone.parent_index = 0
|
||||
psx_bone.location = Vector3.zero()
|
||||
psx_bone.rotation = convert_bpy_quaternion_to_psx_quaternion(coordinate_system_default_rotation)
|
||||
bones.append((psx_bone, None))
|
||||
|
||||
armature_object_root_bone_indices[None] = 0
|
||||
else:
|
||||
# If we have multiple armature objects, create a root bone at the world origin.
|
||||
if len(armature_objects) > 1:
|
||||
psx_bone = PsxBone()
|
||||
psx_bone.name = convert_string_to_cp1252_bytes(root_bone_name)
|
||||
psx_bone.flags = 0
|
||||
psx_bone.children_count = total_bone_count
|
||||
psx_bone.parent_index = 0
|
||||
psx_bone.location = Vector3.zero()
|
||||
psx_bone.rotation = convert_bpy_quaternion_to_psx_quaternion(coordinate_system_default_rotation)
|
||||
bones.append((psx_bone, None))
|
||||
|
||||
armature_object_root_bone_indices[None] = 0
|
||||
|
||||
root_bone = bones[0][0] if len(bones) > 0 else None
|
||||
armature_object_root_bone_indices: dict[Object | None, int] = dict()
|
||||
bones: list[PsxBoneResult] = []
|
||||
|
||||
# Iterate through all the armature objects.
|
||||
for armature_object in armature_objects:
|
||||
bone_names = armature_object_bone_names[armature_object]
|
||||
armature_data = typing_cast(Armature, armature_object.data)
|
||||
armature_bones = [armature_data.bones[bone_name] for bone_name in bone_names]
|
||||
|
||||
# Ensure that we don't have multiple root bones in this armature.
|
||||
root_bone_count = sum(1 for bone in armature_bones if bone.parent is None)
|
||||
if root_bone_count > 1:
|
||||
raise RuntimeError(f'Armature object \'{armature_object.name}\' has multiple root bones. '
|
||||
f'Only one root bone is allowed per armature.'
|
||||
)
|
||||
|
||||
armature_psx_bones = create_psx_bones_from_blender_bones(
|
||||
bones=armature_bones,
|
||||
export_space=export_space,
|
||||
armature_object_matrix_world=armature_object.matrix_world,
|
||||
scale=scale,
|
||||
forward_axis=forward_axis,
|
||||
up_axis=up_axis,
|
||||
root_bone=root_bone,
|
||||
)
|
||||
|
||||
if len(armature_psx_bones) == 0:
|
||||
continue
|
||||
|
||||
# We have the bones in world space. If we are attaching this armature to a parent bone, we need to convert
|
||||
# the root bone to be relative to the target parent bone.
|
||||
if armature_object.parent in armature_objects:
|
||||
match armature_object.parent_type:
|
||||
case 'BONE':
|
||||
# Parent to a bone in the parent armature object.
|
||||
# We just need to get the world-space location of each of the bones and get the relative pose, then
|
||||
# assign that location and rotation to the root bone.
|
||||
parent_bone_name = armature_object.parent_bone
|
||||
|
||||
if parent_bone_name == '':
|
||||
raise RuntimeError(f'Armature object \'{armature_object.name}\' is parented to a bone but no parent bone name is specified.')
|
||||
|
||||
parent_armature_data = typing_cast(Armature, armature_object.parent.data)
|
||||
if parent_armature_data is None:
|
||||
raise RuntimeError(f'Parent object \'{armature_object.parent.name}\' is not an armature.')
|
||||
try:
|
||||
parent_bone = parent_armature_data.bones[parent_bone_name]
|
||||
except KeyError:
|
||||
raise RuntimeError(f'Bone \'{parent_bone_name}\' could not be found in armature \'{armature_object.parent.name}\'.')
|
||||
|
||||
parent_bone_world_matrix = armature_object.parent.matrix_world @ parent_bone.matrix_local.to_4x4()
|
||||
parent_bone_world_location, parent_bone_world_rotation, _ = parent_bone_world_matrix.decompose()
|
||||
|
||||
# Convert the root bone location to be relative to the parent bone.
|
||||
root_bone = armature_psx_bones[0]
|
||||
root_bone_location = Vector((root_bone.location.x, root_bone.location.y, root_bone.location.z))
|
||||
relative_location = parent_bone_world_rotation.inverted() @ (root_bone_location - parent_bone_world_location)
|
||||
root_bone.location = convert_vector_to_vector3(relative_location)
|
||||
# Convert the root bone rotation to be relative to the parent bone.
|
||||
root_bone_rotation = BpyQuaternion((root_bone.rotation.w, root_bone.rotation.x, root_bone.rotation.y, root_bone.rotation.z))
|
||||
relative_rotation = parent_bone_world_rotation.inverted() @ root_bone_rotation
|
||||
root_bone.rotation = convert_bpy_quaternion_to_psx_quaternion(relative_rotation)
|
||||
case _:
|
||||
raise RuntimeError(f'Unhandled parent type ({armature_object.parent_type}) for object {armature_object.name}.\n'
|
||||
f'Parent type must be \'Bone\'.'
|
||||
)
|
||||
|
||||
# If we are appending these bones to an existing list of bones, we need to adjust the parent indices for
|
||||
@@ -418,10 +451,70 @@ def create_psx_bones(
|
||||
|
||||
armature_object_root_bone_indices[armature_object] = len(bones)
|
||||
|
||||
bones.extend((psx_bone, armature_object) for psx_bone in armature_psx_bones)
|
||||
bones.extend(PsxBoneResult(psx_bone, armature_object) for psx_bone in armature_psx_bones)
|
||||
|
||||
# Check if any of the armatures are parented to one another.
|
||||
# If so, adjust the hierarchy as though they are part of the same armature object.
|
||||
# This will let us re-use rig components without destructively joining them.
|
||||
for armature_object in armature_objects:
|
||||
if armature_object.parent not in armature_objects:
|
||||
continue
|
||||
|
||||
# This armature object is parented to another armature object that we are exporting.
|
||||
# First fetch the root bone indices for the two armature objects.
|
||||
root_bone_index = armature_object_root_bone_indices.get(armature_object, None)
|
||||
parent_root_bone_index = armature_object_root_bone_indices.get(armature_object.parent, None)
|
||||
|
||||
if root_bone_index is None or parent_root_bone_index is None:
|
||||
raise RuntimeError(f'Could not find root bone index for armature object \'{armature_object.name}\' or its parent \'{armature_object.parent.name}\'.\n'
|
||||
'This likely means that one of the armatures does not have any bones that are being exported, which is not allowed when using armature parenting between multiple armatures.'
|
||||
)
|
||||
|
||||
match armature_object.parent_type:
|
||||
case 'OBJECT':
|
||||
# Parent this armature's root bone to the root bone of the parent object.
|
||||
bones[root_bone_index].psx_bone.parent_index = parent_root_bone_index
|
||||
case 'BONE':
|
||||
# Parent this armature's root bone to the specified bone in the parent.
|
||||
new_parent_index = None
|
||||
for bone_index, bone in enumerate(bones):
|
||||
if bone.psx_bone.name == convert_string_to_cp1252_bytes(armature_object.parent_bone) and bone.armature_object == armature_object.parent:
|
||||
new_parent_index = bone_index
|
||||
break
|
||||
if new_parent_index == None:
|
||||
raise RuntimeError(f'Bone \'{armature_object.parent_bone}\' could not be found in armature \'{armature_object.parent.name}\'.')
|
||||
bones[root_bone_index].psx_bone.parent_index = new_parent_index
|
||||
case _:
|
||||
raise RuntimeError(f'Unhandled parent type ({armature_object.parent_type}) for object {armature_object.name}.\n'
|
||||
f'Parent type must be \'Object\' or \'Bone\'.'
|
||||
)
|
||||
|
||||
match export_space:
|
||||
case 'WORLD':
|
||||
# No action needed, bones are already in world space.
|
||||
pass
|
||||
case 'ARMATURE':
|
||||
# The bone is in world-space. We need to convert it to armature (object) space.
|
||||
# Get this from matrix_local.
|
||||
root_bone, root_bone_armature_object = bones[0].psx_bone, bones[0].armature_object
|
||||
if root_bone_armature_object is None:
|
||||
raise RuntimeError('Cannot export to Armature space when multiple armatures are being exported.')
|
||||
|
||||
armature_data = typing_cast(Armature, root_bone_armature_object.data)
|
||||
matrix_local = armature_data.bones[root_bone.name.decode('windows-1252')].matrix_local
|
||||
location, rotation, _ = matrix_local.decompose()
|
||||
root_bone.location = convert_vector_to_vector3(location)
|
||||
root_bone.rotation = convert_bpy_quaternion_to_psx_quaternion(rotation)
|
||||
case 'ROOT':
|
||||
# Zero out the root bone transforms.
|
||||
root_bone = bones[0].psx_bone
|
||||
root_bone.location = Vector3.zero()
|
||||
root_bone.rotation = Quaternion.identity()
|
||||
case _:
|
||||
assert False, f'Invalid export space: {export_space}'
|
||||
|
||||
# Check if there are bone name conflicts between armatures.
|
||||
bone_name_counts = Counter(bone[0].name.decode('windows-1252').upper() for bone in bones)
|
||||
bone_name_counts = Counter(bone.psx_bone.name.decode('windows-1252').upper() for bone in bones)
|
||||
for bone_name, count in bone_name_counts.items():
|
||||
if count > 1:
|
||||
error_message = f'Found {count} bones with the name "{bone_name}". '
|
||||
@@ -432,6 +525,31 @@ def create_psx_bones(
|
||||
f''
|
||||
raise RuntimeError(error_message)
|
||||
|
||||
# Apply the scale to the bone locations.
|
||||
for bone in bones:
|
||||
bone.psx_bone.location.x *= scale
|
||||
bone.psx_bone.location.y *= scale
|
||||
bone.psx_bone.location.z *= scale
|
||||
|
||||
coordinate_system_matrix = get_coordinate_system_transform(forward_axis, up_axis)
|
||||
coordinate_system_default_rotation = coordinate_system_matrix.to_quaternion()
|
||||
|
||||
# Apply the coordinate system transform to the root bone.
|
||||
root_psx_bone = bones[0].psx_bone
|
||||
# Get transform matrix from root bone location and rotation.
|
||||
root_bone_location = Vector((root_psx_bone.location.x, root_psx_bone.location.y, root_psx_bone.location.z))
|
||||
root_bone_rotation = BpyQuaternion((root_psx_bone.rotation.w, root_psx_bone.rotation.x, root_psx_bone.rotation.y, root_psx_bone.rotation.z))
|
||||
root_bone_matrix = (
|
||||
Matrix.Translation(root_bone_location) @
|
||||
root_bone_rotation.to_matrix().to_4x4()
|
||||
)
|
||||
root_bone_matrix = coordinate_system_default_rotation.inverted().to_matrix().to_4x4() @ root_bone_matrix
|
||||
location, rotation, _ = root_bone_matrix.decompose()
|
||||
root_psx_bone.location = convert_vector_to_vector3(location)
|
||||
root_psx_bone.rotation = convert_bpy_quaternion_to_psx_quaternion(rotation)
|
||||
|
||||
convert_bpy_quaternion_to_psx_quaternion(coordinate_system_default_rotation)
|
||||
|
||||
return PsxBoneCreateResult(
|
||||
bones=bones,
|
||||
armature_object_root_bone_indices=armature_object_root_bone_indices,
|
||||
@@ -482,7 +600,7 @@ def get_armatures_for_mesh_objects(mesh_objects: Iterable[Object]):
|
||||
yield from armature_objects
|
||||
|
||||
|
||||
def get_collection_from_context(context: Context) -> Optional[Collection]:
|
||||
def get_collection_from_context(context: Context) -> Collection | None:
|
||||
if context.space_data is None or context.space_data.type != 'PROPERTIES':
|
||||
return None
|
||||
space_data = typing_cast(SpaceProperties, context.space_data)
|
||||
@@ -492,7 +610,7 @@ def get_collection_from_context(context: Context) -> Optional[Collection]:
|
||||
return context.collection
|
||||
|
||||
|
||||
def get_collection_export_operator_from_context(context: Context) -> Optional[object]:
|
||||
def get_collection_export_operator_from_context(context: Context) -> PropertyGroup | None:
|
||||
collection = get_collection_from_context(context)
|
||||
if collection is None or collection.active_exporter_index is None:
|
||||
return None
|
||||
@@ -500,3 +618,86 @@ def get_collection_export_operator_from_context(context: Context) -> Optional[ob
|
||||
return None
|
||||
exporter = collection.exporters[collection.active_exporter_index]
|
||||
return exporter.export_properties
|
||||
|
||||
|
||||
from ..shared.dfs import DfsObject, dfs_collection_objects, dfs_view_layer_objects
|
||||
from typing import Set
|
||||
from bpy.types import Depsgraph
|
||||
|
||||
|
||||
class PskInputObjects:
    """Container for the inputs to a PSK export.

    Holds the mesh DFS objects selected for export and the armature objects
    that deform them, sorted in hierarchy order.
    """

    def __init__(self):
        # DFS traversal entries for the mesh objects being exported.
        self.mesh_dfs_objects: list[DfsObject] = []
        # Armature objects used by the meshes, in hierarchy order.
        self.armature_objects: list[Object] = []
|
||||
|
||||
|
||||
def get_materials_for_mesh_objects(depsgraph: Depsgraph, mesh_objects: Iterable[Object]):
    '''
    Yields the unique materials used by the given mesh objects, in first-use order.

    If any mesh has no material slots, or has one or more empty material slots,
    a single None is yielded after all of the materials.
    '''
    seen: Set[Material] = set()
    any_missing_material = False
    for obj in mesh_objects:
        evaluated_object = obj.evaluated_get(depsgraph)
        slots = evaluated_object.material_slots
        if len(slots) == 0:
            # No slots at all counts as a missing material.
            any_missing_material = True
            continue
        for slot in slots:
            material = slot.material
            if material is None:
                any_missing_material = True
            elif material not in seen:
                seen.add(material)
                yield material
    if any_missing_material:
        yield None
|
||||
|
||||
|
||||
def get_mesh_objects_for_collection(collection: Collection) -> Iterable[DfsObject]:
    """Yield the DFS entries in the collection whose underlying object is a mesh."""
    return (dfs_object for dfs_object in dfs_collection_objects(collection) if dfs_object.obj.type == 'MESH')
|
||||
|
||||
|
||||
def get_mesh_objects_for_context(context: Context) -> Iterable[DfsObject]:
    """Yield the selected mesh DFS entries from the context's view layer, if any."""
    view_layer = context.view_layer
    if view_layer is None:
        return
    yield from (
        dfs_object
        for dfs_object in dfs_view_layer_objects(view_layer)
        if dfs_object.obj.type == 'MESH' and dfs_object.is_selected
    )
|
||||
|
||||
|
||||
def get_armature_for_mesh_object(mesh_object: Object) -> Object | None:
    """Return the armature object of the first ARMATURE modifier on the mesh that
    has one assigned, or None when the object is not a mesh or no such modifier exists."""
    if mesh_object.type != 'MESH':
        return None
    for modifier in mesh_object.modifiers:
        if modifier.type != 'ARMATURE':
            continue
        armature_object = typing_cast(ArmatureModifier, modifier).object
        if armature_object is not None:
            return armature_object
    return None
|
||||
|
||||
|
||||
def _get_psk_input_objects(mesh_dfs_objects: Iterable[DfsObject]) -> PskInputObjects:
    """Assemble a PskInputObjects from mesh DFS entries.

    Raises RuntimeError when no mesh objects are given.
    """
    dfs_objects = list(mesh_dfs_objects)
    if not dfs_objects:
        raise RuntimeError('No mesh objects were found to export.')
    input_objects = PskInputObjects()
    input_objects.mesh_dfs_objects = dfs_objects
    # Gather the armatures used by the meshes, then sort them in hierarchy order.
    armature_objects = get_armatures_for_mesh_objects(dfs_object.obj for dfs_object in dfs_objects)
    input_objects.armature_objects = list(ObjectTree(armature_objects).objects_iterator())
    return input_objects
|
||||
|
||||
|
||||
def get_psk_input_objects_for_context(context: Context) -> PskInputObjects:
    """Collect the PSK export inputs from the selected mesh objects in the context."""
    return _get_psk_input_objects(get_mesh_objects_for_context(context))
|
||||
|
||||
|
||||
def get_psk_input_objects_for_collection(collection: Collection) -> PskInputObjects:
    """Collect the PSK export inputs from the mesh objects in the given collection."""
    return _get_psk_input_objects(get_mesh_objects_for_collection(collection))
|
||||
|
||||
72
io_scene_psk_psa/shared/operators.py
Normal file
72
io_scene_psk_psa/shared/operators.py
Normal file
@@ -0,0 +1,72 @@
|
||||
from bpy.types import Operator
|
||||
from bpy.props import BoolProperty
|
||||
|
||||
from .types import PsxBoneExportMixin
|
||||
from typing import cast as typing_cast
|
||||
|
||||
from .helpers import get_collection_export_operator_from_context, get_psk_input_objects_for_collection, populate_bone_collection_list
|
||||
|
||||
|
||||
|
||||
class PSK_OT_bone_collection_list_populate(Operator):
    bl_idname = 'psk.bone_collection_list_populate'
    bl_label = 'Populate Bone Collection List'
    bl_description = 'Populate the bone collection list from the armature that will be used in this collection export'
    bl_options = {'INTERNAL'}

    def execute(self, context):
        """Repopulate the export operator's bone collection list while preserving
        each surviving entry's selection state."""
        export_operator = get_collection_export_operator_from_context(context)
        if export_operator is None:
            self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context')
            return {'CANCELLED'}
        if context.collection is None:
            self.report({'ERROR_INVALID_CONTEXT'}, 'No active collection')
            return {'CANCELLED'}
        try:
            input_objects = get_psk_input_objects_for_collection(context.collection)
        except RuntimeError as e:
            self.report({'ERROR_INVALID_CONTEXT'}, str(e))
            return {'CANCELLED'}
        if not input_objects.armature_objects:
            self.report({'ERROR_INVALID_CONTEXT'}, 'No armature modifiers found on mesh objects')
            return {'CANCELLED'}

        bone_export_operator = typing_cast(PsxBoneExportMixin, export_operator)

        # Snapshot the current selection state (keyed by the item's identity hash)
        # so repopulating the list does not clobber the user's choices.
        selected_status: dict[int, bool] = {
            hash(item): item.is_selected
            for item in bone_export_operator.bone_collection_list
        }

        populate_bone_collection_list(bone_export_operator.bone_collection_list, input_objects.armature_objects)

        # Restore the selection state; entries that are new to the list default to unselected.
        for item in bone_export_operator.bone_collection_list:
            item.is_selected = selected_status.get(hash(item), False)

        return {'FINISHED'}
|
||||
|
||||
|
||||
class PSK_OT_bone_collection_list_select_all(Operator):
    bl_idname = 'psk.bone_collection_list_select_all'
    bl_label = 'Select All'
    bl_description = 'Select all bone collections'
    bl_options = {'INTERNAL'}

    # When False, this operator deselects everything instead.
    is_selected: BoolProperty(default=True)

    def execute(self, context):
        """Set every bone collection entry's selection state to `is_selected`."""
        export_operator = get_collection_export_operator_from_context(context)
        if export_operator is None:
            self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context')
            return {'CANCELLED'}
        bone_export_operator = typing_cast(PsxBoneExportMixin, export_operator)
        for bone_collection in bone_export_operator.bone_collection_list:
            bone_collection.is_selected = self.is_selected
        return {'FINISHED'}
|
||||
|
||||
|
||||
# Operator classes registered with Blender when this module is enabled.
_classes = (
    PSK_OT_bone_collection_list_populate,
    PSK_OT_bone_collection_list_select_all,
)
from bpy.utils import register_classes_factory
# Standard Blender add-on registration hooks for this module.
register, unregister = register_classes_factory(_classes)
|
||||
@@ -1,54 +0,0 @@
|
||||
from typing import Tuple
|
||||
|
||||
class SemanticVersion(object):
    """A major.minor.patch version with total ordering and hashing."""

    def __init__(self, version: Tuple[int, int, int]):
        self.major, self.minor, self.patch = version

    def __iter__(self):
        # Iterate components in significance order: major, minor, patch.
        return iter((self.major, self.minor, self.patch))

    @staticmethod
    def compare(lhs: 'SemanticVersion', rhs: 'SemanticVersion') -> int:
        """
        Compares two semantic versions.

        Returns:
            -1 if lhs < rhs
            0 if lhs == rhs
            1 if lhs > rhs
        """
        lhs_tuple = tuple(lhs)
        rhs_tuple = tuple(rhs)
        if lhs_tuple < rhs_tuple:
            return -1
        if lhs_tuple > rhs_tuple:
            return 1
        return 0

    def __str__(self):
        return f'{self.major}.{self.minor}.{self.patch}'

    def __repr__(self):
        return str(self)

    def __eq__(self, other):
        return SemanticVersion.compare(self, other) == 0

    def __ne__(self, other):
        return not self == other

    def __lt__(self, other):
        return SemanticVersion.compare(self, other) < 0

    def __le__(self, other):
        return SemanticVersion.compare(self, other) <= 0

    def __gt__(self, other):
        return SemanticVersion.compare(self, other) > 0

    def __ge__(self, other):
        return SemanticVersion.compare(self, other) >= 0

    def __hash__(self):
        return hash(tuple(self))
|
||||
@@ -1,12 +1,29 @@
|
||||
from typing import Generic, Iterable, Sized, TypeVar
|
||||
import bpy
|
||||
from bpy.props import CollectionProperty, EnumProperty, StringProperty, IntProperty, BoolProperty, FloatProperty
|
||||
from bpy.types import PropertyGroup, UIList, UILayout, Context, AnyType, Panel
|
||||
|
||||
T = TypeVar('T')


# Don't actually use this, this is just for typing.
# Mirrors the mutation API of Blender's bpy_prop_collection_idprop so that
# CollectionProperty fields can be annotated generically; these bodies are
# never executed at runtime.
class BpyCollectionProperty(Generic[T], Iterable[T], Sized):
    def add(self) -> T:
        # Typing placeholder only; T is a TypeVar and is not constructible.
        return T() # type: ignore

    def clear(self) -> None:
        pass

    def move(self, src_index: int, dst_index: int):
        pass

    def remove(self, index: int):
        pass
|
||||
|
||||
|
||||
class PSX_UL_bone_collection_list(UIList):
|
||||
|
||||
def draw_item(self, _context: Context, layout: UILayout, _data: AnyType, item: AnyType, _icon: int,
|
||||
_active_data: AnyType, _active_property: str, _index: int = 0, _flt_flag: int = 0):
|
||||
def draw_item(self, context: Context, layout: UILayout, data: AnyType, item: AnyType, icon: int,
|
||||
active_data: AnyType, active_property: str, index: int = 0, flt_flag: int = 0):
|
||||
row = layout.row()
|
||||
|
||||
row.prop(item, 'is_selected', text=getattr(item, 'name'))
|
||||
@@ -27,6 +44,9 @@ class PSX_PG_bone_collection_list_item(PropertyGroup):
|
||||
count: IntProperty()
|
||||
is_selected: BoolProperty(default=False)
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash(f'{self.name}/{self.armature_object_name}/{self.armature_data_name}')
|
||||
|
||||
|
||||
class PSX_PG_action_export(PropertyGroup):
|
||||
group: StringProperty(name='Group', description='The group of the sequence', maxlen=64)
|
||||
@@ -45,7 +65,7 @@ class PSX_PT_action(Panel):
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context: 'Context'):
|
||||
return context.active_object and context.active_object.type == 'ARMATURE' and context.active_action is not None
|
||||
return context.active_object is not None and context.active_object.type == 'ARMATURE' and context.active_action is not None
|
||||
|
||||
def draw(self, context: 'Context'):
|
||||
action = context.active_action
|
||||
@@ -87,13 +107,13 @@ up_items = (
|
||||
)
|
||||
|
||||
|
||||
def forward_axis_update(self, context):
    """Property-update callback: if the new forward axis collides with the up
    axis, move the up axis to the first other available axis (falling back to 'Z')."""
    if self.forward_axis != self.up_axis:
        return
    for axis in axis_identifiers:
        if axis != self.forward_axis:
            self.up_axis = axis
            return
    self.up_axis = 'Z'
|
||||
|
||||
|
||||
def up_axis_update(self, context):
    """Property-update callback: if the new up axis collides with the forward
    axis, move the forward axis to the first other available axis (falling back to 'X')."""
    if self.up_axis != self.forward_axis:
        return
    for axis in axis_identifiers:
        if axis != self.up_axis:
            self.forward_axis = axis
            return
    self.forward_axis = 'X'
|
||||
@@ -138,6 +158,17 @@ class ExportSpaceMixin:
|
||||
default='WORLD'
|
||||
)
|
||||
|
||||
# (identifier, name, description) triples for the Transform Source enum property.
transform_source_items = (
    ('SCENE', 'Scene', 'Use the scene transform settings'),
    ('CUSTOM', 'Custom', 'Use custom transform settings'),
)

# Mixin for export operators whose transform can come either from the
# scene-level settings or from operator-specific custom settings.
class TransformSourceMixin:
    transform_source: EnumProperty(
        items=transform_source_items,
        name='Transform Source',
        default='SCENE'
    )
|
||||
|
||||
class PsxBoneExportMixin:
|
||||
bone_filter_mode: EnumProperty(
|
||||
|
||||
63
io_scene_psk_psa/shared/types.pyi
Normal file
63
io_scene_psk_psa/shared/types.pyi
Normal file
@@ -0,0 +1,63 @@
|
||||
from typing import Generic, TypeVar, Iterable, Sized
|
||||
|
||||
T = TypeVar("T")


# https://docs.blender.org/api/current/bpy.types.bpy_prop_collection_idprop.html#bpy.types.bpy_prop_collection_idprop
class BpyCollectionProperty(Generic[T], Iterable[T], Sized):
    """Typing stub mirroring the mutation API of bpy_prop_collection_idprop."""

    # Stub bodies use `...` per the PEP 484 stub-file convention.
    def add(self) -> T: ...

    def clear(self) -> None: ...

    def move(self, src_index: int, dst_index: int): ...

    def remove(self, index: int): ...
|
||||
|
||||
|
||||
# Stub for one entry in the bone-collection UI list.
class PSX_PG_bone_collection_list_item:
    # Name of the armature object that owns this bone collection.
    armature_object_name: str
    # Name of the armature data-block that owns this bone collection.
    armature_data_name: str
    name: str
    index: int
    # Number of bones in the collection.
    count: int
    # Whether the collection is selected for export.
    is_selected: bool


# Stub for the per-action PSA export settings.
class PSX_PG_action_export:
    group: str
    compression_ratio: float
    key_quota: int
    fps: float
|
||||
|
||||
|
||||
# Stub: forward/up axis selection shared by export operators.
class AxisMixin:
    forward_axis: str
    up_axis: str


# Stub: axis selection plus a uniform scale factor.
class TransformMixin(AxisMixin):
    scale: float


# Stub: which space ('WORLD', 'ARMATURE', 'ROOT', ...) bones are exported in.
class ExportSpaceMixin:
    export_space: str


# Stub: whether transform settings come from the scene or are custom.
class TransformSourceMixin:
    transform_source: str


# Stub: bone filtering settings shared by PSK/PSA export operators.
class PsxBoneExportMixin:
    bone_filter_mode: str
    bone_collection_list: BpyCollectionProperty[PSX_PG_bone_collection_list_item]
    bone_collection_list_index: int
    root_bone_name: str
||||
|
||||
|
||||
# Stub for the scene-level export settings property group.
class PSX_PG_scene_export(TransformSourceMixin):
    pass


# Variable-length tuple of (identifier, name, description) enum item triples.
# Note: `tuple[tuple[str, str, str], ...]` (with the ellipsis) is required to
# type a homogeneous tuple of arbitrary length; without it the annotation
# describes a tuple containing exactly one triple.
bone_filter_mode_items: tuple[tuple[str, str, str], ...]
|
||||
Binary file not shown.
@@ -37,3 +37,29 @@ def test_psa_import_all():
|
||||
EXPECTED_ACTION_COUNT = 135
|
||||
assert len(bpy.data.actions) == EXPECTED_ACTION_COUNT, \
|
||||
f"Expected {EXPECTED_ACTION_COUNT} actions, but found {len(bpy.data.actions)}."
|
||||
|
||||
|
||||
def test_psa_import_convert_to_samples():
    """Import the Shrek PSK, then import its PSA with conversion to samples enabled."""
    psk_result = bpy.ops.psk.import_file(
        filepath=SHREK_PSK_FILEPATH,
        components='ALL',
    )
    assert psk_result == {'FINISHED'}, "PSK import failed."

    armature_object = bpy.data.objects.get('Shrek', None)
    assert armature_object is not None, "Armature object not found in the scene."
    assert armature_object.type == 'ARMATURE', "Object is not of type ARMATURE."

    # Make the armature active and selected so the PSA importer targets it.
    bpy.context.view_layer.objects.active = armature_object
    armature_object.select_set(True)

    # Import the associated PSA with the import_all operator, converting to samples.
    psa_result = bpy.ops.psa.import_all(
        filepath=SHREK_PSA_FILEPATH,
        should_convert_to_samples=True
    )
    assert psa_result == {'FINISHED'}, "PSA import failed."

    # TODO: More thorough tests on the imported data for the animations.
    EXPECTED_ACTION_COUNT = 135
    assert len(bpy.data.actions) == EXPECTED_ACTION_COUNT, \
        f"Expected {EXPECTED_ACTION_COUNT} actions, but found {len(bpy.data.actions)}."
|
||||
@@ -1,3 +1,3 @@
|
||||
pytest
|
||||
pytest-cov
|
||||
psk-psa-py
|
||||
psk-psa-py == 0.0.4
|
||||
|
||||
Reference in New Issue
Block a user