Added the psk_psa_py package to replace the basic data IO modules
Fixed PSK and PSA export
This commit is contained in:
@@ -22,7 +22,7 @@ RUN pip install pytest-cov
|
||||
# Source the environment variables and install Python dependencies
|
||||
RUN . /etc/environment && \
|
||||
$BLENDER_PYTHON -m ensurepip && \
|
||||
$BLENDER_PYTHON -m pip install pytest pytest-cov
|
||||
$BLENDER_PYTHON -m pip install pytest pytest-cov psk-psa-py
|
||||
|
||||
# Persist BLENDER_EXECUTABLE as an environment variable
|
||||
RUN echo $(cat /blender_executable_path) > /tmp/blender_executable_path_env && \
|
||||
|
||||
@@ -1,15 +1,13 @@
|
||||
from bpy.app.handlers import persistent
|
||||
|
||||
from .shared import data as shared_data, types as shared_types, helpers as shared_helpers
|
||||
from .shared import types as shared_types, helpers as shared_helpers
|
||||
from .shared import dfs as shared_dfs, ui as shared_ui
|
||||
from .psk import (
|
||||
builder as psk_builder,
|
||||
data as psk_data,
|
||||
importer as psk_importer,
|
||||
properties as psk_properties,
|
||||
writer as psk_writer,
|
||||
)
|
||||
from .psk import reader as psk_reader, ui as psk_ui
|
||||
from .psk import ui as psk_ui
|
||||
from .psk.export import (
|
||||
operators as psk_export_operators,
|
||||
properties as psk_export_properties,
|
||||
@@ -19,9 +17,6 @@ from .psk.import_ import operators as psk_import_operators
|
||||
|
||||
from .psa import (
|
||||
config as psa_config,
|
||||
data as psa_data,
|
||||
writer as psa_writer,
|
||||
reader as psa_reader,
|
||||
builder as psa_builder,
|
||||
importer as psa_importer,
|
||||
)
|
||||
@@ -38,15 +33,11 @@ _needs_reload = 'bpy' in locals()
|
||||
if _needs_reload:
|
||||
import importlib
|
||||
|
||||
importlib.reload(shared_data)
|
||||
importlib.reload(shared_helpers)
|
||||
importlib.reload(shared_types)
|
||||
importlib.reload(shared_dfs)
|
||||
importlib.reload(shared_ui)
|
||||
|
||||
importlib.reload(psk_data)
|
||||
importlib.reload(psk_reader)
|
||||
importlib.reload(psk_writer)
|
||||
importlib.reload(psk_builder)
|
||||
importlib.reload(psk_importer)
|
||||
importlib.reload(psk_properties)
|
||||
@@ -56,10 +47,7 @@ if _needs_reload:
|
||||
importlib.reload(psk_export_ui)
|
||||
importlib.reload(psk_import_operators)
|
||||
|
||||
importlib.reload(psa_data)
|
||||
importlib.reload(psa_config)
|
||||
importlib.reload(psa_reader)
|
||||
importlib.reload(psa_writer)
|
||||
importlib.reload(psa_builder)
|
||||
importlib.reload(psa_importer)
|
||||
importlib.reload(psa_export_properties)
|
||||
|
||||
@@ -13,6 +13,9 @@ blender_version_min = "5.0.0"
|
||||
license = [
|
||||
"SPDX:GPL-3.0-or-later",
|
||||
]
|
||||
wheels = [
|
||||
'./wheels/psk_psa_py-0.0.1-py3-none-any.whl'
|
||||
]
|
||||
|
||||
[build]
|
||||
paths_exclude_pattern = [
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from bpy.types import Action, AnimData, Context, Object, PoseBone
|
||||
|
||||
from .data import Psa
|
||||
from psk_psa_py.psa.data import Psa
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
from mathutils import Matrix, Quaternion, Vector
|
||||
|
||||
@@ -22,6 +22,7 @@ class PsaBuildSequence:
|
||||
self.compression_ratio: float = 1.0
|
||||
self.key_quota: int = 0
|
||||
self.fps: float = 30.0
|
||||
self.group: Optional[str] = None
|
||||
|
||||
|
||||
class PsaBuildOptions:
|
||||
@@ -171,11 +172,20 @@ def build_psa(context: Context, options: PsaBuildOptions) -> Psa:
|
||||
sequence_duration = frame_count_raw / export_sequence.fps
|
||||
|
||||
psa_sequence = Psa.Sequence()
|
||||
|
||||
try:
|
||||
psa_sequence.name = bytes(export_sequence.name, encoding='windows-1252')
|
||||
except UnicodeEncodeError:
|
||||
raise RuntimeError(
|
||||
f'Sequence name "{export_sequence.name}" contains characters that cannot be encoded in the Windows-1252 codepage')
|
||||
|
||||
try:
|
||||
if export_sequence.group is not None:
|
||||
psa_sequence.group = bytes(export_sequence.group, encoding='windows-1252')
|
||||
except UnicodeDecodeError:
|
||||
raise RuntimeError(
|
||||
f'Group name "{export_sequence.group} contains characters that cannot be encoded in the Windows-1252 codepage')
|
||||
|
||||
psa_sequence.frame_count = frame_count
|
||||
psa_sequence.frame_start_index = frame_start_index
|
||||
psa_sequence.fps = frame_count / sequence_duration
|
||||
|
||||
@@ -55,7 +55,7 @@ def read_psa_config(psa_sequence_names: List[str], file_path: str) -> PsaConfig:
|
||||
|
||||
if config.has_section('RemoveTracks'):
|
||||
for key, value in config.items('RemoveTracks'):
|
||||
match = re.match(f'^(.+)\.(\d+)$', key)
|
||||
match = re.match(rf'^(.+)\.(\d+)$', key)
|
||||
if not match:
|
||||
continue
|
||||
sequence_name = match.group(1)
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
from collections import OrderedDict
|
||||
from typing import List, OrderedDict as OrderedDictType
|
||||
|
||||
from ctypes import Structure, c_char, c_int32, c_float
|
||||
from ..shared.data import PsxBone, Quaternion, Vector3
|
||||
|
||||
|
||||
class Psa:
    """
    In-memory representation of a PSA animation file: the bone list and the sequence table.

    Note that keys are not stored within the Psa object.
    Use the `PsaReader.get_sequence_keys` to get the keys for a sequence.
    """

    class Sequence(Structure):
        # One record of the ANIMINFO section. Fixed on-disk binary layout — do not
        # reorder or retype fields.
        _fields_ = [
            ('name', c_char * 64),
            ('group', c_char * 64),
            ('bone_count', c_int32),
            ('root_include', c_int32),
            ('compression_style', c_int32),
            ('key_quotum', c_int32),
            ('key_reduction', c_float),
            ('track_time', c_float),
            ('fps', c_float),
            ('start_bone', c_int32),
            ('frame_start_index', c_int32),
            ('frame_count', c_int32)
        ]

    class Key(Structure):
        # One record of the ANIMKEYS section: a single bone's transform for a single frame.
        _fields_ = [
            ('location', Vector3),
            ('rotation', Quaternion),
            ('time', c_float)
        ]

        @property
        def data(self):
            # Yield the key as 7 floats: rotation (w, x, y, z) followed by location (x, y, z).
            # Consumers (e.g. PsaReader.read_sequence_data_matrix) rely on this order.
            yield self.rotation.w
            yield self.rotation.x
            yield self.rotation.y
            yield self.rotation.z
            yield self.location.x
            yield self.location.y
            yield self.location.z

        def __repr__(self) -> str:
            return repr((self.location, self.rotation, self.time))

    def __init__(self):
        # Skeleton bone records (shared PsxBone layout with PSK files).
        self.bones: List[PsxBone] = []
        # Sequences keyed by decoded sequence name, preserving file order.
        self.sequences: OrderedDictType[str, Psa.Sequence] = OrderedDict()
        # Animation keys; may be left empty when keys are read lazily (see PsaReader).
        self.keys: List[Psa.Key] = []
|
||||
@@ -15,7 +15,7 @@ from .properties import (
|
||||
)
|
||||
from .ui import PSA_UL_export_sequences
|
||||
from ..builder import build_psa, PsaBuildSequence, PsaBuildOptions
|
||||
from ..writer import write_psa
|
||||
from psk_psa_py.psa.writer import write_psa_to_file
|
||||
from ...shared.helpers import populate_bone_collection_list, get_nla_strips_in_frame_range, PsxBoneCollection
|
||||
from ...shared.ui import draw_bone_filter_mode
|
||||
from ...shared.types import PSX_PG_action_export, PSX_PG_scene_export
|
||||
@@ -475,6 +475,7 @@ class PSA_OT_export(Operator, ExportHelper):
|
||||
continue
|
||||
export_sequence = PsaBuildSequence(context.active_object, animation_data)
|
||||
export_sequence.name = action_item.name
|
||||
export_sequence.group = action_item.group
|
||||
export_sequence.nla_state.action = action_item.action
|
||||
export_sequence.nla_state.frame_start = action_item.frame_start
|
||||
export_sequence.nla_state.frame_end = action_item.frame_end
|
||||
@@ -497,6 +498,7 @@ class PSA_OT_export(Operator, ExportHelper):
|
||||
for nla_strip_item in filter(lambda x: x.is_selected, pg.nla_strip_list):
|
||||
export_sequence = PsaBuildSequence(context.active_object, animation_data)
|
||||
export_sequence.name = nla_strip_item.name
|
||||
export_sequence.group = nla_strip_item.action.psa_export.group
|
||||
export_sequence.nla_state.frame_start = nla_strip_item.frame_start
|
||||
export_sequence.nla_state.frame_end = nla_strip_item.frame_end
|
||||
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [nla_strip_item.action])
|
||||
@@ -508,6 +510,7 @@ class PSA_OT_export(Operator, ExportHelper):
|
||||
export_sequence = PsaBuildSequence(active_action_item.armature_object, active_action_item.armature_object.animation_data)
|
||||
action = active_action_item.action
|
||||
export_sequence.name = action.name
|
||||
export_sequence.group = action.psa_export.group
|
||||
export_sequence.nla_state.action = action
|
||||
export_sequence.nla_state.frame_start = int(action.frame_range[0])
|
||||
export_sequence.nla_state.frame_end = int(action.frame_range[1])
|
||||
@@ -545,7 +548,7 @@ class PSA_OT_export(Operator, ExportHelper):
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||
return {'CANCELLED'}
|
||||
|
||||
write_psa(psa, self.filepath)
|
||||
write_psa_to_file(psa, self.filepath)
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
@@ -29,6 +29,7 @@ class PSA_PG_export_action_list_item(PropertyGroup):
|
||||
frame_start: IntProperty(options={'HIDDEN'})
|
||||
frame_end: IntProperty(options={'HIDDEN'})
|
||||
is_pose_marker: BoolProperty(options={'HIDDEN'})
|
||||
group: StringProperty()
|
||||
|
||||
|
||||
class PSA_PG_export_active_action_list_item(PropertyGroup):
|
||||
@@ -38,6 +39,7 @@ class PSA_PG_export_active_action_list_item(PropertyGroup):
|
||||
is_selected: BoolProperty(default=True)
|
||||
frame_start: IntProperty(options={'HIDDEN'})
|
||||
frame_end: IntProperty(options={'HIDDEN'})
|
||||
group: StringProperty()
|
||||
|
||||
|
||||
class PSA_PG_export_timeline_markers(PropertyGroup): # TODO: rename this to singular
|
||||
@@ -46,6 +48,7 @@ class PSA_PG_export_timeline_markers(PropertyGroup): # TODO: rename this to sin
|
||||
is_selected: BoolProperty(default=True)
|
||||
frame_start: IntProperty(options={'HIDDEN'})
|
||||
frame_end: IntProperty(options={'HIDDEN'})
|
||||
group: StringProperty()
|
||||
|
||||
|
||||
class PSA_PG_export_nla_strip_list_item(PropertyGroup):
|
||||
@@ -54,6 +57,7 @@ class PSA_PG_export_nla_strip_list_item(PropertyGroup):
|
||||
frame_start: FloatProperty()
|
||||
frame_end: FloatProperty()
|
||||
is_selected: BoolProperty(default=True)
|
||||
group: StringProperty()
|
||||
|
||||
|
||||
def get_sequences_from_name_and_frame_range(name: str, frame_start: int, frame_end: int):
|
||||
|
||||
@@ -9,7 +9,7 @@ from bpy_extras.io_utils import ImportHelper
|
||||
from .properties import PsaImportMixin, get_visible_sequences
|
||||
from ..config import read_psa_config
|
||||
from ..importer import BoneMapping, PsaImportOptions, import_psa
|
||||
from ..reader import PsaReader
|
||||
from psk_psa_py.psa.reader import PsaReader
|
||||
|
||||
|
||||
def psa_import_poll(cls, context: Context):
|
||||
|
||||
@@ -8,8 +8,8 @@ from mathutils import Vector, Quaternion
|
||||
from bpy_extras import anim_utils
|
||||
|
||||
from .config import PsaConfig, REMOVE_TRACK_LOCATION, REMOVE_TRACK_ROTATION
|
||||
from .reader import PsaReader
|
||||
from ..shared.data import PsxBone
|
||||
from psk_psa_py.psa.reader import PsaReader
|
||||
from psk_psa_py.shared.data import PsxBone
|
||||
|
||||
class BoneMapping:
|
||||
def __init__(self,
|
||||
|
||||
@@ -1,126 +0,0 @@
|
||||
from ctypes import sizeof
|
||||
from typing import List
|
||||
|
||||
import numpy as np
|
||||
|
||||
from .data import Psa, PsxBone
|
||||
from ..shared.data import Section
|
||||
|
||||
|
||||
def _try_fix_cue4parse_issue_103(sequences) -> bool:
|
||||
# Detect if the file was exported from CUE4Parse prior to the fix for issue #103.
|
||||
# https://github.com/FabianFG/CUE4Parse/issues/103
|
||||
# The issue was that the frame_start_index was not being set correctly, and was always being set to the same value
|
||||
# as the frame_count.
|
||||
# This fix will eventually be deprecated as it is only necessary for files exported prior to the fix.
|
||||
if len(sequences) > 0 and sequences[0].frame_start_index == sequences[0].frame_count:
|
||||
# Manually set the frame_start_index for each sequence. This assumes that the sequences are in order with
|
||||
# no shared frames between sequences (all exporters that I know of do this, so it's a safe assumption).
|
||||
frame_start_index = 0
|
||||
for i, sequence in enumerate(sequences):
|
||||
sequence.frame_start_index = frame_start_index
|
||||
frame_start_index += sequence.frame_count
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
class PsaReader(object):
    """
    This class reads the sequences and bone information immediately upon instantiation and holds onto a file handle.
    The keyframe data is not read into memory upon instantiation due to its potentially very large size.
    To read the key data for a particular sequence, call :read_sequence_keys.
    """

    def __init__(self, path):
        # Byte offset of the start of the ANIMKEYS payload; populated during _read.
        self.keys_data_offset: int = 0
        # The handle is kept open for the object's lifetime so keys can be read on demand.
        self.fp = open(path, 'rb')
        self.psa: Psa = self._read(self.fp)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close the underlying file handle when used as a context manager.
        self.fp.close()

    @property
    def bones(self):
        # Bone records read from the BONENAMES section.
        return self.psa.bones

    @property
    def sequences(self):
        # Ordered mapping of sequence name to Psa.Sequence, read from the ANIMINFO section.
        return self.psa.sequences

    def read_sequence_data_matrix(self, sequence_name: str) -> np.ndarray:
        """
        Reads and returns the data matrix for the given sequence.

        @param sequence_name: The name of the sequence.
        @return: An FxBx7 matrix where F is the number of frames, B is the number of bones.
        """
        sequence = self.psa.sequences[sequence_name]
        keys = self.read_sequence_keys(sequence_name)
        bone_count = len(self.bones)
        matrix_size = sequence.frame_count, bone_count, 7
        matrix = np.zeros(matrix_size)
        keys_iter = iter(keys)
        # Keys are laid out frame-major: all bones for frame 0, then all bones for frame 1, etc.
        for frame_index in range(sequence.frame_count):
            for bone_index in range(bone_count):
                # Each key contributes 7 floats (rotation w/x/y/z then location x/y/z — see Psa.Key.data).
                matrix[frame_index, bone_index, :] = list(next(keys_iter).data)
        return matrix

    def read_sequence_keys(self, sequence_name: str) -> List[Psa.Key]:
        """
        Reads and returns the key data for a sequence.

        @param sequence_name: The name of the sequence.
        @return: A list of Psa.Keys.
        """
        # Set the file reader to the beginning of the keys data
        sequence = self.psa.sequences[sequence_name]
        data_size = sizeof(Psa.Key)
        bone_count = len(self.psa.bones)
        # There is one key per bone per frame.
        buffer_length = data_size * bone_count * sequence.frame_count
        # The sequence's keys start frame_start_index frames into the ANIMKEYS payload.
        sequence_keys_offset = self.keys_data_offset + (sequence.frame_start_index * bone_count * data_size)
        self.fp.seek(sequence_keys_offset, 0)
        buffer = self.fp.read(buffer_length)
        offset = 0
        keys = []
        for _ in range(sequence.frame_count * bone_count):
            key = Psa.Key.from_buffer_copy(buffer, offset)
            keys.append(key)
            offset += data_size
        return keys

    @staticmethod
    def _read_types(fp, data_class, section: Section, data):
        # Read section.data_count fixed-size records of data_class from fp and append them to data.
        buffer_length = section.data_size * section.data_count
        buffer = fp.read(buffer_length)
        offset = 0
        for _ in range(section.data_count):
            data.append(data_class.from_buffer_copy(buffer, offset))
            offset += section.data_size

    def _read(self, fp) -> Psa:
        # Parse the PSA section stream, populating bones and sequences.
        # The ANIMKEYS payload itself is skipped; only its file offset is recorded so that
        # read_sequence_keys can seek to it later.
        psa = Psa()
        while fp.read(1):
            # A byte was available; rewind it and read the full section header.
            fp.seek(-1, 1)
            section = Section.from_buffer_copy(fp.read(sizeof(Section)))
            if section.name == b'ANIMHEAD':
                pass
            elif section.name == b'BONENAMES':
                PsaReader._read_types(fp, PsxBone, section, psa.bones)
            elif section.name == b'ANIMINFO':
                sequences = []
                PsaReader._read_types(fp, Psa.Sequence, section, sequences)
                # Try to fix CUE4Parse bug, if necessary.
                _try_fix_cue4parse_issue_103(sequences)
                for sequence in sequences:
                    psa.sequences[sequence.name.decode()] = sequence
            elif section.name == b'ANIMKEYS':
                # Skip keys on this pass. We will keep this file open and read from it as needed.
                self.keys_data_offset = fp.tell()
                fp.seek(section.data_size * section.data_count, 1)
            else:
                # Unknown section: skip its payload so parsing can continue.
                fp.seek(section.data_size * section.data_count, 1)
                print(f'Unrecognized section in PSA: "{section.name}"')
        return psa
|
||||
@@ -1,25 +0,0 @@
|
||||
from ctypes import Structure, sizeof
|
||||
from typing import Optional, Type, Collection
|
||||
|
||||
from .data import Psa
|
||||
from ..shared.data import PsxBone, Section
|
||||
|
||||
|
||||
def write_section(fp, name: bytes, data_type: Optional[Type[Structure]] = None, data: Optional[Collection] = None):
    """
    Write a single named PSA section to *fp*: the Section header, followed by each record.

    When *data_type* and *data* are both provided, the header's data_size and data_count
    are filled in from them; otherwise they stay at their zero defaults (header-only section).
    """
    header = Section()
    header.name = name
    has_payload = data is not None
    if data_type is not None and has_payload:
        header.data_size = sizeof(data_type)
        header.data_count = len(data)
    fp.write(header)
    if has_payload:
        for record in data:
            fp.write(record)
|
||||
|
||||
|
||||
def write_psa(psa: Psa, path: str):
    """
    Serialize *psa* to a PSA file at *path*, emitting the ANIMHEAD, BONENAMES,
    ANIMINFO and ANIMKEYS sections in order.
    """
    with open(path, 'wb') as fp:
        write_section(fp, b'ANIMHEAD')
        for section_name, record_type, records in (
                (b'BONENAMES', PsxBone, psa.bones),
                (b'ANIMINFO', Psa.Sequence, list(psa.sequences.values())),
                (b'ANIMKEYS', Psa.Key, psa.keys)):
            write_section(fp, section_name, record_type, records)
|
||||
@@ -4,9 +4,9 @@ import numpy as np
|
||||
from bpy.types import Armature, Collection, Context, Depsgraph, Object, ArmatureModifier, Mesh
|
||||
from mathutils import Matrix
|
||||
from typing import Dict, Iterable, List, Optional, Set, cast as typing_cast
|
||||
from .data import Psk
|
||||
from psk_psa_py.shared.data import Vector3
|
||||
from psk_psa_py.psk.data import Psk
|
||||
from .properties import triangle_type_and_bit_flags_to_poly_flags
|
||||
from ..shared.data import Vector3
|
||||
from ..shared.dfs import DfsObject, dfs_collection_objects, dfs_view_layer_objects
|
||||
from ..shared.helpers import (
|
||||
PsxBoneCollection,
|
||||
|
||||
@@ -1,153 +0,0 @@
|
||||
from ctypes import Structure, c_uint32, c_float, c_int32, c_uint8, c_int8, c_int16, c_char, c_uint16
|
||||
from typing import List
|
||||
|
||||
from ..shared.data import Vector3, Quaternion, Color, Vector2, PsxBone
|
||||
|
||||
|
||||
class Psk(object):
    """
    In-memory representation of a PSK model file: points, wedges, faces, materials,
    bones, weights, and the optional sections (extra UVs, vertex colors, vertex
    normals, morph data, and sidecar material references).
    """

    class Wedge(object):
        # Intermediate (non-ctypes) wedge; the on-disk layouts are Wedge16 and Wedge32.
        def __init__(self, point_index: int, u: float, v: float, material_index: int = 0):
            self.point_index: int = point_index
            self.u: float = u
            self.v: float = v
            self.material_index = material_index

        def __hash__(self):
            # Hash all identifying components so equal wedges can be de-duplicated.
            return hash(f'{self.point_index}-{self.u}-{self.v}-{self.material_index}')

    class Wedge16(Structure):
        # On-disk wedge record with an 8-bit material index plus explicit padding
        # (one VTXW0000 layout variant; selected by record size when reading).
        _fields_ = [
            ('point_index', c_uint32),
            ('u', c_float),
            ('v', c_float),
            ('material_index', c_uint8),
            ('reserved', c_int8),
            ('padding2', c_int16)
        ]

    class Wedge32(Structure):
        # On-disk wedge record with a 32-bit material index (the other VTXW0000 layout).
        _fields_ = [
            ('point_index', c_uint32),
            ('u', c_float),
            ('v', c_float),
            ('material_index', c_uint32)
        ]

    class Face(Structure):
        # Face record with 16-bit wedge indices (FACE0000 section).
        _fields_ = [
            ('wedge_indices', c_uint16 * 3),
            ('material_index', c_uint8),
            ('aux_material_index', c_uint8),
            ('smoothing_groups', c_int32)
        ]

    class Face32(Structure):
        # Face record with 32-bit wedge indices (FACE3200 section).
        # Packed to 1-byte alignment so no padding is inserted before smoothing_groups.
        _pack_ = 1
        _fields_ = [
            ('wedge_indices', c_uint32 * 3),
            ('material_index', c_uint8),
            ('aux_material_index', c_uint8),
            ('smoothing_groups', c_int32)
        ]

    class Material(Structure):
        # Material record (MATT0000 section).
        _fields_ = [
            ('name', c_char * 64),
            ('texture_index', c_int32),
            ('poly_flags', c_int32),
            ('aux_material', c_int32),
            ('aux_flags', c_int32),
            ('lod_bias', c_int32),
            ('lod_style', c_int32)
        ]

    class Bone(Structure):
        # Bone record layout; note readers populate bones with the shared PsxBone type.
        _fields_ = [
            ('name', c_char * 64),
            ('flags', c_int32),
            ('children_count', c_int32),
            ('parent_index', c_int32),
            ('rotation', Quaternion),
            ('location', Vector3),
            ('length', c_float),
            ('size', Vector3)
        ]

    class Weight(Structure):
        # Binds a point to a bone with a blend weight (RAWWEIGHTS section).
        _fields_ = [
            ('weight', c_float),
            ('point_index', c_int32),
            ('bone_index', c_int32),
        ]

    class MorphInfo(Structure):
        # Morph target header (MRPHINFO section).
        _fields_ = [
            ('name', c_char * 64),
            ('vertex_count', c_int32)
        ]

    class MorphData(Structure):
        # Per-vertex morph delta (MRPHDATA section).
        _fields_ = [
            ('position_delta', Vector3),
            ('tangent_z_delta', Vector3),
            ('point_index', c_int32)
        ]

    @property
    def has_extra_uvs(self):
        # True when at least one extra UV channel was read.
        return len(self.extra_uvs) > 0

    @property
    def has_vertex_colors(self):
        return len(self.vertex_colors) > 0

    @property
    def has_vertex_normals(self):
        return len(self.vertex_normals) > 0

    @property
    def has_material_references(self):
        return len(self.material_references) > 0

    @property
    def has_morph_data(self):
        # Presence of morph headers implies morph data.
        return len(self.morph_infos) > 0

    def sort_and_normalize_weights(self):
        """
        Sort the weights by point index and normalize them in place so that the
        weights for each point sum to 1.0.
        """
        self.weights.sort(key=lambda x: x.point_index)

        weight_index = 0
        weight_total = len(self.weights)

        while weight_index < weight_total:
            point_index = self.weights[weight_index].point_index
            weight_sum = self.weights[weight_index].weight
            point_weight_total = 1

            # Calculate the sum of weights for the current point_index.
            for i in range(weight_index + 1, weight_total):
                if self.weights[i].point_index != point_index:
                    break
                weight_sum += self.weights[i].weight
                point_weight_total += 1

            # Normalize the weights for the current point_index.
            for i in range(weight_index, weight_index + point_weight_total):
                self.weights[i].weight /= weight_sum

            # Move to the next group of weights.
            weight_index += point_weight_total

    def __init__(self):
        self.points: List[Vector3] = []
        self.wedges: List[Psk.Wedge] = []
        self.faces: List[Psk.Face] = []
        self.materials: List[Psk.Material] = []
        self.weights: List[Psk.Weight] = []
        self.bones: List[PsxBone] = []
        # Extra UV channels are stored flattened: channel 0's wedges, then channel 1's, etc.
        self.extra_uvs: List[Vector2] = []
        self.vertex_colors: List[Color] = []
        self.vertex_normals: List[Vector3] = []
        self.morph_infos: List[Psk.MorphInfo] = []
        self.morph_data: List[Psk.MorphData] = []
        # Fully-qualified material reference paths from the optional *.props.txt sidecar file.
        self.material_references: List[str] = []
|
||||
@@ -1,5 +1,5 @@
|
||||
from pathlib import Path
|
||||
from typing import Iterable, List, Optional, cast as typing_cast
|
||||
from typing import Iterable, List
|
||||
|
||||
import bpy
|
||||
from bpy.props import BoolProperty, StringProperty
|
||||
@@ -14,7 +14,7 @@ from ..builder import (
|
||||
get_psk_input_objects_for_collection,
|
||||
get_psk_input_objects_for_context,
|
||||
)
|
||||
from ..writer import write_psk
|
||||
from psk_psa_py.psk.writer import write_psk_to_path
|
||||
from ...shared.helpers import PsxBoneCollection, get_collection_export_operator_from_context, populate_bone_collection_list
|
||||
from ...shared.ui import draw_bone_filter_mode
|
||||
|
||||
@@ -286,7 +286,7 @@ class PSK_OT_export_collection(Operator, ExportHelper, PskExportMixin):
|
||||
result = build_psk(context, input_objects, options)
|
||||
for warning in result.warnings:
|
||||
self.report({'WARNING'}, warning)
|
||||
write_psk(result.psk, filepath)
|
||||
write_psk_to_path(result.psk, filepath)
|
||||
if len(result.warnings) > 0:
|
||||
self.report({'WARNING'}, f'PSK export successful with {len(result.warnings)} warnings')
|
||||
else:
|
||||
@@ -526,7 +526,7 @@ class PSK_OT_export(Operator, ExportHelper):
|
||||
result = build_psk(context, input_objects, options)
|
||||
for warning in result.warnings:
|
||||
self.report({'WARNING'}, warning)
|
||||
write_psk(result.psk, self.filepath)
|
||||
write_psk_to_path(result.psk, self.filepath)
|
||||
if len(result.warnings) > 0:
|
||||
self.report({'WARNING'}, f'PSK export successful with {len(result.warnings)} warnings')
|
||||
else:
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
from bpy.types import Context
|
||||
from bpy.props import (
|
||||
BoolProperty,
|
||||
CollectionProperty,
|
||||
@@ -9,7 +8,6 @@ from bpy.props import (
|
||||
)
|
||||
from bpy.types import Material, PropertyGroup
|
||||
|
||||
from ...shared.helpers import get_collection_export_operator_from_context
|
||||
from ...shared.types import ExportSpaceMixin, TransformMixin, PsxBoneExportMixin
|
||||
|
||||
object_eval_state_items = (
|
||||
|
||||
@@ -7,7 +7,7 @@ from bpy_extras.io_utils import ImportHelper
|
||||
|
||||
from ..importer import PskImportOptions, import_psk
|
||||
from ..properties import PskImportMixin
|
||||
from ..reader import read_psk
|
||||
from psk_psa_py.psk.reader import read_psk_from_file
|
||||
|
||||
|
||||
def get_psk_import_options_from_properties(property_group: PskImportMixin):
|
||||
@@ -91,7 +91,7 @@ class PSK_OT_import(Operator, ImportHelper, PskImportMixin):
|
||||
|
||||
def execute(self, context):
|
||||
try:
|
||||
psk = read_psk(self.filepath)
|
||||
psk = read_psk_from_file(self.filepath)
|
||||
except OSError as e:
|
||||
self.report({'ERROR'}, f'Failed to read "{self.filepath}". The file may be corrupted or not a valid PSK file: {e}')
|
||||
return {'CANCELLED'}
|
||||
@@ -144,7 +144,7 @@ class PSK_OT_import_drag_and_drop(Operator, PskImportMixin):
|
||||
for file in self.files:
|
||||
filepath = Path(self.directory) / file.name
|
||||
try:
|
||||
psk = read_psk(filepath)
|
||||
psk = read_psk_from_file(filepath)
|
||||
except OSError as e:
|
||||
self.report({'ERROR'}, f'Failed to read "{filepath}". The file may be corrupted or not a valid PSK file: {e}')
|
||||
return {'CANCELLED'}
|
||||
|
||||
@@ -2,13 +2,13 @@ import bmesh
|
||||
import bpy
|
||||
import numpy as np
|
||||
|
||||
from bpy.types import Context, Object, VertexGroup
|
||||
from bpy.types import Context, Object, VertexGroup, ArmatureModifier, FloatColorAttribute
|
||||
from mathutils import Matrix, Quaternion, Vector
|
||||
from typing import List, Optional
|
||||
from typing import List, Optional, cast as typing_cast
|
||||
|
||||
from .data import Psk
|
||||
from psk_psa_py.psk.data import Psk
|
||||
from psk_psa_py.shared.data import PsxBone
|
||||
from .properties import poly_flags_to_triangle_type_and_bit_flags
|
||||
from ..shared.data import PsxBone
|
||||
from ..shared.helpers import is_bdk_addon_loaded, rgb_to_srgb
|
||||
|
||||
|
||||
@@ -201,16 +201,15 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions)
|
||||
|
||||
# Extra UVs
|
||||
if psk.has_extra_uvs and options.should_import_extra_uvs:
|
||||
extra_uv_channel_count = int(len(psk.extra_uvs) / len(psk.wedges))
|
||||
wedge_index_offset = 0
|
||||
for extra_uv_index, extra_uvs in enumerate(psk.extra_uvs):
|
||||
uv_layer_data = np.zeros((face_count * 3, 2), dtype=np.float32)
|
||||
for extra_uv_index in range(extra_uv_channel_count):
|
||||
uv_layer_data_index = 0
|
||||
for face_index, face in enumerate(psk.faces):
|
||||
if face_index in invalid_face_indices:
|
||||
continue
|
||||
for wedge_index in reversed(face.wedge_indices):
|
||||
u, v = psk.extra_uvs[wedge_index_offset + wedge_index]
|
||||
u, v = extra_uvs[wedge_index_offset + wedge_index]
|
||||
uv_layer_data[uv_layer_data_index] = u, 1.0 - v
|
||||
uv_layer_data_index += 1
|
||||
wedge_index_offset += len(psk.wedges)
|
||||
@@ -241,6 +240,7 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions)
|
||||
|
||||
# Create the vertex color attribute.
|
||||
face_corner_color_attribute = mesh_data.attributes.new(name='VERTEXCOLOR', type='FLOAT_COLOR', domain='CORNER')
|
||||
face_corner_color_attribute = typing_cast(FloatColorAttribute, face_corner_color_attribute)
|
||||
face_corner_color_attribute.data.foreach_set('color', face_corner_colors.ravel())
|
||||
|
||||
# Vertex Normals
|
||||
@@ -287,10 +287,12 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions)
|
||||
# Add armature modifier to our mesh object.
|
||||
if options.should_import_armature:
|
||||
armature_modifier = mesh_object.modifiers.new(name='Armature', type='ARMATURE')
|
||||
armature_modifier = typing_cast(ArmatureModifier, armature_modifier)
|
||||
armature_modifier.object = armature_object
|
||||
mesh_object.parent = armature_object
|
||||
|
||||
root_object = armature_object if options.should_import_armature else mesh_object
|
||||
assert root_object
|
||||
root_object.scale = (options.scale, options.scale, options.scale)
|
||||
|
||||
try:
|
||||
|
||||
@@ -1,95 +0,0 @@
|
||||
import ctypes
|
||||
import os
|
||||
import re
|
||||
import warnings
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
from ..shared.data import Section
|
||||
from .data import Color, Psk, PsxBone, Vector2, Vector3
|
||||
|
||||
|
||||
def _read_types(fp, data_class, section: Section, data):
    """
    Read `section.data_count` fixed-size records of *data_class* from *fp* and
    append each one to *data*.
    """
    stride = section.data_size
    raw = fp.read(stride * section.data_count)
    for index in range(section.data_count):
        data.append(data_class.from_buffer_copy(raw, index * stride))
|
||||
|
||||
|
||||
def _read_material_references(path: str) -> List[str]:
|
||||
property_file_path = Path(path).with_suffix('.props.txt')
|
||||
if not property_file_path.is_file():
|
||||
# Property file does not exist.
|
||||
return []
|
||||
# Do a crude regex match to find the Material list entries.
|
||||
contents = property_file_path.read_text()
|
||||
pattern = r'Material\s*=\s*([^\s^,]+)'
|
||||
return re.findall(pattern, contents)
|
||||
|
||||
|
||||
def read_psk(path: str) -> Psk:
|
||||
psk = Psk()
|
||||
|
||||
# Read the PSK file sections.
|
||||
with open(path, 'rb') as fp:
|
||||
while fp.read(1):
|
||||
fp.seek(-1, 1)
|
||||
section = Section.from_buffer_copy(fp.read(ctypes.sizeof(Section)))
|
||||
match section.name:
|
||||
case b'ACTRHEAD':
|
||||
pass
|
||||
case b'PNTS0000':
|
||||
_read_types(fp, Vector3, section, psk.points)
|
||||
case b'VTXW0000':
|
||||
if section.data_size == ctypes.sizeof(Psk.Wedge16):
|
||||
_read_types(fp, Psk.Wedge16, section, psk.wedges)
|
||||
elif section.data_size == ctypes.sizeof(Psk.Wedge32):
|
||||
_read_types(fp, Psk.Wedge32, section, psk.wedges)
|
||||
else:
|
||||
raise RuntimeError('Unrecognized wedge format')
|
||||
case b'FACE0000':
|
||||
_read_types(fp, Psk.Face, section, psk.faces)
|
||||
case b'MATT0000':
|
||||
_read_types(fp, Psk.Material, section, psk.materials)
|
||||
case b'REFSKELT':
|
||||
_read_types(fp, PsxBone, section, psk.bones)
|
||||
case b'RAWWEIGHTS':
|
||||
_read_types(fp, Psk.Weight, section, psk.weights)
|
||||
case b'FACE3200':
|
||||
_read_types(fp, Psk.Face32, section, psk.faces)
|
||||
case b'VERTEXCOLOR':
|
||||
_read_types(fp, Color, section, psk.vertex_colors)
|
||||
case b'VTXNORMS':
|
||||
_read_types(fp, Vector3, section, psk.vertex_normals)
|
||||
case b'MRPHINFO':
|
||||
_read_types(fp, Psk.MorphInfo, section, psk.morph_infos)
|
||||
case b'MRPHDATA':
|
||||
_read_types(fp, Psk.MorphData, section, psk.morph_data)
|
||||
case _:
|
||||
if section.name.startswith(b'EXTRAUV'):
|
||||
_read_types(fp, Vector2, section, psk.extra_uvs)
|
||||
else:
|
||||
# Section is not handled, skip it.
|
||||
fp.seek(section.data_size * section.data_count, os.SEEK_CUR)
|
||||
warnings.warn(f'Unrecognized section "{section.name} at position {fp.tell():15}"')
|
||||
|
||||
"""
|
||||
UEViewer exports a sidecar file (*.props.txt) with fully-qualified reference paths for each material
|
||||
(e.g., Texture'Package.Group.Object').
|
||||
"""
|
||||
psk.material_references = _read_material_references(path)
|
||||
|
||||
"""
|
||||
Tools like UEViewer and CUE4Parse write the point index as a 32-bit integer, exploiting the fact that due to struct
|
||||
alignment, there were 16-bits of padding following the original 16-bit point index in the wedge struct.
|
||||
However, this breaks compatibility with PSK files that were created with older tools that treated the
|
||||
point index as a 16-bit integer and might have junk data written to the padding bits.
|
||||
To work around this, we check if each point is still addressable using a 16-bit index, and if it is, assume the
|
||||
point index is a 16-bit integer and truncate the high bits.
|
||||
"""
|
||||
if len(psk.points) <= 65536:
|
||||
for wedge in psk.wedges:
|
||||
wedge.point_index &= 0xFFFF
|
||||
|
||||
return psk
|
||||
@@ -1,61 +0,0 @@
|
||||
import os
from ctypes import Structure, sizeof
from typing import Optional, Type

from .data import Psk
from ..shared.data import PsxBone, Section, Vector3
|
||||
|
||||
MAX_WEDGE_COUNT = 65536
|
||||
MAX_POINT_COUNT = 4294967296
|
||||
MAX_BONE_COUNT = 2147483647
|
||||
MAX_MATERIAL_COUNT = 256
|
||||
|
||||
|
||||
def _write_section(fp, name: bytes, data_type: Optional[Type[Structure]] = None, data: Optional[list] = None):
    """Write a single PSK section header (and optional payload) to the file.

    Args:
        fp: Binary file object opened for writing.
        name: Section identifier (e.g., b'PNTS0000'); padded by the Section struct.
        data_type: ctypes Structure type of each payload element, used to record
            the per-element size in the header. None for header-only sections.
        data: List of payload elements written after the header, or None.
    """
    section = Section()
    section.name = name
    # Header-only sections (e.g., ACTRHEAD) leave data_size/data_count at zero.
    if data_type is not None and data is not None:
        section.data_size = sizeof(data_type)
        section.data_count = len(data)
    fp.write(section)
    if data is not None:
        for datum in data:
            fp.write(datum)
|
||||
|
||||
|
||||
def write_psk(psk: Psk, path: str):
    """Serialize a Psk object to a PSK file on disk.

    Sections are written in the canonical order: ACTRHEAD, PNTS0000, VTXW0000,
    FACE0000, MATT0000, REFSKELT, RAWWEIGHTS.

    Args:
        psk: The populated Psk data object to serialize.
        path: Destination file path; parent directories are created as needed.

    Raises:
        RuntimeError: If a format limit is exceeded, no bones are present, or
            the file cannot be written due to insufficient permissions.
    """
    if len(psk.wedges) > MAX_WEDGE_COUNT:
        raise RuntimeError(f'Number of wedges ({len(psk.wedges)}) exceeds limit of {MAX_WEDGE_COUNT}')
    if len(psk.points) > MAX_POINT_COUNT:
        raise RuntimeError(f'Number of vertices ({len(psk.points)}) exceeds limit of {MAX_POINT_COUNT}')
    if len(psk.materials) > MAX_MATERIAL_COUNT:
        raise RuntimeError(f'Number of materials ({len(psk.materials)}) exceeds limit of {MAX_MATERIAL_COUNT}')
    if len(psk.bones) > MAX_BONE_COUNT:
        raise RuntimeError(f'Number of bones ({len(psk.bones)}) exceeds limit of {MAX_BONE_COUNT}')
    if len(psk.bones) == 0:
        raise RuntimeError('At least one bone must be marked for export')

    # Make the directory for the file if it doesn't exist. Guard against a bare
    # filename, where os.path.dirname returns '' and os.makedirs would raise.
    directory = os.path.dirname(path)
    if directory:
        os.makedirs(directory, exist_ok=True)

    try:
        with open(path, 'wb') as fp:
            _write_section(fp, b'ACTRHEAD')
            _write_section(fp, b'PNTS0000', Vector3, psk.points)

            # Always emit the legacy 16-bit wedge layout.
            wedges = []
            for w in psk.wedges:
                wedge = Psk.Wedge16()
                wedge.material_index = w.material_index
                wedge.u = w.u
                wedge.v = w.v
                wedge.point_index = w.point_index
                wedges.append(wedge)

            _write_section(fp, b'VTXW0000', Psk.Wedge16, wedges)
            _write_section(fp, b'FACE0000', Psk.Face, psk.faces)
            _write_section(fp, b'MATT0000', Psk.Material, psk.materials)
            _write_section(fp, b'REFSKELT', PsxBone, psk.bones)
            _write_section(fp, b'RAWWEIGHTS', Psk.Weight, psk.weights)
    except PermissionError as e:
        try:
            user = os.getlogin()
        except OSError:
            # os.getlogin() fails without a controlling terminal (e.g., services);
            # don't let that mask the original permission error.
            user = 'unknown'
        raise RuntimeError(f'The current user "{user}" does not have permission to write to "{path}"') from e
|
||||
@@ -1,113 +0,0 @@
|
||||
from ctypes import Structure, c_char, c_int32, c_float, c_ubyte
|
||||
from typing import Tuple
|
||||
|
||||
|
||||
class Color(Structure):
    """An RGBA color with 8-bit unsigned channels."""

    _fields_ = [
        ('r', c_ubyte),
        ('g', c_ubyte),
        ('b', c_ubyte),
        ('a', c_ubyte),
    ]

    def __iter__(self):
        # Yield channels in (r, g, b, a) order so tuple(color) round-trips.
        yield self.r
        yield self.g
        yield self.b
        yield self.a

    def __eq__(self, other):
        # Return NotImplemented for foreign types instead of raising
        # AttributeError, so comparison falls back to Python's default.
        if not isinstance(other, Color):
            return NotImplemented
        return self.r == other.r and self.g == other.g and self.b == other.b and self.a == other.a

    def __repr__(self):
        return repr(tuple(self))

    def normalized(self) -> Tuple:
        """Return the color as a tuple of floats scaled to the [0.0, 1.0] range."""
        return (
            self.r / 255.0,
            self.g / 255.0,
            self.b / 255.0,
            self.a / 255.0
        )
|
||||
|
||||
|
||||
class Vector2(Structure):
    """A pair of 32-bit floats, e.g. a UV coordinate."""

    _fields_ = [
        ('x', c_float),
        ('y', c_float),
    ]

    def __iter__(self):
        return iter((self.x, self.y))

    def __repr__(self):
        return repr((self.x, self.y))
|
||||
|
||||
|
||||
class Vector3(Structure):
    """A triple of 32-bit floats."""

    _fields_ = [
        ('x', c_float),
        ('y', c_float),
        ('z', c_float),
    ]

    def __iter__(self):
        return iter((self.x, self.y, self.z))

    def __repr__(self):
        return repr(tuple(self))

    @classmethod
    def zero(cls):
        """Return the zero vector (0, 0, 0)."""
        return Vector3(0, 0, 0)
|
||||
|
||||
|
||||
class Quaternion(Structure):
    """A quaternion stored on disk as (x, y, z, w) 32-bit floats.

    Iteration deliberately yields components in (w, x, y, z) order — note this
    differs from the field order, presumably for w-first consumers; keep it.
    """

    _fields_ = [
        ('x', c_float),
        ('y', c_float),
        ('z', c_float),
        ('w', c_float),
    ]

    def __iter__(self):
        return iter((self.w, self.x, self.y, self.z))

    def __repr__(self):
        return repr(tuple(self))

    @classmethod
    def identity(cls):
        """Return the identity rotation (x=0, y=0, z=0, w=1)."""
        return Quaternion(0, 0, 0, 1)
|
||||
|
||||
|
||||
class PsxBone(Structure):
    """A single bone record, shared by the PSK and PSA formats."""

    _fields_ = [
        ('name', c_char * 64),        # Fixed-width, null-padded bone name.
        ('flags', c_int32),
        ('children_count', c_int32),  # Number of direct child bones.
        ('parent_index', c_int32),
        ('rotation', Quaternion),
        ('location', Vector3),
        ('length', c_float),
        ('size', Vector3)
    ]
|
||||
|
||||
|
||||
class Section(Structure):
    """Header that precedes each data section in a PSK/PSA file."""

    _fields_ = [
        ('name', c_char * 20),    # Section identifier, e.g. b'PNTS0000'.
        ('type_flags', c_int32),
        ('data_size', c_int32),   # Size in bytes of a single payload element.
        ('data_count', c_int32)   # Number of payload elements following the header.
    ]

    def __init__(self, *args, **kw):
        super().__init__(*args, **kw)
        # NOTE(review): 1999801 appears to be the conventional type_flags value
        # for this format; written unconditionally for every section — confirm
        # against the original exporter before changing.
        self.type_flags = 1999801
|
||||
@@ -3,8 +3,7 @@ from collections import Counter
|
||||
from typing import List, Iterable, Optional, Dict, Tuple, cast as typing_cast
|
||||
from bpy.types import Armature, AnimData, Collection, Context, Object, ArmatureModifier, SpaceProperties
|
||||
from mathutils import Matrix, Vector, Quaternion as BpyQuaternion
|
||||
from .data import Vector3, Quaternion
|
||||
from ..shared.data import PsxBone
|
||||
from psk_psa_py.shared.data import PsxBone, Vector3, Quaternion
|
||||
|
||||
|
||||
def rgb_to_srgb(c: float) -> float:
|
||||
|
||||
@@ -29,6 +29,7 @@ class PSX_PG_bone_collection_list_item(PropertyGroup):
|
||||
|
||||
|
||||
class PSX_PG_action_export(PropertyGroup):
|
||||
group: StringProperty(name='Group', description='The group of the sequence', maxlen=64)
|
||||
compression_ratio: FloatProperty(name='Compression Ratio', default=1.0, min=0.0, max=1.0, subtype='FACTOR', description='The key sampling ratio of the exported sequence.\n\nA compression ratio of 1.0 will export all frames, while a compression ratio of 0.5 will export half of the frames')
|
||||
key_quota: IntProperty(name='Key Quota', default=0, min=1, description='The minimum number of frames to be exported')
|
||||
fps: FloatProperty(name='FPS', default=30.0, min=0.0, description='The frame rate of the exported sequence')
|
||||
@@ -49,12 +50,16 @@ class PSX_PT_action(Panel):
|
||||
def draw(self, context: 'Context'):
|
||||
action = context.active_action
|
||||
layout = self.layout
|
||||
assert layout is not None
|
||||
flow = layout.grid_flow(columns=1)
|
||||
flow.use_property_split = True
|
||||
flow.use_property_decorate = False
|
||||
flow.prop(action.psa_export, 'compression_ratio')
|
||||
flow.prop(action.psa_export, 'key_quota')
|
||||
flow.prop(action.psa_export, 'fps')
|
||||
psa_export = getattr(action, 'psa_export')
|
||||
assert psa_export
|
||||
flow.prop(psa_export, 'compression_ratio')
|
||||
flow.prop(psa_export, 'key_quota')
|
||||
flow.prop(psa_export, 'fps')
|
||||
flow.prop(psa_export, 'group', placeholder='Group')
|
||||
|
||||
|
||||
bone_filter_mode_items = (
|
||||
|
||||
@@ -283,6 +283,7 @@ def test_psk_import_shape_keys():
|
||||
assert shape_key.value == expected_value, f"Shape key {shape_key.name} should have a value of {expected_value} (found {shape_key.value})"
|
||||
assert shape_key.name == shape_key_names[i], f"Shape key {i} name should be named {shape_key_names[i]}"
|
||||
|
||||
|
||||
def test_psk_import_without_shape_keys():
|
||||
assert bpy.ops.psk.import_file(
|
||||
filepath=SLURP_MONSTER_AXE_FILEPATH,
|
||||
|
||||
@@ -1 +1,3 @@
|
||||
pytest
|
||||
pytest-cov
|
||||
psk-psa-py
|
||||
|
||||
Reference in New Issue
Block a user