Merge branch 'feature-psk-mesh-wysiwyg'

This commit is contained in:
Colin Basnett
2022-07-26 15:03:58 -07:00
10 changed files with 943 additions and 798 deletions

View File

@@ -1,5 +1,6 @@
import datetime
from collections import Counter
import re
from typing import List, Iterable
from bpy.types import NlaStrip, Object
@@ -31,22 +32,11 @@ def rgb_to_srgb(c):
return 12.92 * c
def get_nla_strips_ending_at_frame(object, frame) -> List[NlaStrip]:
if object is None or object.animation_data is None:
def get_nla_strips_in_timeframe(animation_data, frame_min, frame_max) -> List[NlaStrip]:
if animation_data is None:
return []
strips = []
for nla_track in object.animation_data.nla_tracks:
for strip in nla_track.strips:
if strip.frame_end == frame:
strips.append(strip)
return strips
def get_nla_strips_in_timeframe(object, frame_min, frame_max) -> List[NlaStrip]:
if object is None or object.animation_data is None:
return []
strips = []
for nla_track in object.animation_data.nla_tracks:
for nla_track in animation_data.nla_tracks:
if nla_track.mute:
continue
for strip in nla_track.strips:
@@ -103,6 +93,14 @@ def get_psa_sequence_name(action, should_use_original_sequence_name):
return action.name
def check_bone_names(bone_names: Iterable[str]):
    """Validate that every bone name is exportable.

    Bone names may contain only ASCII letters, digits, spaces, and underscores.
    Raises a RuntimeError listing every offending name if any fail the check.
    """
    valid_name_pattern = re.compile(r'^[a-zA-Z0-9_ ]+$')
    invalid_bone_names = [name for name in bone_names if not valid_name_pattern.match(name)]
    if invalid_bone_names:
        raise RuntimeError(f'The following bone names are invalid: {invalid_bone_names}.\n'
                           f'Bone names must only contain letters, numbers, spaces, and underscores.')
def get_export_bone_names(armature_object, bone_filter_mode, bone_group_indices: List[int]) -> List[str]:
"""
Returns a sorted list of bone indices that should be exported for the given bone filter mode and bone groups.

View File

@@ -1,14 +1,15 @@
from typing import Dict, Iterable
from bpy.types import Action
from mathutils import Matrix
from .data import *
from ..helpers import *
class PsaBuilderOptions(object):
class PsaBuildOptions(object):
def __init__(self):
self.should_override_animation_data = False
self.animation_data_override = None
self.fps_source = 'SCENE'
self.fps_custom = 30.0
self.sequence_source = 'ACTIONS'
@@ -23,11 +24,7 @@ class PsaBuilderOptions(object):
self.root_motion = False
class PsaBuilder(object):
def __init__(self):
pass
def get_sequence_fps(self, context, options: PsaBuilderOptions, actions: Iterable[Action]) -> float:
def get_sequence_fps(context, options: PsaBuildOptions, actions: Iterable[Action]) -> float:
if options.fps_source == 'SCENE':
return context.scene.render.fps
if options.fps_source == 'CUSTOM':
@@ -47,16 +44,64 @@ class PsaBuilder(object):
else:
raise RuntimeError(f'Invalid FPS source "{options.fps_source}"')
def build(self, context, options: PsaBuilderOptions) -> Psa:
def get_timeline_marker_sequence_frame_ranges(animation_data, context, options: PsaBuildOptions) -> Dict:
    """Map each selected timeline marker name to the (frame_min, frame_max) range of its sequence.

    Markers are sorted by frame so that each sequence ends where the next marker
    begins (optionally trimmed to the NLA strips it overlaps); the final sequence
    ends at the latest frame_end across all un-muted NLA strips.
    """
    sequence_frame_ranges = dict()
    # Timeline markers need to be sorted so that we can determine the sequence start and end positions.
    sorted_markers = sorted(context.scene.timeline_markers, key=lambda marker: marker.frame)
    sorted_marker_names = [marker.name for marker in sorted_markers]
    for marker_name in options.marker_names:
        marker = context.scene.timeline_markers[marker_name]
        frame_min = marker.frame
        frame_max = 0
        # Determine the final frame of the sequence based on the next marker.
        # If no subsequent marker exists, use the maximum frame_end from all NLA strips.
        next_marker_index = sorted_marker_names.index(marker_name) + 1
        if next_marker_index < len(sorted_markers):
            # There is a next marker. Use that next marker's frame position as the last frame of this sequence.
            frame_max = sorted_markers[next_marker_index].frame
            if options.should_trim_timeline_marker_sequences:
                nla_strips = get_nla_strips_in_timeframe(animation_data, marker.frame, frame_max)
                if nla_strips:
                    # Clamp the sequence range to the extents of the strips inside it.
                    frame_max = min(frame_max, max(nla_strip.frame_end for nla_strip in nla_strips))
                    frame_min = max(frame_min, min(nla_strip.frame_start for nla_strip in nla_strips))
                else:
                    # No strips in between this marker and the next, just export this as a one-frame animation.
                    frame_max = frame_min
        else:
            # There is no next marker.
            # Find the final frame of all the NLA strips and use that as the last frame of this sequence.
            for nla_track in animation_data.nla_tracks:
                if nla_track.mute:
                    continue
                for strip in nla_track.strips:
                    frame_max = max(frame_max, strip.frame_end)
        if frame_min > frame_max:
            continue
        sequence_frame_ranges[marker_name] = int(frame_min), int(frame_max)
    return sequence_frame_ranges
def build_psa(context, options: PsaBuildOptions) -> Psa:
active_object = context.view_layer.objects.active
if active_object.type != 'ARMATURE':
raise RuntimeError('Selected object must be an Armature')
armature = active_object
if options.should_override_animation_data:
animation_data_object = options.animation_data_override
else:
animation_data_object = active_object
if armature.animation_data is None:
raise RuntimeError('No animation data for armature')
animation_data = animation_data_object.animation_data
if animation_data is None:
raise RuntimeError(f'No animation data for object \'{animation_data_object.name}\'')
# Ensure that we actually have items that we are going to be exporting.
if options.sequence_source == 'ACTIONS' and len(options.actions) == 0:
@@ -66,10 +111,11 @@ class PsaBuilder(object):
psa = Psa()
armature = active_object
bones = list(armature.data.bones)
# The order of the armature bones and the pose bones is not guaranteed to be the same.
# As as a result, we need to reconstruct the list of pose bones in the same order as the
# As a result, we need to reconstruct the list of pose bones in the same order as the
# armature bones.
bone_names = [x.name for x in bones]
pose_bones = [(bone_names.index(bone.name), bone) for bone in armature.pose.bones]
@@ -88,10 +134,13 @@ class PsaBuilder(object):
if len(bones) == 0:
raise RuntimeError('No bones available for export')
# Check that all bone names are valid.
check_bone_names(map(lambda bone: bone.name, bones))
# Build list of PSA bones.
for bone in bones:
psa_bone = Psa.Bone()
psa_bone.name = bytes(bone.name, encoding='utf-8')
psa_bone.name = bytes(bone.name, encoding='windows-1252')
try:
parent_index = bones.index(bone.parent)
@@ -101,13 +150,10 @@ class PsaBuilder(object):
psa_bone.parent_index = -1
if bone.parent is not None:
rotation = bone.matrix.to_quaternion()
rotation.x = -rotation.x
rotation.y = -rotation.y
rotation.z = -rotation.z
quat_parent = bone.parent.matrix.to_quaternion().inverted()
parent_head = quat_parent @ bone.parent.head
parent_tail = quat_parent @ bone.parent.tail
rotation = bone.matrix.to_quaternion().conjugated()
inverse_parent_rotation = bone.parent.matrix.to_quaternion().inverted()
parent_head = inverse_parent_rotation @ bone.parent.head
parent_tail = inverse_parent_rotation @ bone.parent.tail
location = (parent_tail - parent_head) + bone.head
else:
location = armature.matrix_local @ bone.head
@@ -150,11 +196,11 @@ class PsaBuilder(object):
frame_min, frame_max = [int(x) for x in action.frame_range]
export_sequence.nla_state.frame_min = frame_min
export_sequence.nla_state.frame_max = frame_max
export_sequence.fps = self.get_sequence_fps(context, options, [action])
export_sequence.fps = get_sequence_fps(context, options, [action])
export_sequences.append(export_sequence)
pass
elif options.sequence_source == 'TIMELINE_MARKERS':
sequence_frame_ranges = self.get_timeline_marker_sequence_frame_ranges(armature, context, options)
sequence_frame_ranges = get_timeline_marker_sequence_frame_ranges(animation_data, context, options)
for name, (frame_min, frame_max) in sequence_frame_ranges.items():
export_sequence = ExportSequence()
@@ -163,8 +209,8 @@ class PsaBuilder(object):
export_sequence.nla_state.frame_min = frame_min
export_sequence.nla_state.frame_max = frame_max
nla_strips_actions = set(
map(lambda x: x.action, get_nla_strips_in_timeframe(active_object, frame_min, frame_max)))
export_sequence.fps = self.get_sequence_fps(context, options, nla_strips_actions)
map(lambda x: x.action, get_nla_strips_in_timeframe(animation_data, frame_min, frame_max)))
export_sequence.fps = get_sequence_fps(context, options, nla_strips_actions)
export_sequences.append(export_sequence)
else:
raise ValueError(f'Unhandled sequence source: {options.sequence_source}')
@@ -178,22 +224,20 @@ class PsaBuilder(object):
frame_start_index = 0
for export_sequence in export_sequences:
armature.animation_data.action = export_sequence.nla_state.action
# Link the action to the animation data and update view layer.
animation_data.action = export_sequence.nla_state.action
context.view_layer.update()
psa_sequence = Psa.Sequence()
frame_min = export_sequence.nla_state.frame_min
frame_max = export_sequence.nla_state.frame_max
frame_count = frame_max - frame_min + 1
psa_sequence = Psa.Sequence()
psa_sequence.name = bytes(export_sequence.name, encoding='windows-1252')
psa_sequence.frame_count = frame_count
psa_sequence.frame_start_index = frame_start_index
psa_sequence.fps = export_sequence.fps
frame_count = frame_max - frame_min + 1
for frame in range(frame_count):
context.scene.frame_set(frame_min + frame)
@@ -236,44 +280,3 @@ class PsaBuilder(object):
psa.sequences[export_sequence.name] = psa_sequence
return psa
    def get_timeline_marker_sequence_frame_ranges(self, object, context, options: PsaBuilderOptions) -> Dict:
        """Map each selected timeline marker name to the (frame_min, frame_max) range of its sequence.

        Each sequence runs from its marker to the next marker by frame order
        (optionally trimmed to the NLA strips it overlaps), or to the last NLA
        strip end frame when there is no subsequent marker.
        """
        # Timeline markers need to be sorted so that we can determine the sequence start and end positions.
        sequence_frame_ranges = dict()
        sorted_timeline_markers = list(sorted(context.scene.timeline_markers, key=lambda x: x.frame))
        sorted_timeline_marker_names = list(map(lambda x: x.name, sorted_timeline_markers))
        for marker_name in options.marker_names:
            marker = context.scene.timeline_markers[marker_name]
            frame_min = marker.frame
            # Determine the final frame of the sequence based on the next marker.
            # If no subsequent marker exists, use the maximum frame_end from all NLA strips.
            marker_index = sorted_timeline_marker_names.index(marker_name)
            next_marker_index = marker_index + 1
            frame_max = 0
            if next_marker_index < len(sorted_timeline_markers):
                # There is a next marker. Use that next marker's frame position as the last frame of this sequence.
                frame_max = sorted_timeline_markers[next_marker_index].frame
                if options.should_trim_timeline_marker_sequences:
                    nla_strips = get_nla_strips_in_timeframe(object, marker.frame, frame_max)
                    if len(nla_strips) > 0:
                        # Clamp the sequence range to the extents of the strips inside it.
                        frame_max = min(frame_max, max(map(lambda nla_strip: nla_strip.frame_end, nla_strips)))
                        frame_min = max(frame_min, min(map(lambda nla_strip: nla_strip.frame_start, nla_strips)))
                    else:
                        # No strips in between this marker and the next, just export this as a one-frame animation.
                        frame_max = frame_min
            else:
                # There is no next marker.
                # Find the final frame of all the NLA strips and use that as the last frame of this sequence.
                for nla_track in object.animation_data.nla_tracks:
                    if nla_track.mute:
                        continue
                    for strip in nla_track.strips:
                        frame_max = max(frame_max, strip.frame_end)
            # NOTE(review): zero-length ranges are skipped here, which also drops
            # one-frame sequences (frame_min == frame_max) — confirm this is intended.
            if frame_min == frame_max:
                continue
            sequence_frame_ranges[marker_name] = int(frame_min), int(frame_max)
        return sequence_frame_ranges

View File

@@ -10,18 +10,13 @@ from bpy.props import BoolProperty, CollectionProperty, EnumProperty, FloatPrope
from bpy.types import Action, Operator, PropertyGroup, UIList
from bpy_extras.io_utils import ExportHelper
from .builder import PsaBuilder, PsaBuilderOptions
from .builder import PsaBuildOptions, build_psa
from .data import *
from ..helpers import *
from ..types import BoneGroupListItem
class PsaExporter(object):
def __init__(self, psa: Psa):
self.psa: Psa = psa
# This method is shared by both PSA/K file formats, move this?
@staticmethod
def export_psa(psa: Psa, path: str):
def write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None):
section = Section()
section.name = name
@@ -32,13 +27,11 @@ class PsaExporter(object):
if data is not None:
for datum in data:
fp.write(datum)
def export(self, path: str):
with open(path, 'wb') as fp:
self.write_section(fp, b'ANIMHEAD')
self.write_section(fp, b'BONENAMES', Psa.Bone, self.psa.bones)
self.write_section(fp, b'ANIMINFO', Psa.Sequence, list(self.psa.sequences.values()))
self.write_section(fp, b'ANIMKEYS', Psa.Key, self.psa.keys)
write_section(fp, b'ANIMHEAD')
write_section(fp, b'BONENAMES', Psa.Bone, psa.bones)
write_section(fp, b'ANIMINFO', Psa.Sequence, list(psa.sequences.values()))
write_section(fp, b'ANIMKEYS', Psa.Key, psa.keys)
class PsaExportActionListItem(PropertyGroup):
@@ -64,12 +57,26 @@ def should_use_original_sequence_names_updated(_, context):
update_action_names(context)
def psa_export_property_group_animation_data_override_poll(_context, obj):
    """Poll callback for the animation data override object picker.

    Only objects that actually carry animation data are valid override targets.
    """
    if obj.animation_data is None:
        return False
    return True
class PsaExportPropertyGroup(PropertyGroup):
root_motion: BoolProperty(
name='Root Motion',
options=set(),
default=False,
description='When set, the root bone will be transformed as it appears in the scene',
description='The root bone will be transformed as it appears in the scene',
)
should_override_animation_data: BoolProperty(
name='Override Animation Data',
options=set(),
default=False,
description='Use the animation data from a different object instead of the selected object'
)
animation_data_override: PointerProperty(
type=bpy.types.Object,
poll=psa_export_property_group_animation_data_override_poll
)
sequence_source: EnumProperty(
name='Source',
@@ -191,8 +198,11 @@ class PsaExportOperator(Operator, ExportHelper):
# SOURCE
layout.prop(pg, 'sequence_source', text='Source')
# ROOT MOTION
layout.prop(pg, 'root_motion', text='Root Motion')
if pg.sequence_source == 'TIMELINE_MARKERS':
# ANIMDATA SOURCE
layout.prop(pg, 'should_override_animation_data')
if pg.should_override_animation_data:
layout.prop(pg, 'animation_data_override', text='')
# SELECT ALL/NONE
row = layout.row(align=True)
@@ -249,15 +259,17 @@ class PsaExportOperator(Operator, ExportHelper):
layout.template_list('PSX_UL_BoneGroupList', '', pg, 'bone_group_list', pg, 'bone_group_list_index',
rows=rows)
def should_action_be_selected_by_default(self, action):
return action is not None and action.asset_data is None
layout.separator()
# ROOT MOTION
layout.prop(pg, 'root_motion', text='Root Motion')
def is_action_for_armature(self, action):
if len(action.fcurves) == 0:
return False
bone_names = set([x.name for x in self.armature.data.bones])
for fcurve in action.fcurves:
match = re.match(r'pose\.bones\["(.+)"\].\w+', fcurve.data_path)
match = re.match(r'pose\.bones\["(.+)"].\w+', fcurve.data_path)
if not match:
continue
bone_name = match.group(1)
@@ -273,7 +285,7 @@ class PsaExportOperator(Operator, ExportHelper):
if context.view_layer.objects.active.type != 'ARMATURE':
raise RuntimeError('The selected object must be an armature')
def invoke(self, context, event):
def invoke(self, context, _event):
try:
self._check_context(context)
except RuntimeError as e:
@@ -290,7 +302,7 @@ class PsaExportOperator(Operator, ExportHelper):
item = pg.action_list.add()
item.action = action
item.name = action.name
item.is_selected = self.should_action_be_selected_by_default(action)
item.is_selected = False
update_action_names(context)
@@ -299,6 +311,7 @@ class PsaExportOperator(Operator, ExportHelper):
for marker in context.scene.timeline_markers:
item = pg.marker_list.add()
item.name = marker.name
item.is_selected = False
if len(pg.action_list) == 0 and len(pg.marker_list) == 0:
# If there are no actions at all, we have nothing to export, so just cancel the operation.
@@ -318,7 +331,9 @@ class PsaExportOperator(Operator, ExportHelper):
actions = [x.action for x in pg.action_list if x.is_selected]
marker_names = [x.name for x in pg.marker_list if x.is_selected]
options = PsaBuilderOptions()
options = PsaBuildOptions()
options.should_override_animation_data = pg.should_override_animation_data
options.animation_data_override = pg.animation_data_override
options.fps_source = pg.fps_source
options.fps_custom = pg.fps_custom
options.sequence_source = pg.sequence_source
@@ -332,16 +347,14 @@ class PsaExportOperator(Operator, ExportHelper):
options.sequence_name_suffix = pg.sequence_name_suffix
options.root_motion = pg.root_motion
builder = PsaBuilder()
try:
psa = builder.build(context, options)
psa = build_psa(context, options)
except RuntimeError as e:
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
return {'CANCELLED'}
exporter = PsaExporter(psa)
exporter.export(self.filepath)
export_psa(psa, self.filepath)
return {'FINISHED'}
@@ -368,8 +381,7 @@ def filter_sequences(pg: PsaExportPropertyGroup, sequences: bpy.types.bpy_prop_c
return flt_flags
def get_visible_sequences(pg: PsaExportPropertyGroup, sequences: bpy.types.bpy_prop_collection) -> List[
PsaExportActionListItem]:
def get_visible_sequences(pg: PsaExportPropertyGroup, sequences: bpy.types.bpy_prop_collection) -> List[PsaExportActionListItem]:
visible_sequences = []
for i, flag in enumerate(filter_sequences(pg, sequences)):
if bool(flag & (1 << 30)):
@@ -401,10 +413,9 @@ class PSA_UL_ExportSequenceList(UIList):
subrow = row.row(align=True)
subrow.prop(pg, 'sequence_filter_asset', icon_only=True, icon='ASSET_MANAGER')
def filter_items(self, context, data, property):
def filter_items(self, context, data, prop):
pg = context.scene.psa_export
actions = getattr(data, property)
actions = getattr(data, prop)
flt_flags = filter_sequences(pg, actions)
flt_neworder = bpy.types.UI_UL_list.sort_items_by_name(actions, 'name')
return flt_flags, flt_neworder

View File

@@ -26,11 +26,7 @@ class PsaImportOptions(object):
self.action_name_prefix = ''
class PsaImporter(object):
def __init__(self):
pass
def import_psa(self, psa_reader: PsaReader, armature_object, options: PsaImportOptions):
def import_psa(psa_reader: PsaReader, armature_object, options: PsaImportOptions):
sequences = map(lambda x: psa_reader.sequences[x], options.sequence_names)
armature_data = armature_object.data
@@ -104,7 +100,8 @@ class PsaImporter(object):
import_bone.parent = import_bones_dict[armature_bone.parent.name]
# Calculate the original location & rotation of each bone (in world-space maybe?)
if armature_bone.get('orig_quat') is not None:
# TODO: ideally we don't rely on bone auxiliary data like this, the non-aux data path is incorrect (animations are flipped 180 around Z)
# TODO: ideally we don't rely on bone auxiliary data like this, the non-aux data path is incorrect
# (animations are flipped 180 around Z)
import_bone.orig_quat = Quaternion(armature_bone['orig_quat'])
import_bone.orig_loc = Vector(armature_bone['orig_loc'])
import_bone.post_quat = Quaternion(armature_bone['post_quat'])
@@ -389,6 +386,9 @@ class PsaImportSequencesFromText(Operator):
def execute(self, context):
pg = context.scene.psa_import
if pg.select_text is None:
self.report({'ERROR_INVALID_CONTEXT'}, 'No text block selected')
return {'CANCELLED'}
contents = pg.select_text.as_string()
count = 0
for line in contents.split('\n'):
@@ -403,7 +403,7 @@ class PsaImportSequencesFromText(Operator):
class PsaImportSequencesSelectAll(Operator):
bl_idname = 'psa_import.sequences_select_all'
bl_label = 'All'
bl_description = 'Select all visible sequences'
bl_description = 'Select all sequences'
bl_options = {'INTERNAL'}
@classmethod
@@ -589,7 +589,7 @@ class PsaImportOperator(Operator):
options.should_write_metadata = pg.should_write_metadata
options.should_write_keyframes = pg.should_write_keyframes
PsaImporter().import_psa(psa_reader, context.view_layer.objects.active, options)
import_psa(psa_reader, context.view_layer.objects.active, options)
self.report({'INFO'}, f'Imported {len(sequence_names)} action(s)')

View File

@@ -7,8 +7,8 @@ from .data import *
class PsaReader(object):
"""
This class reads the sequences and bone information immediately upon instantiation and hold onto a file handle.
The key data is not read into memory upon instantiation due to it's potentially very large size.
This class reads the sequences and bone information immediately upon instantiation and holds onto a file handle.
The keyframe data is not read into memory upon instantiation due to its potentially very large size.
To read the key data for a particular sequence, call `read_sequence_keys`.
"""

View File

@@ -1,7 +1,10 @@
from collections import OrderedDict
from typing import Dict, List
from .data import *
from ..helpers import *
import bmesh
import bpy
class PskInputObjects(object):
@@ -10,43 +13,40 @@ class PskInputObjects(object):
self.armature_object = None
class PskBuilderOptions(object):
class PskBuildOptions(object):
def __init__(self):
self.bone_filter_mode = 'ALL'
self.bone_group_indices = []
self.bone_group_indices: List[int] = []
self.use_raw_mesh_data = True
self.material_names: List[str] = []
class PskBuilder(object):
def __init__(self):
pass
@staticmethod
def get_input_objects(context) -> PskInputObjects:
def get_psk_input_objects(context) -> PskInputObjects:
input_objects = PskInputObjects()
for obj in context.view_layer.objects.selected:
if obj.type != 'MESH':
raise RuntimeError(f'Selected object "{obj.name}" is not a mesh')
for selected_object in context.view_layer.objects.selected:
if selected_object.type != 'MESH':
raise RuntimeError(f'Selected object "{selected_object.name}" is not a mesh')
input_objects.mesh_objects = context.view_layer.objects.selected
if len(input_objects.mesh_objects) == 0:
raise RuntimeError('At least one mesh must be selected')
for obj in input_objects.mesh_objects:
if len(obj.data.materials) == 0:
raise RuntimeError(f'Mesh "{obj.name}" must have at least one material')
for mesh_object in input_objects.mesh_objects:
if len(mesh_object.data.materials) == 0:
raise RuntimeError(f'Mesh "{mesh_object.name}" must have at least one material')
# Ensure that there are either no armature modifiers (static mesh)
# or that there is exactly one armature modifier object shared between
# all selected meshes
armature_modifier_objects = set()
for obj in input_objects.mesh_objects:
modifiers = [x for x in obj.modifiers if x.type == 'ARMATURE']
for mesh_object in input_objects.mesh_objects:
modifiers = [x for x in mesh_object.modifiers if x.type == 'ARMATURE']
if len(modifiers) == 0:
continue
elif len(modifiers) > 1:
raise RuntimeError(f'Mesh "{obj.name}" must have only one armature modifier')
raise RuntimeError(f'Mesh "{mesh_object.name}" must have only one armature modifier')
armature_modifier_objects.add(modifiers[0].object)
if len(armature_modifier_objects) > 1:
@@ -56,14 +56,14 @@ class PskBuilder(object):
return input_objects
def build(self, context, options: PskBuilderOptions) -> Psk:
input_objects = PskBuilder.get_input_objects(context)
def build_psk(context, options: PskBuildOptions) -> Psk:
input_objects = get_psk_input_objects(context)
armature_object = input_objects.armature_object
psk = Psk()
bones = []
materials = OrderedDict()
if armature_object is None:
# If the mesh has no armature object, simply assign it a dummy bone at the root to satisfy the requirement
@@ -80,6 +80,9 @@ class PskBuilder(object):
bone_names = get_export_bone_names(armature_object, options.bone_filter_mode, options.bone_group_indices)
bones = [armature_object.data.bones[bone_name] for bone_name in bone_names]
# Check that all bone names are valid.
check_bone_names(map(lambda x: x.name, bones))
for bone in bones:
psk_bone = Psk.Bone()
psk_bone.name = bytes(bone.name, encoding='windows-1252')
@@ -91,71 +94,92 @@ class PskBuilder(object):
psk_bone.parent_index = parent_index
psk.bones[parent_index].children_count += 1
except ValueError:
psk_bone.parent_index = 0
psk_bone.parent_index = -1
if bone.parent is not None:
rotation = bone.matrix.to_quaternion()
rotation.x = -rotation.x
rotation.y = -rotation.y
rotation.z = -rotation.z
rotation = bone.matrix.to_quaternion().conjugated()
quat_parent = bone.parent.matrix.to_quaternion().inverted()
parent_head = quat_parent @ bone.parent.head
parent_tail = quat_parent @ bone.parent.tail
location = (parent_tail - parent_head) + bone.head
else:
location = armature_object.matrix_local @ bone.head
rot_matrix = bone.matrix @ armature_object.matrix_local.to_3x3()
local_matrix = armature_object.matrix_local
location = local_matrix @ bone.head
rot_matrix = bone.matrix @ local_matrix.to_3x3()
rotation = rot_matrix.to_quaternion()
psk_bone.location.x = location.x
psk_bone.location.y = location.y
psk_bone.location.z = location.z
psk_bone.rotation.w = rotation.w
psk_bone.rotation.x = rotation.x
psk_bone.rotation.y = rotation.y
psk_bone.rotation.z = rotation.z
psk_bone.rotation.w = rotation.w
psk.bones.append(psk_bone)
for object in input_objects.mesh_objects:
# MATERIALS
material_names = options.material_names
for material_name in material_names:
psk_material = Psk.Material()
psk_material.name = bytes(material_name, encoding='windows-1252')
psk_material.texture_index = len(psk.materials)
psk.materials.append(psk_material)
for input_mesh_object in input_objects.mesh_objects:
# MATERIALS
material_indices = [material_names.index(material.name) for material in input_mesh_object.data.materials]
if options.use_raw_mesh_data:
mesh_object = input_mesh_object
mesh_data = input_mesh_object.data
else:
# Create a copy of the mesh object after non-armature modifiers are applied.
# Temporarily deactivate any armature modifiers on the input mesh object.
active_armature_modifiers = [x for x in filter(lambda x: x.type == 'ARMATURE' and x.is_active, input_mesh_object.modifiers)]
for modifier in active_armature_modifiers:
modifier.show_viewport = False
depsgraph = context.evaluated_depsgraph_get()
bm = bmesh.new()
bm.from_object(input_mesh_object, depsgraph)
mesh_data = bpy.data.meshes.new('')
bm.to_mesh(mesh_data)
del bm
mesh_object = bpy.data.objects.new('', mesh_data)
mesh_object.matrix_world = input_mesh_object.matrix_world
# Copy the vertex groups
for vertex_group in input_mesh_object.vertex_groups:
mesh_object.vertex_groups.new(name=vertex_group.name)
# Reactivate previously active armature modifiers
for modifier in active_armature_modifiers:
modifier.show_viewport = True
vertex_offset = len(psk.points)
# VERTICES
for vertex in object.data.vertices:
for vertex in mesh_data.vertices:
point = Vector3()
v = object.matrix_world @ vertex.co
v = mesh_object.matrix_world @ vertex.co
point.x = v.x
point.y = v.y
point.z = v.z
psk.points.append(point)
uv_layer = object.data.uv_layers.active.data
# MATERIALS
material_indices = []
for i, m in enumerate(object.data.materials):
if m is None:
raise RuntimeError('Material cannot be empty (index ' + str(i) + ')')
if m.name in materials:
# Material already evaluated, just get its index.
material_index = list(materials.keys()).index(m.name)
else:
# New material.
material = Psk.Material()
material.name = bytes(m.name, encoding='utf-8')
material.texture_index = len(psk.materials)
psk.materials.append(material)
materials[m.name] = m
material_index = material.texture_index
material_indices.append(material_index)
uv_layer = mesh_data.uv_layers.active.data
# WEDGES
object.data.calc_loop_triangles()
mesh_data.calc_loop_triangles()
# Build a list of non-unique wedges.
wedges = []
for loop_index, loop in enumerate(object.data.loops):
for loop_index, loop in enumerate(mesh_data.loops):
wedge = Psk.Wedge()
wedge.point_index = loop.vertex_index + vertex_offset
wedge.u, wedge.v = uv_layer[loop_index].uv
@@ -163,13 +187,13 @@ class PskBuilder(object):
wedges.append(wedge)
# Assign material indices to the wedges.
for triangle in object.data.loop_triangles:
for triangle in mesh_data.loop_triangles:
for loop_index in triangle.loops:
wedges[loop_index].material_index = material_indices[triangle.material_index]
# Populate the list of wedges with unique wedges & build a look-up table of loop indices to wedge indices
wedge_indices = {}
loop_wedge_indices = [-1] * len(object.data.loops)
loop_wedge_indices = [-1] * len(mesh_data.loops)
for loop_index, wedge in enumerate(wedges):
wedge_hash = hash(wedge)
if wedge_hash in wedge_indices:
@@ -181,8 +205,8 @@ class PskBuilder(object):
loop_wedge_indices[loop_index] = wedge_index
# FACES
poly_groups, groups = object.data.calc_smooth_groups(use_bitflags=True)
for f in object.data.loop_triangles:
poly_groups, groups = mesh_data.calc_smooth_groups(use_bitflags=True)
for f in mesh_data.loop_triangles:
face = Psk.Face()
face.material_index = material_indices[f.material_index]
face.wedge_indices[0] = loop_wedge_indices[f.loops[2]]
@@ -196,7 +220,7 @@ class PskBuilder(object):
# Because the vertex groups may contain entries for which there is no matching bone in the armature,
# we must filter them out and not export any weights for these vertex groups.
bone_names = [x.name for x in bones]
vertex_group_names = [x.name for x in object.vertex_groups]
vertex_group_names = [x.name for x in mesh_object.vertex_groups]
vertex_group_bone_indices = dict()
for vertex_group_index, vertex_group_name in enumerate(vertex_group_names):
try:
@@ -215,12 +239,12 @@ class PskBuilder(object):
break
except ValueError:
bone = bone.parent
for vertex_group_index, vertex_group in enumerate(object.vertex_groups):
for vertex_group_index, vertex_group in enumerate(mesh_object.vertex_groups):
if vertex_group_index not in vertex_group_bone_indices:
# Vertex group has no associated bone, skip it.
continue
bone_index = vertex_group_bone_indices[vertex_group_index]
for vertex_index in range(len(object.data.vertices)):
for vertex_index in range(len(mesh_data.vertices)):
try:
weight = vertex_group.weight(vertex_index)
except RuntimeError:
@@ -233,4 +257,9 @@ class PskBuilder(object):
w.weight = weight
psk.weights.append(w)
if not options.use_raw_mesh_data:
bpy.data.objects.remove(mesh_object)
bpy.data.meshes.remove(mesh_data)
del mesh_data
return psk

View File

@@ -1,10 +1,10 @@
from typing import Type
from bpy.props import StringProperty, CollectionProperty, IntProperty, EnumProperty
from bpy.types import Operator, PropertyGroup
from bpy.props import BoolProperty, StringProperty, CollectionProperty, IntProperty, EnumProperty, PointerProperty
from bpy.types import Operator, PropertyGroup, UIList, Material
from bpy_extras.io_utils import ExportHelper
from .builder import PskBuilder, PskBuilderOptions
from .builder import build_psk, PskBuildOptions, get_psk_input_objects
from .data import *
from ..helpers import populate_bone_group_list
from ..types import BoneGroupListItem
@@ -15,13 +15,7 @@ MAX_BONE_COUNT = 256
MAX_MATERIAL_COUNT = 256
class PskExporter(object):
def __init__(self, psk: Psk):
self.psk: Psk = psk
@staticmethod
def write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None):
def _write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None):
section = Section()
section.name = name
if data_type is not None and data is not None:
@@ -32,22 +26,23 @@ class PskExporter(object):
for datum in data:
fp.write(datum)
def export(self, path: str):
if len(self.psk.wedges) > MAX_WEDGE_COUNT:
raise RuntimeError(f'Number of wedges ({len(self.psk.wedges)}) exceeds limit of {MAX_WEDGE_COUNT}')
if len(self.psk.bones) > MAX_BONE_COUNT:
raise RuntimeError(f'Number of bones ({len(self.psk.bones)}) exceeds limit of {MAX_BONE_COUNT}')
if len(self.psk.points) > MAX_POINT_COUNT:
raise RuntimeError(f'Numbers of vertices ({len(self.psk.points)}) exceeds limit of {MAX_POINT_COUNT}')
if len(self.psk.materials) > MAX_MATERIAL_COUNT:
raise RuntimeError(f'Number of materials ({len(self.psk.materials)}) exceeds limit of {MAX_MATERIAL_COUNT}')
def export_psk(psk: Psk, path: str):
if len(psk.wedges) > MAX_WEDGE_COUNT:
raise RuntimeError(f'Number of wedges ({len(psk.wedges)}) exceeds limit of {MAX_WEDGE_COUNT}')
if len(psk.bones) > MAX_BONE_COUNT:
raise RuntimeError(f'Number of bones ({len(psk.bones)}) exceeds limit of {MAX_BONE_COUNT}')
if len(psk.points) > MAX_POINT_COUNT:
raise RuntimeError(f'Numbers of vertices ({len(psk.points)}) exceeds limit of {MAX_POINT_COUNT}')
if len(psk.materials) > MAX_MATERIAL_COUNT:
raise RuntimeError(f'Number of materials ({len(psk.materials)}) exceeds limit of {MAX_MATERIAL_COUNT}')
with open(path, 'wb') as fp:
self.write_section(fp, b'ACTRHEAD')
self.write_section(fp, b'PNTS0000', Vector3, self.psk.points)
_write_section(fp, b'ACTRHEAD')
_write_section(fp, b'PNTS0000', Vector3, psk.points)
wedges = []
for index, w in enumerate(self.psk.wedges):
for index, w in enumerate(psk.wedges):
wedge = Psk.Wedge16()
wedge.material_index = w.material_index
wedge.u = w.u
@@ -55,15 +50,15 @@ class PskExporter(object):
wedge.point_index = w.point_index
wedges.append(wedge)
self.write_section(fp, b'VTXW0000', Psk.Wedge16, wedges)
self.write_section(fp, b'FACE0000', Psk.Face, self.psk.faces)
self.write_section(fp, b'MATT0000', Psk.Material, self.psk.materials)
self.write_section(fp, b'REFSKELT', Psk.Bone, self.psk.bones)
self.write_section(fp, b'RAWWEIGHTS', Psk.Weight, self.psk.weights)
_write_section(fp, b'VTXW0000', Psk.Wedge16, wedges)
_write_section(fp, b'FACE0000', Psk.Face, psk.faces)
_write_section(fp, b'MATT0000', Psk.Material, psk.materials)
_write_section(fp, b'REFSKELT', Psk.Bone, psk.bones)
_write_section(fp, b'RAWWEIGHTS', Psk.Weight, psk.weights)
def is_bone_filter_mode_item_available(context, identifier):
input_objects = PskBuilder.get_input_objects(context)
input_objects = get_psk_input_objects(context)
armature_object = input_objects.armature_object
if identifier == 'BONE_GROUPS':
if not armature_object or not armature_object.pose or not armature_object.pose.bone_groups:
@@ -72,6 +67,75 @@ def is_bone_filter_mode_item_available(context, identifier):
return True
class PSK_UL_MaterialList(UIList):
    """UI list widget that displays the exporter's material slots."""

    def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
        # One row per entry: the material's name next to a material icon.
        item_row = layout.row()
        item_row.label(text=str(item.material_name), icon='MATERIAL')
class MaterialListItem(PropertyGroup):
    """An entry in the exporter's reorderable material list."""
    # Name of the Blender material this entry represents.
    material_name: StringProperty()
    # Ordinal position assigned when the list was populated.
    index: IntProperty()

    @property
    def name(self):
        # Expose the material name as the item's display/search name.
        return self.material_name
def populate_material_list(mesh_objects, material_list):
    """Fill ``material_list`` with the unique material names used by ``mesh_objects``.

    The collection is cleared first. Names are added in first-encountered order
    (iterating objects, then material slots), and each added item records its
    ordinal index.

    Args:
        mesh_objects: Iterable of mesh objects whose ``data.materials`` are scanned.
        material_list: A collection property supporting ``clear()`` and ``add()``.

    Raises:
        RuntimeError: If any material slot on any mesh is empty.
    """
    material_list.clear()

    material_names = []
    seen_names = set()  # O(1) membership test; material_names preserves insertion order
    for mesh_object in mesh_objects:
        for i, material in enumerate(mesh_object.data.materials):
            # TODO: put this in the poll arg?
            if material is None:
                raise RuntimeError(f'Material cannot be empty (index {i})')
            if material.name not in seen_names:
                seen_names.add(material.name)
                material_names.append(material.name)

    for index, material_name in enumerate(material_names):
        m = material_list.add()
        m.material_name = material_name
        m.index = index
class PskMaterialListItemMoveUp(Operator):
    """Operator that moves the selected material one slot towards the top."""
    bl_idname = 'psk_export.material_list_item_move_up'
    bl_label = 'Move Up'
    bl_options = {'INTERNAL'}
    bl_description = 'Move the selected material up one slot'

    @classmethod
    def poll(cls, context):
        # Only available when the selection is not already at the top.
        return context.scene.psk_export.material_list_index > 0

    def execute(self, context):
        pg = context.scene.psk_export
        new_index = pg.material_list_index - 1
        pg.material_list.move(pg.material_list_index, new_index)
        pg.material_list_index = new_index
        return {"FINISHED"}
class PskMaterialListItemMoveDown(Operator):
    """Operator that moves the selected material one slot towards the bottom."""
    bl_idname = 'psk_export.material_list_item_move_down'
    bl_label = 'Move Down'
    bl_options = {'INTERNAL'}
    bl_description = 'Move the selected material down one slot'

    @classmethod
    def poll(cls, context):
        # Only available when the selection is not already at the bottom.
        pg = context.scene.psk_export
        return pg.material_list_index < len(pg.material_list) - 1

    def execute(self, context):
        pg = context.scene.psk_export
        new_index = pg.material_list_index + 1
        pg.material_list.move(pg.material_list_index, new_index)
        pg.material_list_index = new_index
        return {"FINISHED"}
class PskExportOperator(Operator, ExportHelper):
bl_idname = 'export.psk'
bl_label = 'Export'
@@ -88,7 +152,7 @@ class PskExportOperator(Operator, ExportHelper):
def invoke(self, context, event):
try:
input_objects = PskBuilder.get_input_objects(context)
input_objects = get_psk_input_objects(context)
except RuntimeError as e:
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
return {'CANCELLED'}
@@ -97,6 +161,7 @@ class PskExportOperator(Operator, ExportHelper):
# Populate bone groups list.
populate_bone_group_list(input_objects.armature_object, pg.bone_group_list)
populate_material_list(input_objects.mesh_objects, pg.material_list)
context.window_manager.fileselect_add(self)
@@ -105,7 +170,7 @@ class PskExportOperator(Operator, ExportHelper):
@classmethod
def poll(cls, context):
try:
PskBuilder.get_input_objects(context)
get_psk_input_objects(context)
except RuntimeError as e:
cls.poll_message_set(str(e))
return False
@@ -116,11 +181,12 @@ class PskExportOperator(Operator, ExportHelper):
scene = context.scene
pg = scene.psk_export
layout.prop(pg, 'use_raw_mesh_data')
# BONES
box = layout.box()
box.label(text='Bones', icon='BONE_DATA')
layout.label(text='Bones', icon='BONE_DATA')
bone_filter_mode_items = pg.bl_rna.properties['bone_filter_mode'].enum_items_static
row = box.row(align=True)
row = layout.row(align=True)
for item in bone_filter_mode_items:
identifier = item.identifier
item_layout = row.row(align=True)
@@ -128,20 +194,32 @@ class PskExportOperator(Operator, ExportHelper):
item_layout.enabled = is_bone_filter_mode_item_available(context, identifier)
if pg.bone_filter_mode == 'BONE_GROUPS':
row = box.row()
row = layout.row()
rows = max(3, min(len(pg.bone_group_list), 10))
row.template_list('PSX_UL_BoneGroupList', '', pg, 'bone_group_list', pg, 'bone_group_list_index', rows=rows)
layout.separator()
# MATERIALS
layout.label(text='Materials', icon='MATERIAL')
row = layout.row()
rows = max(3, min(len(pg.bone_group_list), 10))
row.template_list('PSK_UL_MaterialList', '', pg, 'material_list', pg, 'material_list_index', rows=rows)
col = row.column(align=True)
col.operator(PskMaterialListItemMoveUp.bl_idname, text='', icon='TRIA_UP')
col.operator(PskMaterialListItemMoveDown.bl_idname, text='', icon='TRIA_DOWN')
def execute(self, context):
pg = context.scene.psk_export
builder = PskBuilder()
options = PskBuilderOptions()
options = PskBuildOptions()
options.bone_filter_mode = pg.bone_filter_mode
options.bone_group_indices = [x.index for x in pg.bone_group_list if x.is_selected]
options.use_raw_mesh_data = pg.use_raw_mesh_data
options.material_names = [m.material_name for m in pg.material_list]
try:
psk = builder.build(context, options)
exporter = PskExporter(psk)
exporter.export(self.filepath)
psk = build_psk(context, options)
export_psk(psk, self.filepath)
except RuntimeError as e:
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
return {'CANCELLED'}
@@ -161,9 +239,16 @@ class PskExportPropertyGroup(PropertyGroup):
)
bone_group_list: CollectionProperty(type=BoneGroupListItem)
bone_group_list_index: IntProperty(default=0)
use_raw_mesh_data: BoolProperty(default=False, name='Raw Mesh Data', description='No modifiers will be evaluated as part of the exported mesh')
material_list: CollectionProperty(type=MaterialListItem)
material_list_index: IntProperty(default=0)
classes = (
MaterialListItem,
PSK_UL_MaterialList,
PskMaterialListItemMoveUp,
PskMaterialListItemMoveDown,
PskExportOperator,
PskExportPropertyGroup
PskExportPropertyGroup,
)

View File

@@ -12,25 +12,26 @@ from bpy_extras.io_utils import ImportHelper
from mathutils import Quaternion, Vector, Matrix
from .data import Psk
from .reader import PskReader
from .reader import read_psk
from ..helpers import rgb_to_srgb
class PskImportOptions(object):
    """Aggregates all user-configurable settings for a single PSK import."""

    def __init__(self):
        # Name assigned to the created armature/mesh data blocks.
        self.name: str = ''
        # Mesh import toggles.
        self.should_import_mesh: bool = True
        self.should_import_vertex_colors: bool = True
        self.vertex_color_space: str = 'sRGB'
        self.should_import_vertex_normals: bool = True
        self.should_import_extra_uvs: bool = True
        # Skeleton import toggles.
        self.should_import_skeleton: bool = True
        # Display length of the created edit bones.
        self.bone_length: float = 1.0
class PskImporter(object):
def __init__(self):
pass
def import_psk(psk: Psk, context, options: PskImportOptions):
armature_object = None
def import_psk(self, psk: Psk, context, options: PskImportOptions):
if options.should_import_skeleton:
# ARMATURE
armature_data = bpy.data.armatures.new(options.name)
armature_object = bpy.data.objects.new(options.name, armature_data)
@@ -110,6 +111,7 @@ class PskImporter(object):
edit_bone['post_quat'] = import_bone.local_rotation.conjugated()
# MESH
if options.should_import_mesh:
mesh_data = bpy.data.meshes.new(options.name)
mesh_object = bpy.data.objects.new(options.name, mesh_data)
@@ -211,20 +213,21 @@ class PskImporter(object):
# Get a list of all bones that have weights associated with them.
vertex_group_bone_indices = set(map(lambda weight: weight.bone_index, psk.weights))
for import_bone in map(lambda x: import_bones[x], sorted(list(vertex_group_bone_indices))):
import_bone.vertex_group = mesh_object.vertex_groups.new(
name=import_bone.psk_bone.name.decode('windows-1252'))
vertex_groups = [None] * len(psk.bones)
for bone_index, psk_bone in map(lambda x: (x, psk.bones[x]), vertex_group_bone_indices):
vertex_groups[bone_index] = mesh_object.vertex_groups.new(name=psk_bone.name.decode('windows-1252'))
for weight in psk.weights:
import_bones[weight.bone_index].vertex_group.add((weight.point_index,), weight.weight, 'ADD')
vertex_groups[weight.bone_index].add((weight.point_index,), weight.weight, 'ADD')
context.scene.collection.objects.link(mesh_object)
# Add armature modifier to our mesh object.
if options.should_import_skeleton:
armature_modifier = mesh_object.modifiers.new(name='Armature', type='ARMATURE')
armature_modifier.object = armature_object
mesh_object.parent = armature_object
context.scene.collection.objects.link(mesh_object)
try:
bpy.ops.object.mode_set(mode='OBJECT')
except:
@@ -260,6 +263,18 @@ class PskImportPropertyGroup(PropertyGroup):
options=set(),
description='Import extra UV maps from PSKX files, if available'
)
should_import_mesh: BoolProperty(
default=True,
name='Import Mesh',
options=set(),
description='Import mesh'
)
should_import_skeleton: BoolProperty(
default=True,
name='Import Skeleton',
options=set(),
description='Import skeleton'
)
bone_length: FloatProperty(
default=1.0,
min=sys.float_info.epsilon,
@@ -273,7 +288,7 @@ class PskImportPropertyGroup(PropertyGroup):
class PskImportOperator(Operator, ImportHelper):
bl_idname = 'import.psk'
bl_label = 'Export'
bl_label = 'Import'
bl_options = {'INTERNAL', 'UNDO'}
__doc__ = 'Load a PSK file'
filename_ext = '.psk'
@@ -286,29 +301,42 @@ class PskImportOperator(Operator, ImportHelper):
def execute(self, context):
pg = context.scene.psk_import
reader = PskReader()
psk = reader.read(self.filepath)
psk = read_psk(self.filepath)
options = PskImportOptions()
options.name = os.path.splitext(os.path.basename(self.filepath))[0]
options.should_import_mesh = pg.should_import_mesh
options.should_import_extra_uvs = pg.should_import_extra_uvs
options.should_import_vertex_colors = pg.should_import_vertex_colors
options.should_import_vertex_normals = pg.should_import_vertex_normals
options.vertex_color_space = pg.vertex_color_space
options.should_import_skeleton = pg.should_import_skeleton
options.bone_length = pg.bone_length
PskImporter().import_psk(psk, context, options)
import_psk(psk, context, options)
return {'FINISHED'}
def draw(self, context):
pg = context.scene.psk_import
layout = self.layout
layout.use_property_split = True
layout.use_property_decorate = False
layout.prop(pg, 'should_import_vertex_normals')
layout.prop(pg, 'should_import_extra_uvs')
layout.prop(pg, 'should_import_vertex_colors')
layout.prop(pg, 'should_import_mesh')
row = layout.column()
row.use_property_split = True
row.use_property_decorate = False
if pg.should_import_mesh:
row.prop(pg, 'should_import_vertex_normals')
row.prop(pg, 'should_import_extra_uvs')
row.prop(pg, 'should_import_vertex_colors')
if pg.should_import_vertex_colors:
layout.prop(pg, 'vertex_color_space')
layout.prop(pg, 'bone_length')
row.prop(pg, 'vertex_color_space')
layout.prop(pg, 'should_import_skeleton')
row = layout.column()
row.use_property_split = True
row.use_property_decorate = False
if pg.should_import_skeleton:
row.prop(pg, 'bone_length')
classes = (

View File

@@ -3,13 +3,7 @@ import ctypes
from .data import *
class PskReader(object):
def __init__(self):
pass
@staticmethod
def read_types(fp, data_class: ctypes.Structure, section: Section, data):
def _read_types(fp, data_class: ctypes.Structure, section: Section, data):
buffer_length = section.data_size * section.data_count
buffer = fp.read(buffer_length)
offset = 0
@@ -17,7 +11,8 @@ class PskReader(object):
data.append(data_class.from_buffer_copy(buffer, offset))
offset += section.data_size
def read(self, path) -> Psk:
def read_psk(path) -> Psk:
psk = Psk()
with open(path, 'rb') as fp:
while fp.read(1):
@@ -26,30 +21,30 @@ class PskReader(object):
if section.name == b'ACTRHEAD':
pass
elif section.name == b'PNTS0000':
PskReader.read_types(fp, Vector3, section, psk.points)
_read_types(fp, Vector3, section, psk.points)
elif section.name == b'VTXW0000':
if section.data_size == ctypes.sizeof(Psk.Wedge16):
PskReader.read_types(fp, Psk.Wedge16, section, psk.wedges)
_read_types(fp, Psk.Wedge16, section, psk.wedges)
elif section.data_size == ctypes.sizeof(Psk.Wedge32):
PskReader.read_types(fp, Psk.Wedge32, section, psk.wedges)
_read_types(fp, Psk.Wedge32, section, psk.wedges)
else:
raise RuntimeError('Unrecognized wedge format')
elif section.name == b'FACE0000':
PskReader.read_types(fp, Psk.Face, section, psk.faces)
_read_types(fp, Psk.Face, section, psk.faces)
elif section.name == b'MATT0000':
PskReader.read_types(fp, Psk.Material, section, psk.materials)
_read_types(fp, Psk.Material, section, psk.materials)
elif section.name == b'REFSKELT':
PskReader.read_types(fp, Psk.Bone, section, psk.bones)
_read_types(fp, Psk.Bone, section, psk.bones)
elif section.name == b'RAWWEIGHTS':
PskReader.read_types(fp, Psk.Weight, section, psk.weights)
_read_types(fp, Psk.Weight, section, psk.weights)
elif section.name == b'FACE3200':
PskReader.read_types(fp, Psk.Face32, section, psk.faces)
_read_types(fp, Psk.Face32, section, psk.faces)
elif section.name == b'VERTEXCOLOR':
PskReader.read_types(fp, Color, section, psk.vertex_colors)
_read_types(fp, Color, section, psk.vertex_colors)
elif section.name.startswith(b'EXTRAUVS'):
PskReader.read_types(fp, Vector2, section, psk.extra_uvs)
_read_types(fp, Vector2, section, psk.extra_uvs)
elif section.name == b'VTXNORMS':
PskReader.read_types(fp, Vector3, section, psk.vertex_normals)
_read_types(fp, Vector3, section, psk.vertex_normals)
else:
raise RuntimeError(f'Unrecognized section "{section.name}" at position {fp.tell()}')
return psk

View File

@@ -15,10 +15,6 @@ class BoneGroupListItem(PropertyGroup):
count: IntProperty()
is_selected: BoolProperty(default=False)
@property
def name(self):
    # NOTE(review): 'self.name' resolves back to this very property, so reading
    # it recurses infinitely (RecursionError). It presumably should return a
    # stored field on the item instead — the surrounding diff appears to be
    # deleting this property; confirm against the enclosing class definition.
    return self.name
classes = (
BoneGroupListItem,