Refactoring to reduce pointless class encapsulation when just a function would do.

This commit is contained in:
Colin Basnett
2022-06-27 18:10:37 -07:00
parent 616593d0fb
commit 4937f8f779
9 changed files with 956 additions and 960 deletions

View File

@@ -32,22 +32,22 @@ def rgb_to_srgb(c):
return 12.92 * c return 12.92 * c
def get_nla_strips_ending_at_frame(object, frame) -> List[NlaStrip]: def get_nla_strips_ending_at_frame(animation_data, frame) -> List[NlaStrip]:
if object is None or object.animation_data is None: if animation_data is None:
return [] return []
strips = [] strips = []
for nla_track in object.animation_data.nla_tracks: for nla_track in animation_data.nla_tracks:
for strip in nla_track.strips: for strip in nla_track.strips:
if strip.frame_end == frame: if strip.frame_end == frame:
strips.append(strip) strips.append(strip)
return strips return strips
def get_nla_strips_in_timeframe(object, frame_min, frame_max) -> List[NlaStrip]: def get_nla_strips_in_timeframe(animation_data, frame_min, frame_max) -> List[NlaStrip]:
if object is None or object.animation_data is None: if animation_data is None:
return [] return []
strips = [] strips = []
for nla_track in object.animation_data.nla_tracks: for nla_track in animation_data.nla_tracks:
if nla_track.mute: if nla_track.mute:
continue continue
for strip in nla_track.strips: for strip in nla_track.strips:

View File

@@ -1,14 +1,15 @@
from typing import Dict, Iterable from typing import Dict, Iterable
from bpy.types import Action from bpy.types import Action
from mathutils import Matrix
from .data import * from .data import *
from ..helpers import * from ..helpers import *
class PsaBuilderOptions(object): class PsaBuildOptions(object):
def __init__(self): def __init__(self):
self.should_override_animation_data = False
self.animation_data_override = None
self.fps_source = 'SCENE' self.fps_source = 'SCENE'
self.fps_custom = 30.0 self.fps_custom = 30.0
self.sequence_source = 'ACTIONS' self.sequence_source = 'ACTIONS'
@@ -23,11 +24,7 @@ class PsaBuilderOptions(object):
self.root_motion = False self.root_motion = False
class PsaBuilder(object): def get_sequence_fps(context, options: PsaBuildOptions, actions: Iterable[Action]) -> float:
def __init__(self):
pass
def get_sequence_fps(self, context, options: PsaBuilderOptions, actions: Iterable[Action]) -> float:
if options.fps_source == 'SCENE': if options.fps_source == 'SCENE':
return context.scene.render.fps return context.scene.render.fps
if options.fps_source == 'CUSTOM': if options.fps_source == 'CUSTOM':
@@ -47,16 +44,64 @@ class PsaBuilder(object):
else: else:
raise RuntimeError(f'Invalid FPS source "{options.fps_source}"') raise RuntimeError(f'Invalid FPS source "{options.fps_source}"')
def build(self, context, options: PsaBuilderOptions) -> Psa:
def get_timeline_marker_sequence_frame_ranges(animation_data, context, options: PsaBuildOptions) -> Dict:
# Timeline markers need to be sorted so that we can determine the sequence start and end positions.
sequence_frame_ranges = dict()
sorted_timeline_markers = list(sorted(context.scene.timeline_markers, key=lambda x: x.frame))
sorted_timeline_marker_names = list(map(lambda x: x.name, sorted_timeline_markers))
for marker_name in options.marker_names:
marker = context.scene.timeline_markers[marker_name]
frame_min = marker.frame
# Determine the final frame of the sequence based on the next marker.
# If no subsequent marker exists, use the maximum frame_end from all NLA strips.
marker_index = sorted_timeline_marker_names.index(marker_name)
next_marker_index = marker_index + 1
frame_max = 0
if next_marker_index < len(sorted_timeline_markers):
# There is a next marker. Use that next marker's frame position as the last frame of this sequence.
frame_max = sorted_timeline_markers[next_marker_index].frame
if options.should_trim_timeline_marker_sequences:
nla_strips = get_nla_strips_in_timeframe(animation_data, marker.frame, frame_max)
if len(nla_strips) > 0:
frame_max = min(frame_max, max(map(lambda nla_strip: nla_strip.frame_end, nla_strips)))
frame_min = max(frame_min, min(map(lambda nla_strip: nla_strip.frame_start, nla_strips)))
else:
# No strips in between this marker and the next, just export this as a one-frame animation.
frame_max = frame_min
else:
# There is no next marker.
# Find the final frame of all the NLA strips and use that as the last frame of this sequence.
for nla_track in animation_data.nla_tracks:
if nla_track.mute:
continue
for strip in nla_track.strips:
frame_max = max(frame_max, strip.frame_end)
if frame_min > frame_max:
continue
sequence_frame_ranges[marker_name] = int(frame_min), int(frame_max)
return sequence_frame_ranges
def build_psa(context, options: PsaBuildOptions) -> Psa:
active_object = context.view_layer.objects.active active_object = context.view_layer.objects.active
if active_object.type != 'ARMATURE': if active_object.type != 'ARMATURE':
raise RuntimeError('Selected object must be an Armature') raise RuntimeError('Selected object must be an Armature')
armature = active_object if options.should_override_animation_data:
animation_data_object = options.animation_data_override
else:
animation_data_object = active_object
if armature.animation_data is None: animation_data = animation_data_object.animation_data
raise RuntimeError('No animation data for armature')
if animation_data is None:
raise RuntimeError(f'No animation data for object \'{animation_data_object.name}\'')
# Ensure that we actually have items that we are going to be exporting. # Ensure that we actually have items that we are going to be exporting.
if options.sequence_source == 'ACTIONS' and len(options.actions) == 0: if options.sequence_source == 'ACTIONS' and len(options.actions) == 0:
@@ -66,6 +111,7 @@ class PsaBuilder(object):
psa = Psa() psa = Psa()
armature = active_object
bones = list(armature.data.bones) bones = list(armature.data.bones)
# The order of the armature bones and the pose bones is not guaranteed to be the same. # The order of the armature bones and the pose bones is not guaranteed to be the same.
@@ -108,9 +154,9 @@ class PsaBuilder(object):
rotation.x = -rotation.x rotation.x = -rotation.x
rotation.y = -rotation.y rotation.y = -rotation.y
rotation.z = -rotation.z rotation.z = -rotation.z
quat_parent = bone.parent.matrix.to_quaternion().inverted() inverse_parent_rotation = bone.parent.matrix.to_quaternion().inverted()
parent_head = quat_parent @ bone.parent.head parent_head = inverse_parent_rotation @ bone.parent.head
parent_tail = quat_parent @ bone.parent.tail parent_tail = inverse_parent_rotation @ bone.parent.tail
location = (parent_tail - parent_head) + bone.head location = (parent_tail - parent_head) + bone.head
else: else:
location = armature.matrix_local @ bone.head location = armature.matrix_local @ bone.head
@@ -153,11 +199,11 @@ class PsaBuilder(object):
frame_min, frame_max = [int(x) for x in action.frame_range] frame_min, frame_max = [int(x) for x in action.frame_range]
export_sequence.nla_state.frame_min = frame_min export_sequence.nla_state.frame_min = frame_min
export_sequence.nla_state.frame_max = frame_max export_sequence.nla_state.frame_max = frame_max
export_sequence.fps = self.get_sequence_fps(context, options, [action]) export_sequence.fps = get_sequence_fps(context, options, [action])
export_sequences.append(export_sequence) export_sequences.append(export_sequence)
pass pass
elif options.sequence_source == 'TIMELINE_MARKERS': elif options.sequence_source == 'TIMELINE_MARKERS':
sequence_frame_ranges = self.get_timeline_marker_sequence_frame_ranges(armature, context, options) sequence_frame_ranges = get_timeline_marker_sequence_frame_ranges(animation_data, context, options)
for name, (frame_min, frame_max) in sequence_frame_ranges.items(): for name, (frame_min, frame_max) in sequence_frame_ranges.items():
export_sequence = ExportSequence() export_sequence = ExportSequence()
@@ -166,8 +212,8 @@ class PsaBuilder(object):
export_sequence.nla_state.frame_min = frame_min export_sequence.nla_state.frame_min = frame_min
export_sequence.nla_state.frame_max = frame_max export_sequence.nla_state.frame_max = frame_max
nla_strips_actions = set( nla_strips_actions = set(
map(lambda x: x.action, get_nla_strips_in_timeframe(active_object, frame_min, frame_max))) map(lambda x: x.action, get_nla_strips_in_timeframe(animation_data, frame_min, frame_max)))
export_sequence.fps = self.get_sequence_fps(context, options, nla_strips_actions) export_sequence.fps = get_sequence_fps(context, options, nla_strips_actions)
export_sequences.append(export_sequence) export_sequences.append(export_sequence)
else: else:
raise ValueError(f'Unhandled sequence source: {options.sequence_source}') raise ValueError(f'Unhandled sequence source: {options.sequence_source}')
@@ -181,22 +227,20 @@ class PsaBuilder(object):
frame_start_index = 0 frame_start_index = 0
for export_sequence in export_sequences: for export_sequence in export_sequences:
armature.animation_data.action = export_sequence.nla_state.action # Link the action to the animation data and update view layer.
animation_data.action = export_sequence.nla_state.action
context.view_layer.update() context.view_layer.update()
psa_sequence = Psa.Sequence()
frame_min = export_sequence.nla_state.frame_min frame_min = export_sequence.nla_state.frame_min
frame_max = export_sequence.nla_state.frame_max frame_max = export_sequence.nla_state.frame_max
frame_count = frame_max - frame_min + 1 frame_count = frame_max - frame_min + 1
psa_sequence = Psa.Sequence()
psa_sequence.name = bytes(export_sequence.name, encoding='windows-1252') psa_sequence.name = bytes(export_sequence.name, encoding='windows-1252')
psa_sequence.frame_count = frame_count psa_sequence.frame_count = frame_count
psa_sequence.frame_start_index = frame_start_index psa_sequence.frame_start_index = frame_start_index
psa_sequence.fps = export_sequence.fps psa_sequence.fps = export_sequence.fps
frame_count = frame_max - frame_min + 1
for frame in range(frame_count): for frame in range(frame_count):
context.scene.frame_set(frame_min + frame) context.scene.frame_set(frame_min + frame)
@@ -239,44 +283,3 @@ class PsaBuilder(object):
psa.sequences[export_sequence.name] = psa_sequence psa.sequences[export_sequence.name] = psa_sequence
return psa return psa
def get_timeline_marker_sequence_frame_ranges(self, object, context, options: PsaBuilderOptions) -> Dict:
# Timeline markers need to be sorted so that we can determine the sequence start and end positions.
sequence_frame_ranges = dict()
sorted_timeline_markers = list(sorted(context.scene.timeline_markers, key=lambda x: x.frame))
sorted_timeline_marker_names = list(map(lambda x: x.name, sorted_timeline_markers))
for marker_name in options.marker_names:
marker = context.scene.timeline_markers[marker_name]
frame_min = marker.frame
# Determine the final frame of the sequence based on the next marker.
# If no subsequent marker exists, use the maximum frame_end from all NLA strips.
marker_index = sorted_timeline_marker_names.index(marker_name)
next_marker_index = marker_index + 1
frame_max = 0
if next_marker_index < len(sorted_timeline_markers):
# There is a next marker. Use that next marker's frame position as the last frame of this sequence.
frame_max = sorted_timeline_markers[next_marker_index].frame
if options.should_trim_timeline_marker_sequences:
nla_strips = get_nla_strips_in_timeframe(object, marker.frame, frame_max)
if len(nla_strips) > 0:
frame_max = min(frame_max, max(map(lambda nla_strip: nla_strip.frame_end, nla_strips)))
frame_min = max(frame_min, min(map(lambda nla_strip: nla_strip.frame_start, nla_strips)))
else:
# No strips in between this marker and the next, just export this as a one-frame animation.
frame_max = frame_min
else:
# There is no next marker.
# Find the final frame of all the NLA strips and use that as the last frame of this sequence.
for nla_track in object.animation_data.nla_tracks:
if nla_track.mute:
continue
for strip in nla_track.strips:
frame_max = max(frame_max, strip.frame_end)
if frame_min > frame_max:
continue
sequence_frame_ranges[marker_name] = int(frame_min), int(frame_max)
return sequence_frame_ranges

View File

@@ -10,18 +10,13 @@ from bpy.props import BoolProperty, CollectionProperty, EnumProperty, FloatPrope
from bpy.types import Action, Operator, PropertyGroup, UIList from bpy.types import Action, Operator, PropertyGroup, UIList
from bpy_extras.io_utils import ExportHelper from bpy_extras.io_utils import ExportHelper
from .builder import PsaBuilder, PsaBuilderOptions from .builder import PsaBuildOptions, build_psa
from .data import * from .data import *
from ..helpers import * from ..helpers import *
from ..types import BoneGroupListItem from ..types import BoneGroupListItem
class PsaExporter(object): def export_psa(psa: Psa, path: str):
def __init__(self, psa: Psa):
self.psa: Psa = psa
# This method is shared by both PSA/K file formats, move this?
@staticmethod
def write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None): def write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None):
section = Section() section = Section()
section.name = name section.name = name
@@ -32,13 +27,11 @@ class PsaExporter(object):
if data is not None: if data is not None:
for datum in data: for datum in data:
fp.write(datum) fp.write(datum)
def export(self, path: str):
with open(path, 'wb') as fp: with open(path, 'wb') as fp:
self.write_section(fp, b'ANIMHEAD') write_section(fp, b'ANIMHEAD')
self.write_section(fp, b'BONENAMES', Psa.Bone, self.psa.bones) write_section(fp, b'BONENAMES', Psa.Bone, psa.bones)
self.write_section(fp, b'ANIMINFO', Psa.Sequence, list(self.psa.sequences.values())) write_section(fp, b'ANIMINFO', Psa.Sequence, list(psa.sequences.values()))
self.write_section(fp, b'ANIMKEYS', Psa.Key, self.psa.keys) write_section(fp, b'ANIMKEYS', Psa.Key, psa.keys)
class PsaExportActionListItem(PropertyGroup): class PsaExportActionListItem(PropertyGroup):
@@ -64,6 +57,10 @@ def should_use_original_sequence_names_updated(_, context):
update_action_names(context) update_action_names(context)
def psa_export_property_group_animation_data_override_poll(_context, obj):
return obj.animation_data is not None
class PsaExportPropertyGroup(PropertyGroup): class PsaExportPropertyGroup(PropertyGroup):
root_motion: BoolProperty( root_motion: BoolProperty(
name='Root Motion', name='Root Motion',
@@ -71,6 +68,15 @@ class PsaExportPropertyGroup(PropertyGroup):
default=False, default=False,
description='When set, the root bone will be transformed as it appears in the scene', description='When set, the root bone will be transformed as it appears in the scene',
) )
should_override_animation_data: BoolProperty(
name='Override Animation Data',
options=set(),
default=False
)
animation_data_override: PointerProperty(
type=bpy.types.Object,
poll=psa_export_property_group_animation_data_override_poll
)
sequence_source: EnumProperty( sequence_source: EnumProperty(
name='Source', name='Source',
options=set(), options=set(),
@@ -154,6 +160,10 @@ def is_bone_filter_mode_item_available(context, identifier):
return True return True
def should_action_be_selected_by_default(action):
return action is not None and action.asset_data is None
class PsaExportOperator(Operator, ExportHelper): class PsaExportOperator(Operator, ExportHelper):
bl_idname = 'psa_export.operator' bl_idname = 'psa_export.operator'
bl_label = 'Export' bl_label = 'Export'
@@ -191,8 +201,11 @@ class PsaExportOperator(Operator, ExportHelper):
# SOURCE # SOURCE
layout.prop(pg, 'sequence_source', text='Source') layout.prop(pg, 'sequence_source', text='Source')
# ROOT MOTION if pg.sequence_source == 'TIMELINE_MARKERS':
layout.prop(pg, 'root_motion', text='Root Motion') # ANIMDATA SOURCE
layout.prop(pg, 'should_override_animation_data')
if pg.should_override_animation_data:
layout.prop(pg, 'animation_data_override')
# SELECT ALL/NONE # SELECT ALL/NONE
row = layout.row(align=True) row = layout.row(align=True)
@@ -249,15 +262,17 @@ class PsaExportOperator(Operator, ExportHelper):
layout.template_list('PSX_UL_BoneGroupList', '', pg, 'bone_group_list', pg, 'bone_group_list_index', layout.template_list('PSX_UL_BoneGroupList', '', pg, 'bone_group_list', pg, 'bone_group_list_index',
rows=rows) rows=rows)
def should_action_be_selected_by_default(self, action): layout.separator()
return action is not None and action.asset_data is None
# ROOT MOTION
layout.prop(pg, 'root_motion', text='Root Motion')
def is_action_for_armature(self, action): def is_action_for_armature(self, action):
if len(action.fcurves) == 0: if len(action.fcurves) == 0:
return False return False
bone_names = set([x.name for x in self.armature.data.bones]) bone_names = set([x.name for x in self.armature.data.bones])
for fcurve in action.fcurves: for fcurve in action.fcurves:
match = re.match(r'pose\.bones\["(.+)"\].\w+', fcurve.data_path) match = re.match(r'pose\.bones\["(.+)"].\w+', fcurve.data_path)
if not match: if not match:
continue continue
bone_name = match.group(1) bone_name = match.group(1)
@@ -273,7 +288,7 @@ class PsaExportOperator(Operator, ExportHelper):
if context.view_layer.objects.active.type != 'ARMATURE': if context.view_layer.objects.active.type != 'ARMATURE':
raise RuntimeError('The selected object must be an armature') raise RuntimeError('The selected object must be an armature')
def invoke(self, context, event): def invoke(self, context, _event):
try: try:
self._check_context(context) self._check_context(context)
except RuntimeError as e: except RuntimeError as e:
@@ -290,7 +305,7 @@ class PsaExportOperator(Operator, ExportHelper):
item = pg.action_list.add() item = pg.action_list.add()
item.action = action item.action = action
item.name = action.name item.name = action.name
item.is_selected = self.should_action_be_selected_by_default(action) item.is_selected = should_action_be_selected_by_default(action)
update_action_names(context) update_action_names(context)
@@ -318,7 +333,9 @@ class PsaExportOperator(Operator, ExportHelper):
actions = [x.action for x in pg.action_list if x.is_selected] actions = [x.action for x in pg.action_list if x.is_selected]
marker_names = [x.name for x in pg.marker_list if x.is_selected] marker_names = [x.name for x in pg.marker_list if x.is_selected]
options = PsaBuilderOptions() options = PsaBuildOptions()
options.should_override_animation_data = pg.should_override_animation_data
options.animation_data_override = pg.animation_data_override
options.fps_source = pg.fps_source options.fps_source = pg.fps_source
options.fps_custom = pg.fps_custom options.fps_custom = pg.fps_custom
options.sequence_source = pg.sequence_source options.sequence_source = pg.sequence_source
@@ -332,16 +349,14 @@ class PsaExportOperator(Operator, ExportHelper):
options.sequence_name_suffix = pg.sequence_name_suffix options.sequence_name_suffix = pg.sequence_name_suffix
options.root_motion = pg.root_motion options.root_motion = pg.root_motion
builder = PsaBuilder()
try: try:
psa = builder.build(context, options) psa = build_psa(context, options)
except RuntimeError as e: except RuntimeError as e:
self.report({'ERROR_INVALID_CONTEXT'}, str(e)) self.report({'ERROR_INVALID_CONTEXT'}, str(e))
return {'CANCELLED'} return {'CANCELLED'}
exporter = PsaExporter(psa) export_psa(psa, self.filepath)
exporter.export(self.filepath)
return {'FINISHED'} return {'FINISHED'}
@@ -368,8 +383,7 @@ def filter_sequences(pg: PsaExportPropertyGroup, sequences: bpy.types.bpy_prop_c
return flt_flags return flt_flags
def get_visible_sequences(pg: PsaExportPropertyGroup, sequences: bpy.types.bpy_prop_collection) -> List[ def get_visible_sequences(pg: PsaExportPropertyGroup, sequences: bpy.types.bpy_prop_collection) -> List[PsaExportActionListItem]:
PsaExportActionListItem]:
visible_sequences = [] visible_sequences = []
for i, flag in enumerate(filter_sequences(pg, sequences)): for i, flag in enumerate(filter_sequences(pg, sequences)):
if bool(flag & (1 << 30)): if bool(flag & (1 << 30)):
@@ -401,10 +415,9 @@ class PSA_UL_ExportSequenceList(UIList):
subrow = row.row(align=True) subrow = row.row(align=True)
subrow.prop(pg, 'sequence_filter_asset', icon_only=True, icon='ASSET_MANAGER') subrow.prop(pg, 'sequence_filter_asset', icon_only=True, icon='ASSET_MANAGER')
def filter_items(self, context, data, prop):
def filter_items(self, context, data, property):
pg = context.scene.psa_export pg = context.scene.psa_export
actions = getattr(data, property) actions = getattr(data, prop)
flt_flags = filter_sequences(pg, actions) flt_flags = filter_sequences(pg, actions)
flt_neworder = bpy.types.UI_UL_list.sort_items_by_name(actions, 'name') flt_neworder = bpy.types.UI_UL_list.sort_items_by_name(actions, 'name')
return flt_flags, flt_neworder return flt_flags, flt_neworder

View File

@@ -26,11 +26,7 @@ class PsaImportOptions(object):
self.action_name_prefix = '' self.action_name_prefix = ''
class PsaImporter(object): def import_psa(psa_reader: PsaReader, armature_object, options: PsaImportOptions):
def __init__(self):
pass
def import_psa(self, psa_reader: PsaReader, armature_object, options: PsaImportOptions):
sequences = map(lambda x: psa_reader.sequences[x], options.sequence_names) sequences = map(lambda x: psa_reader.sequences[x], options.sequence_names)
armature_data = armature_object.data armature_data = armature_object.data
@@ -104,7 +100,8 @@ class PsaImporter(object):
import_bone.parent = import_bones_dict[armature_bone.parent.name] import_bone.parent = import_bones_dict[armature_bone.parent.name]
# Calculate the original location & rotation of each bone (in world-space maybe?) # Calculate the original location & rotation of each bone (in world-space maybe?)
if armature_bone.get('orig_quat') is not None: if armature_bone.get('orig_quat') is not None:
# TODO: ideally we don't rely on bone auxiliary data like this, the non-aux data path is incorrect (animations are flipped 180 around Z) # TODO: ideally we don't rely on bone auxiliary data like this, the non-aux data path is incorrect
# (animations are flipped 180 around Z)
import_bone.orig_quat = Quaternion(armature_bone['orig_quat']) import_bone.orig_quat = Quaternion(armature_bone['orig_quat'])
import_bone.orig_loc = Vector(armature_bone['orig_loc']) import_bone.orig_loc = Vector(armature_bone['orig_loc'])
import_bone.post_quat = Quaternion(armature_bone['post_quat']) import_bone.post_quat = Quaternion(armature_bone['post_quat'])
@@ -403,7 +400,7 @@ class PsaImportSequencesFromText(Operator):
class PsaImportSequencesSelectAll(Operator): class PsaImportSequencesSelectAll(Operator):
bl_idname = 'psa_import.sequences_select_all' bl_idname = 'psa_import.sequences_select_all'
bl_label = 'All' bl_label = 'All'
bl_description = 'Select all visible sequences' bl_description = 'Select all sequences'
bl_options = {'INTERNAL'} bl_options = {'INTERNAL'}
@classmethod @classmethod
@@ -589,7 +586,7 @@ class PsaImportOperator(Operator):
options.should_write_metadata = pg.should_write_metadata options.should_write_metadata = pg.should_write_metadata
options.should_write_keyframes = pg.should_write_keyframes options.should_write_keyframes = pg.should_write_keyframes
PsaImporter().import_psa(psa_reader, context.view_layer.objects.active, options) import_psa(psa_reader, context.view_layer.objects.active, options)
self.report({'INFO'}, f'Imported {len(sequence_names)} action(s)') self.report({'INFO'}, f'Imported {len(sequence_names)} action(s)')

View File

@@ -7,8 +7,8 @@ from .data import *
class PsaReader(object): class PsaReader(object):
""" """
This class reads the sequences and bone information immediately upon instantiation and hold onto a file handle. This class reads the sequences and bone information immediately upon instantiation and holds onto a file handle.
The key data is not read into memory upon instantiation due to its potentially very large size. The keyframe data is not read into memory upon instantiation due to its potentially very large size.
To read the key data for a particular sequence, call `read_sequence_keys`. To read the key data for a particular sequence, call `read_sequence_keys`.
""" """

View File

@@ -12,19 +12,14 @@ class PskInputObjects(object):
self.armature_object = None self.armature_object = None
class PskBuilderOptions(object): class PskBuildOptions(object):
def __init__(self): def __init__(self):
self.bone_filter_mode = 'ALL' self.bone_filter_mode = 'ALL'
self.bone_group_indices = [] self.bone_group_indices = []
self.use_raw_mesh_data = True self.use_raw_mesh_data = True
class PskBuilder(object): def get_psk_input_objects(context) -> PskInputObjects:
def __init__(self):
pass
@staticmethod
def get_input_objects(context) -> PskInputObjects:
input_objects = PskInputObjects() input_objects = PskInputObjects()
for selected_object in context.view_layer.objects.selected: for selected_object in context.view_layer.objects.selected:
if selected_object.type != 'MESH': if selected_object.type != 'MESH':
@@ -59,8 +54,9 @@ class PskBuilder(object):
return input_objects return input_objects
def build(self, context, options: PskBuilderOptions) -> Psk:
input_objects = PskBuilder.get_input_objects(context) def build_psk(context, options: PskBuildOptions) -> Psk:
input_objects = get_psk_input_objects(context)
armature_object = input_objects.armature_object armature_object = input_objects.armature_object

View File

@@ -4,7 +4,7 @@ from bpy.props import BoolProperty, StringProperty, CollectionProperty, IntPrope
from bpy.types import Operator, PropertyGroup from bpy.types import Operator, PropertyGroup
from bpy_extras.io_utils import ExportHelper from bpy_extras.io_utils import ExportHelper
from .builder import PskBuilder, PskBuilderOptions from .builder import build_psk, PskBuildOptions, get_psk_input_objects
from .data import * from .data import *
from ..helpers import populate_bone_group_list from ..helpers import populate_bone_group_list
from ..types import BoneGroupListItem from ..types import BoneGroupListItem
@@ -15,13 +15,7 @@ MAX_BONE_COUNT = 256
MAX_MATERIAL_COUNT = 256 MAX_MATERIAL_COUNT = 256
class PskExporter(object): def _write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None):
def __init__(self, psk: Psk):
self.psk: Psk = psk
@staticmethod
def write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None):
section = Section() section = Section()
section.name = name section.name = name
if data_type is not None and data is not None: if data_type is not None and data is not None:
@@ -32,22 +26,23 @@ class PskExporter(object):
for datum in data: for datum in data:
fp.write(datum) fp.write(datum)
def export(self, path: str):
if len(self.psk.wedges) > MAX_WEDGE_COUNT: def export_psk(psk: Psk, path: str):
raise RuntimeError(f'Number of wedges ({len(self.psk.wedges)}) exceeds limit of {MAX_WEDGE_COUNT}') if len(psk.wedges) > MAX_WEDGE_COUNT:
if len(self.psk.bones) > MAX_BONE_COUNT: raise RuntimeError(f'Number of wedges ({len(psk.wedges)}) exceeds limit of {MAX_WEDGE_COUNT}')
raise RuntimeError(f'Number of bones ({len(self.psk.bones)}) exceeds limit of {MAX_BONE_COUNT}') if len(psk.bones) > MAX_BONE_COUNT:
if len(self.psk.points) > MAX_POINT_COUNT: raise RuntimeError(f'Number of bones ({len(psk.bones)}) exceeds limit of {MAX_BONE_COUNT}')
raise RuntimeError(f'Numbers of vertices ({len(self.psk.points)}) exceeds limit of {MAX_POINT_COUNT}') if len(psk.points) > MAX_POINT_COUNT:
if len(self.psk.materials) > MAX_MATERIAL_COUNT: raise RuntimeError(f'Numbers of vertices ({len(psk.points)}) exceeds limit of {MAX_POINT_COUNT}')
raise RuntimeError(f'Number of materials ({len(self.psk.materials)}) exceeds limit of {MAX_MATERIAL_COUNT}') if len(psk.materials) > MAX_MATERIAL_COUNT:
raise RuntimeError(f'Number of materials ({len(psk.materials)}) exceeds limit of {MAX_MATERIAL_COUNT}')
with open(path, 'wb') as fp: with open(path, 'wb') as fp:
self.write_section(fp, b'ACTRHEAD') _write_section(fp, b'ACTRHEAD')
self.write_section(fp, b'PNTS0000', Vector3, self.psk.points) _write_section(fp, b'PNTS0000', Vector3, psk.points)
wedges = [] wedges = []
for index, w in enumerate(self.psk.wedges): for index, w in enumerate(psk.wedges):
wedge = Psk.Wedge16() wedge = Psk.Wedge16()
wedge.material_index = w.material_index wedge.material_index = w.material_index
wedge.u = w.u wedge.u = w.u
@@ -55,15 +50,15 @@ class PskExporter(object):
wedge.point_index = w.point_index wedge.point_index = w.point_index
wedges.append(wedge) wedges.append(wedge)
self.write_section(fp, b'VTXW0000', Psk.Wedge16, wedges) _write_section(fp, b'VTXW0000', Psk.Wedge16, wedges)
self.write_section(fp, b'FACE0000', Psk.Face, self.psk.faces) _write_section(fp, b'FACE0000', Psk.Face, psk.faces)
self.write_section(fp, b'MATT0000', Psk.Material, self.psk.materials) _write_section(fp, b'MATT0000', Psk.Material, psk.materials)
self.write_section(fp, b'REFSKELT', Psk.Bone, self.psk.bones) _write_section(fp, b'REFSKELT', Psk.Bone, psk.bones)
self.write_section(fp, b'RAWWEIGHTS', Psk.Weight, self.psk.weights) _write_section(fp, b'RAWWEIGHTS', Psk.Weight, psk.weights)
def is_bone_filter_mode_item_available(context, identifier): def is_bone_filter_mode_item_available(context, identifier):
input_objects = PskBuilder.get_input_objects(context) input_objects = get_psk_input_objects(context)
armature_object = input_objects.armature_object armature_object = input_objects.armature_object
if identifier == 'BONE_GROUPS': if identifier == 'BONE_GROUPS':
if not armature_object or not armature_object.pose or not armature_object.pose.bone_groups: if not armature_object or not armature_object.pose or not armature_object.pose.bone_groups:
@@ -88,7 +83,7 @@ class PskExportOperator(Operator, ExportHelper):
def invoke(self, context, event): def invoke(self, context, event):
try: try:
input_objects = PskBuilder.get_input_objects(context) input_objects = get_psk_input_objects(context)
except RuntimeError as e: except RuntimeError as e:
self.report({'ERROR_INVALID_CONTEXT'}, str(e)) self.report({'ERROR_INVALID_CONTEXT'}, str(e))
return {'CANCELLED'} return {'CANCELLED'}
@@ -105,7 +100,7 @@ class PskExportOperator(Operator, ExportHelper):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
try: try:
PskBuilder.get_input_objects(context) get_psk_input_objects(context)
except RuntimeError as e: except RuntimeError as e:
cls.poll_message_set(str(e)) cls.poll_message_set(str(e))
return False return False
@@ -136,15 +131,13 @@ class PskExportOperator(Operator, ExportHelper):
def execute(self, context): def execute(self, context):
pg = context.scene.psk_export pg = context.scene.psk_export
builder = PskBuilder() options = PskBuildOptions()
options = PskBuilderOptions()
options.bone_filter_mode = pg.bone_filter_mode options.bone_filter_mode = pg.bone_filter_mode
options.bone_group_indices = [x.index for x in pg.bone_group_list if x.is_selected] options.bone_group_indices = [x.index for x in pg.bone_group_list if x.is_selected]
options.use_raw_mesh_data = pg.use_raw_mesh_data options.use_raw_mesh_data = pg.use_raw_mesh_data
try: try:
psk = builder.build(context, options) psk = build_psk(context, options)
exporter = PskExporter(psk) export_psk(psk, self.filepath)
exporter.export(self.filepath)
except RuntimeError as e: except RuntimeError as e:
self.report({'ERROR_INVALID_CONTEXT'}, str(e)) self.report({'ERROR_INVALID_CONTEXT'}, str(e))
return {'CANCELLED'} return {'CANCELLED'}

View File

@@ -12,7 +12,7 @@ from bpy_extras.io_utils import ImportHelper
from mathutils import Quaternion, Vector, Matrix from mathutils import Quaternion, Vector, Matrix
from .data import Psk from .data import Psk
from .reader import PskReader from .reader import read_psk
from ..helpers import rgb_to_srgb from ..helpers import rgb_to_srgb
@@ -26,11 +26,7 @@ class PskImportOptions(object):
self.bone_length = 1.0 self.bone_length = 1.0
class PskImporter(object): def import_psk(psk: Psk, context, options: PskImportOptions):
def __init__(self):
pass
def import_psk(self, psk: Psk, context, options: PskImportOptions):
# ARMATURE # ARMATURE
armature_data = bpy.data.armatures.new(options.name) armature_data = bpy.data.armatures.new(options.name)
armature_object = bpy.data.objects.new(options.name, armature_data) armature_object = bpy.data.objects.new(options.name, armature_data)
@@ -286,8 +282,9 @@ class PskImportOperator(Operator, ImportHelper):
def execute(self, context): def execute(self, context):
pg = context.scene.psk_import pg = context.scene.psk_import
reader = PskReader()
psk = reader.read(self.filepath) psk = read_psk(self.filepath)
options = PskImportOptions() options = PskImportOptions()
options.name = os.path.splitext(os.path.basename(self.filepath))[0] options.name = os.path.splitext(os.path.basename(self.filepath))[0]
options.should_import_extra_uvs = pg.should_import_extra_uvs options.should_import_extra_uvs = pg.should_import_extra_uvs
@@ -295,7 +292,9 @@ class PskImportOperator(Operator, ImportHelper):
options.should_import_vertex_normals = pg.should_import_vertex_normals options.should_import_vertex_normals = pg.should_import_vertex_normals
options.vertex_color_space = pg.vertex_color_space options.vertex_color_space = pg.vertex_color_space
options.bone_length = pg.bone_length options.bone_length = pg.bone_length
PskImporter().import_psk(psk, context, options)
import_psk(psk, context, options)
return {'FINISHED'} return {'FINISHED'}
def draw(self, context): def draw(self, context):

View File

@@ -3,13 +3,7 @@ import ctypes
from .data import * from .data import *
class PskReader(object): def _read_types(fp, data_class: ctypes.Structure, section: Section, data):
def __init__(self):
pass
@staticmethod
def read_types(fp, data_class: ctypes.Structure, section: Section, data):
buffer_length = section.data_size * section.data_count buffer_length = section.data_size * section.data_count
buffer = fp.read(buffer_length) buffer = fp.read(buffer_length)
offset = 0 offset = 0
@@ -17,7 +11,8 @@ class PskReader(object):
data.append(data_class.from_buffer_copy(buffer, offset)) data.append(data_class.from_buffer_copy(buffer, offset))
offset += section.data_size offset += section.data_size
def read(self, path) -> Psk:
def read_psk(path) -> Psk:
psk = Psk() psk = Psk()
with open(path, 'rb') as fp: with open(path, 'rb') as fp:
while fp.read(1): while fp.read(1):
@@ -26,30 +21,30 @@ class PskReader(object):
if section.name == b'ACTRHEAD': if section.name == b'ACTRHEAD':
pass pass
elif section.name == b'PNTS0000': elif section.name == b'PNTS0000':
PskReader.read_types(fp, Vector3, section, psk.points) _read_types(fp, Vector3, section, psk.points)
elif section.name == b'VTXW0000': elif section.name == b'VTXW0000':
if section.data_size == ctypes.sizeof(Psk.Wedge16): if section.data_size == ctypes.sizeof(Psk.Wedge16):
PskReader.read_types(fp, Psk.Wedge16, section, psk.wedges) _read_types(fp, Psk.Wedge16, section, psk.wedges)
elif section.data_size == ctypes.sizeof(Psk.Wedge32): elif section.data_size == ctypes.sizeof(Psk.Wedge32):
PskReader.read_types(fp, Psk.Wedge32, section, psk.wedges) _read_types(fp, Psk.Wedge32, section, psk.wedges)
else: else:
raise RuntimeError('Unrecognized wedge format') raise RuntimeError('Unrecognized wedge format')
elif section.name == b'FACE0000': elif section.name == b'FACE0000':
PskReader.read_types(fp, Psk.Face, section, psk.faces) _read_types(fp, Psk.Face, section, psk.faces)
elif section.name == b'MATT0000': elif section.name == b'MATT0000':
PskReader.read_types(fp, Psk.Material, section, psk.materials) _read_types(fp, Psk.Material, section, psk.materials)
elif section.name == b'REFSKELT': elif section.name == b'REFSKELT':
PskReader.read_types(fp, Psk.Bone, section, psk.bones) _read_types(fp, Psk.Bone, section, psk.bones)
elif section.name == b'RAWWEIGHTS': elif section.name == b'RAWWEIGHTS':
PskReader.read_types(fp, Psk.Weight, section, psk.weights) _read_types(fp, Psk.Weight, section, psk.weights)
elif section.name == b'FACE3200': elif section.name == b'FACE3200':
PskReader.read_types(fp, Psk.Face32, section, psk.faces) _read_types(fp, Psk.Face32, section, psk.faces)
elif section.name == b'VERTEXCOLOR': elif section.name == b'VERTEXCOLOR':
PskReader.read_types(fp, Color, section, psk.vertex_colors) _read_types(fp, Color, section, psk.vertex_colors)
elif section.name.startswith(b'EXTRAUVS'): elif section.name.startswith(b'EXTRAUVS'):
PskReader.read_types(fp, Vector2, section, psk.extra_uvs) _read_types(fp, Vector2, section, psk.extra_uvs)
elif section.name == b'VTXNORMS': elif section.name == b'VTXNORMS':
PskReader.read_types(fp, Vector3, section, psk.vertex_normals) _read_types(fp, Vector3, section, psk.vertex_normals)
else: else:
raise RuntimeError(f'Unrecognized section "{section.name} at position {15:fp.tell()}"') raise RuntimeError(f'Unrecognized section "{section.name} at position {15:fp.tell()}"')
return psk return psk