diff --git a/io_scene_psk_psa/__init__.py b/io_scene_psk_psa/__init__.py index 428a265..858aee6 100644 --- a/io_scene_psk_psa/__init__.py +++ b/io_scene_psk_psa/__init__.py @@ -2,6 +2,7 @@ from bpy.app.handlers import persistent from .shared import types as shared_types, helpers as shared_helpers from .shared import dfs as shared_dfs, ui as shared_ui +from .shared import operators as shared_operators from .psk import ( builder as psk_builder, importer as psk_importer, @@ -28,6 +29,8 @@ from .psa.export import ( from .psa.import_ import operators as psa_import_operators from .psa.import_ import ui as psa_import_ui, properties as psa_import_properties +from .psa import file_handlers as psa_file_handlers + _needs_reload = 'bpy' in locals() if _needs_reload: @@ -37,6 +40,7 @@ if _needs_reload: importlib.reload(shared_types) importlib.reload(shared_dfs) importlib.reload(shared_ui) + importlib.reload(shared_operators) importlib.reload(psk_builder) importlib.reload(psk_importer) @@ -56,6 +60,7 @@ if _needs_reload: importlib.reload(psa_import_properties) importlib.reload(psa_import_operators) importlib.reload(psa_import_ui) + importlib.reload(psa_file_handlers) import bpy from bpy.props import PointerProperty @@ -80,6 +85,7 @@ def psa_import_menu_func(self, context): _modules = ( shared_types, shared_ui, + shared_operators, psk_properties, psk_ui, psk_import_operators, @@ -91,7 +97,8 @@ _modules = ( psa_export_ui, psa_import_properties, psa_import_operators, - psa_import_ui + psa_import_ui, + psa_file_handlers, ) def register(): @@ -101,21 +108,23 @@ def register(): bpy.types.TOPBAR_MT_file_import.append(psk_import_menu_func) bpy.types.TOPBAR_MT_file_export.append(psa_export_menu_func) bpy.types.TOPBAR_MT_file_import.append(psa_import_menu_func) - bpy.types.Material.psk = PointerProperty(type=psk_properties.PSX_PG_material, options={'HIDDEN'}) - bpy.types.Scene.psx_export = PointerProperty(type=shared_types.PSX_PG_scene_export, options={'HIDDEN'}) - 
bpy.types.Scene.psa_import = PointerProperty(type=psa_import_properties.PSA_PG_import, options={'HIDDEN'}) - bpy.types.Scene.psa_export = PointerProperty(type=psa_export_properties.PSA_PG_export, options={'HIDDEN'}) - bpy.types.Scene.psk_export = PointerProperty(type=psk_export_properties.PSK_PG_export, options={'HIDDEN'}) - bpy.types.Action.psa_export = PointerProperty(type=shared_types.PSX_PG_action_export, options={'HIDDEN'}) + + setattr(bpy.types.Material, 'psk', PointerProperty(type=psk_properties.PSX_PG_material, options={'HIDDEN'})) + setattr(bpy.types.Scene, 'psx_export', PointerProperty(type=shared_types.PSX_PG_scene_export, options={'HIDDEN'})) + setattr(bpy.types.Scene, 'psa_import', PointerProperty(type=psa_import_properties.PSA_PG_import, options={'HIDDEN'})) + setattr(bpy.types.Scene, 'psa_export', PointerProperty(type=psa_export_properties.PSA_PG_export, options={'HIDDEN'})) + setattr(bpy.types.Scene, 'psk_export', PointerProperty(type=psk_export_properties.PSK_PG_export, options={'HIDDEN'})) + setattr(bpy.types.Action, 'psa_export', PointerProperty(type=shared_types.PSX_PG_action_export, options={'HIDDEN'})) def unregister(): - del bpy.types.Material.psk - del bpy.types.Scene.psx_export - del bpy.types.Scene.psa_import - del bpy.types.Scene.psa_export - del bpy.types.Scene.psk_export - del bpy.types.Action.psa_export + delattr(bpy.types.Material, 'psk') + delattr(bpy.types.Scene, 'psx_export') + delattr(bpy.types.Scene, 'psa_import') + delattr(bpy.types.Scene, 'psa_export') + delattr(bpy.types.Scene, 'psk_export') + delattr(bpy.types.Action, 'psa_export') + bpy.types.TOPBAR_MT_file_export.remove(psk_export_menu_func) bpy.types.TOPBAR_MT_file_import.remove(psk_import_menu_func) bpy.types.TOPBAR_MT_file_export.remove(psa_export_menu_func) diff --git a/io_scene_psk_psa/psa/builder.py b/io_scene_psk_psa/psa/builder.py index 54198b0..fa5154a 100644 --- a/io_scene_psk_psa/psa/builder.py +++ b/io_scene_psk_psa/psa/builder.py @@ -48,8 +48,8 @@ class 
PsaBuildOptions: def _get_pose_bone_location_and_rotation( - pose_bone: Optional[PoseBone], - armature_object: Optional[Object], + pose_bone: PoseBone | None, + armature_object: Object | None, export_space: str, scale: Vector, coordinate_system_transform: Matrix, @@ -57,6 +57,8 @@ def _get_pose_bone_location_and_rotation( ) -> Tuple[Vector, Quaternion]: is_false_root_bone = pose_bone is None and armature_object is None + # TODO: this is such a disaster; the false root bone idea needs revising. + if is_false_root_bone: pose_bone_matrix = coordinate_system_transform elif pose_bone is not None and pose_bone.parent is not None: @@ -144,7 +146,7 @@ def build_psa(context: Context, options: PsaBuildOptions) -> Psa: export_sequence.name = export_sequence.name.strip() # Save each armature object's current action and frame so that we can restore the state once we are done. - saved_armature_object_actions = {o: o.animation_data.action for o in options.armature_objects} + saved_armature_object_actions = {o: (o.animation_data.action if o.animation_data else None) for o in options.armature_objects} saved_frame_current = context.scene.frame_current # Now build the PSA sequences. @@ -197,8 +199,10 @@ def build_psa(context: Context, options: PsaBuildOptions) -> Psa: # Link the action to the animation data and update view layer. 
for armature_object in options.armature_objects: - armature_object.animation_data.action = export_sequence.nla_state.action + if armature_object.animation_data: + armature_object.animation_data.action = export_sequence.nla_state.action + assert context.view_layer context.view_layer.update() def add_key(location: Vector, rotation: Quaternion): diff --git a/io_scene_psk_psa/psa/export/operators.py b/io_scene_psk_psa/psa/export/operators.py index eccd916..438db27 100644 --- a/io_scene_psk_psa/psa/export/operators.py +++ b/io_scene_psk_psa/psa/export/operators.py @@ -1,23 +1,29 @@ +from abc import abstractmethod from collections import Counter -from typing import List, Iterable, Dict, Tuple, cast as typing_cast +from typing import List, Iterable, Dict, Protocol, Sequence, Tuple, cast as typing_cast import bpy import re from bpy.props import StringProperty -from bpy.types import Context, Action, Object, AnimData, TimelineMarker, Operator, Armature +from bpy.types import Context, Action, Object, AnimData, TimelineMarker, Operator, Armature, UILayout, Scene from bpy_extras.io_utils import ExportHelper from .properties import ( PSA_PG_export, PSA_PG_export_action_list_item, + PsaExportMixin, + PsaExportSequenceMixin, + PsaExportSequenceWithActionMixin, filter_sequences, get_sequences_from_name_and_frame_range, ) from .ui import PSA_UL_export_sequences from ..builder import build_psa, PsaBuildSequence, PsaBuildOptions from psk_psa_py.psa.writer import write_psa_to_file -from ...shared.helpers import populate_bone_collection_list, get_nla_strips_in_frame_range, PsxBoneCollection +from ...shared.helpers import get_collection_export_operator_from_context, get_collection_from_context, get_psk_input_objects_for_collection, populate_bone_collection_list, get_nla_strips_in_frame_range, PsxBoneCollection +from ...shared.types import BpyCollectionProperty, PSX_PG_action_export from ...shared.ui import draw_bone_filter_mode +from ...shared.operators import 
PSK_OT_bone_collection_list_populate, PSK_OT_bone_collection_list_select_all def get_sequences_propnames_from_source(sequence_source: str) -> Tuple[str, str]: @@ -59,18 +65,23 @@ def is_action_for_object(obj: Object, action: Action): return False -def update_actions_and_timeline_markers(context: Context, armature_objects: Iterable[Object]): - pg = getattr(context.scene, 'psa_export') +def update_actions_and_timeline_markers(context: Context, armature_objects: Sequence[Object], pg: PsaExportMixin): + assert context.scene is not None # Clear actions and markers. pg.action_list.clear() pg.marker_list.clear() pg.active_action_list.clear() - # Get animation data. - # TODO: Not sure how to handle this with multiple armatures. - animation_data_object = get_animation_data_object(context) - animation_data = animation_data_object.animation_data if animation_data_object else None + # TODO: this is cleared in the callback, although this should probably be changed. + # pg.nla_strip_list.clear() + + assert len(armature_objects) >= 0, 'Must have at least one armature object' + + # TODO: for now, use the first armature object's animation data. + # animation_data_object = get_animation_data_object(context, pg) + armature_object = armature_objects[0] + animation_data = armature_object.animation_data if armature_object else None if animation_data is None: return @@ -83,7 +94,7 @@ def update_actions_and_timeline_markers(context: Context, armature_objects: Iter for (name, frame_start, frame_end) in get_sequences_from_action(action): item = pg.action_list.add() - item.action = action + item.action_name = action.name item.name = name item.is_selected = False item.is_pose_marker = False @@ -93,12 +104,10 @@ def update_actions_and_timeline_markers(context: Context, armature_objects: Iter # Pose markers are not guaranteed to be in frame-order, so make sure that they are. 
pose_markers = sorted(action.pose_markers, key=lambda x: x.frame) for pose_marker_index, pose_marker in enumerate(pose_markers): - if pose_marker.name.strip() == '' or pose_marker.name.startswith('#'): - continue sequences = get_sequences_from_action_pose_markers(action, pose_markers, pose_marker, pose_marker_index) for (name, frame_start, frame_end) in sequences: item = pg.action_list.add() - item.action = action + item.action_name = action.name item.name = name item.is_selected = False item.is_pose_marker = True @@ -107,7 +116,7 @@ def update_actions_and_timeline_markers(context: Context, armature_objects: Iter # Populate timeline markers list. marker_names = [x.name for x in context.scene.timeline_markers] - sequence_frame_ranges = get_timeline_marker_sequence_frame_ranges(animation_data, context, marker_names) + sequence_frame_ranges = get_timeline_marker_sequence_frame_ranges(animation_data, context.scene, marker_names) for marker_name in marker_names: if marker_name not in sequence_frame_ranges: @@ -124,30 +133,32 @@ def update_actions_and_timeline_markers(context: Context, armature_objects: Iter item.frame_end = frame_end # Populate the active action list. 
- for armature_object in context.selected_objects: - if armature_object.type != 'ARMATURE': + for armature_object in armature_objects: + active_action = armature_object.animation_data.action if armature_object.animation_data else None + if active_action is None: continue - action = armature_object.animation_data.action if armature_object.animation_data else None - if action is None: - continue - item = pg.active_action_list.add() - item.name = action.name - item.armature_object = armature_object - item.action = action - item.frame_start = int(item.action.frame_range[0]) - item.frame_end = int(item.action.frame_range[1]) - item.is_selected = True + sequences = get_sequences_from_action(active_action) + for (sequence_name, frame_start, frame_end) in sequences: + # TODO: for some reason we aren't doing the sequence name parsing here. + item = pg.active_action_list.add() + item.name = sequence_name + item.armature_object_name = armature_object.name + item.action_name = active_action.name + item.frame_start = frame_start + item.frame_end = frame_end + item.is_selected = True def get_sequence_fps(context: Context, fps_source: str, fps_custom: float, actions: Iterable[Action]) -> float: match fps_source: case 'SCENE': + assert context.scene return context.scene.render.fps case 'CUSTOM': return fps_custom case 'ACTION_METADATA': # Get the minimum value of action metadata FPS values. - return min([action.psa_export.fps for action in actions]) + return min([typing_cast(PSX_PG_action_export, getattr(action, 'psa_export')).fps for action in actions]) case _: assert False, f'Invalid FPS source: {fps_source}' @@ -160,41 +171,25 @@ def get_sequence_compression_ratio( match compression_ratio_source: case 'ACTION_METADATA': # Get the minimum value of action metadata compression ratio values. 
- return min(map(lambda action: action.psa_export.compression_ratio, actions)) + return min(map(lambda action: typing_cast(PSX_PG_action_export, getattr(action, 'psa_export')).compression_ratio, actions)) case 'CUSTOM': return compression_ratio_custom case _: assert False, f'Invalid compression ratio source: {compression_ratio_source}' -def get_animation_data_object(context: Context) -> Object: - pg: PSA_PG_export = getattr(context.scene, 'psa_export') - - active_object = context.view_layer.objects.active - - if active_object is None or active_object.type != 'ARMATURE': - raise RuntimeError('Active object must be an Armature') - - if pg.sequence_source != 'ACTIONS' and pg.should_override_animation_data: - animation_data_object = pg.animation_data_override - else: - animation_data_object = active_object - - return animation_data_object - - def get_timeline_marker_sequence_frame_ranges( animation_data: AnimData, - context: Context, + scene: Scene, marker_names: List[str], - ) -> Dict: + ) -> dict[str, tuple[int, int]]: # Timeline markers need to be sorted so that we can determine the sequence start and end positions. - sequence_frame_ranges = dict() - sorted_timeline_markers = list(sorted(context.scene.timeline_markers, key=lambda x: x.frame)) + sequence_frame_ranges: dict[str, tuple[int, int]] = dict() + sorted_timeline_markers = list(sorted(scene.timeline_markers, key=lambda x: x.frame)) sorted_timeline_marker_names = [x.name for x in sorted_timeline_markers] for marker_name in marker_names: - marker = context.scene.timeline_markers[marker_name] + marker = scene.timeline_markers[marker_name] frame_start = marker.frame # Determine the final frame of the sequence based on the next marker. # If no subsequent marker exists, use the maximum frame_end from all NLA strips. 
@@ -253,6 +248,8 @@ def get_sequences_from_action_pose_markers( ): frame_start = pose_marker.frame sequence_name = pose_marker.name + if pose_marker.name.strip() == '' or pose_marker.name.startswith('#'): + return if pose_marker.name.startswith('!'): # If the pose marker name starts with an exclamation mark, only export the first frame. frame_end = frame_start @@ -264,7 +261,7 @@ def get_sequences_from_action_pose_markers( yield from get_sequences_from_name_and_frame_range(sequence_name, frame_start, frame_end) -def get_visible_sequences(pg: PSA_PG_export, sequences) -> List[PSA_PG_export_action_list_item]: +def get_visible_sequences(pg: PsaExportMixin, sequences) -> List[PSA_PG_export_action_list_item]: visible_sequences = [] for i, flag in enumerate(filter_sequences(pg, sequences)): if bool(flag & (1 << 30)): @@ -272,10 +269,307 @@ def get_visible_sequences(pg: PSA_PG_export, sequences) -> List[PSA_PG_export_ac return visible_sequences + +class PSA_OT_export_collection(Operator, ExportHelper, PsaExportMixin): + bl_idname = 'psa.export_collection' + bl_label = 'Export' + bl_options = {'INTERNAL'} + bl_description = 'Export actions to PSA' + filename_ext = '.psa' + filter_glob: StringProperty(default='*.psa', options={'HIDDEN'}) + filepath: StringProperty( + name='File Path', + description='File path used for exporting the PSA file', + maxlen=1024, + default='') + + def execute(self, context: Context): + # TODO: get the armature objects from the collection export operator + collection = get_collection_from_context(context) + if collection is None: + self.report({'ERROR'}, 'No collection found for export') + return {'CANCELLED'} + import_objects = get_psk_input_objects_for_collection(collection) + + options = create_psa_export_options(context, import_objects.armature_objects, self) + + if len(options.sequences) == 0: + self.report({'ERROR'}, 'No sequences were selected for export') + return {'CANCELLED'} + + try: + psa = build_psa(context, options) + 
self.report({'INFO'}, f'PSA export successful') + except RuntimeError as e: + self.report({'ERROR_INVALID_CONTEXT'}, str(e)) + return {'CANCELLED'} + + write_psa_to_file(psa, self.filepath) + + return {'FINISHED'} + + def draw(self, context: Context): + layout = self.layout + + assert layout is not None + + flow = layout.grid_flow(row_major=True) + flow.use_property_split = True + flow.use_property_decorate = False + + # Sequences + draw_sequences_panel(layout, self, + PSA_OT_export_collection_sequences_select_all.bl_idname, + PSA_OT_export_collection_sequences_deselect_all.bl_idname, + ) + + # Bones + bones_header, bones_panel = layout.panel('Bones', default_closed=False) + bones_header.label(text='Bones', icon='BONE_DATA') + if bones_panel: + draw_bone_filter_mode(bones_panel, self, True) + + if self.bone_filter_mode == 'BONE_COLLECTIONS': + row = bones_panel.row() + rows = max(3, min(len(self.bone_collection_list), 10)) + row.template_list('PSX_UL_bone_collection_list', '', self, 'bone_collection_list', self, 'bone_collection_list_index', rows=rows) + col = row.column(align=True) + col.operator(PSK_OT_bone_collection_list_populate.bl_idname, text='', icon='FILE_REFRESH') + col.separator() + op = col.operator(PSK_OT_bone_collection_list_select_all.bl_idname, text='', icon='CHECKBOX_HLT') + op.is_selected = True + op = col.operator(PSK_OT_bone_collection_list_select_all.bl_idname, text='', icon='CHECKBOX_DEHLT') + op.is_selected = False + + advanced_bones_header, advanced_bones_panel = bones_panel.panel('Advanced', default_closed=True) + advanced_bones_header.label(text='Advanced') + if advanced_bones_panel: + flow = advanced_bones_panel.grid_flow(row_major=True) + flow.use_property_split = True + flow.use_property_decorate = False + flow.prop(self, 'root_bone_name') + + # Transform + transform_header, transform_panel = layout.panel('Transform', default_closed=False) + transform_header.label(text='Transform', icon='DRIVER_TRANSFORM') + if transform_panel: + flow = 
transform_panel.grid_flow(row_major=True) + flow.use_property_split = True + flow.use_property_decorate = False + flow.prop(self, 'export_space') + flow.prop(self, 'transform_source') + + flow = transform_panel.grid_flow(row_major=True) + flow.use_property_split = True + flow.use_property_decorate = False + + match self.transform_source: + case 'SCENE': + transform_source = getattr(context.scene, 'psx_export') + flow.enabled = False + case 'CUSTOM': + transform_source = self + case _: + assert False, f'Invalid transform source: {self.transform_source}' + + flow.prop(transform_source, 'scale') + flow.prop(transform_source, 'forward_axis') + flow.prop(transform_source, 'up_axis') + + +def draw_sequences_panel( + layout: UILayout, + pg: PsaExportMixin, + sequences_select_all_operator_idname: str, + sequences_deselect_all_operator_idname: str, + ): + sequences_header, sequences_panel = layout.panel('Sequences', default_closed=False) + sequences_header.label(text='Sequences', icon='ACTION') + + if sequences_panel: + sequences_panel.operator(PSA_OT_export_collection_populate_sequences.bl_idname, text='Refresh', icon='FILE_REFRESH') + + flow = sequences_panel.grid_flow() + flow.use_property_split = True + flow.use_property_decorate = False + flow.prop(pg, 'sequence_source', text='Source') + + if pg.sequence_source == 'NLA_TRACK_STRIPS': + flow = sequences_panel.grid_flow() + flow.use_property_split = True + flow.use_property_decorate = False + flow.prop(pg, 'nla_track') + + # SELECT ALL/NONE + row = sequences_panel.row(align=True) + row.label(text='Select') + row.operator(sequences_select_all_operator_idname, text='All', icon='CHECKBOX_HLT') + row.operator(sequences_deselect_all_operator_idname, text='None', icon='CHECKBOX_DEHLT') + + propname, active_propname = get_sequences_propnames_from_source(pg.sequence_source) + sequences_panel.template_list(PSA_UL_export_sequences.bl_idname, '', pg, propname, pg, active_propname, + rows=max(3, min(len(getattr(pg, propname)), 10))) 
+ + name_header, name_panel = sequences_panel.panel('Name', default_closed=False) + name_header.label(text='Name') + if name_panel: + flow = name_panel.grid_flow() + flow.use_property_split = True + flow.use_property_decorate = False + flow.prop(pg, 'sequence_name_prefix', text='Name Prefix') + flow.prop(pg, 'sequence_name_suffix') + + # Determine if there is going to be a naming conflict and display an error, if so. + selected_items = [x for x in pg.action_list if x.is_selected] + action_names = [x.name for x in selected_items] + action_name_counts = Counter(action_names) + for action_name, count in action_name_counts.items(): + if count > 1: + layout.label(text=f'Duplicate action: {action_name}', icon='ERROR') + break + + # Group + group_header, group_panel = sequences_panel.panel('Group', default_closed=True) + group_header.label(text='Group') + if group_panel is not None: + group_flow = group_panel.grid_flow() + group_flow.use_property_split = True + group_flow.use_property_decorate = False + group_flow.prop(pg, 'group_source') + if pg.group_source == 'CUSTOM': + group_flow.prop(pg, 'group_custom', placeholder='Group') + + # Sampling + sampling_header, sampling_panel = sequences_panel.panel('Data Source', default_closed=False) + sampling_header.label(text='Sampling') + if sampling_panel: + flow = sampling_panel.grid_flow() + flow.use_property_split = True + flow.use_property_decorate = False + + # SAMPLING MODE + flow.prop(pg, 'sampling_mode', text='Sampling Mode') + + # FPS + col = flow.row(align=True) + col.prop(pg, 'fps_source', text='FPS') + if pg.fps_source == 'CUSTOM': + col.prop(pg, 'fps_custom', text='') + + # COMPRESSION RATIO + col = flow.row(align=True) + col.prop(pg, 'compression_ratio_source', text='Compression Ratio') + if pg.compression_ratio_source == 'CUSTOM': + col.prop(pg, 'compression_ratio_custom', text='') + + +def create_psa_export_options(context: Context, armature_objects: Sequence[Object], pg: PsaExportMixin) -> PsaBuildOptions: + if 
len(armature_objects) == 0: + raise RuntimeError(f'No armatures') + + animation_data = armature_objects[0].animation_data + export_sequences: List[PsaBuildSequence] = [] + + # TODO: this needs to be changed so that we iterate over all of the armature objects? + # do we need to check for primary key? (data vs. object?) + + def get_export_sequence_group(group_source: str, group_custom: str | None, action: Action | None) -> str | None: + match group_source: + case 'ACTIONS': + if action is None: + return None + action_psa_export = typing_cast(PSX_PG_action_export, getattr(action, 'psa_export')) + return action_psa_export.group + case 'CUSTOM': + return group_custom + case _: + return None + + match pg.sequence_source: + case 'ACTIONS': + for action_item in filter(lambda x: x.is_selected, pg.action_list): + if action_item.action is None: + continue + if len(action_item.action.layers) == 0: + continue + export_sequence = PsaBuildSequence(context.active_object, animation_data) + export_sequence.name = action_item.name + export_sequence.group = get_export_sequence_group(pg.group_source, pg.group_custom, action_item.action) + export_sequence.nla_state.action = action_item.action + export_sequence.nla_state.frame_start = action_item.frame_start + export_sequence.nla_state.frame_end = action_item.frame_end + export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [action_item.action]) + export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, [action_item.action]) + export_sequence.key_quota = action_item.action.psa_export.key_quota + export_sequences.append(export_sequence) + case 'TIMELINE_MARKERS': + for marker_item in filter(lambda x: x.is_selected, pg.marker_list): + nla_strips_actions: List[Action] = [] + for nla_strip in get_nla_strips_in_frame_range(animation_data, marker_item.frame_start, marker_item.frame_end): + if nla_strip.action: + 
nla_strips_actions.append(nla_strip.action) + export_sequence = PsaBuildSequence(context.active_object, animation_data) + export_sequence.name = marker_item.name + export_sequence.group = get_export_sequence_group(pg.group_source, pg.group_custom, next(iter(nla_strips_actions), None)) + export_sequence.nla_state.frame_start = marker_item.frame_start + export_sequence.nla_state.frame_end = marker_item.frame_end + export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, nla_strips_actions) + export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, nla_strips_actions) + export_sequences.append(export_sequence) + case 'NLA_TRACK_STRIPS': + for nla_strip_item in filter(lambda x: x.is_selected, pg.nla_strip_list): + if nla_strip_item.action is None: + continue + export_sequence = PsaBuildSequence(context.active_object, animation_data) + export_sequence.name = nla_strip_item.name + export_sequence.group = get_export_sequence_group(pg.group_source, pg.group_custom, nla_strip_item.action) + export_sequence.nla_state.frame_start = nla_strip_item.frame_start + export_sequence.nla_state.frame_end = nla_strip_item.frame_end + export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [nla_strip_item.action]) + export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, [nla_strip_item.action]) + export_sequence.key_quota = nla_strip_item.action.psa_export.key_quota + export_sequences.append(export_sequence) + case 'ACTIVE_ACTION': + for active_action_item in filter(lambda x: x.is_selected, pg.active_action_list): + export_sequence = PsaBuildSequence(active_action_item.armature_object, active_action_item.armature_object.animation_data) + action = active_action_item.action + if action is None: + continue + export_sequence.name = action.name + export_sequence.group = get_export_sequence_group(pg.group_source, 
pg.group_custom, action) + export_sequence.nla_state.action = action + export_sequence.nla_state.frame_start = int(action.frame_range[0]) + export_sequence.nla_state.frame_end = int(action.frame_range[1]) + export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [action]) + export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, [action]) + export_sequence.key_quota = action.psa_export.key_quota + export_sequences.append(export_sequence) + case _: + assert False, f'Invalid sequence source: {pg.sequence_source}' + + options = PsaBuildOptions() + options.armature_objects = list(armature_objects) + options.animation_data = animation_data + options.sequences = export_sequences + options.bone_filter_mode = pg.bone_filter_mode + options.bone_collection_indices = [PsxBoneCollection(x.armature_object_name, x.armature_data_name, x.index) for x in pg.bone_collection_list if x.is_selected] + options.sequence_name_prefix = pg.sequence_name_prefix + options.sequence_name_suffix = pg.sequence_name_suffix + options.sampling_mode = pg.sampling_mode + options.export_space = pg.export_space + options.scale = pg.scale + options.forward_axis = pg.forward_axis + options.up_axis = pg.up_axis + options.root_bone_name = pg.root_bone_name + options.sequence_source = pg.sequence_source + + return options + + class PSA_OT_export(Operator, ExportHelper): bl_idname = 'psa.export' bl_label = 'Export' - bl_options = {'INTERNAL', 'UNDO'} + bl_options = {'INTERNAL'} bl_description = 'Export actions to PSA' filename_ext = '.psa' filter_glob: StringProperty(default='*.psa', options={'HIDDEN'}) @@ -301,90 +595,12 @@ class PSA_OT_export(Operator, ExportHelper): def draw(self, context): layout = self.layout assert layout - pg = getattr(context.scene, 'psa_export') + pg = typing_cast(PSA_PG_export, getattr(context.scene, 'psa_export')) - sequences_header, sequences_panel = layout.panel('Sequences', 
default_closed=False) - sequences_header.label(text='Sequences', icon='ACTION') - - if sequences_panel: - flow = sequences_panel.grid_flow() - flow.use_property_split = True - flow.use_property_decorate = False - flow.prop(pg, 'sequence_source', text='Source') - - if pg.sequence_source in {'TIMELINE_MARKERS', 'NLA_TRACK_STRIPS'}: - # ANIMDATA SOURCE - flow.prop(pg, 'should_override_animation_data') - if pg.should_override_animation_data: - flow.prop(pg, 'animation_data_override', text=' ') - - if pg.sequence_source == 'NLA_TRACK_STRIPS': - flow = sequences_panel.grid_flow() - flow.use_property_split = True - flow.use_property_decorate = False - flow.prop(pg, 'nla_track') - - # SELECT ALL/NONE - row = sequences_panel.row(align=True) - row.label(text='Select') - row.operator(PSA_OT_export_actions_select_all.bl_idname, text='All', icon='CHECKBOX_HLT') - row.operator(PSA_OT_export_actions_deselect_all.bl_idname, text='None', icon='CHECKBOX_DEHLT') - - propname, active_propname = get_sequences_propnames_from_source(pg.sequence_source) - sequences_panel.template_list(PSA_UL_export_sequences.bl_idname, '', pg, propname, pg, active_propname, - rows=max(3, min(len(getattr(pg, propname)), 10))) - - name_header, name_panel = sequences_panel.panel('Name', default_closed=False) - name_header.label(text='Name') - if name_panel: - flow = name_panel.grid_flow() - flow.use_property_split = True - flow.use_property_decorate = False - flow.prop(pg, 'sequence_name_prefix', text='Name Prefix') - flow.prop(pg, 'sequence_name_suffix') - - # Determine if there is going to be a naming conflict and display an error, if so. 
- selected_items = [x for x in pg.action_list if x.is_selected] - action_names = [x.name for x in selected_items] - action_name_counts = Counter(action_names) - for action_name, count in action_name_counts.items(): - if count > 1: - layout.label(text=f'Duplicate action: {action_name}', icon='ERROR') - break - - # Group - group_header, group_panel = sequences_panel.panel('Group', default_closed=True) - group_header.label(text='Group') - if group_panel is not None: - group_flow = group_panel.grid_flow() - group_flow.use_property_split = True - group_flow.use_property_decorate = False - group_flow.prop(pg, 'group_source') - if pg.group_source == 'CUSTOM': - group_flow.prop(pg, 'group_custom', placeholder='Group') - - # Sampling - sampling_header, sampling_panel = sequences_panel.panel('Data Source', default_closed=False) - sampling_header.label(text='Sampling') - if sampling_panel: - flow = sampling_panel.grid_flow() - flow.use_property_split = True - flow.use_property_decorate = False - - # SAMPLING MODE - flow.prop(pg, 'sampling_mode', text='Sampling Mode') - - # FPS - col = flow.row(align=True) - col.prop(pg, 'fps_source', text='FPS') - if pg.fps_source == 'CUSTOM': - col.prop(pg, 'fps_custom', text='') - - # COMPRESSION RATIO - col = flow.row(align=True) - col.prop(pg, 'compression_ratio_source', text='Compression Ratio') - if pg.compression_ratio_source == 'CUSTOM': - col.prop(pg, 'compression_ratio_custom', text='') + # SEQUENCES + draw_sequences_panel(layout, pg, + PSA_OT_export_sequences_select_all.bl_idname, + PSA_OT_export_sequences_deselect_all.bl_idname) # BONES bones_header, bones_panel = layout.panel('Bones', default_closed=False) @@ -405,7 +621,7 @@ class PSA_OT_export(Operator, ExportHelper): rows=rows ) - bones_advanced_header, bones_advanced_panel = layout.panel('Bones Advanced', default_closed=True) + bones_advanced_header, bones_advanced_panel = bones_panel.panel('Bones Advanced', default_closed=True) bones_advanced_header.label(text='Advanced') if 
bones_advanced_panel: flow = bones_advanced_panel.grid_flow() @@ -415,7 +631,7 @@ class PSA_OT_export(Operator, ExportHelper): # TRANSFORM transform_header, transform_panel = layout.panel('Advanced', default_closed=False) - transform_header.label(text='Transform') + transform_header.label(text='Transform', icon='DRIVER_TRANSFORM') if transform_panel: flow = transform_panel.grid_flow(row_major=True) @@ -437,8 +653,7 @@ class PSA_OT_export(Operator, ExportHelper): if context.scene.is_nla_tweakmode: raise RuntimeError('Cannot export PSA while in NLA tweak mode') - - def invoke(self, context, _event): + def invoke(self, context, event): try: self._check_context(context) except RuntimeError as e: @@ -447,6 +662,8 @@ class PSA_OT_export(Operator, ExportHelper): pg: PSA_PG_export = getattr(context.scene, 'psa_export') + assert context.view_layer is not None + self.armature_objects = [x for x in context.view_layer.objects.selected if x.type == 'ARMATURE'] for armature_object in self.armature_objects: @@ -455,117 +672,28 @@ class PSA_OT_export(Operator, ExportHelper): if armature_object.animation_data is None: armature_object.animation_data_create() - update_actions_and_timeline_markers(context, self.armature_objects) + + pg = getattr(context.scene, 'psa_export') + update_actions_and_timeline_markers(context, self.armature_objects, pg) populate_bone_collection_list( pg.bone_collection_list, self.armature_objects, primary_key='DATA' if pg.sequence_source == 'ACTIVE_ACTION' else 'OBJECT', ) - context.window_manager.fileselect_add(self) + if context.window_manager is not None: + context.window_manager.fileselect_add(self) return {'RUNNING_MODAL'} def execute(self, context): pg = typing_cast(PSA_PG_export, getattr(context.scene, 'psa_export')) + options = create_psa_export_options(context, self.armature_objects, pg) - # Populate the export sequence list. 
- animation_data_object = get_animation_data_object(context) - animation_data = animation_data_object.animation_data - - if animation_data is None: - raise RuntimeError(f'No animation data for object \'{animation_data_object.name}\'') - - if context.active_object is None: - raise RuntimeError('No active object') - - export_sequences: List[PsaBuildSequence] = [] - - def get_export_sequence_group(group_source: str, group_custom: str | None, action: Action | None) -> str | None: - match group_source: - case 'ACTIONS': - return action.psa_export.group if action else None - case 'CUSTOM': - return group_custom - case _: - return None - - match pg.sequence_source: - case 'ACTIONS': - for action_item in filter(lambda x: x.is_selected, pg.action_list): - if len(action_item.action.layers) == 0: - continue - export_sequence = PsaBuildSequence(context.active_object, animation_data) - export_sequence.name = action_item.name - export_sequence.group = get_export_sequence_group(pg.group_source, pg.group_custom, action_item.action) - export_sequence.nla_state.action = action_item.action - export_sequence.nla_state.frame_start = action_item.frame_start - export_sequence.nla_state.frame_end = action_item.frame_end - export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [action_item.action]) - export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, [action_item.action]) - export_sequence.key_quota = action_item.action.psa_export.key_quota - export_sequences.append(export_sequence) - case 'TIMELINE_MARKERS': - for marker_item in filter(lambda x: x.is_selected, pg.marker_list): - nla_strips_actions: List[Action] = [] - for nla_strip in get_nla_strips_in_frame_range(animation_data, marker_item.frame_start, marker_item.frame_end): - if nla_strip.action: - nla_strips_actions.append(nla_strip.action) - export_sequence = PsaBuildSequence(context.active_object, animation_data) - export_sequence.name 
= marker_item.name - export_sequence.group = get_export_sequence_group(pg.group_source, pg.group_custom, next(iter(nla_strips_actions), None)) - export_sequence.nla_state.frame_start = marker_item.frame_start - export_sequence.nla_state.frame_end = marker_item.frame_end - export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, nla_strips_actions) - export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, nla_strips_actions) - export_sequences.append(export_sequence) - case 'NLA_TRACK_STRIPS': - for nla_strip_item in filter(lambda x: x.is_selected, pg.nla_strip_list): - export_sequence = PsaBuildSequence(context.active_object, animation_data) - export_sequence.name = nla_strip_item.name - export_sequence.group = get_export_sequence_group(pg.group_source, pg.group_custom, nla_strip_item.action) - export_sequence.nla_state.frame_start = nla_strip_item.frame_start - export_sequence.nla_state.frame_end = nla_strip_item.frame_end - export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [nla_strip_item.action]) - export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, [nla_strip_item.action]) - export_sequence.key_quota = nla_strip_item.action.psa_export.key_quota - export_sequences.append(export_sequence) - case 'ACTIVE_ACTION': - for active_action_item in filter(lambda x: x.is_selected, pg.active_action_list): - export_sequence = PsaBuildSequence(active_action_item.armature_object, active_action_item.armature_object.animation_data) - action = active_action_item.action - export_sequence.name = action.name - export_sequence.group = get_export_sequence_group(pg.group_source, pg.group_custom, action) - export_sequence.nla_state.action = action - export_sequence.nla_state.frame_start = int(action.frame_range[0]) - export_sequence.nla_state.frame_end = int(action.frame_range[1]) - 
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [action]) - export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, [action]) - export_sequence.key_quota = action.psa_export.key_quota - export_sequences.append(export_sequence) - case _: - assert False, f'Invalid sequence source: {pg.sequence_source}' - - if len(export_sequences) == 0: + if len(options.sequences) == 0: self.report({'ERROR'}, 'No sequences were selected for export') return {'CANCELLED'} - options = PsaBuildOptions() - options.armature_objects = self.armature_objects - options.animation_data = animation_data - options.sequences = export_sequences - options.bone_filter_mode = pg.bone_filter_mode - options.bone_collection_indices = [PsxBoneCollection(x.armature_object_name, x.armature_data_name, x.index) for x in pg.bone_collection_list if x.is_selected] - options.sequence_name_prefix = pg.sequence_name_prefix - options.sequence_name_suffix = pg.sequence_name_suffix - options.sampling_mode = pg.sampling_mode - options.export_space = pg.export_space - options.scale = pg.scale - options.forward_axis = pg.forward_axis - options.up_axis = pg.up_axis - options.root_bone_name = pg.root_bone_name - options.sequence_source = pg.sequence_source - try: psa = build_psa(context, options) self.report({'INFO'}, f'PSA export successful') @@ -578,15 +706,15 @@ class PSA_OT_export(Operator, ExportHelper): return {'FINISHED'} -class PSA_OT_export_actions_select_all(Operator): - bl_idname = 'psa.export_actions_select_all' - bl_label = 'Select All' - bl_description = 'Select all visible sequences' - bl_options = {'INTERNAL'} +class PsaExportActionsSelectOperator(Operator): + @classmethod + @abstractmethod + def get_psa_export(cls, context: Context) -> PsaExportMixin: + pass @classmethod - def get_item_list(cls, context): - pg = context.scene.psa_export + def get_item_list(cls, context: Context): + pg = 
cls.get_psa_export(context) match pg.sequence_source: case 'ACTIONS': return pg.action_list @@ -599,49 +727,40 @@ class PSA_OT_export_actions_select_all(Operator): case _: assert False, f'Invalid sequence source: {pg.sequence_source}' - @classmethod - def poll(cls, context): - pg = getattr(context.scene, 'psa_export') - item_list = cls.get_item_list(context) - visible_sequences = get_visible_sequences(pg, item_list) - has_unselected_sequences = any(map(lambda item: not item.is_selected, visible_sequences)) - return has_unselected_sequences - +class PsaExportActionsSelectAllOperator(PsaExportActionsSelectOperator): def execute(self, context): - pg = getattr(context.scene, 'psa_export') + pg = self.__class__.get_psa_export(context) sequences = self.get_item_list(context) for sequence in get_visible_sequences(pg, sequences): sequence.is_selected = True return {'FINISHED'} -class PSA_OT_export_actions_deselect_all(Operator): - bl_idname = 'psa.export_sequences_deselect_all' - bl_label = 'Deselect All' - bl_description = 'Deselect all visible sequences' +class PSA_OT_export_sequences_select_all(PsaExportActionsSelectAllOperator): + bl_idname = 'psa.export_actions_select_all' + bl_label = 'Select All' + bl_description = 'Select all visible sequences' bl_options = {'INTERNAL'} @classmethod - def get_item_list(cls, context): - pg = context.scene.psa_export - match pg.sequence_source: - case 'ACTIONS': - return pg.action_list - case 'TIMELINE_MARKERS': - return pg.marker_list - case 'NLA_TRACK_STRIPS': - return pg.nla_strip_list - case 'ACTIVE_ACTION': - return pg.active_action_list - case _: - return None + def get_psa_export(cls, context: Context) -> PsaExportMixin: + return typing_cast(PsaExportMixin, getattr(context.scene, 'psa_export')) + + +class PSA_OT_export_collection_sequences_select_all(PsaExportActionsSelectAllOperator): + bl_idname = 'psa.export_collection_sequences_select_all' + bl_label = 'Select All' + bl_description = 'Select all visible sequences' + 
bl_options = {'INTERNAL'} @classmethod - def poll(cls, context): - item_list = cls.get_item_list(context) - has_selected_items = any(map(lambda item: item.is_selected, item_list)) - return len(item_list) > 0 and has_selected_items + def get_psa_export(cls, context: Context) -> PsaExportMixin: + operator = get_collection_export_operator_from_context(context) + operator = typing_cast(PsaExportMixin, operator) + return operator + +class PsaExportActionsDeselectAllOperator(PsaExportActionsSelectOperator): def execute(self, context): pg = getattr(context.scene, 'psa_export') item_list = self.get_item_list(context) @@ -650,6 +769,30 @@ class PSA_OT_export_actions_deselect_all(Operator): return {'FINISHED'} +class PSA_OT_export_collection_sequences_deselect_all(PsaExportActionsDeselectAllOperator): + bl_idname = 'psa.export_collection_sequences_deselect_all' + bl_label = 'Deselect All' + bl_description = 'Deselect all visible sequences' + bl_options = {'INTERNAL'} + + @classmethod + def get_psa_export(cls, context: Context) -> PsaExportMixin: + operator = get_collection_export_operator_from_context(context) + operator = typing_cast(PsaExportMixin, operator) + return operator + + +class PSA_OT_export_sequences_deselect_all(PsaExportActionsDeselectAllOperator): + bl_idname = 'psa.export_sequences_deselect_all' + bl_label = 'Deselect All' + bl_description = 'Deselect all visible sequences' + bl_options = {'INTERNAL'} + + @classmethod + def get_psa_export(cls, context: Context) -> PsaExportMixin: + return typing_cast(PsaExportMixin, getattr(context.scene, 'psa_export')) + + class PSA_OT_export_bone_collections_select_all(Operator): bl_idname = 'psa.export_bone_collections_select_all' bl_label = 'Select All' @@ -658,13 +801,13 @@ class PSA_OT_export_bone_collections_select_all(Operator): @classmethod def poll(cls, context): - pg = getattr(context.scene, 'psa_export') + pg = typing_cast(PSA_PG_export, getattr(context.scene, 'psa_export')) item_list = pg.bone_collection_list 
has_unselected_items = any(map(lambda action: not action.is_selected, item_list)) return len(item_list) > 0 and has_unselected_items def execute(self, context): - pg = getattr(context.scene, 'psa_export') + pg = typing_cast(PSA_PG_export, getattr(context.scene, 'psa_export')) for item in pg.bone_collection_list: item.is_selected = True return {'FINISHED'} @@ -678,24 +821,67 @@ class PSA_OT_export_bone_collections_deselect_all(Operator): @classmethod def poll(cls, context): - pg = getattr(context.scene, 'psa_export') + pg = typing_cast(PSA_PG_export, getattr(context.scene, 'psa_export')) item_list = pg.bone_collection_list has_selected_actions = any(map(lambda action: action.is_selected, item_list)) return len(item_list) > 0 and has_selected_actions def execute(self, context): - pg = getattr(context.scene, 'psa_export') + pg = typing_cast(PSA_PG_export, getattr(context.scene, 'psa_export')) for action in pg.bone_collection_list: action.is_selected = False return {'FINISHED'} +class PSA_OT_export_collection_populate_sequences(Operator): + bl_idname = 'psa.export_collection_populate_sequences' + bl_label = 'Populate Sequences' + bl_description = 'Populate the sequences list based on the armatures in the collection' + bl_options = {'INTERNAL'} + + def execute(self, context: Context): + export_operator = get_collection_export_operator_from_context(context) + assert export_operator is not None + export_operator = typing_cast(PSA_OT_export_collection, export_operator) + collection = get_collection_from_context(context) + if collection is None: + self.report({'ERROR'}, 'No collection found in context') + return {'CANCELLED'} + input_objects = get_psk_input_objects_for_collection(collection) + + # Keep track of what sequences were selected, then restore the selected status after we have updated the lists. 
+ def store_is_selected_for_sequence_list(sequences: Iterable[PsaExportSequenceMixin]) -> dict[int, bool]: + return {hash(x): x.is_selected for x in sequences} + + def restore_is_selected_for_sequence_list(sequence_list: Iterable[PsaExportSequenceMixin], is_selected_map: dict[int, bool]): + for sequence in sequence_list: + sequence.is_selected = is_selected_map.get(hash(sequence), False) + + action_list_is_selected = store_is_selected_for_sequence_list(export_operator.action_list) + markers_list_is_selected = store_is_selected_for_sequence_list(export_operator.marker_list) + nla_strip_list_is_selected = store_is_selected_for_sequence_list(export_operator.nla_strip_list) + active_action_list_is_selected = store_is_selected_for_sequence_list(export_operator.active_action_list) + + update_actions_and_timeline_markers(context, input_objects.armature_objects, export_operator) + + restore_is_selected_for_sequence_list(export_operator.action_list, action_list_is_selected) + restore_is_selected_for_sequence_list(export_operator.marker_list, markers_list_is_selected) + restore_is_selected_for_sequence_list(export_operator.nla_strip_list, nla_strip_list_is_selected) + restore_is_selected_for_sequence_list(export_operator.active_action_list, active_action_list_is_selected) + + return {'FINISHED'} + + _classes = ( PSA_OT_export, - PSA_OT_export_actions_select_all, - PSA_OT_export_actions_deselect_all, + PSA_OT_export_collection, + PSA_OT_export_sequences_select_all, + PSA_OT_export_sequences_deselect_all, + PSA_OT_export_collection_sequences_select_all, + PSA_OT_export_collection_sequences_deselect_all, PSA_OT_export_bone_collections_select_all, PSA_OT_export_bone_collections_deselect_all, + PSA_OT_export_collection_populate_sequences, ) from bpy.utils import register_classes_factory diff --git a/io_scene_psk_psa/psa/export/properties.py b/io_scene_psk_psa/psa/export/properties.py index a6f237b..6116fd8 100644 --- a/io_scene_psk_psa/psa/export/properties.py +++ 
b/io_scene_psk_psa/psa/export/properties.py @@ -1,10 +1,10 @@ import re import sys from fnmatch import fnmatch -from typing import List, Optional +from typing import List, Optional, Sequence +import bpy from bpy.props import ( BoolProperty, - PointerProperty, EnumProperty, FloatProperty, CollectionProperty, @@ -15,49 +15,50 @@ from bpy.types import PropertyGroup, Object, Action, AnimData, Context from ...shared.dfs import dfs_view_layer_objects from ...shared.helpers import populate_bone_collection_list -from ...shared.types import TransformMixin, ExportSpaceMixin, PsxBoneExportMixin +from ...shared.types import TransformMixin, ExportSpaceMixin, PsxBoneExportMixin, TransformSourceMixin def psa_export_property_group_animation_data_override_poll(_context, obj): return obj.animation_data is not None +class PsaExportSequenceMixin(PropertyGroup): + name: StringProperty(name='Name') + is_selected: BoolProperty(name='Selected', default=True) + frame_start: IntProperty(name='Start Frame', options={'HIDDEN'}) + frame_end: IntProperty(name='End Frame', options={'HIDDEN'}) + group: StringProperty(name='Group') -class PSA_PG_export_action_list_item(PropertyGroup): - action: PointerProperty(type=Action) - name: StringProperty() - is_selected: BoolProperty(default=True) - frame_start: IntProperty(options={'HIDDEN'}) - frame_end: IntProperty(options={'HIDDEN'}) + def __hash__(self) -> int: + return hash(self.name) + +class PsaExportSequenceWithActionMixin(PsaExportSequenceMixin): + action_name: StringProperty() + + @property + def action(self) -> Optional[Action]: + return bpy.data.actions.get(self.action_name) + +class PSA_PG_export_action_list_item(PsaExportSequenceWithActionMixin): is_pose_marker: BoolProperty(options={'HIDDEN'}) - group: StringProperty() -class PSA_PG_export_active_action_list_item(PropertyGroup): - action: PointerProperty(type=Action) - name: StringProperty() - armature_object: PointerProperty(type=Object) - is_selected: BoolProperty(default=True) - 
frame_start: IntProperty(options={'HIDDEN'}) - frame_end: IntProperty(options={'HIDDEN'}) - group: StringProperty() +class PSA_PG_export_active_action_list_item(PsaExportSequenceWithActionMixin): + armature_object_name: StringProperty() + + @property + def armature_object(self) -> Optional[Object]: + return bpy.data.objects.get(self.armature_object_name) + + def __hash__(self) -> int: + return super().__hash__() -class PSA_PG_export_timeline_markers(PropertyGroup): # TODO: rename this to singular +class PSA_PG_export_timeline_marker(PsaExportSequenceMixin): marker_index: IntProperty() - name: StringProperty() - is_selected: BoolProperty(default=True) - frame_start: IntProperty(options={'HIDDEN'}) - frame_end: IntProperty(options={'HIDDEN'}) - group: StringProperty() -class PSA_PG_export_nla_strip_list_item(PropertyGroup): - name: StringProperty() - action: PointerProperty(type=Action) - frame_start: FloatProperty() - frame_end: FloatProperty() - is_selected: BoolProperty(default=True) - group: StringProperty() +class PSA_PG_export_nla_strip_list_item(PsaExportSequenceWithActionMixin): + pass def get_sequences_from_name_and_frame_range(name: str, frame_start: int, frame_end: int): @@ -105,7 +106,7 @@ def nla_track_update_cb(self: 'PSA_PG_export', context: Context) -> None: for nla_strip in nla_track.strips: for sequence_name, frame_start, frame_end in get_sequences_from_name_and_frame_range(nla_strip.name, nla_strip.frame_start, nla_strip.frame_end): strip: PSA_PG_export_nla_strip_list_item = self.nla_strip_list.add() - strip.action = nla_strip.action + strip.action_name = nla_strip.action strip.name = sequence_name strip.frame_start = frame_start strip.frame_end = frame_end @@ -113,8 +114,6 @@ def nla_track_update_cb(self: 'PSA_PG_export', context: Context) -> None: def get_animation_data(pg: 'PSA_PG_export', context: Context) -> Optional[AnimData]: animation_data_object = context.object - if pg.should_override_animation_data: - animation_data_object = 
pg.animation_data_override return animation_data_object.animation_data if animation_data_object else None @@ -173,19 +172,7 @@ def sequence_source_update_cb(self: 'PSA_PG_export', context: Context) -> None: primary_key='DATA' if self.sequence_source == 'ACTIVE_ACTION' else 'OBJECT') -class PSA_PG_export(PropertyGroup, TransformMixin, ExportSpaceMixin, PsxBoneExportMixin): - should_override_animation_data: BoolProperty( - name='Override Animation Data', - options=set(), - default=False, - description='Use the animation data from a different object instead of the selected object', - update=animation_data_override_update_cb, - ) - animation_data_override: PointerProperty( - type=Object, - update=animation_data_override_update_cb, - poll=psa_export_property_group_animation_data_override_poll - ) +class PsaExportMixin(PropertyGroup, TransformMixin, ExportSpaceMixin, PsxBoneExportMixin, TransformSourceMixin): sequence_source: EnumProperty( name='Source', options=set(), @@ -215,14 +202,16 @@ class PSA_PG_export(PropertyGroup, TransformMixin, ExportSpaceMixin, PsxBoneExpo items=compression_ratio_source_items, ) compression_ratio_custom: FloatProperty(default=1.0, min=0.0, max=1.0, subtype='FACTOR', description='The key sampling ratio of the exported sequence.\n\nA compression ratio of 1.0 will export all frames, while a compression ratio of 0.5 will export half of the frames') + action_list: CollectionProperty(type=PSA_PG_export_action_list_item) action_list_index: IntProperty(default=0) - marker_list: CollectionProperty(type=PSA_PG_export_timeline_markers) + marker_list: CollectionProperty(type=PSA_PG_export_timeline_marker) marker_list_index: IntProperty(default=0) nla_strip_list: CollectionProperty(type=PSA_PG_export_nla_strip_list_item) nla_strip_list_index: IntProperty(default=0) active_action_list: CollectionProperty(type=PSA_PG_export_active_action_list_item) active_action_list_index: IntProperty(default=0) + sequence_name_prefix: StringProperty(name='Prefix', 
options=set()) sequence_name_suffix: StringProperty(name='Suffix', options=set()) sequence_filter_name: StringProperty( @@ -271,8 +260,11 @@ class PSA_PG_export(PropertyGroup, TransformMixin, ExportSpaceMixin, PsxBoneExpo description='The group to apply to all exported sequences. Only applicable when Group Source is Custom.' ) +class PSA_PG_export(PsaExportMixin): + pass -def filter_sequences(pg: PSA_PG_export, sequences) -> List[int]: + +def filter_sequences(pg: PsaExportMixin, sequences: Sequence[PsaExportSequenceMixin]) -> List[int]: bitflag_filter_item = 1 << 30 flt_flags = [bitflag_filter_item] * len(sequences) @@ -287,6 +279,8 @@ def filter_sequences(pg: PSA_PG_export, sequences) -> List[int]: for i, sequence in enumerate(sequences): flt_flags[i] ^= bitflag_filter_item + # TODO: perhaps just make one type that has all of the possible data types? hasattr is very flakey. + # we could just add the "type" as a variable and switch on that for different behaviors. if not pg.sequence_filter_asset: for i, sequence in enumerate(sequences): if hasattr(sequence, 'action') and sequence.action is not None and sequence.action.asset_data is not None: @@ -307,7 +301,7 @@ def filter_sequences(pg: PSA_PG_export, sequences) -> List[int]: _classes = ( PSA_PG_export_action_list_item, - PSA_PG_export_timeline_markers, + PSA_PG_export_timeline_marker, PSA_PG_export_nla_strip_list_item, PSA_PG_export_active_action_list_item, PSA_PG_export, diff --git a/io_scene_psk_psa/psa/export/properties.pyi b/io_scene_psk_psa/psa/export/properties.pyi new file mode 100644 index 0000000..ce4d616 --- /dev/null +++ b/io_scene_psk_psa/psa/export/properties.pyi @@ -0,0 +1,75 @@ +from bpy.types import PropertyGroup, Object, Action + +from ...shared.types import BpyCollectionProperty, TransformMixin, ExportSpaceMixin, PsxBoneExportMixin, TransformSourceMixin + +class PsaExportSequenceMixin(PropertyGroup): + name: str + is_selected: bool + frame_start: int + frame_end: int + group: str + +class 
PsaExportSequenceWithActionMixin(PsaExportSequenceMixin): + action_name: str + + @property + def action(self) -> Action | None: + pass + +class PSA_PG_export_action_list_item(PsaExportSequenceWithActionMixin): + is_pose_marker: bool + + +class PSA_PG_export_active_action_list_item(PsaExportSequenceWithActionMixin): + armature_object_name: str + + @property + def armature_object(self) -> Object | None: + pass + + +class PSA_PG_export_timeline_marker(PsaExportSequenceMixin): + marker_index: int + +class PSA_PG_export_nla_strip_list_item(PsaExportSequenceWithActionMixin): + pass + + +class PsaExportMixin(PropertyGroup, TransformMixin, ExportSpaceMixin, PsxBoneExportMixin, TransformSourceMixin): + sequence_source: str + nla_track: str + nla_track_index: int + fps_source: str + fps_custom: float + compression_ratio_source: str + compression_ratio_custom: float + action_list: BpyCollectionProperty[PSA_PG_export_action_list_item] + action_list_index: int + marker_list: BpyCollectionProperty[PSA_PG_export_timeline_marker] + marker_list_index: int + nla_strip_list: BpyCollectionProperty[PSA_PG_export_nla_strip_list_item] + nla_strip_list_index: int + active_action_list: BpyCollectionProperty[PSA_PG_export_active_action_list_item] + active_action_list_index: int + sequence_name_prefix: str + sequence_name_suffix: str + sequence_filter_name: str + sequence_use_filter_invert: bool + sequence_filter_asset: bool + sequence_filter_pose_marker: bool + sequence_use_filter_sort_reverse: bool + sequence_filter_reversed: bool + sampling_mode: str + group_source: str + group_custom: str + + +class PSA_PG_export(PsaExportMixin): + pass + + +def get_sequences_from_name_and_frame_range(name: str, frame_start: int, frame_end: int): + pass + +def filter_sequences(pg: PsaExportMixin, sequences) -> list[int]: + pass diff --git a/io_scene_psk_psa/psa/file_handlers.py b/io_scene_psk_psa/psa/file_handlers.py new file mode 100644 index 0000000..eea5341 --- /dev/null +++ 
b/io_scene_psk_psa/psa/file_handlers.py @@ -0,0 +1,24 @@ +from bpy.types import Context +from bpy.types import FileHandler + +from .import_.operators import PSA_OT_import_drag_and_drop +from .export.operators import PSA_OT_export_collection + +class PSA_FH_file_handler(FileHandler): + bl_idname = 'PSA_FH_file_handler' + bl_label = 'Unreal PSA' + bl_import_operator = PSA_OT_import_drag_and_drop.bl_idname + bl_export_operator = PSA_OT_export_collection.bl_idname + bl_file_extensions = '.psa' + + @classmethod + def poll_drop(cls, context: Context) -> bool: + return context.area is not None and context.area.type == 'VIEW_3D' + + +_classes = ( + PSA_FH_file_handler, +) + +from bpy.utils import register_classes_factory +register, unregister = register_classes_factory(_classes) diff --git a/io_scene_psk_psa/psa/import_/operators.py b/io_scene_psk_psa/psa/import_/operators.py index 818253b..70c35ef 100644 --- a/io_scene_psk_psa/psa/import_/operators.py +++ b/io_scene_psk_psa/psa/import_/operators.py @@ -452,18 +452,6 @@ def draw_psa_import_options_no_panels(layout, pg: PsaImportMixin): col.prop(pg, 'should_use_config_file') -class PSA_FH_import(FileHandler): # TODO: rename and add handling for PSA export. 
- bl_idname = 'PSA_FH_import' - bl_label = 'File handler for Unreal PSA import' - bl_import_operator = PSA_OT_import_drag_and_drop.bl_idname - # bl_export_operator = 'psa_export.export' - bl_file_extensions = '.psa' - - @classmethod - def poll_drop(cls, context: Context) -> bool: - return context.area is not None and context.area.type == 'VIEW_3D' - - _classes = ( PSA_OT_import_sequences_select_all, PSA_OT_import_sequences_deselect_all, @@ -471,7 +459,6 @@ _classes = ( PSA_OT_import, PSA_OT_import_all, PSA_OT_import_drag_and_drop, - PSA_FH_import, ) from bpy.utils import register_classes_factory diff --git a/io_scene_psk_psa/psa/import_/properties.pyi b/io_scene_psk_psa/psa/import_/properties.pyi new file mode 100644 index 0000000..90f95d1 --- /dev/null +++ b/io_scene_psk_psa/psa/import_/properties.pyi @@ -0,0 +1,59 @@ +from bpy.types import PropertyGroup, Text + +from ...shared.types import BpyCollectionProperty + + +class PSA_PG_import_action_list_item: + action_name: str + is_selected: bool + + +class PSA_PG_bone: + bone_name: str + + +class PSA_PG_data(PropertyGroup): + bones: BpyCollectionProperty[PSA_PG_bone] + sequence_count: int + +class PsaImportMixin: + should_use_fake_user: bool + should_use_config_file: bool + should_stash: bool + should_use_action_name_prefix: bool + action_name_prefix: str + should_overwrite: bool + should_write_keyframes: bool + should_write_metadata: bool + sequence_filter_name: str + sequence_filter_is_selected: bool + sequence_use_filter_invert: bool + sequence_use_filter_regex: bool + should_convert_to_samples: bool + bone_mapping_is_case_sensitive: bool + bone_mapping_should_ignore_trailing_whitespace: bool + fps_source: str + fps_custom: float + compression_ratio_source: str + compression_ratio_custom: float + translation_scale: float + +class PSA_PG_import: + psa_error: str + psa: PSA_PG_data + sequence_list: BpyCollectionProperty[PSA_PG_import_action_list_item] + sequence_list_index: int + sequence_filter_name: str + 
sequence_filter_is_selected: bool + sequence_use_filter_invert: bool + sequence_use_filter_regex: bool + select_text: Text | None + + + +def filter_sequences(pg: PSA_PG_import, sequences) -> list[int]: + pass + + +def get_visible_sequences(pg: PSA_PG_import, sequences) -> list[PSA_PG_import_action_list_item]: + pass diff --git a/io_scene_psk_psa/psa/import_/ui.py b/io_scene_psk_psa/psa/import_/ui.py index 40da274..7ab5b33 100644 --- a/io_scene_psk_psa/psa/import_/ui.py +++ b/io_scene_psk_psa/psa/import_/ui.py @@ -22,9 +22,9 @@ class PSA_UL_sequences_mixin(UIList): sub_row.prop(pg, 'sequence_use_filter_regex', text='', icon='SORTBYEXT') sub_row.prop(pg, 'sequence_filter_is_selected', text='', icon='CHECKBOX_HLT') - def filter_items(self, context, data, property_): + def filter_items(self, context, data, property): pg = getattr(context.scene, 'psa_import') - sequences = getattr(data, property_) + sequences = getattr(data, property) flt_flags = filter_sequences(pg, sequences) flt_neworder = bpy.types.UI_UL_list.sort_items_by_name(sequences, 'action_name') return flt_flags, flt_neworder diff --git a/io_scene_psk_psa/psa/importer.py b/io_scene_psk_psa/psa/importer.py index 94158b2..8f9fc2a 100644 --- a/io_scene_psk_psa/psa/importer.py +++ b/io_scene_psk_psa/psa/importer.py @@ -7,6 +7,8 @@ from bpy.types import Armature, Context, FCurve, Object, Bone, PoseBone from mathutils import Vector, Quaternion from bpy_extras import anim_utils +from ..shared.types import PSX_PG_action_export + from .config import PsaConfig, REMOVE_TRACK_LOCATION, REMOVE_TRACK_ROTATION from psk_psa_py.psa.reader import PsaReader from psk_psa_py.shared.data import PsxBone @@ -369,7 +371,8 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object, # Write meta-data. 
if options.should_write_metadata: - action.psa_export.fps = target_fps + pg = typing_cast(PSX_PG_action_export, getattr(action, 'psa_export')) + pg.fps = target_fps action.use_fake_user = options.should_use_fake_user diff --git a/io_scene_psk_psa/psk/builder.py b/io_scene_psk_psa/psk/builder.py index 2988c35..11b1e65 100644 --- a/io_scene_psk_psa/psk/builder.py +++ b/io_scene_psk_psa/psk/builder.py @@ -1,28 +1,23 @@ import bmesh import bpy import numpy as np -from bpy.types import Armature, Collection, Context, Depsgraph, Object, ArmatureModifier, Mesh +from bpy.types import Armature, Context, Object, Mesh from mathutils import Matrix -from typing import Dict, Iterable, List, Optional, Set, cast as typing_cast +from typing import Dict, Iterable, List, Optional, cast as typing_cast from psk_psa_py.shared.data import Vector3 from psk_psa_py.psk.data import Psk from .properties import triangle_type_and_bit_flags_to_poly_flags -from ..shared.dfs import DfsObject, dfs_collection_objects, dfs_view_layer_objects from ..shared.helpers import ( + PskInputObjects, PsxBoneCollection, convert_string_to_cp1252_bytes, create_psx_bones, - get_armatures_for_mesh_objects, + get_armature_for_mesh_object, get_coordinate_system_transform, + get_materials_for_mesh_objects, ) -class PskInputObjects(object): - def __init__(self): - self.mesh_dfs_objects: List[DfsObject] = [] - self.armature_objects: Set[Object] = set() - - class PskBuildOptions(object): def __init__(self): self.bone_filter_mode = 'ALL' @@ -37,62 +32,6 @@ class PskBuildOptions(object): self.root_bone_name = 'ROOT' -def get_materials_for_mesh_objects(depsgraph: Depsgraph, mesh_objects: Iterable[Object]): - yielded_materials = set() - for mesh_object in mesh_objects: - evaluated_mesh_object = mesh_object.evaluated_get(depsgraph) - for i, material_slot in enumerate(evaluated_mesh_object.material_slots): - material = material_slot.material - if material is None: - raise RuntimeError(f'Material slots cannot be empty. 
({mesh_object.name}, index {i})') - if material not in yielded_materials: - yielded_materials.add(material) - yield material - - -def get_mesh_objects_for_collection(collection: Collection) -> Iterable[DfsObject]: - return filter(lambda x: x.obj.type == 'MESH', dfs_collection_objects(collection)) - - -def get_mesh_objects_for_context(context: Context) -> Iterable[DfsObject]: - if context.view_layer is None: - return - for dfs_object in dfs_view_layer_objects(context.view_layer): - if dfs_object.obj.type == 'MESH' and dfs_object.is_selected: - yield dfs_object - - -def get_armature_for_mesh_object(mesh_object: Object) -> Optional[Object]: - if mesh_object.type != 'MESH': - return None - # Get the first armature modifier with a non-empty armature object. - for modifier in filter(lambda x: x.type == 'ARMATURE', mesh_object.modifiers): - armature_modifier = typing_cast(ArmatureModifier, modifier) - if armature_modifier.object is not None: - return armature_modifier.object - return None - - -def _get_psk_input_objects(mesh_dfs_objects: Iterable[DfsObject]) -> PskInputObjects: - mesh_dfs_objects = list(mesh_dfs_objects) - if len(mesh_dfs_objects) == 0: - raise RuntimeError('At least one mesh must be selected') - input_objects = PskInputObjects() - input_objects.mesh_dfs_objects = mesh_dfs_objects - input_objects.armature_objects |= set(get_armatures_for_mesh_objects(map(lambda x: x.obj, mesh_dfs_objects))) - return input_objects - - -def get_psk_input_objects_for_context(context: Context) -> PskInputObjects: - mesh_objects = list(get_mesh_objects_for_context(context)) - return _get_psk_input_objects(mesh_objects) - - -def get_psk_input_objects_for_collection(collection: Collection) -> PskInputObjects: - mesh_objects = get_mesh_objects_for_collection(collection) - return _get_psk_input_objects(mesh_objects) - - class PskBuildResult(object): def __init__(self, psk: Psk, warnings: list[str]): self.psk: Psk = psk @@ -199,12 +138,14 @@ def build_psk(context: Context, 
input_objects: PskInputObjects, options: PskBuil # This is used later to transform the mesh object geometry into the export space. armature_mesh_export_space_matrices: Dict[Optional[Object], Matrix] = {None: Matrix.Identity(4)} if options.export_space == 'ARMATURE': - # For meshes without an armature modifier, we need to set the export space to the armature object. + # For meshes without an armature modifier, we need to set the export space to the first armature object. armature_mesh_export_space_matrices[None] = _get_mesh_export_space_matrix(next(iter(input_objects.armature_objects), None), options.export_space) + for armature_object in armature_objects: armature_mesh_export_space_matrices[armature_object] = _get_mesh_export_space_matrix(armature_object, options.export_space) - scale_matrix = Matrix.Scale(options.scale, 4) + # TODO: we need to handle armature hierarchies here. if an object is parented to another armature, + # we need to take that into account when calculating the export space matrix. 
original_armature_object_pose_positions = {a: a.data.pose_position for a in armature_objects} @@ -216,6 +157,8 @@ def build_psk(context: Context, input_objects: PskInputObjects, options: PskBuil material_names = [m.name if m is not None else 'None' for m in materials] + scale_matrix = Matrix.Scale(options.scale, 4) + for object_index, input_mesh_object in enumerate(input_objects.mesh_dfs_objects): obj, matrix_world = input_mesh_object.obj, input_mesh_object.matrix_world armature_object = get_armature_for_mesh_object(obj) diff --git a/io_scene_psk_psa/psk/export/operators.py b/io_scene_psk_psa/psk/export/operators.py index 5e6a31f..af58a72 100644 --- a/io_scene_psk_psa/psk/export/operators.py +++ b/io_scene_psk_psa/psk/export/operators.py @@ -1,22 +1,21 @@ from pathlib import Path -from typing import Iterable, List +from typing import Iterable, List, cast as typing_cast import bpy -from bpy.props import BoolProperty, StringProperty +from bpy.props import StringProperty from bpy.types import Context, Depsgraph, Material, Object, Operator, Scene from bpy_extras.io_utils import ExportHelper -from .properties import PskExportMixin +from .properties import PSK_PG_export, PskExportMixin from ..builder import ( PskBuildOptions, build_psk, get_materials_for_mesh_objects, - get_psk_input_objects_for_collection, - get_psk_input_objects_for_context, ) from psk_psa_py.psk.writer import write_psk_to_path -from ...shared.helpers import PsxBoneCollection, get_collection_export_operator_from_context, populate_bone_collection_list +from ...shared.helpers import PsxBoneCollection, get_collection_export_operator_from_context, get_psk_input_objects_for_collection, populate_bone_collection_list, get_psk_input_objects_for_context from ...shared.ui import draw_bone_filter_mode +from ...shared.operators import PSK_OT_bone_collection_list_populate, PSK_OT_bone_collection_list_select_all def populate_material_name_list(depsgraph: Depsgraph, mesh_objects: Iterable[Object], material_list): @@ 
-34,51 +33,6 @@ def populate_material_name_list(depsgraph: Depsgraph, mesh_objects: Iterable[Obj m.index = index - -class PSK_OT_bone_collection_list_populate(Operator): - bl_idname = 'psk.bone_collection_list_populate' - bl_label = 'Populate Bone Collection List' - bl_description = 'Populate the bone collection list from the armature that will be used in this collection export' - bl_options = {'INTERNAL'} - - def execute(self, context): - export_operator = get_collection_export_operator_from_context(context) - if export_operator is None: - self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context') - return {'CANCELLED'} - if context.collection is None: - self.report({'ERROR_INVALID_CONTEXT'}, 'No active collection') - return {'CANCELLED'} - try: - input_objects = get_psk_input_objects_for_collection(context.collection) - except RuntimeError as e: - self.report({'ERROR_INVALID_CONTEXT'}, str(e)) - return {'CANCELLED'} - if not input_objects.armature_objects: - self.report({'ERROR_INVALID_CONTEXT'}, 'No armature modifiers found on mesh objects') - return {'CANCELLED'} - populate_bone_collection_list(export_operator.bone_collection_list, input_objects.armature_objects) - return {'FINISHED'} - - -class PSK_OT_bone_collection_list_select_all(Operator): - bl_idname = 'psk.bone_collection_list_select_all' - bl_label = 'Select All' - bl_description = 'Select all bone collections' - bl_options = {'INTERNAL'} - - is_selected: BoolProperty(default=True) - - def execute(self, context): - export_operator = get_collection_export_operator_from_context(context) - if export_operator is None: - self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context') - return {'CANCELLED'} - for item in export_operator.bone_collection_list: - item.is_selected = self.is_selected - return {'FINISHED'} - - class PSK_OT_populate_material_name_list(Operator): bl_idname = 'psk.export_populate_material_name_list' bl_label = 'Populate Material Name List' 
@@ -90,6 +44,7 @@ class PSK_OT_populate_material_name_list(Operator): if export_operator is None: self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context') return {'CANCELLED'} + export_operator = typing_cast(PskExportMixin, export_operator) depsgraph = context.evaluated_depsgraph_get() assert context.collection input_objects = get_psk_input_objects_for_collection(context.collection) @@ -124,6 +79,7 @@ class PSK_OT_material_list_name_add(Operator): if export_operator is None: self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context') return {'CANCELLED'} + export_operator = typing_cast(PskExportMixin, export_operator) m = export_operator.material_name_list.add() m.material_name = self.name m.index = len(export_operator.material_name_list) - 1 @@ -139,11 +95,11 @@ class PSK_OT_material_list_move_up(Operator): @classmethod def poll(cls, context): - pg = getattr(context.scene, 'psk_export') + pg = typing_cast(PSK_PG_export, getattr(context.scene, 'psk_export')) return pg.material_name_list_index > 0 def execute(self, context): - pg = getattr(context.scene, 'psk_export') + pg = typing_cast(PSK_PG_export, getattr(context.scene, 'psk_export')) pg.material_name_list.move(pg.material_name_list_index, pg.material_name_list_index - 1) pg.material_name_list_index -= 1 return {'FINISHED'} @@ -157,11 +113,11 @@ class PSK_OT_material_list_move_down(Operator): @classmethod def poll(cls, context): - pg = getattr(context.scene, 'psk_export') + pg = typing_cast(PSK_PG_export, getattr(context.scene, 'psk_export')) return pg.material_name_list_index < len(pg.material_name_list) - 1 def execute(self, context): - pg = getattr(context.scene, 'psk_export') + pg = typing_cast(PSK_PG_export, getattr(context.scene, 'psk_export')) pg.material_name_list.move(pg.material_name_list_index, pg.material_name_list_index + 1) pg.material_name_list_index += 1 return {'FINISHED'} @@ -178,6 +134,7 @@ class PSK_OT_material_list_name_move_up(Operator): 
export_operator = get_collection_export_operator_from_context(context) if export_operator is None: return False + export_operator = typing_cast(PskExportMixin, export_operator) return export_operator.material_name_list_index > 0 def execute(self, context): @@ -185,6 +142,7 @@ class PSK_OT_material_list_name_move_up(Operator): if export_operator is None: self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context') return {'CANCELLED'} + export_operator = typing_cast(PskExportMixin, export_operator) export_operator.material_name_list.move(export_operator.material_name_list_index, export_operator.material_name_list_index - 1) export_operator.material_name_list_index -= 1 return {'FINISHED'} @@ -201,6 +159,7 @@ class PSK_OT_material_list_name_move_down(Operator): export_operator = get_collection_export_operator_from_context(context) if export_operator is None: return False + export_operator = typing_cast(PskExportMixin, export_operator) return export_operator.material_name_list_index < len(export_operator.material_name_list) - 1 def execute(self, context): @@ -208,6 +167,7 @@ class PSK_OT_material_list_name_move_down(Operator): if export_operator is None: self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context') return {'CANCELLED'} + export_operator = typing_cast(PskExportMixin, export_operator) export_operator.material_name_list.move(export_operator.material_name_list_index, export_operator.material_name_list_index + 1) export_operator.material_name_list_index += 1 return {'FINISHED'} @@ -411,7 +371,7 @@ class PSK_OT_export(Operator, ExportHelper): self.report({'ERROR_INVALID_CONTEXT'}, str(e)) return {'CANCELLED'} - pg = getattr(context.scene, 'psk_export') + pg = typing_cast(PSK_PG_export, getattr(context.scene, 'psk_export')) populate_bone_collection_list(pg.bone_collection_list, input_objects.armature_objects) @@ -433,7 +393,7 @@ class PSK_OT_export(Operator, ExportHelper): assert layout - pg = 
getattr(context.scene, 'psk_export') + pg = typing_cast(PSK_PG_export, getattr(context.scene, 'psk_export')) # Mesh mesh_header, mesh_panel = layout.panel('Mesh', default_closed=False) @@ -543,8 +503,6 @@ _classes = ( PSK_OT_material_list_move_down, PSK_OT_export, PSK_OT_export_collection, - PSK_OT_bone_collection_list_populate, - PSK_OT_bone_collection_list_select_all, PSK_OT_populate_material_name_list, PSK_OT_material_list_name_move_up, PSK_OT_material_list_name_move_down, diff --git a/io_scene_psk_psa/psk/export/properties.py b/io_scene_psk_psa/psk/export/properties.py index 2aee5fd..7a72742 100644 --- a/io_scene_psk_psa/psk/export/properties.py +++ b/io_scene_psk_psa/psk/export/properties.py @@ -8,7 +8,7 @@ from bpy.props import ( ) from bpy.types import Material, PropertyGroup -from ...shared.types import ExportSpaceMixin, TransformMixin, PsxBoneExportMixin +from ...shared.types import ExportSpaceMixin, TransformMixin, PsxBoneExportMixin, TransformSourceMixin object_eval_state_items = ( ('EVALUATED', 'Evaluated', 'Use data from fully evaluated object'), @@ -20,11 +20,6 @@ material_order_mode_items = ( ('MANUAL', 'Manual', 'Manually arrange the materials'), ) -transform_source_items = ( - ('SCENE', 'Scene', 'Use the scene transform settings'), - ('CUSTOM', 'Custom', 'Use custom transform settings'), -) - class PSK_PG_material_list_item(PropertyGroup): material: PointerProperty(type=Material) index: IntProperty() @@ -35,7 +30,7 @@ class PSK_PG_material_name_list_item(PropertyGroup): index: IntProperty() -class PskExportMixin(ExportSpaceMixin, TransformMixin, PsxBoneExportMixin): +class PskExportMixin(ExportSpaceMixin, TransformMixin, PsxBoneExportMixin, TransformSourceMixin): object_eval_state: EnumProperty( items=object_eval_state_items, name='Object Evaluation State', @@ -54,11 +49,6 @@ class PskExportMixin(ExportSpaceMixin, TransformMixin, PsxBoneExportMixin): default=False, description='Export VTXNORMS section.' 
) - transform_source: EnumProperty( - items=transform_source_items, - name='Transform Source', - default='SCENE' - ) class PSK_PG_export(PropertyGroup, PskExportMixin): diff --git a/io_scene_psk_psa/psk/export/properties.pyi b/io_scene_psk_psa/psk/export/properties.pyi new file mode 100644 index 0000000..4a9b64d --- /dev/null +++ b/io_scene_psk_psa/psk/export/properties.pyi @@ -0,0 +1,25 @@ +from bpy.types import Material + +from ...shared.types import BpyCollectionProperty, ExportSpaceMixin, TransformMixin, PsxBoneExportMixin, TransformSourceMixin + +# TODO: eliminate this one and just use the name version with a prop version to fetch from data +class PSK_PG_material_list_item: + material: Material + index: int + + +class PSK_PG_material_name_list_item: + material_name: str + index: int + + +class PskExportMixin(ExportSpaceMixin, TransformMixin, PsxBoneExportMixin, TransformSourceMixin): + object_eval_state: str + material_order_mode: str + material_name_list: BpyCollectionProperty[PSK_PG_material_name_list_item] + material_name_list_index: int + should_export_vertex_normals: bool + + +class PSK_PG_export(PskExportMixin): + pass \ No newline at end of file diff --git a/io_scene_psk_psa/psk/export/ui.py b/io_scene_psk_psa/psk/export/ui.py index 459316b..f12d2a5 100644 --- a/io_scene_psk_psa/psk/export/ui.py +++ b/io_scene_psk_psa/psk/export/ui.py @@ -1,10 +1,25 @@ import bpy from bpy.types import UIList +from typing import cast as typing_cast + +from .properties import PSK_PG_material_name_list_item class PSK_UL_material_names(UIList): - def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index): + def draw_item( + self, + context, + layout, + data, + item, + icon, + active_data, + active_property, + index, + flt_flag + ): row = layout.row() + item = typing_cast(PSK_PG_material_name_list_item, item) material = bpy.data.materials.get(item.material_name, None) icon_value = layout.icon(material) if material else 0 row.prop(item, 
'material_name', text='', emboss=False, icon_value=icon_value, icon='BLANK1' if icon_value == 0 else 'NONE') diff --git a/io_scene_psk_psa/psk/import_/operators.py b/io_scene_psk_psa/psk/import_/operators.py index 14babfe..5bef8be 100644 --- a/io_scene_psk_psa/psk/import_/operators.py +++ b/io_scene_psk_psa/psk/import_/operators.py @@ -163,7 +163,7 @@ class PSK_OT_import_drag_and_drop(Operator, PskImportMixin): # TODO: move to another file -class PSK_FH_import(FileHandler): +class PSK_FH_file_handler(FileHandler): bl_idname = 'PSK_FH_import' bl_label = 'Unreal PSK' bl_import_operator = PSK_OT_import_drag_and_drop.bl_idname @@ -178,7 +178,7 @@ class PSK_FH_import(FileHandler): _classes = ( PSK_OT_import, PSK_OT_import_drag_and_drop, - PSK_FH_import, + PSK_FH_file_handler, ) from bpy.utils import register_classes_factory diff --git a/io_scene_psk_psa/psk/importer.py b/io_scene_psk_psa/psk/importer.py index 0e807c2..b9b64ed 100644 --- a/io_scene_psk_psa/psk/importer.py +++ b/io_scene_psk_psa/psk/importer.py @@ -25,7 +25,7 @@ class PskImportOptions: self.bone_length = 1.0 self.should_import_materials = True self.scale = 1.0 - self.bdk_repository_id = None + self.bdk_repository_id: str | None = None class ImportBone: @@ -83,7 +83,7 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions) bpy.ops.object.mode_set(mode='EDIT') - import_bones = [] + import_bones: List[ImportBone] = [] for bone_index, psk_bone in enumerate(psk.bones): import_bone = ImportBone(bone_index, psk_bone) @@ -107,10 +107,16 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions) bone.world_rotation_matrix = bone.local_rotation.conjugated().to_matrix() bone.world_rotation_matrix.rotate(parent.world_rotation_matrix) + # Create all the bones up-front. + # This allows us to set up the parent-child relationships properly even if the parent bone comes after the child bone in the PSK file. 
for import_bone in import_bones: bone_name = import_bone.psk_bone.name.decode('utf-8') edit_bone = armature_data.edit_bones.new(bone_name) + for import_bone in import_bones: + bone_name = import_bone.psk_bone.name.decode('utf-8') + edit_bone = armature_data.edit_bones[bone_name] + if import_bone.parent is not None: edit_bone.parent = armature_data.edit_bones[import_bone.psk_bone.parent_index] else: diff --git a/io_scene_psk_psa/psk/properties.pyi b/io_scene_psk_psa/psk/properties.pyi new file mode 100644 index 0000000..c92bca9 --- /dev/null +++ b/io_scene_psk_psa/psk/properties.pyi @@ -0,0 +1,18 @@ +class PSX_PG_material: + mesh_triangle_type: str + mesh_triangle_bit_flags: set[str] + + +class PskImportMixin: + should_import_vertex_colors: bool + vertex_color_space: str + should_import_vertex_normals: bool + should_import_extra_uvs: bool + components: str + should_import_mesh: bool + should_import_materials: bool + should_import_armature: bool + bone_length: float + should_import_shape_keys: bool + scale: float + bdk_repository_id: str diff --git a/io_scene_psk_psa/shared/helpers.py b/io_scene_psk_psa/shared/helpers.py index d5462a5..d153925 100644 --- a/io_scene_psk_psa/shared/helpers.py +++ b/io_scene_psk_psa/shared/helpers.py @@ -1,9 +1,11 @@ import bpy from collections import Counter from typing import List, Iterable, Optional, Dict, Tuple, cast as typing_cast -from bpy.types import Armature, AnimData, Collection, Context, Object, ArmatureModifier, SpaceProperties +from bpy.types import Armature, AnimData, Collection, Context, Object, ArmatureModifier, SpaceProperties, PropertyGroup from mathutils import Matrix, Vector, Quaternion as BpyQuaternion -from psk_psa_py.shared.data import PsxBone, Vector3, Quaternion +from psk_psa_py.shared.data import PsxBone, Quaternion + +from ..shared.types import BpyCollectionProperty, PSX_PG_bone_collection_list_item def rgb_to_srgb(c: float) -> float: @@ -23,7 +25,11 @@ def get_nla_strips_in_frame_range(animation_data: 
AnimData, frame_min: float, fr yield strip -def populate_bone_collection_list(bone_collection_list, armature_objects: Iterable[Object], primary_key: str = 'OBJECT'): +def populate_bone_collection_list( + bone_collection_list: BpyCollectionProperty[PSX_PG_bone_collection_list_item], + armature_objects: Iterable[Object], + primary_key: str = 'OBJECT' + ): """ Updates the bone collection list. @@ -146,31 +152,31 @@ def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_c # We use the bone names for the return values because the bone name is a more universal way of referencing them. # For example, users of this function may modify bone lists, which would invalidate the indices and require an # index mapping scheme to resolve it. Using strings is more comfy and results in less code downstream. - instigator_bone_names = [bones[x[1]].name if x[1] is not None else None for x in bone_indices] bone_names = [bones[x[0]].name for x in bone_indices] - # Ensure that the hierarchy we are sending back has a single root bone. - # TODO: This is only relevant if we are exporting a single armature; how should we reorganize this call? - bone_indices = [x[0] for x in bone_indices] - root_bones = [bones[bone_index] for bone_index in bone_indices if bones[bone_index].parent is None] - if len(root_bones) > 1: - # There is more than one root bone. - # Print out why each root bone was included by linking it to one of the explicitly included bones. - root_bone_names = [bone.name for bone in root_bones] - for root_bone_name in root_bone_names: - bone_name = root_bone_name - while True: - # Traverse the instigator chain until the end to find the true instigator bone. - # TODO: in future, it would be preferential to have a readout of *all* instigator bones. 
- instigator_bone_name = instigator_bone_names[bone_names.index(bone_name)] - if instigator_bone_name is None: - print(f'Root bone "{root_bone_name}" was included because {bone_name} was marked for export') - break - bone_name = instigator_bone_name + # instigator_bone_names = [bones[x[1]].name if x[1] is not None else None for x in bone_indices] + # # Ensure that the hierarchy we are sending back has a single root bone. + # # TODO: This is only relevant if we are exporting a single armature; how should we reorganize this call? + # bone_indices = [x[0] for x in bone_indices] + # root_bones = [bones[bone_index] for bone_index in bone_indices if bones[bone_index].parent is None] + # if len(root_bones) > 1: + # # There is more than one root bone. + # # Print out why each root bone was included by linking it to one of the explicitly included bones. + # root_bone_names = [bone.name for bone in root_bones] + # for root_bone_name in root_bone_names: + # bone_name = root_bone_name + # while True: + # # Traverse the instigator chain until the end to find the true instigator bone. + # # TODO: in future, it would be preferential to have a readout of *all* instigator bones. 
+ # instigator_bone_name = instigator_bone_names[bone_names.index(bone_name)] + # if instigator_bone_name is None: + # print(f'Root bone "{root_bone_name}" was included because {bone_name} was marked for export') + # break + # bone_name = instigator_bone_name - raise RuntimeError('Exported bone hierarchy must have a single root bone.\n' - f'The bone hierarchy marked for export has {len(root_bones)} root bones: {root_bone_names}.\n' - f'Additional debugging information has been written to the console.') + # raise RuntimeError('Exported bone hierarchy must have a single root bone.\n' + # f'The bone hierarchy marked for export has {len(root_bones)} root bones: {root_bone_names}.\n' + # f'Additional debugging information has been written to the console.') return bone_names @@ -194,7 +200,7 @@ def create_psx_bones_from_blender_bones( scale = 1.0, forward_axis: str = 'X', up_axis: str = 'Z', - root_bone: Optional = None, + root_bone: PsxBone | None = None ) -> List[PsxBone]: scale_matrix = Matrix.Scale(scale, 4) @@ -207,55 +213,66 @@ def create_psx_bones_from_blender_bones( psx_bone = PsxBone() psx_bone.name = convert_string_to_cp1252_bytes(bone.name) - try: - parent_index = bones.index(bone.parent) - psx_bone.parent_index = parent_index - psx_bones[parent_index].children_count += 1 - except ValueError: - psx_bone.parent_index = 0 + if bone.parent is not None: + try: + parent_index = bones.index(bone.parent) + psx_bone.parent_index = parent_index + psx_bones[parent_index].children_count += 1 + except ValueError: + pass + + # TODO: Need to add handling here for case where the root is being parented to another armature. + # In that case, we need to convert the root bone from world space to the local space of the target bone. + # I think we actually have an opportunity to make this more understandable. If we pass the root_bone in here, + # we can handle both cases in the same logic, since `root_bone` is assumed to be at origin currently. 
+ # `root_bone` could be changed to be (Bone, Object) tuple? if bone.parent is not None: + # Child bone. rotation = bone.matrix.to_quaternion().conjugated() inverse_parent_rotation = bone.parent.matrix.to_quaternion().inverted() parent_head = inverse_parent_rotation @ bone.parent.head parent_tail = inverse_parent_rotation @ bone.parent.tail location = (parent_tail - parent_head) + bone.head - elif bone.parent is None and root_bone is not None: - # This is a special case for the root bone when export - # Because the root bone and child bones are in different spaces, we need to treat the root bone of this - # armature as though it were a child bone. - bone_rotation = bone.matrix.to_quaternion().conjugated() - local_rotation = armature_object_matrix_world.to_3x3().to_quaternion().conjugated() - rotation = bone_rotation @ local_rotation - translation, _, scale = armature_object_matrix_world.decompose() - # Invert the scale of the armature object matrix. - inverse_scale_matrix = Matrix.Identity(4) - inverse_scale_matrix[0][0] = 1.0 / scale.x - inverse_scale_matrix[1][1] = 1.0 / scale.y - inverse_scale_matrix[2][2] = 1.0 / scale.z + else: # bone.parent is None + if root_bone is not None: + # This is a special case for when a root bone is being passed. + # Because the root bone and child bones are in different spaces, we need to treat the root bone of this + # armature as though it were a child bone. + bone_rotation = bone.matrix.to_quaternion().conjugated() + local_rotation = armature_object_matrix_world.to_3x3().to_quaternion().conjugated() + rotation = bone_rotation @ local_rotation + translation, _, scale = armature_object_matrix_world.decompose() + # Invert the scale of the armature object matrix. 
+ inverse_scale_matrix = Matrix.Identity(4) + inverse_scale_matrix[0][0] = 1.0 / scale.x + inverse_scale_matrix[1][1] = 1.0 / scale.y + inverse_scale_matrix[2][2] = 1.0 / scale.z - translation = translation @ inverse_scale_matrix - location = translation + bone.head - else: - def get_armature_local_matrix(): - match export_space: - case 'WORLD': - return armature_object_matrix_world - case 'ARMATURE': - return Matrix.Identity(4) - case 'ROOT': - return bone.matrix.inverted() - case _: - assert False, f'Invalid export space: {export_space}' + translation = translation @ inverse_scale_matrix + location = translation + bone.head + else: + # Parent is none AND there is no special root bone. + # This is the default case for the root bone of single-armature exports. + def get_armature_local_matrix(): + match export_space: + case 'WORLD': + return armature_object_matrix_world + case 'ARMATURE': + return Matrix.Identity(4) + case 'ROOT': + return bone.matrix.inverted() + case _: + assert False, f'Invalid export space: {export_space}' - armature_local_matrix = get_armature_local_matrix() - location = armature_local_matrix @ bone.head - location = coordinate_system_transform @ location - bone_rotation = bone.matrix.to_quaternion().conjugated() - local_rotation = armature_local_matrix.to_3x3().to_quaternion().conjugated() - rotation = bone_rotation @ local_rotation - rotation.conjugate() - rotation = coordinate_system_default_rotation @ rotation + armature_local_matrix = get_armature_local_matrix() + location = armature_local_matrix @ bone.head + location = coordinate_system_transform @ location + bone_rotation = bone.matrix.to_quaternion().conjugated() + local_rotation = armature_local_matrix.to_3x3().to_quaternion().conjugated() + rotation = bone_rotation @ local_rotation + rotation.conjugate() + rotation = coordinate_system_default_rotation @ rotation location = scale_matrix @ location @@ -265,6 +282,7 @@ def create_psx_bones_from_blender_bones( location.y *= 
armature_object_scale.y location.z *= armature_object_scale.z + # Copy the calculated location and rotation to the bone. psx_bone.location.x = location.x psx_bone.location.y = location.y psx_bone.location.z = location.z @@ -313,6 +331,57 @@ class PsxBoneCollection: self.index = index +class ObjectNode: + def __init__(self, obj: Object): + self.object = obj + self.children: List['ObjectNode'] = [] + + +class ObjectTree: + def __init__(self) -> None: + self.root_nodes: List[ObjectNode] = [] + + @staticmethod + def from_objects(objects: Iterable[Object]) -> 'ObjectTree': + ''' + Make a tree of the armature objects based on their hierarchy. + ''' + tree = ObjectTree() + object_node_map: Dict[Object, ObjectNode] = {x: ObjectNode(x) for x in objects} + + for obj, object_node in object_node_map.items(): + if obj.parent in object_node_map: + parent_node = object_node_map[obj.parent] + parent_node.children.append(object_node) + else: + tree.root_nodes.append(object_node) + + return tree + + def __iter__(self): + """ + An depth-first iterator over the armature tree. + """ + node_stack = self.root_nodes + while node_stack: + node = node_stack.pop() + yield node + node_stack = node.children + node_stack + + def objects_dfs(self): + for node in self: + yield node.object + + def dump(self): + # Print out the hierarchy of armature objects for debugging using the root nodes, with indentation to show parent-child relationships. 
+ for root_node in self.root_nodes: + def print_object_node(node: ObjectNode, indent: int = 0): + print(' ' * indent + f'- {node.object.name}') + for child_node in node.children: + print_object_node(child_node, indent + 2) + print_object_node(root_node) + + def create_psx_bones( armature_objects: List[Object], export_space: str = 'WORLD', @@ -332,12 +401,13 @@ def create_psx_bones( if bone_collection_indices is None: bone_collection_indices = [] - bones: List[Tuple[PsxBone, Optional[Object]]] = [] + armature_tree = ObjectTree.from_objects(armature_objects) - if export_space != 'WORLD' and len(armature_objects) >= 2: - armature_object_names = [armature_object.name for armature_object in armature_objects] + # Check that there is only one root bone. If there are multiple armature objects, the export space must be WORLD. + if len(armature_tree.root_nodes) >= 2 and export_space != 'WORLD': + root_armature_names = [node.object.name for node in armature_tree.root_nodes] raise RuntimeError(f'When exporting multiple armatures, the Export Space must be World.\n' \ - f'The following armatures are attempting to be exported: {armature_object_names}') + f'The following armatures are attempting to be exported: {root_armature_names}') coordinate_system_matrix = get_coordinate_system_transform(forward_axis, up_axis) coordinate_system_default_rotation = coordinate_system_matrix.to_quaternion() @@ -364,29 +434,23 @@ def create_psx_bones( # Store the index of the root bone for each armature object. # We will need this later to correctly assign vertex weights. armature_object_root_bone_indices: Dict[Optional[Object], int] = dict() + bones: List[Tuple[PsxBone, Optional[Object]]] = [] if len(armature_objects) == 0 or total_bone_count == 0: # If the mesh has no armature object or no bones, simply assign it a dummy bone at the root to satisfy the # requirement that a PSK file must have at least one bone. 
psx_bone = PsxBone() psx_bone.name = convert_string_to_cp1252_bytes(root_bone_name) - psx_bone.flags = 0 - psx_bone.children_count = 0 - psx_bone.parent_index = 0 - psx_bone.location = Vector3.zero() psx_bone.rotation = convert_bpy_quaternion_to_psx_quaternion(coordinate_system_default_rotation) bones.append((psx_bone, None)) armature_object_root_bone_indices[None] = 0 else: - # If we have multiple armature objects, create a root bone at the world origin. - if len(armature_objects) > 1: + # If we have multiple root armature objects, create a root bone at the world origin. + if len(armature_tree.root_nodes) > 1: psx_bone = PsxBone() psx_bone.name = convert_string_to_cp1252_bytes(root_bone_name) - psx_bone.flags = 0 psx_bone.children_count = total_bone_count - psx_bone.parent_index = 0 - psx_bone.location = Vector3.zero() psx_bone.rotation = convert_bpy_quaternion_to_psx_quaternion(coordinate_system_default_rotation) bones.append((psx_bone, None)) @@ -394,6 +458,9 @@ def create_psx_bones( root_bone = bones[0][0] if len(bones) > 0 else None + # TODO: child armatures are not being correctly transformed when parented to a bone. + + # Iterate through all the armature objects. for armature_object in armature_objects: bone_names = armature_object_bone_names[armature_object] armature_data = typing_cast(Armature, armature_object.data) @@ -420,6 +487,36 @@ def create_psx_bones( bones.extend((psx_bone, armature_object) for psx_bone in armature_psx_bones) + # Check if any of the armatures are parented to one another. + # If so, adjust the hierarchy as though they are part of the same armature object. + # This will let us re-use rig components without destructively joining them. + for armature_object in armature_objects: + if armature_object.parent not in armature_objects: + continue + # This armature object is parented to another armature object that we are exporting. + # First fetch the root bone indices for the two armature objects. 
+ root_bone_index = armature_object_root_bone_indices[armature_object] + parent_root_bone_index = armature_object_root_bone_indices[armature_object.parent] + + match armature_object.parent_type: + case 'OBJECT': + # Parent this armature's root bone to the root bone of the parent object. + bones[root_bone_index][0].parent_index = parent_root_bone_index + case 'BONE': + # Parent this armature's root bone to the specified bone in the parent. + new_parent_index = None + for bone_index, (bone, bone_armature_object) in enumerate(bones): + if bone.name == convert_string_to_cp1252_bytes(armature_object.parent_bone) and bone_armature_object == armature_object.parent: + new_parent_index = bone_index + break + if new_parent_index == None: + raise RuntimeError(f'Bone \'{armature_object.parent_bone}\' could not be found in armature \'{armature_object.parent.name}\'.') + bones[root_bone_index][0].parent_index = new_parent_index + case _: + raise RuntimeError(f'Unhandled parent type ({armature_object.parent_type}) for object {armature_object.name}.\n' + f'Parent type must be \'Object\' or \'Bone\'.' + ) + # Check if there are bone name conflicts between armatures. 
bone_name_counts = Counter(bone[0].name.decode('windows-1252').upper() for bone in bones) for bone_name, count in bone_name_counts.items(): @@ -482,7 +579,7 @@ def get_armatures_for_mesh_objects(mesh_objects: Iterable[Object]): yield from armature_objects -def get_collection_from_context(context: Context) -> Optional[Collection]: +def get_collection_from_context(context: Context) -> Collection | None: if context.space_data is None or context.space_data.type != 'PROPERTIES': return None space_data = typing_cast(SpaceProperties, context.space_data) @@ -492,11 +589,81 @@ def get_collection_from_context(context: Context) -> Optional[Collection]: return context.collection -def get_collection_export_operator_from_context(context: Context) -> Optional[object]: +def get_collection_export_operator_from_context(context: Context) -> PropertyGroup | None: collection = get_collection_from_context(context) if collection is None or collection.active_exporter_index is None: return None if 0 > collection.active_exporter_index >= len(collection.exporters): return None exporter = collection.exporters[collection.active_exporter_index] - return exporter.export_properties \ No newline at end of file + return exporter.export_properties + + +from ..shared.dfs import DfsObject, dfs_collection_objects, dfs_view_layer_objects +from typing import Set +from bpy.types import Depsgraph + + +class PskInputObjects(object): + def __init__(self): + self.mesh_dfs_objects: List[DfsObject] = [] + self.armature_objects: List[Object] = [] + + +def get_materials_for_mesh_objects(depsgraph: Depsgraph, mesh_objects: Iterable[Object]): + yielded_materials = set() + for mesh_object in mesh_objects: + evaluated_mesh_object = mesh_object.evaluated_get(depsgraph) + for i, material_slot in enumerate(evaluated_mesh_object.material_slots): + material = material_slot.material + if material is None: + raise RuntimeError(f'Material slots cannot be empty. 
({mesh_object.name}, index {i})') + if material not in yielded_materials: + yielded_materials.add(material) + yield material + + +def get_mesh_objects_for_collection(collection: Collection) -> Iterable[DfsObject]: + return filter(lambda x: x.obj.type == 'MESH', dfs_collection_objects(collection)) + + +def get_mesh_objects_for_context(context: Context) -> Iterable[DfsObject]: + if context.view_layer is None: + return + for dfs_object in dfs_view_layer_objects(context.view_layer): + if dfs_object.obj.type == 'MESH' and dfs_object.is_selected: + yield dfs_object + + +def get_armature_for_mesh_object(mesh_object: Object) -> Optional[Object]: + if mesh_object.type != 'MESH': + return None + # Get the first armature modifier with a non-empty armature object. + for modifier in filter(lambda x: x.type == 'ARMATURE', mesh_object.modifiers): + armature_modifier = typing_cast(ArmatureModifier, modifier) + if armature_modifier.object is not None: + return armature_modifier.object + return None + + +def _get_psk_input_objects(mesh_dfs_objects: Iterable[DfsObject]) -> PskInputObjects: + mesh_dfs_objects = list(mesh_dfs_objects) + if len(mesh_dfs_objects) == 0: + raise RuntimeError('At least one mesh must be selected') + input_objects = PskInputObjects() + input_objects.mesh_dfs_objects = mesh_dfs_objects + # Get the armature objects used on all the meshes being exported. + armature_objects = get_armatures_for_mesh_objects(map(lambda x: x.obj, mesh_dfs_objects)) + # Sort them in hierarchy order. 
+ input_objects.armature_objects = list(ObjectTree.from_objects(armature_objects).objects_dfs()) + return input_objects + + +def get_psk_input_objects_for_context(context: Context) -> PskInputObjects: + mesh_objects = list(get_mesh_objects_for_context(context)) + return _get_psk_input_objects(mesh_objects) + + +def get_psk_input_objects_for_collection(collection: Collection) -> PskInputObjects: + mesh_objects = get_mesh_objects_for_collection(collection) + return _get_psk_input_objects(mesh_objects) diff --git a/io_scene_psk_psa/shared/operators.py b/io_scene_psk_psa/shared/operators.py new file mode 100644 index 0000000..a7e621b --- /dev/null +++ b/io_scene_psk_psa/shared/operators.py @@ -0,0 +1,72 @@ +from bpy.types import Operator +from bpy.props import BoolProperty + +from .types import PsxBoneExportMixin +from typing import cast as typing_cast + +from .helpers import get_collection_export_operator_from_context, get_psk_input_objects_for_collection, populate_bone_collection_list + + + +class PSK_OT_bone_collection_list_populate(Operator): + bl_idname = 'psk.bone_collection_list_populate' + bl_label = 'Populate Bone Collection List' + bl_description = 'Populate the bone collection list from the armature that will be used in this collection export' + bl_options = {'INTERNAL'} + + def execute(self, context): + export_operator = get_collection_export_operator_from_context(context) + if export_operator is None: + self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context') + return {'CANCELLED'} + if context.collection is None: + self.report({'ERROR_INVALID_CONTEXT'}, 'No active collection') + return {'CANCELLED'} + try: + input_objects = get_psk_input_objects_for_collection(context.collection) + except RuntimeError as e: + self.report({'ERROR_INVALID_CONTEXT'}, str(e)) + return {'CANCELLED'} + if not input_objects.armature_objects: + self.report({'ERROR_INVALID_CONTEXT'}, 'No armature modifiers found on mesh objects') + return {'CANCELLED'} + 
export_operator = typing_cast(PsxBoneExportMixin, export_operator) + + # Save and restore the selected status of the bones collections. + selected_status: dict[int, bool] = dict() + for bone_collection in export_operator.bone_collection_list: + selected_status[hash(bone_collection)] = bone_collection.is_selected + + populate_bone_collection_list(export_operator.bone_collection_list, input_objects.armature_objects) + + for bone_collection in export_operator.bone_collection_list: + bone_collection.is_selected = selected_status.get(hash(bone_collection), bone_collection.is_selected) + + return {'FINISHED'} + + +class PSK_OT_bone_collection_list_select_all(Operator): + bl_idname = 'psk.bone_collection_list_select_all' + bl_label = 'Select All' + bl_description = 'Select all bone collections' + bl_options = {'INTERNAL'} + + is_selected: BoolProperty(default=True) + + def execute(self, context): + export_operator = get_collection_export_operator_from_context(context) + if export_operator is None: + self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context') + return {'CANCELLED'} + export_operator = typing_cast(PsxBoneExportMixin, export_operator) + for item in export_operator.bone_collection_list: + item.is_selected = self.is_selected + return {'FINISHED'} + + +_classes = ( + PSK_OT_bone_collection_list_populate, + PSK_OT_bone_collection_list_select_all, +) +from bpy.utils import register_classes_factory +register, unregister = register_classes_factory(_classes) diff --git a/io_scene_psk_psa/shared/semver.py b/io_scene_psk_psa/shared/semver.py deleted file mode 100644 index cc1606e..0000000 --- a/io_scene_psk_psa/shared/semver.py +++ /dev/null @@ -1,54 +0,0 @@ -from typing import Tuple - -class SemanticVersion(object): - def __init__(self, version: Tuple[int, int, int]): - self.major, self.minor, self.patch = version - - def __iter__(self): - yield self.major - yield self.minor - yield self.patch - - @staticmethod - def compare(lhs: 'SemanticVersion', rhs: 'SemanticVersion') -> 
int: - """ - Compares two semantic versions. - - Returns: - -1 if lhs < rhs - 0 if lhs == rhs - 1 if lhs > rhs - """ - for l, r in zip(lhs, rhs): - if l < r: - return -1 - if l > r: - return 1 - return 0 - - def __str__(self): - return f'{self.major}.{self.minor}.{self.patch}' - - def __repr__(self): - return str(self) - - def __eq__(self, other): - return self.compare(self, other) == 0 - - def __ne__(self, other): - return not self == other - - def __lt__(self, other): - return self.compare(self, other) == -1 - - def __le__(self, other): - return self.compare(self, other) <= 0 - - def __gt__(self, other): - return self.compare(self, other) == 1 - - def __ge__(self, other): - return self.compare(self, other) >= 0 - - def __hash__(self): - return hash((self.major, self.minor, self.patch)) diff --git a/io_scene_psk_psa/shared/types.py b/io_scene_psk_psa/shared/types.py index 908b3cb..c3c4b04 100644 --- a/io_scene_psk_psa/shared/types.py +++ b/io_scene_psk_psa/shared/types.py @@ -1,12 +1,29 @@ +from typing import Generic, Iterable, Sized, TypeVar import bpy from bpy.props import CollectionProperty, EnumProperty, StringProperty, IntProperty, BoolProperty, FloatProperty from bpy.types import PropertyGroup, UIList, UILayout, Context, AnyType, Panel +T = TypeVar('T') + +# Don't actually use this, this is just for typing. 
+class BpyCollectionProperty(Generic[T], Iterable[T], Sized): + def add(self) -> T: + return T() # type: ignore + + def clear(self) -> None: + pass + + def move(self, src_index: int, dst_index: int): + pass + + def remove(self, index: int): + pass + class PSX_UL_bone_collection_list(UIList): - def draw_item(self, _context: Context, layout: UILayout, _data: AnyType, item: AnyType, _icon: int, - _active_data: AnyType, _active_property: str, _index: int = 0, _flt_flag: int = 0): + def draw_item(self, context: Context, layout: UILayout, data: AnyType, item: AnyType, icon: int, + active_data: AnyType, active_property: str, index: int = 0, flt_flag: int = 0): row = layout.row() row.prop(item, 'is_selected', text=getattr(item, 'name')) @@ -27,6 +44,9 @@ class PSX_PG_bone_collection_list_item(PropertyGroup): count: IntProperty() is_selected: BoolProperty(default=False) + def __hash__(self) -> int: + return hash(f'{self.name}/{self.armature_object_name}/{self.armature_data_name}') + class PSX_PG_action_export(PropertyGroup): group: StringProperty(name='Group', description='The group of the sequence', maxlen=64) @@ -45,7 +65,7 @@ class PSX_PT_action(Panel): @classmethod def poll(cls, context: 'Context'): - return context.active_object and context.active_object.type == 'ARMATURE' and context.active_action is not None + return context.active_object is not None and context.active_object.type == 'ARMATURE' and context.active_action is not None def draw(self, context: 'Context'): action = context.active_action @@ -87,13 +107,13 @@ up_items = ( ) -def forward_axis_update(self, __context): +def forward_axis_update(self, context): if self.forward_axis == self.up_axis: # Automatically set the up axis to the next available axis self.up_axis = next((axis for axis in axis_identifiers if axis != self.forward_axis), 'Z') -def up_axis_update(self, __context): +def up_axis_update(self, context): if self.up_axis == self.forward_axis: # Automatically set the forward axis to the next available 
axis self.forward_axis = next((axis for axis in axis_identifiers if axis != self.up_axis), 'X') @@ -138,6 +158,17 @@ class ExportSpaceMixin: default='WORLD' ) +transform_source_items = ( + ('SCENE', 'Scene', 'Use the scene transform settings'), + ('CUSTOM', 'Custom', 'Use custom transform settings'), +) + +class TransformSourceMixin: + transform_source: EnumProperty( + items=transform_source_items, + name='Transform Source', + default='SCENE' + ) class PsxBoneExportMixin: bone_filter_mode: EnumProperty( diff --git a/io_scene_psk_psa/shared/types.pyi b/io_scene_psk_psa/shared/types.pyi new file mode 100644 index 0000000..735280a --- /dev/null +++ b/io_scene_psk_psa/shared/types.pyi @@ -0,0 +1,61 @@ +from typing import Generic, TypeVar, Iterable, Sized + +T = TypeVar("T") + +# https://docs.blender.org/api/current/bpy.types.bpy_prop_collection_idprop.html#bpy.types.bpy_prop_collection_idprop +class BpyCollectionProperty(Generic[T], Iterable[T], Sized): + def add(self) -> T: + pass + + def clear(self) -> None: + pass + + def move(self, src_index: int, dst_index: int): + pass + + def remove(self, index: int): + pass + + +class PSX_PG_bone_collection_list_item: + armature_object_name: str + armature_data_name: str + name: str + index: int + count: int + is_selected: bool + + +class PSX_PG_action_export: + group: str + compression_ratio: float + key_quota: int + fps: float + + +class AxisMixin: + forward_axis: str + up_axis: str + + +class TransformMixin(AxisMixin): + scale: float + + +class ExportSpaceMixin: + export_space: str + + +class TransformSourceMixin: + transform_source: str + + +class PsxBoneExportMixin: + bone_filter_mode: str + bone_collection_list: BpyCollectionProperty[PSX_PG_bone_collection_list_item] + bone_collection_list_index: int + root_bone_name: str + + +class PSX_PG_scene_export(TransformSourceMixin): + pass