diff --git a/io_scene_psk_psa/psa/export/operators.py b/io_scene_psk_psa/psa/export/operators.py index aeafe47..6d88f7e 100644 --- a/io_scene_psk_psa/psa/export/operators.py +++ b/io_scene_psk_psa/psa/export/operators.py @@ -43,11 +43,10 @@ def is_action_for_object(obj: Object, action: Action): version = SemanticVersion(bpy.app.version) def is_action_for_object_legacy(action: Action, obj: Object): - ''' + """ This is the legacy behavior before slotted actions were introduced in Blender 4.4. It would simply check if it had any f-curves that corresponded to any bones in the armature. - @return: - ''' + """ import re armature_data = obj.data bone_names = set([x.name for x in armature_data.bones]) diff --git a/io_scene_psk_psa/psk/builder.py b/io_scene_psk_psa/psk/builder.py index 1dc8d78..6dbd22a 100644 --- a/io_scene_psk_psa/psk/builder.py +++ b/io_scene_psk_psa/psk/builder.py @@ -131,10 +131,10 @@ def _get_mesh_export_space_matrix(armature_object: Optional[Object], export_spac def _get_material_name_indices(obj: Object, material_names: List[str]) -> Iterable[int]: - ''' + """ Returns the index of the material in the list of material names. If the material is not found, the index 0 is returned. - ''' + """ for material_slot in obj.material_slots: if material_slot.material is None: yield 0 diff --git a/io_scene_psk_psa/psk/importer.py b/io_scene_psk_psa/psk/importer.py index a902aa3..771ded8 100644 --- a/io_scene_psk_psa/psk/importer.py +++ b/io_scene_psk_psa/psk/importer.py @@ -29,9 +29,9 @@ class PskImportOptions: class ImportBone: - ''' + """ Intermediate bone type for the purpose of construction. 
- ''' + """ def __init__(self, index: int, psk_bone: PsxBone): self.index: int = index self.psk_bone: PsxBone = psk_bone @@ -63,7 +63,7 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions) mesh_object = None if options.should_import_skeleton: - # ARMATURE + # Armature armature_data = bpy.data.armatures.new(name) armature_object = bpy.data.objects.new(name, armature_data) armature_object.show_in_front = True @@ -120,12 +120,12 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions) edit_bone_matrix.translation = import_bone.world_matrix.translation edit_bone.matrix = edit_bone_matrix - # MESH + # Mesh if options.should_import_mesh: mesh_data = bpy.data.meshes.new(name) mesh_object = bpy.data.objects.new(name, mesh_data) - # MATERIALS + # Materials if options.should_import_materials: for material_index, psk_material in enumerate(psk.materials): material_name = psk_material.name.decode('utf-8') @@ -154,13 +154,13 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions) bm = bmesh.new() - # VERTICES + # Vertices for point in psk.points: bm.verts.new(tuple(point)) bm.verts.ensure_lookup_table() - # FACES + # Faces invalid_face_indices = set() for face_index, face in enumerate(psk.faces): point_indices = map(lambda i: psk.wedges[i].point_index, reversed(face.wedge_indices)) @@ -180,7 +180,7 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions) bm.to_mesh(mesh_data) - # TEXTURE COORDINATES + # Texture Coordinates uv_layer_data_index = 0 uv_layer = mesh_data.uv_layers.new(name='UVMap') for face_index, face in enumerate(psk.faces): @@ -191,7 +191,7 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions) uv_layer.data[uv_layer_data_index].uv = wedge.u, 1.0 - wedge.v uv_layer_data_index += 1 - # EXTRA UVS + # Extra UVs if psk.has_extra_uvs and options.should_import_extra_uvs: extra_uv_channel_count = int(len(psk.extra_uvs) / 
len(psk.wedges)) wedge_index_offset = 0 @@ -207,7 +207,7 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions) uv_layer_data_index += 1 wedge_index_offset += len(psk.wedges) - # VERTEX COLORS + # Vertex Colors if psk.has_vertex_colors and options.should_import_vertex_colors: # Convert vertex colors to sRGB if necessary. psk_vertex_colors = np.zeros((len(psk.vertex_colors), 4)) @@ -235,7 +235,7 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions) face_corner_color_attribute = mesh_data.attributes.new(name='VERTEXCOLOR', type='FLOAT_COLOR', domain='CORNER') face_corner_color_attribute.data.foreach_set('color', face_corner_colors.flatten()) - # VERTEX NORMALS + # Vertex Normals if psk.has_vertex_normals and options.should_import_vertex_normals: mesh_data.polygons.foreach_set('use_smooth', [True] * len(mesh_data.polygons)) normals = [] @@ -248,7 +248,7 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions) bm.normal_update() bm.free() - # WEIGHTS + # Weights # Get a list of all bones that have weights associated with them. vertex_group_bone_indices = set(map(lambda weight: weight.bone_index, psk.weights)) vertex_groups: List[Optional[VertexGroup]] = [None] * len(psk.bones) @@ -258,7 +258,7 @@ def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions) for weight in psk.weights: vertex_groups[weight.bone_index].add((weight.point_index,), weight.weight, 'ADD') - # MORPHS (SHAPE KEYS) + # Morphs (Shape Keys) if options.should_import_shape_keys: morph_data_iterator = iter(psk.morph_data) diff --git a/io_scene_psk_psa/psk/reader.py b/io_scene_psk_psa/psk/reader.py index e67f76c..193c91c 100644 --- a/io_scene_psk_psa/psk/reader.py +++ b/io_scene_psk_psa/psk/reader.py @@ -29,7 +29,6 @@ def _read_material_references(path: str) -> List[str]: def read_psk(path: str) -> Psk: - psk = Psk() # Read the PSK file sections. 
@@ -75,20 +74,20 @@ def read_psk(path: str) -> Psk: fp.seek(section.data_size * section.data_count, os.SEEK_CUR) warnings.warn(f'Unrecognized section "{section.name} at position {fp.tell():15}"') - ''' + """ UEViewer exports a sidecar file (*.props.txt) with fully-qualified reference paths for each material (e.g., Texture'Package.Group.Object'). - ''' + """ psk.material_references = _read_material_references(path) - ''' + """ Tools like UEViewer and CUE4Parse write the point index as a 32-bit integer, exploiting the fact that due to struct alignment, there were 16-bits of padding following the original 16-bit point index in the wedge struct. However, this breaks compatibility with PSK files that were created with older tools that treated the point index as a 16-bit integer and might have junk data written to the padding bits. To work around this, we check if each point is still addressable using a 16-bit index, and if it is, assume the point index is a 16-bit integer and truncate the high bits. - ''' + """ if len(psk.points) <= 65536: for wedge in psk.wedges: wedge.point_index &= 0xFFFF diff --git a/io_scene_psk_psa/shared/dfs.py b/io_scene_psk_psa/shared/dfs.py index 77bbff2..baba7a5 100644 --- a/io_scene_psk_psa/shared/dfs.py +++ b/io_scene_psk_psa/shared/dfs.py @@ -1,9 +1,9 @@ -''' +""" Depth-first object iterator functions for Blender collections and view layers. These functions are used to iterate over objects in a collection or view layer in a depth-first manner, including instances. This is useful for exporters that need to traverse the object hierarchy in a predictable order. -''' +""" from typing import Optional, Set, Iterable, List @@ -33,24 +33,24 @@ class DfsObject: @property def is_selected(self) -> bool: - ''' + """ Check if the object is selected. @return: True if the object is selected, False otherwise. 
- ''' + """ if self.instance_objects: return self.instance_objects[-1].select_get() return self.obj.select_get() def _dfs_object_children(obj: Object, collection: Collection) -> Iterable[Object]: - ''' + """ Construct a list of objects in hierarchy order from `collection.objects`, only keeping those that are in the collection. @param obj: The object to start the search from. @param collection: The collection to search in. @return: An iterable of objects in hierarchy order. - ''' + """ yield obj for child in obj.children: if child.name in collection.objects: @@ -58,13 +58,13 @@ def _dfs_object_children(obj: Object, collection: Collection) -> Iterable[Object def dfs_objects_in_collection(collection: Collection) -> Iterable[Object]: - ''' + """ Returns a depth-first iterator over all objects in a collection, only keeping those that are directly in the collection. @param collection: The collection to search in. @return: An iterable of objects in hierarchy order. - ''' + """ objects_hierarchy = [] for obj in collection.objects: if obj.parent is None or obj.parent not in set(collection.objects): @@ -74,12 +74,12 @@ def dfs_objects_in_collection(collection: Collection) -> Iterable[Object]: def dfs_collection_objects(collection: Collection, visible_only: bool = False) -> Iterable[DfsObject]: - ''' + """ Depth-first search of objects in a collection, including recursing into instances. @param collection: The collection to search in. @return: An iterable of tuples containing the object, the instance objects, and the world matrix. - ''' + """ yield from _dfs_collection_objects_recursive(collection) @@ -89,7 +89,7 @@ def _dfs_collection_objects_recursive( matrix_world: Matrix = Matrix.Identity(4), visited: Optional[Set[Object]]=None ) -> Iterable[DfsObject]: - ''' + """ Depth-first search of objects in a collection, including recursing into instances. This is a recursive function. 
@@ -98,7 +98,7 @@ def _dfs_collection_objects_recursive( @param matrix_world: The world matrix of the current object. @param visited: A set of visited object-instance pairs. @return: An iterable of tuples containing the object, the instance objects, and the world matrix. - ''' + """ # We want to also yield the top-level instance object so that callers can inspect the selection status etc. if visited is None: @@ -132,12 +132,12 @@ def _dfs_collection_objects_recursive( def dfs_view_layer_objects(view_layer: ViewLayer) -> Iterable[DfsObject]: - ''' + """ Depth-first iterator over all objects in a view layer, including recursing into instances. @param view_layer: The view layer to inspect. @return: An iterable of tuples containing the object, the instance objects, and the world matrix. - ''' + """ visited = set() def layer_collection_objects_recursive(layer_collection: LayerCollection): for child in layer_collection.children: @@ -149,13 +149,13 @@ def dfs_view_layer_objects(view_layer: ViewLayer) -> Iterable[DfsObject]: def _is_dfs_object_visible(obj: Object, instance_objects: List[Object]) -> bool: - ''' + """ Check if a DFS object is visible. @param obj: The object. @param instance_objects: The instance objects. @return: True if the object is visible, False otherwise. - ''' + """ if instance_objects: return instance_objects[-1].visible_get() return obj.visible_get() diff --git a/io_scene_psk_psa/shared/helpers.py b/io_scene_psk_psa/shared/helpers.py index 60c5df5..110aea5 100644 --- a/io_scene_psk_psa/shared/helpers.py +++ b/io_scene_psk_psa/shared/helpers.py @@ -283,11 +283,11 @@ def create_psx_bones( bone_filter_mode: str = 'ALL', bone_collection_indices: Optional[List[Tuple[str, int]]] = None, ) -> PsxBoneCreateResult: - ''' + """ Creates a list of PSX bones from the given armature objects and options. This function will throw a RuntimeError if multiple armature objects are passed in and the export space is not WORLD. 
It will also throw a RuntimeError if the bone names are not unique when compared case-insensitively. - ''' + """ if bone_collection_indices is None: bone_collection_indices = [] diff --git a/io_scene_psk_psa/shared/types.py b/io_scene_psk_psa/shared/types.py index 179a157..75a0861 100644 --- a/io_scene_psk_psa/shared/types.py +++ b/io_scene_psk_psa/shared/types.py @@ -5,8 +5,8 @@ from bpy.types import PropertyGroup, UIList, UILayout, Context, AnyType, Panel class PSX_UL_bone_collection_list(UIList): - def draw_item(self, context: Context, layout: UILayout, data: AnyType, item: AnyType, icon: int, - active_data: AnyType, active_property: str, index: int = 0, flt_flag: int = 0): + def draw_item(self, _context: Context, layout: UILayout, _data: AnyType, item: AnyType, _icon: int, + _active_data: AnyType, _active_property: str, _index: int = 0, _flt_flag: int = 0): row = layout.row() row.prop(item, 'is_selected', text=getattr(item, 'name')) @@ -81,13 +81,13 @@ up_items = ( ) -def forward_axis_update(self, _context): +def forward_axis_update(self, _context): if self.forward_axis == self.up_axis: # Automatically set the up axis to the next available axis self.up_axis = next((axis for axis in axis_identifiers if axis != self.forward_axis), 'Z') -def up_axis_update(self, _context): +def up_axis_update(self, _context): if self.up_axis == self.forward_axis: # Automatically set the forward axis to the next available axis self.forward_axis = next((axis for axis in axis_identifiers if axis != self.up_axis), 'X') @@ -134,12 +134,11 @@ class PsxBoneExportMixin: bone_collection_list_index: IntProperty(default=0, name='', description='') root_bone_name: StringProperty( name='Root Bone Name', - description='The name of the generated root bone when exporting multiple armatures', + description='The name of the root bone when exporting a PSK with either no armature or multiple armatures', default='ROOT', ) - classes = ( PSX_PG_action_export, PSX_PG_bone_collection_list_item,