Compare commits
16 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2e2b74edaf | ||
|
|
57d1f78d9e | ||
|
|
6158eb024d | ||
|
|
71622e5ab9 | ||
|
|
6e8faa38bf | ||
|
|
a62ee207e1 | ||
|
|
4f61d341d4 | ||
|
|
24e606a3fd | ||
|
|
8c0b7f84fc | ||
|
|
2ba29b04d3 | ||
|
|
2f5ed901b2 | ||
|
|
4099c95381 | ||
|
|
5a13faeb5e | ||
|
|
3932176a57 | ||
|
|
57a2179412 | ||
|
|
605b618856 |
@@ -1,4 +1,4 @@
|
||||
This Blender add-on allows you to import and export meshes and animations to the [PSK and PSA file formats](https://wiki.beyondunreal.com/PSK_%26_PSA_file_formats).
|
||||
This Blender add-on allows you to import and export meshes and animations to and from the [PSK and PSA file formats](https://wiki.beyondunreal.com/PSK_%26_PSA_file_formats). In addition, the non-standard PSKX format is also supported for import only.
|
||||
|
||||
# Installation
|
||||
1. Download the zip file for the latest version from the [releases](https://github.com/DarklightGames/io_export_psk_psa/releases) page.
|
||||
@@ -15,8 +15,8 @@ This Blender add-on allows you to import and export meshes and animations to the
|
||||
3. Navigate to File > Export > Unreal PSK (.psk)
|
||||
4. Enter the file name and click "Export".
|
||||
|
||||
## Importing a PSK
|
||||
1. Navigate to File > Import > Unreal PSK (.psk)
|
||||
## Importing a PSK/PSKX
|
||||
1. Navigate to File > Import > Unreal PSK (.psk/.pskx)
|
||||
2. Select the PSK file you want to import and click "Import"
|
||||
|
||||
## Exporting a PSA
|
||||
@@ -37,4 +37,4 @@ This Blender add-on allows you to import and export meshes and animations to the
|
||||
## Why are the mesh normals not accurate when importing a PSK extracted from [UE Viewer](https://www.gildor.org/en/projects/umodel)?
|
||||
If preserving the mesh normals of models is important for your workflow, it is *not recommended* to export PSK files from UE Viewer. This is because UE Viewer makes no attempt to reconstruct the original [smoothing groups](https://en.wikipedia.org/wiki/Smoothing_group). As a result, the normals of imported PSK files will be incorrect when imported into Blender and will need to be manually fixed.
|
||||
|
||||
As a workaround, it is recommended to export [glTF](https://en.wikipedia.org/wiki/GlTF) meshes out of UE Viewer instead, since the glTF format has support for explicit normals and UE Viewer can correctly preserve the mesh normals on export. Note, however, that the imported glTF armature may have its bones oriented incorrectly when imported into Blender. To mitigate this, you can combine the armature of PSK and the mesh of the glTF for best results.
|
||||
As a workaround, it is recommended to export [glTF](https://en.wikipedia.org/wiki/GlTF) meshes out of UE Viewer instead, since the glTF format has support for explicit normals and UE Viewer can correctly preserve the mesh normals on export. Note, however, that the imported glTF armature may have its bones oriented incorrectly when imported into Blender. To mitigate this, you can combine the armature of PSK and the mesh of the glTF for best results.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
bl_info = {
|
||||
"name": "PSK/PSA Importer/Exporter",
|
||||
"author": "Colin Basnett",
|
||||
"version": (1, 2, 0),
|
||||
"version": (2, 1, 0),
|
||||
"blender": (2, 80, 0),
|
||||
# "location": "File > Export > PSK Export (.psk)",
|
||||
"description": "PSK/PSA Import/Export (.psk/.psa)",
|
||||
@@ -58,7 +58,7 @@ def psk_export_menu_func(self, context):
|
||||
|
||||
|
||||
def psk_import_menu_func(self, context):
|
||||
self.layout.operator(psk_importer.PskImportOperator.bl_idname, text='Unreal PSK (.psk)')
|
||||
self.layout.operator(psk_importer.PskImportOperator.bl_idname, text='Unreal PSK (.psk/.pskx)')
|
||||
|
||||
|
||||
def psa_export_menu_func(self, context):
|
||||
@@ -72,6 +72,7 @@ def register():
|
||||
bpy.types.TOPBAR_MT_file_import.append(psk_import_menu_func)
|
||||
bpy.types.TOPBAR_MT_file_export.append(psa_export_menu_func)
|
||||
bpy.types.Scene.psa_import = PointerProperty(type=psa_importer.PsaImportPropertyGroup)
|
||||
bpy.types.Scene.psk_import = PointerProperty(type=psk_importer.PskImportPropertyGroup)
|
||||
bpy.types.Scene.psa_export = PointerProperty(type=psa_exporter.PsaExportPropertyGroup)
|
||||
bpy.types.Scene.psk_export = PointerProperty(type=psk_exporter.PskExportPropertyGroup)
|
||||
|
||||
|
||||
@@ -1,4 +1,43 @@
|
||||
from ctypes import *
|
||||
from typing import Tuple
|
||||
|
||||
|
||||
class Color(Structure):
    """An RGBA color with four 8-bit unsigned channels, as stored in PSK/PSA files."""
    _fields_ = [
        ('r', c_ubyte),
        ('g', c_ubyte),
        ('b', c_ubyte),
        ('a', c_ubyte),
    ]

    def __iter__(self):
        # Yield the channels in RGBA order so tuple(self) round-trips the fields.
        yield self.r
        yield self.g
        yield self.b
        yield self.a

    def __eq__(self, other):
        # Compare the full channel tuples. The previous zip-based comparison
        # stopped at the shorter sequence, so a 3-element tuple could compare
        # equal to a 4-channel color; tuple equality also checks length.
        try:
            return tuple(self) == tuple(other)
        except TypeError:
            # `other` is not iterable; defer to the other operand per convention.
            return NotImplemented

    def __repr__(self):
        return repr(tuple(self))

    def normalized(self) -> Tuple:
        """Return the color as a tuple of floats scaled from [0, 255] to [0.0, 1.0]."""
        return tuple(map(lambda x: x / 255.0, iter(self)))
|
||||
|
||||
|
||||
class Vector2(Structure):
    """A 2D vector of 32-bit floats, as stored in PSK/PSA files."""
    _fields_ = [
        ('x', c_float),
        ('y', c_float),
    ]

    def __iter__(self):
        # Iterate the components in (x, y) order.
        return iter((self.x, self.y))

    def __repr__(self):
        return repr((self.x, self.y))
|
||||
|
||||
|
||||
class Vector3(Structure):
|
||||
|
||||
@@ -1,6 +1,13 @@
|
||||
from typing import List
|
||||
|
||||
|
||||
def rgb_to_srgb(c):
    """Convert a linear color component to its sRGB-encoded value.

    Applies the standard sRGB transfer function: a straight linear scale
    below the cutoff, and a gamma curve (exponent 1/2.4) above it.
    """
    return 12.92 * c if c <= 0.0031308 else 1.055 * c ** (1.0 / 2.4) - 0.055
|
||||
|
||||
|
||||
def populate_bone_group_list(armature_object, bone_group_list):
|
||||
bone_group_list.clear()
|
||||
|
||||
|
||||
@@ -102,6 +102,7 @@ class PsaExportOperator(Operator, ExportHelper):
|
||||
box.label(text='Bones', icon='BONE_DATA')
|
||||
bone_filter_mode_items = property_group.bl_rna.properties['bone_filter_mode'].enum_items_static
|
||||
row = box.row(align=True)
|
||||
|
||||
for item in bone_filter_mode_items:
|
||||
identifier = item.identifier
|
||||
item_layout = row.row(align=True)
|
||||
@@ -114,6 +115,7 @@ class PsaExportOperator(Operator, ExportHelper):
|
||||
rows = max(3, min(len(property_group.bone_group_list), 10))
|
||||
row.template_list('PSX_UL_BoneGroupList', '', property_group, 'bone_group_list', property_group, 'bone_group_list_index', rows=rows)
|
||||
|
||||
|
||||
def is_action_for_armature(self, action):
|
||||
if len(action.fcurves) == 0:
|
||||
return False
|
||||
|
||||
@@ -10,12 +10,20 @@ from bpy.props import StringProperty, BoolProperty, CollectionProperty, PointerP
|
||||
from .reader import PsaReader
|
||||
|
||||
|
||||
class PsaImportOptions(object):
    """Options controlling how PSA animation sequences are imported."""

    def __init__(self):
        # Names of the sequences (actions) to import from the PSA file.
        self.sequence_names = []
        # Drop keyframes whose value barely differs from the previous one.
        self.should_clean_keys = True
        # Assign a fake user to each created action so it survives saving.
        self.should_use_fake_user = False
        # Stash each imported action on a muted, non-contributing NLA track.
        self.should_stash = False
|
||||
|
||||
|
||||
class PsaImporter(object):
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def import_psa(self, psa_reader: PsaReader, sequence_names: List[AnyStr], armature_object):
|
||||
sequences = map(lambda x: psa_reader.sequences[x], sequence_names)
|
||||
def import_psa(self, psa_reader: PsaReader, armature_object, options: PsaImportOptions):
|
||||
sequences = map(lambda x: psa_reader.sequences[x], options.sequence_names)
|
||||
armature_data = armature_object.data
|
||||
|
||||
class ImportBone(object):
|
||||
@@ -104,9 +112,11 @@ class PsaImporter(object):
|
||||
import_bone.post_quat = import_bone.orig_quat.conjugated()
|
||||
|
||||
# Create and populate the data for new sequences.
|
||||
actions = []
|
||||
for sequence in sequences:
|
||||
# Add the action.
|
||||
action = bpy.data.actions.new(name=sequence.name.decode())
|
||||
action.use_fake_user = options.should_use_fake_user
|
||||
|
||||
# Create f-curves for the rotation and location of each bone.
|
||||
for psa_bone_index, armature_bone_index in psa_to_armature_bone_indices.items():
|
||||
@@ -124,28 +134,39 @@ class PsaImporter(object):
|
||||
action.fcurves.new(location_data_path, index=2), # Lz
|
||||
]
|
||||
|
||||
# Read the sequence keys from the PSA file.
|
||||
sequence_name = sequence.name.decode('windows-1252')
|
||||
|
||||
# Read the sequence data matrix from the PSA.
|
||||
sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name)
|
||||
keyframe_write_matrix = np.ones(sequence_data_matrix.shape, dtype=np.int8)
|
||||
|
||||
# The first step is to determine the frames at which each bone will write out a keyframe.
|
||||
threshold = 0.001
|
||||
# Convert the sequence's data from world-space to local-space.
|
||||
for bone_index, import_bone in enumerate(import_bones):
|
||||
if import_bone is None:
|
||||
continue
|
||||
for fcurve_index, fcurve in enumerate(import_bone.fcurves):
|
||||
# Get all the keyframe data for the bone's f-curve data from the sequence data matrix.
|
||||
fcurve_frame_data = sequence_data_matrix[:, bone_index, fcurve_index]
|
||||
last_written_datum = 0
|
||||
for frame_index, datum in enumerate(fcurve_frame_data):
|
||||
# If the f-curve data is not different enough to the last written frame, un-mark this data for writing.
|
||||
if frame_index > 0 and abs(datum - last_written_datum) < threshold:
|
||||
keyframe_write_matrix[frame_index, bone_index, fcurve_index] = 0
|
||||
else:
|
||||
last_written_datum = fcurve_frame_data[frame_index]
|
||||
for frame_index in range(sequence.frame_count):
|
||||
# This bone has writeable keyframes for this frame.
|
||||
key_data = sequence_data_matrix[frame_index, bone_index]
|
||||
# Calculate the local-space key data for the bone.
|
||||
sequence_data_matrix[frame_index, bone_index] = calculate_fcurve_data(import_bone, key_data)
|
||||
|
||||
# Clean the keyframe data. This is accomplished by writing zeroes to the write matrix when there is an
|
||||
# insufficiently large change in the data from frame-to-frame.
|
||||
if options.should_clean_keys:
|
||||
threshold = 0.001
|
||||
for bone_index, import_bone in enumerate(import_bones):
|
||||
if import_bone is None:
|
||||
continue
|
||||
for fcurve_index in range(len(import_bone.fcurves)):
|
||||
# Get all the keyframe data for the bone's f-curve data from the sequence data matrix.
|
||||
fcurve_frame_data = sequence_data_matrix[:, bone_index, fcurve_index]
|
||||
last_written_datum = 0
|
||||
for frame_index, datum in enumerate(fcurve_frame_data):
|
||||
# If the f-curve data is not different enough to the last written frame, un-mark this data for writing.
|
||||
if frame_index > 0 and abs(datum - last_written_datum) < threshold:
|
||||
keyframe_write_matrix[frame_index, bone_index, fcurve_index] = 0
|
||||
else:
|
||||
last_written_datum = datum
|
||||
|
||||
# Write the keyframes out!
|
||||
for frame_index in range(sequence.frame_count):
|
||||
@@ -156,12 +177,22 @@ class PsaImporter(object):
|
||||
if bone_has_writeable_keyframes:
|
||||
# This bone has writeable keyframes for this frame.
|
||||
key_data = sequence_data_matrix[frame_index, bone_index]
|
||||
# Calculate the local-space key data for the bone.
|
||||
fcurve_data = calculate_fcurve_data(import_bone, key_data)
|
||||
for fcurve, should_write, datum in zip(import_bone.fcurves, keyframe_write_matrix[frame_index, bone_index], fcurve_data):
|
||||
for fcurve, should_write, datum in zip(import_bone.fcurves, keyframe_write_matrix[frame_index, bone_index], key_data):
|
||||
if should_write:
|
||||
fcurve.keyframe_points.insert(frame_index, datum, options={'FAST'})
|
||||
|
||||
actions.append(action)
|
||||
|
||||
# If the user specifies, store the new animations as strips on a non-contributing NLA stack.
|
||||
if options.should_stash:
|
||||
if armature_object.animation_data is None:
|
||||
armature_object.animation_data_create()
|
||||
for action in actions:
|
||||
nla_track = armature_object.animation_data.nla_tracks.new()
|
||||
nla_track.name = action.name
|
||||
nla_track.mute = True
|
||||
nla_track.strips.new(name=action.name, start=0, action=action)
|
||||
|
||||
|
||||
class PsaImportPsaBoneItem(PropertyGroup):
|
||||
bone_name: StringProperty()
|
||||
@@ -182,7 +213,6 @@ class PsaImportActionListItem(PropertyGroup):
|
||||
|
||||
|
||||
def on_psa_file_path_updated(property, context):
|
||||
print('PATH UPDATED')
|
||||
property_group = context.scene.psa_import
|
||||
property_group.action_list.clear()
|
||||
property_group.psa_bones.clear()
|
||||
@@ -195,33 +225,23 @@ def on_psa_file_path_updated(property, context):
|
||||
item.action_name = sequence.name.decode('windows-1252')
|
||||
item.frame_count = sequence.frame_count
|
||||
item.is_selected = True
|
||||
|
||||
for psa_bone in psa_reader.bones:
|
||||
item = property_group.psa_bones.add()
|
||||
item.bone_name = psa_bone.name
|
||||
except IOError as e:
|
||||
print('ERROR READING FILE')
|
||||
print(e)
|
||||
# TODO: set an error somewhere so the user knows the PSA could not be read.
|
||||
pass
|
||||
|
||||
|
||||
def on_armature_object_updated(property, context):
|
||||
# TODO: ensure that there are matching bones between the two rigs.
|
||||
property_group = context.scene.psa_import
|
||||
armature_object = property_group.armature_object
|
||||
if armature_object is not None:
|
||||
armature_bone_names = set(map(lambda bone: bone.name, armature_object.data.bones))
|
||||
psa_bone_names = set(map(lambda psa_bone: psa_bone.name, property_group.psa_bones))
|
||||
|
||||
|
||||
class PsaImportPropertyGroup(bpy.types.PropertyGroup):
|
||||
class PsaImportPropertyGroup(PropertyGroup):
|
||||
psa_file_path: StringProperty(default='', update=on_psa_file_path_updated, name='PSA File Path')
|
||||
psa_bones: CollectionProperty(type=PsaImportPsaBoneItem)
|
||||
# armature_object: PointerProperty(name='Object', type=bpy.types.Object, update=on_armature_object_updated)
|
||||
action_list: CollectionProperty(type=PsaImportActionListItem)
|
||||
action_list_index: IntProperty(name='', default=0)
|
||||
action_filter_name: StringProperty(default='')
|
||||
should_clean_keys: BoolProperty(default=True, name='Clean Keyframes', description='Exclude unnecessary keyframes from being written to the actions.')
|
||||
should_use_fake_user: BoolProperty(default=True, name='Fake User', description='Assign each imported action a fake user so that the data block is saved even it has no users.')
|
||||
should_stash: BoolProperty(default=False, name='Stash', description='Stash each imported action as a strip on a new non-contributing NLA track')
|
||||
|
||||
|
||||
class PSA_UL_ImportActionList(UIList):
|
||||
@@ -314,12 +334,10 @@ class PSA_PT_ImportPanel(Panel):
|
||||
row = layout.row()
|
||||
row.prop(property_group, 'psa_file_path', text='')
|
||||
row.enabled = False
|
||||
# row.enabled = property_group.psa_file_path is not ''
|
||||
|
||||
row = layout.row()
|
||||
|
||||
layout.separator()
|
||||
|
||||
row.operator('psa_import.select_file', text='Select PSA File', icon='FILEBROWSER')
|
||||
|
||||
if len(property_group.action_list) > 0:
|
||||
box = layout.box()
|
||||
box.label(text=f'Actions ({len(property_group.action_list)})', icon='ACTION')
|
||||
@@ -331,7 +349,13 @@ class PSA_PT_ImportPanel(Panel):
|
||||
row.operator('psa_import.actions_select_all', text='All')
|
||||
row.operator('psa_import.actions_deselect_all', text='None')
|
||||
|
||||
layout.separator()
|
||||
row = layout.row()
|
||||
row.prop(property_group, 'should_clean_keys')
|
||||
|
||||
# DATA
|
||||
row = layout.row()
|
||||
row.prop(property_group, 'should_use_fake_user')
|
||||
row.prop(property_group, 'should_stash')
|
||||
|
||||
layout.operator('psa_import.import', text=f'Import')
|
||||
|
||||
@@ -370,7 +394,12 @@ class PsaImportOperator(Operator):
|
||||
property_group = context.scene.psa_import
|
||||
psa_reader = PsaReader(property_group.psa_file_path)
|
||||
sequence_names = [x.action_name for x in property_group.action_list if x.is_selected]
|
||||
PsaImporter().import_psa(psa_reader, sequence_names, context.view_layer.objects.active)
|
||||
options = PsaImportOptions()
|
||||
options.sequence_names = sequence_names
|
||||
options.should_clean_keys = property_group.should_clean_keys
|
||||
options.should_use_fake_user = property_group.should_use_fake_user
|
||||
options.should_stash = property_group.should_stash
|
||||
PsaImporter().import_psa(psa_reader, context.view_layer.objects.active, options)
|
||||
self.report({'INFO'}, f'Imported {len(sequence_names)} action(s)')
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
@@ -41,6 +41,15 @@ class Psk(object):
|
||||
('smoothing_groups', c_int32)
|
||||
]
|
||||
|
||||
class Face32(Structure):
|
||||
_pack_ = 1
|
||||
_fields_ = [
|
||||
('wedge_indices', c_uint32 * 3),
|
||||
('material_index', c_uint8),
|
||||
('aux_material_index', c_uint8),
|
||||
('smoothing_groups', c_int32)
|
||||
]
|
||||
|
||||
class Material(Structure):
|
||||
_fields_ = [
|
||||
('name', c_char * 64),
|
||||
@@ -71,6 +80,18 @@ class Psk(object):
|
||||
('bone_index', c_int32),
|
||||
]
|
||||
|
||||
@property
|
||||
def has_extra_uvs(self):
|
||||
return len(self.extra_uvs) > 0
|
||||
|
||||
@property
|
||||
def has_vertex_colors(self):
|
||||
return len(self.vertex_colors) > 0
|
||||
|
||||
@property
|
||||
def has_vertex_normals(self):
|
||||
return len(self.vertex_normals) > 0
|
||||
|
||||
def __init__(self):
|
||||
self.points: List[Vector3] = []
|
||||
self.wedges: List[Psk.Wedge] = []
|
||||
@@ -78,3 +99,6 @@ class Psk(object):
|
||||
self.materials: List[Psk.Material] = []
|
||||
self.weights: List[Psk.Weight] = []
|
||||
self.bones: List[Psk.Bone] = []
|
||||
self.extra_uvs: List[Vector2] = []
|
||||
self.vertex_colors: List[Color] = []
|
||||
self.vertex_normals: List[Vector3] = []
|
||||
|
||||
@@ -1,23 +1,35 @@
|
||||
import os
|
||||
import bpy
|
||||
import bmesh
|
||||
import numpy as np
|
||||
from math import inf
|
||||
from typing import Optional
|
||||
from .data import Psk
|
||||
from ..helpers import rgb_to_srgb
|
||||
from mathutils import Quaternion, Vector, Matrix
|
||||
from .reader import PskReader
|
||||
from bpy.props import StringProperty
|
||||
from bpy.types import Operator
|
||||
from bpy.props import StringProperty, EnumProperty, BoolProperty
|
||||
from bpy.types import Operator, PropertyGroup
|
||||
from bpy_extras.io_utils import ImportHelper
|
||||
|
||||
|
||||
class PskImportOptions(object):
    """Options controlling how a PSK/PSKX file is imported into the scene."""

    def __init__(self):
        # Name assigned to the created armature and mesh objects/data blocks.
        self.name = ''
        # Import vertex colors (VERTEXCOLOR section), if present.
        self.should_import_vertex_colors = True
        # Color space of the stored vertex colors. Must be one of the
        # EnumProperty identifiers ('LINEAR' or 'SRGBA'); the previous default
        # 'sRGB' matched neither, so the sRGB conversion branch could never
        # trigger for callers relying on the default.
        self.vertex_color_space = 'SRGBA'
        # Import custom vertex normals (VTXNORMS section), if present.
        self.should_import_vertex_normals = True
        # Import extra UV channels (EXTRAUVS sections), if present.
        self.should_import_extra_uvs = True
|
||||
|
||||
|
||||
class PskImporter(object):
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def import_psk(self, psk: Psk, name: str, context):
|
||||
def import_psk(self, psk: Psk, context, options: PskImportOptions):
|
||||
# ARMATURE
|
||||
armature_data = bpy.data.armatures.new(name)
|
||||
armature_object = bpy.data.objects.new(name, armature_data)
|
||||
armature_data = bpy.data.armatures.new(options.name)
|
||||
armature_object = bpy.data.objects.new(options.name, armature_data)
|
||||
armature_object.show_in_front = True
|
||||
|
||||
context.scene.collection.objects.link(armature_object)
|
||||
@@ -95,8 +107,8 @@ class PskImporter(object):
|
||||
edit_bone['post_quat'] = import_bone.local_rotation.conjugated()
|
||||
|
||||
# MESH
|
||||
mesh_data = bpy.data.meshes.new(name)
|
||||
mesh_object = bpy.data.objects.new(name, mesh_data)
|
||||
mesh_data = bpy.data.meshes.new(options.name)
|
||||
mesh_object = bpy.data.objects.new(options.name, mesh_data)
|
||||
|
||||
# MATERIALS
|
||||
for material in psk.materials:
|
||||
@@ -120,7 +132,6 @@ class PskImporter(object):
|
||||
bm_face.material_index = face.material_index
|
||||
except ValueError:
|
||||
degenerate_face_indices.add(face_index)
|
||||
pass
|
||||
|
||||
if len(degenerate_face_indices) > 0:
|
||||
print(f'WARNING: Discarded {len(degenerate_face_indices)} degenerate face(s).')
|
||||
@@ -129,7 +140,7 @@ class PskImporter(object):
|
||||
|
||||
# TEXTURE COORDINATES
|
||||
data_index = 0
|
||||
uv_layer = mesh_data.uv_layers.new()
|
||||
uv_layer = mesh_data.uv_layers.new(name='VTXW0000')
|
||||
for face_index, face in enumerate(psk.faces):
|
||||
if face_index in degenerate_face_indices:
|
||||
continue
|
||||
@@ -138,11 +149,63 @@ class PskImporter(object):
|
||||
uv_layer.data[data_index].uv = wedge.u, 1.0 - wedge.v
|
||||
data_index += 1
|
||||
|
||||
# EXTRA UVS
|
||||
if psk.has_extra_uvs and options.should_import_extra_uvs:
|
||||
extra_uv_channel_count = int(len(psk.extra_uvs) / len(psk.wedges))
|
||||
wedge_index_offset = 0
|
||||
for extra_uv_index in range(extra_uv_channel_count):
|
||||
data_index = 0
|
||||
uv_layer = mesh_data.uv_layers.new(name=f'EXTRAUV{extra_uv_index}')
|
||||
for face_index, face in enumerate(psk.faces):
|
||||
if face_index in degenerate_face_indices:
|
||||
continue
|
||||
for wedge_index in reversed(face.wedge_indices):
|
||||
u, v = psk.extra_uvs[wedge_index_offset + wedge_index]
|
||||
uv_layer.data[data_index].uv = u, 1.0 - v
|
||||
data_index += 1
|
||||
wedge_index_offset += len(psk.wedges)
|
||||
|
||||
# VERTEX COLORS
|
||||
if psk.has_vertex_colors and options.should_import_vertex_colors:
|
||||
size = (len(psk.points), 4)
|
||||
vertex_colors = np.full(size, inf)
|
||||
vertex_color_data = mesh_data.vertex_colors.new(name='VERTEXCOLOR')
|
||||
ambiguous_vertex_color_point_indices = []
|
||||
|
||||
for wedge_index, wedge in enumerate(psk.wedges):
|
||||
point_index = wedge.point_index
|
||||
psk_vertex_color = psk.vertex_colors[wedge_index].normalized()
|
||||
if vertex_colors[point_index, 0] != inf and tuple(vertex_colors[point_index]) != psk_vertex_color:
|
||||
ambiguous_vertex_color_point_indices.append(point_index)
|
||||
else:
|
||||
vertex_colors[point_index] = psk_vertex_color
|
||||
|
||||
if options.vertex_color_space == 'SRGBA':
|
||||
for i in range(vertex_colors.shape[0]):
|
||||
vertex_colors[i, :3] = tuple(map(lambda x: rgb_to_srgb(x), vertex_colors[i, :3]))
|
||||
|
||||
for loop_index, loop in enumerate(mesh_data.loops):
|
||||
vertex_color = vertex_colors[loop.vertex_index]
|
||||
if vertex_color is not None:
|
||||
vertex_color_data.data[loop_index].color = vertex_color
|
||||
else:
|
||||
vertex_color_data.data[loop_index].color = 1.0, 1.0, 1.0, 1.0
|
||||
|
||||
if len(ambiguous_vertex_color_point_indices) > 0:
|
||||
print(f'WARNING: {len(ambiguous_vertex_color_point_indices)} vertex(es) with ambiguous vertex colors.')
|
||||
|
||||
# VERTEX NORMALS
|
||||
if psk.has_vertex_normals and options.should_import_vertex_normals:
|
||||
mesh_data.polygons.foreach_set("use_smooth", [True] * len(mesh_data.polygons))
|
||||
normals = []
|
||||
for vertex_normal in psk.vertex_normals:
|
||||
normals.append(tuple(vertex_normal))
|
||||
mesh_data.normals_split_custom_set_from_vertices(normals)
|
||||
mesh_data.use_auto_smooth = True
|
||||
|
||||
bm.normal_update()
|
||||
bm.free()
|
||||
|
||||
# VERTEX WEIGHTS
|
||||
|
||||
# Get a list of all bones that have weights associated with them.
|
||||
vertex_group_bone_indices = set(map(lambda weight: weight.bone_index, psk.weights))
|
||||
for import_bone in map(lambda x: import_bones[x], sorted(list(vertex_group_bone_indices))):
|
||||
@@ -164,12 +227,27 @@ class PskImporter(object):
|
||||
pass
|
||||
|
||||
|
||||
class PskImportPropertyGroup(PropertyGroup):
    """Scene-level user settings for the PSK/PSKX import operator."""
    # Import vertex colors from the VERTEXCOLOR section, if present.
    should_import_vertex_colors: BoolProperty(default=True, name='Vertex Colors', description='Import vertex colors from PSKX files, if available')
    # Color space the stored vertex colors are assumed to be in; 'SRGBA'
    # triggers a linear->sRGB conversion pass on import.
    vertex_color_space: EnumProperty(
        name='Vertex Color Space',
        description='The source vertex color space',
        default='SRGBA',
        items=(
            ('LINEAR', 'Linear', ''),
            ('SRGBA', 'sRGBA', ''),
        )
    )
    # Import custom vertex normals from the VTXNORMS section, if present.
    should_import_vertex_normals: BoolProperty(default=True, name='Vertex Normals', description='Import vertex normals from PSKX files, if available')
    # Import extra UV channels from EXTRAUVS sections, if present.
    should_import_extra_uvs: BoolProperty(default=True, name='Extra UVs', description='Import extra UV maps from PSKX files, if available')
|
||||
|
||||
|
||||
class PskImportOperator(Operator, ImportHelper):
|
||||
bl_idname = 'import.psk'
|
||||
bl_label = 'Export'
|
||||
__doc__ = 'Load a PSK file'
|
||||
filename_ext = '.psk'
|
||||
filter_glob: StringProperty(default='*.psk', options={'HIDDEN'})
|
||||
filter_glob: StringProperty(default='*.psk;*.pskx', options={'HIDDEN'})
|
||||
filepath: StringProperty(
|
||||
name='File Path',
|
||||
description='File path used for exporting the PSK file',
|
||||
@@ -177,13 +255,28 @@ class PskImportOperator(Operator, ImportHelper):
|
||||
default='')
|
||||
|
||||
def execute(self, context):
|
||||
pg = context.scene.psk_import
|
||||
reader = PskReader()
|
||||
psk = reader.read(self.filepath)
|
||||
name = os.path.splitext(os.path.basename(self.filepath))[0]
|
||||
PskImporter().import_psk(psk, name, context)
|
||||
options = PskImportOptions()
|
||||
options.name = os.path.splitext(os.path.basename(self.filepath))[0]
|
||||
options.vertex_color_space = pg.vertex_color_space
|
||||
PskImporter().import_psk(psk, context, options)
|
||||
return {'FINISHED'}
|
||||
|
||||
def draw(self, context):
|
||||
pg = context.scene.psk_import
|
||||
layout = self.layout
|
||||
layout.use_property_split = True
|
||||
layout.use_property_decorate = False
|
||||
layout.prop(pg, 'should_import_vertex_normals')
|
||||
layout.prop(pg, 'should_import_extra_uvs')
|
||||
layout.prop(pg, 'should_import_vertex_colors')
|
||||
if pg.should_import_vertex_colors:
|
||||
layout.prop(pg, 'vertex_color_space')
|
||||
|
||||
|
||||
__classes__ = [
|
||||
PskImportOperator
|
||||
PskImportOperator,
|
||||
PskImportPropertyGroup,
|
||||
]
|
||||
|
||||
@@ -41,6 +41,14 @@ class PskReader(object):
|
||||
PskReader.read_types(fp, Psk.Bone, section, psk.bones)
|
||||
elif section.name == b'RAWWEIGHTS':
|
||||
PskReader.read_types(fp, Psk.Weight, section, psk.weights)
|
||||
elif section.name == b'FACE3200':
|
||||
PskReader.read_types(fp, Psk.Face32, section, psk.faces)
|
||||
elif section.name == b'VERTEXCOLOR':
|
||||
PskReader.read_types(fp, Color, section, psk.vertex_colors)
|
||||
elif section.name.startswith(b'EXTRAUVS'):
|
||||
PskReader.read_types(fp, Vector2, section, psk.extra_uvs)
|
||||
elif section.name == b'VTXNORMS':
|
||||
PskReader.read_types(fp, Vector3, section, psk.vertex_normals)
|
||||
else:
|
||||
raise RuntimeError(f'Unrecognized section "{section.name}"')
|
||||
raise RuntimeError(f'Unrecognized section "{section.name}" at position {fp.tell()}')
|
||||
return psk
|
||||
|
||||
Reference in New Issue
Block a user