Loads of work on localization
Also updated some of the operator UI to use the new panels
This commit is contained in:
@@ -60,7 +60,7 @@ def populate_bone_collection_list(armature_object: Object, bone_collection_list:
|
||||
return
|
||||
|
||||
item = bone_collection_list.add()
|
||||
item.name = 'Unassigned'
|
||||
item.name = bpy.app.translations.pgettext_iface('Unassigned')
|
||||
item.index = -1
|
||||
# Count the number of bones without an assigned bone collection
|
||||
item.count = sum(map(lambda bone: 1 if len(bone.collections) == 0 else 0, armature.bones))
|
||||
@@ -78,9 +78,12 @@ def check_bone_names(bone_names: Iterable[str]):
|
||||
pattern = re.compile(r'^[a-zA-Z\d_\- ]+$')
|
||||
invalid_bone_names = [x for x in bone_names if pattern.match(x) is None]
|
||||
if len(invalid_bone_names) > 0:
|
||||
raise RuntimeError(f'The following bone names are invalid: {invalid_bone_names}.\n'
|
||||
f'Bone names must only contain letters, numbers, spaces, hyphens and underscores.\n'
|
||||
f'You can bypass this by disabling "Enforce Bone Name Restrictions" in the export settings.')
|
||||
message = bpy.app.translations.pgettext_iface(
|
||||
'The following bone names are invalid: {invalid_bone_names}.\n'
|
||||
'Bone names must only contain letters, numbers, spaces, hyphens and underscores.\n'
|
||||
'You can bypass this by disabling "Enforce Bone Name Restrictions" in the export settings.'
|
||||
)
|
||||
raise RuntimeError(message.format(invalid_bone_names=str(invalid_bone_names)))
|
||||
|
||||
|
||||
def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_collection_indices: List[int]) -> List[str]:
|
||||
@@ -153,13 +156,18 @@ def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_c
|
||||
# TODO: in future, it would be preferential to have a readout of *all* instigator bones.
|
||||
instigator_bone_name = instigator_bone_names[bone_names.index(bone_name)]
|
||||
if instigator_bone_name is None:
|
||||
print(f'Root bone "{root_bone_name}" was included because {bone_name} was marked for export')
|
||||
message = bpy.app.translations.pgettext_iface('Root bone "{root_bone_name}" was included because {bone_name} was marked for export')
|
||||
message = message.format(root_bone_name=root_bone_name, bone_name=bone_name)
|
||||
print(message)
|
||||
break
|
||||
bone_name = instigator_bone_name
|
||||
|
||||
raise RuntimeError('Exported bone hierarchy must have a single root bone.\n'
|
||||
f'The bone hierarchy marked for export has {len(root_bones)} root bones: {root_bone_names}.\n'
|
||||
f'Additional debugging information has been written to the console.')
|
||||
message = bpy.app.translations.pgettext_iface(
|
||||
'Exported bone hierarchy must have a single root bone.\n'
|
||||
'The bone hierarchy marked for export has {root_bone_count} root bones: {root_bone_names}.\n'
|
||||
'Additional debugging information has been written to the console.'
|
||||
)
|
||||
raise RuntimeError(message.format(root_bone_count=len(root_bones), root_bone_names=str(root_bone_names)))
|
||||
|
||||
return bone_names
|
||||
|
||||
|
||||
@@ -23,5 +23,28 @@ langs = {
|
||||
('*', 'Numbers of vertices ({point_count}) exceeds limit of {max_point_count}'): '頂点の数({point_count})が{max_point_count}の制限を超えています',
|
||||
('*', 'Number of materials ({material_count}) exceeds limit of {max_material_count}'): 'マテリアルの数({material_count})が{max_material_count}の制限を超えています',
|
||||
('*', 'Number of bones ({bone_count}) exceeds limit of {max_bone_count}'): 'ボーンの数({bone_count})が{max_bone_count}の制限を超えています',
|
||||
('*', 'Load a PSK file'): 'PSKファイルを読み込む',
|
||||
('*', 'The active object must be an armature'): 'アクティブなオブジェクトはアーマチュアである必要があります',
|
||||
('*', 'Import the selected animations into the scene as actions'): '選択したアニメーションをアクションとしてシーンにインポートします',
|
||||
('*', 'Import'): 'インポート',
|
||||
('*', 'Import extra UVs, if available'): '利用可能な場合、追加のUVをインポートします',
|
||||
('*', 'Import vertex normals, if available'): '利用可能な場合、頂点法線をインポートします',
|
||||
('*', 'Import vertex colors, if available'): '利用可能な場合、頂点カラーをインポートします',
|
||||
('*', 'Import shape keys, if available'): '利用可能な場合、シェイプキーをインポートします',
|
||||
('*', 'The source vertex color space'): 'ソースの頂点カラーの色空間',
|
||||
('*', 'Unhandled section "{section_name}" at position {position}') : '位置{position}の"{section_name}"セクションは処理されていません',
|
||||
('*', 'Nothing to import'): 'インポートするものがありません',
|
||||
('*', 'PSK imported with {count} warning(s)'): '{count}個の警告付きでPSKがインポートされました',
|
||||
('*', 'PSK imported ({name})'): 'PSKがインポートされました({name})',
|
||||
('*', 'FPS Source'): 'FPSのソース',
|
||||
('*', 'Prefix Action Name'): 'アクション名のプレフィックス',
|
||||
('*', 'Convert to Samples'): 'サンプルに変換',
|
||||
('*', 'Stash'): '保留',
|
||||
('*', 'Select all visible sequences'): 'すべての表示されているシーケンスを選択',
|
||||
('*', 'Deselect all visible sequences'): 'すべての表示されているシーケンスの選択を解除',
|
||||
('*', 'Select By Text List'): 'テキストリストで選択',
|
||||
('*', 'Select sequences by name from text list'): 'テキストリストから名前でシーケンスを選択',
|
||||
('*', 'Bone Name Mapping'): 'ボーン名のマッピング',
|
||||
('*', 'Use Config File'): '設定ファイルを使用',
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from typing import Optional
|
||||
|
||||
import bpy.app.translations
|
||||
from bpy.types import Armature, Bone, Action, PoseBone
|
||||
|
||||
from .data import *
|
||||
@@ -95,7 +96,8 @@ def build_psa(context: bpy.types.Context, options: PsaBuildOptions) -> Psa:
|
||||
try:
|
||||
psa_bone.name = bytes(bone.name, encoding='windows-1252')
|
||||
except UnicodeEncodeError:
|
||||
raise RuntimeError(f'Bone name "{bone.name}" contains characters that cannot be encoded in the Windows-1252 codepage')
|
||||
message = bpy.app.translations.pgettext_iface('Bone name "{name}" contains characters that cannot be encoded in the Windows-1252 codepage')
|
||||
raise RuntimeError(message.format(name=bone.name))
|
||||
|
||||
try:
|
||||
parent_index = bones.index(bone.parent)
|
||||
@@ -172,7 +174,9 @@ def build_psa(context: bpy.types.Context, options: PsaBuildOptions) -> Psa:
|
||||
try:
|
||||
psa_sequence.name = bytes(export_sequence.name, encoding='windows-1252')
|
||||
except UnicodeEncodeError:
|
||||
raise RuntimeError(f'Sequence name "{export_sequence.name}" contains characters that cannot be encoded in the Windows-1252 codepage')
|
||||
message = bpy.app.translations.pgettext_iface('Sequence name "{name}" contains characters that cannot be encoded in the Windows-1252 codepage')
|
||||
message = message.format(name=export_sequence.name)
|
||||
raise RuntimeError(message)
|
||||
psa_sequence.frame_count = frame_count
|
||||
psa_sequence.frame_start_index = frame_start_index
|
||||
psa_sequence.fps = frame_count / sequence_duration
|
||||
|
||||
@@ -100,7 +100,9 @@ def get_sequence_fps(context: Context, fps_source: str, fps_custom: float, actio
|
||||
# Get the minimum value of action metadata FPS values.
|
||||
return min([action.psa_export.fps for action in actions])
|
||||
case _:
|
||||
raise RuntimeError(f'Invalid FPS source "{fps_source}"')
|
||||
message = bpy.app.translations.pgettext_iface('Invalid FPS source: {fps_source}')
|
||||
message = message.format(fps_source=fps_source)
|
||||
raise RuntimeError(message)
|
||||
|
||||
|
||||
def get_animation_data_object(context: Context) -> Object:
|
||||
@@ -234,84 +236,94 @@ class PSA_OT_export(Operator, ExportHelper):
|
||||
layout = self.layout
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
|
||||
flow = layout.grid_flow()
|
||||
flow.use_property_split = True
|
||||
flow.use_property_decorate = False
|
||||
sequences_header, sequences_panel = layout.panel('sequences_panel_id', default_closed=False)
|
||||
sequences_header.label(text='Sequences', icon='ACTION')
|
||||
|
||||
# FPS
|
||||
fps_row = flow.row(align=True)
|
||||
fps_row.prop(pg, 'fps_source', text='FPS')
|
||||
if pg.fps_source == 'CUSTOM':
|
||||
fps_row.prop(pg, 'fps_custom', text='')
|
||||
|
||||
# SOURCE
|
||||
flow.prop(pg, 'sequence_source', text='Source')
|
||||
|
||||
if pg.sequence_source in {'TIMELINE_MARKERS', 'NLA_TRACK_STRIPS'}:
|
||||
# ANIMDATA SOURCE
|
||||
flow.prop(pg, 'should_override_animation_data')
|
||||
if pg.should_override_animation_data:
|
||||
flow.prop(pg, 'animation_data_override', text='')
|
||||
|
||||
if pg.sequence_source == 'NLA_TRACK_STRIPS':
|
||||
flow = layout.grid_flow()
|
||||
if sequences_panel is not None:
|
||||
flow = sequences_panel.grid_flow()
|
||||
flow.use_property_split = True
|
||||
flow.use_property_decorate = False
|
||||
flow.prop(pg, 'nla_track')
|
||||
|
||||
# SELECT ALL/NONE
|
||||
row = layout.row(align=True)
|
||||
row.label(text='Select')
|
||||
row.operator(PSA_OT_export_actions_select_all.bl_idname, text='All', icon='CHECKBOX_HLT')
|
||||
row.operator(PSA_OT_export_actions_deselect_all.bl_idname, text='None', icon='CHECKBOX_DEHLT')
|
||||
# FPS
|
||||
fps_row = flow.row(align=True)
|
||||
fps_row.prop(pg, 'fps_source', text='FPS')
|
||||
if pg.fps_source == 'CUSTOM':
|
||||
fps_row.prop(pg, 'fps_custom', text='')
|
||||
|
||||
# ACTIONS
|
||||
if pg.sequence_source == 'ACTIONS':
|
||||
rows = max(3, min(len(pg.action_list), 10))
|
||||
layout.template_list('PSA_UL_export_sequences', '', pg, 'action_list', pg, 'action_list_index', rows=rows)
|
||||
elif pg.sequence_source == 'TIMELINE_MARKERS':
|
||||
rows = max(3, min(len(pg.marker_list), 10))
|
||||
layout.template_list('PSA_UL_export_sequences', '', pg, 'marker_list', pg, 'marker_list_index', rows=rows)
|
||||
elif pg.sequence_source == 'NLA_TRACK_STRIPS':
|
||||
rows = max(3, min(len(pg.nla_strip_list), 10))
|
||||
layout.template_list('PSA_UL_export_sequences', '', pg, 'nla_strip_list', pg, 'nla_strip_list_index', rows=rows)
|
||||
# SOURCE
|
||||
flow.prop(pg, 'sequence_source', text='Source')
|
||||
|
||||
col = layout.column()
|
||||
col.use_property_split = True
|
||||
col.use_property_decorate = False
|
||||
col.prop(pg, 'sequence_name_prefix')
|
||||
col.prop(pg, 'sequence_name_suffix')
|
||||
if pg.sequence_source in {'TIMELINE_MARKERS', 'NLA_TRACK_STRIPS'}:
|
||||
# ANIMDATA SOURCE
|
||||
flow.prop(pg, 'should_override_animation_data')
|
||||
if pg.should_override_animation_data:
|
||||
flow.prop(pg, 'animation_data_override', text='')
|
||||
|
||||
# Determine if there is going to be a naming conflict and display an error, if so.
|
||||
selected_items = [x for x in pg.action_list if x.is_selected]
|
||||
action_names = [x.name for x in selected_items]
|
||||
action_name_counts = Counter(action_names)
|
||||
for action_name, count in action_name_counts.items():
|
||||
if count > 1:
|
||||
layout.label(text=f'Duplicate action: {action_name}', icon='ERROR')
|
||||
break
|
||||
if pg.sequence_source == 'NLA_TRACK_STRIPS':
|
||||
flow = sequences_panel.grid_flow()
|
||||
flow.use_property_split = True
|
||||
flow.use_property_decorate = False
|
||||
flow.prop(pg, 'nla_track')
|
||||
|
||||
layout.separator()
|
||||
# SELECT ALL/NONE
|
||||
row = sequences_panel.row(align=True)
|
||||
row.label(text='Select')
|
||||
row.operator(PSA_OT_export_actions_select_all.bl_idname, text='All', icon='CHECKBOX_HLT')
|
||||
row.operator(PSA_OT_export_actions_deselect_all.bl_idname, text='None', icon='CHECKBOX_DEHLT')
|
||||
|
||||
# ACTIONS
|
||||
if pg.sequence_source == 'ACTIONS':
|
||||
rows = max(3, min(len(pg.action_list), 10))
|
||||
sequences_panel.template_list('PSA_UL_export_sequences', '', pg, 'action_list', pg, 'action_list_index', rows=rows)
|
||||
elif pg.sequence_source == 'TIMELINE_MARKERS':
|
||||
rows = max(3, min(len(pg.marker_list), 10))
|
||||
sequences_panel.template_list('PSA_UL_export_sequences', '', pg, 'marker_list', pg, 'marker_list_index', rows=rows)
|
||||
elif pg.sequence_source == 'NLA_TRACK_STRIPS':
|
||||
rows = max(3, min(len(pg.nla_strip_list), 10))
|
||||
sequences_panel.template_list('PSA_UL_export_sequences', '', pg, 'nla_strip_list', pg, 'nla_strip_list_index', rows=rows)
|
||||
|
||||
col = sequences_panel.column()
|
||||
col.use_property_split = True
|
||||
col.use_property_decorate = False
|
||||
col.prop(pg, 'sequence_name_prefix')
|
||||
col.prop(pg, 'sequence_name_suffix')
|
||||
|
||||
# Determine if there is going to be a naming conflict and display an error, if so.
|
||||
selected_items = [x for x in pg.action_list if x.is_selected]
|
||||
action_names = [x.name for x in selected_items]
|
||||
action_name_counts = Counter(action_names)
|
||||
for action_name, count in action_name_counts.items():
|
||||
if count > 1:
|
||||
text = bpy.app.translations.pgettext_iface('Duplicate action: {action_name}')
|
||||
text = text.format(action_name=action_name)
|
||||
sequences_panel.label(text, icon='ERROR')
|
||||
break
|
||||
|
||||
# BONES
|
||||
row = layout.row(align=True)
|
||||
row.prop(pg, 'bone_filter_mode', text='Bones')
|
||||
bones_header, bones_panel = layout.panel('bones_panel_id', default_closed=False)
|
||||
bones_header.label(text='Bones', icon='BONE_DATA')
|
||||
|
||||
if pg.bone_filter_mode == 'BONE_COLLECTIONS':
|
||||
row = layout.row(align=True)
|
||||
row.label(text='Select')
|
||||
row.operator(PSA_OT_export_bone_collections_select_all.bl_idname, text='All', icon='CHECKBOX_HLT')
|
||||
row.operator(PSA_OT_export_bone_collections_deselect_all.bl_idname, text='None', icon='CHECKBOX_DEHLT')
|
||||
rows = max(3, min(len(pg.bone_collection_list), 10))
|
||||
layout.template_list('PSX_UL_bone_collection_list', '', pg, 'bone_collection_list', pg, 'bone_collection_list_index',
|
||||
rows=rows)
|
||||
if bones_panel is not None:
|
||||
row = bones_panel.row(align=True)
|
||||
row.prop(pg, 'bone_filter_mode', text='Bones')
|
||||
|
||||
layout.prop(pg, 'should_enforce_bone_name_restrictions')
|
||||
if pg.bone_filter_mode == 'BONE_COLLECTIONS':
|
||||
row = bones_panel.row(align=True)
|
||||
row.label(text='Select')
|
||||
row.operator(PSA_OT_export_bone_collections_select_all.bl_idname, text='All', icon='CHECKBOX_HLT')
|
||||
row.operator(PSA_OT_export_bone_collections_deselect_all.bl_idname, text='None', icon='CHECKBOX_DEHLT')
|
||||
rows = max(3, min(len(pg.bone_collection_list), 10))
|
||||
bones_panel.template_list('PSX_UL_bone_collection_list', '', pg, 'bone_collection_list', pg, 'bone_collection_list_index',
|
||||
rows=rows)
|
||||
|
||||
layout.separator()
|
||||
bones_panel.prop(pg, 'should_enforce_bone_name_restrictions')
|
||||
|
||||
# ROOT MOTION
|
||||
layout.prop(pg, 'root_motion', text='Root Motion')
|
||||
advanced_header, advanced_panel = layout.panel('advanced_panel_id', default_closed=False)
|
||||
advanced_header.label(text='Advanced')
|
||||
|
||||
if advanced_panel is not None:
|
||||
advanced_panel.prop(pg, 'root_motion', text='Root Motion')
|
||||
|
||||
@classmethod
|
||||
def _check_context(cls, context):
|
||||
@@ -360,7 +372,9 @@ class PSA_OT_export(Operator, ExportHelper):
|
||||
animation_data = animation_data_object.animation_data
|
||||
|
||||
if animation_data is None:
|
||||
raise RuntimeError(f'No animation data for object \'{animation_data_object.name}\'')
|
||||
message = bpy.app.translations.pgettext_iface('No animation data for object "{name}"')
|
||||
message = message.format(name=animation_data_object.name)
|
||||
raise RuntimeError(message)
|
||||
|
||||
export_sequences: List[PsaBuildSequence] = []
|
||||
|
||||
@@ -398,7 +412,8 @@ class PSA_OT_export(Operator, ExportHelper):
|
||||
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [nla_strip_item.action])
|
||||
export_sequences.append(export_sequence)
|
||||
else:
|
||||
raise ValueError(f'Unhandled sequence source: {pg.sequence_source}')
|
||||
message = bpy.app.translations.pgettext_iface('Unhandled sequence source: {sequence_source}')
|
||||
raise ValueError(message.format(sequence_source=pg.sequence_source))
|
||||
|
||||
options = PsaBuildOptions()
|
||||
options.animation_data = animation_data
|
||||
|
||||
@@ -141,9 +141,9 @@ class PSA_PG_export(PropertyGroup):
|
||||
options=empty_set,
|
||||
description='',
|
||||
items=(
|
||||
('ALL', 'All', 'All bones will be exported.'),
|
||||
('ALL', 'All', 'All bones will be exported'),
|
||||
('BONE_COLLECTIONS', 'Bone Collections', 'Only bones belonging to the selected bone collections and their '
|
||||
'ancestors will be exported.'),
|
||||
'ancestors will be exported'),
|
||||
)
|
||||
)
|
||||
bone_collection_list: CollectionProperty(type=PSX_PG_bone_collection_list_item)
|
||||
@@ -152,7 +152,7 @@ class PSA_PG_export(PropertyGroup):
|
||||
default=False,
|
||||
name='Enforce Bone Name Restrictions',
|
||||
description='Bone names restrictions will be enforced. Note that bone names without properly formatted names '
|
||||
'may not be able to be referenced in-engine'
|
||||
'may not be able to be referenced by some versions of the Unreal Engine'
|
||||
)
|
||||
sequence_name_prefix: StringProperty(name='Prefix', options=empty_set)
|
||||
sequence_name_suffix: StringProperty(name='Suffix', options=empty_set)
|
||||
|
||||
@@ -15,7 +15,7 @@ class PSA_UL_export_sequences(UIList):
|
||||
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
|
||||
item = cast(PSA_PG_export_action_list_item, item)
|
||||
is_pose_marker = hasattr(item, 'is_pose_marker') and item.is_pose_marker
|
||||
layout.prop(item, 'is_selected', icon_only=True, text=item.name)
|
||||
layout.prop(item, 'is_selected', icon_only=True, text=item.name, translate=False)
|
||||
if hasattr(item, 'action') and item.action is not None and item.action.asset_data is not None:
|
||||
layout.label(text='', icon='ASSET_MANAGER')
|
||||
|
||||
@@ -24,7 +24,7 @@ class PSA_UL_export_sequences(UIList):
|
||||
if item.frame_end < item.frame_start:
|
||||
row.label(text='', icon='FRAME_PREV')
|
||||
if is_pose_marker:
|
||||
row.label(text=item.action.name, icon='PMARKER')
|
||||
row.label(text=item.action.name, icon='PMARKER', translate=False)
|
||||
|
||||
def draw_filter(self, context, layout):
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
import bpy
|
||||
from bpy.props import StringProperty
|
||||
from bpy.types import Operator, Event, Context, FileHandler
|
||||
from bpy_extras.io_utils import ImportHelper
|
||||
@@ -28,7 +29,7 @@ class PSA_OT_import_sequences_from_text(Operator):
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
pg = getattr(context.scene, 'psa_import')
|
||||
layout.label(icon='INFO', text='Each sequence name should be on a new line.')
|
||||
layout.label(icon='INFO', text='Each sequence name should be on a new line')
|
||||
layout.prop(pg, 'select_text', text='')
|
||||
|
||||
def execute(self, context):
|
||||
@@ -43,14 +44,16 @@ class PSA_OT_import_sequences_from_text(Operator):
|
||||
if sequence.action_name == line:
|
||||
sequence.is_selected = True
|
||||
count += 1
|
||||
self.report({'INFO'}, f'Selected {count} sequence(s)')
|
||||
message = bpy.app.translations.pgettext('Selected {count} sequence(s)')
|
||||
message = message.format(count=count)
|
||||
self.report({'INFO'}, message)
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
class PSA_OT_import_sequences_select_all(Operator):
|
||||
bl_idname = 'psa_import.sequences_select_all'
|
||||
bl_label = 'All'
|
||||
bl_description = 'Select all sequences'
|
||||
bl_description = 'Select all visible sequences'
|
||||
bl_options = {'INTERNAL'}
|
||||
|
||||
@classmethod
|
||||
@@ -165,17 +168,22 @@ class PSA_OT_import(Operator, ImportHelper):
|
||||
try:
|
||||
options.psa_config = read_psa_config(psa_reader, str(config_path))
|
||||
except Exception as e:
|
||||
self.report({'WARNING'}, f'Failed to read PSA config file: {e}')
|
||||
message = bpy.app.translations.pgettext_iface('Failed to read PSA config file: {error}')
|
||||
message = message.format(error=str(e))
|
||||
self.report({'WARNING'}, message)
|
||||
|
||||
result = import_psa(context, psa_reader, context.view_layer.objects.active, options)
|
||||
|
||||
if len(result.warnings) > 0:
|
||||
message = f'Imported {len(sequence_names)} action(s) with {len(result.warnings)} warning(s)\n'
|
||||
message = bpy.app.translations.pgettext_iface('Imported {action_count} action(s) with {warning_count} warning(s)')
|
||||
message = message.format(action_count=len(sequence_names), warning_count=len(result.warnings))
|
||||
self.report({'WARNING'}, message)
|
||||
for warning in result.warnings:
|
||||
self.report({'WARNING'}, warning)
|
||||
else:
|
||||
self.report({'INFO'}, f'Imported {len(sequence_names)} action(s)')
|
||||
message = bpy.app.translations.pgettext_iface('Imported {action_count} action(s)')
|
||||
message = message.format(action_count=len(sequence_names))
|
||||
self.report({'INFO'}, message)
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
@@ -259,7 +267,7 @@ class PSA_OT_import(Operator, ImportHelper):
|
||||
|
||||
class PSA_FH_import(FileHandler):
|
||||
bl_idname = 'PSA_FH_import'
|
||||
bl_label = 'File handler for Unreal PSA import'
|
||||
bl_label = ''
|
||||
bl_import_operator = 'psa_import.import'
|
||||
bl_file_extensions = '.psa'
|
||||
|
||||
|
||||
@@ -30,7 +30,7 @@ class PSA_PG_import(PropertyGroup):
|
||||
sequence_list_index: IntProperty(name='', default=0)
|
||||
should_use_fake_user: BoolProperty(default=True, name='Fake User',
|
||||
description='Assign each imported action a fake user so that the data block is '
|
||||
'saved even it has no users',
|
||||
'always saved',
|
||||
options=empty_set)
|
||||
should_use_config_file: BoolProperty(default=True, name='Use Config File',
|
||||
description='Use the .config file that is sometimes generated when the PSA '
|
||||
@@ -48,8 +48,8 @@ class PSA_PG_import(PropertyGroup):
|
||||
'will have it\'s data overwritten instead of a new action being created')
|
||||
should_write_keyframes: BoolProperty(default=True, name='Keyframes', options=empty_set)
|
||||
should_write_metadata: BoolProperty(default=True, name='Metadata', options=empty_set,
|
||||
description='Additional data will be written to the custom properties of the '
|
||||
'Action (e.g., frame rate)')
|
||||
description='Additional data will be written to the properties of the Action '
|
||||
'(e.g., frame rate)')
|
||||
sequence_filter_name: StringProperty(default='', options={'TEXTEDIT_UPDATE'})
|
||||
sequence_filter_is_selected: BoolProperty(default=False, options=empty_set, name='Only Show Selected',
|
||||
description='Only show selected sequences')
|
||||
@@ -69,8 +69,8 @@ class PSA_PG_import(PropertyGroup):
|
||||
description='The method by which bones from the PSA file are mapped to the bones of the armature',
|
||||
items=(
|
||||
('EXACT', 'Exact', 'Bone names must match exactly', 'EXACT', 0),
|
||||
('CASE_INSENSITIVE', 'Case Insensitive', 'Bones names must match, ignoring case (e.g., the bone PSA bone '
|
||||
'\'root\' can be mapped to the armature bone \'Root\')', 'CASE_INSENSITIVE', 1),
|
||||
('CASE_INSENSITIVE', 'Case Insensitive', 'Bones names must match, ignoring case (e.g., the PSA bone '
|
||||
'\'aBcDeF\' can be mapped to the armature bone \'ABCDEF\')', 'CASE_INSENSITIVE', 1),
|
||||
)
|
||||
)
|
||||
fps_source: EnumProperty(name='FPS Source', items=(
|
||||
|
||||
@@ -10,8 +10,7 @@ class PSA_UL_sequences(UIList):
|
||||
split = row.split(align=True, factor=0.75)
|
||||
column = split.row(align=True)
|
||||
column.alignment = 'LEFT'
|
||||
column.prop(item, 'is_selected', icon_only=True)
|
||||
column.label(text=getattr(item, 'action_name'), translate=False)
|
||||
column.prop(item, 'is_selected', text=getattr(item, 'action_name'), translate=False)
|
||||
|
||||
def draw_filter(self, context, layout):
|
||||
pg = getattr(context.scene, 'psa_import')
|
||||
|
||||
@@ -158,16 +158,16 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
|
||||
psa_bone_name = psa_bone_names[psa_bone_index]
|
||||
armature_bone_name = armature_bone_names[armature_bone_index]
|
||||
mapped_psa_bone_name = psa_bone_names[mapped_psa_bone_index]
|
||||
result.warnings.append(f'PSA bone {psa_bone_index} ({psa_bone_name}) could not be mapped to armature bone {armature_bone_index} ({armature_bone_name}) because the armature bone is already mapped to PSA bone {mapped_psa_bone_index} ({mapped_psa_bone_name})')
|
||||
message = bpy.app.translations.pgettext_iface('PSA bone {bone_index} ({bone_name}) could not be mapped to armature bone {armature_bone_index} ({armature_bone_name}) because the armature bone is already mapped to PSA bone {mapped_psa_bone_index} ({mapped_psa_bone_name})')
|
||||
message = message.format(bone_index=psa_bone_index, bone_name=psa_bone_name, armature_bone_index=armature_bone_index, armature_bone_name=armature_bone_name, mapped_psa_bone_index=mapped_psa_bone_index, mapped_psa_bone_name=mapped_psa_bone_name)
|
||||
result.warnings.append(message)
|
||||
|
||||
# Report if there are missing bones in the target armature.
|
||||
missing_bone_names = set(psa_bone_names).difference(set(armature_bone_names))
|
||||
if len(missing_bone_names) > 0:
|
||||
result.warnings.append(
|
||||
f'The armature \'{armature_object.name}\' is missing {len(missing_bone_names)} bones that exist in '
|
||||
'the PSA:\n' +
|
||||
str(list(sorted(missing_bone_names)))
|
||||
)
|
||||
message = bpy.app.translations.pgettext_iface('The armature \'{armature_name}\' is missing {count} bones that exist in the PSA:\n{missing_bone_names}')
|
||||
message = message.format(armature_name=armature_object.name, count=len(missing_bone_names), missing_bone_names=str(list(sorted(missing_bone_names))))
|
||||
result.warnings.append(message)
|
||||
del armature_bone_names
|
||||
|
||||
# Create intermediate bone data for import operations.
|
||||
@@ -232,7 +232,9 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
|
||||
case 'SEQUENCE':
|
||||
target_fps = sequence.fps
|
||||
case _:
|
||||
raise ValueError(f'Unknown FPS source: {options.fps_source}')
|
||||
message = bpy.app.translations.pgettext_iface('Invalid FPS source: {fps_source}')
|
||||
message = message.format(fps_source=options.fps_source)
|
||||
raise ValueError(message)
|
||||
|
||||
if options.should_write_keyframes:
|
||||
# Remove existing f-curves.
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import ctypes
|
||||
import warnings
|
||||
|
||||
import bpy.app.translations
|
||||
import numpy as np
|
||||
|
||||
from .data import *
|
||||
@@ -113,5 +115,7 @@ class PsaReader(object):
|
||||
fp.seek(section.data_size * section.data_count, 1)
|
||||
else:
|
||||
fp.seek(section.data_size * section.data_count, 1)
|
||||
print(f'Unrecognized section in PSA: "{section.name}"')
|
||||
message = bpy.app.translations.pgettext_iface('Unhandled section "{section_name}" at position {position}')
|
||||
message = message.format(section_name=section.name.decode(), position=fp.tell())
|
||||
warnings.warn(message)
|
||||
return psa
|
||||
|
||||
@@ -29,7 +29,8 @@ def get_psk_input_objects(context) -> PskInputObjects:
|
||||
input_objects = PskInputObjects()
|
||||
for selected_object in context.view_layer.objects.selected:
|
||||
if selected_object.type != 'MESH':
|
||||
raise RuntimeError(f'Selected object "{selected_object.name}" is not a mesh')
|
||||
message = bpy.app.translations.pgettext_iface('Selected object "{object_name}" is not a mesh')
|
||||
raise RuntimeError(message.format(object_name=selected_object.name))
|
||||
|
||||
input_objects.mesh_objects = context.view_layer.objects.selected
|
||||
|
||||
@@ -38,7 +39,8 @@ def get_psk_input_objects(context) -> PskInputObjects:
|
||||
|
||||
for mesh_object in input_objects.mesh_objects:
|
||||
if len(mesh_object.data.materials) == 0:
|
||||
raise RuntimeError(f'Mesh "{mesh_object.name}" must have at least one material')
|
||||
message = bpy.app.translations.pgettext_iface('Mesh "{object_name}" must have at least one material')
|
||||
raise RuntimeError(message.format(object_name=mesh_object.name))
|
||||
|
||||
# Ensure that there are either no armature modifiers (static mesh)
|
||||
# or that there is exactly one armature modifier object shared between
|
||||
@@ -50,12 +52,15 @@ def get_psk_input_objects(context) -> PskInputObjects:
|
||||
if len(modifiers) == 0:
|
||||
continue
|
||||
elif len(modifiers) > 1:
|
||||
raise RuntimeError(f'Mesh "{mesh_object.name}" must have only one armature modifier')
|
||||
message = bpy.app.translations.pgettext_iface('Mesh "{object_name}" must have only one armature modifier')
|
||||
raise RuntimeError(message.format(object_name=mesh_object.name))
|
||||
armature_modifier_objects.add(modifiers[0].object)
|
||||
|
||||
if len(armature_modifier_objects) > 1:
|
||||
armature_modifier_names = [x.name for x in armature_modifier_objects]
|
||||
raise RuntimeError(f'All selected meshes must have the same armature modifier, encountered {len(armature_modifier_names)} ({", ".join(armature_modifier_names)})')
|
||||
message = bpy.app.translations.pgettext_iface('All selected meshes must have the same armature modifier, encountered {count} ({names})')
|
||||
message = message.format(count=len(armature_modifier_objects), names=', '.join(armature_modifier_names))
|
||||
raise RuntimeError(message)
|
||||
elif len(armature_modifier_objects) == 1:
|
||||
input_objects.armature_object = list(armature_modifier_objects)[0]
|
||||
|
||||
@@ -101,8 +106,8 @@ def build_psk(context, options: PskBuildOptions) -> PskBuildResult:
|
||||
try:
|
||||
psk_bone.name = bytes(bone.name, encoding='windows-1252')
|
||||
except UnicodeEncodeError:
|
||||
raise RuntimeError(
|
||||
f'Bone name "{bone.name}" contains characters that cannot be encoded in the Windows-1252 codepage')
|
||||
message = bpy.app.translations.pgettext_iface('Bone name "{name}" contains characters that cannot be encoded in the Windows-1252 codepage')
|
||||
raise RuntimeError(message.format(name=bone.name))
|
||||
psk_bone.flags = 0
|
||||
psk_bone.children_count = 0
|
||||
|
||||
@@ -144,7 +149,8 @@ def build_psk(context, options: PskBuildOptions) -> PskBuildResult:
|
||||
try:
|
||||
psk_material.name = bytes(material.name, encoding='windows-1252')
|
||||
except UnicodeEncodeError:
|
||||
raise RuntimeError(f'Material name "{material.name}" contains characters that cannot be encoded in the Windows-1252 codepage')
|
||||
message = bpy.app.translations.pgettext_iface('Material name "{name}" contains characters that cannot be encoded in the Windows-1252 codepage')
|
||||
raise RuntimeError(message.format(name=material.name))
|
||||
psk_material.texture_index = len(psk.materials)
|
||||
psk_material.poly_flags = triangle_type_and_bit_flags_to_poly_flags(material.psk.mesh_triangle_type,
|
||||
material.psk.mesh_triangle_bit_flags)
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import bpy
|
||||
from bpy.props import StringProperty
|
||||
from bpy.types import Operator
|
||||
from bpy_extras.io_utils import ExportHelper
|
||||
@@ -26,11 +27,10 @@ def populate_material_list(mesh_objects, material_list):
|
||||
material = material_slot.material
|
||||
# TODO: put this in the poll arg?
|
||||
if material is None:
|
||||
message = 'Material slot cannot be empty (index {index})'
|
||||
message = bpy.app.translations.pgettext_iface(message.format(index=i))
|
||||
raise RuntimeError(message)
|
||||
if material.name not in material_names:
|
||||
material_names.append(material.name)
|
||||
message = bpy.app.translations.pgettext_iface('Material slot cannot be empty (index {index})')
|
||||
raise RuntimeError(message.format(index=i))
|
||||
if material.name not in materials:
|
||||
materials.append(material)
|
||||
|
||||
for index, material in enumerate(materials):
|
||||
m = material_list.add()
|
||||
@@ -123,37 +123,40 @@ class PSK_OT_export(Operator, ExportHelper):
|
||||
pg = getattr(context.scene, 'psk_export')
|
||||
|
||||
# MESH
|
||||
box = layout.box()
|
||||
box.label(text='Mesh', icon='MESH_DATA')
|
||||
box.prop(pg, 'use_raw_mesh_data')
|
||||
mesh_header, mesh_panel = layout.panel('mesh_panel_id', default_closed=False)
|
||||
mesh_header.label(text='Mesh', icon='MESH_DATA')
|
||||
if mesh_panel is not None:
|
||||
mesh_panel.prop(pg, 'use_raw_mesh_data')
|
||||
|
||||
# BONES
|
||||
box = layout.box()
|
||||
box.label(text='Bones', icon='BONE_DATA')
|
||||
bone_filter_mode_items = pg.bl_rna.properties['bone_filter_mode'].enum_items_static
|
||||
row = box.row(align=True)
|
||||
for item in bone_filter_mode_items:
|
||||
identifier = item.identifier
|
||||
item_layout = row.row(align=True)
|
||||
item_layout.prop_enum(pg, 'bone_filter_mode', item.identifier)
|
||||
item_layout.enabled = is_bone_filter_mode_item_available(context, identifier)
|
||||
bones_header, bones_panel = layout.panel('bones_panel_id', default_closed=False)
|
||||
bones_header.label(text='Bones', icon='BONE_DATA')
|
||||
if bones_panel is not None:
|
||||
bone_filter_mode_items = pg.bl_rna.properties['bone_filter_mode'].enum_items_static
|
||||
row = bones_panel.row(align=True)
|
||||
for item in bone_filter_mode_items:
|
||||
identifier = item.identifier
|
||||
item_layout = row.row(align=True)
|
||||
item_layout.prop_enum(pg, 'bone_filter_mode', item.identifier)
|
||||
item_layout.enabled = is_bone_filter_mode_item_available(context, identifier)
|
||||
|
||||
if pg.bone_filter_mode == 'BONE_COLLECTIONS':
|
||||
row = box.row()
|
||||
rows = max(3, min(len(pg.bone_collection_list), 10))
|
||||
row.template_list('PSX_UL_bone_collection_list', '', pg, 'bone_collection_list', pg, 'bone_collection_list_index', rows=rows)
|
||||
if pg.bone_filter_mode == 'BONE_COLLECTIONS':
|
||||
row = bones_panel.row()
|
||||
rows = max(3, min(len(pg.bone_collection_list), 10))
|
||||
row.template_list('PSX_UL_bone_collection_list', '', pg, 'bone_collection_list', pg, 'bone_collection_list_index', rows=rows)
|
||||
|
||||
box.prop(pg, 'should_enforce_bone_name_restrictions')
|
||||
bones_panel.prop(pg, 'should_enforce_bone_name_restrictions')
|
||||
|
||||
# MATERIALS
|
||||
box = layout.box()
|
||||
box.label(text='Materials', icon='MATERIAL')
|
||||
row = box.row()
|
||||
rows = max(3, min(len(pg.bone_collection_list), 10))
|
||||
row.template_list('PSK_UL_materials', '', pg, 'material_list', pg, 'material_list_index', rows=rows)
|
||||
col = row.column(align=True)
|
||||
col.operator(PSK_OT_material_list_move_up.bl_idname, text='', icon='TRIA_UP')
|
||||
col.operator(PSK_OT_material_list_move_down.bl_idname, text='', icon='TRIA_DOWN')
|
||||
materials_header, materials_panel = layout.panel('materials_panel_id', default_closed=False)
|
||||
materials_header.label(text='Materials', icon='MATERIAL')
|
||||
if materials_panel is not None:
|
||||
row = materials_panel.row()
|
||||
rows = max(3, min(len(pg.bone_collection_list), 10))
|
||||
row.template_list('PSK_UL_materials', '', pg, 'material_list', pg, 'material_list_index', rows=rows)
|
||||
col = row.column(align=True)
|
||||
col.operator(PSK_OT_material_list_move_up.bl_idname, text='', icon='TRIA_UP')
|
||||
col.operator(PSK_OT_material_list_move_down.bl_idname, text='', icon='TRIA_DOWN')
|
||||
|
||||
def execute(self, context):
|
||||
pg = context.scene.psk_export
|
||||
@@ -170,7 +173,8 @@ class PSK_OT_export(Operator, ExportHelper):
|
||||
self.report({'WARNING'}, warning)
|
||||
write_psk(result.psk, self.filepath)
|
||||
if len(result.warnings) > 0:
|
||||
self.report({'WARNING'}, f'PSK export successful with {len(result.warnings)} warnings')
|
||||
message = bpy.app.translations.pgettext_iface('PSK export successful with {warning_count} warnings')
|
||||
self.report({'WARNING'}, message.format(warning_count=len(result.warnings)))
|
||||
else:
|
||||
self.report({'INFO'}, f'PSK export successful')
|
||||
except RuntimeError as e:
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
import bpy.app.translations
|
||||
from bpy.props import StringProperty, BoolProperty, EnumProperty, FloatProperty
|
||||
from bpy.types import Operator, FileHandler, Context
|
||||
from bpy_extras.io_utils import ImportHelper
|
||||
@@ -13,7 +14,7 @@ empty_set = set()
|
||||
|
||||
class PSK_FH_import(FileHandler):
|
||||
bl_idname = 'PSK_FH_import'
|
||||
bl_label = 'File handler for Unreal PSK/PSKX import'
|
||||
bl_label = ''
|
||||
bl_import_operator = 'import_scene.psk'
|
||||
bl_file_extensions = '.psk;.pskx'
|
||||
|
||||
@@ -61,7 +62,7 @@ class PSK_OT_import(Operator, ImportHelper):
|
||||
default=True,
|
||||
name='Import Extra UVs',
|
||||
options=empty_set,
|
||||
description='Import extra UV maps, if available'
|
||||
description='Import extra UVs, if available'
|
||||
)
|
||||
should_import_mesh: BoolProperty(
|
||||
default=True,
|
||||
@@ -76,8 +77,7 @@ class PSK_OT_import(Operator, ImportHelper):
|
||||
should_import_skeleton: BoolProperty(
|
||||
default=True,
|
||||
name='Armature',
|
||||
options=empty_set,
|
||||
description='Armature'
|
||||
options=empty_set
|
||||
)
|
||||
bone_length: FloatProperty(
|
||||
default=1.0,
|
||||
@@ -123,11 +123,14 @@ class PSK_OT_import(Operator, ImportHelper):
|
||||
result = import_psk(psk, context, options)
|
||||
|
||||
if len(result.warnings):
|
||||
message = f'PSK imported with {len(result.warnings)} warning(s)\n'
|
||||
message = bpy.app.translations.pgettext_iface('PSK imported with {count} warning(s)')
|
||||
message = message.format(count=len(result.warnings))
|
||||
message += '\n'.join(result.warnings)
|
||||
self.report({'WARNING'}, message)
|
||||
else:
|
||||
self.report({'INFO'}, f'PSK imported ({options.name})')
|
||||
message = bpy.app.translations.pgettext_iface('PSK imported ({name})')
|
||||
message = message.format(name=options.name)
|
||||
self.report({'INFO'}, message)
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
@@ -28,9 +28,9 @@ class PskImportOptions:
|
||||
|
||||
|
||||
class ImportBone:
|
||||
'''
|
||||
"""
|
||||
Intermediate bone type for the purpose of construction.
|
||||
'''
|
||||
"""
|
||||
def __init__(self, index: int, psk_bone: Psk.Bone):
|
||||
self.index: int = index
|
||||
self.psk_bone: Psk.Bone = psk_bone
|
||||
@@ -165,7 +165,8 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
|
||||
|
||||
# TODO: Handle invalid faces better.
|
||||
if len(invalid_face_indices) > 0:
|
||||
result.warnings.append(f'Discarded {len(invalid_face_indices)} invalid face(s).')
|
||||
message = bpy.app.translations.pgettext_iface('Discarded {count} invalid face(s)')
|
||||
result.warnings.append(message.format(count=len(invalid_face_indices)))
|
||||
|
||||
bm.to_mesh(mesh_data)
|
||||
|
||||
|
||||
@@ -2,19 +2,19 @@ from bpy.props import EnumProperty
|
||||
from bpy.types import PropertyGroup
|
||||
|
||||
mesh_triangle_types_items = (
|
||||
('NORMAL', 'Normal', 'Normal one-sided', 0),
|
||||
('NORMAL_TWO_SIDED', 'Normal Two-Sided', 'Normal but two-sided', 1),
|
||||
('TRANSLUCENT', 'Translucent', 'Translucent two-sided', 2),
|
||||
('MASKED', 'Masked', 'Masked two-sided', 3),
|
||||
('MODULATE', 'Modulate', 'Modulation blended two-sided', 4),
|
||||
('PLACEHOLDER', 'Placeholder', 'Placeholder triangle for positioning weapon. Invisible', 8),
|
||||
('NORMAL', 'Normal', '', 0),
|
||||
('NORMAL_TWO_SIDED', 'Normal Two-Sided', '', 1),
|
||||
('TRANSLUCENT', 'Translucent', '', 2),
|
||||
('MASKED', 'Masked', '', 3),
|
||||
('MODULATE', 'Modulate', '', 4),
|
||||
('PLACEHOLDER', 'Placeholder', '', 8),
|
||||
)
|
||||
|
||||
mesh_triangle_bit_flags_items = (
|
||||
('UNLIT', 'Unlit', 'Full brightness, no lighting', 16),
|
||||
('FLAT', 'Flat', 'Flat surface, don\'t do bMeshCurvy thing', 32),
|
||||
('ENVIRONMENT', 'Environment', 'Environment mapped', 64),
|
||||
('NO_SMOOTH', 'No Smooth', 'No bilinear filtering on this poly\'s texture', 128),
|
||||
('UNLIT', 'Unlit', '', 16),
|
||||
('FLAT', 'Flat', '', 32),
|
||||
('ENVIRONMENT', 'Environment', '', 64),
|
||||
('NO_SMOOTH', 'No Smooth', '', 128),
|
||||
)
|
||||
|
||||
class PSX_PG_material(PropertyGroup):
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import ctypes
|
||||
import os
|
||||
import re
|
||||
import warnings
|
||||
from pathlib import Path
|
||||
|
||||
import bpy.app.translations
|
||||
|
||||
from .data import *
|
||||
|
||||
|
||||
@@ -69,8 +70,9 @@ def read_psk(path: str) -> Psk:
|
||||
_read_types(fp, Psk.MorphData, section, psk.morph_data)
|
||||
else:
|
||||
# Section is not handled, skip it.
|
||||
fp.seek(section.data_size * section.data_count, os.SEEK_CUR)
|
||||
warnings.warn(f'Unrecognized section "{section.name} at position {fp.tell():15}"')
|
||||
message: str = bpy.app.translations.pgettext_iface('Unhandled section "{section_name}" at position {position}')
|
||||
message.format(section_name=section.name, position=f'{fp.tell():15}')
|
||||
warnings.warn(message)
|
||||
|
||||
'''
|
||||
UEViewer exports a sidecar file (*.props.txt) with fully-qualified reference paths for each material
|
||||
@@ -78,14 +80,14 @@ def read_psk(path: str) -> Psk:
|
||||
'''
|
||||
psk.material_references = _read_material_references(path)
|
||||
|
||||
'''
|
||||
"""
|
||||
Tools like UEViewer and CUE4Parse write the point index as a 32-bit integer, exploiting the fact that due to struct
|
||||
alignment, there were 16-bits of padding following the original 16-bit point index in the wedge struct.
|
||||
However, this breaks compatibility with PSK files that were created with older tools that treated the
|
||||
point index as a 16-bit integer and might have junk data written to the padding bits.
|
||||
To work around this, we check if each point is still addressable using a 16-bit index, and if it is, assume the
|
||||
point index is a 16-bit integer and truncate the high bits.
|
||||
'''
|
||||
"""
|
||||
if len(psk.points) <= 65536:
|
||||
for wedge in psk.wedges:
|
||||
wedge.point_index &= 0xFFFF
|
||||
|
||||
@@ -7,7 +7,7 @@ class PSX_UL_bone_collection_list(UIList):
|
||||
def draw_item(self, context: Context, layout: UILayout, data: AnyType, item: AnyType, icon: int,
|
||||
active_data: AnyType, active_property: str, index: int = 0, flt_flag: int = 0):
|
||||
row = layout.row()
|
||||
row.prop(item, 'is_selected', text=item.name, translate=item.name == 'Unassigned')
|
||||
row.prop(item, 'is_selected', text=item.name, translate=False)
|
||||
row.label(text=str(getattr(item, 'count')), icon='BONE_DATA')
|
||||
|
||||
|
||||
|
||||
1
localization/.gitignore
vendored
Normal file
1
localization/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
artifacts
|
||||
132
localization/stringscan.py
Normal file
132
localization/stringscan.py
Normal file
@@ -0,0 +1,132 @@
|
||||
import ast
import glob
import os
import zipfile

# Glob pattern for the addon sources to scan, relative to this script's
# working directory (the `localization` folder).
SOURCE_GLOB = '../io_scene_psk_psa/**/*.py'
PO_PATH = './artifacts/io_scene_psk_psa.en.po'
ZIP_PATH = './artifacts/io_scene_psk_psa.po.zip'

# TODO: big brain move would be to load the translated Blender strings and
#  remove any that are already translated instead of manually removing them.
# NOTE: the original literal was missing commas after 'Materials' and
# 'RemoveTracks', so Python concatenated them with the next entries
# ('MaterialsBones', 'RemoveTracksSource') and none of those four strings
# were actually excluded. Fixed here.
EXCLUDED_STRINGS = {
    'Import-Export',
    'Linear',
    'Masked',
    'Normal',
    'Placeholder',
    'Flat',
    'Environment',
    'Advanced',
    'Action',
    'All',
    'Assets',
    'Armature',
    'Materials',
    'Bones',
    'Custom',
    'Data',
    'Colin Basnett, Yurii Ti',
    'Invert',
    'Keyframes',  # maybe?
    'Mesh',
    'None',
    'Options',
    'Overwrite',
    'Scale',
    'Scene',
    'Select',
    'RemoveTracks',
    'Source',
    'Stash',
    'Move Up',
    'Move Down',
    'Unassigned',
    'Prefix',
    'Suffix',
    'Timeline Markers',
    'Pose Markers',
    'Actions',
}


def collect_strings(pattern: str = SOURCE_GLOB) -> set:
    """Parse every .py file matching *pattern* and return the set of all
    string literals found in their ASTs.

    Files ending in `i18n.py` are skipped (they contain the translations
    themselves). Files that cannot be decoded are reported and skipped.
    """
    strings = set()
    for file in glob.glob(pattern, recursive=True):
        print(file)
        if file.endswith('i18n.py'):
            # TODO: Don't parse the i18n files.
            continue
        try:
            # Python sources are UTF-8 by default (PEP 3120); be explicit so
            # the platform's locale encoding doesn't cause spurious failures.
            with open(file, 'r', encoding='utf-8') as f:
                tree = ast.parse(f.read())
        except UnicodeDecodeError as e:
            print(f'Error reading file {file}: {e}')
            continue
        for node in ast.walk(tree):
            # `ast.Constant.value` replaces the deprecated `.s` accessor.
            if isinstance(node, ast.Constant) and isinstance(node.value, str):
                strings.add(node.value)
    return strings


def filter_strings(strings) -> set:
    """Return the subset of *strings* that look like translatable UI text.

    Heuristics (all must pass):
    - not empty or whitespace-only;
    - contains at least one alphabetic character;
    - does not contain '@return: ' (docstring fragment);
    - not an all-lowercase single word (identifier-like);
    - not SCREAMING_SNAKE_CASE / all-uppercase;
    - contains no underscores;
    - does not start with a newline;
    - does not look like a regular expression (no '^').
    """
    result = set()
    for s in strings:
        if not s.strip():
            continue
        if not any(c.isalpha() for c in s):
            continue
        if '@return: ' in s:
            continue
        # All-lowercase strings without spaces are almost certainly
        # identifiers or enum values, not user-facing text.
        if s.islower() and ' ' not in s:
            continue
        if s.isupper():
            continue
        if '_' in s:
            continue
        if s.startswith('\n'):
            continue
        if '^' in s:
            continue
        result.add(s)
    return result


def write_po(strings, path: str) -> None:
    """Write *strings* (an iterable, already ordered) as an English .po
    catalog where each msgstr mirrors its msgid.
    """
    with open(path, 'w', encoding='utf-8') as f:
        # Header: language, MIME version, content type & transfer encoding.
        f.write('msgid ""\n'
                'msgstr ""\n'
                '"Language: en\\n"\n'
                '"MIME-Version: 1.0\\n"\n'
                '"Content-Type: text/plain\\n"\n'
                '"Content-Transfer-Encoding: 8bit; charset=UTF-8\\n"\n\n'
                )
        for string in strings:
            if '\n' in string:
                # Multi-line strings are emitted as an empty msgid followed by
                # one quoted segment per line, mirrored for msgstr.
                # NOTE(review): PO continuation segments conventionally end in
                # a literal \n escape; this reproduces the original output
                # as-is — TODO confirm against Blender's .po importer.
                f.write('msgid ""\n')
                for line in string.split('\n'):
                    f.write(f'"{line}"\n')
                f.write('msgstr ""\n')
                for line in string.split('\n'):
                    f.write(f'"{line}"\n')
            else:
                f.write(f'msgid "{string}"\n')
                f.write(f'msgstr "{string}"\n')
            f.write('\n')


def main() -> None:
    """Scan the addon sources, filter the candidate strings, and write the
    .po catalog plus a zip archive of it into ./artifacts."""
    strings = filter_strings(collect_strings())
    # Exclude known already-translated strings, then sort so the .po output
    # is deterministic (the original iterated an unordered set).
    strings = sorted(s for s in strings if s not in EXCLUDED_STRINGS)

    os.makedirs(os.path.dirname(PO_PATH), exist_ok=True)
    write_po(strings, PO_PATH)

    print(f'Found {len(strings)} strings.')

    with zipfile.ZipFile(ZIP_PATH, 'w') as z:
        z.write(PO_PATH)


if __name__ == '__main__':
    main()
|
||||
Reference in New Issue
Block a user