Compare commits

...

13 Commits

Author SHA1 Message Date
Colin Basnett
ff74f47178 Implemented multiple PSA import (#55)
This can be invoked by drag-and-dropping multiple PSA files onto the
Blender viewport when you have the target armature selected
2024-09-09 17:07:36 -07:00
Colin Basnett
bdd35ef61d Incremented version to v7.1.2 2024-09-09 16:30:01 -07:00
Colin Basnett
1c4967bd67 Fixed is_bdk_addon_loaded function 2024-09-09 16:29:20 -07:00
Colin Basnett
b5dba35ac4 Implemented feature requested in #87 2024-09-09 16:25:29 -07:00
Colin Basnett
7cc5cbe667 Added Visible Only option to the PSK collection exporter 2024-09-09 15:59:13 -07:00
Colin Basnett
e1f0fc7e89 Fix #101: Dashes in the names of PSA config keys result in parsing errors
The issue here was the regex pattern was too restrictive, so it did not
pick up the lines as ones that needed to have the `=` appended at the
end so that the ConfigParser could properly parse the file.
2024-08-07 23:36:48 -07:00
Colin Basnett
03c69783b3 Updated workflow file to be targeted against the stable version of 4.2 2024-07-31 19:16:00 -07:00
Colin Basnett
da4960298b Incremented version to 7.1.1 2024-07-31 19:09:49 -07:00
Colin Basnett
a9706d88a5 Vertices without explicit weights are now weighted to the root bone
There is an issue in some older versions of Unreal (e.g. Postal), where
the engine does not handle vertices without explicit weighting,
resulting in corrupted meshes. This now mitigates the issue.

Thank you to makabray for reporting this issue.
2024-07-31 19:09:22 -07:00
Colin Basnett
9dd02260d5 Replaced __doc__ with bl_description 2024-07-31 19:01:48 -07:00
Colin Basnett
7ceaa88f1d Incremented version to 7.0.1 2024-03-31 14:23:35 -07:00
Colin Basnett
37e246bf3e Fixed ordering of panels in the PSA import dialog 2024-03-31 14:22:16 -07:00
Colin Basnett
db93314fbc Initial commit for multiple PSA import 2024-03-31 12:47:48 -07:00
12 changed files with 188 additions and 55 deletions

View File

@@ -6,18 +6,24 @@ on:
branches: [ "main" ] branches: [ "main" ]
pull_request: pull_request:
branches: [ "main" ] branches: [ "main" ]
jobs: jobs:
build: build:
runs-on: ubuntu-latest runs-on: ubuntu-latest
env: env:
BLENDER_VERSION: blender-4.2.0-beta+v42.d19d23e91f65-linux.x86_64-release BLENDER_VERSION: blender-4.2.0-linux-x64
ADDON_NAME: io_scene_psk_psa
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- uses: SebRollen/toml-action@v1.2.0
id: read_manifest
with:
file: '${{ env.ADDON_NAME }}/blender_manifest.toml'
field: 'version'
- name: Set derived environment variables - name: Set derived environment variables
run: | run: |
echo "BLENDER_FILENAME=${{ env.BLENDER_VERSION }}.tar.xz" >> $GITHUB_ENV echo "BLENDER_FILENAME=${{ env.BLENDER_VERSION }}.tar.xz" >> $GITHUB_ENV
echo "BLENDER_URL=https://cdn.builder.blender.org/download/daily/${{ env.BLENDER_VERSION }}.tar.xz" >> $GITHUB_ENV echo "BLENDER_URL=https://mirrors.iu13.net/blender/release/Blender4.2/${{ env.BLENDER_VERSION }}.tar.xz" >> $GITHUB_ENV
- name: Install Blender Dependencies - name: Install Blender Dependencies
run: | run: |
sudo apt-get install libxxf86vm-dev -y sudo apt-get install libxxf86vm-dev -y
@@ -35,14 +41,14 @@ jobs:
echo "${{ github.workspace }}/${{ env.BLENDER_VERSION }}/" >> $GITHUB_PATH echo "${{ github.workspace }}/${{ env.BLENDER_VERSION }}/" >> $GITHUB_PATH
- name: Build extension - name: Build extension
run: | run: |
pushd ./io_scene_psk_psa pushd ./${{ env.ADDON_NAME }}
blender --command extension build blender --command extension build
mkdir artifact mkdir artifact
unzip -q io_scene_psk_psa.zip -d ./artifact unzip -q ${{ env.ADDON_NAME }}-${{ steps.read_manifest.outputs.value }}.zip -d ./artifact
popd popd
- name: Archive addon - name: Archive addon
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v4
with: with:
name: io_scene_psk_psa-${{ github.ref_name }}-${{ github.sha }} name: ${{ env.ADDON_NAME }}-${{ github.ref_name }}-${{ github.sha }}
path: | path: |
./io_scene_psk_psa/artifact/* ./${{ env.ADDON_NAME }}/artifact/*

View File

@@ -1,6 +1,6 @@
schema_version = "1.0.0" schema_version = "1.0.0"
id = "io_scene_psk_psa" id = "io_scene_psk_psa"
version = "7.1.0" version = "7.1.2"
name = "Unreal PSK/PSA (.psk/.psa)" name = "Unreal PSK/PSA (.psk/.psa)"
tagline = "Import and export PSK and PSA files used in Unreal Engine" tagline = "Import and export PSK and PSA files used in Unreal Engine"
maintainer = "Colin Basnett <cmbasnett@gmail.com>" maintainer = "Colin Basnett <cmbasnett@gmail.com>"

View File

@@ -1,8 +1,6 @@
import re import re
from configparser import ConfigParser from configparser import ConfigParser
from typing import Dict from typing import Dict, List
from .reader import PsaReader
REMOVE_TRACK_LOCATION = (1 << 0) REMOVE_TRACK_LOCATION = (1 << 0)
REMOVE_TRACK_ROTATION = (1 << 1) REMOVE_TRACK_ROTATION = (1 << 1)
@@ -28,7 +26,7 @@ def _load_config_file(file_path: str) -> ConfigParser:
with open(file_path, 'r') as f: with open(file_path, 'r') as f:
lines = f.read().split('\n') lines = f.read().split('\n')
lines = [re.sub(r'^\s*(\w+)\s*$', r'\1=', line) for line in lines] lines = [re.sub(r'^\s*([^=]+)\s*$', r'\1=', line) for line in lines]
contents = '\n'.join(lines) contents = '\n'.join(lines)
@@ -50,7 +48,7 @@ def _get_bone_flags_from_value(value: str) -> int:
return 0 return 0
def read_psa_config(psa_reader: PsaReader, file_path: str) -> PsaConfig: def read_psa_config(psa_sequence_names: List[str], file_path: str) -> PsaConfig:
psa_config = PsaConfig() psa_config = PsaConfig()
config = _load_config_file(file_path) config = _load_config_file(file_path)
@@ -62,7 +60,6 @@ def read_psa_config(psa_reader: PsaReader, file_path: str) -> PsaConfig:
# Map the sequence name onto the actual sequence name in the PSA file. # Map the sequence name onto the actual sequence name in the PSA file.
try: try:
psa_sequence_names = list(psa_reader.sequences.keys())
lowercase_sequence_names = [sequence_name.lower() for sequence_name in psa_sequence_names] lowercase_sequence_names = [sequence_name.lower() for sequence_name in psa_sequence_names]
sequence_name = psa_sequence_names[lowercase_sequence_names.index(sequence_name.lower())] sequence_name = psa_sequence_names[lowercase_sequence_names.index(sequence_name.lower())]
except ValueError: except ValueError:

View File

@@ -214,7 +214,7 @@ class PSA_OT_export(Operator, ExportHelper):
bl_idname = 'psa_export.operator' bl_idname = 'psa_export.operator'
bl_label = 'Export' bl_label = 'Export'
bl_options = {'INTERNAL', 'UNDO'} bl_options = {'INTERNAL', 'UNDO'}
__doc__ = 'Export actions to PSA' bl_description = 'Export actions to PSA'
filename_ext = '.psa' filename_ext = '.psa'
filter_glob: StringProperty(default='*.psa', options={'HIDDEN'}) filter_glob: StringProperty(default='*.psa', options={'HIDDEN'})
filepath: StringProperty( filepath: StringProperty(

View File

@@ -1,8 +1,9 @@
import os import os
from pathlib import Path from pathlib import Path
from typing import List
from bpy.props import StringProperty from bpy.props import StringProperty, CollectionProperty
from bpy.types import Operator, Event, Context, FileHandler from bpy.types import Operator, Event, Context, FileHandler, OperatorFileListElement, Object
from bpy_extras.io_utils import ImportHelper from bpy_extras.io_utils import ImportHelper
from .properties import get_visible_sequences from .properties import get_visible_sequences
@@ -112,6 +113,95 @@ def on_psa_file_path_updated(cls, context):
load_psa_file(context, cls.filepath) load_psa_file(context, cls.filepath)
class PSA_OT_import_multiple(Operator):
bl_idname = 'psa_import.import_multiple'
bl_label = 'Import PSA'
bl_description = 'Import multiple PSA files'
bl_options = {'INTERNAL', 'UNDO'}
directory: StringProperty(subtype='FILE_PATH', options={'SKIP_SAVE', 'HIDDEN'})
files: CollectionProperty(type=OperatorFileListElement, options={'SKIP_SAVE', 'HIDDEN'})
def execute(self, context):
pg = getattr(context.scene, 'psa_import')
warnings = []
for file in self.files:
psa_path = os.path.join(self.directory, file.name)
psa_reader = PsaReader(psa_path)
sequence_names = psa_reader.sequences.keys()
result = _import_psa(context, pg, psa_path, sequence_names, context.view_layer.objects.active)
result.warnings.extend(warnings)
if len(result.warnings) > 0:
message = f'Imported {len(sequence_names)} action(s) with {len(result.warnings)} warning(s)\n'
self.report({'INFO'}, message)
for warning in result.warnings:
self.report({'WARNING'}, warning)
self.report({'INFO'}, f'Imported {len(sequence_names)} action(s)')
return {'FINISHED'}
def invoke(self, context: Context, event):
# Make sure the selected object is an armature.
active_object = context.view_layer.objects.active
if active_object is None or active_object.type != 'ARMATURE':
self.report({'ERROR_INVALID_CONTEXT'}, 'The active object must be an armature')
return {'CANCELLED'}
# Show the import operator properties in a pop-up dialog (do not use the file selector).
context.window_manager.invoke_props_dialog(self)
return {'RUNNING_MODAL'}
def draw(self, context):
layout = self.layout
pg = getattr(context.scene, 'psa_import')
draw_psa_import_options_no_panels(layout, pg)
def _import_psa(context,
pg,
filepath: str,
sequence_names: List[str],
armature_object: Object
):
options = PsaImportOptions()
options.sequence_names = sequence_names
options.should_use_fake_user = pg.should_use_fake_user
options.should_stash = pg.should_stash
options.action_name_prefix = pg.action_name_prefix if pg.should_use_action_name_prefix else ''
options.should_overwrite = pg.should_overwrite
options.should_write_metadata = pg.should_write_metadata
options.should_write_keyframes = pg.should_write_keyframes
options.should_convert_to_samples = pg.should_convert_to_samples
options.bone_mapping_mode = pg.bone_mapping_mode
options.fps_source = pg.fps_source
options.fps_custom = pg.fps_custom
options.translation_scale = pg.translation_scale
warnings = []
if options.should_use_config_file:
# Read the PSA config file if it exists.
config_path = Path(filepath).with_suffix('.config')
if config_path.exists():
try:
options.psa_config = read_psa_config(sequence_names, str(config_path))
except Exception as e:
warnings.append(f'Failed to read PSA config file: {e}')
psa_reader = PsaReader(filepath)
result = import_psa(context, psa_reader, armature_object, options)
result.warnings.extend(warnings)
return result
class PSA_OT_import(Operator, ImportHelper): class PSA_OT_import(Operator, ImportHelper):
bl_idname = 'psa_import.import' bl_idname = 'psa_import.import'
bl_label = 'Import' bl_label = 'Import'
@@ -137,36 +227,13 @@ class PSA_OT_import(Operator, ImportHelper):
def execute(self, context): def execute(self, context):
pg = getattr(context.scene, 'psa_import') pg = getattr(context.scene, 'psa_import')
psa_reader = PsaReader(self.filepath)
sequence_names = [x.action_name for x in pg.sequence_list if x.is_selected] sequence_names = [x.action_name for x in pg.sequence_list if x.is_selected]
if len(sequence_names) == 0: if len(sequence_names) == 0:
self.report({'ERROR_INVALID_CONTEXT'}, 'No sequences selected') self.report({'ERROR_INVALID_CONTEXT'}, 'No sequences selected')
return {'CANCELLED'} return {'CANCELLED'}
options = PsaImportOptions() result = _import_psa(context, pg, self.filepath, sequence_names, context.view_layer.objects.active)
options.sequence_names = sequence_names
options.should_use_fake_user = pg.should_use_fake_user
options.should_stash = pg.should_stash
options.action_name_prefix = pg.action_name_prefix if pg.should_use_action_name_prefix else ''
options.should_overwrite = pg.should_overwrite
options.should_write_metadata = pg.should_write_metadata
options.should_write_keyframes = pg.should_write_keyframes
options.should_convert_to_samples = pg.should_convert_to_samples
options.bone_mapping_mode = pg.bone_mapping_mode
options.fps_source = pg.fps_source
options.fps_custom = pg.fps_custom
if options.should_use_config_file:
# Read the PSA config file if it exists.
config_path = Path(self.filepath).with_suffix('.config')
if config_path.exists():
try:
options.psa_config = read_psa_config(psa_reader, str(config_path))
except Exception as e:
self.report({'WARNING'}, f'Failed to read PSA config file: {e}')
result = import_psa(context, psa_reader, context.view_layer.objects.active, options)
if len(result.warnings) > 0: if len(result.warnings) > 0:
message = f'Imported {len(sequence_names)} action(s) with {len(result.warnings)} warning(s)\n' message = f'Imported {len(sequence_names)} action(s) with {len(result.warnings)} warning(s)\n'
@@ -248,6 +315,11 @@ class PSA_OT_import(Operator, ImportHelper):
col.use_property_decorate = False col.use_property_decorate = False
col.prop(pg, 'bone_mapping_mode') col.prop(pg, 'bone_mapping_mode')
col = advanced_panel.column()
col.use_property_split = True
col.use_property_decorate = False
col.prop(pg, 'translation_scale', text='Translation Scale')
col = advanced_panel.column(heading='Options') col = advanced_panel.column(heading='Options')
col.use_property_split = True col.use_property_split = True
col.use_property_decorate = False col.use_property_decorate = False
@@ -256,10 +328,48 @@ class PSA_OT_import(Operator, ImportHelper):
col.prop(pg, 'should_use_config_file') col.prop(pg, 'should_use_config_file')
def draw_psa_import_options_no_panels(layout, pg):
col = layout.column(heading='Sequences')
col.use_property_split = True
col.use_property_decorate = False
col.prop(pg, 'fps_source')
if pg.fps_source == 'CUSTOM':
col.prop(pg, 'fps_custom')
col.prop(pg, 'should_overwrite')
col.prop(pg, 'should_use_action_name_prefix')
if pg.should_use_action_name_prefix:
col.prop(pg, 'action_name_prefix')
col = layout.column(heading='Write')
col.use_property_split = True
col.use_property_decorate = False
col.prop(pg, 'should_write_keyframes')
col.prop(pg, 'should_write_metadata')
if pg.should_write_keyframes:
col = col.column(heading='Keyframes')
col.use_property_split = True
col.use_property_decorate = False
col.prop(pg, 'should_convert_to_samples')
col = layout.column()
col.use_property_split = True
col.use_property_decorate = False
col.prop(pg, 'bone_mapping_mode')
col.prop(pg, 'translation_scale')
col = layout.column(heading='Options')
col.use_property_split = True
col.use_property_decorate = False
col.prop(pg, 'should_use_fake_user')
col.prop(pg, 'should_stash')
col.prop(pg, 'should_use_config_file')
class PSA_FH_import(FileHandler): class PSA_FH_import(FileHandler):
bl_idname = 'PSA_FH_import' bl_idname = 'PSA_FH_import'
bl_label = 'File handler for Unreal PSA import' bl_label = 'File handler for Unreal PSA import'
bl_import_operator = 'psa_import.import' bl_import_operator = 'psa_import.import_multiple'
bl_export_operator = 'psa_export.export' bl_export_operator = 'psa_export.export'
bl_file_extensions = '.psa' bl_file_extensions = '.psa'
@@ -273,5 +383,6 @@ classes = (
PSA_OT_import_sequences_deselect_all, PSA_OT_import_sequences_deselect_all,
PSA_OT_import_sequences_from_text, PSA_OT_import_sequences_from_text,
PSA_OT_import, PSA_OT_import,
PSA_OT_import_multiple,
PSA_FH_import, PSA_FH_import,
) )

View File

@@ -103,6 +103,11 @@ class PSA_PG_import(PropertyGroup):
soft_max=1.0, soft_max=1.0,
step=0.0625, step=0.0625,
) )
translation_scale: FloatProperty(
name='Translation Scale',
default=1.0,
description='Scale factor for bone translation values. Use this when the scale of the armature does not match the PSA file'
)
def filter_sequences(pg: PSA_PG_import, sequences) -> List[int]: def filter_sequences(pg: PSA_PG_import, sequences) -> List[int]:

View File

@@ -24,6 +24,7 @@ class PsaImportOptions(object):
self.bone_mapping_mode = 'CASE_INSENSITIVE' self.bone_mapping_mode = 'CASE_INSENSITIVE'
self.fps_source = 'SEQUENCE' self.fps_source = 'SEQUENCE'
self.fps_custom: float = 30.0 self.fps_custom: float = 30.0
self.translation_scale: float = 1.0
self.should_use_config_file = True self.should_use_config_file = True
self.psa_config: PsaConfig = PsaConfig() self.psa_config: PsaConfig = PsaConfig()
@@ -88,6 +89,7 @@ def _get_sample_frame_times(source_frame_count: int, frame_step: float) -> typin
time += frame_step time += frame_step
yield source_frame_count - 1 yield source_frame_count - 1
def _resample_sequence_data_matrix(sequence_data_matrix: np.ndarray, frame_step: float = 1.0) -> np.ndarray: def _resample_sequence_data_matrix(sequence_data_matrix: np.ndarray, frame_step: float = 1.0) -> np.ndarray:
""" """
Resamples the sequence data matrix to the target frame count. Resamples the sequence data matrix to the target frame count.
@@ -271,6 +273,10 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
# Read the sequence data matrix from the PSA. # Read the sequence data matrix from the PSA.
sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name) sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name)
if options.translation_scale != 1.0:
# Scale the translation data.
sequence_data_matrix[:, :, 4:] *= options.translation_scale
# Convert the sequence's data from world-space to local-space. # Convert the sequence's data from world-space to local-space.
for bone_index, import_bone in enumerate(import_bones): for bone_index, import_bone in enumerate(import_bones):
if import_bone is None: if import_bone is None:

View File

@@ -25,10 +25,13 @@ class PskBuildOptions(object):
self.should_enforce_bone_name_restrictions = False self.should_enforce_bone_name_restrictions = False
def get_mesh_objects_for_collection(collection: Collection): def get_mesh_objects_for_collection(collection: Collection, should_exclude_hidden_meshes: bool = True):
for obj in collection.all_objects: for obj in collection.all_objects:
if obj.type == 'MESH': if obj.type != 'MESH':
yield obj continue
if should_exclude_hidden_meshes and obj.visible_get() is False:
continue
yield obj
def get_mesh_objects_for_context(context: Context): def get_mesh_objects_for_context(context: Context):
@@ -79,8 +82,8 @@ def get_psk_input_objects_for_context(context: Context) -> PskInputObjects:
return _get_psk_input_objects(mesh_objects) return _get_psk_input_objects(mesh_objects)
def get_psk_input_objects_for_collection(collection: Collection) -> PskInputObjects: def get_psk_input_objects_for_collection(collection: Collection, should_exclude_hidden_meshes: bool = True) -> PskInputObjects:
mesh_objects = list(get_mesh_objects_for_collection(collection)) mesh_objects = list(get_mesh_objects_for_collection(collection, should_exclude_hidden_meshes))
return _get_psk_input_objects(mesh_objects) return _get_psk_input_objects(mesh_objects)

View File

@@ -102,12 +102,17 @@ class PSK_OT_export_collection(Operator, ExportHelper):
description='Enforce that bone names must only contain letters, numbers, spaces, hyphens and underscores.\n\n' description='Enforce that bone names must only contain letters, numbers, spaces, hyphens and underscores.\n\n'
'Depending on the engine, improper bone names might not be referenced correctly by scripts' 'Depending on the engine, improper bone names might not be referenced correctly by scripts'
) )
should_exclude_hidden_meshes: BoolProperty(
default=True,
name='Visible Only',
description='Export only visible meshes'
)
def execute(self, context): def execute(self, context):
collection = bpy.data.collections.get(self.collection) collection = bpy.data.collections.get(self.collection)
try: try:
input_objects = get_psk_input_objects_for_collection(collection) input_objects = get_psk_input_objects_for_collection(collection, self.should_exclude_hidden_meshes)
except RuntimeError as e: except RuntimeError as e:
self.report({'ERROR_INVALID_CONTEXT'}, str(e)) self.report({'ERROR_INVALID_CONTEXT'}, str(e))
return {'CANCELLED'} return {'CANCELLED'}
@@ -144,6 +149,7 @@ class PSK_OT_export_collection(Operator, ExportHelper):
flow.use_property_split = True flow.use_property_split = True
flow.use_property_decorate = False flow.use_property_decorate = False
flow.prop(self, 'object_eval_state', text='Data') flow.prop(self, 'object_eval_state', text='Data')
flow.prop(self, 'should_exclude_hidden_meshes')
# BONES # BONES
bones_header, bones_panel = layout.panel('Bones', default_closed=False) bones_header, bones_panel = layout.panel('Bones', default_closed=False)
@@ -159,7 +165,7 @@ class PSK_OT_export(Operator, ExportHelper):
bl_idname = 'export.psk' bl_idname = 'export.psk'
bl_label = 'Export' bl_label = 'Export'
bl_options = {'INTERNAL', 'UNDO'} bl_options = {'INTERNAL', 'UNDO'}
__doc__ = 'Export mesh and armature to PSK' bl_description = 'Export mesh and armature to PSK'
filename_ext = '.psk' filename_ext = '.psk'
filter_glob: StringProperty(default='*.psk', options={'HIDDEN'}) filter_glob: StringProperty(default='*.psk', options={'HIDDEN'})

View File

@@ -27,7 +27,7 @@ class PSK_OT_import(Operator, ImportHelper):
bl_idname = 'import_scene.psk' bl_idname = 'import_scene.psk'
bl_label = 'Import' bl_label = 'Import'
bl_options = {'INTERNAL', 'UNDO', 'PRESET'} bl_options = {'INTERNAL', 'UNDO', 'PRESET'}
__doc__ = 'Load a PSK file' bl_description = 'Import a PSK file'
filename_ext = '.psk' filename_ext = '.psk'
filter_glob: StringProperty(default='*.psk;*.pskx', options={'HIDDEN'}) filter_glob: StringProperty(default='*.psk;*.pskx', options={'HIDDEN'})
filepath: StringProperty( filepath: StringProperty(

View File

@@ -131,7 +131,8 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
# Material does not yet exist, and we have the BDK addon installed. # Material does not yet exist, and we have the BDK addon installed.
# Attempt to load it using BDK addon's operator. # Attempt to load it using BDK addon's operator.
material_reference = psk.material_references[material_index] material_reference = psk.material_references[material_index]
if material_reference and bpy.ops.bdk.link_material(reference=material_reference, repository_id=options.bdk_repository_id) == {'FINISHED'}: repository_id = options.bdk_repository_id if options.bdk_repository_id is not None else ''
if material_reference and bpy.ops.bdk.link_material(reference=material_reference, repository_id=repository_id) == {'FINISHED'}:
material = bpy.data.materials[material_name] material = bpy.data.materials[material_name]
else: else:
# Just create a blank material. # Just create a blank material.

View File

@@ -163,7 +163,5 @@ def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_c
return bone_names return bone_names
def is_bdk_addon_loaded(): def is_bdk_addon_loaded() -> bool:
# TODO: this does not work anymore for *reasons*. Just check if bpy.ops.bdk.link_material exists. return bpy.ops.bdk is not None and bpy.ops.bdk.link_material is not None
# return addon_utils.check('bdk_addon')[1]
return bpy.ops.bdk.link_material is not None