Compare commits
349 commits: feature-or ... 8.2.4

.gitattributes (vendored, new file: 4 lines)
@@ -0,0 +1,4 @@
*.psk filter=lfs diff=lfs merge=lfs -text
*.psa filter=lfs diff=lfs merge=lfs -text
*.pskx filter=lfs diff=lfs merge=lfs -text
*.blend filter=lfs diff=lfs merge=lfs -text
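
These four `.gitattributes` entries route the binary asset types (PSK/PSA/PSKX models and Blender files) through Git LFS. As an aside not contained in this changeset, entries of exactly this form are what `git lfs track` writes, so contributors adding a new asset extension would typically run something like the sketch below rather than editing the file by hand:

```bash
# Illustrative only — the repository already contains these entries;
# these commands are the usual way such .gitattributes lines are produced.
git lfs install
git lfs track "*.psk" "*.psa" "*.pskx" "*.blend"
git add .gitattributes
```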

.github/ISSUE_TEMPLATE/bug_report.md (vendored, new file: 34 lines)
@@ -0,0 +1,34 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''

---

**Versions**
Blender: (example: 3.6.2)
io_scene_psk_psa: (example: 5.0.0)

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Files**
Please post attachments of the PSK and/or PSA files that you are using to encounter the bug, if any.

**Additional context**
Add any other context about the problem here.

.github/ISSUE_TEMPLATE/feature_request.md (vendored, new file: 20 lines)
@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.

.github/workflows/main.yml (vendored, new file: 76 lines)
@@ -0,0 +1,76 @@
name: tests

on:
  workflow_dispatch:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        blender-version: [ 4.4 ]
    env:
      ADDON_NAME: io_scene_psk_psa
    steps:
      - uses: actions/checkout@v3
        with:
          lfs: true
      - name: Checkout LFS objects
        run: git lfs checkout
      - uses: SebRollen/toml-action@v1.2.0
        id: read_manifest
        with:
          file: '${{ env.ADDON_NAME }}/blender_manifest.toml'
          field: 'version'
      - name: Install Blender Dependencies
        run: |
          sudo apt-get update -y
          sudo apt-get install libxxf86vm-dev -y
          sudo apt-get install libxfixes3 -y
          sudo apt-get install libxi-dev -y
          sudo apt-get install libxkbcommon-x11-0 -y
          sudo apt-get install libgl1 -y
          sudo apt-get install libglx-mesa0 -y
          sudo apt-get install python3 -y
      - name: Install Requirements
        run: |
          python3 -m pip install --upgrade pip
          python3 -m pip install virtualenv
          python3 -m virtualenv venv
          source venv/bin/activate
          pip install pytest-blender
          pip install blender-downloader
      - name: Install Blender
        run: |
          source venv/bin/activate
          blender_executable="$(blender-downloader ${{ matrix.blender-version }} --extract --print-blender-executable)"
          echo "BLENDER_EXECUTABLE=${blender_executable}" >> $GITHUB_ENV
          blender_python="$(pytest-blender --blender-executable "$blender_executable")"
          echo "BLENDER_PYTHON=${blender_python}" >> $GITHUB_ENV
          # Write the BLENDER_PYTHON path to the console for debugging
          # Deactivate the virtualenv to avoid conflicts with the system python
          deactivate
          $blender_python -m ensurepip
          $blender_python -m pip install -r tests/requirements.txt
      - name: Build extension
        run: |
          pushd ./${{ env.ADDON_NAME }}
          # Run blender using the environment variable set by the action
          ${{ env.BLENDER_EXECUTABLE }} --command extension build
          mkdir artifact
          unzip -q ${{ env.ADDON_NAME }}-${{ steps.read_manifest.outputs.value }}.zip -d ./artifact
          popd
      - name: Run tests
        run: |
          source venv/bin/activate
          pytest -svv tests --blender-addons-dirs .
      - name: Archive addon
        uses: actions/upload-artifact@v4
        with:
          name: ${{ env.ADDON_NAME }}-${{ github.ref_name }}-${{ github.sha }}
          path: |
            ./${{ env.ADDON_NAME }}/artifact/*
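
The workflow above resolves a Blender executable and its bundled Python via `blender-downloader` and `pytest-blender`, builds the extension with Blender's `extension build` command, and runs the suite with `pytest`. A rough local equivalent of those steps, distilled from the workflow rather than taken from any file in the repository (shell, Blender version, and working directory are assumptions), would be:

```bash
# Sketch of the CI steps for local use; versions and paths are assumptions.
pip install pytest-blender blender-downloader
blender_executable="$(blender-downloader 4.4 --extract --print-blender-executable)"
blender_python="$(pytest-blender --blender-executable "$blender_executable")"
"$blender_python" -m ensurepip
"$blender_python" -m pip install -r tests/requirements.txt
(cd io_scene_psk_psa && "$blender_executable" --command extension build)
pytest -svv tests --blender-addons-dirs .
```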

.gitignore (vendored: 1 line added)
@@ -105,3 +105,4 @@ venv.bak/
 
 # PyCharm
 .idea
+*.blend1

.vscode/tasks.json (vendored, new file: 17 lines)
@@ -0,0 +1,17 @@
{
    // See https://go.microsoft.com/fwlink/?LinkId=733558
    // for the documentation about the tasks.json format
    "version": "2.0.0",
    "tasks": [
        {
            "label": "io_scene_psk_psa: test",
            "type": "shell",
            "command": "docker run -it --volume ${PWD}:/io_scene_psk_psa --volume ${PWD}/io_scene_psk_psa:/addons/io_scene_psk_psa --volume ${PWD}/tests:/tests $(docker build -q .)",
            "problemMatcher": [],
            "group": {
                "kind": "build",
                "isDefault": true
            }
        }
    ]
}

COPYING (new file: 674 lines)
@@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007

Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

[The remainder of the file is the verbatim text of the GNU General Public License, version 3: the Preamble, the Terms and Conditions (sections 0 through 17), and the "How to Apply These Terms to Your New Programs" appendix.]

Dockerfile (new file: 34 lines)
@@ -0,0 +1,34 @@
FROM ubuntu:22.04

ARG BLENDER_VERSION=4.4

RUN apt-get update -y && \
    apt-get install -y libxxf86vm-dev libxfixes3 libxi-dev libxkbcommon-x11-0 libgl1 libglx-mesa0 python3 python3-pip \
    libxrender1 libsm6

RUN pip install --upgrade pip
RUN pip install pytest-blender
RUN pip install blender-downloader

# Set BLENDER_EXECUTABLE and BLENDER_PYTHON as environment variables
RUN BLENDER_EXECUTABLE=$(blender-downloader $BLENDER_VERSION --extract --remove-compressed --print-blender-executable) && \
    BLENDER_PYTHON=$(pytest-blender --blender-executable "${BLENDER_EXECUTABLE}") && \
    echo "export BLENDER_EXECUTABLE=${BLENDER_EXECUTABLE}" >> /etc/environment && \
    echo "export BLENDER_PYTHON=${BLENDER_PYTHON}" >> /etc/environment && \
    echo $BLENDER_EXECUTABLE > /blender_executable_path

RUN pip install pytest-cov

# Source the environment variables and install Python dependencies
RUN . /etc/environment && \
    $BLENDER_PYTHON -m ensurepip && \
    $BLENDER_PYTHON -m pip install pytest pytest-cov

# Persist BLENDER_EXECUTABLE as an environment variable
RUN echo $(cat /blender_executable_path) > /tmp/blender_executable_path_env && \
    export BLENDER_EXECUTABLE=$(cat /tmp/blender_executable_path_env)
ENV BLENDER_EXECUTABLE /tmp/blender_executable_path_env

ENTRYPOINT [ "/bin/bash", "-c" ]
WORKDIR /io_scene_psk_psa
CMD ["source tests/test.sh"]
21 LICENSE
@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2019 Darklight Games

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
94 README.md
@@ -1,40 +1,90 @@
-This Blender add-on allows you to import and export meshes and animations to and from the [PSK and PSA file formats](https://wiki.beyondunreal.com/PSK_%26_PSA_file_formats). In addition, the non-standard PSKX format is also supported for import only.
+[](https://extensions.blender.org/add-ons/io-scene-psk-psa/ "Download Blender")
+[](https://github.com/DarklightGames/io_scene_psk_psa/actions/workflows/main.yml)
+
+[](https://ko-fi.com/L4L3853VR)
+
+This Blender addon allows you to import and export meshes and animations to and from the [PSK and PSA file formats](https://wiki.beyondunreal.com/PSK_%26_PSA_file_formats) used in many versions of the Unreal Engine.
+
+This software is licensed under the [GPLv3](https://www.gnu.org/licenses/gpl-3.0.html) license.
+
# Installation
-1. Download the zip file for the latest version from the [releases](https://github.com/DarklightGames/io_export_psk_psa/releases) page.
+For Blender 4.2 and higher, download the latest version from the [Blender Extensions](https://extensions.blender.org/add-ons/io-scene-psk-psa/) platform.
-2. Open Blender 2.80 or later.
-3. Navigate to the Blender Preferences (Edit > Preferences).
+For Blender 4.1 and lower, see [Legacy Compatibility](#legacy-compatibility).
-4. Select the "Add-ons" tab.
-5. Click the "Install..." button.
+# Features
-6. Select the .zip file that you downloaded earlier and click "Install Add-on".
+* Full PSK/PSA import and export capabilities.
-7. Enable the newly added "Import-Export: PSK/PSA Importer/Exporter" addon.
+* Non-standard file section data (.pskx) is supported for import only (vertex normals, extra UV channels, vertex colors, shape keys).
+* Fine-grained PSA sequence importing for efficient workflow when working with large PSA files.
+* PSA sequence metadata (e.g., frame rate) is preserved on import, allowing this data to be reused on export.
+* [Bone collections](https://docs.blender.org/manual/en/latest/animation/armatures/bones/bone_collections.html#bone-collections) can be excluded from PSK/PSA export (useful for excluding non-contributing bones such as IK controllers).
+* PSA sequences can be exported directly from actions or delineated using a scene's [timeline markers](https://docs.blender.org/manual/en/latest/animation/markers.html), pose markers, or NLA track strips, allowing direct use of the [NLA](https://docs.blender.org/manual/en/latest/editors/nla/index.html) when creating sequences.
+* Manual re-ordering of material slots.
+* Multiple armature objects can be exported to a single PSK or PSA file, allowing seamless use of [action slots](https://docs.blender.org/manual/en/latest/animation/actions.html#action-slots).
+* Support for exporting instance collections.
+
# Usage
## Exporting a PSK
1. Select the mesh objects you wish to export.
-3. Navigate to File > Export > Unreal PSK (.psk)
+2. Navigate to `File` > `Export` > `Unreal PSK (.psk)`.
-4. Enter the file name and click "Export".
+3. Enter the file name and click `Export`.

## Importing a PSK/PSKX
-1. Navigate to File > Import > Unreal PSK (.psk/.pskx)
+1. Navigate to `File` > `Import` > `Unreal PSK (.psk/.pskx)`.
-2. Select the PSK file you want to import and click "Import"
+2. Select the PSK file you want to import and click `Import`.

## Exporting a PSA
1. Select the armature objects you wish to export.
-2. Navigate to File > Export > Unreal PSA (.psa)
+2. Navigate to `File` > `Export` > `Unreal PSA (.psa)`.
-3. Enter the file name and click "Export".
+3. Enter the file name and click `Export`.

## Importing a PSA
-1. Select the armature object that you wish to import actions to.
+1. Select an armature that you want to import animations for.
-2. Navigate to the Object Data Properties tab of the Properties editor.
+2. Navigate to `File` > `Import` > `Unreal PSA (.psa)`.
-3. Navigate to the PSA Import panel.
+3. Select the PSA file you want to import.
-4. Click "Select PSA File".
+4. Select the sequences that you want to import and click `Import`.
-5. Select the PSA file that you want to import animations from and click "Select".
-6. In the Actions box, select which animations you want to import.
+> Note that in order to see the imported actions applied to your armature, you must use the [Dope Sheet](https://docs.blender.org/manual/en/latest/editors/dope_sheet/introduction.html) or [Nonlinear Animation](https://docs.blender.org/manual/en/latest/editors/nla/introduction.html) editors.
-7. Click "Import".

# FAQ

+## Why can't I see the animations imported from my PSA?
+Simply importing an animation into the scene will not automatically apply the action to the armature. This is in part because a PSA can have multiple sequences imported from it, and in part because it's generally bad form for importers to modify the scene in ways that the user may not expect.
+
+The PSA importer creates [Actions](https://docs.blender.org/manual/en/latest/animation/actions.html) for each of the selected sequences in the PSA. These actions can be applied to your armature via the [Action Editor](https://docs.blender.org/manual/en/latest/editors/dope_sheet/action.html) or [NLA Editor](https://docs.blender.org/manual/en/latest/editors/nla/index.html).
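As an aside, an imported action can also be assigned from Blender's Python console; the object and action names below are placeholders rather than names produced by the add-on:

```python
import bpy

# Placeholder names; substitute your own armature object and an action created by the PSA importer.
armature_object = bpy.data.objects['Armature']
action = bpy.data.actions['run_forward']

# Make sure the armature has animation data, then assign the imported action to it.
if armature_object.animation_data is None:
    armature_object.animation_data_create()
armature_object.animation_data.action = action
```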

+## Why are imported PSKs too big/too small?
+The PSK format, unlike other more modern formats, has no explicit or implicit unit system. Each game has its own convention as to what the base distance unit will represent. As such, this addon makes no assumptions as to the unit scale of the imported PSKs. If you think that your models are being imported into Blender either too big or too small, there are a couple of ways to remedy this.
+
+The method I prefer is to simply change the Blender [scene properties](https://docs.blender.org/manual/en/4.4/scene_layout/scene/properties.html#units) to match the unit system and scale for the game you're using. This is non-destructive and ensures that the unit scaling of any PSK or PSA exports from Blender will match the source file from which it was derived.
+
+The second option is to change the `Scale` value on the PSK import dialog. This will scale the armature by the factor provided. Note that this is more destructive, but may be preferable if you don't intend to export PSKs or PSAs to a game engine.
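For instance, a scene-unit setup along these lines can be applied from the Python console; the 0.01 scale is only an assumption about the target game's convention:

```python
import bpy

# Assumed convention: treat one Unreal unit as one centimetre (scale_length = 0.01).
# Use whatever scale your target game actually expects.
unit_settings = bpy.context.scene.unit_settings
unit_settings.system = 'METRIC'
unit_settings.scale_length = 0.01
```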

+## How do I control shading for PSK exports?
+The PSK format does not support vertex normals and instead uses [smoothing groups](https://en.wikipedia.org/wiki/Smoothing_group) to control shading. Note that a mesh's Custom Split Normals Data will be ignored when exporting to PSK. Therefore, the best way to control shading is to use sharp edges and the Edge Split modifier.
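A minimal sketch of that workflow in Python, assuming a hypothetical mesh object named `Cube` (normally you would mark edges selectively in Edit Mode rather than all at once):

```python
import bpy

mesh_object = bpy.data.objects['Cube']  # hypothetical object name

# Mark edges as sharp; every edge is marked here purely for illustration.
for edge in mesh_object.data.edges:
    edge.use_edge_sharp = True

# Add an Edge Split modifier that splits only along the edges marked sharp,
# so the exported smoothing groups follow those sharp edges.
modifier = mesh_object.modifiers.new(name='EdgeSplit', type='EDGE_SPLIT')
modifier.use_edge_angle = False
modifier.use_edge_sharp = True
```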

## Why are the mesh normals not accurate when importing a PSK extracted from [UE Viewer](https://www.gildor.org/en/projects/umodel)?
If preserving the mesh normals of models is important for your workflow, it is *not recommended* to export PSK files from UE Viewer. This is because UE Viewer makes no attempt to reconstruct the original [smoothing groups](https://en.wikipedia.org/wiki/Smoothing_group). As a result, the normals of imported PSK files will be incorrect when imported into Blender and will need to be manually fixed.

-As a workaround, it is recommended to export [glTF](https://en.wikipedia.org/wiki/GlTF) meshes out of UE Viewer instead, since the glTF format has support for explicit normals and UE Viewer can correctly preserve the mesh normals on export. Note, however, that the imported glTF armature may have its bones oriented incorrectly when imported into Blender. To mitigate this, you can combine the armature of the PSK and the mesh of the glTF for best results.
+There is a [pull request](https://github.com/gildor2/UEViewer/pull/277) to add support for exporting explicit normals from UE Viewer, although UEViewer's maintainer has seemingly abandoned the project.

+# Legacy Compatibility
+Below is a table of the latest addon versions that are compatible with older versions of Blender. These versions are no longer maintained and may contain bugs that have been fixed in newer versions. It is recommended to use the latest version of the addon for the best experience.
+
+| Blender Version | Addon Version |
+|-|-|
+| [4.1](https://www.blender.org/download/releases/4-1/) | [7.0.0](https://github.com/DarklightGames/io_scene_psk_psa/releases/tag/7.0.0) |
+| [4.0](https://www.blender.org/download/releases/4-0/) | [6.2.1](https://github.com/DarklightGames/io_scene_psk_psa/releases/tag/6.2.1) |
+| [3.4 - 3.6](https://www.blender.org/download/lts/3-6/) | [5.0.6](https://github.com/DarklightGames/io_scene_psk_psa/releases/tag/5.0.6) |
+| [2.93 - 3.3](https://www.blender.org/download/releases/3-3/) | [4.3.0](https://github.com/DarklightGames/io_scene_psk_psa/releases/tag/4.3.0) |
+
+# Testing
+To execute the automated tests, run:
+
+```
+./test.sh
+```
+
+This will create a [Docker](https://www.docker.com/) container and run the tests inside it. The tests are executed using [pytest](https://docs.pytest.org/en/stable/) and the results will be displayed in the terminal.
+
+For now, the tests are not exhaustive and primarily focus on sanity checking the most common use cases (PSK & PSA import). New tests will likely be added to cover new features and prevent further regressions of reported issues.
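For context, a sanity-check test driven by pytest-blender might look roughly like the sketch below; the operator idname and sample file path are assumptions, not the repository's actual test code:

```python
import bpy

def test_psk_import():
    # Hypothetical operator idname and sample file; the real tests live in the tests/ directory.
    result = bpy.ops.import_scene.psk(filepath='tests/data/example.psk')
    assert result == {'FINISHED'}
```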
|
|||||||
@@ -1,93 +1,153 @@
|
|||||||
bl_info = {
|
from bpy.app.handlers import persistent
|
||||||
"name": "PSK/PSA Importer/Exporter",
|
|
||||||
"author": "Colin Basnett",
|
|
||||||
"version": (3, 0, 0),
|
|
||||||
"blender": (2, 80, 0),
|
|
||||||
# "location": "File > Export > PSK Export (.psk)",
|
|
||||||
"description": "PSK/PSA Import/Export (.psk/.psa)",
|
|
||||||
"warning": "",
|
|
||||||
"doc_url": "https://github.com/DarklightGames/io_scene_psk_psa",
|
|
||||||
"tracker_url": "https://github.com/DarklightGames/io_scene_psk_psa/issues",
|
|
||||||
"category": "Import-Export"
|
|
||||||
}
|
|
||||||
|
|
||||||
if 'bpy' in locals():
|
from .shared import data as shared_data, types as shared_types, helpers as shared_helpers
|
||||||
|
from .shared import dfs as shared_dfs, ui as shared_ui
|
||||||
|
from .psk import (
|
||||||
|
builder as psk_builder,
|
||||||
|
data as psk_data,
|
||||||
|
importer as psk_importer,
|
||||||
|
properties as psk_properties,
|
||||||
|
writer as psk_writer,
|
||||||
|
)
|
||||||
|
from .psk import reader as psk_reader, ui as psk_ui
|
||||||
|
from .psk.export import (
|
||||||
|
operators as psk_export_operators,
|
||||||
|
properties as psk_export_properties,
|
||||||
|
ui as psk_export_ui,
|
||||||
|
)
|
||||||
|
from .psk.import_ import operators as psk_import_operators
|
||||||
|
|
||||||
|
from .psa import (
|
||||||
|
config as psa_config,
|
||||||
|
data as psa_data,
|
||||||
|
writer as psa_writer,
|
||||||
|
reader as psa_reader,
|
||||||
|
builder as psa_builder,
|
||||||
|
importer as psa_importer,
|
||||||
|
)
|
||||||
|
from .psa.export import (
|
||||||
|
properties as psa_export_properties,
|
||||||
|
ui as psa_export_ui,
|
||||||
|
operators as psa_export_operators,
|
||||||
|
)
|
||||||
|
from .psa.import_ import operators as psa_import_operators
|
||||||
|
from .psa.import_ import ui as psa_import_ui, properties as psa_import_properties
|
||||||
|
|
||||||
|
_needs_reload = 'bpy' in locals()
|
||||||
|
|
||||||
|
if _needs_reload:
|
||||||
import importlib
|
import importlib
|
||||||
|
|
||||||
importlib.reload(psx_data)
|
importlib.reload(shared_data)
|
||||||
importlib.reload(psx_helpers)
|
importlib.reload(shared_helpers)
|
||||||
importlib.reload(psx_types)
|
importlib.reload(shared_types)
|
||||||
|
importlib.reload(shared_dfs)
|
||||||
|
importlib.reload(shared_ui)
|
||||||
|
|
||||||
importlib.reload(psk_data)
|
importlib.reload(psk_data)
|
||||||
importlib.reload(psk_builder)
|
|
||||||
importlib.reload(psk_exporter)
|
|
||||||
importlib.reload(psk_importer)
|
|
||||||
importlib.reload(psk_reader)
|
importlib.reload(psk_reader)
|
||||||
|
importlib.reload(psk_writer)
|
||||||
|
importlib.reload(psk_builder)
|
||||||
|
importlib.reload(psk_importer)
|
||||||
|
importlib.reload(psk_properties)
|
||||||
|
importlib.reload(psk_ui)
|
||||||
|
importlib.reload(psk_export_properties)
|
||||||
|
importlib.reload(psk_export_operators)
|
||||||
|
importlib.reload(psk_export_ui)
|
||||||
|
importlib.reload(psk_import_operators)
|
||||||
|
|
||||||
importlib.reload(psa_data)
|
importlib.reload(psa_data)
|
||||||
importlib.reload(psa_builder)
|
importlib.reload(psa_config)
|
||||||
importlib.reload(psa_exporter)
|
|
||||||
importlib.reload(psa_reader)
|
importlib.reload(psa_reader)
|
||||||
|
importlib.reload(psa_writer)
|
||||||
|
importlib.reload(psa_builder)
|
||||||
importlib.reload(psa_importer)
|
importlib.reload(psa_importer)
|
||||||
else:
|
importlib.reload(psa_export_properties)
|
||||||
# if i remove this line, it can be enabled just fine
|
importlib.reload(psa_export_operators)
|
||||||
from . import data as psx_data
|
importlib.reload(psa_export_ui)
|
||||||
from . import helpers as psx_helpers
|
importlib.reload(psa_import_properties)
|
||||||
from . import types as psx_types
|
importlib.reload(psa_import_operators)
|
||||||
from .psk import data as psk_data
|
importlib.reload(psa_import_ui)
|
||||||
from .psk import builder as psk_builder
|
|
||||||
from .psk import exporter as psk_exporter
|
|
||||||
from .psk import reader as psk_reader
|
|
||||||
from .psk import importer as psk_importer
|
|
||||||
from .psa import data as psa_data
|
|
||||||
from .psa import builder as psa_builder
|
|
||||||
from .psa import exporter as psa_exporter
|
|
||||||
from .psa import reader as psa_reader
|
|
||||||
from .psa import importer as psa_importer
|
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
from bpy.props import PointerProperty
|
from bpy.props import PointerProperty
|
||||||
|
|
||||||
classes = (psx_types.classes +
|
|
||||||
psk_importer.classes +
|
|
||||||
psk_exporter.classes +
|
|
||||||
psa_exporter.classes +
|
|
||||||
psa_importer.classes)
|
|
||||||
|
|
||||||
|
|
||||||
def psk_export_menu_func(self, context):
|
def psk_export_menu_func(self, context):
|
||||||
self.layout.operator(psk_exporter.PskExportOperator.bl_idname, text='Unreal PSK (.psk)')
|
self.layout.operator(psk_export_operators.PSK_OT_export.bl_idname, text='Unreal PSK (.psk)')
|
||||||
|
|
||||||
|
|
||||||
def psk_import_menu_func(self, context):
|
def psk_import_menu_func(self, context):
|
||||||
self.layout.operator(psk_importer.PskImportOperator.bl_idname, text='Unreal PSK (.psk/.pskx)')
|
self.layout.operator(psk_import_operators.PSK_OT_import.bl_idname, text='Unreal PSK (.psk/.pskx)')
|
||||||
|
|
||||||
|
|
||||||
def psa_export_menu_func(self, context):
|
def psa_export_menu_func(self, context):
|
||||||
self.layout.operator(psa_exporter.PsaExportOperator.bl_idname, text='Unreal PSA (.psa)')
|
self.layout.operator(psa_export_operators.PSA_OT_export.bl_idname, text='Unreal PSA (.psa)')
|
||||||
|
|
||||||
|
|
||||||
|
def psa_import_menu_func(self, context):
|
||||||
|
self.layout.operator(psa_import_operators.PSA_OT_import.bl_idname, text='Unreal PSA (.psa)')
|
||||||
|
|
||||||
|
|
||||||
|
_modules = (
|
||||||
|
shared_types,
|
||||||
|
shared_ui,
|
||||||
|
psk_properties,
|
||||||
|
psk_ui,
|
||||||
|
psk_import_operators,
|
||||||
|
psk_export_properties,
|
||||||
|
psk_export_operators,
|
||||||
|
psk_export_ui,
|
||||||
|
psa_export_properties,
|
||||||
|
psa_export_operators,
|
||||||
|
psa_export_ui,
|
||||||
|
psa_import_properties,
|
||||||
|
psa_import_operators,
|
||||||
|
psa_import_ui
|
||||||
|
)
|
||||||
|
|
||||||
def register():
|
def register():
|
||||||
for cls in classes:
|
for module in _modules:
|
||||||
bpy.utils.register_class(cls)
|
module.register()
|
||||||
bpy.types.TOPBAR_MT_file_export.append(psk_export_menu_func)
|
bpy.types.TOPBAR_MT_file_export.append(psk_export_menu_func)
|
||||||
bpy.types.TOPBAR_MT_file_import.append(psk_import_menu_func)
|
bpy.types.TOPBAR_MT_file_import.append(psk_import_menu_func)
|
||||||
bpy.types.TOPBAR_MT_file_export.append(psa_export_menu_func)
|
bpy.types.TOPBAR_MT_file_export.append(psa_export_menu_func)
|
||||||
bpy.types.Scene.psa_import = PointerProperty(type=psa_importer.PsaImportPropertyGroup)
|
bpy.types.TOPBAR_MT_file_import.append(psa_import_menu_func)
|
||||||
bpy.types.Scene.psk_import = PointerProperty(type=psk_importer.PskImportPropertyGroup)
|
bpy.types.Material.psk = PointerProperty(type=psk_properties.PSX_PG_material, options={'HIDDEN'})
|
||||||
bpy.types.Scene.psa_export = PointerProperty(type=psa_exporter.PsaExportPropertyGroup)
|
bpy.types.Scene.psx_export = PointerProperty(type=shared_types.PSX_PG_scene_export, options={'HIDDEN'})
|
||||||
bpy.types.Scene.psk_export = PointerProperty(type=psk_exporter.PskExportPropertyGroup)
|
bpy.types.Scene.psa_import = PointerProperty(type=psa_import_properties.PSA_PG_import, options={'HIDDEN'})
|
||||||
|
bpy.types.Scene.psa_export = PointerProperty(type=psa_export_properties.PSA_PG_export, options={'HIDDEN'})
|
||||||
|
bpy.types.Scene.psk_export = PointerProperty(type=psk_export_properties.PSK_PG_export, options={'HIDDEN'})
|
||||||
|
bpy.types.Action.psa_export = PointerProperty(type=shared_types.PSX_PG_action_export, options={'HIDDEN'})
|
||||||
|
|
||||||
|
|
||||||
def unregister():
|
def unregister():
|
||||||
|
del bpy.types.Material.psk
|
||||||
|
del bpy.types.Scene.psx_export
|
||||||
del bpy.types.Scene.psa_import
|
del bpy.types.Scene.psa_import
|
||||||
del bpy.types.Scene.psk_import
|
|
||||||
del bpy.types.Scene.psa_export
|
del bpy.types.Scene.psa_export
|
||||||
del bpy.types.Scene.psk_export
|
del bpy.types.Scene.psk_export
|
||||||
|
del bpy.types.Action.psa_export
|
||||||
bpy.types.TOPBAR_MT_file_export.remove(psk_export_menu_func)
|
bpy.types.TOPBAR_MT_file_export.remove(psk_export_menu_func)
|
||||||
bpy.types.TOPBAR_MT_file_import.remove(psk_import_menu_func)
|
bpy.types.TOPBAR_MT_file_import.remove(psk_import_menu_func)
|
||||||
bpy.types.TOPBAR_MT_file_export.remove(psa_export_menu_func)
|
bpy.types.TOPBAR_MT_file_export.remove(psa_export_menu_func)
|
||||||
for cls in reversed(classes):
|
bpy.types.TOPBAR_MT_file_import.remove(psa_import_menu_func)
|
||||||
bpy.utils.unregister_class(cls)
|
for module in reversed(_modules):
|
||||||
|
module.unregister()
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
register()
|
register()
|
||||||
|
|
||||||
|
|
||||||
|
@persistent
|
||||||
|
def load_handler(dummy):
|
||||||
|
# Convert old `psa_sequence_fps` property to new `psa_export.fps` property.
|
||||||
|
# This is only needed for backwards compatibility with files that may have used older versions of the addon.
|
||||||
|
for action in bpy.data.actions:
|
||||||
|
if 'psa_sequence_fps' in action:
|
||||||
|
action.psa_export.fps = action['psa_sequence_fps']
|
||||||
|
del action['psa_sequence_fps']
|
||||||
|
|
||||||
|
|
||||||
|
bpy.app.handlers.load_post.append(load_handler)
|
||||||
|
|||||||
27 io_scene_psk_psa/blender_manifest.toml Normal file
@@ -0,0 +1,27 @@
schema_version = "1.0.0"
id = "io_scene_psk_psa"
version = "8.2.4"
name = "Unreal PSK/PSA (.psk/.psa)"
tagline = "Import and export PSK and PSA files used in Unreal Engine"
maintainer = "Colin Basnett <cmbasnett@gmail.com>"
type = "add-on"
website = "https://github.com/DarklightGames/io_scene_psk_psa/"
tags = ["Game Engine", "Import-Export"]
blender_version_min = "4.4.0"
# Optional: maximum supported Blender version
# blender_version_max = "5.1.0"
license = [
  "SPDX:GPL-3.0-or-later",
]

[build]
paths_exclude_pattern = [
  "/.git/",
  "__pycache__/",
  "/venv/",
  "/.github/",
  ".gitignore",
]

[permissions]
files = "Read and write PSK and PSA files from and to disk"
@@ -1,158 +0,0 @@
|
|||||||
import datetime
|
|
||||||
from collections import Counter
|
|
||||||
from typing import List
|
|
||||||
|
|
||||||
from bpy.types import NlaStrip
|
|
||||||
|
|
||||||
|
|
||||||
class Timer:
|
|
||||||
def __enter__(self):
|
|
||||||
self.start = datetime.datetime.now()
|
|
||||||
self.interval = None
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __exit__(self, *args):
|
|
||||||
self.end = datetime.datetime.now()
|
|
||||||
self.interval = self.end - self.start
|
|
||||||
|
|
||||||
@property
|
|
||||||
def duration(self):
|
|
||||||
if self.interval is not None:
|
|
||||||
return self.interval
|
|
||||||
else:
|
|
||||||
return datetime.datetime.now() - self.start
|
|
||||||
|
|
||||||
|
|
||||||
def rgb_to_srgb(c):
|
|
||||||
if c > 0.0031308:
|
|
||||||
return 1.055 * (pow(c, (1.0 / 2.4))) - 0.055
|
|
||||||
else:
|
|
||||||
return 12.92 * c
|
|
||||||
|
|
||||||
|
|
||||||
def get_nla_strips_ending_at_frame(object, frame) -> List[NlaStrip]:
|
|
||||||
if object is None or object.animation_data is None:
|
|
||||||
return []
|
|
||||||
strips = []
|
|
||||||
for nla_track in object.animation_data.nla_tracks:
|
|
||||||
for strip in nla_track.strips:
|
|
||||||
if strip.frame_end == frame:
|
|
||||||
strips.append(strip)
|
|
||||||
return strips
|
|
||||||
|
|
||||||
|
|
||||||
def get_nla_strips_in_timeframe(object, frame_min, frame_max) -> List[NlaStrip]:
|
|
||||||
if object is None or object.animation_data is None:
|
|
||||||
return []
|
|
||||||
strips = []
|
|
||||||
for nla_track in object.animation_data.nla_tracks:
|
|
||||||
if nla_track.mute:
|
|
||||||
continue
|
|
||||||
for strip in nla_track.strips:
|
|
||||||
if (strip.frame_start < frame_min and strip.frame_end > frame_max) or \
|
|
||||||
(frame_min <= strip.frame_start < frame_max) or \
|
|
||||||
(frame_min < strip.frame_end <= frame_max):
|
|
||||||
strips.append(strip)
|
|
||||||
return strips
|
|
||||||
|
|
||||||
|
|
||||||
def populate_bone_group_list(armature_object, bone_group_list):
|
|
||||||
bone_group_list.clear()
|
|
||||||
|
|
||||||
if armature_object and armature_object.pose:
|
|
||||||
bone_group_counts = Counter(map(lambda x: x.bone_group, armature_object.pose.bones))
|
|
||||||
|
|
||||||
item = bone_group_list.add()
|
|
||||||
item.name = 'Unassigned'
|
|
||||||
item.index = -1
|
|
||||||
item.count = 0 if None not in bone_group_counts else bone_group_counts[None]
|
|
||||||
item.is_selected = True
|
|
||||||
|
|
||||||
for bone_group_index, bone_group in enumerate(armature_object.pose.bone_groups):
|
|
||||||
item = bone_group_list.add()
|
|
||||||
item.name = bone_group.name
|
|
||||||
item.index = bone_group_index
|
|
||||||
item.count = 0 if bone_group not in bone_group_counts else bone_group_counts[bone_group]
|
|
||||||
item.is_selected = True
|
|
||||||
|
|
||||||
|
|
||||||
def get_psa_sequence_name(action, should_use_original_sequence_name):
|
|
||||||
if should_use_original_sequence_name and 'psa_sequence_name' in action:
|
|
||||||
return action['psa_sequence_name']
|
|
||||||
else:
|
|
||||||
return action.name
|
|
||||||
|
|
||||||
|
|
||||||
def get_export_bone_names(armature_object, bone_filter_mode, bone_group_indices: List[int]) -> List[str]:
|
|
||||||
"""
|
|
||||||
Returns a sorted list of bone indices that should be exported for the given bone filter mode and bone groups.
|
|
||||||
|
|
||||||
Note that the ancestors of bones within the bone groups will also be present in the returned list.
|
|
||||||
|
|
||||||
:param armature_object: Blender object with type 'ARMATURE'
|
|
||||||
:param bone_filter_mode: One of ['ALL', 'BONE_GROUPS']
|
|
||||||
:param bone_group_indices: List of bone group indices to be exported.
|
|
||||||
:return: A sorted list of bone indices that should be exported.
|
|
||||||
"""
|
|
||||||
if armature_object is None or armature_object.type != 'ARMATURE':
|
|
||||||
raise ValueError('An armature object must be supplied')
|
|
||||||
|
|
||||||
bones = armature_object.data.bones
|
|
||||||
pose_bones = armature_object.pose.bones
|
|
||||||
bone_names = [x.name for x in bones]
|
|
||||||
|
|
||||||
# Get a list of the bone indices that we are explicitly including.
|
|
||||||
bone_index_stack = []
|
|
||||||
is_exporting_none_bone_groups = -1 in bone_group_indices
|
|
||||||
for bone_index, pose_bone in enumerate(pose_bones):
|
|
||||||
if bone_filter_mode == 'ALL' or \
|
|
||||||
(pose_bone.bone_group is None and is_exporting_none_bone_groups) or \
|
|
||||||
(pose_bone.bone_group is not None and pose_bone.bone_group_index in bone_group_indices):
|
|
||||||
bone_index_stack.append((bone_index, None))
|
|
||||||
|
|
||||||
# For each bone that is explicitly being added, recursively walk up the hierarchy and ensure that all of
|
|
||||||
# those ancestor bone indices are also in the list.
|
|
||||||
bone_indices = dict()
|
|
||||||
while len(bone_index_stack) > 0:
|
|
||||||
bone_index, instigator_bone_index = bone_index_stack.pop()
|
|
||||||
bone = bones[bone_index]
|
|
||||||
if bone.parent is not None:
|
|
||||||
parent_bone_index = bone_names.index(bone.parent.name)
|
|
||||||
if parent_bone_index not in bone_indices:
|
|
||||||
bone_index_stack.append((parent_bone_index, bone_index))
|
|
||||||
bone_indices[bone_index] = instigator_bone_index
|
|
||||||
|
|
||||||
# Sort the bone index list in-place.
|
|
||||||
bone_indices = [(x[0], x[1]) for x in bone_indices.items()]
|
|
||||||
bone_indices.sort(key=lambda x: x[0])
|
|
||||||
|
|
||||||
# Split out the bone indices and the instigator bone names into separate lists.
|
|
||||||
# We use the bone names for the return values because the bone name is a more universal way of referencing them.
|
|
||||||
# For example, users of this function may modify bone lists, which would invalidate the indices and require a
|
|
||||||
# index mapping scheme to resolve it. Using strings is more comfy and results in less code downstream.
|
|
||||||
instigator_bone_names = [bones[x[1]].name if x[1] is not None else None for x in bone_indices]
|
|
||||||
bone_names = [bones[x[0]].name for x in bone_indices]
|
|
||||||
|
|
||||||
# Ensure that the hierarchy we are sending back has a single root bone.
|
|
||||||
bone_indices = [x[0] for x in bone_indices]
|
|
||||||
root_bones = [bones[bone_index] for bone_index in bone_indices if bones[bone_index].parent is None]
|
|
||||||
if len(root_bones) > 1:
|
|
||||||
# There is more than one root bone.
|
|
||||||
# Print out why each root bone was included by linking it to one of the explicitly included bones.
|
|
||||||
root_bone_names = [bone.name for bone in root_bones]
|
|
||||||
for root_bone_name in root_bone_names:
|
|
||||||
bone_name = root_bone_name
|
|
||||||
while True:
|
|
||||||
# Traverse the instigator chain until the end to find the true instigator bone.
|
|
||||||
# TODO: in future, it would be preferential to have a readout of *all* instigator bones.
|
|
||||||
instigator_bone_name = instigator_bone_names[bone_names.index(bone_name)]
|
|
||||||
if instigator_bone_name is None:
|
|
||||||
print(f'Root bone "{root_bone_name}" was included because {bone_name} was marked for export')
|
|
||||||
break
|
|
||||||
bone_name = instigator_bone_name
|
|
||||||
|
|
||||||
raise RuntimeError('Exported bone hierarchy must have a single root bone.\n'
|
|
||||||
f'The bone hierarchy marked for export has {len(root_bones)} root bones: {root_bone_names}.\n'
|
|
||||||
f'Additional debugging information has been written to the console.')
|
|
||||||
|
|
||||||
return bone_names
|
|
||||||
@@ -1,283 +1,336 @@
|
|||||||
from typing import Dict, Iterable
|
from bpy.types import Action, AnimData, Context, Object, PoseBone
|
||||||
|
|
||||||
from bpy.types import Action
|
from .data import Psa
|
||||||
|
from typing import Dict, List, Optional, Tuple
|
||||||
|
from mathutils import Matrix, Quaternion, Vector
|
||||||
|
|
||||||
from .data import *
|
from ..shared.helpers import create_psx_bones, get_coordinate_system_transform
|
||||||
from ..helpers import *
|
|
||||||
|
|
||||||
|
|
||||||
class PsaBuilderOptions(object):
|
class PsaBuildSequence:
|
||||||
|
class NlaState:
|
||||||
|
def __init__(self):
|
||||||
|
self.action: Optional[Action] = None
|
||||||
|
self.frame_start: int = 0
|
||||||
|
self.frame_end: int = 0
|
||||||
|
|
||||||
|
def __init__(self, armature_object: Object, anim_data: AnimData):
|
||||||
|
self.armature_object = armature_object
|
||||||
|
self.anim_data = anim_data
|
||||||
|
self.name: str = ''
|
||||||
|
self.nla_state: PsaBuildSequence.NlaState = PsaBuildSequence.NlaState()
|
||||||
|
self.compression_ratio: float = 1.0
|
||||||
|
self.key_quota: int = 0
|
||||||
|
self.fps: float = 30.0
|
||||||
|
|
||||||
|
|
||||||
|
class PsaBuildOptions:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.fps_source = 'SCENE'
|
self.armature_objects: List[Object] = []
|
||||||
self.fps_custom = 30.0
|
self.animation_data: Optional[AnimData] = None
|
||||||
self.sequence_source = 'ACTIONS'
|
self.sequences: List[PsaBuildSequence] = []
|
||||||
self.actions = []
|
self.bone_filter_mode: str = 'ALL'
|
||||||
self.marker_names = []
|
self.bone_collection_indices: List[PsaBoneCollectionIndex] = []
|
||||||
self.bone_filter_mode = 'ALL'
|
self.sequence_name_prefix: str = ''
|
||||||
self.bone_group_indices = []
|
self.sequence_name_suffix: str = ''
|
||||||
self.should_use_original_sequence_names = False
|
self.scale = 1.0
|
||||||
self.should_trim_timeline_marker_sequences = True
|
self.sampling_mode: str = 'INTERPOLATED' # One of ('INTERPOLATED', 'SUBFRAME')
|
||||||
self.sequence_name_prefix = ''
|
self.export_space = 'WORLD'
|
||||||
self.sequence_name_suffix = ''
|
self.forward_axis = 'X'
|
||||||
|
self.up_axis = 'Z'
|
||||||
|
self.root_bone_name = 'ROOT'
|
||||||
|
self.sequence_source = 'ACTIONS' # One of ('ACTIONS', 'TIMELINE_MARKERS', 'NLA_STRIPS')
|
||||||
|
|
||||||
|
@property
|
||||||
|
def bone_collection_primary_key(self) -> str:
|
||||||
|
return 'DATA' if self.sequence_source == 'ACTIVE_ACTION' else 'OBJECT'
|
||||||
|
|
||||||
|
|
||||||
class PsaBuilderPerformance:
|
def _get_pose_bone_location_and_rotation(
|
||||||
def __init__(self):
|
pose_bone: Optional[PoseBone],
|
||||||
self.frame_set_duration = datetime.timedelta()
|
armature_object: Optional[Object],
|
||||||
self.key_build_duration = datetime.timedelta()
|
export_space: str,
|
||||||
self.key_add_duration = datetime.timedelta()
|
scale: Vector,
|
||||||
|
coordinate_system_transform: Matrix,
|
||||||
|
has_false_root_bone: bool,
|
||||||
|
) -> Tuple[Vector, Quaternion]:
|
||||||
|
is_false_root_bone = pose_bone is None and armature_object is None
|
||||||
|
|
||||||
|
if is_false_root_bone:
|
||||||
class PsaBuilder(object):
|
pose_bone_matrix = coordinate_system_transform
|
||||||
def __init__(self):
|
elif pose_bone.parent is not None:
|
||||||
pass
|
pose_bone_matrix = pose_bone.matrix
|
||||||
|
pose_bone_parent_matrix = pose_bone.parent.matrix
|
||||||
def get_sequence_fps(self, context, options: PsaBuilderOptions, actions: Iterable[Action]) -> float:
|
pose_bone_matrix = pose_bone_parent_matrix.inverted() @ pose_bone_matrix
|
||||||
if options.fps_source == 'SCENE':
|
else:
|
||||||
return context.scene.render.fps
|
# Root bone
|
||||||
if options.fps_source == 'CUSTOM':
|
if has_false_root_bone:
|
||||||
return options.fps_custom
|
pose_bone_matrix = armature_object.matrix_world @ pose_bone.matrix
|
||||||
elif options.fps_source == 'ACTION_METADATA':
|
|
||||||
# Get the minimum value of action metadata FPS values.
|
|
||||||
fps_list = []
|
|
||||||
for action in filter(lambda x: 'psa_sequence_fps' in x, actions):
|
|
||||||
fps = action['psa_sequence_fps']
|
|
||||||
if type(fps) == int or type(fps) == float:
|
|
||||||
fps_list.append(fps)
|
|
||||||
if len(fps_list) > 0:
|
|
||||||
return min(fps_list)
|
|
||||||
else:
|
|
||||||
# No valid action metadata to use, fallback to scene FPS
|
|
||||||
return context.scene.render.fps
|
|
||||||
else:
|
else:
|
||||||
raise RuntimeError(f'Invalid FPS source "{options.fps_source}"')
|
# Get the bone's pose matrix and transform it into the export space.
|
||||||
|
# In the case of an 'ARMATURE' export space, this will be the inverse of armature object's world matrix.
|
||||||
|
# Otherwise, it will be the identity matrix.
|
||||||
|
match export_space:
|
||||||
|
case 'ARMATURE':
|
||||||
|
pose_bone_matrix = pose_bone.matrix
|
||||||
|
case 'WORLD':
|
||||||
|
pose_bone_matrix = armature_object.matrix_world @ pose_bone.matrix
|
||||||
|
case 'ROOT':
|
||||||
|
pose_bone_matrix = Matrix.Identity(4)
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid export space: {export_space}'
|
||||||
|
|
||||||
def build(self, context, options: PsaBuilderOptions) -> Psa:
|
# The root bone is the only bone that should be transformed by the coordinate system transform, since all
|
||||||
performance = PsaBuilderPerformance()
|
# other bones are relative to their parent bones.
|
||||||
active_object = context.view_layer.objects.active
|
pose_bone_matrix = coordinate_system_transform @ pose_bone_matrix
|
||||||
|
|
||||||
if active_object.type != 'ARMATURE':
|
location = pose_bone_matrix.to_translation()
|
||||||
raise RuntimeError('Selected object must be an Armature')
|
rotation = pose_bone_matrix.to_quaternion().normalized()
|
||||||
|
|
||||||
armature = active_object
|
# Don't apply scale to the root bone of armatures if we have a false root.
|
||||||
|
if not has_false_root_bone or (pose_bone is None or pose_bone.parent is not None):
|
||||||
|
location *= scale
|
||||||
|
|
||||||
if armature.animation_data is None:
|
if has_false_root_bone:
|
||||||
raise RuntimeError('No animation data for armature')
|
is_child_bone = not is_false_root_bone
|
||||||
|
else:
|
||||||
|
is_child_bone = pose_bone.parent is not None
|
||||||
|
|
||||||
# Ensure that we actually have items that we are going to be exporting.
|
if is_child_bone:
|
||||||
if options.sequence_source == 'ACTIONS' and len(options.actions) == 0:
|
rotation.conjugate()
|
||||||
raise RuntimeError('No actions were selected for export')
|
|
||||||
elif options.sequence_source == 'TIMELINE_MARKERS' and len(options.marker_names) == 0:
|
|
||||||
raise RuntimeError('No timeline markers were selected for export')
|
|
||||||
|
|
||||||
psa = Psa()
|
return location, rotation
|
||||||
|
|
||||||
bones = list(armature.data.bones)
|
|
||||||
|
|
||||||
# The order of the armature bones and the pose bones is not guaranteed to be the same.
|
def build_psa(context: Context, options: PsaBuildOptions) -> Psa:
|
||||||
# As as a result, we need to reconstruct the list of pose bones in the same order as the
|
|
||||||
# armature bones.
|
|
||||||
bone_names = [x.name for x in bones]
|
|
||||||
pose_bones = [(bone_names.index(bone.name), bone) for bone in armature.pose.bones]
|
|
||||||
pose_bones.sort(key=lambda x: x[0])
|
|
||||||
pose_bones = [x[1] for x in pose_bones]
|
|
||||||
|
|
||||||
# Get a list of all the bone indices and instigator bones for the bone filter settings.
|
assert context.scene
|
||||||
export_bone_names = get_export_bone_names(armature, options.bone_filter_mode, options.bone_group_indices)
|
assert context.window_manager
|
||||||
bone_indices = [bone_names.index(x) for x in export_bone_names]
|
|
||||||
|
|
||||||
# Make the bone lists contain only the bones that are going to be exported.
|
psa = Psa()
|
||||||
bones = [bones[bone_index] for bone_index in bone_indices]
|
|
||||||
pose_bones = [pose_bones[bone_index] for bone_index in bone_indices]
|
|
||||||
|
|
||||||
# No bones are going to be exported.
|
armature_objects_for_bones = options.armature_objects
|
||||||
if len(bones) == 0:
|
if options.sequence_source == 'ACTIVE_ACTION' and len(options.armature_objects) >= 2:
|
||||||
raise RuntimeError('No bones available for export')
|
# Make sure that the data-block for all the selected armature objects is the same.
|
||||||
|
if any(map(lambda o: o.data != options.armature_objects[0].data, options.armature_objects[1:])):
|
||||||
|
raise RuntimeError('All armature objects must share the same data-block when exporting from the active action')
|
||||||
|
armature_objects_for_bones = [options.armature_objects[0]]
|
||||||
|
|
||||||
# Build list of PSA bones.
|
psx_bone_create_result = create_psx_bones(
|
||||||
for bone in bones:
|
armature_objects=armature_objects_for_bones,
|
||||||
psa_bone = Psa.Bone()
|
export_space=options.export_space,
|
||||||
psa_bone.name = bytes(bone.name, encoding='utf-8')
|
root_bone_name=options.root_bone_name,
|
||||||
|
forward_axis=options.forward_axis,
|
||||||
|
up_axis=options.up_axis,
|
||||||
|
scale=options.scale,
|
||||||
|
bone_filter_mode=options.bone_filter_mode,
|
||||||
|
bone_collection_indices=options.bone_collection_indices,
|
||||||
|
bone_collection_primary_key=options.bone_collection_primary_key,
|
||||||
|
)
|
||||||
|
|
||||||
try:
|
# Build list of PSA bones.
|
||||||
parent_index = bones.index(bone.parent)
|
# Note that the PSA bones are just here to validate the hierarchy.
|
||||||
psa_bone.parent_index = parent_index
|
# The bind pose information is not used by the engine.
|
||||||
psa.bones[parent_index].children_count += 1
|
psa.bones = [psx_bone for psx_bone, _ in psx_bone_create_result.bones]
|
||||||
except ValueError:
|
|
||||||
psa_bone.parent_index = -1
|
|
||||||
|
|
||||||
if bone.parent is not None:
|
# No bones are going to be exported.
|
||||||
rotation = bone.matrix.to_quaternion()
|
if len(psa.bones) == 0:
|
||||||
rotation.x = -rotation.x
|
raise RuntimeError('No bones available for export')
|
||||||
rotation.y = -rotation.y
|
|
||||||
rotation.z = -rotation.z
|
|
||||||
quat_parent = bone.parent.matrix.to_quaternion().inverted()
|
|
||||||
parent_head = quat_parent @ bone.parent.head
|
|
||||||
parent_tail = quat_parent @ bone.parent.tail
|
|
||||||
location = (parent_tail - parent_head) + bone.head
|
|
||||||
else:
|
|
||||||
location = armature.matrix_local @ bone.head
|
|
||||||
rot_matrix = bone.matrix @ armature.matrix_local.to_3x3()
|
|
||||||
rotation = rot_matrix.to_quaternion()
|
|
||||||
|
|
||||||
psa_bone.location.x = location.x
|
# Add prefixes and suffices to the names of the export sequences and strip whitespace.
|
||||||
psa_bone.location.y = location.y
|
for export_sequence in options.sequences:
|
||||||
psa_bone.location.z = location.z
|
export_sequence.name = f'{options.sequence_name_prefix}{export_sequence.name}{options.sequence_name_suffix}'
|
||||||
|
export_sequence.name = export_sequence.name.strip()
|
||||||
|
|
||||||
psa_bone.rotation.x = rotation.x
|
# Save each armature object's current action and frame so that we can restore the state once we are done.
|
||||||
psa_bone.rotation.y = rotation.y
|
saved_armature_object_actions = {o: o.animation_data.action for o in options.armature_objects}
|
||||||
psa_bone.rotation.z = rotation.z
|
saved_frame_current = context.scene.frame_current
|
||||||
psa_bone.rotation.w = rotation.w
|
|
||||||
|
|
||||||
psa.bones.append(psa_bone)
|
# Now build the PSA sequences.
|
||||||
|
# We actually alter the timeline frame and simply record the resultant pose bone matrices.
|
||||||
|
frame_start_index = 0
|
||||||
|
|
||||||
# Populate the export sequence list.
|
context.window_manager.progress_begin(0, len(options.sequences))
|
||||||
class NlaState:
|
|
||||||
def __init__(self):
|
|
||||||
self.frame_min = 0
|
|
||||||
self.frame_max = 0
|
|
||||||
self.action = None
|
|
||||||
|
|
||||||
class ExportSequence:
|
coordinate_system_transform = get_coordinate_system_transform(options.forward_axis, options.up_axis)
|
||||||
def __init__(self):
|
|
||||||
self.name = ''
|
|
||||||
self.nla_state = NlaState()
|
|
||||||
self.fps = 30.0
|
|
||||||
|
|
||||||
export_sequences = []
|
for export_sequence_index, export_sequence in enumerate(options.sequences):
|
||||||
|
frame_start = export_sequence.nla_state.frame_start
|
||||||
|
frame_end = export_sequence.nla_state.frame_end
|
||||||
|
|
||||||
if options.sequence_source == 'ACTIONS':
|
# Calculate the frame step based on the compression factor.
|
||||||
for action in options.actions:
|
frame_extents = abs(frame_end - frame_start)
|
||||||
if len(action.fcurves) == 0:
|
frame_count_raw = frame_extents + 1
|
||||||
continue
|
frame_count = max(1, max(export_sequence.key_quota, int(frame_count_raw * export_sequence.compression_ratio)))
|
||||||
export_sequence = ExportSequence()
|
frame_step = frame_extents / (frame_count - 1) if frame_count > 1 else 0.0
|
||||||
export_sequence.nla_state.action = action
|
|
||||||
export_sequence.name = get_psa_sequence_name(action, options.should_use_original_sequence_names)
|
|
||||||
frame_min, frame_max = [int(x) for x in action.frame_range]
|
|
||||||
export_sequence.nla_state.frame_min = frame_min
|
|
||||||
export_sequence.nla_state.frame_max = frame_max
|
|
||||||
export_sequence.fps = self.get_sequence_fps(context, options, [action])
|
|
||||||
export_sequences.append(export_sequence)
|
|
||||||
pass
|
|
||||||
elif options.sequence_source == 'TIMELINE_MARKERS':
|
|
||||||
sequence_frame_ranges = self.get_timeline_marker_sequence_frame_ranges(armature, context, options)
|
|
||||||
|
|
||||||
for name, (frame_min, frame_max) in sequence_frame_ranges.items():
|
# If this is a reverse sequence, we need to reverse the frame step.
|
||||||
export_sequence = ExportSequence()
|
if frame_start > frame_end:
|
||||||
export_sequence.name = name
|
frame_step = -frame_step
|
||||||
export_sequence.nla_state.action = None
|
|
||||||
export_sequence.nla_state.frame_min = frame_min
|
|
||||||
export_sequence.nla_state.frame_max = frame_max
|
|
||||||
nla_strips_actions = set(
|
|
||||||
map(lambda x: x.action, get_nla_strips_in_timeframe(active_object, frame_min, frame_max)))
|
|
||||||
export_sequence.fps = self.get_sequence_fps(context, options, nla_strips_actions)
|
|
||||||
export_sequences.append(export_sequence)
|
|
||||||
else:
|
|
||||||
raise ValueError(f'Unhandled sequence source: {options.sequence_source}')
|
|
||||||
|
|
||||||
# Add prefixes and suffices to the names of the export sequences and strip whitespace.
|
sequence_duration = frame_count_raw / export_sequence.fps
|
||||||
for export_sequence in export_sequences:
|
|
||||||
export_sequence.name = f'{options.sequence_name_prefix}{export_sequence.name}{options.sequence_name_suffix}'.strip()
|
|
||||||
|
|
||||||
# Now build the PSA sequences.
|
|
||||||
# We actually alter the timeline frame and simply record the resultant pose bone matrices.
|
|
||||||
frame_start_index = 0
|
|
||||||
|
|
||||||
for export_sequence in export_sequences:
|
|
||||||
armature.animation_data.action = export_sequence.nla_state.action
|
|
||||||
context.view_layer.update()
|
|
||||||
|
|
||||||
psa_sequence = Psa.Sequence()
|
|
||||||
|
|
||||||
frame_min = export_sequence.nla_state.frame_min
|
|
||||||
frame_max = export_sequence.nla_state.frame_max
|
|
||||||
frame_count = frame_max - frame_min + 1
|
|
||||||
|
|
||||||
|
psa_sequence = Psa.Sequence()
|
||||||
|
try:
|
||||||
psa_sequence.name = bytes(export_sequence.name, encoding='windows-1252')
|
psa_sequence.name = bytes(export_sequence.name, encoding='windows-1252')
|
||||||
psa_sequence.frame_count = frame_count
|
except UnicodeEncodeError:
|
||||||
psa_sequence.frame_start_index = frame_start_index
|
raise RuntimeError(
|
||||||
psa_sequence.fps = export_sequence.fps
|
f'Sequence name "{export_sequence.name}" contains characters that cannot be encoded in the Windows-1252 codepage')
|
||||||
|
psa_sequence.frame_count = frame_count
|
||||||
|
psa_sequence.frame_start_index = frame_start_index
|
||||||
|
psa_sequence.fps = frame_count / sequence_duration
|
||||||
|
psa_sequence.bone_count = len(psa.bones)
|
||||||
|
psa_sequence.track_time = frame_count
|
||||||
|
psa_sequence.key_reduction = 1.0
|
||||||
|
|
||||||
frame_count = frame_max - frame_min + 1
|
frame = float(frame_start)
|
||||||
|
|
||||||
for frame in range(frame_count):
|
# Link the action to the animation data and update view layer.
|
||||||
with Timer() as t:
|
for armature_object in options.armature_objects:
|
||||||
context.scene.frame_set(frame_min + frame)
|
armature_object.animation_data.action = export_sequence.nla_state.action
|
||||||
performance.frame_set_duration += t.duration
|
|
||||||
|
|
||||||
for pose_bone in pose_bones:
|
context.view_layer.update()
|
||||||
with Timer() as t:
|
|
||||||
key = Psa.Key()
|
|
||||||
pose_bone_matrix = pose_bone.matrix
|
|
||||||
|
|
||||||
if pose_bone.parent is not None:
|
def add_key(location: Vector, rotation: Quaternion):
|
||||||
pose_bone_parent_matrix = pose_bone.parent.matrix
|
key = Psa.Key()
|
||||||
pose_bone_matrix = pose_bone_parent_matrix.inverted() @ pose_bone_matrix
|
key.location.x = location.x
|
||||||
|
key.location.y = location.y
|
||||||
|
key.location.z = location.z
|
||||||
|
key.rotation.x = rotation.x
|
||||||
|
key.rotation.y = rotation.y
|
||||||
|
key.rotation.z = rotation.z
|
||||||
|
key.rotation.w = rotation.w
|
||||||
|
key.time = 1.0 / psa_sequence.fps
|
||||||
|
psa.keys.append(key)
|
||||||
|
|
||||||
location = pose_bone_matrix.to_translation()
|
class PsaExportBone:
|
||||||
rotation = pose_bone_matrix.to_quaternion().normalized()
|
def __init__(self, pose_bone: Optional[PoseBone], armature_object: Optional[Object], scale: Vector):
|
||||||
|
self.pose_bone = pose_bone
|
||||||
|
self.armature_object = armature_object
|
||||||
|
self.scale = scale
|
||||||
|
|
||||||
if pose_bone.parent is not None:
|
armature_scales: Dict[Object, Vector] = {}
|
||||||
rotation.x = -rotation.x
|
|
||||||
rotation.y = -rotation.y
|
|
||||||
rotation.z = -rotation.z
|
|
||||||
|
|
||||||
key.location.x = location.x
|
# Extract the scale from the world matrix of the evaluated armature object.
|
||||||
key.location.y = location.y
|
for armature_object in options.armature_objects:
|
||||||
key.location.z = location.z
|
evaluated_armature_object = armature_object.evaluated_get(context.evaluated_depsgraph_get())
|
||||||
key.rotation.x = rotation.x
|
_, _, scale = evaluated_armature_object.matrix_world.decompose()
|
||||||
key.rotation.y = rotation.y
|
scale *= options.scale
|
||||||
key.rotation.z = rotation.z
|
armature_scales[armature_object] = scale
|
||||||
key.rotation.w = rotation.w
|
|
||||||
key.time = 1.0 / psa_sequence.fps
|
|
||||||
performance.key_build_duration += t.duration
|
|
||||||
|
|
||||||
with Timer() as t:
|
# Create a list of export pose bones, in the same order as the bones as they appear in the armature.
|
||||||
psa.keys.append(key)
|
# The object contains the pose bone, the armature object, and a pre-calculated scaling value to apply to the
|
||||||
performance.key_add_duration += t.duration
|
# locations.
|
||||||
|
export_bones: List[PsaExportBone] = []
|
||||||
|
|
||||||
psa_sequence.bone_count = len(pose_bones)
|
for psx_bone, armature_object in psx_bone_create_result.bones:
|
||||||
psa_sequence.track_time = frame_count
|
if armature_object is None:
|
||||||
|
export_bones.append(PsaExportBone(None, None, Vector((1.0, 1.0, 1.0))))
|
||||||
frame_start_index += frame_count
|
|
||||||
|
|
||||||
psa.sequences[export_sequence.name] = psa_sequence
|
|
||||||
|
|
||||||
return psa
|
|
||||||
|
|
||||||
def get_timeline_marker_sequence_frame_ranges(self, object, context, options: PsaBuilderOptions) -> Dict:
|
|
||||||
# Timeline markers need to be sorted so that we can determine the sequence start and end positions.
|
|
||||||
sequence_frame_ranges = dict()
|
|
||||||
sorted_timeline_markers = list(sorted(context.scene.timeline_markers, key=lambda x: x.frame))
|
|
||||||
sorted_timeline_marker_names = list(map(lambda x: x.name, sorted_timeline_markers))
|
|
||||||
|
|
||||||
for marker_name in options.marker_names:
|
|
||||||
marker = context.scene.timeline_markers[marker_name]
|
|
||||||
frame_min = marker.frame
|
|
||||||
# Determine the final frame of the sequence based on the next marker.
|
|
||||||
# If no subsequent marker exists, use the maximum frame_end from all NLA strips.
|
|
||||||
marker_index = sorted_timeline_marker_names.index(marker_name)
|
|
||||||
next_marker_index = marker_index + 1
|
|
||||||
frame_max = 0
|
|
||||||
if next_marker_index < len(sorted_timeline_markers):
|
|
||||||
# There is a next marker. Use that next marker's frame position as the last frame of this sequence.
|
|
||||||
frame_max = sorted_timeline_markers[next_marker_index].frame
|
|
||||||
if options.should_trim_timeline_marker_sequences:
|
|
||||||
nla_strips = get_nla_strips_in_timeframe(object, marker.frame, frame_max)
|
|
||||||
frame_max = min(frame_max, max(map(lambda nla_strip: nla_strip.frame_end, nla_strips)))
|
|
||||||
frame_min = max(frame_min, min(map(lambda nla_strip: nla_strip.frame_start, nla_strips)))
|
|
||||||
else:
|
|
||||||
# There is no next marker.
|
|
||||||
# Find the final frame of all the NLA strips and use that as the last frame of this sequence.
|
|
||||||
for nla_track in object.animation_data.nla_tracks:
|
|
||||||
if nla_track.mute:
|
|
||||||
continue
|
|
||||||
for strip in nla_track.strips:
|
|
||||||
frame_max = max(frame_max, strip.frame_end)
|
|
||||||
|
|
||||||
if frame_min == frame_max:
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
sequence_frame_ranges[marker_name] = int(frame_min), int(frame_max)
|
assert armature_object.pose
|
||||||
|
pose_bone = armature_object.pose.bones[psx_bone.name.decode('windows-1252')]
|
||||||
|
|
||||||
return sequence_frame_ranges
|
export_bones.append(PsaExportBone(pose_bone, armature_object, armature_scales[armature_object]))
|
||||||
|
|
||||||
|
match options.sampling_mode:
|
||||||
|
case 'INTERPOLATED':
|
||||||
|
# Used as a store for the last frame's pose bone locations and rotations.
|
||||||
|
last_frame: Optional[int] = None
|
||||||
|
last_frame_bone_poses: List[Tuple[Vector, Quaternion]] = []
|
||||||
|
|
||||||
|
next_frame: Optional[int] = None
|
||||||
|
next_frame_bone_poses: List[Tuple[Vector, Quaternion]] = []
|
||||||
|
|
||||||
|
for _ in range(frame_count):
|
||||||
|
if last_frame is None or last_frame != int(frame):
|
||||||
|
# Populate the bone poses for frame A.
|
||||||
|
last_frame = int(frame)
|
||||||
|
|
||||||
|
# TODO: simplify this code and make it easier to follow!
|
||||||
|
if next_frame == last_frame:
|
||||||
|
# Simply transfer the data from next_frame to the last_frame so that we don't need to
|
||||||
|
# resample anything.
|
||||||
|
last_frame_bone_poses = next_frame_bone_poses.copy()
|
||||||
|
else:
|
||||||
|
last_frame_bone_poses.clear()
|
||||||
|
context.scene.frame_set(frame=last_frame)
|
||||||
|
for export_bone in export_bones:
|
||||||
|
location, rotation = _get_pose_bone_location_and_rotation(
|
||||||
|
export_bone.pose_bone,
|
||||||
|
export_bone.armature_object,
|
||||||
|
options.export_space,
|
||||||
|
export_bone.scale,
|
||||||
|
coordinate_system_transform=coordinate_system_transform,
|
||||||
|
has_false_root_bone=psx_bone_create_result.has_false_root_bone,
|
||||||
|
)
|
||||||
|
last_frame_bone_poses.append((location, rotation))
|
||||||
|
|
||||||
|
next_frame = None
|
||||||
|
next_frame_bone_poses.clear()
|
||||||
|
|
||||||
|
# If this is not a subframe, just use the last frame's bone poses.
|
||||||
|
if frame % 1.0 == 0:
|
||||||
|
for i in range(len(export_bones)):
|
||||||
|
add_key(*last_frame_bone_poses[i])
|
||||||
|
else:
|
||||||
|
# Otherwise, this is a subframe, so we need to interpolate the pose between the next frame and the last frame.
|
||||||
|
if next_frame is None:
|
||||||
|
next_frame = last_frame + 1
|
||||||
|
context.scene.frame_set(frame=next_frame)
|
||||||
|
for export_bone in export_bones:
|
||||||
|
location, rotation = _get_pose_bone_location_and_rotation(
|
||||||
|
pose_bone=export_bone.pose_bone,
|
||||||
|
armature_object=export_bone.armature_object,
|
||||||
|
export_space=options.export_space,
|
||||||
|
scale=export_bone.scale,
|
||||||
|
coordinate_system_transform=coordinate_system_transform,
|
||||||
|
has_false_root_bone=psx_bone_create_result.has_false_root_bone,
|
||||||
|
)
|
||||||
|
next_frame_bone_poses.append((location, rotation))
|
||||||
|
|
||||||
|
factor = frame % 1.0
|
||||||
|
|
||||||
|
for i in range(len(export_bones)):
|
||||||
|
last_location, last_rotation = last_frame_bone_poses[i]
|
||||||
|
next_location, next_rotation = next_frame_bone_poses[i]
|
||||||
|
|
||||||
|
location = last_location.lerp(next_location, factor)
|
||||||
|
rotation = last_rotation.slerp(next_rotation, factor)
|
||||||
|
|
||||||
|
add_key(location, rotation)
|
||||||
|
|
||||||
|
frame += frame_step
|
||||||
|
case 'SUBFRAME':
|
||||||
|
for _ in range(frame_count):
|
||||||
|
context.scene.frame_set(frame=int(frame), subframe=frame % 1.0)
|
||||||
|
|
||||||
|
for export_bone in export_bones:
|
||||||
|
location, rotation = _get_pose_bone_location_and_rotation(
|
||||||
|
pose_bone=export_bone.pose_bone,
|
||||||
|
armature_object=export_bone.armature_object,
|
||||||
|
export_space=options.export_space,
|
||||||
|
scale=export_bone.scale,
|
||||||
|
coordinate_system_transform=coordinate_system_transform,
|
||||||
|
has_false_root_bone=psx_bone_create_result.has_false_root_bone,
|
||||||
|
)
|
||||||
|
add_key(location, rotation)
|
||||||
|
|
||||||
|
frame += frame_step
|
||||||
|
|
||||||
|
frame_start_index += frame_count
|
||||||
|
|
||||||
|
psa.sequences[export_sequence.name] = psa_sequence
|
||||||
|
|
||||||
|
context.window_manager.progress_update(export_sequence_index)
|
||||||
|
|
||||||
|
# Restore the previous actions & frame.
|
||||||
|
for armature_object, action in saved_armature_object_actions.items():
|
||||||
|
assert armature_object.animation_data
|
||||||
|
armature_object.animation_data.action = action
|
||||||
|
|
||||||
|
context.scene.frame_set(saved_frame_current)
|
||||||
|
|
||||||
|
context.window_manager.progress_end()
|
||||||
|
|
||||||
|
return psa
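For example, in the INTERPOLATED sampling mode a key requested at frame 12.25 is blended from the evaluated poses at frames 12 and 13. A minimal standalone sketch of that blend (made-up pose values; mathutils ships with Blender):

from mathutils import Vector, Quaternion

frame = 12.25
factor = frame % 1.0  # 0.25: the weight toward the next whole frame

last_location, last_rotation = Vector((0.0, 0.0, 0.0)), Quaternion((1.0, 0.0, 0.0, 0.0))
next_location, next_rotation = Vector((0.0, 4.0, 0.0)), Quaternion((0.7071, 0.0, 0.7071, 0.0))

location = last_location.lerp(next_location, factor)   # linear blend of translations
rotation = last_rotation.slerp(next_rotation, factor)  # spherical blend of rotations
print(location, rotation)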
io_scene_psk_psa/psa/config.py (new file, 77 lines)
@@ -0,0 +1,77 @@
import re
from configparser import ConfigParser
from typing import Dict, List

REMOVE_TRACK_LOCATION = (1 << 0)
REMOVE_TRACK_ROTATION = (1 << 1)


class PsaConfig:
    def __init__(self):
        self.sequence_bone_flags: Dict[str, Dict[int, int]] = dict()


def _load_config_file(file_path: str) -> ConfigParser:
    """
    UEViewer exports a dialect of INI files that is not compatible with Python's ConfigParser.
    Specifically, it allows values in this format:

    [Section]
    Key1
    Key2

    This is not allowed in Python's ConfigParser, which requires a '=' character after each key name.
    To work around this, we'll modify the file to add the '=' character after each key name if it is missing.
    """
    with open(file_path, 'r') as f:
        lines = f.read().split('\n')

    lines = [re.sub(r'^\s*([^=]+)\s*$', r'\1=', line) for line in lines]

    contents = '\n'.join(lines)

    config = ConfigParser()
    config.read_string(contents)

    return config


def _get_bone_flags_from_value(value: str) -> int:
    match value:
        case 'all':
            return REMOVE_TRACK_LOCATION | REMOVE_TRACK_ROTATION
        case 'trans':
            return REMOVE_TRACK_LOCATION
        case 'rot':
            return REMOVE_TRACK_ROTATION
        case _:
            return 0


def read_psa_config(psa_sequence_names: List[str], file_path: str) -> PsaConfig:
    psa_config = PsaConfig()

    config = _load_config_file(file_path)

    if config.has_section('RemoveTracks'):
        for key, value in config.items('RemoveTracks'):
            match = re.match(r'^(.+)\.(\d+)$', key)
            if not match:
                continue
            sequence_name = match.group(1)

            # Map the sequence name onto the actual sequence name in the PSA file.
            try:
                lowercase_sequence_names = [sequence_name.lower() for sequence_name in psa_sequence_names]
                sequence_name = psa_sequence_names[lowercase_sequence_names.index(sequence_name.lower())]
            except ValueError:
                # Sequence name is not in the PSA file.
                continue

            if sequence_name not in psa_config.sequence_bone_flags:
                psa_config.sequence_bone_flags[sequence_name] = dict()

            bone_index = int(match.group(2))
            psa_config.sequence_bone_flags[sequence_name][bone_index] = _get_bone_flags_from_value(value)

    return psa_config
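As an illustration of the parser above, here is a hypothetical UEViewer-style config file and the structure read_psa_config would build from it, assuming this module is importable and the PSA contains sequences named 'Run' and 'Idle' (the sequence-name match is case-insensitive):

from io_scene_psk_psa.psa.config import read_psa_config

# 'DemoMesh_anim.config' is a made-up file name for this sketch.
with open('DemoMesh_anim.config', 'w') as f:
    f.write('[RemoveTracks]\n')
    f.write('run.0=all\n')      # strip location and rotation from bone 0 of 'Run'
    f.write('Idle.3=trans\n')   # strip location only from bone 3 of 'Idle'
    f.write('Other.1=rot\n')    # ignored: 'Other' is not a sequence in the PSA

psa_config = read_psa_config(['Run', 'Idle'], 'DemoMesh_anim.config')
print(psa_config.sequence_bone_flags)  # {'Run': {0: 3}, 'Idle': {3: 1}}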
@@ -1,26 +1,15 @@
-import typing
 from collections import OrderedDict
-from typing import List
+from typing import List, OrderedDict as OrderedDictType
+from ctypes import Structure, c_char, c_int32, c_float
 
-from ..data import *
+from ..shared.data import PsxBone, Quaternion, Vector3
 
-"""
-Note that keys are not stored within the Psa object.
-Use the PsaReader::get_sequence_keys to get a the keys for a sequence.
-"""
 
-
-class Psa(object):
-    class Bone(Structure):
-        _fields_ = [
-            ('name', c_char * 64),
-            ('flags', c_int32),
-            ('children_count', c_int32),
-            ('parent_index', c_int32),
-            ('rotation', Quaternion),
-            ('location', Vector3),
-            ('padding', c_char * 16)
-        ]
-
+class Psa:
+    """
+    Note that keys are not stored within the Psa object.
+    Use the `PsaReader.get_sequence_keys` to get the keys for a sequence.
+    """
+
     class Sequence(Structure):
         _fields_ = [
@@ -59,6 +48,6 @@ class Psa(object):
         return repr((self.location, self.rotation, self.time))
 
     def __init__(self):
-        self.bones: List[Psa.Bone] = []
-        self.sequences: typing.OrderedDict[Psa.Sequence] = OrderedDict()
+        self.bones: List[PsxBone] = []
+        self.sequences: OrderedDictType[str, Psa.Sequence] = OrderedDict()
         self.keys: List[Psa.Key] = []
io_scene_psk_psa/psa/export/__init__.py (new file, 0 lines)
io_scene_psk_psa/psa/export/operators.py (new file, 674 lines)
@@ -0,0 +1,674 @@
|
|||||||
|
from collections import Counter
|
||||||
|
from typing import List, Iterable, Dict, Tuple, cast as typing_cast
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
import re
|
||||||
|
from bpy.props import StringProperty
|
||||||
|
from bpy.types import Context, Action, Object, AnimData, TimelineMarker, Operator, Armature
|
||||||
|
from bpy_extras.io_utils import ExportHelper
|
||||||
|
|
||||||
|
from .properties import (
|
||||||
|
PSA_PG_export,
|
||||||
|
PSA_PG_export_action_list_item,
|
||||||
|
filter_sequences,
|
||||||
|
get_sequences_from_name_and_frame_range,
|
||||||
|
)
|
||||||
|
from .ui import PSA_UL_export_sequences
|
||||||
|
from ..builder import build_psa, PsaBuildSequence, PsaBuildOptions
|
||||||
|
from ..writer import write_psa
|
||||||
|
from ...shared.helpers import populate_bone_collection_list, get_nla_strips_in_frame_range, PsxBoneCollection
|
||||||
|
from ...shared.ui import draw_bone_filter_mode
|
||||||
|
|
||||||
|
|
||||||
|
def get_sequences_propnames_from_source(sequence_source: str) -> Tuple[str, str]:
|
||||||
|
match sequence_source:
|
||||||
|
case 'ACTIONS':
|
||||||
|
return 'action_list', 'action_list_index'
|
||||||
|
case 'TIMELINE_MARKERS':
|
||||||
|
return 'marker_list', 'marker_list_index'
|
||||||
|
case 'NLA_TRACK_STRIPS':
|
||||||
|
return 'nla_strip_list', 'nla_strip_list_index'
|
||||||
|
case 'ACTIVE_ACTION':
|
||||||
|
return 'active_action_list', 'active_action_list_index'
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid sequence source: {sequence_source}'
|
||||||
|
|
||||||
|
|
||||||
|
def is_action_for_object(obj: Object, action: Action):
|
||||||
|
if len(action.fcurves) == 0:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if obj is None or obj.animation_data is None or obj.type != 'ARMATURE':
|
||||||
|
return False
|
||||||
|
|
||||||
|
armature_data = typing_cast(Armature, obj.data)
|
||||||
|
bone_names = set([x.name for x in armature_data.bones])
|
||||||
|
|
||||||
|
# The nesting here is absolutely bonkers.
|
||||||
|
for layer in action.layers:
|
||||||
|
for strip in layer.strips:
|
||||||
|
for channelbag in strip.channelbags:
|
||||||
|
for fcurve in channelbag.fcurves:
|
||||||
|
match = re.match(r'pose\.bones\[\"([^\"]+)\"](\[\"([^\"]+)\"])?', fcurve.data_path)
|
||||||
|
if not match:
|
||||||
|
continue
|
||||||
|
bone_name = match.group(1)
|
||||||
|
if bone_name in bone_names:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
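A small demonstration of the data-path pattern used above, showing which F-Curve paths resolve to a pose-bone name (illustrative paths, not taken from a real action):

import re

pattern = r'pose\.bones\[\"([^\"]+)\"](\[\"([^\"]+)\"])?'

for data_path in (
    'pose.bones["spine_01"].location',     # regular pose-bone channel
    'pose.bones["hand_r"]["ik_target"]',   # custom property on a pose bone
    'key_blocks["Smile"].value',           # not a pose-bone path; no match
):
    match = re.match(pattern, data_path)
    print(data_path, '->', match.group(1) if match else None)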
|
||||||
|
|
||||||
|
|
||||||
|
def update_actions_and_timeline_markers(context: Context, armature_objects: Iterable[Object]):
|
||||||
|
pg = getattr(context.scene, 'psa_export')
|
||||||
|
|
||||||
|
# Clear actions and markers.
|
||||||
|
pg.action_list.clear()
|
||||||
|
pg.marker_list.clear()
|
||||||
|
pg.active_action_list.clear()
|
||||||
|
|
||||||
|
# Get animation data.
|
||||||
|
# TODO: Not sure how to handle this with multiple armatures.
|
||||||
|
animation_data_object = get_animation_data_object(context)
|
||||||
|
animation_data = animation_data_object.animation_data if animation_data_object else None
|
||||||
|
|
||||||
|
if animation_data is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
# Populate actions list.
|
||||||
|
for action in bpy.data.actions:
|
||||||
|
if not any(map(lambda armature_object: is_action_for_object(armature_object, action), armature_objects)):
|
||||||
|
# This action is not applicable to any of the selected armatures.
|
||||||
|
continue
|
||||||
|
|
||||||
|
for (name, frame_start, frame_end) in get_sequences_from_action(action):
|
||||||
|
item = pg.action_list.add()
|
||||||
|
item.action = action
|
||||||
|
item.name = name
|
||||||
|
item.is_selected = False
|
||||||
|
item.is_pose_marker = False
|
||||||
|
item.frame_start = frame_start
|
||||||
|
item.frame_end = frame_end
|
||||||
|
|
||||||
|
# Pose markers are not guaranteed to be in frame-order, so make sure that they are.
|
||||||
|
pose_markers = sorted(action.pose_markers, key=lambda x: x.frame)
|
||||||
|
for pose_marker_index, pose_marker in enumerate(pose_markers):
|
||||||
|
if pose_marker.name.strip() == '' or pose_marker.name.startswith('#'):
|
||||||
|
continue
|
||||||
|
sequences = get_sequences_from_action_pose_markers(action, pose_markers, pose_marker, pose_marker_index)
|
||||||
|
for (name, frame_start, frame_end) in sequences:
|
||||||
|
item = pg.action_list.add()
|
||||||
|
item.action = action
|
||||||
|
item.name = name
|
||||||
|
item.is_selected = False
|
||||||
|
item.is_pose_marker = True
|
||||||
|
item.frame_start = frame_start
|
||||||
|
item.frame_end = frame_end
|
||||||
|
|
||||||
|
# Populate timeline markers list.
|
||||||
|
marker_names = [x.name for x in context.scene.timeline_markers]
|
||||||
|
sequence_frame_ranges = get_timeline_marker_sequence_frame_ranges(animation_data, context, marker_names)
|
||||||
|
|
||||||
|
for marker_name in marker_names:
|
||||||
|
if marker_name not in sequence_frame_ranges:
|
||||||
|
continue
|
||||||
|
if marker_name.strip() == '' or marker_name.startswith('#'):
|
||||||
|
continue
|
||||||
|
frame_start, frame_end = sequence_frame_ranges[marker_name]
|
||||||
|
sequences = get_sequences_from_name_and_frame_range(marker_name, frame_start, frame_end)
|
||||||
|
for (sequence_name, frame_start, frame_end) in sequences:
|
||||||
|
item = pg.marker_list.add()
|
||||||
|
item.name = sequence_name
|
||||||
|
item.is_selected = False
|
||||||
|
item.frame_start = frame_start
|
||||||
|
item.frame_end = frame_end
|
||||||
|
|
||||||
|
# Populate the active action list.
|
||||||
|
for armature_object in context.selected_objects:
|
||||||
|
if armature_object.type != 'ARMATURE':
|
||||||
|
continue
|
||||||
|
action = armature_object.animation_data.action if armature_object.animation_data else None
|
||||||
|
if action is None:
|
||||||
|
continue
|
||||||
|
item = pg.active_action_list.add()
|
||||||
|
item.name = action.name
|
||||||
|
item.armature_object = armature_object
|
||||||
|
item.action = action
|
||||||
|
item.frame_start = int(item.action.frame_range[0])
|
||||||
|
item.frame_end = int(item.action.frame_range[1])
|
||||||
|
item.is_selected = True
|
||||||
|
|
||||||
|
|
||||||
|
def get_sequence_fps(context: Context, fps_source: str, fps_custom: float, actions: Iterable[Action]) -> float:
|
||||||
|
match fps_source:
|
||||||
|
case 'SCENE':
|
||||||
|
return context.scene.render.fps
|
||||||
|
case 'CUSTOM':
|
||||||
|
return fps_custom
|
||||||
|
case 'ACTION_METADATA':
|
||||||
|
# Get the minimum value of action metadata FPS values.
|
||||||
|
return min([action.psa_export.fps for action in actions])
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid FPS source: {fps_source}'
|
||||||
|
|
||||||
|
|
||||||
|
def get_sequence_compression_ratio(
|
||||||
|
compression_ratio_source: str,
|
||||||
|
compression_ratio_custom: float,
|
||||||
|
actions: Iterable[Action],
|
||||||
|
) -> float:
|
||||||
|
match compression_ratio_source:
|
||||||
|
case 'ACTION_METADATA':
|
||||||
|
# Get the minimum value of action metadata compression ratio values.
|
||||||
|
return min(map(lambda action: action.psa_export.compression_ratio, actions))
|
||||||
|
case 'CUSTOM':
|
||||||
|
return compression_ratio_custom
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid compression ratio source: {compression_ratio_source}'
|
||||||
|
|
||||||
|
|
||||||
|
def get_animation_data_object(context: Context) -> Object:
|
||||||
|
pg: PSA_PG_export = getattr(context.scene, 'psa_export')
|
||||||
|
|
||||||
|
active_object = context.view_layer.objects.active
|
||||||
|
|
||||||
|
if active_object is None or active_object.type != 'ARMATURE':
|
||||||
|
raise RuntimeError('Active object must be an Armature')
|
||||||
|
|
||||||
|
if pg.sequence_source != 'ACTIONS' and pg.should_override_animation_data:
|
||||||
|
animation_data_object = pg.animation_data_override
|
||||||
|
else:
|
||||||
|
animation_data_object = active_object
|
||||||
|
|
||||||
|
return animation_data_object
|
||||||
|
|
||||||
|
|
||||||
|
def get_timeline_marker_sequence_frame_ranges(
|
||||||
|
animation_data: AnimData,
|
||||||
|
context: Context,
|
||||||
|
marker_names: List[str],
|
||||||
|
) -> Dict:
|
||||||
|
# Timeline markers need to be sorted so that we can determine the sequence start and end positions.
|
||||||
|
sequence_frame_ranges = dict()
|
||||||
|
sorted_timeline_markers = list(sorted(context.scene.timeline_markers, key=lambda x: x.frame))
|
||||||
|
sorted_timeline_marker_names = [x.name for x in sorted_timeline_markers]
|
||||||
|
|
||||||
|
for marker_name in marker_names:
|
||||||
|
marker = context.scene.timeline_markers[marker_name]
|
||||||
|
frame_start = marker.frame
|
||||||
|
# Determine the final frame of the sequence based on the next marker.
|
||||||
|
# If no subsequent marker exists, use the maximum frame_end from all NLA strips.
|
||||||
|
marker_index = sorted_timeline_marker_names.index(marker_name)
|
||||||
|
next_marker_index = marker_index + 1
|
||||||
|
frame_end = 0
|
||||||
|
if next_marker_index < len(sorted_timeline_markers):
|
||||||
|
# There is a next marker. Use that next marker's frame position as the last frame of this sequence.
|
||||||
|
frame_end = sorted_timeline_markers[next_marker_index].frame
|
||||||
|
nla_strips = list(get_nla_strips_in_frame_range(animation_data, marker.frame, frame_end))
|
||||||
|
if len(nla_strips) > 0:
|
||||||
|
frame_end = min(frame_end, max(map(lambda nla_strip: nla_strip.frame_end, nla_strips)))
|
||||||
|
frame_start = max(frame_start, min(map(lambda nla_strip: nla_strip.frame_start, nla_strips)))
|
||||||
|
else:
|
||||||
|
# No strips in between this marker and the next, just export this as a one-frame animation.
|
||||||
|
frame_end = frame_start
|
||||||
|
else:
|
||||||
|
# There is no next marker.
|
||||||
|
# Find the final frame of all the NLA strips and use that as the last frame of this sequence.
|
||||||
|
for nla_track in animation_data.nla_tracks:
|
||||||
|
if nla_track.mute:
|
||||||
|
continue
|
||||||
|
for strip in nla_track.strips:
|
||||||
|
frame_end = max(frame_end, strip.frame_end)
|
||||||
|
|
||||||
|
if frame_start > frame_end:
|
||||||
|
continue
|
||||||
|
|
||||||
|
sequence_frame_ranges[marker_name] = int(frame_start), int(frame_end)
|
||||||
|
|
||||||
|
return sequence_frame_ranges
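As a worked illustration of the logic above (made-up marker and strip frames, no Blender data, and ignoring the clamping of each range to its NLA strips): markers partition the timeline, and the final sequence runs to the end of the furthest strip.

# Markers at frames 1, 50 and 100, with NLA strips extending to frame 120.
markers = [('Idle', 1), ('Walk', 50), ('Run', 100)]
last_strip_frame_end = 120

sequence_frame_ranges = {}
for index, (name, frame_start) in enumerate(markers):
    if index + 1 < len(markers):
        frame_end = markers[index + 1][1]   # the next marker bounds this sequence
    else:
        frame_end = last_strip_frame_end    # no next marker: run to the last strip
    sequence_frame_ranges[name] = (frame_start, frame_end)

print(sequence_frame_ranges)  # {'Idle': (1, 50), 'Walk': (50, 100), 'Run': (100, 120)}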
|
||||||
|
|
||||||
|
|
||||||
|
def get_sequences_from_action(action: Action):
|
||||||
|
if action.name == '' or action.name.startswith('#'):
|
||||||
|
return
|
||||||
|
|
||||||
|
frame_start = int(action.frame_range[0])
|
||||||
|
action_name = action.name
|
||||||
|
|
||||||
|
if action_name.startswith('!'):
|
||||||
|
# If the pose marker name starts with an exclamation mark, only export the first frame.
|
||||||
|
frame_end = frame_start
|
||||||
|
action_name = action_name[1:]
|
||||||
|
else:
|
||||||
|
frame_end = int(action.frame_range[1])
|
||||||
|
|
||||||
|
yield from get_sequences_from_name_and_frame_range(action_name, frame_start, frame_end)
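The leading characters of an action name act as lightweight export directives; a minimal sketch of how a few hypothetical names would be treated, assuming a frame range of 1-30:

def sequence_plan(action_name: str, frame_start: int = 1, frame_end: int = 30):
    # Mirrors the naming rules above on plain strings (illustration only).
    if action_name == '' or action_name.startswith('#'):
        return None                                        # ignored entirely
    if action_name.startswith('!'):
        return action_name[1:], frame_start, frame_start   # single-frame pose
    return action_name, frame_start, frame_end

print(sequence_plan('#Scratch'))  # None
print(sequence_plan('!AimPose'))  # ('AimPose', 1, 1)
print(sequence_plan('Run'))       # ('Run', 1, 30)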
|
||||||
|
|
||||||
|
|
||||||
|
def get_sequences_from_action_pose_markers(
|
||||||
|
action: Action,
|
||||||
|
pose_markers: List[TimelineMarker],
|
||||||
|
pose_marker: TimelineMarker,
|
||||||
|
pose_marker_index: int,
|
||||||
|
):
|
||||||
|
frame_start = pose_marker.frame
|
||||||
|
sequence_name = pose_marker.name
|
||||||
|
if pose_marker.name.startswith('!'):
|
||||||
|
# If the pose marker name starts with an exclamation mark, only export the first frame.
|
||||||
|
frame_end = frame_start
|
||||||
|
sequence_name = sequence_name[1:]
|
||||||
|
elif pose_marker_index + 1 < len(pose_markers):
|
||||||
|
frame_end = pose_markers[pose_marker_index + 1].frame
|
||||||
|
else:
|
||||||
|
frame_end = int(action.frame_range[1])
|
||||||
|
yield from get_sequences_from_name_and_frame_range(sequence_name, frame_start, frame_end)
|
||||||
|
|
||||||
|
|
||||||
|
def get_visible_sequences(pg: PSA_PG_export, sequences) -> List[PSA_PG_export_action_list_item]:
|
||||||
|
visible_sequences = []
|
||||||
|
for i, flag in enumerate(filter_sequences(pg, sequences)):
|
||||||
|
if bool(flag & (1 << 30)):
|
||||||
|
visible_sequences.append(sequences[i])
|
||||||
|
return visible_sequences
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_OT_export(Operator, ExportHelper):
|
||||||
|
bl_idname = 'psa.export'
|
||||||
|
bl_label = 'Export'
|
||||||
|
bl_options = {'INTERNAL', 'UNDO'}
|
||||||
|
bl_description = 'Export actions to PSA'
|
||||||
|
filename_ext = '.psa'
|
||||||
|
filter_glob: StringProperty(default='*.psa', options={'HIDDEN'})
|
||||||
|
filepath: StringProperty(
|
||||||
|
name='File Path',
|
||||||
|
description='File path used for exporting the PSA file',
|
||||||
|
maxlen=1024,
|
||||||
|
default='')
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
self.armature_objects: List[Object] = []
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
try:
|
||||||
|
cls._check_context(context)
|
||||||
|
except RuntimeError as e:
|
||||||
|
cls.poll_message_set(str(e))
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
layout = self.layout
|
||||||
|
pg = getattr(context.scene, 'psa_export')
|
||||||
|
|
||||||
|
sequences_header, sequences_panel = layout.panel('Sequences', default_closed=False)
|
||||||
|
sequences_header.label(text='Sequences', icon='ACTION')
|
||||||
|
|
||||||
|
if sequences_panel:
|
||||||
|
flow = sequences_panel.grid_flow()
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
flow.prop(pg, 'sequence_source', text='Source')
|
||||||
|
|
||||||
|
if pg.sequence_source in {'TIMELINE_MARKERS', 'NLA_TRACK_STRIPS'}:
|
||||||
|
# ANIMDATA SOURCE
|
||||||
|
flow.prop(pg, 'should_override_animation_data')
|
||||||
|
if pg.should_override_animation_data:
|
||||||
|
flow.prop(pg, 'animation_data_override', text=' ')
|
||||||
|
|
||||||
|
if pg.sequence_source == 'NLA_TRACK_STRIPS':
|
||||||
|
flow = sequences_panel.grid_flow()
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
flow.prop(pg, 'nla_track')
|
||||||
|
|
||||||
|
# SELECT ALL/NONE
|
||||||
|
row = sequences_panel.row(align=True)
|
||||||
|
row.label(text='Select')
|
||||||
|
row.operator(PSA_OT_export_actions_select_all.bl_idname, text='All', icon='CHECKBOX_HLT')
|
||||||
|
row.operator(PSA_OT_export_actions_deselect_all.bl_idname, text='None', icon='CHECKBOX_DEHLT')
|
||||||
|
|
||||||
|
propname, active_propname = get_sequences_propnames_from_source(pg.sequence_source)
|
||||||
|
sequences_panel.template_list(PSA_UL_export_sequences.bl_idname, '', pg, propname, pg, active_propname,
|
||||||
|
rows=max(3, min(len(getattr(pg, propname)), 10)))
|
||||||
|
|
||||||
|
name_header, name_panel = layout.panel('Name', default_closed=False)
|
||||||
|
name_header.label(text='Name')
|
||||||
|
if name_panel:
|
||||||
|
flow = name_panel.grid_flow()
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
flow.prop(pg, 'sequence_name_prefix', text='Name Prefix')
|
||||||
|
flow.prop(pg, 'sequence_name_suffix')
|
||||||
|
|
||||||
|
# Determine if there is going to be a naming conflict and display an error, if so.
|
||||||
|
selected_items = [x for x in pg.action_list if x.is_selected]
|
||||||
|
action_names = [x.name for x in selected_items]
|
||||||
|
action_name_counts = Counter(action_names)
|
||||||
|
for action_name, count in action_name_counts.items():
|
||||||
|
if count > 1:
|
||||||
|
layout.label(text=f'Duplicate action: {action_name}', icon='ERROR')
|
||||||
|
break
|
||||||
|
|
||||||
|
sampling_header, sampling_panel = layout.panel('Data Source', default_closed=False)
|
||||||
|
sampling_header.label(text='Sampling')
|
||||||
|
if sampling_panel:
|
||||||
|
flow = sampling_panel.grid_flow()
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
|
||||||
|
# SAMPLING MODE
|
||||||
|
flow.prop(pg, 'sampling_mode', text='Sampling Mode')
|
||||||
|
|
||||||
|
# FPS
|
||||||
|
col = flow.row(align=True)
|
||||||
|
col.prop(pg, 'fps_source', text='FPS')
|
||||||
|
if pg.fps_source == 'CUSTOM':
|
||||||
|
col.prop(pg, 'fps_custom', text='')
|
||||||
|
|
||||||
|
# COMPRESSION RATIO
|
||||||
|
col = flow.row(align=True)
|
||||||
|
col.prop(pg, 'compression_ratio_source', text='Compression Ratio')
|
||||||
|
if pg.compression_ratio_source == 'CUSTOM':
|
||||||
|
col.prop(pg, 'compression_ratio_custom', text='')
|
||||||
|
|
||||||
|
# BONES
|
||||||
|
bones_header, bones_panel = layout.panel('Bones', default_closed=False)
|
||||||
|
bones_header.label(text='Bones', icon='BONE_DATA')
|
||||||
|
if bones_panel:
|
||||||
|
row = bones_panel.row(align=True)
|
||||||
|
|
||||||
|
draw_bone_filter_mode(row, pg)
|
||||||
|
|
||||||
|
if pg.bone_filter_mode == 'BONE_COLLECTIONS':
|
||||||
|
row = bones_panel.row(align=True)
|
||||||
|
row.label(text='Select')
|
||||||
|
row.operator(PSA_OT_export_bone_collections_select_all.bl_idname, text='All', icon='CHECKBOX_HLT')
|
||||||
|
row.operator(PSA_OT_export_bone_collections_deselect_all.bl_idname, text='None', icon='CHECKBOX_DEHLT')
|
||||||
|
rows = max(3, min(len(pg.bone_collection_list), 10))
|
||||||
|
bones_panel.template_list(
|
||||||
|
'PSX_UL_bone_collection_list', '', pg, 'bone_collection_list', pg, 'bone_collection_list_index',
|
||||||
|
rows=rows
|
||||||
|
)
|
||||||
|
|
||||||
|
bones_advanced_header, bones_advanced_panel = layout.panel('Bones Advanced', default_closed=True)
|
||||||
|
bones_advanced_header.label(text='Advanced')
|
||||||
|
if bones_advanced_panel:
|
||||||
|
flow = bones_advanced_panel.grid_flow()
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
flow.prop(pg, 'root_bone_name', text='Root Bone Name')
|
||||||
|
|
||||||
|
# TRANSFORM
|
||||||
|
transform_header, transform_panel = layout.panel('Advanced', default_closed=False)
|
||||||
|
transform_header.label(text='Transform')
|
||||||
|
|
||||||
|
if transform_panel:
|
||||||
|
flow = transform_panel.grid_flow(row_major=True)
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
flow.prop(pg, 'export_space')
|
||||||
|
flow.prop(pg, 'scale')
|
||||||
|
flow.prop(pg, 'forward_axis')
|
||||||
|
flow.prop(pg, 'up_axis')
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _check_context(cls, context):
|
||||||
|
if context.view_layer.objects.active is None:
|
||||||
|
raise RuntimeError('An armature must be selected')
|
||||||
|
|
||||||
|
if context.view_layer.objects.active.type != 'ARMATURE':
|
||||||
|
raise RuntimeError('The active object must be an armature')
|
||||||
|
|
||||||
|
if context.scene.is_nla_tweakmode:
|
||||||
|
raise RuntimeError('Cannot export PSA while in NLA tweak mode')
|
||||||
|
|
||||||
|
|
||||||
|
def invoke(self, context, _event):
|
||||||
|
try:
|
||||||
|
self._check_context(context)
|
||||||
|
except RuntimeError as e:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
return {'CANCELLED'}
|
||||||
|
|
||||||
|
pg: PSA_PG_export = getattr(context.scene, 'psa_export')
|
||||||
|
|
||||||
|
self.armature_objects = [x for x in context.view_layer.objects.selected if x.type == 'ARMATURE']
|
||||||
|
|
||||||
|
for armature_object in self.armature_objects:
|
||||||
|
# This is required otherwise the action list will be empty if the armature has never had its animation
|
||||||
|
# data created before (i.e. if no action was ever assigned to it).
|
||||||
|
if armature_object.animation_data is None:
|
||||||
|
armature_object.animation_data_create()
|
||||||
|
|
||||||
|
update_actions_and_timeline_markers(context, self.armature_objects)
|
||||||
|
populate_bone_collection_list(
|
||||||
|
pg.bone_collection_list,
|
||||||
|
self.armature_objects,
|
||||||
|
primary_key='DATA' if pg.sequence_source == 'ACTIVE_ACTION' else 'OBJECT',
|
||||||
|
)
|
||||||
|
|
||||||
|
context.window_manager.fileselect_add(self)
|
||||||
|
|
||||||
|
return {'RUNNING_MODAL'}
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
pg = getattr(context.scene, 'psa_export')
|
||||||
|
|
||||||
|
# Populate the export sequence list.
|
||||||
|
animation_data_object = get_animation_data_object(context)
|
||||||
|
animation_data = animation_data_object.animation_data
|
||||||
|
|
||||||
|
if animation_data is None:
|
||||||
|
raise RuntimeError(f'No animation data for object \'{animation_data_object.name}\'')
|
||||||
|
|
||||||
|
if context.active_object is None:
|
||||||
|
raise RuntimeError('No active object')
|
||||||
|
|
||||||
|
export_sequences: List[PsaBuildSequence] = []
|
||||||
|
|
||||||
|
match pg.sequence_source:
|
||||||
|
case 'ACTIONS':
|
||||||
|
for action_item in filter(lambda x: x.is_selected, pg.action_list):
|
||||||
|
if len(action_item.action.fcurves) == 0:
|
||||||
|
continue
|
||||||
|
export_sequence = PsaBuildSequence(context.active_object, animation_data)
|
||||||
|
export_sequence.name = action_item.name
|
||||||
|
export_sequence.nla_state.action = action_item.action
|
||||||
|
export_sequence.nla_state.frame_start = action_item.frame_start
|
||||||
|
export_sequence.nla_state.frame_end = action_item.frame_end
|
||||||
|
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [action_item.action])
|
||||||
|
export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, [action_item.action])
|
||||||
|
export_sequence.key_quota = action_item.action.psa_export.key_quota
|
||||||
|
export_sequences.append(export_sequence)
|
||||||
|
case 'TIMELINE_MARKERS':
|
||||||
|
for marker_item in filter(lambda x: x.is_selected, pg.marker_list):
|
||||||
|
export_sequence = PsaBuildSequence(context.active_object, animation_data)
|
||||||
|
export_sequence.name = marker_item.name
|
||||||
|
export_sequence.nla_state.frame_start = marker_item.frame_start
|
||||||
|
export_sequence.nla_state.frame_end = marker_item.frame_end
|
||||||
|
nla_strips_actions = set(
|
||||||
|
map(lambda x: x.action, get_nla_strips_in_frame_range(animation_data, marker_item.frame_start, marker_item.frame_end)))
|
||||||
|
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, nla_strips_actions)
|
||||||
|
export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, nla_strips_actions)
|
||||||
|
export_sequences.append(export_sequence)
|
||||||
|
case 'NLA_TRACK_STRIPS':
|
||||||
|
for nla_strip_item in filter(lambda x: x.is_selected, pg.nla_strip_list):
|
||||||
|
export_sequence = PsaBuildSequence(context.active_object, animation_data)
|
||||||
|
export_sequence.name = nla_strip_item.name
|
||||||
|
export_sequence.nla_state.frame_start = nla_strip_item.frame_start
|
||||||
|
export_sequence.nla_state.frame_end = nla_strip_item.frame_end
|
||||||
|
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [nla_strip_item.action])
|
||||||
|
export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, [nla_strip_item.action])
|
||||||
|
export_sequence.key_quota = nla_strip_item.action.psa_export.key_quota
|
||||||
|
export_sequences.append(export_sequence)
|
||||||
|
case 'ACTIVE_ACTION':
|
||||||
|
for active_action_item in filter(lambda x: x.is_selected, pg.active_action_list):
|
||||||
|
export_sequence = PsaBuildSequence(active_action_item.armature_object, active_action_item.armature_object.animation_data)
|
||||||
|
action = active_action_item.action
|
||||||
|
export_sequence.name = action.name
|
||||||
|
export_sequence.nla_state.action = action
|
||||||
|
export_sequence.nla_state.frame_start = int(action.frame_range[0])
|
||||||
|
export_sequence.nla_state.frame_end = int(action.frame_range[1])
|
||||||
|
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [action])
|
||||||
|
export_sequence.compression_ratio = get_sequence_compression_ratio(pg.compression_ratio_source, pg.compression_ratio_custom, [action])
|
||||||
|
export_sequence.key_quota = action.psa_export.key_quota
|
||||||
|
export_sequences.append(export_sequence)
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid sequence source: {pg.sequence_source}'
|
||||||
|
|
||||||
|
if len(export_sequences) == 0:
|
||||||
|
self.report({'ERROR'}, 'No sequences were selected for export')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
options = PsaBuildOptions()
|
||||||
|
options.armature_objects = self.armature_objects
|
||||||
|
options.animation_data = animation_data
|
||||||
|
options.sequences = export_sequences
|
||||||
|
options.bone_filter_mode = pg.bone_filter_mode
|
||||||
|
options.bone_collection_indices = [PsxBoneCollection(x.armature_object_name, x.armature_data_name, x.index) for x in pg.bone_collection_list if x.is_selected]
|
||||||
|
options.sequence_name_prefix = pg.sequence_name_prefix
|
||||||
|
options.sequence_name_suffix = pg.sequence_name_suffix
|
||||||
|
options.sampling_mode = pg.sampling_mode
|
||||||
|
options.export_space = pg.export_space
|
||||||
|
options.scale = pg.scale
|
||||||
|
options.forward_axis = pg.forward_axis
|
||||||
|
options.up_axis = pg.up_axis
|
||||||
|
options.root_bone_name = pg.root_bone_name
|
||||||
|
options.sequence_source = pg.sequence_source
|
||||||
|
|
||||||
|
try:
|
||||||
|
psa = build_psa(context, options)
|
||||||
|
self.report({'INFO'}, f'PSA export successful')
|
||||||
|
except RuntimeError as e:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
write_psa(psa, self.filepath)
|
||||||
|
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_OT_export_actions_select_all(Operator):
|
||||||
|
bl_idname = 'psa.export_actions_select_all'
|
||||||
|
bl_label = 'Select All'
|
||||||
|
bl_description = 'Select all visible sequences'
|
||||||
|
bl_options = {'INTERNAL'}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_item_list(cls, context):
|
||||||
|
pg = context.scene.psa_export
|
||||||
|
match pg.sequence_source:
|
||||||
|
case 'ACTIONS':
|
||||||
|
return pg.action_list
|
||||||
|
case 'TIMELINE_MARKERS':
|
||||||
|
return pg.marker_list
|
||||||
|
case 'NLA_TRACK_STRIPS':
|
||||||
|
return pg.nla_strip_list
|
||||||
|
case 'ACTIVE_ACTION':
|
||||||
|
return pg.active_action_list
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid sequence source: {pg.sequence_source}'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
pg = getattr(context.scene, 'psa_export')
|
||||||
|
item_list = cls.get_item_list(context)
|
||||||
|
visible_sequences = get_visible_sequences(pg, item_list)
|
||||||
|
has_unselected_sequences = any(map(lambda item: not item.is_selected, visible_sequences))
|
||||||
|
return has_unselected_sequences
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
pg = getattr(context.scene, 'psa_export')
|
||||||
|
sequences = self.get_item_list(context)
|
||||||
|
for sequence in get_visible_sequences(pg, sequences):
|
||||||
|
sequence.is_selected = True
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_OT_export_actions_deselect_all(Operator):
|
||||||
|
bl_idname = 'psa.export_sequences_deselect_all'
|
||||||
|
bl_label = 'Deselect All'
|
||||||
|
bl_description = 'Deselect all visible sequences'
|
||||||
|
bl_options = {'INTERNAL'}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_item_list(cls, context):
|
||||||
|
pg = context.scene.psa_export
|
||||||
|
match pg.sequence_source:
|
||||||
|
case 'ACTIONS':
|
||||||
|
return pg.action_list
|
||||||
|
case 'TIMELINE_MARKERS':
|
||||||
|
return pg.marker_list
|
||||||
|
case 'NLA_TRACK_STRIPS':
|
||||||
|
return pg.nla_strip_list
|
||||||
|
case 'ACTIVE_ACTION':
|
||||||
|
return pg.active_action_list
|
||||||
|
case _:
|
||||||
|
return None
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
item_list = cls.get_item_list(context)
|
||||||
|
has_selected_items = any(map(lambda item: item.is_selected, item_list))
|
||||||
|
return len(item_list) > 0 and has_selected_items
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
pg = getattr(context.scene, 'psa_export')
|
||||||
|
item_list = self.get_item_list(context)
|
||||||
|
for sequence in get_visible_sequences(pg, item_list):
|
||||||
|
sequence.is_selected = False
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_OT_export_bone_collections_select_all(Operator):
|
||||||
|
bl_idname = 'psa.export_bone_collections_select_all'
|
||||||
|
bl_label = 'Select All'
|
||||||
|
bl_description = 'Select all bone collections'
|
||||||
|
bl_options = {'INTERNAL'}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
pg = getattr(context.scene, 'psa_export')
|
||||||
|
item_list = pg.bone_collection_list
|
||||||
|
has_unselected_items = any(map(lambda action: not action.is_selected, item_list))
|
||||||
|
return len(item_list) > 0 and has_unselected_items
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
pg = getattr(context.scene, 'psa_export')
|
||||||
|
for item in pg.bone_collection_list:
|
||||||
|
item.is_selected = True
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_OT_export_bone_collections_deselect_all(Operator):
|
||||||
|
bl_idname = 'psa.export_bone_collections_deselect_all'
|
||||||
|
bl_label = 'Deselect All'
|
||||||
|
bl_description = 'Deselect all bone collections'
|
||||||
|
bl_options = {'INTERNAL'}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
pg = getattr(context.scene, 'psa_export')
|
||||||
|
item_list = pg.bone_collection_list
|
||||||
|
has_selected_actions = any(map(lambda action: action.is_selected, item_list))
|
||||||
|
return len(item_list) > 0 and has_selected_actions
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
pg = getattr(context.scene, 'psa_export')
|
||||||
|
for action in pg.bone_collection_list:
|
||||||
|
action.is_selected = False
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
_classes = (
|
||||||
|
PSA_OT_export,
|
||||||
|
PSA_OT_export_actions_select_all,
|
||||||
|
PSA_OT_export_actions_deselect_all,
|
||||||
|
PSA_OT_export_bone_collections_select_all,
|
||||||
|
PSA_OT_export_bone_collections_deselect_all,
|
||||||
|
)
|
||||||
|
|
||||||
|
from bpy.utils import register_classes_factory
|
||||||
|
register, unregister = register_classes_factory(_classes)
|
||||||
|
|
||||||
io_scene_psk_psa/psa/export/properties.py (new file, 276 lines)
@@ -0,0 +1,276 @@
|
|||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from fnmatch import fnmatch
|
||||||
|
from typing import List, Optional
|
||||||
|
from bpy.props import (
|
||||||
|
BoolProperty,
|
||||||
|
PointerProperty,
|
||||||
|
EnumProperty,
|
||||||
|
FloatProperty,
|
||||||
|
CollectionProperty,
|
||||||
|
IntProperty,
|
||||||
|
StringProperty,
|
||||||
|
)
|
||||||
|
from bpy.types import PropertyGroup, Object, Action, AnimData, Context
|
||||||
|
|
||||||
|
from ...shared.dfs import dfs_view_layer_objects
|
||||||
|
from ...shared.helpers import populate_bone_collection_list
|
||||||
|
from ...shared.types import TransformMixin, ExportSpaceMixin, PsxBoneExportMixin
|
||||||
|
|
||||||
|
|
||||||
|
def psa_export_property_group_animation_data_override_poll(_context, obj):
|
||||||
|
return obj.animation_data is not None
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_PG_export_action_list_item(PropertyGroup):
|
||||||
|
action: PointerProperty(type=Action)
|
||||||
|
name: StringProperty()
|
||||||
|
is_selected: BoolProperty(default=True)
|
||||||
|
frame_start: IntProperty(options={'HIDDEN'})
|
||||||
|
frame_end: IntProperty(options={'HIDDEN'})
|
||||||
|
is_pose_marker: BoolProperty(options={'HIDDEN'})
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_PG_export_active_action_list_item(PropertyGroup):
|
||||||
|
action: PointerProperty(type=Action)
|
||||||
|
name: StringProperty()
|
||||||
|
armature_object: PointerProperty(type=Object)
|
||||||
|
is_selected: BoolProperty(default=True)
|
||||||
|
frame_start: IntProperty(options={'HIDDEN'})
|
||||||
|
frame_end: IntProperty(options={'HIDDEN'})
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_PG_export_timeline_markers(PropertyGroup): # TODO: rename this to singular
|
||||||
|
marker_index: IntProperty()
|
||||||
|
name: StringProperty()
|
||||||
|
is_selected: BoolProperty(default=True)
|
||||||
|
frame_start: IntProperty(options={'HIDDEN'})
|
||||||
|
frame_end: IntProperty(options={'HIDDEN'})
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_PG_export_nla_strip_list_item(PropertyGroup):
|
||||||
|
name: StringProperty()
|
||||||
|
action: PointerProperty(type=Action)
|
||||||
|
frame_start: FloatProperty()
|
||||||
|
frame_end: FloatProperty()
|
||||||
|
is_selected: BoolProperty(default=True)
|
||||||
|
|
||||||
|
|
||||||
|
def get_sequences_from_name_and_frame_range(name: str, frame_start: int, frame_end: int):
|
||||||
|
reversed_pattern = r'(.+)/(.+)'
|
||||||
|
reversed_match = re.match(reversed_pattern, name)
|
||||||
|
if reversed_match:
|
||||||
|
forward_name = reversed_match.group(1)
|
||||||
|
backwards_name = reversed_match.group(2)
|
||||||
|
yield forward_name, frame_start, frame_end
|
||||||
|
yield backwards_name, frame_end, frame_start
|
||||||
|
else:
|
||||||
|
yield name, frame_start, frame_end
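A name containing a '/' produces both a forward and a reversed sequence, the latter with its frame range swapped so it plays backwards. For example, with the generator above in scope:

print(list(get_sequences_from_name_and_frame_range('Walk/WalkBack', 1, 30)))
# [('Walk', 1, 30), ('WalkBack', 30, 1)]
print(list(get_sequences_from_name_and_frame_range('Run', 1, 30)))
# [('Run', 1, 30)]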
|
||||||
|
|
||||||
|
|
||||||
|
def nla_track_update_cb(self: 'PSA_PG_export', context: Context) -> None:
|
||||||
|
self.nla_strip_list.clear()
|
||||||
|
match = re.match(r'^(\d+).+$', self.nla_track)
|
||||||
|
self.nla_track_index = int(match.group(1)) if match else -1
|
||||||
|
if self.nla_track_index >= 0:
|
||||||
|
animation_data = get_animation_data(self, context)
|
||||||
|
if animation_data is None:
|
||||||
|
return
|
||||||
|
nla_track = animation_data.nla_tracks[self.nla_track_index]
|
||||||
|
for nla_strip in nla_track.strips:
|
||||||
|
for sequence_name, frame_start, frame_end in get_sequences_from_name_and_frame_range(nla_strip.name, nla_strip.frame_start, nla_strip.frame_end):
|
||||||
|
strip: PSA_PG_export_nla_strip_list_item = self.nla_strip_list.add()
|
||||||
|
strip.action = nla_strip.action
|
||||||
|
strip.name = sequence_name
|
||||||
|
strip.frame_start = frame_start
|
||||||
|
strip.frame_end = frame_end
|
||||||
|
|
||||||
|
|
||||||
|
def get_animation_data(pg: 'PSA_PG_export', context: Context) -> Optional[AnimData]:
|
||||||
|
animation_data_object = context.object
|
||||||
|
if pg.should_override_animation_data:
|
||||||
|
animation_data_object = pg.animation_data_override
|
||||||
|
return animation_data_object.animation_data if animation_data_object else None
|
||||||
|
|
||||||
|
|
||||||
|
def nla_track_search_cb(self, context: Context, edit_text: str):
|
||||||
|
pg = getattr(context.scene, 'psa_export')
|
||||||
|
animation_data = get_animation_data(pg, context)
|
||||||
|
if animation_data is not None:
|
||||||
|
for index, nla_track in enumerate(animation_data.nla_tracks):
|
||||||
|
yield f'{index} - {nla_track.name}'
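The NLA track property stores the selection as a display string produced by the search callback above; the update callback then recovers the track index from the leading digits. A quick illustration with a made-up track name:

import re

selected = '2 - WalkCycles'
match = re.match(r'^(\d+).+$', selected)
nla_track_index = int(match.group(1)) if match else -1
print(nla_track_index)  # 2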
|
||||||
|
|
||||||
|
|
||||||
|
def animation_data_override_update_cb(self: 'PSA_PG_export', context: Context):
|
||||||
|
# Reset NLA track selection
|
||||||
|
self.nla_track = ''
|
||||||
|
|
||||||
|
|
||||||
|
sequence_source_items = (
|
||||||
|
('ACTIONS', 'Actions', 'Sequences will be exported using actions', 'ACTION', 0),
|
||||||
|
('TIMELINE_MARKERS', 'Timeline Markers', 'Sequences are delineated by scene timeline markers', 'MARKER_HLT', 1),
|
||||||
|
('NLA_TRACK_STRIPS', 'NLA Track Strips', 'Sequences are delineated by the start & end times of strips on the selected NLA track', 'NLA', 2),
|
||||||
|
('ACTIVE_ACTION', 'Active Action', 'The active action will be exported for each selected armature', 'ACTION', 3),
|
||||||
|
)
|
||||||
|
|
||||||
|
fps_source_items = (
|
||||||
|
('SCENE', 'Scene', '', 'SCENE_DATA', 0),
|
||||||
|
('ACTION_METADATA', 'Action Metadata', 'The frame rate will be determined by action\'s FPS property found in the PSA Export panel.\n\nIf the Sequence Source is Timeline Markers, the lowest value of all contributing actions will be used', 'ACTION', 1),
|
||||||
|
('CUSTOM', 'Custom', '', 2)
|
||||||
|
)
|
||||||
|
|
||||||
|
compression_ratio_source_items = (
|
||||||
|
('ACTION_METADATA', 'Action Metadata', 'The compression ratio will be determined by action\'s Compression Ratio property found in the PSA Export panel.\n\nIf the Sequence Source is Timeline Markers, the lowest value of all contributing actions will be used', 'ACTION', 1),
|
||||||
|
('CUSTOM', 'Custom', '', 2)
|
||||||
|
)
|
||||||
|
|
||||||
|
sampling_mode_items = (
|
||||||
|
('INTERPOLATED', 'Interpolated', 'Sampling is performed by interpolating the evaluated bone poses from the adjacent whole frames.', 'INTERPOLATED', 0),
|
||||||
|
('SUBFRAME', 'Subframe', 'Sampling is performed by evaluating the bone poses at the subframe time.\n\nNot recommended unless you are also animating with subframes enabled.', 'SUBFRAME', 1),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def sequence_source_update_cb(self: 'PSA_PG_export', context: Context) -> None:
|
||||||
|
armature_objects = []
|
||||||
|
assert context.view_layer
|
||||||
|
for dfs_object in dfs_view_layer_objects(context.view_layer):
|
||||||
|
if dfs_object.obj.type == 'ARMATURE' and dfs_object.is_selected:
|
||||||
|
armature_objects.append(dfs_object.obj)
|
||||||
|
|
||||||
|
populate_bone_collection_list(
|
||||||
|
self.bone_collection_list,
|
||||||
|
armature_objects,
|
||||||
|
primary_key='DATA' if self.sequence_source == 'ACTIVE_ACTION' else 'OBJECT')
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_PG_export(PropertyGroup, TransformMixin, ExportSpaceMixin, PsxBoneExportMixin):
|
||||||
|
should_override_animation_data: BoolProperty(
|
||||||
|
name='Override Animation Data',
|
||||||
|
options=set(),
|
||||||
|
default=False,
|
||||||
|
description='Use the animation data from a different object instead of the selected object',
|
||||||
|
update=animation_data_override_update_cb,
|
||||||
|
)
|
||||||
|
animation_data_override: PointerProperty(
|
||||||
|
type=Object,
|
||||||
|
update=animation_data_override_update_cb,
|
||||||
|
poll=psa_export_property_group_animation_data_override_poll
|
||||||
|
)
|
||||||
|
sequence_source: EnumProperty(
|
||||||
|
name='Source',
|
||||||
|
options=set(),
|
||||||
|
description='',
|
||||||
|
items=sequence_source_items,
|
||||||
|
update=sequence_source_update_cb,
|
||||||
|
)
|
||||||
|
nla_track: StringProperty(
|
||||||
|
name='NLA Track',
|
||||||
|
options=set(),
|
||||||
|
description='',
|
||||||
|
search=nla_track_search_cb,
|
||||||
|
update=nla_track_update_cb
|
||||||
|
)
|
||||||
|
nla_track_index: IntProperty(name='NLA Track Index', default=-1)
|
||||||
|
fps_source: EnumProperty(
|
||||||
|
name='FPS Source',
|
||||||
|
options=set(),
|
||||||
|
description='',
|
||||||
|
items=fps_source_items,
|
||||||
|
)
|
||||||
|
fps_custom: FloatProperty(default=30.0, min=sys.float_info.epsilon, soft_min=1.0, options=set(), step=100, soft_max=60.0)
|
||||||
|
compression_ratio_source: EnumProperty(
|
||||||
|
name='Compression Ratio Source',
|
||||||
|
options=set(),
|
||||||
|
description='',
|
||||||
|
items=compression_ratio_source_items,
|
||||||
|
)
|
||||||
|
compression_ratio_custom: FloatProperty(default=1.0, min=0.0, max=1.0, subtype='FACTOR', description='The key sampling ratio of the exported sequence.\n\nA compression ratio of 1.0 will export all frames, while a compression ratio of 0.5 will export half of the frames')
|
||||||
|
action_list: CollectionProperty(type=PSA_PG_export_action_list_item)
|
||||||
|
action_list_index: IntProperty(default=0)
|
||||||
|
marker_list: CollectionProperty(type=PSA_PG_export_timeline_markers)
|
||||||
|
marker_list_index: IntProperty(default=0)
|
||||||
|
nla_strip_list: CollectionProperty(type=PSA_PG_export_nla_strip_list_item)
|
||||||
|
nla_strip_list_index: IntProperty(default=0)
|
||||||
|
active_action_list: CollectionProperty(type=PSA_PG_export_active_action_list_item)
|
||||||
|
active_action_list_index: IntProperty(default=0)
|
||||||
|
sequence_name_prefix: StringProperty(name='Prefix', options=set())
|
||||||
|
sequence_name_suffix: StringProperty(name='Suffix', options=set())
|
||||||
|
sequence_filter_name: StringProperty(
|
||||||
|
default='',
|
||||||
|
name='Filter by Name',
|
||||||
|
options={'TEXTEDIT_UPDATE'},
|
||||||
|
description='Only show items matching this name (use \'*\' as wildcard)')
|
||||||
|
sequence_use_filter_invert: BoolProperty(
|
||||||
|
default=False,
|
||||||
|
name='Invert',
|
||||||
|
options=set(),
|
||||||
|
description='Invert filtering (show hidden items, and vice versa)')
|
||||||
|
sequence_filter_asset: BoolProperty(
|
||||||
|
default=False,
|
||||||
|
name='Show assets',
|
||||||
|
options=set(),
|
||||||
|
description='Show actions that belong to an asset library')
|
||||||
|
sequence_filter_pose_marker: BoolProperty(
|
||||||
|
default=True,
|
||||||
|
name='Show pose markers',
|
||||||
|
options=set())
|
||||||
|
sequence_use_filter_sort_reverse: BoolProperty(default=True, options=set())
|
||||||
|
sequence_filter_reversed: BoolProperty(
|
||||||
|
default=True,
|
||||||
|
options=set(),
|
||||||
|
name='Show Reversed',
|
||||||
|
description='Show reversed sequences'
|
||||||
|
)
|
||||||
|
sampling_mode: EnumProperty(
|
||||||
|
name='Sampling Mode',
|
||||||
|
options=set(),
|
||||||
|
description='The method by which frames are sampled',
|
||||||
|
items=sampling_mode_items,
|
||||||
|
default='INTERPOLATED'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def filter_sequences(pg: PSA_PG_export, sequences) -> List[int]:
|
||||||
|
bitflag_filter_item = 1 << 30
|
||||||
|
flt_flags = [bitflag_filter_item] * len(sequences)
|
||||||
|
|
||||||
|
if pg.sequence_filter_name:
|
||||||
|
# Filter name is non-empty.
|
||||||
|
for i, sequence in enumerate(sequences):
|
||||||
|
if not fnmatch(sequence.name, f'*{pg.sequence_filter_name}*'):
|
||||||
|
flt_flags[i] &= ~bitflag_filter_item
|
||||||
|
|
||||||
|
# Invert filter flags for all items.
|
||||||
|
if pg.sequence_use_filter_invert:
|
||||||
|
for i, sequence in enumerate(sequences):
|
||||||
|
flt_flags[i] ^= bitflag_filter_item
|
||||||
|
|
||||||
|
if not pg.sequence_filter_asset:
|
||||||
|
for i, sequence in enumerate(sequences):
|
||||||
|
if hasattr(sequence, 'action') and sequence.action is not None and sequence.action.asset_data is not None:
|
||||||
|
flt_flags[i] &= ~bitflag_filter_item
|
||||||
|
|
||||||
|
if not pg.sequence_filter_pose_marker:
|
||||||
|
for i, sequence in enumerate(sequences):
|
||||||
|
if hasattr(sequence, 'is_pose_marker') and sequence.is_pose_marker:
|
||||||
|
flt_flags[i] &= ~bitflag_filter_item
|
||||||
|
|
||||||
|
if not pg.sequence_filter_reversed:
|
||||||
|
for i, sequence in enumerate(sequences):
|
||||||
|
if sequence.frame_start > sequence.frame_end:
|
||||||
|
flt_flags[i] &= ~bitflag_filter_item
|
||||||
|
|
||||||
|
return flt_flags
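filter_sequences returns one flag per item using Blender's UIList convention, where bit 30 marks a visible item. A small sketch with stand-in objects (plain namespaces rather than property groups), assuming filter_sequences from this module is in scope:

from types import SimpleNamespace

pg = SimpleNamespace(sequence_filter_name='Walk', sequence_use_filter_invert=False,
                     sequence_filter_asset=True, sequence_filter_pose_marker=True,
                     sequence_filter_reversed=True)
sequences = [SimpleNamespace(name='Walk_Fwd', frame_start=1, frame_end=30),
             SimpleNamespace(name='Run', frame_start=1, frame_end=20)]

flags = filter_sequences(pg, sequences)
print([bool(flag & (1 << 30)) for flag in flags])  # [True, False]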
|
||||||
|
|
||||||
|
|
||||||
|
_classes = (
|
||||||
|
PSA_PG_export_action_list_item,
|
||||||
|
PSA_PG_export_timeline_markers,
|
||||||
|
PSA_PG_export_nla_strip_list_item,
|
||||||
|
PSA_PG_export_active_action_list_item,
|
||||||
|
PSA_PG_export,
|
||||||
|
)
|
||||||
|
|
||||||
|
from bpy.utils import register_classes_factory
|
||||||
|
register, unregister = register_classes_factory(_classes)
|
||||||
|
|
||||||
io_scene_psk_psa/psa/export/ui.py (new file, 61 lines)
@@ -0,0 +1,61 @@
from typing import cast as typing_cast

from bpy.types import UIList

from .properties import PSA_PG_export_action_list_item, filter_sequences


class PSA_UL_export_sequences(UIList):
    bl_idname = 'PSA_UL_export_sequences'

    def __init__(self, *args, **kwargs):
        super(PSA_UL_export_sequences, self).__init__(*args, **kwargs)
        # Show the filtering options by default.
        self.use_filter_show = True

    def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
        item = typing_cast(PSA_PG_export_action_list_item, item)

        is_pose_marker = hasattr(item, 'is_pose_marker') and item.is_pose_marker
        layout.prop(item, 'is_selected', icon_only=True, text=item.name)
        if hasattr(item, 'action') and item.action is not None and item.action.asset_data is not None:
            layout.label(text='', icon='ASSET_MANAGER')

        row = layout.row(align=True)
        row.alignment = 'RIGHT'

        row.label(text=str(abs(item.frame_end - item.frame_start) + 1), icon='FRAME_PREV' if item.frame_end < item.frame_start else 'KEYFRAME')

        if hasattr(item, 'armature_object') and item.armature_object is not None:
            row.label(text=item.armature_object.name, icon='ARMATURE_DATA')

        # row.label(text=item.action.name, icon='PMARKER' if is_pose_marker else 'ACTION_DATA')

    def draw_filter(self, context, layout):
        pg = getattr(context.scene, 'psa_export')
        row = layout.row()
        subrow = row.row(align=True)
        subrow.prop(pg, 'sequence_filter_name', text='')
        subrow.prop(pg, 'sequence_use_filter_invert', text='', icon='ARROW_LEFTRIGHT')

        if pg.sequence_source == 'ACTIONS':
            subrow = row.row(align=True)
            subrow.prop(pg, 'sequence_filter_asset', icon_only=True, icon='ASSET_MANAGER')
            subrow.prop(pg, 'sequence_filter_pose_marker', icon_only=True, icon='PMARKER')
            subrow.prop(pg, 'sequence_filter_reversed', text='', icon='FRAME_PREV')

    def filter_items(self, context, data, prop):
        pg = getattr(context.scene, 'psa_export')
        actions = getattr(data, prop)
        flt_flags = filter_sequences(pg, actions)
        flt_neworder = list(range(len(actions)))
        return flt_flags, flt_neworder


_classes = (
    PSA_UL_export_sequences,
)

from bpy.utils import register_classes_factory
register, unregister = register_classes_factory(_classes)
@@ -1,499 +0,0 @@
|
|||||||
import fnmatch
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from collections import Counter
|
|
||||||
from typing import Type
|
|
||||||
|
|
||||||
import bpy
|
|
||||||
from bpy.props import BoolProperty, CollectionProperty, EnumProperty, FloatProperty, IntProperty, PointerProperty, \
|
|
||||||
StringProperty
|
|
||||||
from bpy.types import Action, Operator, PropertyGroup, UIList
|
|
||||||
from bpy_extras.io_utils import ExportHelper
|
|
||||||
|
|
||||||
from .builder import PsaBuilder, PsaBuilderOptions
|
|
||||||
from .data import *
|
|
||||||
from ..helpers import *
|
|
||||||
from ..types import BoneGroupListItem
|
|
||||||
|
|
||||||
|
|
||||||
class PsaExporter(object):
|
|
||||||
def __init__(self, psa: Psa):
|
|
||||||
self.psa: Psa = psa
|
|
||||||
|
|
||||||
# This method is shared by both PSA/K file formats, move this?
|
|
||||||
@staticmethod
|
|
||||||
def write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None):
|
|
||||||
section = Section()
|
|
||||||
section.name = name
|
|
||||||
if data_type is not None and data is not None:
|
|
||||||
section.data_size = sizeof(data_type)
|
|
||||||
section.data_count = len(data)
|
|
||||||
fp.write(section)
|
|
||||||
if data is not None:
|
|
||||||
for datum in data:
|
|
||||||
fp.write(datum)
|
|
||||||
|
|
||||||
def export(self, path: str):
|
|
||||||
with open(path, 'wb') as fp:
|
|
||||||
self.write_section(fp, b'ANIMHEAD')
|
|
||||||
self.write_section(fp, b'BONENAMES', Psa.Bone, self.psa.bones)
|
|
||||||
self.write_section(fp, b'ANIMINFO', Psa.Sequence, list(self.psa.sequences.values()))
|
|
||||||
self.write_section(fp, b'ANIMKEYS', Psa.Key, self.psa.keys)
|
|
||||||
|
|
||||||
|
|
||||||
class PsaExportActionListItem(PropertyGroup):
|
|
||||||
action: PointerProperty(type=Action)
|
|
||||||
name: StringProperty()
|
|
||||||
is_selected: BoolProperty(default=False)
|
|
||||||
|
|
||||||
|
|
||||||
class PsaExportTimelineMarkerListItem(PropertyGroup):
|
|
||||||
marker_index: IntProperty()
|
|
||||||
name: StringProperty()
|
|
||||||
is_selected: BoolProperty(default=True)
|
|
||||||
|
|
||||||
|
|
||||||
def update_action_names(context):
|
|
||||||
pg = context.scene.psa_export
|
|
||||||
for item in pg.action_list:
|
|
||||||
action = item.action
|
|
||||||
item.action_name = get_psa_sequence_name(action, pg.should_use_original_sequence_names)
|
|
||||||
|
|
||||||
|
|
||||||
def should_use_original_sequence_names_updated(_, context):
|
|
||||||
update_action_names(context)
|
|
||||||
|
|
||||||
|
|
||||||
class PsaExportPropertyGroup(PropertyGroup):
|
|
||||||
sequence_source: EnumProperty(
|
|
||||||
name='Source',
|
|
||||||
options=set(),
|
|
||||||
description='',
|
|
||||||
items=(
|
|
||||||
('ACTIONS', 'Actions', 'Sequences will be exported using actions', 'ACTION', 0),
|
|
||||||
('TIMELINE_MARKERS', 'Timeline Markers', 'Sequences will be exported using timeline markers', 'MARKER_HLT',
|
|
||||||
1),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
fps_source: EnumProperty(
|
|
||||||
name='FPS Source',
|
|
||||||
options=set(),
|
|
||||||
description='',
|
|
||||||
items=(
|
|
||||||
('SCENE', 'Scene', '', 'SCENE_DATA', 0),
|
|
||||||
('ACTION_METADATA', 'Action Metadata',
|
|
||||||
'The frame rate will be determined by action\'s "psa_sequence_fps" custom property, if it exists. If the Sequence Source is Timeline Markers, the lowest value of all contributing actions will be used. If no metadata is available, the scene\'s frame rate will be used.',
|
|
||||||
'PROPERTIES', 1),
|
|
||||||
('CUSTOM', 'Custom', '', 2)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
fps_custom: FloatProperty(default=30.0, min=sys.float_info.epsilon, soft_min=1.0, options=set(), step=100,
|
|
||||||
soft_max=60.0)
|
|
||||||
action_list: CollectionProperty(type=PsaExportActionListItem)
|
|
||||||
action_list_index: IntProperty(default=0)
|
|
||||||
marker_list: CollectionProperty(type=PsaExportTimelineMarkerListItem)
|
|
||||||
marker_list_index: IntProperty(default=0)
|
|
||||||
bone_filter_mode: EnumProperty(
|
|
||||||
name='Bone Filter',
|
|
||||||
options=set(),
|
|
||||||
description='',
|
|
||||||
items=(
|
|
||||||
('ALL', 'All', 'All bones will be exported.'),
|
|
||||||
('BONE_GROUPS', 'Bone Groups', 'Only bones belonging to the selected bone groups and their ancestors will '
|
|
||||||
'be exported.'),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
bone_group_list: CollectionProperty(type=BoneGroupListItem)
|
|
||||||
bone_group_list_index: IntProperty(default=0, name='', description='')
|
|
||||||
should_use_original_sequence_names: BoolProperty(
|
|
||||||
default=False,
|
|
||||||
name='Original Names',
|
|
||||||
options=set(),
|
|
||||||
update=should_use_original_sequence_names_updated,
|
|
||||||
description='If the action was imported from the PSA Import panel, the original name of the sequence will be '
|
|
||||||
'used instead of the Blender action name',
|
|
||||||
)
|
|
||||||
should_trim_timeline_marker_sequences: BoolProperty(
|
|
||||||
default=True,
|
|
||||||
name='Trim Sequences',
|
|
||||||
options=set(),
|
|
||||||
description='Frames without NLA track information at the boundaries of timeline markers will be excluded from '
|
|
||||||
'the exported sequences'
|
|
||||||
)
|
|
||||||
sequence_name_prefix: StringProperty(name='Prefix', options=set())
|
|
||||||
sequence_name_suffix: StringProperty(name='Suffix', options=set())
|
|
||||||
sequence_filter_name: StringProperty(default='', options={'TEXTEDIT_UPDATE'})
|
|
||||||
sequence_use_filter_invert: BoolProperty(default=False, options=set())
|
|
||||||
sequence_filter_asset: BoolProperty(default=False, name='Show assets',
|
|
||||||
description='Show actions that belong to an asset library', options=set())
|
|
||||||
sequence_use_filter_sort_reverse: BoolProperty(default=True, options=set())
|
|
||||||
|
|
||||||
|
|
||||||
def is_bone_filter_mode_item_available(context, identifier):
|
|
||||||
if identifier == 'BONE_GROUPS':
|
|
||||||
obj = context.active_object
|
|
||||||
if obj is None or not obj.pose or not obj.pose.bone_groups:
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
class PsaExportOperator(Operator, ExportHelper):
|
|
||||||
bl_idname = 'psa_export.operator'
|
|
||||||
bl_label = 'Export'
|
|
||||||
bl_options = {'INTERNAL', 'UNDO'}
|
|
||||||
__doc__ = 'Export actions to PSA'
|
|
||||||
filename_ext = '.psa'
|
|
||||||
filter_glob: StringProperty(default='*.psa', options={'HIDDEN'})
|
|
||||||
filepath: StringProperty(
|
|
||||||
name='File Path',
|
|
||||||
description='File path used for exporting the PSA file',
|
|
||||||
maxlen=1024,
|
|
||||||
default='')
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.armature = None
|
|
||||||
|
|
||||||
def draw(self, context):
|
|
||||||
layout = self.layout
|
|
||||||
pg = context.scene.psa_export
|
|
||||||
|
|
||||||
# FPS
|
|
||||||
layout.prop(pg, 'fps_source', text='FPS')
|
|
||||||
if pg.fps_source == 'CUSTOM':
|
|
||||||
layout.prop(pg, 'fps_custom', text='Custom')
|
|
||||||
|
|
||||||
# SOURCE
|
|
||||||
layout.prop(pg, 'sequence_source', text='Source')
|
|
||||||
|
|
||||||
# SELECT ALL/NONE
|
|
||||||
row = layout.row(align=True)
|
|
||||||
row.label(text='Select')
|
|
||||||
row.operator(PsaExportActionsSelectAll.bl_idname, text='All', icon='CHECKBOX_HLT')
|
|
||||||
row.operator(PsaExportActionsDeselectAll.bl_idname, text='None', icon='CHECKBOX_DEHLT')
|
|
||||||
|
|
||||||
# ACTIONS
|
|
||||||
if pg.sequence_source == 'ACTIONS':
|
|
||||||
rows = max(3, min(len(pg.action_list), 10))
|
|
||||||
|
|
||||||
layout.template_list('PSA_UL_ExportActionList', '', pg, 'action_list', pg, 'action_list_index', rows=rows)
|
|
||||||
|
|
||||||
col = layout.column()
|
|
||||||
col.use_property_split = True
|
|
||||||
col.use_property_decorate = False
|
|
||||||
col.prop(pg, 'should_use_original_sequence_names')
|
|
||||||
col.prop(pg, 'sequence_name_prefix')
|
|
||||||
col.prop(pg, 'sequence_name_suffix')
|
|
||||||
|
|
||||||
elif pg.sequence_source == 'TIMELINE_MARKERS':
|
|
||||||
rows = max(3, min(len(pg.marker_list), 10))
|
|
||||||
layout.template_list('PSA_UL_ExportTimelineMarkerList', '', pg, 'marker_list', pg, 'marker_list_index',
|
|
||||||
rows=rows)
|
|
||||||
|
|
||||||
col = layout.column()
|
|
||||||
col.use_property_split = True
|
|
||||||
col.use_property_decorate = False
|
|
||||||
col.prop(pg, 'should_trim_timeline_marker_sequences')
|
|
||||||
col.prop(pg, 'sequence_name_prefix')
|
|
||||||
col.prop(pg, 'sequence_name_suffix')
|
|
||||||
|
|
||||||
# Determine if there is going to be a naming conflict and display an error, if so.
|
|
||||||
selected_items = [x for x in pg.action_list if x.is_selected]
|
|
||||||
action_names = [x.name for x in selected_items]
|
|
||||||
action_name_counts = Counter(action_names)
|
|
||||||
for action_name, count in action_name_counts.items():
|
|
||||||
if count > 1:
|
|
||||||
layout.label(text=f'Duplicate action: {action_name}', icon='ERROR')
|
|
||||||
break
|
|
||||||
|
|
||||||
layout.separator()
|
|
||||||
|
|
||||||
# BONES
|
|
||||||
row = layout.row(align=True)
|
|
||||||
row.prop(pg, 'bone_filter_mode', text='Bones')
|
|
||||||
|
|
||||||
if pg.bone_filter_mode == 'BONE_GROUPS':
|
|
||||||
row = layout.row(align=True)
|
|
||||||
row.label(text='Select')
|
|
||||||
row.operator(PsaExportBoneGroupsSelectAll.bl_idname, text='All', icon='CHECKBOX_HLT')
|
|
||||||
row.operator(PsaExportBoneGroupsDeselectAll.bl_idname, text='None', icon='CHECKBOX_DEHLT')
|
|
||||||
rows = max(3, min(len(pg.bone_group_list), 10))
|
|
||||||
layout.template_list('PSX_UL_BoneGroupList', '', pg, 'bone_group_list', pg, 'bone_group_list_index',
|
|
||||||
rows=rows)
|
|
||||||
|
|
||||||
def should_action_be_selected_by_default(self, action):
|
|
||||||
return action is not None and action.asset_data is None
|
|
||||||
|
|
||||||
def is_action_for_armature(self, action):
|
|
||||||
if len(action.fcurves) == 0:
|
|
||||||
return False
|
|
||||||
bone_names = set([x.name for x in self.armature.data.bones])
|
|
||||||
for fcurve in action.fcurves:
|
|
||||||
match = re.match(r'pose\.bones\["(.+)"\].\w+', fcurve.data_path)
|
|
||||||
if not match:
|
|
||||||
continue
|
|
||||||
bone_name = match.group(1)
|
|
||||||
if bone_name in bone_names:
|
|
||||||
return True
|
|
||||||
return False
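# Note that is_action_for_armature() keys off f-curve data paths of the form
# 'pose.bones["BoneName"].location' or 'pose.bones["BoneName"].rotation_quaternion': an action is
# offered for export as soon as at least one such path references a bone that exists on the
# active armature.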
|
|
||||||
|
|
||||||
def invoke(self, context, event):
|
|
||||||
pg = context.scene.psa_export
|
|
||||||
|
|
||||||
if context.view_layer.objects.active is None:
|
|
||||||
self.report({'ERROR_INVALID_CONTEXT'}, 'An armature must be selected')
|
|
||||||
return {'CANCELLED'}
|
|
||||||
|
|
||||||
if context.view_layer.objects.active.type != 'ARMATURE':
|
|
||||||
self.report({'ERROR_INVALID_CONTEXT'}, 'The selected object must be an armature.')
|
|
||||||
return {'CANCELLED'}
|
|
||||||
|
|
||||||
self.armature = context.view_layer.objects.active
|
|
||||||
|
|
||||||
# Populate actions list.
|
|
||||||
pg.action_list.clear()
|
|
||||||
for action in bpy.data.actions:
|
|
||||||
if not self.is_action_for_armature(action):
|
|
||||||
continue
|
|
||||||
item = pg.action_list.add()
|
|
||||||
item.action = action
|
|
||||||
item.name = action.name
|
|
||||||
item.is_selected = self.should_action_be_selected_by_default(action)
|
|
||||||
|
|
||||||
update_action_names(context)
|
|
||||||
|
|
||||||
# Populate timeline markers list.
|
|
||||||
pg.marker_list.clear()
|
|
||||||
for marker in context.scene.timeline_markers:
|
|
||||||
item = pg.marker_list.add()
|
|
||||||
item.name = marker.name
|
|
||||||
|
|
||||||
if len(pg.action_list) == 0 and len(pg.marker_list) == 0:
|
|
||||||
# If there are no actions or timeline markers at all, we have nothing to export, so just cancel the operation.
|
|
||||||
self.report({'ERROR_INVALID_CONTEXT'}, 'There are no actions or timeline markers to export.')
|
|
||||||
return {'CANCELLED'}
|
|
||||||
|
|
||||||
# Populate bone groups list.
|
|
||||||
populate_bone_group_list(self.armature, pg.bone_group_list)
|
|
||||||
|
|
||||||
context.window_manager.fileselect_add(self)
|
|
||||||
|
|
||||||
return {'RUNNING_MODAL'}
|
|
||||||
|
|
||||||
def execute(self, context):
|
|
||||||
pg = context.scene.psa_export
|
|
||||||
|
|
||||||
actions = [x.action for x in pg.action_list if x.is_selected]
|
|
||||||
marker_names = [x.name for x in pg.marker_list if x.is_selected]
|
|
||||||
|
|
||||||
options = PsaBuilderOptions()
|
|
||||||
options.fps_source = pg.fps_source
|
|
||||||
options.fps_custom = pg.fps_custom
|
|
||||||
options.sequence_source = pg.sequence_source
|
|
||||||
options.actions = actions
|
|
||||||
options.marker_names = marker_names
|
|
||||||
options.bone_filter_mode = pg.bone_filter_mode
|
|
||||||
options.bone_group_indices = [x.index for x in pg.bone_group_list if x.is_selected]
|
|
||||||
options.should_use_original_sequence_names = pg.should_use_original_sequence_names
|
|
||||||
options.should_trim_timeline_marker_sequences = pg.should_trim_timeline_marker_sequences
|
|
||||||
options.sequence_name_prefix = pg.sequence_name_prefix
|
|
||||||
options.sequence_name_suffix = pg.sequence_name_suffix
|
|
||||||
|
|
||||||
builder = PsaBuilder()
|
|
||||||
|
|
||||||
try:
|
|
||||||
psa = builder.build(context, options)
|
|
||||||
except RuntimeError as e:
|
|
||||||
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
|
||||||
return {'CANCELLED'}
|
|
||||||
|
|
||||||
exporter = PsaExporter(psa)
|
|
||||||
exporter.export(self.filepath)
|
|
||||||
return {'FINISHED'}
|
|
||||||
|
|
||||||
|
|
||||||
class PSA_UL_ExportTimelineMarkerList(UIList):
|
|
||||||
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
|
|
||||||
layout.prop(item, 'is_selected', icon_only=True, text=item.name)
|
|
||||||
|
|
||||||
def filter_items(self, context, data, property):
|
|
||||||
pg = context.scene.psa_export
|
|
||||||
sequences = getattr(data, property)
|
|
||||||
flt_flags = filter_sequences(pg, sequences)
|
|
||||||
flt_neworder = bpy.types.UI_UL_list.sort_items_by_name(sequences, 'name')
|
|
||||||
return flt_flags, flt_neworder
|
|
||||||
|
|
||||||
|
|
||||||
def filter_sequences(pg: PsaExportPropertyGroup, sequences: bpy.types.bpy_prop_collection) -> List[int]:
|
|
||||||
bitflag_filter_item = 1 << 30
|
|
||||||
flt_flags = [bitflag_filter_item] * len(sequences)
|
|
||||||
|
|
||||||
if pg.sequence_filter_name:
|
|
||||||
# Filter name is non-empty.
|
|
||||||
for i, sequence in enumerate(sequences):
|
|
||||||
if not fnmatch.fnmatch(sequence.name, f'*{pg.sequence_filter_name}*'):
|
|
||||||
flt_flags[i] &= ~bitflag_filter_item
|
|
||||||
|
|
||||||
if not pg.sequence_filter_asset:
|
|
||||||
for i, sequence in enumerate(sequences):
|
|
||||||
if hasattr(sequence, 'action') and sequence.action.asset_data is not None:
|
|
||||||
flt_flags[i] &= ~bitflag_filter_item
|
|
||||||
|
|
||||||
if pg.sequence_use_filter_invert:
|
|
||||||
# Invert filter flags for all items.
|
|
||||||
for i, sequence in enumerate(sequences):
|
|
||||||
flt_flags[i] ^= bitflag_filter_item
|
|
||||||
|
|
||||||
return flt_flags
|
|
||||||
|
|
||||||
|
|
||||||
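# Blender's UIList filtering protocol: filter_items() returns one integer per item, and an item
# is shown only when bit 1 << 30 (the value of UIList.bitflag_filter_item) is set. The helper
# below applies the same filter outside of a UIList so that Select All/Deselect All only touch
# the rows that are currently visible.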
def get_visible_sequences(pg: PsaExportPropertyGroup, sequences: bpy.types.bpy_prop_collection) -> List[
|
|
||||||
PsaExportActionListItem]:
|
|
||||||
visible_sequences = []
|
|
||||||
for i, flag in enumerate(filter_sequences(pg, sequences)):
|
|
||||||
if bool(flag & (1 << 30)):
|
|
||||||
visible_sequences.append(sequences[i])
|
|
||||||
return visible_sequences
|
|
||||||
|
|
||||||
|
|
||||||
class PSA_UL_ExportActionList(UIList):
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
super(PSA_UL_ExportActionList, self).__init__()
|
|
||||||
# Show the filtering options by default.
|
|
||||||
self.use_filter_show = True
|
|
||||||
|
|
||||||
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
|
|
||||||
layout.prop(item, 'is_selected', icon_only=True, text=item.name)
|
|
||||||
if item.action.asset_data is not None:
|
|
||||||
layout.label(text='', icon='ASSET_MANAGER')
|
|
||||||
|
|
||||||
def draw_filter(self, context, layout):
|
|
||||||
pg = context.scene.psa_export
|
|
||||||
row = layout.row()
|
|
||||||
subrow = row.row(align=True)
|
|
||||||
subrow.prop(pg, 'sequence_filter_name', text="")
|
|
||||||
subrow.prop(pg, 'sequence_use_filter_invert', text="", icon='ARROW_LEFTRIGHT')
|
|
||||||
subrow = row.row(align=True)
|
|
||||||
subrow.prop(pg, 'sequence_filter_asset', icon_only=True, icon='ASSET_MANAGER')
|
|
||||||
# subrow.prop(pg, 'sequence_use_filter_sort_reverse', text='', icon='SORT_ASC')
|
|
||||||
|
|
||||||
def filter_items(self, context, data, property):
|
|
||||||
pg = context.scene.psa_export
|
|
||||||
actions = getattr(data, property)
|
|
||||||
flt_flags = filter_sequences(pg, actions)
|
|
||||||
flt_neworder = bpy.types.UI_UL_list.sort_items_by_name(actions, 'name')
|
|
||||||
return flt_flags, flt_neworder
|
|
||||||
|
|
||||||
|
|
||||||
class PsaExportActionsSelectAll(Operator):
|
|
||||||
bl_idname = 'psa_export.sequences_select_all'
|
|
||||||
bl_label = 'Select All'
|
|
||||||
bl_description = 'Select all visible sequences'
|
|
||||||
bl_options = {'INTERNAL'}
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get_item_list(cls, context):
|
|
||||||
pg = context.scene.psa_export
|
|
||||||
if pg.sequence_source == 'ACTIONS':
|
|
||||||
return pg.action_list
|
|
||||||
elif pg.sequence_source == 'TIMELINE_MARKERS':
|
|
||||||
return pg.marker_list
|
|
||||||
return None
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def poll(cls, context):
|
|
||||||
pg = context.scene.psa_export
|
|
||||||
item_list = cls.get_item_list(context)
|
|
||||||
visible_sequences = get_visible_sequences(pg, item_list)
|
|
||||||
has_unselected_sequences = any(map(lambda item: not item.is_selected, visible_sequences))
|
|
||||||
return has_unselected_sequences
|
|
||||||
|
|
||||||
def execute(self, context):
|
|
||||||
pg = context.scene.psa_export
|
|
||||||
sequences = self.get_item_list(context)
|
|
||||||
for sequence in get_visible_sequences(pg, sequences):
|
|
||||||
sequence.is_selected = True
|
|
||||||
return {'FINISHED'}
|
|
||||||
|
|
||||||
|
|
||||||
class PsaExportActionsDeselectAll(Operator):
|
|
||||||
bl_idname = 'psa_export.sequences_deselect_all'
|
|
||||||
bl_label = 'Deselect All'
|
|
||||||
bl_description = 'Deselect all visible sequences'
|
|
||||||
bl_options = {'INTERNAL'}
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get_item_list(cls, context):
|
|
||||||
pg = context.scene.psa_export
|
|
||||||
if pg.sequence_source == 'ACTIONS':
|
|
||||||
return pg.action_list
|
|
||||||
elif pg.sequence_source == 'TIMELINE_MARKERS':
|
|
||||||
return pg.marker_list
|
|
||||||
return None
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def poll(cls, context):
|
|
||||||
item_list = cls.get_item_list(context)
|
|
||||||
has_selected_items = any(map(lambda item: item.is_selected, item_list))
|
|
||||||
return len(item_list) > 0 and has_selected_items
|
|
||||||
|
|
||||||
def execute(self, context):
|
|
||||||
pg = context.scene.psa_export
|
|
||||||
item_list = self.get_item_list(context)
|
|
||||||
for sequence in get_visible_sequences(pg, item_list):
|
|
||||||
sequence.is_selected = False
|
|
||||||
return {'FINISHED'}
|
|
||||||
|
|
||||||
|
|
||||||
class PsaExportBoneGroupsSelectAll(Operator):
|
|
||||||
bl_idname = 'psa_export.bone_groups_select_all'
|
|
||||||
bl_label = 'Select All'
|
|
||||||
bl_description = 'Select all bone groups'
|
|
||||||
bl_options = {'INTERNAL'}
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def poll(cls, context):
|
|
||||||
pg = context.scene.psa_export
|
|
||||||
item_list = pg.bone_group_list
|
|
||||||
has_unselected_items = any(map(lambda action: not action.is_selected, item_list))
|
|
||||||
return len(item_list) > 0 and has_unselected_items
|
|
||||||
|
|
||||||
def execute(self, context):
|
|
||||||
pg = context.scene.psa_export
|
|
||||||
for item in pg.bone_group_list:
|
|
||||||
item.is_selected = True
|
|
||||||
return {'FINISHED'}
|
|
||||||
|
|
||||||
|
|
||||||
class PsaExportBoneGroupsDeselectAll(Operator):
|
|
||||||
bl_idname = 'psa_export.bone_groups_deselect_all'
|
|
||||||
bl_label = 'Deselect All'
|
|
||||||
bl_description = 'Deselect all bone groups'
|
|
||||||
bl_options = {'INTERNAL'}
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def poll(cls, context):
|
|
||||||
pg = context.scene.psa_export
|
|
||||||
item_list = pg.bone_group_list
|
|
||||||
has_selected_actions = any(map(lambda action: action.is_selected, item_list))
|
|
||||||
return len(item_list) > 0 and has_selected_actions
|
|
||||||
|
|
||||||
def execute(self, context):
|
|
||||||
pg = context.scene.psa_export
|
|
||||||
for action in pg.bone_group_list:
|
|
||||||
action.is_selected = False
|
|
||||||
return {'FINISHED'}
|
|
||||||
|
|
||||||
|
|
||||||
classes = (
|
|
||||||
PsaExportActionListItem,
|
|
||||||
PsaExportTimelineMarkerListItem,
|
|
||||||
PsaExportPropertyGroup,
|
|
||||||
PsaExportOperator,
|
|
||||||
PSA_UL_ExportActionList,
|
|
||||||
PSA_UL_ExportTimelineMarkerList,
|
|
||||||
PsaExportActionsSelectAll,
|
|
||||||
PsaExportActionsDeselectAll,
|
|
||||||
PsaExportBoneGroupsSelectAll,
|
|
||||||
PsaExportBoneGroupsDeselectAll,
|
|
||||||
)
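# A minimal registration sketch (assumed; the add-on's actual register()/unregister() functions
# live elsewhere and also attach the property group to the scene as 'psa_export', which is how
# context.scene.psa_export is resolved above):
#
#   def register():
#       for cls in classes:
#           bpy.utils.register_class(cls)
#       bpy.types.Scene.psa_export = PointerProperty(type=PsaExportPropertyGroup)
#
#   def unregister():
#       del bpy.types.Scene.psa_export
#       for cls in reversed(classes):
#           bpy.utils.unregister_class(cls)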
|
|
||||||
io_scene_psk_psa/psa/import_/__init__.py (new file, 0 lines)

io_scene_psk_psa/psa/import_/operators.py (new file, 477 lines)
@@ -0,0 +1,477 @@
import os
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Iterable
|
||||||
|
|
||||||
|
from bpy.props import CollectionProperty, StringProperty
|
||||||
|
from bpy.types import Context, Event, FileHandler, Object, Operator, OperatorFileListElement
|
||||||
|
from bpy_extras.io_utils import ImportHelper
|
||||||
|
|
||||||
|
from .properties import PsaImportMixin, get_visible_sequences
|
||||||
|
from ..config import read_psa_config
|
||||||
|
from ..importer import BoneMapping, PsaImportOptions, import_psa
|
||||||
|
from ..reader import PsaReader
|
||||||
|
|
||||||
|
|
||||||
|
def psa_import_poll(cls, context: Context):
|
||||||
|
assert context.view_layer and context.view_layer.objects.active
|
||||||
|
active_object = context.view_layer.objects.active
|
||||||
|
if active_object is None or active_object.type != 'ARMATURE':
|
||||||
|
cls.poll_message_set('The active object must be an armature')
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_OT_import_sequences_select_from_text(Operator):
|
||||||
|
bl_idname = 'psa.import_sequences_select_from_text'
|
||||||
|
bl_label = 'Select By Text List'
|
||||||
|
bl_description = 'Select sequences by name from text list'
|
||||||
|
bl_options = {'INTERNAL', 'UNDO'}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
pg = getattr(context.scene, 'psa_import')
|
||||||
|
return len(pg.sequence_list) > 0
|
||||||
|
|
||||||
|
def invoke(self, context, event):
|
||||||
|
assert context.window_manager
|
||||||
|
return context.window_manager.invoke_props_dialog(self, width=256)
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
layout = self.layout
|
||||||
|
assert layout
|
||||||
|
pg = getattr(context.scene, 'psa_import')
|
||||||
|
layout.label(icon='INFO', text='Each sequence name should be on a new line.')
|
||||||
|
layout.prop(pg, 'select_text', text='')
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
pg = getattr(context.scene, 'psa_import')
|
||||||
|
if pg.select_text is None:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, 'No text block selected')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
contents = pg.select_text.as_string()
|
||||||
|
count = 0
|
||||||
|
for line in contents.split('\n'):
|
||||||
|
for sequence in pg.sequence_list:
|
||||||
|
if sequence.action_name == line:
|
||||||
|
sequence.is_selected = True
|
||||||
|
count += 1
|
||||||
|
self.report({'INFO'}, f'Selected {count} sequence(s)')
|
||||||
|
return {'FINISHED'}
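# Example of a text block this operator consumes: one sequence name per line, compared verbatim
# against each list item's action_name (the names below are illustrative):
#
#   run_forward
#   idle_rifle
#   jump_start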
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_OT_import_sequences_select_all(Operator):
|
||||||
|
bl_idname = 'psa.import_sequences_select_all'
|
||||||
|
bl_label = 'All'
|
||||||
|
bl_description = 'Select all sequences'
|
||||||
|
bl_options = {'INTERNAL'}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
pg = getattr(context.scene, 'psa_import')
|
||||||
|
visible_sequences = get_visible_sequences(pg, pg.sequence_list)
|
||||||
|
has_unselected_actions = any(map(lambda action: not action.is_selected, visible_sequences))
|
||||||
|
return len(visible_sequences) > 0 and has_unselected_actions
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
pg = getattr(context.scene, 'psa_import')
|
||||||
|
visible_sequences = get_visible_sequences(pg, pg.sequence_list)
|
||||||
|
for sequence in visible_sequences:
|
||||||
|
sequence.is_selected = True
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_OT_import_sequences_deselect_all(Operator):
|
||||||
|
bl_idname = 'psa.import_sequences_deselect_all'
|
||||||
|
bl_label = 'None'
|
||||||
|
bl_description = 'Deselect all visible sequences'
|
||||||
|
bl_options = {'INTERNAL'}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
pg = getattr(context.scene, 'psa_import')
|
||||||
|
visible_sequences = get_visible_sequences(pg, pg.sequence_list)
|
||||||
|
has_selected_sequences = any(map(lambda sequence: sequence.is_selected, visible_sequences))
|
||||||
|
return len(visible_sequences) > 0 and has_selected_sequences
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
pg = getattr(context.scene, 'psa_import')
|
||||||
|
visible_sequences = get_visible_sequences(pg, pg.sequence_list)
|
||||||
|
for sequence in visible_sequences:
|
||||||
|
sequence.is_selected = False
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
def load_psa_file(context, filepath: str):
|
||||||
|
pg = context.scene.psa_import
|
||||||
|
pg.sequence_list.clear()
|
||||||
|
pg.psa.bones.clear()
|
||||||
|
pg.psa_error = ''
|
||||||
|
try:
|
||||||
|
# Read the file and populate the action list.
|
||||||
|
p = os.path.abspath(filepath)
|
||||||
|
psa_reader = PsaReader(p)
|
||||||
|
for sequence in psa_reader.sequences.values():
|
||||||
|
item = pg.sequence_list.add()
|
||||||
|
item.action_name = sequence.name.decode('windows-1252')
|
||||||
|
for psa_bone in psa_reader.bones:
|
||||||
|
item = pg.psa.bones.add()
|
||||||
|
item.bone_name = psa_bone.name.decode('windows-1252')
|
||||||
|
except Exception as e:
|
||||||
|
pg.psa_error = str(e)
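# PSA name fields are fixed-width byte strings; they are decoded as windows-1252 here, the same
# encoding used for sequence and bone names elsewhere in the importer.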
|
||||||
|
|
||||||
|
|
||||||
|
def on_psa_file_path_updated(cls, context):
|
||||||
|
load_psa_file(context, cls.filepath)
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_OT_import_drag_and_drop(Operator, PsaImportMixin):
|
||||||
|
bl_idname = 'psa.import_drag_and_drop'
|
||||||
|
bl_label = 'Import PSA'
|
||||||
|
bl_description = 'Import multiple PSA files'
|
||||||
|
bl_options = {'INTERNAL', 'UNDO', 'PRESET'}
|
||||||
|
|
||||||
|
directory: StringProperty(subtype='FILE_PATH', options={'SKIP_SAVE', 'HIDDEN'})
|
||||||
|
files: CollectionProperty(type=OperatorFileListElement, options={'SKIP_SAVE', 'HIDDEN'})
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
warnings = []
|
||||||
|
sequences_count = 0
|
||||||
|
|
||||||
|
assert context.view_layer and context.view_layer.objects.active
|
||||||
|
|
||||||
|
for file in self.files:
|
||||||
|
psa_path = str(os.path.join(self.directory, file.name))
|
||||||
|
psa_reader = PsaReader(psa_path)
|
||||||
|
sequence_names = list(psa_reader.sequences.keys())
|
||||||
|
options = psa_import_options_from_property_group(self, sequence_names)
|
||||||
|
|
||||||
|
sequences_count += len(sequence_names)
|
||||||
|
|
||||||
|
result = _import_psa(context, options, psa_path, context.view_layer.objects.active)
|
||||||
|
warnings.extend(result.warnings)
|
||||||
|
|
||||||
|
if len(warnings) > 0:
|
||||||
|
message = f'Imported {sequences_count} action(s) from {len(self.files)} file(s) with {len(warnings)} warning(s)\n'
|
||||||
|
self.report({'INFO'}, message)
|
||||||
|
for warning in warnings:
|
||||||
|
self.report({'WARNING'}, warning)
|
||||||
|
|
||||||
|
else:
    self.report({'INFO'}, f'Imported {sequences_count} action(s) from {len(self.files)} file(s)')
|
||||||
|
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
def invoke(self, context: Context, event):
|
||||||
|
# Make sure the active object is an armature.
|
||||||
|
assert context.view_layer and context.view_layer.objects.active
|
||||||
|
active_object = context.view_layer.objects.active
|
||||||
|
if active_object is None or active_object.type != 'ARMATURE':
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, 'The active object must be an armature')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
# Show the import operator properties in a pop-up dialog (do not use the file selector).
|
||||||
|
assert context.window_manager
|
||||||
|
context.window_manager.invoke_props_dialog(self)
|
||||||
|
return {'RUNNING_MODAL'}
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
layout = self.layout
|
||||||
|
draw_psa_import_options_no_panels(layout, self)
|
||||||
|
|
||||||
|
|
||||||
|
def psa_import_options_from_property_group(pg: PsaImportMixin, sequence_names: Iterable[str]) -> PsaImportOptions:
|
||||||
|
options = PsaImportOptions()
|
||||||
|
options.sequence_names = list(sequence_names)
|
||||||
|
options.should_use_fake_user = pg.should_use_fake_user
|
||||||
|
options.should_stash = pg.should_stash
|
||||||
|
options.action_name_prefix = pg.action_name_prefix if pg.should_use_action_name_prefix else ''
|
||||||
|
options.should_overwrite = pg.should_overwrite
|
||||||
|
options.should_write_metadata = pg.should_write_metadata
|
||||||
|
options.should_write_keyframes = pg.should_write_keyframes
|
||||||
|
options.should_convert_to_samples = pg.should_convert_to_samples
|
||||||
|
options.bone_mapping = BoneMapping(
|
||||||
|
is_case_sensitive=pg.bone_mapping_is_case_sensitive,
|
||||||
|
should_ignore_trailing_whitespace=pg.bone_mapping_should_ignore_trailing_whitespace
|
||||||
|
)
|
||||||
|
options.fps_source = pg.fps_source
|
||||||
|
options.fps_custom = pg.fps_custom
|
||||||
|
options.translation_scale = pg.translation_scale
|
||||||
|
return options
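# Usage sketch, mirroring PSA_OT_import.execute below: build the options from the operator's own
# properties, restricted to the sequences the user ticked in the UI list:
#
#   sequence_names = [x.action_name for x in pg.sequence_list if x.is_selected]
#   options = psa_import_options_from_property_group(self, sequence_names)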
|
||||||
|
|
||||||
|
|
||||||
|
def _import_psa(context,
|
||||||
|
options: PsaImportOptions,
|
||||||
|
filepath: str,
|
||||||
|
armature_object: Object
|
||||||
|
):
|
||||||
|
warnings = []
|
||||||
|
|
||||||
|
if options.should_use_config_file:
|
||||||
|
# Read the PSA config file if it exists.
|
||||||
|
config_path = Path(filepath).with_suffix('.config')
|
||||||
|
if config_path.exists():
|
||||||
|
try:
|
||||||
|
options.psa_config = read_psa_config(options.sequence_names, str(config_path))
|
||||||
|
except Exception as e:
|
||||||
|
warnings.append(f'Failed to read PSA config file: {e}')
|
||||||
|
|
||||||
|
psa_reader = PsaReader(filepath)
|
||||||
|
|
||||||
|
result = import_psa(context, psa_reader, armature_object, options)
|
||||||
|
result.warnings.extend(warnings)
|
||||||
|
|
||||||
|
return result
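# The optional config is a sidecar file that shares the PSA's path with a '.config' suffix
# (e.g. 'MyAnims.psa' next to 'MyAnims.config'). UEViewer can emit it, and read_psa_config()
# uses it to filter out unwanted bone tracks from the imported sequences.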
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_OT_import_all(Operator, PsaImportMixin):
|
||||||
|
bl_idname = 'psa.import_all'
|
||||||
|
bl_label = 'Import PSA'
|
||||||
|
bl_description = 'Import all sequences from the selected PSA file'
|
||||||
|
bl_options = {'INTERNAL', 'UNDO'}
|
||||||
|
|
||||||
|
filepath: StringProperty(
|
||||||
|
name='File Path',
|
||||||
|
description='File path used for importing the PSA file',
|
||||||
|
maxlen=1024,
|
||||||
|
default='',
|
||||||
|
update=on_psa_file_path_updated)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
return psa_import_poll(cls, context)
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
sequence_names = []
|
||||||
|
with PsaReader(self.filepath) as psa_reader:
|
||||||
|
sequence_names.extend(psa_reader.sequences.keys())
|
||||||
|
|
||||||
|
options = PsaImportOptions(
|
||||||
|
action_name_prefix=self.action_name_prefix,
|
||||||
|
bone_mapping=BoneMapping(
|
||||||
|
is_case_sensitive=self.bone_mapping_is_case_sensitive,
|
||||||
|
should_ignore_trailing_whitespace=self.bone_mapping_should_ignore_trailing_whitespace
|
||||||
|
),
|
||||||
|
fps_custom=self.fps_custom,
|
||||||
|
fps_source=self.fps_source,
|
||||||
|
sequence_names=sequence_names,
|
||||||
|
should_convert_to_samples=self.should_convert_to_samples,
|
||||||
|
should_overwrite=self.should_overwrite,
|
||||||
|
should_stash=self.should_stash,
|
||||||
|
should_use_config_file=self.should_use_config_file,
|
||||||
|
should_use_fake_user=self.should_use_fake_user,
|
||||||
|
should_write_keyframes=self.should_write_keyframes,
|
||||||
|
should_write_metadata=self.should_write_metadata,
|
||||||
|
translation_scale=self.translation_scale
|
||||||
|
)
|
||||||
|
|
||||||
|
assert context.view_layer
|
||||||
|
assert context.view_layer.objects.active
|
||||||
|
result = _import_psa(context, options, self.filepath, context.view_layer.objects.active)
|
||||||
|
|
||||||
|
if len(result.warnings) > 0:
|
||||||
|
message = f'Imported {len(options.sequence_names)} action(s) with {len(result.warnings)} warning(s)\n'
|
||||||
|
self.report({'WARNING'}, message)
|
||||||
|
for warning in result.warnings:
|
||||||
|
self.report({'WARNING'}, warning)
|
||||||
|
else:
|
||||||
|
self.report({'INFO'}, f'Imported {len(options.sequence_names)} action(s)')
|
||||||
|
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
def draw(self, context: Context):
|
||||||
|
draw_psa_import_options_no_panels(self.layout, self)
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_OT_import(Operator, ImportHelper, PsaImportMixin):
|
||||||
|
bl_idname = 'psa.import_file'
|
||||||
|
bl_label = 'Import'
|
||||||
|
bl_description = 'Import the selected animations into the scene as actions'
|
||||||
|
bl_options = {'INTERNAL', 'UNDO'}
|
||||||
|
|
||||||
|
filename_ext = '.psa'
|
||||||
|
filter_glob: StringProperty(default='*.psa', options={'HIDDEN'})
|
||||||
|
filepath: StringProperty(
|
||||||
|
name='File Path',
|
||||||
|
description='File path used for importing the PSA file',
|
||||||
|
maxlen=1024,
|
||||||
|
default='',
|
||||||
|
update=on_psa_file_path_updated)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
return psa_import_poll(cls, context)
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
pg = getattr(context.scene, 'psa_import')
|
||||||
|
options = psa_import_options_from_property_group(self, [x.action_name for x in pg.sequence_list if x.is_selected])
|
||||||
|
|
||||||
|
if len(options.sequence_names) == 0:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, 'No sequences selected')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
result = _import_psa(context, options, self.filepath, context.view_layer.objects.active)
|
||||||
|
|
||||||
|
if len(result.warnings) > 0:
|
||||||
|
message = f'Imported {len(options.sequence_names)} action(s) with {len(result.warnings)} warning(s)\n'
|
||||||
|
self.report({'WARNING'}, message)
|
||||||
|
for warning in result.warnings:
|
||||||
|
self.report({'WARNING'}, warning)
|
||||||
|
else:
|
||||||
|
self.report({'INFO'}, f'Imported {len(options.sequence_names)} action(s)')
|
||||||
|
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
def invoke(self, context: Context, event: Event):
|
||||||
|
# Attempt to load the PSA file for the pre-selected file.
|
||||||
|
load_psa_file(context, self.filepath)
|
||||||
|
assert context.window_manager
|
||||||
|
context.window_manager.fileselect_add(self)
|
||||||
|
return {'RUNNING_MODAL'}
|
||||||
|
|
||||||
|
def draw(self, context: Context):
|
||||||
|
layout = self.layout
|
||||||
|
assert layout
|
||||||
|
pg = getattr(context.scene, 'psa_import')
|
||||||
|
|
||||||
|
sequences_header, sequences_panel = layout.panel('sequences_panel_id', default_closed=False)
|
||||||
|
sequences_header.label(text='Sequences')
|
||||||
|
|
||||||
|
if sequences_panel:
|
||||||
|
if pg.psa_error:
|
||||||
|
row = sequences_panel.row()
|
||||||
|
row.label(text='Select a PSA file', icon='ERROR')
|
||||||
|
else:
|
||||||
|
# Select buttons.
|
||||||
|
rows = max(3, min(len(pg.sequence_list), 10))
|
||||||
|
|
||||||
|
row = sequences_panel.row()
|
||||||
|
col = row.column()
|
||||||
|
|
||||||
|
row2 = col.row(align=True)
|
||||||
|
row2.label(text='Select')
|
||||||
|
row2.operator(PSA_OT_import_sequences_select_from_text.bl_idname, text='', icon='TEXT')
|
||||||
|
row2.operator(PSA_OT_import_sequences_select_all.bl_idname, text='All', icon='CHECKBOX_HLT')
|
||||||
|
row2.operator(PSA_OT_import_sequences_deselect_all.bl_idname, text='None', icon='CHECKBOX_DEHLT')
|
||||||
|
|
||||||
|
col = col.row()
|
||||||
|
col.template_list('PSA_UL_import_sequences', '', pg, 'sequence_list', pg, 'sequence_list_index', rows=rows)
|
||||||
|
|
||||||
|
col = sequences_panel.column(heading='')
|
||||||
|
col.use_property_split = True
|
||||||
|
col.use_property_decorate = False
|
||||||
|
col.prop(self, 'fps_source')
|
||||||
|
if self.fps_source == 'CUSTOM':
|
||||||
|
col.prop(self, 'fps_custom')
|
||||||
|
col.prop(self, 'should_overwrite')
|
||||||
|
col.prop(self, 'should_use_action_name_prefix')
|
||||||
|
if self.should_use_action_name_prefix:
|
||||||
|
col.prop(self, 'action_name_prefix')
|
||||||
|
|
||||||
|
data_header, data_panel = layout.panel('data_panel_id', default_closed=False)
|
||||||
|
data_header.label(text='Data')
|
||||||
|
|
||||||
|
if data_panel:
|
||||||
|
col = data_panel.column(heading='Write')
|
||||||
|
col.use_property_split = True
|
||||||
|
col.use_property_decorate = False
|
||||||
|
col.prop(self, 'should_write_keyframes')
|
||||||
|
col.prop(self, 'should_write_metadata')
|
||||||
|
|
||||||
|
if self.should_write_keyframes:
|
||||||
|
col = col.column(heading='Keyframes')
|
||||||
|
col.use_property_split = True
|
||||||
|
col.use_property_decorate = False
|
||||||
|
col.prop(self, 'should_convert_to_samples')
|
||||||
|
|
||||||
|
advanced_header, advanced_panel = layout.panel('advanced_panel_id', default_closed=True)
|
||||||
|
advanced_header.label(text='Advanced')
|
||||||
|
|
||||||
|
if advanced_panel:
|
||||||
|
bone_mapping_header, bone_mapping_panel = layout.panel('bone_mapping_id', default_closed=False)
|
||||||
|
bone_mapping_header.label(text='Bone Mapping')
|
||||||
|
if bone_mapping_panel:
|
||||||
|
col = bone_mapping_panel.column()
|
||||||
|
col.use_property_split = True
|
||||||
|
col.use_property_decorate = False
|
||||||
|
col.prop(self, 'bone_mapping_is_case_sensitive')
|
||||||
|
col.prop(self, 'bone_mapping_should_ignore_trailing_whitespace')
|
||||||
|
|
||||||
|
col = advanced_panel.column()
|
||||||
|
col.use_property_split = True
|
||||||
|
col.use_property_decorate = False
|
||||||
|
col.prop(self, 'translation_scale', text='Translation Scale')
|
||||||
|
|
||||||
|
col = advanced_panel.column(heading='Options')
|
||||||
|
col.use_property_split = True
|
||||||
|
col.use_property_decorate = False
|
||||||
|
col.prop(self, 'should_use_fake_user')
|
||||||
|
col.prop(self, 'should_stash')
|
||||||
|
col.prop(self, 'should_use_config_file')
|
||||||
|
|
||||||
|
|
||||||
|
def draw_psa_import_options_no_panels(layout, pg: PsaImportMixin):
|
||||||
|
col = layout.column(heading='Sequences')
|
||||||
|
col.use_property_split = True
|
||||||
|
col.use_property_decorate = False
|
||||||
|
col.prop(pg, 'fps_source')
|
||||||
|
if pg.fps_source == 'CUSTOM':
|
||||||
|
col.prop(pg, 'fps_custom')
|
||||||
|
col.prop(pg, 'should_overwrite')
|
||||||
|
col.prop(pg, 'should_use_action_name_prefix')
|
||||||
|
if pg.should_use_action_name_prefix:
|
||||||
|
col.prop(pg, 'action_name_prefix')
|
||||||
|
|
||||||
|
col = layout.column(heading='Write')
|
||||||
|
col.use_property_split = True
|
||||||
|
col.use_property_decorate = False
|
||||||
|
col.prop(pg, 'should_write_keyframes')
|
||||||
|
col.prop(pg, 'should_write_metadata')
|
||||||
|
|
||||||
|
if pg.should_write_keyframes:
|
||||||
|
col = col.column(heading='Keyframes')
|
||||||
|
col.use_property_split = True
|
||||||
|
col.use_property_decorate = False
|
||||||
|
col.prop(pg, 'should_convert_to_samples')
|
||||||
|
|
||||||
|
col = layout.column(heading='Bone Mapping')
|
||||||
|
col.use_property_split = True
|
||||||
|
col.use_property_decorate = False
|
||||||
|
col.prop(pg, 'bone_mapping_is_case_sensitive')
|
||||||
|
col.prop(pg, 'bone_mapping_should_ignore_trailing_whitespace')
|
||||||
|
|
||||||
|
col = layout.column()
|
||||||
|
col.use_property_split = True
|
||||||
|
col.use_property_decorate = False
|
||||||
|
col.prop(pg, 'translation_scale')
|
||||||
|
|
||||||
|
col = layout.column(heading='Options')
|
||||||
|
col.use_property_split = True
|
||||||
|
col.use_property_decorate = False
|
||||||
|
col.prop(pg, 'should_use_fake_user')
|
||||||
|
col.prop(pg, 'should_stash')
|
||||||
|
col.prop(pg, 'should_use_config_file')
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_FH_import(FileHandler): # TODO: rename and add handling for PSA export.
|
||||||
|
bl_idname = 'PSA_FH_import'
|
||||||
|
bl_label = 'File handler for Unreal PSA import'
|
||||||
|
bl_import_operator = PSA_OT_import_drag_and_drop.bl_idname
|
||||||
|
# bl_export_operator = 'psa_export.export'
|
||||||
|
bl_file_extensions = '.psa'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll_drop(cls, context: Context) -> bool:
|
||||||
|
return context.area is not None and context.area.type == 'VIEW_3D'
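# FileHandler is what drives drag-and-drop in recent Blender versions: dropping a .psa file onto
# the 3D viewport invokes PSA_OT_import_drag_and_drop, and poll_drop() above restricts the drop
# target to the 3D view.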
|
||||||
|
|
||||||
|
|
||||||
|
_classes = (
|
||||||
|
PSA_OT_import_sequences_select_all,
|
||||||
|
PSA_OT_import_sequences_deselect_all,
|
||||||
|
PSA_OT_import_sequences_select_from_text,
|
||||||
|
PSA_OT_import,
|
||||||
|
PSA_OT_import_all,
|
||||||
|
PSA_OT_import_drag_and_drop,
|
||||||
|
PSA_FH_import,
|
||||||
|
)
|
||||||
|
|
||||||
|
from bpy.utils import register_classes_factory
|
||||||
|
register, unregister = register_classes_factory(_classes)
|
||||||
io_scene_psk_psa/psa/import_/properties.py (new file, 187 lines)
@@ -0,0 +1,187 @@
import re
|
||||||
|
from fnmatch import fnmatch
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from bpy.props import (
|
||||||
|
BoolProperty,
|
||||||
|
CollectionProperty,
|
||||||
|
EnumProperty,
|
||||||
|
FloatProperty,
|
||||||
|
IntProperty,
|
||||||
|
PointerProperty,
|
||||||
|
StringProperty,
|
||||||
|
)
|
||||||
|
from bpy.types import PropertyGroup, Text
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_PG_import_action_list_item(PropertyGroup):
|
||||||
|
action_name: StringProperty(options=set())
|
||||||
|
is_selected: BoolProperty(default=True, options=set())
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_PG_bone(PropertyGroup):
|
||||||
|
bone_name: StringProperty(options=set())
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_PG_data(PropertyGroup):
|
||||||
|
bones: CollectionProperty(type=PSA_PG_bone)
|
||||||
|
sequence_count: IntProperty(default=0)
|
||||||
|
|
||||||
|
|
||||||
|
bone_mapping_items = (
|
||||||
|
('EXACT', 'Exact', 'Bone names must match exactly.', 'EXACT', 0),
|
||||||
|
('CASE_INSENSITIVE', 'Case Insensitive', 'Bone names must match, ignoring case (e.g., the PSA bone \'root\' can be mapped to the armature bone \'Root\')', 'CASE_INSENSITIVE', 1),
|
||||||
|
)
|
||||||
|
|
||||||
|
fps_source_items = (
|
||||||
|
('SEQUENCE', 'Sequence', 'The sequence frame rate matches the original frame rate', 'ACTION', 0),
|
||||||
|
('SCENE', 'Scene', 'The sequence is resampled to the frame rate of the scene', 'SCENE_DATA', 1),
|
||||||
|
('CUSTOM', 'Custom', 'The sequence is resampled to a custom frame rate', 2),
|
||||||
|
)
|
||||||
|
|
||||||
|
compression_ratio_source_items = (
|
||||||
|
('ACTION', 'Action', 'The compression ratio is sourced from the action metadata', 'ACTION', 0),
|
||||||
|
('CUSTOM', 'Custom', 'The compression ratio is set to a custom value', 1),
|
||||||
|
)
|
||||||
|
|
||||||
|
class PsaImportMixin:
|
||||||
|
should_use_fake_user: BoolProperty(default=True, name='Fake User',
|
||||||
|
description='Assign each imported action a fake user so that the data block is '
|
||||||
|
'saved even if it has no users',
|
||||||
|
options=set())
|
||||||
|
should_use_config_file: BoolProperty(default=True, name='Use Config File',
|
||||||
|
description='Use the .config file that is sometimes generated when the PSA '
|
||||||
|
'file is exported from UEViewer. This file contains '
|
||||||
|
'options that can be used to filter out certain bone tracks '
|
||||||
|
'from the imported actions',
|
||||||
|
options=set())
|
||||||
|
should_stash: BoolProperty(default=False, name='Stash',
|
||||||
|
description='Stash each imported action as a strip on a new non-contributing NLA track',
|
||||||
|
options=set())
|
||||||
|
should_use_action_name_prefix: BoolProperty(default=False, name='Prefix Action Name', options=set())
|
||||||
|
action_name_prefix: StringProperty(default='', name='Prefix', options=set())
|
||||||
|
should_overwrite: BoolProperty(default=False, name='Overwrite', options=set(),
|
||||||
|
description='If an action with a matching name already exists, the existing action '
|
||||||
|
'will have its data overwritten instead of a new action being created')
|
||||||
|
should_write_keyframes: BoolProperty(default=True, name='Keyframes', options=set())
|
||||||
|
should_write_metadata: BoolProperty(default=True, name='Metadata', options=set(),
|
||||||
|
description='Additional data will be written to the custom properties of the '
|
||||||
|
'Action (e.g., frame rate)')
|
||||||
|
sequence_filter_name: StringProperty(default='', options={'TEXTEDIT_UPDATE'})
|
||||||
|
sequence_filter_is_selected: BoolProperty(default=False, options=set(), name='Only Show Selected',
|
||||||
|
description='Only show selected sequences')
|
||||||
|
sequence_use_filter_invert: BoolProperty(default=False, options=set())
|
||||||
|
sequence_use_filter_regex: BoolProperty(default=False, name='Regular Expression',
|
||||||
|
description='Filter using regular expressions', options=set())
|
||||||
|
|
||||||
|
should_convert_to_samples: BoolProperty(
|
||||||
|
default=False,
|
||||||
|
name='Convert to Samples',
|
||||||
|
description='Convert keyframes to read-only samples. '
|
||||||
|
'Recommended if you do not plan on editing the actions directly'
|
||||||
|
)
|
||||||
|
bone_mapping_is_case_sensitive: BoolProperty(
|
||||||
|
default=False,
|
||||||
|
name='Case Sensitive'
|
||||||
|
)
|
||||||
|
bone_mapping_should_ignore_trailing_whitespace: BoolProperty(
|
||||||
|
default=True,
|
||||||
|
name='Ignore Trailing Whitespace'
|
||||||
|
)
|
||||||
|
fps_source: EnumProperty(name='FPS Source', items=fps_source_items)
|
||||||
|
fps_custom: FloatProperty(
|
||||||
|
default=30.0,
|
||||||
|
name='Custom FPS',
|
||||||
|
description='The frame rate to which the imported sequences will be resampled',
|
||||||
|
options=set(),
|
||||||
|
min=1.0,
|
||||||
|
soft_min=1.0,
|
||||||
|
soft_max=60.0,
|
||||||
|
step=100,
|
||||||
|
)
|
||||||
|
compression_ratio_source: EnumProperty(name='Compression Ratio Source', items=compression_ratio_source_items, default='ACTION')
|
||||||
|
compression_ratio_custom: FloatProperty(
|
||||||
|
default=1.0,
|
||||||
|
name='Custom Compression Ratio',
|
||||||
|
description='The compression ratio to apply to the imported sequences',
|
||||||
|
options=set(),
|
||||||
|
min=0.0,
|
||||||
|
soft_min=0.0,
|
||||||
|
soft_max=1.0,
|
||||||
|
step=0.0625,
|
||||||
|
)
|
||||||
|
translation_scale: FloatProperty(
|
||||||
|
name='Translation Scale',
|
||||||
|
default=1.0,
|
||||||
|
description='Scale factor for bone translation values. Use this when the scale of the armature does not match the PSA file'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# This property group lives "globally" in the scene, since Operators cannot have PointerProperty or CollectionProperty
|
||||||
|
# properties.
|
||||||
|
class PSA_PG_import(PropertyGroup):
|
||||||
|
psa_error: StringProperty(default='')
|
||||||
|
psa: PointerProperty(type=PSA_PG_data)
|
||||||
|
sequence_list: CollectionProperty(type=PSA_PG_import_action_list_item)
|
||||||
|
sequence_list_index: IntProperty(name='', default=0)
|
||||||
|
sequence_filter_name: StringProperty(default='', options={'TEXTEDIT_UPDATE'})
|
||||||
|
sequence_filter_is_selected: BoolProperty(default=False, options=set(), name='Only Show Selected',
|
||||||
|
description='Only show selected sequences')
|
||||||
|
sequence_use_filter_invert: BoolProperty(default=False, options=set())
|
||||||
|
sequence_use_filter_regex: BoolProperty(default=False, name='Regular Expression',
|
||||||
|
description='Filter using regular expressions', options=set())
|
||||||
|
select_text: PointerProperty(type=Text)
|
||||||
|
|
||||||
|
|
||||||
|
def filter_sequences(pg: PSA_PG_import, sequences) -> List[int]:
|
||||||
|
bitflag_filter_item = 1 << 30
|
||||||
|
flt_flags = [bitflag_filter_item] * len(sequences)
|
||||||
|
|
||||||
|
if pg.sequence_filter_name:
|
||||||
|
# Filter name is non-empty.
|
||||||
|
if pg.sequence_use_filter_regex:
|
||||||
|
# Use regular expression. If regex pattern doesn't compile, just ignore it.
|
||||||
|
try:
|
||||||
|
regex = re.compile(pg.sequence_filter_name)
|
||||||
|
for i, sequence in enumerate(sequences):
|
||||||
|
if not regex.match(sequence.action_name):
|
||||||
|
flt_flags[i] &= ~bitflag_filter_item
|
||||||
|
except re.error:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
# Use plain wildcard text matching.
|
||||||
|
for i, sequence in enumerate(sequences):
|
||||||
|
if not fnmatch(sequence.action_name, f'*{pg.sequence_filter_name}*'):
|
||||||
|
flt_flags[i] &= ~bitflag_filter_item
|
||||||
|
|
||||||
|
if pg.sequence_filter_is_selected:
|
||||||
|
for i, sequence in enumerate(sequences):
|
||||||
|
if not sequence.is_selected:
|
||||||
|
flt_flags[i] &= ~bitflag_filter_item
|
||||||
|
|
||||||
|
if pg.sequence_use_filter_invert:
|
||||||
|
# Invert filter flags for all items.
|
||||||
|
for i, sequence in enumerate(sequences):
|
||||||
|
flt_flags[i] ^= bitflag_filter_item
|
||||||
|
|
||||||
|
return flt_flags
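# Illustrative filter behaviour (names are made up): with sequence_filter_name set to 'run',
# plain matching keeps both 'run_forward' and 'gun_run' (the pattern becomes '*run*'), while
# regex mode treats the string as a pattern matched from the start of the name via re.match(),
# so only names beginning with 'run' would remain.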
|
||||||
|
|
||||||
|
|
||||||
|
def get_visible_sequences(pg: PSA_PG_import, sequences) -> List[PSA_PG_import_action_list_item]:
|
||||||
|
bitflag_filter_item = 1 << 30
|
||||||
|
visible_sequences = []
|
||||||
|
for i, flag in enumerate(filter_sequences(pg, sequences)):
|
||||||
|
if bool(flag & bitflag_filter_item):
|
||||||
|
visible_sequences.append(sequences[i])
|
||||||
|
return visible_sequences
|
||||||
|
|
||||||
|
|
||||||
|
_classes = (
|
||||||
|
PSA_PG_import_action_list_item,
|
||||||
|
PSA_PG_bone,
|
||||||
|
PSA_PG_data,
|
||||||
|
PSA_PG_import,
|
||||||
|
)
|
||||||
|
|
||||||
|
from bpy.utils import register_classes_factory
|
||||||
|
register, unregister = register_classes_factory(_classes)
|
||||||
io_scene_psk_psa/psa/import_/ui.py (new file, 52 lines)
@@ -0,0 +1,52 @@
import bpy
|
||||||
|
from bpy.types import UIList
|
||||||
|
|
||||||
|
from .properties import filter_sequences
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_UL_sequences_mixin(UIList):
|
||||||
|
def draw_item(self, context, layout, data, item, icon, active_data, active_property, index, flt_flag):
|
||||||
|
row = layout.row(align=True)
|
||||||
|
split = row.split(align=True, factor=0.75)
|
||||||
|
column = split.row(align=True)
|
||||||
|
column.alignment = 'LEFT'
|
||||||
|
column.prop(item, 'is_selected', icon_only=True)
|
||||||
|
column.label(text=getattr(item, 'action_name'))
|
||||||
|
|
||||||
|
def draw_filter(self, context, layout):
|
||||||
|
pg = getattr(context.scene, 'psa_import')
|
||||||
|
row = layout.row()
|
||||||
|
sub_row = row.row(align=True)
|
||||||
|
sub_row.prop(pg, 'sequence_filter_name', text='')
|
||||||
|
sub_row.prop(pg, 'sequence_use_filter_invert', text='', icon='ARROW_LEFTRIGHT')
|
||||||
|
sub_row.prop(pg, 'sequence_use_filter_regex', text='', icon='SORTBYEXT')
|
||||||
|
sub_row.prop(pg, 'sequence_filter_is_selected', text='', icon='CHECKBOX_HLT')
|
||||||
|
|
||||||
|
def filter_items(self, context, data, property_):
|
||||||
|
pg = getattr(context.scene, 'psa_import')
|
||||||
|
sequences = getattr(data, property_)
|
||||||
|
flt_flags = filter_sequences(pg, sequences)
|
||||||
|
flt_neworder = bpy.types.UI_UL_list.sort_items_by_name(sequences, 'action_name')
|
||||||
|
return flt_flags, flt_neworder
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_UL_sequences(PSA_UL_sequences_mixin):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_UL_import_sequences(PSA_UL_sequences_mixin):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_UL_import_actions(PSA_UL_sequences_mixin):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
_classes = (
|
||||||
|
PSA_UL_sequences,
|
||||||
|
PSA_UL_import_sequences,
|
||||||
|
PSA_UL_import_actions,
|
||||||
|
)
|
||||||
|
|
||||||
|
from bpy.utils import register_classes_factory
|
||||||
|
register, unregister = register_classes_factory(_classes)
|
||||||
@@ -1,638 +1,385 @@
|
|||||||
import fnmatch
|
from typing import Sequence, Iterable, List, Optional, cast as typing_cast
|
||||||
import os
|
|
||||||
import re
|
|
||||||
from typing import List, Optional
|
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from bpy.props import StringProperty, BoolProperty, CollectionProperty, PointerProperty, IntProperty
|
import re
|
||||||
from bpy.types import Operator, UIList, PropertyGroup, Panel
|
from bpy.types import Armature, Context, FCurve, Object, Bone, PoseBone
|
||||||
from bpy_extras.io_utils import ImportHelper
|
|
||||||
from mathutils import Vector, Quaternion
|
from mathutils import Vector, Quaternion
|
||||||
|
|
||||||
from .data import Psa
|
from .config import PsaConfig, REMOVE_TRACK_LOCATION, REMOVE_TRACK_ROTATION
|
||||||
from .reader import PsaReader
|
from .reader import PsaReader
|
||||||
|
from ..shared.data import PsxBone
|
||||||
|
|
||||||
|
class BoneMapping:
|
||||||
|
def __init__(self,
|
||||||
|
is_case_sensitive: bool = False,
|
||||||
|
should_ignore_trailing_whitespace: bool = True
|
||||||
|
):
|
||||||
|
self.is_case_sensitive = is_case_sensitive
|
||||||
|
# Ancient PSK and PSA exporters would, for some reason, pad the bone names with spaces
|
||||||
|
# instead of just writing null bytes, probably because the programmers were lazy.
|
||||||
|
# By default, we will ignore trailing whitespace when doing comparisons.
|
||||||
|
self.should_ignore_trailing_whitespace = should_ignore_trailing_whitespace
|
||||||
|
|
||||||
|
|
||||||
class PsaImportOptions(object):
|
class PsaImportOptions(object):
|
||||||
|
def __init__(self,
|
||||||
|
action_name_prefix: str = '',
|
||||||
|
bone_mapping: BoneMapping = BoneMapping(),
|
||||||
|
fps_custom: float = 30.0,
|
||||||
|
fps_source: str = 'SEQUENCE',
|
||||||
|
psa_config: PsaConfig = PsaConfig(),
|
||||||
|
sequence_names: Optional[List[str]] = None,
|
||||||
|
should_convert_to_samples: bool = False,
|
||||||
|
should_overwrite: bool = False,
|
||||||
|
should_stash: bool = False,
|
||||||
|
should_use_config_file: bool = True,
|
||||||
|
should_use_fake_user: bool = False,
|
||||||
|
should_write_keyframes: bool = True,
|
||||||
|
should_write_metadata: bool = True,
|
||||||
|
translation_scale: float = 1.0
|
||||||
|
):
|
||||||
|
self.action_name_prefix = action_name_prefix
|
||||||
|
self.bone_mapping = bone_mapping
|
||||||
|
self.fps_custom = fps_custom
|
||||||
|
self.fps_source = fps_source
|
||||||
|
self.psa_config = psa_config
|
||||||
|
self.sequence_names = sequence_names if sequence_names is not None else []
|
||||||
|
self.should_convert_to_samples = should_convert_to_samples
|
||||||
|
self.should_overwrite = should_overwrite
|
||||||
|
self.should_stash = should_stash
|
||||||
|
self.should_use_config_file = should_use_config_file
|
||||||
|
self.should_use_fake_user = should_use_fake_user
|
||||||
|
self.should_write_keyframes = should_write_keyframes
|
||||||
|
self.should_write_metadata = should_write_metadata
|
||||||
|
self.translation_scale = translation_scale
|
||||||
|
|
||||||
|
|
||||||
|
class ImportBone(object):
|
||||||
|
def __init__(self, psa_bone: PsxBone):
|
||||||
|
self.psa_bone: PsxBone = psa_bone
|
||||||
|
self.parent: Optional[ImportBone] = None
|
||||||
|
self.armature_bone: Optional[Bone] = None
|
||||||
|
self.pose_bone: Optional[PoseBone] = None
|
||||||
|
self.original_location: Vector = Vector()
|
||||||
|
self.original_rotation: Quaternion = Quaternion()
|
||||||
|
self.post_rotation: Quaternion = Quaternion()
|
||||||
|
self.fcurves: List[FCurve] = []
|
||||||
|
|
||||||
|
|
||||||
|
def _calculate_fcurve_data(import_bone: ImportBone, key_data: Sequence[float]):
|
||||||
|
# Convert world-space transforms to local-space transforms.
|
||||||
|
key_rotation = Quaternion(key_data[0:4])
|
||||||
|
key_location = Vector(key_data[4:])
|
||||||
|
q = import_bone.post_rotation.copy()
|
||||||
|
q.rotate(import_bone.original_rotation)
|
||||||
|
rotation = q
|
||||||
|
q = import_bone.post_rotation.copy()
|
||||||
|
if import_bone.parent is None:
|
||||||
|
q.rotate(key_rotation.conjugated())
|
||||||
|
else:
|
||||||
|
q.rotate(key_rotation)
|
||||||
|
rotation.rotate(q.conjugated())
|
||||||
|
location = key_location - import_bone.original_location
|
||||||
|
location.rotate(import_bone.post_rotation.conjugated())
|
||||||
|
return rotation.w, rotation.x, rotation.y, rotation.z, location.x, location.y, location.z
|
||||||
|
|
||||||
|
|
||||||
|
class PsaImportResult:
|
||||||
def __init__(self):
|
     def __init__(self):
-        self.should_clean_keys = True
-        self.should_use_fake_user = False
-        self.should_stash = False
-        self.sequence_names = []
-        self.should_overwrite = False
-        self.should_write_keyframes = True
-        self.should_write_metadata = True
-        self.action_name_prefix = ''
+        self.warnings: List[str] = []


-class PsaImporter(object):
-    def __init__(self):
-        pass
-
-    def import_psa(self, psa_reader: PsaReader, armature_object, options: PsaImportOptions):
-        sequences = map(lambda x: psa_reader.sequences[x], options.sequence_names)
-        armature_data = armature_object.data
-
-        class ImportBone(object):
-            def __init__(self, psa_bone: Psa.Bone):
-                self.psa_bone: Psa.Bone = psa_bone
-                self.parent: Optional[ImportBone] = None
-                self.armature_bone = None
-                self.pose_bone = None
-                self.orig_loc: Vector = Vector()
-                self.orig_quat: Quaternion = Quaternion()
-                self.post_quat: Quaternion = Quaternion()
-                self.fcurves = []
-
-        def calculate_fcurve_data(import_bone: ImportBone, key_data: []):
-            # Convert world-space transforms to local-space transforms.
-            key_rotation = Quaternion(key_data[0:4])
-            key_location = Vector(key_data[4:])
-            q = import_bone.post_quat.copy()
-            q.rotate(import_bone.orig_quat)
-            quat = q
-            q = import_bone.post_quat.copy()
-            if import_bone.parent is None:
-                q.rotate(key_rotation.conjugated())
-            else:
-                q.rotate(key_rotation)
-            quat.rotate(q.conjugated())
-            loc = key_location - import_bone.orig_loc
-            loc.rotate(import_bone.post_quat.conjugated())
-            return quat.w, quat.x, quat.y, quat.z, loc.x, loc.y, loc.z
-
-        # Create an index mapping from bones in the PSA to bones in the target armature.
-        psa_to_armature_bone_indices = {}
-        armature_bone_names = [x.name for x in armature_data.bones]
-        psa_bone_names = []
-        for psa_bone_index, psa_bone in enumerate(psa_reader.bones):
-            psa_bone_name = psa_bone.name.decode('windows-1252')
-            psa_bone_names.append(psa_bone_name)
-            try:
-                psa_to_armature_bone_indices[psa_bone_index] = armature_bone_names.index(psa_bone_name)
-            except ValueError:
-                pass
-
-        # Report if there are missing bones in the target armature.
-        missing_bone_names = set(psa_bone_names).difference(set(armature_bone_names))
-        if len(missing_bone_names) > 0:
-            print(
-                f'The armature object \'{armature_object.name}\' is missing the following bones that exist in the PSA:')
-            print(list(sorted(missing_bone_names)))
-        del armature_bone_names
-
-        # Create intermediate bone data for import operations.
-        import_bones = []
-        import_bones_dict = dict()
-
-        for psa_bone_index, psa_bone in enumerate(psa_reader.bones):
-            bone_name = psa_bone.name.decode('windows-1252')
-            if psa_bone_index not in psa_to_armature_bone_indices:  # TODO: replace with bone_name in armature_data.bones
-                # PSA bone does not map to armature bone, skip it and leave an empty bone in its place.
-                import_bones.append(None)
-                continue
-            import_bone = ImportBone(psa_bone)
-            import_bone.armature_bone = armature_data.bones[bone_name]
-            import_bone.pose_bone = armature_object.pose.bones[bone_name]
-            import_bones_dict[bone_name] = import_bone
-            import_bones.append(import_bone)
-
-        for import_bone in filter(lambda x: x is not None, import_bones):
-            armature_bone = import_bone.armature_bone
-            if armature_bone.parent is not None and armature_bone.parent.name in psa_bone_names:
-                import_bone.parent = import_bones_dict[armature_bone.parent.name]
-            # Calculate the original location & rotation of each bone (in world-space maybe?)
-            if armature_bone.get('orig_quat') is not None:
-                # TODO: ideally we don't rely on bone auxiliary data like this, the non-aux data path is incorrect (animations are flipped 180 around Z)
-                import_bone.orig_quat = Quaternion(armature_bone['orig_quat'])
-                import_bone.orig_loc = Vector(armature_bone['orig_loc'])
-                import_bone.post_quat = Quaternion(armature_bone['post_quat'])
-            else:
-                if import_bone.parent is not None:
-                    import_bone.orig_loc = armature_bone.matrix_local.translation - armature_bone.parent.matrix_local.translation
-                    import_bone.orig_loc.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated())
-                    import_bone.orig_quat = armature_bone.matrix_local.to_quaternion()
-                    import_bone.orig_quat.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated())
-                    import_bone.orig_quat.conjugate()
-                else:
-                    import_bone.orig_loc = armature_bone.matrix_local.translation.copy()
-                    import_bone.orig_quat = armature_bone.matrix_local.to_quaternion()
-                import_bone.post_quat = import_bone.orig_quat.conjugated()
-
-        # Create and populate the data for new sequences.
-        actions = []
-        for sequence in sequences:
-            # Add the action.
-            sequence_name = sequence.name.decode('windows-1252')
-            action_name = options.action_name_prefix + sequence_name
-
-            if options.should_overwrite and action_name in bpy.data.actions:
-                action = bpy.data.actions[action_name]
-            else:
-                action = bpy.data.actions.new(name=action_name)
-
-            if options.should_write_keyframes:
-                # Remove existing f-curves (replace with action.fcurves.clear() in Blender 3.2)
-                while len(action.fcurves) > 0:
-                    action.fcurves.remove(action.fcurves[-1])
-
-                # Create f-curves for the rotation and location of each bone.
-                for psa_bone_index, armature_bone_index in psa_to_armature_bone_indices.items():
-                    import_bone = import_bones[psa_bone_index]
-                    pose_bone = import_bone.pose_bone
-                    rotation_data_path = pose_bone.path_from_id('rotation_quaternion')
-                    location_data_path = pose_bone.path_from_id('location')
-                    import_bone.fcurves = [
-                        action.fcurves.new(rotation_data_path, index=0, action_group=pose_bone.name),  # Qw
-                        action.fcurves.new(rotation_data_path, index=1, action_group=pose_bone.name),  # Qx
-                        action.fcurves.new(rotation_data_path, index=2, action_group=pose_bone.name),  # Qy
-                        action.fcurves.new(rotation_data_path, index=3, action_group=pose_bone.name),  # Qz
-                        action.fcurves.new(location_data_path, index=0, action_group=pose_bone.name),  # Lx
-                        action.fcurves.new(location_data_path, index=1, action_group=pose_bone.name),  # Ly
-                        action.fcurves.new(location_data_path, index=2, action_group=pose_bone.name),  # Lz
-                    ]
-
-                # Read the sequence data matrix from the PSA.
-                sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name)
-                keyframe_write_matrix = np.ones(sequence_data_matrix.shape, dtype=np.int8)
-
-                # Convert the sequence's data from world-space to local-space.
-                for bone_index, import_bone in enumerate(import_bones):
-                    if import_bone is None:
-                        continue
-                    for frame_index in range(sequence.frame_count):
-                        # This bone has writeable keyframes for this frame.
-                        key_data = sequence_data_matrix[frame_index, bone_index]
-                        # Calculate the local-space key data for the bone.
-                        sequence_data_matrix[frame_index, bone_index] = calculate_fcurve_data(import_bone, key_data)
-
-                # Clean the keyframe data. This is accomplished by writing zeroes to the write matrix when there is an
-                # insufficiently large change in the data from the last written frame.
-                if options.should_clean_keys:
-                    threshold = 0.001
-                    for bone_index, import_bone in enumerate(import_bones):
-                        if import_bone is None:
-                            continue
-                        for fcurve_index in range(len(import_bone.fcurves)):
-                            # Get all the keyframe data for the bone's f-curve data from the sequence data matrix.
-                            fcurve_frame_data = sequence_data_matrix[:, bone_index, fcurve_index]
-                            last_written_datum = 0
-                            for frame_index, datum in enumerate(fcurve_frame_data):
-                                # If the f-curve data is not different enough to the last written frame, un-mark this data for writing.
-                                if frame_index > 0 and abs(datum - last_written_datum) < threshold:
-                                    keyframe_write_matrix[frame_index, bone_index, fcurve_index] = 0
-                                else:
-                                    last_written_datum = datum
-
-                # Write the keyframes out!
-                for frame_index in range(sequence.frame_count):
-                    for bone_index, import_bone in enumerate(import_bones):
-                        if import_bone is None:
-                            continue
-                        bone_has_writeable_keyframes = any(keyframe_write_matrix[frame_index, bone_index])
-                        if bone_has_writeable_keyframes:
-                            # This bone has writeable keyframes for this frame.
-                            key_data = sequence_data_matrix[frame_index, bone_index]
-                            for fcurve, should_write, datum in zip(import_bone.fcurves,
-                                                                   keyframe_write_matrix[frame_index, bone_index],
-                                                                   key_data):
-                                if should_write:
-                                    fcurve.keyframe_points.insert(frame_index, datum, options={'FAST'})
-
-            # Write
-            if options.should_write_metadata:
-                action['psa_sequence_name'] = sequence_name
-                action['psa_sequence_fps'] = sequence.fps
-
-            action.use_fake_user = options.should_use_fake_user
-
-            actions.append(action)
-
-        # If the user specifies, store the new animations as strips on a non-contributing NLA track.
-        if options.should_stash:
-            if armature_object.animation_data is None:
-                armature_object.animation_data_create()
-            for action in actions:
-                nla_track = armature_object.animation_data.nla_tracks.new()
-                nla_track.name = action.name
-                nla_track.mute = True
-                nla_track.strips.new(name=action.name, start=0, action=action)
+def _get_armature_bone_index_for_psa_bone(psa_bone_name: str, armature_bone_names: List[str], bone_mapping: BoneMapping) -> Optional[int]:
+    """
+    @param psa_bone_name: The name of the PSA bone.
+    @param armature_bone_names: The names of the bones in the armature.
+    @param bone_mapping: Bone mapping information.
+    @return: The index of the armature bone that corresponds to the given PSA bone, or None if no such bone exists.
+    """
+    # Use regular expressions for bone name matching.
+    pattern = psa_bone_name
+    flags = 0
+    if bone_mapping.should_ignore_trailing_whitespace:
+        psa_bone_name = psa_bone_name.rstrip()
+        pattern += r'\s*'
+    if not bone_mapping.is_case_sensitive:
+        flags = re.IGNORECASE
+    pattern = re.compile(pattern, flags)
+    for armature_bone_index, armature_bone_name in enumerate(armature_bone_names):
+        if re.fullmatch(pattern, armature_bone_name):
+            return armature_bone_index
+    return None
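As an aside, a minimal standalone sketch of the bone-name matching behaviour introduced above. It is not the add-on's code: the BoneMapping stand-in is a hypothetical dataclass carrying only the two flags the diff references, and this variant escapes the bone name before building the pattern, which the original does not.

import re
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class BoneMapping:
    # Stand-in for the add-on's bone mapping settings; only the two flags used above.
    is_case_sensitive: bool = False
    should_ignore_trailing_whitespace: bool = True


def find_bone_index(psa_bone_name: str, armature_bone_names: List[str], mapping: BoneMapping) -> Optional[int]:
    # Build a regular expression from the PSA bone name, optionally tolerating
    # trailing whitespace and case differences, mirroring the function above.
    pattern = re.escape(psa_bone_name.rstrip() if mapping.should_ignore_trailing_whitespace else psa_bone_name)
    if mapping.should_ignore_trailing_whitespace:
        pattern += r'\s*'
    flags = 0 if mapping.is_case_sensitive else re.IGNORECASE
    for index, name in enumerate(armature_bone_names):
        if re.fullmatch(pattern, name, flags):
            return index
    return None


print(find_bone_index('Bip01 ', ['bip01', 'bip01 pelvis'], BoneMapping()))  # -> 0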
class PsaImportActionListItem(PropertyGroup):
|
def _resample_sequence_data_matrix(sequence_data_matrix: np.ndarray, frame_step: float = 1.0) -> np.ndarray:
|
||||||
action_name: StringProperty(options=set())
|
"""
|
||||||
is_selected: BoolProperty(default=False, options=set())
|
Resamples the sequence data matrix to the target frame count.
|
||||||
|
|
||||||
|
@param sequence_data_matrix: FxBx7 matrix where F is the number of frames, B is the number of bones, and X is the
|
||||||
|
number of data elements per bone.
|
||||||
|
@param frame_step: The step between frames in the resampled sequence.
|
||||||
|
@return: The resampled sequence data matrix, or sequence_data_matrix if no resampling is necessary.
|
||||||
|
"""
|
||||||
|
|
||||||
def load_psa_file(context):
|
def _get_sample_frame_times(source_frame_count: int, frame_step: float) -> Iterable[float]:
|
||||||
pg = context.scene.psa_import
|
# TODO: for correctness, we should also emit the target frame time as well (because the last frame can be a
|
||||||
pg.sequence_list.clear()
|
# fractional frame).
|
||||||
pg.psa.bones.clear()
|
assert frame_step > 0.0, 'Frame step must be greater than 0'
|
||||||
pg.psa_error = ''
|
time = 0.0
|
||||||
try:
|
while time < source_frame_count - 1:
|
||||||
# Read the file and populate the action list.
|
yield time
|
||||||
p = os.path.abspath(pg.psa_file_path)
|
time += frame_step
|
||||||
psa_reader = PsaReader(p)
|
yield source_frame_count - 1
|
||||||
for sequence in psa_reader.sequences.values():
|
|
||||||
item = pg.sequence_list.add()
|
|
||||||
item.action_name = sequence.name.decode('windows-1252')
|
|
||||||
for psa_bone in psa_reader.bones:
|
|
||||||
item = pg.psa.bones.add()
|
|
||||||
item.bone_name = psa_bone.name.decode('windows-1252')
|
|
||||||
except Exception as e:
|
|
||||||
pg.psa_error = str(e)
|
|
||||||
|
|
||||||
|
if frame_step == 1.0:
|
||||||
|
# No resampling is necessary.
|
||||||
|
return sequence_data_matrix
|
||||||
|
|
||||||
def on_psa_file_path_updated(property, context):
|
source_frame_count, bone_count = sequence_data_matrix.shape[:2]
|
||||||
load_psa_file(context)
|
sample_frame_times = list(_get_sample_frame_times(source_frame_count, frame_step))
|
||||||
|
target_frame_count = len(sample_frame_times)
|
||||||
|
resampled_sequence_data_matrix = np.zeros((target_frame_count, bone_count, 7), dtype=float)
|
||||||
|
|
||||||
|
for sample_frame_index, sample_frame_time in enumerate(sample_frame_times):
|
||||||
class PsaBonePropertyGroup(PropertyGroup):
|
frame_index = int(sample_frame_time)
|
||||||
bone_name: StringProperty(options=set())
|
if sample_frame_time % 1.0 == 0.0:
|
||||||
|
# Sample time has no fractional part, so just copy the frame.
|
||||||
|
resampled_sequence_data_matrix[sample_frame_index, :, :] = sequence_data_matrix[frame_index, :, :]
|
||||||
class PsaDataPropertyGroup(PropertyGroup):
|
|
||||||
bones: CollectionProperty(type=PsaBonePropertyGroup)
|
|
||||||
sequence_count: IntProperty(default=0)
|
|
||||||
|
|
||||||
|
|
||||||
class PsaImportPropertyGroup(PropertyGroup):
|
|
||||||
psa_file_path: StringProperty(default='', options=set(), update=on_psa_file_path_updated, name='PSA File Path')
|
|
||||||
psa_error: StringProperty(default='')
|
|
||||||
psa: PointerProperty(type=PsaDataPropertyGroup)
|
|
||||||
sequence_list: CollectionProperty(type=PsaImportActionListItem)
|
|
||||||
sequence_list_index: IntProperty(name='', default=0)
|
|
||||||
should_clean_keys: BoolProperty(default=True, name='Clean Keyframes',
|
|
||||||
description='Exclude unnecessary keyframes from being written to the actions',
|
|
||||||
options=set())
|
|
||||||
should_use_fake_user: BoolProperty(default=True, name='Fake User',
|
|
||||||
description='Assign each imported action a fake user so that the data block is saved even it has no users',
|
|
||||||
options=set())
|
|
||||||
should_stash: BoolProperty(default=False, name='Stash',
|
|
||||||
description='Stash each imported action as a strip on a new non-contributing NLA track',
|
|
||||||
options=set())
|
|
||||||
should_use_action_name_prefix: BoolProperty(default=False, name='Prefix Action Name', options=set())
|
|
||||||
action_name_prefix: StringProperty(default='', name='Prefix', options=set())
|
|
||||||
should_overwrite: BoolProperty(default=False, name='Reuse Existing Actions', options=set(),
|
|
||||||
description='If an action with a matching name already exists, the existing action will have it\'s data overwritten instead of a new action being created')
|
|
||||||
should_write_keyframes: BoolProperty(default=True, name='Keyframes', options=set())
|
|
||||||
should_write_metadata: BoolProperty(default=True, name='Metadata', options=set(),
|
|
||||||
description='Additional data will be written to the custom properties of the Action (e.g., frame rate)')
|
|
||||||
sequence_filter_name: StringProperty(default='', options={'TEXTEDIT_UPDATE'})
|
|
||||||
sequence_filter_is_selected: BoolProperty(default=False, options=set(), name='Only Show Selected',
|
|
||||||
description='Only show selected sequences')
|
|
||||||
sequence_use_filter_invert: BoolProperty(default=False, options=set())
|
|
||||||
sequence_use_filter_regex: BoolProperty(default=False, name='Regular Expression',
|
|
||||||
description='Filter using regular expressions', options=set())
|
|
||||||
select_text: PointerProperty(type=bpy.types.Text)
|
|
||||||
|
|
||||||
|
|
||||||
def filter_sequences(pg: PsaImportPropertyGroup, sequences: bpy.types.bpy_prop_collection) -> List[int]:
|
|
||||||
bitflag_filter_item = 1 << 30
|
|
||||||
flt_flags = [bitflag_filter_item] * len(sequences)
|
|
||||||
|
|
||||||
if pg.sequence_filter_name is not None:
|
|
||||||
# Filter name is non-empty.
|
|
||||||
if pg.sequence_use_filter_regex:
|
|
||||||
# Use regular expression. If regex pattern doesn't compile, just ignore it.
|
|
||||||
try:
|
|
||||||
regex = re.compile(pg.sequence_filter_name)
|
|
||||||
for i, sequence in enumerate(sequences):
|
|
||||||
if not regex.match(sequence.action_name):
|
|
||||||
flt_flags[i] &= ~bitflag_filter_item
|
|
||||||
except re.error:
|
|
||||||
pass
|
|
||||||
else:
|
else:
|
||||||
# User regular text matching.
|
# Sample time has a fractional part, so interpolate between two frames.
|
||||||
for i, sequence in enumerate(sequences):
|
next_frame_index = frame_index + 1
|
||||||
if not fnmatch.fnmatch(sequence.action_name, f'*{pg.sequence_filter_name}*'):
|
for bone_index in range(bone_count):
|
||||||
flt_flags[i] &= ~bitflag_filter_item
|
source_frame_1_data = sequence_data_matrix[frame_index, bone_index, :]
|
||||||
|
source_frame_2_data = sequence_data_matrix[next_frame_index, bone_index, :]
|
||||||
if pg.sequence_filter_is_selected:
|
factor = sample_frame_time - frame_index
|
||||||
for i, sequence in enumerate(sequences):
|
q = Quaternion((source_frame_1_data[:4])).slerp(Quaternion((source_frame_2_data[:4])), factor)
|
||||||
if not sequence.is_selected:
|
q.normalize()
|
||||||
flt_flags[i] &= ~bitflag_filter_item
|
l = Vector(source_frame_1_data[4:]).lerp(Vector(source_frame_2_data[4:]), factor)
|
||||||
|
resampled_sequence_data_matrix[sample_frame_index, bone_index, :] = q.w, q.x, q.y, q.z, l.x, l.y, l.z
|
||||||
if pg.sequence_use_filter_invert:
|
|
||||||
# Invert filter flags for all items.
|
return resampled_sequence_data_matrix
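A small, self-contained sketch of the frame-time sampling the resampler relies on, assuming the same semantics as _get_sample_frame_times above (yield times spaced frame_step apart, then always emit the final source frame). The numbers in the usage line are illustrative; the frame_step value follows the sequence.fps / target_fps relation used later in the diff.

from typing import Iterable, List


def sample_frame_times(source_frame_count: int, frame_step: float) -> Iterable[float]:
    # Yield sample times spaced frame_step apart, always ending on the last source frame.
    assert frame_step > 0.0
    time = 0.0
    while time < source_frame_count - 1:
        yield time
        time += frame_step
    yield source_frame_count - 1


# Downsampling a 30 FPS sequence of 10 frames to 24 FPS uses frame_step = 30 / 24 = 1.25.
times: List[float] = list(sample_frame_times(10, 1.25))
print(times)  # [0.0, 1.25, 2.5, 3.75, 5.0, 6.25, 7.5, 8.75, 9]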
|
||||||
for i, sequence in enumerate(sequences):
|
|
||||||
flt_flags[i] ^= bitflag_filter_item
|
|
||||||
|
def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object, options: PsaImportOptions) -> PsaImportResult:
|
||||||
return flt_flags
|
|
||||||
|
assert context.window_manager
|
||||||
|
|
||||||
def get_visible_sequences(pg: PsaImportPropertyGroup, sequences: bpy.types.bpy_prop_collection) -> List[
|
result = PsaImportResult()
|
||||||
PsaImportActionListItem]:
|
sequences = [psa_reader.sequences[x] for x in options.sequence_names]
|
||||||
bitflag_filter_item = 1 << 30
|
armature_data = typing_cast(Armature, armature_object.data)
|
||||||
visible_sequences = []
|
|
||||||
for i, flag in enumerate(filter_sequences(pg, sequences)):
|
# Create an index mapping from bones in the PSA to bones in the target armature.
|
||||||
if bool(flag & bitflag_filter_item):
|
psa_to_armature_bone_indices = {}
|
||||||
visible_sequences.append(sequences[i])
|
armature_to_psa_bone_indices = {}
|
||||||
return visible_sequences
|
armature_bone_names = [x.name for x in armature_data.bones]
|
||||||
|
psa_bone_names = []
|
||||||
|
duplicate_mappings = []
|
||||||
class PSA_UL_SequenceList(UIList):
|
|
||||||
|
for psa_bone_index, psa_bone in enumerate(psa_reader.bones):
|
||||||
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
|
psa_bone_name: str = psa_bone.name.decode('windows-1252')
|
||||||
row = layout.row(align=True)
|
armature_bone_index = _get_armature_bone_index_for_psa_bone(psa_bone_name, armature_bone_names, options.bone_mapping)
|
||||||
split = row.split(align=True, factor=0.75)
|
if armature_bone_index is not None:
|
||||||
column = split.row(align=True)
|
# Ensure that no other PSA bone has been mapped to this armature bone yet.
|
||||||
column.alignment = 'LEFT'
|
if armature_bone_index not in armature_to_psa_bone_indices:
|
||||||
column.prop(item, 'is_selected', icon_only=True)
|
psa_to_armature_bone_indices[psa_bone_index] = armature_bone_index
|
||||||
column.label(text=item.action_name)
|
armature_to_psa_bone_indices[armature_bone_index] = psa_bone_index
|
||||||
|
else:
|
||||||
def draw_filter(self, context, layout):
|
# This armature bone has already been mapped to a PSA bone.
|
||||||
pg = context.scene.psa_import
|
duplicate_mappings.append((psa_bone_index, armature_bone_index, armature_to_psa_bone_indices[armature_bone_index]))
|
||||||
row = layout.row()
|
psa_bone_names.append(armature_bone_names[armature_bone_index])
|
||||||
subrow = row.row(align=True)
|
else:
|
||||||
subrow.prop(pg, 'sequence_filter_name', text="")
|
psa_bone_names.append(psa_bone_name)
|
||||||
subrow.prop(pg, 'sequence_use_filter_invert', text="", icon='ARROW_LEFTRIGHT')
|
|
||||||
subrow.prop(pg, 'sequence_use_filter_regex', text="", icon='SORTBYEXT')
|
# Warn about duplicate bone mappings.
|
||||||
subrow.prop(pg, 'sequence_filter_is_selected', text="", icon='CHECKBOX_HLT')
|
if len(duplicate_mappings) > 0:
|
||||||
|
for (psa_bone_index, armature_bone_index, mapped_psa_bone_index) in duplicate_mappings:
|
||||||
def filter_items(self, context, data, property):
|
psa_bone_name = psa_bone_names[psa_bone_index]
|
||||||
pg = context.scene.psa_import
|
armature_bone_name = armature_bone_names[armature_bone_index]
|
||||||
sequences = getattr(data, property)
|
mapped_psa_bone_name = psa_bone_names[mapped_psa_bone_index]
|
||||||
flt_flags = filter_sequences(pg, sequences)
|
result.warnings.append(f'PSA bone {psa_bone_index} ({psa_bone_name}) could not be mapped to armature bone {armature_bone_index} ({armature_bone_name}) because the armature bone is already mapped to PSA bone {mapped_psa_bone_index} ({mapped_psa_bone_name})')
|
||||||
flt_neworder = bpy.types.UI_UL_list.sort_items_by_name(sequences, 'action_name')
|
|
||||||
return flt_flags, flt_neworder
|
# Report if there are missing bones in the target armature.
|
||||||
|
missing_bone_names = set(psa_bone_names).difference(set(armature_bone_names))
|
||||||
|
if len(missing_bone_names) > 0:
|
||||||
class PSA_UL_ImportSequenceList(PSA_UL_SequenceList, UIList):
|
result.warnings.append(
|
||||||
pass
|
f'The armature \'{armature_object.name}\' is missing {len(missing_bone_names)} bones that exist in '
|
||||||
|
'the PSA:\n' +
|
||||||
|
str(list(sorted(missing_bone_names)))
|
||||||
class PSA_UL_ImportActionList(PSA_UL_SequenceList, UIList):
|
)
|
||||||
pass
|
del armature_bone_names
|
||||||
|
|
||||||
|
# Create intermediate bone data for import operations.
|
||||||
class PsaImportSequencesFromText(Operator):
|
import_bones = []
|
||||||
bl_idname = 'psa_import.sequences_select_from_text'
|
psa_bone_names_to_import_bones = dict()
|
||||||
bl_label = 'Select By Text List'
|
|
||||||
bl_description = 'Select sequences by name from text list'
|
for (psa_bone_index, psa_bone), psa_bone_name in zip(enumerate(psa_reader.bones), psa_bone_names):
|
||||||
bl_options = {'INTERNAL', 'UNDO'}
|
if psa_bone_index not in psa_to_armature_bone_indices:
|
||||||
|
# PSA bone does not map to armature bone, skip it and leave an empty bone in its place.
|
||||||
@classmethod
|
import_bones.append(None)
|
||||||
def poll(cls, context):
|
continue
|
||||||
pg = context.scene.psa_import
|
import_bone = ImportBone(psa_bone)
|
||||||
return len(pg.sequence_list) > 0
|
import_bone.armature_bone = armature_data.bones[psa_bone_name]
|
||||||
|
import_bone.pose_bone = armature_object.pose.bones[psa_bone_name]
|
||||||
def invoke(self, context, event):
|
psa_bone_names_to_import_bones[psa_bone_name] = import_bone
|
||||||
return context.window_manager.invoke_props_dialog(self, width=256)
|
import_bones.append(import_bone)
|
||||||
|
|
||||||
def draw(self, context):
|
bones_with_missing_parents = []
|
||||||
layout = self.layout
|
|
||||||
pg = context.scene.psa_import
|
for import_bone in filter(lambda x: x is not None, import_bones):
|
||||||
layout.label(icon='INFO', text='Each sequence name should be on a new line.')
|
armature_bone = import_bone.armature_bone
|
||||||
layout.prop(pg, 'select_text', text='')
|
has_parent = armature_bone.parent is not None
|
||||||
|
if has_parent:
|
||||||
def execute(self, context):
|
if armature_bone.parent.name in psa_bone_names:
|
||||||
pg = context.scene.psa_import
|
import_bone.parent = psa_bone_names_to_import_bones[armature_bone.parent.name]
|
||||||
contents = pg.select_text.as_string()
|
else:
|
||||||
count = 0
|
# Add a warning if the parent bone is not in the PSA.
|
||||||
for line in contents.split('\n'):
|
bones_with_missing_parents.append(armature_bone)
|
||||||
for sequence in pg.sequence_list:
|
# Calculate the original location & rotation of each bone (in world-space maybe?)
|
||||||
if sequence.action_name == line:
|
if has_parent:
|
||||||
sequence.is_selected = True
|
import_bone.original_location = armature_bone.matrix_local.translation - armature_bone.parent.matrix_local.translation
|
||||||
count += 1
|
import_bone.original_location.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated())
|
||||||
self.report({'INFO'}, f'Selected {count} sequence(s)')
|
import_bone.original_rotation = armature_bone.matrix_local.to_quaternion()
|
||||||
return {'FINISHED'}
|
import_bone.original_rotation.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated())
|
||||||
|
import_bone.original_rotation.conjugate()
|
||||||
|
else:
|
||||||
class PsaImportSequencesSelectAll(Operator):
|
import_bone.original_location = armature_bone.matrix_local.translation.copy()
|
||||||
bl_idname = 'psa_import.sequences_select_all'
|
import_bone.original_rotation = armature_bone.matrix_local.to_quaternion().conjugated()
|
||||||
bl_label = 'All'
|
|
||||||
bl_description = 'Select all visible sequences'
|
import_bone.post_rotation = import_bone.original_rotation.conjugated()
|
||||||
bl_options = {'INTERNAL'}
|
|
||||||
|
# Warn about bones with missing parents.
|
||||||
@classmethod
|
if len(bones_with_missing_parents) > 0:
|
||||||
def poll(cls, context):
|
count = len(bones_with_missing_parents)
|
||||||
pg = context.scene.psa_import
|
message = f'{count} bone(s) have parents that are not present in the PSA:\n' + str([x.name for x in bones_with_missing_parents])
|
||||||
visible_sequences = get_visible_sequences(pg, pg.sequence_list)
|
result.warnings.append(message)
|
||||||
has_unselected_actions = any(map(lambda action: not action.is_selected, visible_sequences))
|
|
||||||
return len(visible_sequences) > 0 and has_unselected_actions
|
context.window_manager.progress_begin(0, len(sequences))
|
||||||
|
|
||||||
def execute(self, context):
|
# Create and populate the data for new sequences.
|
||||||
pg = context.scene.psa_import
|
actions = []
|
||||||
visible_sequences = get_visible_sequences(pg, pg.sequence_list)
|
for sequence_index, sequence in enumerate(sequences):
|
||||||
for sequence in visible_sequences:
|
# Add the action.
|
||||||
sequence.is_selected = True
|
sequence_name = sequence.name.decode('windows-1252')
|
||||||
return {'FINISHED'}
|
action_name = options.action_name_prefix + sequence_name
|
||||||
|
|
||||||
|
# Get the bone track flags for this sequence, or an empty dictionary if none exist.
|
||||||
class PsaImportSequencesDeselectAll(Operator):
|
sequence_bone_track_flags = dict()
|
||||||
bl_idname = 'psa_import.sequences_deselect_all'
|
if sequence_name in options.psa_config.sequence_bone_flags.keys():
|
||||||
bl_label = 'None'
|
sequence_bone_track_flags = options.psa_config.sequence_bone_flags[sequence_name]
|
||||||
bl_description = 'Deselect all visible sequences'
|
|
||||||
bl_options = {'INTERNAL'}
|
if options.should_overwrite and action_name in bpy.data.actions:
|
||||||
|
action = bpy.data.actions[action_name]
|
||||||
@classmethod
|
else:
|
||||||
def poll(cls, context):
|
action = bpy.data.actions.new(name=action_name)
|
||||||
pg = context.scene.psa_import
|
action.slots.new('OBJECT', armature_object.name)
|
||||||
visible_sequences = get_visible_sequences(pg, pg.sequence_list)
|
|
||||||
has_selected_sequences = any(map(lambda sequence: sequence.is_selected, visible_sequences))
|
# Calculate the target FPS.
|
||||||
return len(visible_sequences) > 0 and has_selected_sequences
|
match options.fps_source:
|
||||||
|
case 'CUSTOM':
|
||||||
def execute(self, context):
|
target_fps = options.fps_custom
|
||||||
pg = context.scene.psa_import
|
case 'SCENE':
|
||||||
visible_sequences = get_visible_sequences(pg, pg.sequence_list)
|
assert context.scene
|
||||||
for sequence in visible_sequences:
|
target_fps = context.scene.render.fps
|
||||||
sequence.is_selected = False
|
case 'SEQUENCE':
|
||||||
return {'FINISHED'}
|
target_fps = sequence.fps
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid FPS source: {options.fps_source}'
|
||||||
class PSA_PT_ImportPanel_Advanced(Panel):
|
|
||||||
bl_space_type = 'PROPERTIES'
|
if options.should_write_keyframes:
|
||||||
bl_region_type = 'WINDOW'
|
# Remove existing f-curves.
|
||||||
bl_label = 'Advanced'
|
action.fcurves.clear()
|
||||||
bl_options = {'DEFAULT_CLOSED'}
|
|
||||||
bl_parent_id = 'PSA_PT_ImportPanel'
|
# Create f-curves for the rotation and location of each bone.
|
||||||
|
for psa_bone_index, armature_bone_index in psa_to_armature_bone_indices.items():
|
||||||
def draw(self, context):
|
bone_track_flags = sequence_bone_track_flags.get(psa_bone_index, 0)
|
||||||
layout = self.layout
|
import_bone = import_bones[psa_bone_index]
|
||||||
pg = context.scene.psa_import
|
pose_bone = import_bone.pose_bone
|
||||||
|
rotation_data_path = pose_bone.path_from_id('rotation_quaternion')
|
||||||
col = layout.column(heading="Options")
|
location_data_path = pose_bone.path_from_id('location')
|
||||||
col.use_property_split = True
|
add_rotation_fcurves = (bone_track_flags & REMOVE_TRACK_ROTATION) == 0
|
||||||
col.use_property_decorate = False
|
add_location_fcurves = (bone_track_flags & REMOVE_TRACK_LOCATION) == 0
|
||||||
col.prop(pg, 'should_clean_keys')
|
import_bone.fcurves = [
|
||||||
col.separator()
|
action.fcurves.new(rotation_data_path, index=0, action_group=pose_bone.name) if add_rotation_fcurves else None, # Qw
|
||||||
col.prop(pg, 'should_use_fake_user')
|
action.fcurves.new(rotation_data_path, index=1, action_group=pose_bone.name) if add_rotation_fcurves else None, # Qx
|
||||||
col.prop(pg, 'should_stash')
|
action.fcurves.new(rotation_data_path, index=2, action_group=pose_bone.name) if add_rotation_fcurves else None, # Qy
|
||||||
col.prop(pg, 'should_use_action_name_prefix')
|
action.fcurves.new(rotation_data_path, index=3, action_group=pose_bone.name) if add_rotation_fcurves else None, # Qz
|
||||||
|
action.fcurves.new(location_data_path, index=0, action_group=pose_bone.name) if add_location_fcurves else None, # Lx
|
||||||
if pg.should_use_action_name_prefix:
|
action.fcurves.new(location_data_path, index=1, action_group=pose_bone.name) if add_location_fcurves else None, # Ly
|
||||||
col.prop(pg, 'action_name_prefix')
|
action.fcurves.new(location_data_path, index=2, action_group=pose_bone.name) if add_location_fcurves else None, # Lz
|
||||||
|
]
|
||||||
|
|
||||||
class PSA_PT_ImportPanel(Panel):
|
# Read the sequence data matrix from the PSA.
|
||||||
bl_space_type = 'PROPERTIES'
|
sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name)
|
||||||
bl_region_type = 'WINDOW'
|
|
||||||
bl_label = 'PSA Import'
|
if options.translation_scale != 1.0:
|
||||||
bl_context = 'data'
|
# Scale the translation data.
|
||||||
bl_category = 'PSA Import'
|
sequence_data_matrix[:, :, 4:] *= options.translation_scale
|
||||||
bl_options = {'DEFAULT_CLOSED'}
|
|
||||||
|
# Convert the sequence's data from world-space to local-space.
|
||||||
@classmethod
|
for bone_index, import_bone in enumerate(import_bones):
|
||||||
def poll(cls, context):
|
if import_bone is None:
|
||||||
return context.object.type == 'ARMATURE'
|
continue
|
||||||
|
for frame_index in range(sequence.frame_count):
|
||||||
def draw(self, context):
|
# This bone has writeable keyframes for this frame.
|
||||||
layout = self.layout
|
key_data = sequence_data_matrix[frame_index, bone_index]
|
||||||
pg = context.scene.psa_import
|
# Calculate the local-space key data for the bone.
|
||||||
|
sequence_data_matrix[frame_index, bone_index] = _calculate_fcurve_data(import_bone, key_data)
|
||||||
row = layout.row(align=True)
|
|
||||||
row.operator(PsaImportSelectFile.bl_idname, text='', icon='FILEBROWSER')
|
# Resample the sequence data to the target FPS.
|
||||||
row.prop(pg, 'psa_file_path', text='')
|
# If the target frame count is the same as the source frame count, this will be a no-op.
|
||||||
row.operator(PsaImportFileReload.bl_idname, text='', icon='FILE_REFRESH')
|
resampled_sequence_data_matrix = _resample_sequence_data_matrix(sequence_data_matrix,
|
||||||
|
frame_step=sequence.fps / target_fps)
|
||||||
if pg.psa_error != '':
|
|
||||||
row = layout.row()
|
# Write the keyframes out.
|
||||||
row.label(text='File could not be read', icon='ERROR')
|
# Note that the f-curve data consists of alternating time and value data.
|
||||||
|
target_frame_count = resampled_sequence_data_matrix.shape[0]
|
||||||
box = layout.box()
|
fcurve_data = np.zeros(2 * target_frame_count, dtype=float)
|
||||||
|
fcurve_data[0::2] = range(0, target_frame_count)
|
||||||
box.label(text=f'Sequences ({len(pg.sequence_list)})', icon='ARMATURE_DATA')
|
|
||||||
|
for bone_index, import_bone in enumerate(import_bones):
|
||||||
# select
|
if import_bone is None:
|
||||||
rows = max(3, min(len(pg.sequence_list), 10))
|
continue
|
||||||
|
for fcurve_index, fcurve in enumerate(import_bone.fcurves):
|
||||||
row = box.row()
|
if fcurve is None:
|
||||||
col = row.column()
|
continue
|
||||||
|
fcurve_data[1::2] = resampled_sequence_data_matrix[:, bone_index, fcurve_index]
|
||||||
row2 = col.row(align=True)
|
fcurve.keyframe_points.add(target_frame_count)
|
||||||
row2.label(text='Select')
|
fcurve.keyframe_points.foreach_set('co', fcurve_data)
|
||||||
row2.operator(PsaImportSequencesFromText.bl_idname, text='', icon='TEXT')
|
for fcurve_keyframe in fcurve.keyframe_points:
|
||||||
row2.operator(PsaImportSequencesSelectAll.bl_idname, text='All', icon='CHECKBOX_HLT')
|
fcurve_keyframe.interpolation = 'LINEAR'
|
||||||
row2.operator(PsaImportSequencesDeselectAll.bl_idname, text='None', icon='CHECKBOX_DEHLT')
|
|
||||||
|
if options.should_convert_to_samples:
|
||||||
col = col.row()
|
# Bake the curve to samples.
|
||||||
col.template_list('PSA_UL_ImportSequenceList', '', pg, 'sequence_list', pg, 'sequence_list_index', rows=rows)
|
for fcurve in action.fcurves:
|
||||||
|
fcurve.convert_to_samples(start=0, end=sequence.frame_count)
|
||||||
col = layout.column(heading='')
|
|
||||||
col.use_property_split = True
|
# Write meta-data.
|
||||||
col.use_property_decorate = False
|
if options.should_write_metadata:
|
||||||
col.prop(pg, 'should_overwrite')
|
action.psa_export.fps = target_fps
|
||||||
|
|
||||||
col = layout.column(heading='Write')
|
action.use_fake_user = options.should_use_fake_user
|
||||||
col.use_property_split = True
|
|
||||||
col.use_property_decorate = False
|
actions.append(action)
|
||||||
col.prop(pg, 'should_write_keyframes')
|
|
||||||
col.prop(pg, 'should_write_metadata')
|
context.window_manager.progress_update(sequence_index)
|
||||||
|
|
||||||
selected_sequence_count = sum(map(lambda x: x.is_selected, pg.sequence_list))
|
# If the user specifies, store the new animations as strips on a non-contributing NLA track.
|
||||||
|
if options.should_stash:
|
||||||
row = layout.row()
|
animation_data = armature_object.animation_data
|
||||||
|
if animation_data is None:
|
||||||
import_button_text = 'Import'
|
animation_data = armature_object.animation_data_create()
|
||||||
if selected_sequence_count > 0:
|
for action in actions:
|
||||||
import_button_text = f'Import ({selected_sequence_count})'
|
nla_track = armature_object.animation_data.nla_tracks.new()
|
||||||
|
nla_track.name = action.name
|
||||||
row.operator(PsaImportOperator.bl_idname, text=import_button_text)
|
nla_track.mute = True
|
||||||
|
nla_track.strips.new(name=action.name, start=0, action=action)
|
||||||
|
|
||||||
class PsaImportFileReload(Operator):
|
context.window_manager.progress_end()
|
||||||
bl_idname = 'psa_import.file_reload'
|
|
||||||
bl_label = 'Refresh'
|
return result
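Roughly how the new entry point appears to be called. This is a sketch only: the option fields shown are taken from the surrounding diff, the remaining options are assumed to keep their defaults, and the file path and sequence names are illustrative.

# Hypothetical call site; not part of the diff itself.
import bpy

psa_reader = PsaReader('C:/exports/character_anims.psa')   # illustrative path
armature_object = bpy.context.view_layer.objects.active    # assumed to be an armature object

options = PsaImportOptions()
options.sequence_names = ['run_forward', 'idle']            # illustrative sequence names
options.should_overwrite = True
options.should_write_keyframes = True

result = import_psa(bpy.context, psa_reader, armature_object, options)
for warning in result.warnings:
    print(warning)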
|
||||||
bl_options = {'INTERNAL'}
|
|
||||||
bl_description = 'Refresh the PSA file'
|
|
||||||
|
|
||||||
def execute(self, context):
|
|
||||||
load_psa_file(context)
|
|
||||||
return {"FINISHED"}
|
|
||||||
|
|
||||||
|
|
||||||
class PsaImportSelectFile(Operator):
|
|
||||||
bl_idname = 'psa_import.select_file'
|
|
||||||
bl_label = 'Select'
|
|
||||||
bl_options = {'INTERNAL'}
|
|
||||||
bl_description = 'Select a PSA file from which to import animations'
|
|
||||||
filepath: bpy.props.StringProperty(subtype='FILE_PATH')
|
|
||||||
filter_glob: bpy.props.StringProperty(default="*.psa", options={'HIDDEN'})
|
|
||||||
|
|
||||||
def execute(self, context):
|
|
||||||
context.scene.psa_import.psa_file_path = self.filepath
|
|
||||||
return {"FINISHED"}
|
|
||||||
|
|
||||||
def invoke(self, context, event):
|
|
||||||
context.window_manager.fileselect_add(self)
|
|
||||||
return {"RUNNING_MODAL"}
|
|
||||||
|
|
||||||
|
|
||||||
class PsaImportOperator(Operator):
|
|
||||||
bl_idname = 'psa_import.import'
|
|
||||||
bl_label = 'Import'
|
|
||||||
bl_description = 'Import the selected animations into the scene as actions'
|
|
||||||
bl_options = {'INTERNAL', 'UNDO'}
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def poll(cls, context):
|
|
||||||
pg = context.scene.psa_import
|
|
||||||
active_object = context.view_layer.objects.active
|
|
||||||
if active_object is None or active_object.type != 'ARMATURE':
|
|
||||||
return False
|
|
||||||
return any(map(lambda x: x.is_selected, pg.sequence_list))
|
|
||||||
|
|
||||||
def execute(self, context):
|
|
||||||
pg = context.scene.psa_import
|
|
||||||
psa_reader = PsaReader(pg.psa_file_path)
|
|
||||||
sequence_names = [x.action_name for x in pg.sequence_list if x.is_selected]
|
|
||||||
|
|
||||||
options = PsaImportOptions()
|
|
||||||
options.sequence_names = sequence_names
|
|
||||||
options.should_clean_keys = pg.should_clean_keys
|
|
||||||
options.should_use_fake_user = pg.should_use_fake_user
|
|
||||||
options.should_stash = pg.should_stash
|
|
||||||
options.action_name_prefix = pg.action_name_prefix if pg.should_use_action_name_prefix else ''
|
|
||||||
options.should_overwrite = pg.should_overwrite
|
|
||||||
options.should_write_metadata = pg.should_write_metadata
|
|
||||||
options.should_write_keyframes = pg.should_write_keyframes
|
|
||||||
|
|
||||||
PsaImporter().import_psa(psa_reader, context.view_layer.objects.active, options)
|
|
||||||
|
|
||||||
self.report({'INFO'}, f'Imported {len(sequence_names)} action(s)')
|
|
||||||
|
|
||||||
return {'FINISHED'}
|
|
||||||
|
|
||||||
|
|
||||||
class PsaImportFileSelectOperator(Operator, ImportHelper):
|
|
||||||
bl_idname = 'psa_import.file_select'
|
|
||||||
bl_label = 'File Select'
|
|
||||||
bl_options = {'INTERNAL'}
|
|
||||||
filename_ext = '.psa'
|
|
||||||
filter_glob: StringProperty(default='*.psa', options={'HIDDEN'})
|
|
||||||
filepath: StringProperty(
|
|
||||||
name='File Path',
|
|
||||||
description='File path used for importing the PSA file',
|
|
||||||
maxlen=1024,
|
|
||||||
default='')
|
|
||||||
|
|
||||||
def invoke(self, context, event):
|
|
||||||
context.window_manager.fileselect_add(self)
|
|
||||||
return {'RUNNING_MODAL'}
|
|
||||||
|
|
||||||
def execute(self, context):
|
|
||||||
pg = context.scene.psa_import
|
|
||||||
pg.psa_file_path = self.filepath
|
|
||||||
return {'FINISHED'}
|
|
||||||
|
|
||||||
|
|
||||||
classes = (
|
|
||||||
PsaImportActionListItem,
|
|
||||||
PsaBonePropertyGroup,
|
|
||||||
PsaDataPropertyGroup,
|
|
||||||
PsaImportPropertyGroup,
|
|
||||||
PSA_UL_SequenceList,
|
|
||||||
PSA_UL_ImportSequenceList,
|
|
||||||
PSA_UL_ImportActionList,
|
|
||||||
PsaImportSequencesSelectAll,
|
|
||||||
PsaImportSequencesDeselectAll,
|
|
||||||
PsaImportSequencesFromText,
|
|
||||||
PsaImportFileReload,
|
|
||||||
PSA_PT_ImportPanel,
|
|
||||||
PSA_PT_ImportPanel_Advanced,
|
|
||||||
PsaImportOperator,
|
|
||||||
PsaImportFileSelectOperator,
|
|
||||||
PsaImportSelectFile,
|
|
||||||
)
@@ -1,15 +1,34 @@
-import ctypes
+from ctypes import sizeof
+from typing import List
 
 import numpy as np
 
-from .data import *
+from .data import Psa, PsxBone
+from ..shared.data import Section
+
+
+def _try_fix_cue4parse_issue_103(sequences) -> bool:
+    # Detect if the file was exported from CUE4Parse prior to the fix for issue #103.
+    # https://github.com/FabianFG/CUE4Parse/issues/103
+    # The issue was that the frame_start_index was not being set correctly, and was always being set to the same value
+    # as the frame_count.
+    # This fix will eventually be deprecated as it is only necessary for files exported prior to the fix.
+    if len(sequences) > 0 and sequences[0].frame_start_index == sequences[0].frame_count:
+        # Manually set the frame_start_index for each sequence. This assumes that the sequences are in order with
+        # no shared frames between sequences (all exporters that I know of do this, so it's a safe assumption).
+        frame_start_index = 0
+        for i, sequence in enumerate(sequences):
+            sequence.frame_start_index = frame_start_index
+            frame_start_index += sequence.frame_count
+        return True
+    return False
 
 
 class PsaReader(object):
     """
-    This class reads the sequences and bone information immediately upon instantiation and hold onto a file handle.
-    The key data is not read into memory upon instantiation due to it's potentially very large size.
-    To read the key data for a particular sequence, call `read_sequence_keys`.
+    This class reads the sequences and bone information immediately upon instantiation and holds onto a file handle.
+    The keyframe data is not read into memory upon instantiation due to its potentially very large size.
+    To read the key data for a particular sequence, call :read_sequence_keys.
     """
 
     def __init__(self, path):
@@ -17,15 +36,27 @@ class PsaReader(object):
         self.fp = open(path, 'rb')
         self.psa: Psa = self._read(self.fp)
 
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.fp.close()
+
     @property
     def bones(self):
         return self.psa.bones
 
     @property
-    def sequences(self) -> OrderedDict[Psa.Sequence]:
+    def sequences(self):
         return self.psa.sequences
 
-    def read_sequence_data_matrix(self, sequence_name: str):
+    def read_sequence_data_matrix(self, sequence_name: str) -> np.ndarray:
+        """
+        Reads and returns the data matrix for the given sequence.
+
+        @param sequence_name: The name of the sequence.
+        @return: An FxBx7 matrix where F is the number of frames, B is the number of bones.
+        """
         sequence = self.psa.sequences[sequence_name]
         keys = self.read_sequence_keys(sequence_name)
         bone_count = len(self.bones)
@@ -38,10 +69,11 @@ class PsaReader(object):
         return matrix
 
     def read_sequence_keys(self, sequence_name: str) -> List[Psa.Key]:
-        """ Reads and returns the key data for a sequence.
+        """
+        Reads and returns the key data for a sequence.
 
-        :param sequence_name: The name of the sequence.
-        :return: A list of Psa.Keys.
+        @param sequence_name: The name of the sequence.
+        @return: A list of Psa.Keys.
         """
         # Set the file reader to the beginning of the keys data
         sequence = self.psa.sequences[sequence_name]
@@ -60,7 +92,7 @@ class PsaReader(object):
         return keys
 
     @staticmethod
-    def _read_types(fp, data_class: ctypes.Structure, section: Section, data):
+    def _read_types(fp, data_class, section: Section, data):
         buffer_length = section.data_size * section.data_count
         buffer = fp.read(buffer_length)
         offset = 0
@@ -72,22 +104,23 @@ class PsaReader(object):
         psa = Psa()
         while fp.read(1):
             fp.seek(-1, 1)
-            section = Section.from_buffer_copy(fp.read(ctypes.sizeof(Section)))
+            section = Section.from_buffer_copy(fp.read(sizeof(Section)))
             if section.name == b'ANIMHEAD':
                 pass
             elif section.name == b'BONENAMES':
-                PsaReader._read_types(fp, Psa.Bone, section, psa.bones)
+                PsaReader._read_types(fp, PsxBone, section, psa.bones)
             elif section.name == b'ANIMINFO':
                 sequences = []
                 PsaReader._read_types(fp, Psa.Sequence, section, sequences)
+                # Try to fix CUE4Parse bug, if necessary.
+                _try_fix_cue4parse_issue_103(sequences)
                 for sequence in sequences:
                     psa.sequences[sequence.name.decode()] = sequence
             elif section.name == b'ANIMKEYS':
                 # Skip keys on this pass. We will keep this file open and read from it as needed.
                 self.keys_data_offset = fp.tell()
                 fp.seek(section.data_size * section.data_count, 1)
-            elif section.name in [b'SCALEKEYS']:
-                fp.seek(section.data_size * section.data_count, 1)
             else:
-                raise RuntimeError(f'Unrecognized section "{section.name}"')
+                fp.seek(section.data_size * section.data_count, 1)
+                print(f'Unrecognized section in PSA: "{section.name}"')
         return psa
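With the __enter__ and __exit__ methods added above, the reader can now be used as a context manager. A brief usage sketch follows; the file path is illustrative, and the FxBx7 layout (rotation quaternion w, x, y, z followed by location x, y, z) comes from the docstring above.

# Illustrative usage of the context-manager support added above.
with PsaReader('C:/exports/character_anims.psa') as psa_reader:  # path is illustrative
    for sequence_name in psa_reader.sequences:
        # Each matrix is FxBx7: rotation quaternion (w, x, y, z) followed by location (x, y, z).
        matrix = psa_reader.read_sequence_data_matrix(sequence_name)
        print(sequence_name, matrix.shape)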
io_scene_psk_psa/psa/writer.py (new file, +25 lines)
@@ -0,0 +1,25 @@
+from ctypes import Structure, sizeof
+from typing import Optional, Type, Collection
+
+from .data import Psa
+from ..shared.data import PsxBone, Section
+
+
+def write_section(fp, name: bytes, data_type: Optional[Type[Structure]] = None, data: Optional[Collection] = None):
+    section = Section()
+    section.name = name
+    if data_type is not None and data is not None:
+        section.data_size = sizeof(data_type)
+        section.data_count = len(data)
+    fp.write(section)
+    if data is not None:
+        for datum in data:
+            fp.write(datum)
+
+
+def write_psa(psa: Psa, path: str):
+    with open(path, 'wb') as fp:
+        write_section(fp, b'ANIMHEAD')
+        write_section(fp, b'BONENAMES', PsxBone, psa.bones)
+        write_section(fp, b'ANIMINFO', Psa.Sequence, list(psa.sequences.values()))
+        write_section(fp, b'ANIMKEYS', Psa.Key, psa.keys)
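A short usage sketch for the new writer module. It assumes a Psa instance has already been populated (bones, sequences and keys filled in by the export path), and the import path and output file name are illustrative.

# Illustrative only: `psa` is assumed to be a fully populated Psa instance.
from io_scene_psk_psa.psa.writer import write_psa

write_psa(psa, 'C:/exports/character_anims.psa')
# The file is laid out as the four sections written above:
# ANIMHEAD, BONENAMES, ANIMINFO, ANIMKEYS.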
@@ -1,236 +1,438 @@
|
|||||||
from collections import OrderedDict
|
import bmesh
|
||||||
|
import bpy
|
||||||
from .data import *
|
import numpy as np
|
||||||
from ..helpers import *
|
from bpy.types import Armature, Collection, Context, Depsgraph, Object, ArmatureModifier, Mesh
|
||||||
|
from mathutils import Matrix
|
||||||
|
from typing import Dict, Iterable, List, Optional, Set, cast as typing_cast
|
||||||
|
from .data import Psk
|
||||||
|
from .properties import triangle_type_and_bit_flags_to_poly_flags
|
||||||
|
from ..shared.data import Vector3
|
||||||
|
from ..shared.dfs import DfsObject, dfs_collection_objects, dfs_view_layer_objects
|
||||||
|
from ..shared.helpers import (
|
||||||
|
PsxBoneCollection,
|
||||||
|
convert_string_to_cp1252_bytes,
|
||||||
|
create_psx_bones,
|
||||||
|
get_armatures_for_mesh_objects,
|
||||||
|
get_coordinate_system_transform,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class PskInputObjects(object):
|
class PskInputObjects(object):
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.mesh_objects = []
|
self.mesh_dfs_objects: List[DfsObject] = []
|
||||||
self.armature_object = None
|
self.armature_objects: Set[Object] = set()
|
||||||
|
|
||||||
|
|
||||||
class PskBuilderOptions(object):
|
class PskBuildOptions(object):
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.bone_filter_mode = 'ALL'
|
self.bone_filter_mode = 'ALL'
|
||||||
self.bone_group_indices = []
|
self.bone_collection_indices: List[PsxBoneCollection] = []
|
||||||
|
self.object_eval_state = 'EVALUATED'
|
||||||
|
self.material_order_mode = 'AUTOMATIC'
|
||||||
|
self.material_name_list: List[str] = []
|
||||||
|
self.scale = 1.0
|
||||||
|
self.export_space = 'WORLD'
|
||||||
|
self.forward_axis = 'X'
|
||||||
|
self.up_axis = 'Z'
|
||||||
|
self.root_bone_name = 'ROOT'
|
||||||
|
|
||||||
|
|
||||||
class PskBuilder(object):
|
def get_materials_for_mesh_objects(depsgraph: Depsgraph, mesh_objects: Iterable[Object]):
|
||||||
def __init__(self):
|
yielded_materials = set()
|
||||||
pass
|
for mesh_object in mesh_objects:
|
||||||
|
evaluated_mesh_object = mesh_object.evaluated_get(depsgraph)
|
||||||
|
for i, material_slot in enumerate(evaluated_mesh_object.material_slots):
|
||||||
|
material = material_slot.material
|
||||||
|
if material is None:
|
||||||
|
raise RuntimeError(f'Material slots cannot be empty. ({mesh_object.name}, index {i})')
|
||||||
|
if material not in yielded_materials:
|
||||||
|
yielded_materials.add(material)
|
||||||
|
yield material
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def get_input_objects(context) -> PskInputObjects:
|
|
||||||
input_objects = PskInputObjects()
|
|
||||||
for obj in context.view_layer.objects.selected:
|
|
||||||
if obj.type != 'MESH':
|
|
||||||
raise RuntimeError(f'Selected object "{obj.name}" is not a mesh')
|
|
||||||
|
|
||||||
input_objects.mesh_objects = context.view_layer.objects.selected
|
def get_mesh_objects_for_collection(collection: Collection) -> Iterable[DfsObject]:
|
||||||
|
return filter(lambda x: x.obj.type == 'MESH', dfs_collection_objects(collection))
|
||||||
|
|
||||||
if len(input_objects.mesh_objects) == 0:
|
|
||||||
raise RuntimeError('At least one mesh must be selected')
|
|
||||||
|
|
||||||
for obj in input_objects.mesh_objects:
|
def get_mesh_objects_for_context(context: Context) -> Iterable[DfsObject]:
|
||||||
if len(obj.data.materials) == 0:
|
if context.view_layer is None:
|
||||||
raise RuntimeError(f'Mesh "{obj.name}" must have at least one material')
|
return
|
||||||
|
for dfs_object in dfs_view_layer_objects(context.view_layer):
|
||||||
|
if dfs_object.obj.type == 'MESH' and dfs_object.is_selected:
|
||||||
|
yield dfs_object
|
||||||
|
|
||||||
# Ensure that there are either no armature modifiers (static mesh)
|
|
||||||
# or that there is exactly one armature modifier object shared between
|
|
||||||
# all selected meshes
|
|
||||||
armature_modifier_objects = set()
|
|
||||||
|
|
||||||
for obj in input_objects.mesh_objects:
|
def get_armature_for_mesh_object(mesh_object: Object) -> Optional[Object]:
|
||||||
modifiers = [x for x in obj.modifiers if x.type == 'ARMATURE']
|
if mesh_object.type != 'MESH':
|
||||||
if len(modifiers) == 0:
|
return None
|
||||||
continue
|
# Get the first armature modifier with a non-empty armature object.
|
||||||
elif len(modifiers) > 1:
|
for modifier in filter(lambda x: x.type == 'ARMATURE', mesh_object.modifiers):
|
||||||
raise RuntimeError(f'Mesh "{obj.name}" must have only one armature modifier')
|
armature_modifier = typing_cast(ArmatureModifier, modifier)
|
||||||
armature_modifier_objects.add(modifiers[0].object)
|
if armature_modifier.object is not None:
|
||||||
|
return armature_modifier.object
|
||||||
|
return None
|
||||||
|
|
||||||
if len(armature_modifier_objects) > 1:
|
|
||||||
raise RuntimeError('All selected meshes must have the same armature modifier')
|
|
||||||
elif len(armature_modifier_objects) == 1:
|
|
||||||
input_objects.armature_object = list(armature_modifier_objects)[0]
|
|
||||||
|
|
||||||
return input_objects
|
def _get_psk_input_objects(mesh_dfs_objects: Iterable[DfsObject]) -> PskInputObjects:
|
||||||
|
mesh_dfs_objects = list(mesh_dfs_objects)
|
||||||
|
if len(mesh_dfs_objects) == 0:
|
||||||
|
raise RuntimeError('At least one mesh must be selected')
|
||||||
|
input_objects = PskInputObjects()
|
||||||
|
input_objects.mesh_dfs_objects = mesh_dfs_objects
|
||||||
|
input_objects.armature_objects |= set(get_armatures_for_mesh_objects(map(lambda x: x.obj, mesh_dfs_objects)))
|
||||||
|
return input_objects
|
||||||
|
|
||||||
def build(self, context, options: PskBuilderOptions) -> Psk:
|
|
||||||
input_objects = PskBuilder.get_input_objects(context)
|
|
||||||
|
|
||||||
armature_object = input_objects.armature_object
|
def get_psk_input_objects_for_context(context: Context) -> PskInputObjects:
|
||||||
|
mesh_objects = list(get_mesh_objects_for_context(context))
|
||||||
|
return _get_psk_input_objects(mesh_objects)
|
||||||
|
|
||||||
psk = Psk()
|
|
||||||
bones = []
|
|
||||||
materials = OrderedDict()
|
|
||||||
|
|
||||||
if armature_object is None:
|
def get_psk_input_objects_for_collection(collection: Collection) -> PskInputObjects:
|
||||||
# If the mesh has no armature object, simply assign it a dummy bone at the root to satisfy the requirement
|
mesh_objects = get_mesh_objects_for_collection(collection)
|
||||||
# that a PSK file must have at least one bone.
|
return _get_psk_input_objects(mesh_objects)
|
||||||
psk_bone = Psk.Bone()
|
|
||||||
psk_bone.name = bytes('root', encoding='windows-1252')
|
|
||||||
psk_bone.flags = 0
|
class PskBuildResult(object):
|
||||||
psk_bone.children_count = 0
|
def __init__(self, psk: Psk, warnings: list[str]):
|
||||||
psk_bone.parent_index = 0
|
self.psk: Psk = psk
|
||||||
psk_bone.location = Vector3.zero()
|
self.warnings: List[str] = warnings
|
||||||
psk_bone.rotation = Quaternion.identity()
|
|
||||||
psk.bones.append(psk_bone)
|
|
||||||
|
def _get_mesh_export_space_matrix(armature_object: Optional[Object], export_space: str) -> Matrix:
|
||||||
|
if armature_object is None:
|
||||||
|
return Matrix.Identity(4)
|
||||||
|
|
||||||
|
def get_object_space_matrix(obj: Object) -> Matrix:
|
||||||
|
translation, rotation, _ = obj.matrix_world.decompose()
|
||||||
|
# We neutralize the scale here because the scale is already applied to the mesh objects implicitly.
|
||||||
|
return Matrix.Translation(translation) @ rotation.to_matrix().to_4x4()
|
||||||
|
|
||||||
|
match export_space:
|
||||||
|
case 'WORLD':
|
||||||
|
return Matrix.Identity(4)
|
||||||
|
case 'ARMATURE':
|
||||||
|
return get_object_space_matrix(armature_object).inverted()
|
||||||
|
case 'ROOT':
|
||||||
|
armature_data = typing_cast(Armature, armature_object.data)
|
||||||
|
armature_space_matrix = get_object_space_matrix(armature_object) @ armature_data.bones[0].matrix_local
|
||||||
|
return armature_space_matrix.inverted()
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid export space: {export_space}'
|
||||||
|
|
||||||
|
|
||||||
|
def _get_material_name_indices(obj: Object, material_names: List[str]) -> Iterable[int]:
|
||||||
|
"""
|
||||||
|
Returns the index of the material in the list of material names.
|
||||||
|
If the material is not found, the index 0 is returned.
|
||||||
|
"""
|
||||||
|
for material_slot in obj.material_slots:
|
||||||
|
if material_slot.material is None:
|
||||||
|
yield 0
|
||||||
else:
|
else:
|
||||||
bone_names = get_export_bone_names(armature_object, options.bone_filter_mode, options.bone_group_indices)
|
try:
|
||||||
bones = [armature_object.data.bones[bone_name] for bone_name in bone_names]
|
yield material_names.index(material_slot.material.name)
|
||||||
|
except ValueError:
|
||||||
|
yield 0
|
||||||
|
|
||||||
for bone in bones:
|
|
||||||
psk_bone = Psk.Bone()
|
def build_psk(context: Context, input_objects: PskInputObjects, options: PskBuildOptions) -> PskBuildResult:
|
||||||
psk_bone.name = bytes(bone.name, encoding='windows-1252')
|
|
||||||
psk_bone.flags = 0
|
assert context.window_manager
|
||||||
psk_bone.children_count = 0
|
|
||||||
|
armature_objects = list(input_objects.armature_objects)
|
||||||
|
|
||||||
|
warnings: list[str] = []
|
||||||
|
psk = Psk()
|
||||||
|
|
||||||
|
psx_bone_create_result = create_psx_bones(
|
||||||
|
armature_objects=armature_objects,
|
||||||
|
export_space=options.export_space,
|
||||||
|
forward_axis=options.forward_axis,
|
||||||
|
up_axis=options.up_axis,
|
||||||
|
scale=options.scale,
|
||||||
|
root_bone_name=options.root_bone_name,
|
||||||
|
bone_filter_mode=options.bone_filter_mode,
|
||||||
|
bone_collection_indices=options.bone_collection_indices
|
||||||
|
)
|
||||||
|
|
||||||
|
psk.bones = [psx_bone for psx_bone, _ in psx_bone_create_result.bones]
|
||||||
|
|
||||||
|
# Materials
|
||||||
|
match options.material_order_mode:
|
||||||
|
case 'AUTOMATIC':
|
||||||
|
mesh_objects = [dfs_object.obj for dfs_object in input_objects.mesh_dfs_objects]
|
||||||
|
materials = list(get_materials_for_mesh_objects(context.evaluated_depsgraph_get(), mesh_objects))
|
||||||
|
case 'MANUAL':
|
||||||
|
# The material name list may contain materials that are not on the mesh objects.
|
||||||
|
# Therefore, we can take the material_name_list as gospel and simply use it as a lookup table.
|
||||||
|
# If a look-up fails, replace it with an empty material.
|
||||||
|
materials = [bpy.data.materials.get(x, None) for x in options.material_name_list]
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid material order mode: {options.material_order_mode}'
|
||||||
|
|
||||||
|
for material in materials:
|
||||||
|
psk_material = Psk.Material()
|
||||||
|
psk_material.name = convert_string_to_cp1252_bytes(material.name if material else 'None')
|
||||||
|
psk_material.texture_index = len(psk.materials)
|
||||||
|
if material is not None:
|
||||||
|
psk_material.poly_flags = triangle_type_and_bit_flags_to_poly_flags(material.psk.mesh_triangle_type,
|
||||||
|
material.psk.mesh_triangle_bit_flags)
|
||||||
|
psk.materials.append(psk_material)
|
||||||
|
|
||||||
|
# TODO: This wasn't left in a good state. We should detect if we need to add a "default" material.
|
||||||
|
# This can be done by checking if there is an empty material slot on any of the mesh objects, or if there are
|
||||||
|
# no material slots on any of the mesh objects.
|
||||||
|
# If so, it should be added to the end of the list of materials, and its index should be mapped to a None value in the
|
||||||
|
# material indices list.
|
||||||
|
if len(psk.materials) == 0:
|
||||||
|
# Add a default material if no materials are present.
|
||||||
|
psk_material = Psk.Material()
|
||||||
|
psk_material.name = convert_string_to_cp1252_bytes('None')
|
||||||
|
psk.materials.append(psk_material)
|
||||||
|
|
||||||
|
context.window_manager.progress_begin(0, len(input_objects.mesh_dfs_objects))
|
||||||
|
|
||||||
|
coordinate_system_matrix = get_coordinate_system_transform(options.forward_axis, options.up_axis)
|
||||||
|
|
||||||
|
# Calculate the export spaces for the armature objects.
|
||||||
|
# This is used later to transform the mesh object geometry into the export space.
|
||||||
|
armature_mesh_export_space_matrices: Dict[Optional[Object], Matrix] = {None: Matrix.Identity(4)}
|
||||||
|
if options.export_space == 'ARMATURE':
|
||||||
|
# For meshes without an armature modifier, we need to set the export space to the armature object.
|
||||||
|
armature_mesh_export_space_matrices[None] = _get_mesh_export_space_matrix(next(iter(input_objects.armature_objects), None), options.export_space)
|
||||||
|
for armature_object in armature_objects:
|
||||||
|
armature_mesh_export_space_matrices[armature_object] = _get_mesh_export_space_matrix(armature_object, options.export_space)
|
||||||
|
|
||||||
|
scale_matrix = Matrix.Scale(options.scale, 4)
|
||||||
|
|
||||||
|
original_armature_object_pose_positions = {a: a.data.pose_position for a in armature_objects}
|
||||||
|
|
||||||
|
# Temporarily force the armature into the rest position.
|
||||||
|
# We will undo this later.
|
||||||
|
for armature_object in armature_objects:
|
||||||
|
armature_data = typing_cast(Armature, armature_object.data)
|
||||||
|
armature_data.pose_position = 'REST'
|
||||||
|
|
||||||
|
material_names = [m.name if m is not None else 'None' for m in materials]
|
||||||
|
|
||||||
|
for object_index, input_mesh_object in enumerate(input_objects.mesh_dfs_objects):
|
||||||
|
obj, matrix_world = input_mesh_object.obj, input_mesh_object.matrix_world
|
||||||
|
armature_object = get_armature_for_mesh_object(obj)
|
||||||
|
should_flip_normals = False
|
||||||
|
|
||||||
|
# Material indices
|
||||||
|
material_indices = list(_get_material_name_indices(obj, material_names))
|
||||||
|
|
||||||
|
if len(material_indices) == 0:
|
||||||
|
# Add a default material if no materials are present.
|
||||||
|
material_indices = [0]
|
||||||
|
|
||||||
|
# Store the reference to the evaluated object and data so that we can clean them up later.
|
||||||
|
evaluated_mesh_object = None
|
||||||
|
evaluated_mesh_data = None
|
||||||
|
|
||||||
|
# Mesh data
|
||||||
|
match options.object_eval_state:
|
||||||
|
case 'ORIGINAL':
|
||||||
|
mesh_object = obj
|
||||||
|
mesh_data = typing_cast(Mesh, obj.data)
|
||||||
|
case 'EVALUATED':
|
||||||
|
# Create a copy of the mesh object after non-armature modifiers are applied.
|
||||||
|
depsgraph = context.evaluated_depsgraph_get()
|
||||||
|
bm = bmesh.new()
|
||||||
|
|
||||||
try:
bm.from_object(obj, depsgraph)
except ValueError as e:
del bm
|
raise RuntimeError(f'Object "{obj.name}" is not evaluated.\n'
|
||||||
|
'This is likely because the object is in a collection that has been excluded from the view layer.') from e
|
||||||
|
|
||||||
|
evaluated_mesh_data = bpy.data.meshes.new('')
|
||||||
|
mesh_data = evaluated_mesh_data
|
||||||
|
bm.to_mesh(mesh_data)
|
||||||
|
del bm
|
||||||
|
evaluated_mesh_object = bpy.data.objects.new('', mesh_data)
|
||||||
|
mesh_object = evaluated_mesh_object
|
||||||
|
mesh_object.matrix_world = matrix_world
|
||||||
|
|
||||||
|
# Extract the scale from the matrix.
|
||||||
|
_, _, scale = matrix_world.decompose()
|
||||||
|
|
||||||
|
# Negative scaling in Blender results in inverted normals after the scale is applied. However, if the
|
||||||
|
# scale is not applied, the normals will appear unaffected in the viewport. The evaluated mesh data used
|
||||||
|
# in the export will have the scale applied, but this behavior is not obvious to the user.
|
||||||
|
#
|
||||||
|
# In order to have the exporter be as WYSIWYG as possible, we need to check for negative scaling and
|
||||||
|
# invert the normals if necessary. If two axes have negative scaling and the third has positive scaling,
|
||||||
|
# the normals will be correct. We can detect this by checking if the number of negative scaling axes is
|
||||||
|
# odd. If it is, we need to invert the normals of the mesh by swapping the order of the vertices in each
|
||||||
|
# face.
|
||||||
|
if not should_flip_normals:
|
||||||
|
should_flip_normals = sum(1 for x in scale if x < 0) % 2 == 1
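# A minimal illustration of the parity check above (hypothetical values, not taken from any scene):
# scale = (-1.0, 1.0, 1.0)   -> one negative axis (odd)  -> the winding order must be flipped
# scale = (-1.0, -1.0, 1.0)  -> two negative axes (even) -> handedness is preserved, no flip needed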
|
||||||
|
|
||||||
|
# Copy the vertex groups
|
||||||
|
for vertex_group in obj.vertex_groups:
|
||||||
|
mesh_object.vertex_groups.new(name=vertex_group.name)
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid object evaluation state: {options.object_eval_state}'
|
||||||
|
|
||||||
|
match options.export_space:
|
||||||
|
case 'ARMATURE' | 'ROOT':
|
||||||
|
mesh_export_space_matrix = armature_mesh_export_space_matrices[armature_object]
|
||||||
|
case 'WORLD':
|
||||||
|
mesh_export_space_matrix = armature_mesh_export_space_matrices[armature_object]
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid export space: {options.export_space}'
|
||||||
|
|
||||||
|
vertex_transform_matrix = scale_matrix @ coordinate_system_matrix @ mesh_export_space_matrix
|
||||||
|
point_transform_matrix = vertex_transform_matrix @ mesh_object.matrix_world
|
||||||
|
|
||||||
|
# Vertices
|
||||||
|
vertex_offset = len(psk.points)
|
||||||
|
for vertex in mesh_data.vertices:
|
||||||
|
point = Vector3()
|
||||||
|
v = point_transform_matrix @ vertex.co
|
||||||
|
point.x = v.x
|
||||||
|
point.y = v.y
|
||||||
|
point.z = v.z
|
||||||
|
psk.points.append(point)
|
||||||
|
|
||||||
|
# Wedges
|
||||||
|
mesh_data.calc_loop_triangles()
|
||||||
|
|
||||||
|
if mesh_data.uv_layers.active is None:
|
||||||
|
warnings.append(f'"{mesh_object.name}" has no active UV Map')
|
||||||
|
|
||||||
|
# Build a list of non-unique wedges.
|
||||||
|
wedges = []
|
||||||
|
if mesh_data.uv_layers.active:
|
||||||
|
uv_layer = mesh_data.uv_layers.active.data
|
||||||
|
for loop_index, loop in enumerate(mesh_data.loops):
|
||||||
|
wedges.append(Psk.Wedge(
|
||||||
|
point_index=loop.vertex_index + vertex_offset,
|
||||||
|
u=uv_layer[loop_index].uv[0],
|
||||||
|
v=1.0 - uv_layer[loop_index].uv[1]
|
||||||
|
))
|
||||||
|
else:
|
||||||
|
for loop_index, loop in enumerate(mesh_data.loops):
|
||||||
|
wedges.append(Psk.Wedge(point_index=loop.vertex_index + vertex_offset, u=0.0, v=0.0))
|
||||||
|
|
||||||
|
# Assign material indices to the wedges.
|
||||||
|
for triangle in mesh_data.loop_triangles:
|
||||||
|
for loop_index in triangle.loops:
|
||||||
|
wedges[loop_index].material_index = material_indices[triangle.material_index]
|
||||||
|
|
||||||
|
# Populate the list of wedges with unique wedges & build a look-up table of loop indices to wedge indices.
|
||||||
|
wedge_indices = dict()
|
||||||
|
loop_wedge_indices = np.full(len(mesh_data.loops), -1)
|
||||||
|
for loop_index, wedge in enumerate(wedges):
|
||||||
|
wedge_hash = hash(wedge)
|
||||||
|
if wedge_hash in wedge_indices:
|
||||||
|
loop_wedge_indices[loop_index] = wedge_indices[wedge_hash]
|
||||||
|
else:
|
||||||
|
wedge_index = len(psk.wedges)
|
||||||
|
wedge_indices[wedge_hash] = wedge_index
|
||||||
|
psk.wedges.append(wedge)
|
||||||
|
loop_wedge_indices[loop_index] = wedge_index
|
||||||
|
|
||||||
|
# Faces
|
||||||
|
poly_groups, groups = mesh_data.calc_smooth_groups(use_bitflags=True)
|
||||||
|
psk_face_start_index = len(psk.faces)
|
||||||
|
for f in mesh_data.loop_triangles:
|
||||||
|
face = Psk.Face()
|
||||||
|
face.material_index = material_indices[f.material_index]
|
||||||
|
face.wedge_indices[0] = loop_wedge_indices[f.loops[2]]
|
||||||
|
face.wedge_indices[1] = loop_wedge_indices[f.loops[1]]
|
||||||
|
face.wedge_indices[2] = loop_wedge_indices[f.loops[0]]
|
||||||
|
face.smoothing_groups = poly_groups[f.polygon_index]
|
||||||
|
psk.faces.append(face)
|
||||||
|
|
||||||
|
if should_flip_normals:
|
||||||
|
# Invert the normals of the faces.
|
||||||
|
for face in psk.faces[psk_face_start_index:]:
|
||||||
|
face.wedge_indices[0], face.wedge_indices[2] = face.wedge_indices[2], face.wedge_indices[0]
|
||||||
|
|
||||||
|
# Weights
|
||||||
|
if armature_object is not None:
|
||||||
|
armature_data = typing_cast(Armature, armature_object.data)
|
||||||
|
bone_index_offset = psx_bone_create_result.armature_object_root_bone_indices[armature_object]
|
||||||
|
# Because the vertex groups may contain entries for which there is no matching bone in the armature,
|
||||||
|
# we must filter them out and not export any weights for these vertex groups.
|
||||||
|
|
||||||
|
bone_names = psx_bone_create_result.armature_object_bone_names[armature_object]
|
||||||
|
vertex_group_names = [x.name for x in mesh_object.vertex_groups]
|
||||||
|
vertex_group_bone_indices: Dict[int, int] = dict()
|
||||||
|
for vertex_group_index, vertex_group_name in enumerate(vertex_group_names):
|
||||||
|
try:
|
||||||
|
vertex_group_bone_indices[vertex_group_index] = bone_names.index(vertex_group_name) + bone_index_offset
|
||||||
except ValueError:
# The vertex group does not have a matching bone in the list of bones to be exported.
|
||||||
|
# Check to see if there is an associated bone for this vertex group that exists in the armature.
|
||||||
|
# If there is, we can traverse the ancestors of that bone to find an alternate bone to use for
|
||||||
|
# weighting the vertices belonging to this vertex group.
|
||||||
|
if vertex_group_name in armature_data.bones:
|
||||||
|
bone = armature_data.bones[vertex_group_name]
|
||||||
|
while bone is not None:
|
||||||
|
try:
|
||||||
|
vertex_group_bone_indices[vertex_group_index] = bone_names.index(bone.name) + bone_index_offset
|
||||||
|
break
|
||||||
|
except ValueError:
|
||||||
|
bone = bone.parent
|
||||||
|
|
||||||
# Keep track of which vertices have been assigned weights.
# The ones that have not been assigned weights will be assigned to the root bone.
# Without this, some older versions of UnrealEd may have corrupted meshes.
vertices_assigned_weights = np.full(len(mesh_data.vertices), False)

for vertex_group_index, vertex_group in enumerate(mesh_object.vertex_groups):
if vertex_group_index not in vertex_group_bone_indices:
# Vertex group has no associated bone, skip it.
continue
bone_index = vertex_group_bone_indices[vertex_group_index]
for vertex_index in range(len(mesh_data.vertices)):

bone_names = get_export_bone_names(armature_object, options.bone_filter_mode, options.bone_group_indices)
bones = [armature_object.data.bones[bone_name] for bone_name in bone_names]
for bone in bones:
psk_bone = Psk.Bone()
psk_bone.name = bytes(bone.name, encoding='windows-1252')
psk_bone.flags = 0
psk_bone.children_count = 0
try:
parent_index = bones.index(bone.parent)
psk_bone.parent_index = parent_index
psk.bones[parent_index].children_count += 1
except ValueError:
psk_bone.parent_index = 0
if bone.parent is not None:
rotation = bone.matrix.to_quaternion()
rotation.x = -rotation.x
rotation.y = -rotation.y
rotation.z = -rotation.z
quat_parent = bone.parent.matrix.to_quaternion().inverted()
parent_head = quat_parent @ bone.parent.head
parent_tail = quat_parent @ bone.parent.tail
location = (parent_tail - parent_head) + bone.head
else:
location = armature_object.matrix_local @ bone.head
rot_matrix = bone.matrix @ armature_object.matrix_local.to_3x3()
rotation = rot_matrix.to_quaternion()
psk_bone.location.x = location.x
psk_bone.location.y = location.y
psk_bone.location.z = location.z
psk_bone.rotation.x = rotation.x
psk_bone.rotation.y = rotation.y
psk_bone.rotation.z = rotation.z
psk_bone.rotation.w = rotation.w
psk.bones.append(psk_bone)
|
|
||||||
|
|
||||||
for object in input_objects.mesh_objects:
|
|
||||||
vertex_offset = len(psk.points)
|
|
||||||
|
|
||||||
# VERTICES
|
|
||||||
for vertex in object.data.vertices:
|
|
||||||
point = Vector3()
|
|
||||||
v = object.matrix_world @ vertex.co
|
|
||||||
point.x = v.x
|
|
||||||
point.y = v.y
|
|
||||||
point.z = v.z
|
|
||||||
psk.points.append(point)
|
|
||||||
|
|
||||||
uv_layer = object.data.uv_layers.active.data
|
|
||||||
|
|
||||||
# MATERIALS
|
|
||||||
material_indices = []
|
|
||||||
for i, m in enumerate(object.data.materials):
|
|
||||||
if m is None:
|
|
||||||
raise RuntimeError('Material cannot be empty (index ' + str(i) + ')')
|
|
||||||
if m.name in materials:
|
|
||||||
# Material already evaluated, just get its index.
|
|
||||||
material_index = list(materials.keys()).index(m.name)
|
|
||||||
else:
|
|
||||||
# New material.
|
|
||||||
material = Psk.Material()
|
|
||||||
material.name = bytes(m.name, encoding='utf-8')
|
|
||||||
material.texture_index = len(psk.materials)
|
|
||||||
psk.materials.append(material)
|
|
||||||
materials[m.name] = m
|
|
||||||
material_index = material.texture_index
|
|
||||||
material_indices.append(material_index)
|
|
||||||
|
|
||||||
# WEDGES
|
|
||||||
object.data.calc_loop_triangles()
|
|
||||||
|
|
||||||
# Build a list of non-unique wedges.
|
|
||||||
wedges = []
|
|
||||||
for loop_index, loop in enumerate(object.data.loops):
|
|
||||||
wedge = Psk.Wedge()
|
|
||||||
wedge.point_index = loop.vertex_index + vertex_offset
|
|
||||||
wedge.u, wedge.v = uv_layer[loop_index].uv
|
|
||||||
wedge.v = 1.0 - wedge.v
|
|
||||||
wedges.append(wedge)
|
|
||||||
|
|
||||||
# Assign material indices to the wedges.
|
|
||||||
for triangle in object.data.loop_triangles:
|
|
||||||
for loop_index in triangle.loops:
|
|
||||||
wedges[loop_index].material_index = material_indices[triangle.material_index]
|
|
||||||
|
|
||||||
# Populate the list of wedges with unique wedges & build a look-up table of loop indices to wedge indices
|
|
||||||
wedge_indices = {}
|
|
||||||
loop_wedge_indices = [-1] * len(object.data.loops)
|
|
||||||
for loop_index, wedge in enumerate(wedges):
|
|
||||||
wedge_hash = hash(wedge)
|
|
||||||
if wedge_hash in wedge_indices:
|
|
||||||
loop_wedge_indices[loop_index] = wedge_indices[wedge_hash]
|
|
||||||
else:
|
|
||||||
wedge_index = len(psk.wedges)
|
|
||||||
wedge_indices[wedge_hash] = wedge_index
|
|
||||||
psk.wedges.append(wedge)
|
|
||||||
loop_wedge_indices[loop_index] = wedge_index
|
|
||||||
|
|
||||||
# FACES
|
|
||||||
poly_groups, groups = object.data.calc_smooth_groups(use_bitflags=True)
|
|
||||||
for f in object.data.loop_triangles:
|
|
||||||
face = Psk.Face()
|
|
||||||
face.material_index = material_indices[f.material_index]
|
|
||||||
face.wedge_indices[0] = loop_wedge_indices[f.loops[2]]
|
|
||||||
face.wedge_indices[1] = loop_wedge_indices[f.loops[1]]
|
|
||||||
face.wedge_indices[2] = loop_wedge_indices[f.loops[0]]
|
|
||||||
face.smoothing_groups = poly_groups[f.polygon_index]
|
|
||||||
psk.faces.append(face)
|
|
||||||
|
|
||||||
# WEIGHTS
|
|
||||||
if armature_object is not None:
|
|
||||||
# Because the vertex groups may contain entries for which there is no matching bone in the armature,
|
|
||||||
# we must filter them out and not export any weights for these vertex groups.
|
|
||||||
bone_names = [x.name for x in bones]
|
|
||||||
vertex_group_names = [x.name for x in object.vertex_groups]
|
|
||||||
vertex_group_bone_indices = dict()
|
|
||||||
for vertex_group_index, vertex_group_name in enumerate(vertex_group_names):
|
|
||||||
try:
vertex_group_bone_indices[vertex_group_index] = bone_names.index(vertex_group_name)
except ValueError:
# The vertex group does not have a matching bone in the list of bones to be exported.
# Check to see if there is an associated bone for this vertex group that exists in the armature.
# If there is, we can traverse the ancestors of that bone to find an alternate bone to use for
# weighting the vertices belonging to this vertex group.
if vertex_group_name in armature_object.data.bones:
bone = armature_object.data.bones[vertex_group_name]
while bone is not None:
try:
bone_index = bone_names.index(bone.name)
vertex_group_bone_indices[vertex_group_index] = bone_index
break
except ValueError:
bone = bone.parent
for vertex_group_index, vertex_group in enumerate(object.vertex_groups):
if vertex_group_index not in vertex_group_bone_indices:
# Vertex group has no associated bone, skip it.
continue
bone_index = vertex_group_bone_indices[vertex_group_index]
for vertex_index in range(len(object.data.vertices)):
try:
weight = vertex_group.weight(vertex_index)
except RuntimeError:
continue
if weight == 0.0:
continue
w = Psk.Weight()
w.bone_index = bone_index
w.point_index = vertex_offset + vertex_index
w.weight = weight
psk.weights.append(w)
return psk

try:
weight = vertex_group.weight(vertex_index)
except RuntimeError:
continue
if weight == 0.0:
continue
w = Psk.Weight()
w.bone_index = bone_index
w.point_index = vertex_offset + vertex_index
w.weight = weight
psk.weights.append(w)
vertices_assigned_weights[vertex_index] = True
|
# Assign vertices that have not been assigned weights to the root bone of the armature.
|
||||||
|
fallback_weight_bone_index = psx_bone_create_result.armature_object_root_bone_indices[armature_object]
|
||||||
|
for vertex_index, assigned in enumerate(vertices_assigned_weights):
|
||||||
|
if not assigned:
|
||||||
|
w = Psk.Weight()
|
||||||
|
w.bone_index = fallback_weight_bone_index
|
||||||
|
w.point_index = vertex_offset + vertex_index
|
||||||
|
w.weight = 1.0
|
||||||
|
psk.weights.append(w)
|
||||||
|
|
||||||
|
if evaluated_mesh_object is not None:
|
||||||
|
bpy.data.objects.remove(mesh_object)
|
||||||
|
del mesh_object
|
||||||
|
|
||||||
|
if evaluated_mesh_data is not None:
|
||||||
|
bpy.data.meshes.remove(mesh_data)
|
||||||
|
del mesh_data
|
||||||
|
|
||||||
|
context.window_manager.progress_update(object_index)
|
||||||
|
|
||||||
|
# Restore the original pose position of the armature objects.
|
||||||
|
for armature_object, pose_position in original_armature_object_pose_positions.items():
|
||||||
|
armature_data = typing_cast(Armature, armature_object.data)
|
||||||
|
armature_data.pose_position = pose_position
|
||||||
|
|
||||||
|
# https://github.com/DarklightGames/io_scene_psk_psa/issues/129.
|
||||||
|
psk.sort_and_normalize_weights()
|
||||||
|
|
||||||
|
context.window_manager.progress_end()
|
||||||
|
|
||||||
|
return PskBuildResult(psk, warnings)
|
||||||
|
|||||||
@@ -1,23 +1,23 @@
|
|||||||
|
from ctypes import Structure, c_uint32, c_float, c_int32, c_uint8, c_int8, c_int16, c_char, c_uint16
|
||||||
from typing import List
|
from typing import List
|
||||||
|
|
||||||
from ..data import *
|
from ..shared.data import Vector3, Quaternion, Color, Vector2, PsxBone
|
||||||
|
|
||||||
|
|
||||||
class Psk(object):
|
class Psk(object):
|
||||||
class Wedge(object):
|
class Wedge(object):
|
||||||
def __init__(self):
|
def __init__(self, point_index: int, u: float, v: float, material_index: int = 0):
|
||||||
self.point_index: int = 0
|
self.point_index: int = point_index
|
||||||
self.u: float = 0.0
|
self.u: float = u
|
||||||
self.v: float = 0.0
|
self.v: float = v
|
||||||
self.material_index: int = 0
|
self.material_index = material_index
|
||||||
|
|
||||||
def __hash__(self):
|
def __hash__(self):
|
||||||
return hash(f'{self.point_index}-{self.u}-{self.v}-{self.material_index}')
|
return hash(f'{self.point_index}-{self.u}-{self.v}-{self.material_index}')
|
||||||
|
|
||||||
class Wedge16(Structure):
|
class Wedge16(Structure):
|
||||||
_fields_ = [
|
_fields_ = [
|
||||||
('point_index', c_uint16),
|
('point_index', c_uint32),
|
||||||
('padding1', c_int16),
|
|
||||||
('u', c_float),
|
('u', c_float),
|
||||||
('v', c_float),
|
('v', c_float),
|
||||||
('material_index', c_uint8),
|
('material_index', c_uint8),
|
||||||
@@ -80,6 +80,19 @@ class Psk(object):
|
|||||||
('bone_index', c_int32),
|
('bone_index', c_int32),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
class MorphInfo(Structure):
|
||||||
|
_fields_ = [
|
||||||
|
('name', c_char * 64),
|
||||||
|
('vertex_count', c_int32)
|
||||||
|
]
|
||||||
|
|
||||||
|
class MorphData(Structure):
|
||||||
|
_fields_ = [
|
||||||
|
('position_delta', Vector3),
|
||||||
|
('tangent_z_delta', Vector3),
|
||||||
|
('point_index', c_int32)
|
||||||
|
]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def has_extra_uvs(self):
|
def has_extra_uvs(self):
|
||||||
return len(self.extra_uvs) > 0
|
return len(self.extra_uvs) > 0
|
||||||
@@ -92,13 +105,49 @@ class Psk(object):
|
|||||||
def has_vertex_normals(self):
|
def has_vertex_normals(self):
|
||||||
return len(self.vertex_normals) > 0
|
return len(self.vertex_normals) > 0
|
||||||
|
|
||||||
|
@property
|
||||||
|
def has_material_references(self):
|
||||||
|
return len(self.material_references) > 0
|
||||||
|
|
||||||
|
@property
|
||||||
|
def has_morph_data(self):
|
||||||
|
return len(self.morph_infos) > 0
|
||||||
|
|
||||||
|
def sort_and_normalize_weights(self):
|
||||||
|
self.weights.sort(key=lambda x: x.point_index)
|
||||||
|
|
||||||
|
weight_index = 0
|
||||||
|
weight_total = len(self.weights)
|
||||||
|
|
||||||
|
while weight_index < weight_total:
|
||||||
|
point_index = self.weights[weight_index].point_index
|
||||||
|
weight_sum = self.weights[weight_index].weight
|
||||||
|
point_weight_total = 1
|
||||||
|
|
||||||
|
# Calculate the sum of weights for the current point_index.
|
||||||
|
for i in range(weight_index + 1, weight_total):
|
||||||
|
if self.weights[i].point_index != point_index:
|
||||||
|
break
|
||||||
|
weight_sum += self.weights[i].weight
|
||||||
|
point_weight_total += 1
|
||||||
|
|
||||||
|
# Normalize the weights for the current point_index.
|
||||||
|
for i in range(weight_index, weight_index + point_weight_total):
|
||||||
|
self.weights[i].weight /= weight_sum
|
||||||
|
|
||||||
|
# Move to the next group of weights.
|
||||||
|
weight_index += point_weight_total
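# Illustrative example (not part of the file format): if a point has raw weights [3.0, 1.0] from two
# bones, the group sum is 4.0 and the stored weights become [0.75, 0.25] after normalization.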
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.points: List[Vector3] = []
|
self.points: List[Vector3] = []
|
||||||
self.wedges: List[Psk.Wedge] = []
|
self.wedges: List[Psk.Wedge] = []
|
||||||
self.faces: List[Psk.Face] = []
|
self.faces: List[Psk.Face] = []
|
||||||
self.materials: List[Psk.Material] = []
|
self.materials: List[Psk.Material] = []
|
||||||
self.weights: List[Psk.Weight] = []
|
self.weights: List[Psk.Weight] = []
|
||||||
self.bones: List[Psk.Bone] = []
|
self.bones: List[PsxBone] = []
|
||||||
self.extra_uvs: List[Vector2] = []
|
self.extra_uvs: List[Vector2] = []
|
||||||
self.vertex_colors: List[Color] = []
|
self.vertex_colors: List[Color] = []
|
||||||
self.vertex_normals: List[Vector3] = []
|
self.vertex_normals: List[Vector3] = []
|
||||||
|
self.morph_infos: List[Psk.MorphInfo] = []
|
||||||
|
self.morph_data: List[Psk.MorphData] = []
|
||||||
|
self.material_references: List[str] = []
|
||||||
|
|||||||
io_scene_psk_psa/psk/export/__init__.py (new file, 0 lines)
io_scene_psk_psa/psk/export/operators.py (new file, 555 lines)
@@ -0,0 +1,555 @@
|
|||||||
|
from pathlib import Path
|
||||||
|
from typing import Iterable, List, Optional, cast as typing_cast
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
from bpy.props import BoolProperty, StringProperty
|
||||||
|
from bpy.types import Context, Depsgraph, Material, Object, Operator, Scene
|
||||||
|
from bpy_extras.io_utils import ExportHelper
|
||||||
|
|
||||||
|
from .properties import PskExportMixin
|
||||||
|
from ..builder import (
|
||||||
|
PskBuildOptions,
|
||||||
|
build_psk,
|
||||||
|
get_materials_for_mesh_objects,
|
||||||
|
get_psk_input_objects_for_collection,
|
||||||
|
get_psk_input_objects_for_context,
|
||||||
|
)
|
||||||
|
from ..writer import write_psk
|
||||||
|
from ...shared.helpers import PsxBoneCollection, get_collection_export_operator_from_context, populate_bone_collection_list
|
||||||
|
from ...shared.ui import draw_bone_filter_mode
|
||||||
|
|
||||||
|
|
||||||
|
def populate_material_name_list(depsgraph: Depsgraph, mesh_objects: Iterable[Object], material_list):
|
||||||
|
materials = list(get_materials_for_mesh_objects(depsgraph, mesh_objects))
|
||||||
|
|
||||||
|
# Order the mesh object materials by the order of any existing entries in the material list.
|
||||||
|
# This way, if the user has already set up the material list, we don't change the order.
|
||||||
|
material_names = [x.material_name for x in material_list]
|
||||||
|
materials = get_sorted_materials_by_names(materials, material_names)
|
||||||
|
|
||||||
|
material_list.clear()
|
||||||
|
for index, material in enumerate(materials):
|
||||||
|
m = material_list.add()
|
||||||
|
m.material_name = material.name
|
||||||
|
m.index = index
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class PSK_OT_bone_collection_list_populate(Operator):
|
||||||
|
bl_idname = 'psk.bone_collection_list_populate'
|
||||||
|
bl_label = 'Populate Bone Collection List'
|
||||||
|
bl_description = 'Populate the bone collection list from the armature that will be used in this collection export'
|
||||||
|
bl_options = {'INTERNAL'}
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
export_operator = get_collection_export_operator_from_context(context)
|
||||||
|
if export_operator is None:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
if context.collection is None:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, 'No active collection')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
try:
|
||||||
|
input_objects = get_psk_input_objects_for_collection(context.collection)
|
||||||
|
except RuntimeError as e:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||||
|
return {'CANCELLED'}
|
||||||
|
if not input_objects.armature_objects:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, 'No armature modifiers found on mesh objects')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
populate_bone_collection_list(export_operator.bone_collection_list, input_objects.armature_objects)
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class PSK_OT_bone_collection_list_select_all(Operator):
|
||||||
|
bl_idname = 'psk.bone_collection_list_select_all'
|
||||||
|
bl_label = 'Select All'
|
||||||
|
bl_description = 'Select all bone collections'
|
||||||
|
bl_options = {'INTERNAL'}
|
||||||
|
|
||||||
|
is_selected: BoolProperty(default=True)
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
export_operator = get_collection_export_operator_from_context(context)
|
||||||
|
if export_operator is None:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
for item in export_operator.bone_collection_list:
|
||||||
|
item.is_selected = self.is_selected
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class PSK_OT_populate_material_name_list(Operator):
|
||||||
|
bl_idname = 'psk.export_populate_material_name_list'
|
||||||
|
bl_label = 'Populate Material Name List'
|
||||||
|
bl_description = 'Populate the material name list from the objects that will be used in this export'
|
||||||
|
bl_options = {'INTERNAL'}
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
export_operator = get_collection_export_operator_from_context(context)
|
||||||
|
if export_operator is None:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
depsgraph = context.evaluated_depsgraph_get()
|
||||||
|
assert context.collection
|
||||||
|
input_objects = get_psk_input_objects_for_collection(context.collection)
|
||||||
|
try:
|
||||||
|
populate_material_name_list(depsgraph, [x.obj for x in input_objects.mesh_dfs_objects], export_operator.material_name_list)
|
||||||
|
except RuntimeError as e:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||||
|
return {'CANCELLED'}
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def material_list_names_search_cb(self, context: Context, edit_text: str):
|
||||||
|
for material in bpy.data.materials:
|
||||||
|
yield material.name
|
||||||
|
|
||||||
|
|
||||||
|
class PSK_OT_material_list_name_add(Operator):
|
||||||
|
bl_idname = 'psk.export_material_name_list_item_add'
|
||||||
|
bl_label = 'Add Material'
|
||||||
|
bl_description = 'Add a material to the material name list (useful if you want to add a material slot that is not actually used in the mesh)'
|
||||||
|
bl_options = {'INTERNAL'}
|
||||||
|
|
||||||
|
name: StringProperty(search=material_list_names_search_cb, name='Material Name', default='None')
|
||||||
|
|
||||||
|
def invoke(self, context, event):
|
||||||
|
assert context.window_manager
|
||||||
|
return context.window_manager.invoke_props_dialog(self)
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
export_operator = get_collection_export_operator_from_context(context)
|
||||||
|
if export_operator is None:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
m = export_operator.material_name_list.add()
|
||||||
|
m.material_name = self.name
|
||||||
|
m.index = len(export_operator.material_name_list) - 1
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class PSK_OT_material_list_move_up(Operator):
|
||||||
|
bl_idname = 'psk.export_material_list_item_move_up'
|
||||||
|
bl_label = 'Move Up'
|
||||||
|
bl_options = {'INTERNAL'}
|
||||||
|
bl_description = 'Move the selected material up one slot'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
pg = getattr(context.scene, 'psk_export')
|
||||||
|
return pg.material_name_list_index > 0
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
pg = getattr(context.scene, 'psk_export')
|
||||||
|
pg.material_name_list.move(pg.material_name_list_index, pg.material_name_list_index - 1)
|
||||||
|
pg.material_name_list_index -= 1
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class PSK_OT_material_list_move_down(Operator):
|
||||||
|
bl_idname = 'psk.export_material_list_item_move_down'
|
||||||
|
bl_label = 'Move Down'
|
||||||
|
bl_options = {'INTERNAL'}
|
||||||
|
bl_description = 'Move the selected material down one slot'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
pg = getattr(context.scene, 'psk_export')
|
||||||
|
return pg.material_name_list_index < len(pg.material_name_list) - 1
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
pg = getattr(context.scene, 'psk_export')
|
||||||
|
pg.material_name_list.move(pg.material_name_list_index, pg.material_name_list_index + 1)
|
||||||
|
pg.material_name_list_index += 1
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class PSK_OT_material_list_name_move_up(Operator):
|
||||||
|
bl_idname = 'psk.export_material_name_list_item_move_up'
|
||||||
|
bl_label = 'Move Up'
|
||||||
|
bl_options = {'INTERNAL'}
|
||||||
|
bl_description = 'Move the selected material name up one slot'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
export_operator = get_collection_export_operator_from_context(context)
|
||||||
|
if export_operator is None:
|
||||||
|
return False
|
||||||
|
return export_operator.material_name_list_index > 0
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
export_operator = get_collection_export_operator_from_context(context)
|
||||||
|
if export_operator is None:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
export_operator.material_name_list.move(export_operator.material_name_list_index, export_operator.material_name_list_index - 1)
|
||||||
|
export_operator.material_name_list_index -= 1
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class PSK_OT_material_list_name_move_down(Operator):
|
||||||
|
bl_idname = 'psk.export_material_name_list_item_move_down'
|
||||||
|
bl_label = 'Move Down'
|
||||||
|
bl_options = {'INTERNAL'}
|
||||||
|
bl_description = 'Move the selected material name down one slot'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
export_operator = get_collection_export_operator_from_context(context)
|
||||||
|
if export_operator is None:
|
||||||
|
return False
|
||||||
|
return export_operator.material_name_list_index < len(export_operator.material_name_list) - 1
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
export_operator = get_collection_export_operator_from_context(context)
|
||||||
|
if export_operator is None:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, 'No valid export operator found in context')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
export_operator.material_name_list.move(export_operator.material_name_list_index, export_operator.material_name_list_index + 1)
|
||||||
|
export_operator.material_name_list_index += 1
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
def get_sorted_materials_by_names(materials: Iterable[Material], material_names: List[str]) -> List[Material]:
|
||||||
|
"""
|
||||||
|
Sorts the materials by the order of the material names list. Any materials not in the list will be appended to the
|
||||||
|
end of the list in the order they are found.
|
||||||
|
|
||||||
|
@param materials: A list of materials to sort
|
||||||
|
@param material_names: A list of material names to sort by
|
||||||
|
@return: A sorted list of materials
|
||||||
|
"""
|
||||||
|
materials_in_collection = [m for m in materials if m.name in material_names]
|
||||||
|
materials_not_in_collection = [m for m in materials if m.name not in material_names]
|
||||||
|
materials_in_collection = sorted(materials_in_collection, key=lambda x: material_names.index(x.name))
|
||||||
|
return materials_in_collection + materials_not_in_collection
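# Hypothetical usage sketch (material names are illustrative, not part of the add-on):
# with materials named ['Skin', 'Cloth', 'Metal'] and material_names == ['Metal', 'Skin'], the
# result is ordered ['Metal', 'Skin', 'Cloth'] -- listed names first, in list order, then the rest.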
|
||||||
|
|
||||||
|
|
||||||
|
def get_psk_build_options_from_property_group(scene: Scene, pg: PskExportMixin) -> PskBuildOptions:
|
||||||
|
options = PskBuildOptions()
|
||||||
|
options.object_eval_state = pg.object_eval_state
|
||||||
|
options.export_space = pg.export_space
|
||||||
|
options.bone_filter_mode = pg.bone_filter_mode
|
||||||
|
options.bone_collection_indices = [PsxBoneCollection(x.armature_object_name, x.armature_data_name, x.index) for x in pg.bone_collection_list if x.is_selected]
|
||||||
|
options.root_bone_name = pg.root_bone_name
|
||||||
|
options.material_order_mode = pg.material_order_mode
|
||||||
|
options.material_name_list = [x.material_name for x in pg.material_name_list]
|
||||||
|
|
||||||
|
match pg.transform_source:
|
||||||
|
case 'SCENE':
|
||||||
|
transform_source = getattr(scene, 'psx_export')
|
||||||
|
case 'CUSTOM':
|
||||||
|
transform_source = pg
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid transform source: {pg.transform_source}'
|
||||||
|
|
||||||
|
options.scale = transform_source.scale
|
||||||
|
options.forward_axis = transform_source.forward_axis
|
||||||
|
options.up_axis = transform_source.up_axis
|
||||||
|
|
||||||
|
return options
|
||||||
|
|
||||||
|
|
||||||
|
class PSK_OT_export_collection(Operator, ExportHelper, PskExportMixin):
|
||||||
|
bl_idname = 'psk.export_collection'
|
||||||
|
bl_label = 'Export'
|
||||||
|
bl_options = {'INTERNAL'}
|
||||||
|
filename_ext = '.psk'
|
||||||
|
filter_glob: StringProperty(default='*.psk', options={'HIDDEN'})
|
||||||
|
filepath: StringProperty(
|
||||||
|
name='File Path',
|
||||||
|
description='File path used for exporting the PSK file',
|
||||||
|
maxlen=1024,
|
||||||
|
default='',
|
||||||
|
subtype='FILE_PATH')
|
||||||
|
collection: StringProperty(options={'HIDDEN'})
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
collection = bpy.data.collections.get(self.collection, None)
|
||||||
|
|
||||||
|
if collection is None:
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
try:
|
||||||
|
input_objects = get_psk_input_objects_for_collection(collection)
|
||||||
|
except RuntimeError as e:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
options = get_psk_build_options_from_property_group(context.scene, self)
|
||||||
|
filepath = str(Path(self.filepath).resolve())
|
||||||
|
|
||||||
|
try:
|
||||||
|
result = build_psk(context, input_objects, options)
|
||||||
|
for warning in result.warnings:
|
||||||
|
self.report({'WARNING'}, warning)
|
||||||
|
write_psk(result.psk, filepath)
|
||||||
|
if len(result.warnings) > 0:
|
||||||
|
self.report({'WARNING'}, f'PSK export successful with {len(result.warnings)} warnings')
|
||||||
|
else:
|
||||||
|
self.report({'INFO'}, f'PSK export successful')
|
||||||
|
except RuntimeError as e:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
def draw(self, context: Context):
|
||||||
|
layout = self.layout
|
||||||
|
|
||||||
|
assert layout is not None
|
||||||
|
|
||||||
|
flow = layout.grid_flow(row_major=True)
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
|
||||||
|
# Mesh
|
||||||
|
mesh_header, mesh_panel = layout.panel('Mesh', default_closed=False)
|
||||||
|
mesh_header.label(text='Mesh', icon='MESH_DATA')
|
||||||
|
if mesh_panel:
|
||||||
|
flow = mesh_panel.grid_flow(row_major=True)
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
flow.prop(self, 'object_eval_state', text='Data')
|
||||||
|
|
||||||
|
# Bones
|
||||||
|
bones_header, bones_panel = layout.panel('Bones', default_closed=False)
|
||||||
|
bones_header.label(text='Bones', icon='BONE_DATA')
|
||||||
|
if bones_panel:
|
||||||
|
draw_bone_filter_mode(bones_panel, self, True)
|
||||||
|
|
||||||
|
if self.bone_filter_mode == 'BONE_COLLECTIONS':
|
||||||
|
row = bones_panel.row()
|
||||||
|
rows = max(3, min(len(self.bone_collection_list), 10))
|
||||||
|
row.template_list('PSX_UL_bone_collection_list', '', self, 'bone_collection_list', self, 'bone_collection_list_index', rows=rows)
|
||||||
|
col = row.column(align=True)
|
||||||
|
col.operator(PSK_OT_bone_collection_list_populate.bl_idname, text='', icon='FILE_REFRESH')
|
||||||
|
col.separator()
|
||||||
|
op = col.operator(PSK_OT_bone_collection_list_select_all.bl_idname, text='', icon='CHECKBOX_HLT')
|
||||||
|
op.is_selected = True
|
||||||
|
op = col.operator(PSK_OT_bone_collection_list_select_all.bl_idname, text='', icon='CHECKBOX_DEHLT')
|
||||||
|
op.is_selected = False
|
||||||
|
|
||||||
|
advanced_bones_header, advanced_bones_panel = bones_panel.panel('Advanced', default_closed=True)
|
||||||
|
advanced_bones_header.label(text='Advanced')
|
||||||
|
if advanced_bones_panel:
|
||||||
|
flow = advanced_bones_panel.grid_flow(row_major=True)
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
flow.prop(self, 'root_bone_name')
|
||||||
|
|
||||||
|
# Materials
|
||||||
|
materials_header, materials_panel = layout.panel('Materials', default_closed=False)
|
||||||
|
materials_header.label(text='Materials', icon='MATERIAL')
|
||||||
|
|
||||||
|
if materials_panel:
|
||||||
|
flow = materials_panel.grid_flow(row_major=True)
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
flow.prop(self, 'material_order_mode', text='Material Order')
|
||||||
|
|
||||||
|
if self.material_order_mode == 'MANUAL':
|
||||||
|
rows = max(3, min(len(self.material_name_list), 10))
|
||||||
|
row = materials_panel.row()
|
||||||
|
row.template_list('PSK_UL_material_names', '', self, 'material_name_list', self, 'material_name_list_index', rows=rows)
|
||||||
|
col = row.column(align=True)
|
||||||
|
col.operator(PSK_OT_populate_material_name_list.bl_idname, text='', icon='FILE_REFRESH')
|
||||||
|
col.separator()
|
||||||
|
col.operator(PSK_OT_material_list_name_move_up.bl_idname, text='', icon='TRIA_UP')
|
||||||
|
col.operator(PSK_OT_material_list_name_move_down.bl_idname, text='', icon='TRIA_DOWN')
|
||||||
|
col.separator()
|
||||||
|
col.operator(PSK_OT_material_list_name_add.bl_idname, text='', icon='ADD')
|
||||||
|
|
||||||
|
# Transform
|
||||||
|
transform_header, transform_panel = layout.panel('Transform', default_closed=False)
|
||||||
|
transform_header.label(text='Transform')
|
||||||
|
if transform_panel:
|
||||||
|
flow = transform_panel.grid_flow(row_major=True)
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
flow.prop(self, 'export_space')
|
||||||
|
flow.prop(self, 'transform_source')
|
||||||
|
|
||||||
|
flow = transform_panel.grid_flow(row_major=True)
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
|
||||||
|
match self.transform_source:
|
||||||
|
case 'SCENE':
|
||||||
|
transform_source = getattr(context.scene, 'psx_export')
|
||||||
|
flow.enabled = False
|
||||||
|
case 'CUSTOM':
|
||||||
|
transform_source = self
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid transform source: {self.transform_source}'
|
||||||
|
|
||||||
|
flow.prop(transform_source, 'scale')
|
||||||
|
flow.prop(transform_source, 'forward_axis')
|
||||||
|
flow.prop(transform_source, 'up_axis')
|
||||||
|
|
||||||
|
|
||||||
|
class PSK_OT_export(Operator, ExportHelper):
|
||||||
|
bl_idname = 'psk.export'
|
||||||
|
bl_label = 'Export'
|
||||||
|
bl_options = {'INTERNAL', 'UNDO'}
|
||||||
|
bl_description = 'Export selected meshes to PSK'
|
||||||
|
filename_ext = '.psk'
|
||||||
|
filter_glob: StringProperty(default='*.psk', options={'HIDDEN'})
|
||||||
|
filepath: StringProperty(
|
||||||
|
name='File Path',
|
||||||
|
description='File path used for exporting the PSK file',
|
||||||
|
maxlen=1024,
|
||||||
|
default='')
|
||||||
|
|
||||||
|
def invoke(self, context, event):
|
||||||
|
try:
|
||||||
|
input_objects = get_psk_input_objects_for_context(context)
|
||||||
|
except RuntimeError as e:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
pg = getattr(context.scene, 'psk_export')
|
||||||
|
|
||||||
|
populate_bone_collection_list(pg.bone_collection_list, input_objects.armature_objects)
|
||||||
|
|
||||||
|
depsgraph = context.evaluated_depsgraph_get()
|
||||||
|
|
||||||
|
try:
|
||||||
|
populate_material_name_list(depsgraph, [x.obj for x in input_objects.mesh_dfs_objects], pg.material_name_list)
|
||||||
|
except RuntimeError as e:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
assert context.window_manager
|
||||||
|
context.window_manager.fileselect_add(self)
|
||||||
|
|
||||||
|
return {'RUNNING_MODAL'}
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
layout = self.layout
|
||||||
|
|
||||||
|
assert layout
|
||||||
|
|
||||||
|
pg = getattr(context.scene, 'psk_export')
|
||||||
|
|
||||||
|
# Mesh
|
||||||
|
mesh_header, mesh_panel = layout.panel('Mesh', default_closed=False)
|
||||||
|
mesh_header.label(text='Mesh', icon='MESH_DATA')
|
||||||
|
if mesh_panel:
|
||||||
|
flow = mesh_panel.grid_flow(row_major=True)
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
flow.prop(pg, 'object_eval_state', text='Data')
|
||||||
|
|
||||||
|
# Bones
|
||||||
|
bones_header, bones_panel = layout.panel('Bones', default_closed=False)
|
||||||
|
bones_header.label(text='Bones', icon='BONE_DATA')
|
||||||
|
if bones_panel:
|
||||||
|
draw_bone_filter_mode(bones_panel, pg)
|
||||||
|
if pg.bone_filter_mode == 'BONE_COLLECTIONS':
|
||||||
|
row = bones_panel.row()
|
||||||
|
rows = max(3, min(len(pg.bone_collection_list), 10))
|
||||||
|
row.template_list('PSX_UL_bone_collection_list', '', pg, 'bone_collection_list', pg, 'bone_collection_list_index', rows=rows)
|
||||||
|
bones_advanced_header, bones_advanced_panel = bones_panel.panel('Advanced', default_closed=True)
|
||||||
|
bones_advanced_header.label(text='Advanced')
|
||||||
|
if bones_advanced_panel:
|
||||||
|
flow = bones_advanced_panel.grid_flow(row_major=True)
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
flow.prop(pg, 'root_bone_name')
|
||||||
|
|
||||||
|
# Materials
|
||||||
|
materials_header, materials_panel = layout.panel('Materials', default_closed=False)
|
||||||
|
materials_header.label(text='Materials', icon='MATERIAL')
|
||||||
|
if materials_panel:
|
||||||
|
flow = materials_panel.grid_flow(row_major=True)
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
flow.prop(pg, 'material_order_mode', text='Material Order')
|
||||||
|
|
||||||
|
if pg.material_order_mode == 'MANUAL':
|
||||||
|
row = materials_panel.row()
|
||||||
|
rows = max(3, min(len(pg.material_name_list), 10))
|
||||||
|
row.template_list('PSK_UL_material_names', '', pg, 'material_name_list', pg, 'material_name_list_index', rows=rows)
|
||||||
|
col = row.column(align=True)
|
||||||
|
col.operator(PSK_OT_material_list_move_up.bl_idname, text='', icon='TRIA_UP')
|
||||||
|
col.operator(PSK_OT_material_list_move_down.bl_idname, text='', icon='TRIA_DOWN')
|
||||||
|
|
||||||
|
# Transform
|
||||||
|
transform_header, transform_panel = layout.panel('Transform', default_closed=False)
|
||||||
|
transform_header.label(text='Transform')
|
||||||
|
if transform_panel:
|
||||||
|
flow = transform_panel.grid_flow(row_major=True)
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
flow.prop(pg, 'export_space')
|
||||||
|
flow.prop(pg, 'transform_source')
|
||||||
|
|
||||||
|
flow = transform_panel.grid_flow(row_major=True)
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
|
||||||
|
match pg.transform_source:
|
||||||
|
case 'SCENE':
|
||||||
|
transform_source = getattr(context.scene, 'psx_export')
|
||||||
|
flow.enabled = False
|
||||||
|
case 'CUSTOM':
|
||||||
|
transform_source = pg
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid transform source: {pg.transform_source}'
|
||||||
|
|
||||||
|
flow.prop(transform_source, 'scale')
|
||||||
|
flow.prop(transform_source, 'forward_axis')
|
||||||
|
flow.prop(transform_source, 'up_axis')
|
||||||
|
|
||||||
|
# Extended Format
|
||||||
|
extended_format_header, extended_format_panel = layout.panel('Extended Format', default_closed=False)
|
||||||
|
extended_format_header.label(text='Extended Format')
|
||||||
|
if extended_format_panel:
|
||||||
|
flow = extended_format_panel.grid_flow(row_major=True)
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
flow.prop(pg, 'should_export_vertex_normals', text='Vertex Normals')
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
pg = getattr(context.scene, 'psk_export')
|
||||||
|
|
||||||
|
assert context.scene
|
||||||
|
|
||||||
|
input_objects = get_psk_input_objects_for_context(context)
|
||||||
|
options = get_psk_build_options_from_property_group(context.scene, pg)
|
||||||
|
|
||||||
|
try:
|
||||||
|
result = build_psk(context, input_objects, options)
|
||||||
|
for warning in result.warnings:
|
||||||
|
self.report({'WARNING'}, warning)
|
||||||
|
write_psk(result.psk, self.filepath)
|
||||||
|
if len(result.warnings) > 0:
|
||||||
|
self.report({'WARNING'}, f'PSK export successful with {len(result.warnings)} warnings')
|
||||||
|
else:
|
||||||
|
self.report({'INFO'}, f'PSK export successful')
|
||||||
|
except RuntimeError as e:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
_classes = (
|
||||||
|
PSK_OT_material_list_move_up,
|
||||||
|
PSK_OT_material_list_move_down,
|
||||||
|
PSK_OT_export,
|
||||||
|
PSK_OT_export_collection,
|
||||||
|
PSK_OT_bone_collection_list_populate,
|
||||||
|
PSK_OT_bone_collection_list_select_all,
|
||||||
|
PSK_OT_populate_material_name_list,
|
||||||
|
PSK_OT_material_list_name_move_up,
|
||||||
|
PSK_OT_material_list_name_move_down,
|
||||||
|
PSK_OT_material_list_name_add,
|
||||||
|
)
|
||||||
|
|
||||||
|
from bpy.utils import register_classes_factory
|
||||||
|
register, unregister = register_classes_factory(_classes)
|
||||||
io_scene_psk_psa/psk/export/properties.py (new file, 78 lines)
@@ -0,0 +1,78 @@
|
|||||||
|
from bpy.types import Context
|
||||||
|
from bpy.props import (
|
||||||
|
BoolProperty,
|
||||||
|
CollectionProperty,
|
||||||
|
EnumProperty,
|
||||||
|
IntProperty,
|
||||||
|
PointerProperty,
|
||||||
|
StringProperty,
|
||||||
|
)
|
||||||
|
from bpy.types import Material, PropertyGroup
|
||||||
|
|
||||||
|
from ...shared.helpers import get_collection_export_operator_from_context
|
||||||
|
from ...shared.types import ExportSpaceMixin, TransformMixin, PsxBoneExportMixin
|
||||||
|
|
||||||
|
object_eval_state_items = (
|
||||||
|
('EVALUATED', 'Evaluated', 'Use data from fully evaluated object'),
|
||||||
|
('ORIGINAL', 'Original', 'Use data from original object with no modifiers applied'),
|
||||||
|
)
|
||||||
|
|
||||||
|
material_order_mode_items = (
|
||||||
|
('AUTOMATIC', 'Automatic', 'Automatically order the materials'),
|
||||||
|
('MANUAL', 'Manual', 'Manually arrange the materials'),
|
||||||
|
)
|
||||||
|
|
||||||
|
transform_source_items = (
|
||||||
|
('SCENE', 'Scene', 'Use the scene transform settings'),
|
||||||
|
('CUSTOM', 'Custom', 'Use custom transform settings'),
|
||||||
|
)
|
||||||
|
|
||||||
|
class PSK_PG_material_list_item(PropertyGroup):
|
||||||
|
material: PointerProperty(type=Material)
|
||||||
|
index: IntProperty()
|
||||||
|
|
||||||
|
|
||||||
|
class PSK_PG_material_name_list_item(PropertyGroup):
|
||||||
|
material_name: StringProperty()
|
||||||
|
index: IntProperty()
|
||||||
|
|
||||||
|
|
||||||
|
class PskExportMixin(ExportSpaceMixin, TransformMixin, PsxBoneExportMixin):
|
||||||
|
object_eval_state: EnumProperty(
|
||||||
|
items=object_eval_state_items,
|
||||||
|
name='Object Evaluation State',
|
||||||
|
default='EVALUATED'
|
||||||
|
)
|
||||||
|
material_order_mode: EnumProperty(
|
||||||
|
name='Material Order',
|
||||||
|
description='The order in which to export the materials',
|
||||||
|
items=material_order_mode_items,
|
||||||
|
default='AUTOMATIC'
|
||||||
|
)
|
||||||
|
material_name_list: CollectionProperty(type=PSK_PG_material_name_list_item)
|
||||||
|
material_name_list_index: IntProperty(default=0)
|
||||||
|
should_export_vertex_normals: BoolProperty(
|
||||||
|
name='Export Vertex Normals',
|
||||||
|
default=False,
|
||||||
|
description='Export VTXNORMS section.'
|
||||||
|
)
|
||||||
|
transform_source: EnumProperty(
|
||||||
|
items=transform_source_items,
|
||||||
|
name='Transform Source',
|
||||||
|
default='SCENE'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class PSK_PG_export(PropertyGroup, PskExportMixin):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
_classes = (
|
||||||
|
PSK_PG_material_list_item,
|
||||||
|
PSK_PG_material_name_list_item,
|
||||||
|
PSK_PG_export,
|
||||||
|
)
|
||||||
|
|
||||||
|
from bpy.utils import register_classes_factory
|
||||||
|
register, unregister = register_classes_factory(_classes)
|
||||||
|
|
||||||
io_scene_psk_psa/psk/export/ui.py (new file, 18 lines)
@@ -0,0 +1,18 @@
|
|||||||
|
import bpy
|
||||||
|
from bpy.types import UIList
|
||||||
|
|
||||||
|
|
||||||
|
class PSK_UL_material_names(UIList):
|
||||||
|
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
|
||||||
|
row = layout.row()
|
||||||
|
material = bpy.data.materials.get(item.material_name, None)
|
||||||
|
icon_value = layout.icon(material) if material else 0
|
||||||
|
row.prop(item, 'material_name', text='', emboss=False, icon_value=icon_value, icon='BLANK1' if icon_value == 0 else 'NONE')
|
||||||
|
|
||||||
|
|
||||||
|
_classes = (
|
||||||
|
PSK_UL_material_names,
|
||||||
|
)
|
||||||
|
|
||||||
|
from bpy.utils import register_classes_factory
|
||||||
|
register, unregister = register_classes_factory(_classes)
|
||||||
@@ -1,160 +0,0 @@
|
|||||||
from typing import Type
|
|
||||||
|
|
||||||
from bpy.props import StringProperty, CollectionProperty, IntProperty, EnumProperty
|
|
||||||
from bpy.types import Operator, PropertyGroup
|
|
||||||
from bpy_extras.io_utils import ExportHelper
|
|
||||||
|
|
||||||
from .builder import PskBuilder, PskBuilderOptions
|
|
||||||
from .data import *
|
|
||||||
from ..helpers import populate_bone_group_list
|
|
||||||
from ..types import BoneGroupListItem
|
|
||||||
|
|
||||||
MAX_WEDGE_COUNT = 65536
|
|
||||||
MAX_POINT_COUNT = 4294967296
|
|
||||||
MAX_BONE_COUNT = 256
|
|
||||||
MAX_MATERIAL_COUNT = 256
|
|
||||||
|
|
||||||
|
|
||||||
class PskExporter(object):
|
|
||||||
|
|
||||||
def __init__(self, psk: Psk):
|
|
||||||
self.psk: Psk = psk
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None):
|
|
||||||
section = Section()
|
|
||||||
section.name = name
|
|
||||||
if data_type is not None and data is not None:
|
|
||||||
section.data_size = sizeof(data_type)
|
|
||||||
section.data_count = len(data)
|
|
||||||
fp.write(section)
|
|
||||||
if data is not None:
|
|
||||||
for datum in data:
|
|
||||||
fp.write(datum)
|
|
||||||
|
|
||||||
def export(self, path: str):
|
|
||||||
if len(self.psk.wedges) > MAX_WEDGE_COUNT:
|
|
||||||
raise RuntimeError(f'Number of wedges ({len(self.psk.wedges)}) exceeds limit of {MAX_WEDGE_COUNT}')
|
|
||||||
if len(self.psk.bones) > MAX_BONE_COUNT:
|
|
||||||
raise RuntimeError(f'Number of bones ({len(self.psk.bones)}) exceeds limit of {MAX_BONE_COUNT}')
|
|
||||||
if len(self.psk.points) > MAX_POINT_COUNT:
|
|
||||||
raise RuntimeError(f'Numbers of vertices ({len(self.psk.points)}) exceeds limit of {MAX_POINT_COUNT}')
|
|
||||||
if len(self.psk.materials) > MAX_MATERIAL_COUNT:
|
|
||||||
raise RuntimeError(f'Number of materials ({len(self.psk.materials)}) exceeds limit of {MAX_MATERIAL_COUNT}')
|
|
||||||
|
|
||||||
with open(path, 'wb') as fp:
|
|
||||||
self.write_section(fp, b'ACTRHEAD')
|
|
||||||
self.write_section(fp, b'PNTS0000', Vector3, self.psk.points)
|
|
||||||
|
|
||||||
wedges = []
|
|
||||||
for index, w in enumerate(self.psk.wedges):
|
|
||||||
wedge = Psk.Wedge16()
|
|
||||||
wedge.material_index = w.material_index
|
|
||||||
wedge.u = w.u
|
|
||||||
wedge.v = w.v
|
|
||||||
wedge.point_index = w.point_index
|
|
||||||
wedges.append(wedge)
|
|
||||||
|
|
||||||
self.write_section(fp, b'VTXW0000', Psk.Wedge16, wedges)
|
|
||||||
self.write_section(fp, b'FACE0000', Psk.Face, self.psk.faces)
|
|
||||||
self.write_section(fp, b'MATT0000', Psk.Material, self.psk.materials)
|
|
||||||
self.write_section(fp, b'REFSKELT', Psk.Bone, self.psk.bones)
|
|
||||||
self.write_section(fp, b'RAWWEIGHTS', Psk.Weight, self.psk.weights)
|
|
||||||
|
|
||||||
|
|
||||||
def is_bone_filter_mode_item_available(context, identifier):
|
|
||||||
input_objects = PskBuilder.get_input_objects(context)
|
|
||||||
armature_object = input_objects.armature_object
|
|
||||||
if identifier == 'BONE_GROUPS':
|
|
||||||
if not armature_object or not armature_object.pose or not armature_object.pose.bone_groups:
|
|
||||||
return False
|
|
||||||
# else if... you can set up other conditions if you add more options
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
class PskExportOperator(Operator, ExportHelper):
|
|
||||||
bl_idname = 'export.psk'
|
|
||||||
bl_label = 'Export'
|
|
||||||
bl_options = {'INTERNAL', 'UNDO'}
|
|
||||||
__doc__ = 'Export mesh and armature to PSK'
|
|
||||||
filename_ext = '.psk'
|
|
||||||
filter_glob: StringProperty(default='*.psk', options={'HIDDEN'})
|
|
||||||
|
|
||||||
filepath: StringProperty(
|
|
||||||
name='File Path',
|
|
||||||
description='File path used for exporting the PSK file',
|
|
||||||
maxlen=1024,
|
|
||||||
default='')
|
|
||||||
|
|
||||||
def invoke(self, context, event):
|
|
||||||
try:
|
|
||||||
input_objects = PskBuilder.get_input_objects(context)
|
|
||||||
except RuntimeError as e:
|
|
||||||
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
|
||||||
return {'CANCELLED'}
|
|
||||||
|
|
||||||
pg = context.scene.psk_export
|
|
||||||
|
|
||||||
# Populate bone groups list.
|
|
||||||
populate_bone_group_list(input_objects.armature_object, pg.bone_group_list)
|
|
||||||
|
|
||||||
context.window_manager.fileselect_add(self)
|
|
||||||
|
|
||||||
return {'RUNNING_MODAL'}
|
|
||||||
|
|
||||||
def draw(self, context):
|
|
||||||
layout = self.layout
|
|
||||||
scene = context.scene
|
|
||||||
pg = scene.psk_export
|
|
||||||
|
|
||||||
# BONES
|
|
||||||
box = layout.box()
|
|
||||||
box.label(text='Bones', icon='BONE_DATA')
|
|
||||||
bone_filter_mode_items = pg.bl_rna.properties['bone_filter_mode'].enum_items_static
|
|
||||||
row = box.row(align=True)
|
|
||||||
for item in bone_filter_mode_items:
|
|
||||||
identifier = item.identifier
|
|
||||||
item_layout = row.row(align=True)
|
|
||||||
item_layout.prop_enum(pg, 'bone_filter_mode', item.identifier)
|
|
||||||
item_layout.enabled = is_bone_filter_mode_item_available(context, identifier)
|
|
||||||
|
|
||||||
if pg.bone_filter_mode == 'BONE_GROUPS':
|
|
||||||
row = box.row()
|
|
||||||
rows = max(3, min(len(pg.bone_group_list), 10))
|
|
||||||
row.template_list('PSX_UL_BoneGroupList', '', pg, 'bone_group_list', pg, 'bone_group_list_index', rows=rows)
|
|
||||||
|
|
||||||
def execute(self, context):
|
|
||||||
pg = context.scene.psk_export
|
|
||||||
builder = PskBuilder()
|
|
||||||
options = PskBuilderOptions()
|
|
||||||
options.bone_filter_mode = pg.bone_filter_mode
|
|
||||||
options.bone_group_indices = [x.index for x in pg.bone_group_list if x.is_selected]
|
|
||||||
try:
|
|
||||||
psk = builder.build(context, options)
|
|
||||||
exporter = PskExporter(psk)
|
|
||||||
exporter.export(self.filepath)
|
|
||||||
except RuntimeError as e:
|
|
||||||
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
|
||||||
return {'CANCELLED'}
|
|
||||||
return {'FINISHED'}
|
|
||||||
|
|
||||||
|
|
||||||
class PskExportPropertyGroup(PropertyGroup):
|
|
||||||
bone_filter_mode: EnumProperty(
|
|
||||||
name='Bone Filter',
|
|
||||||
options=set(),
|
|
||||||
description='',
|
|
||||||
items=(
|
|
||||||
('ALL', 'All', 'All bones will be exported.'),
|
|
||||||
('BONE_GROUPS', 'Bone Groups',
|
|
||||||
'Only bones belonging to the selected bone groups and their ancestors will be exported.')
|
|
||||||
)
|
|
||||||
)
|
|
||||||
bone_group_list: CollectionProperty(type=BoneGroupListItem)
|
|
||||||
bone_group_list_index: IntProperty(default=0)
|
|
||||||
|
|
||||||
|
|
||||||
classes = (
|
|
||||||
PskExportOperator,
|
|
||||||
PskExportPropertyGroup
|
|
||||||
)
|
|
||||||
0 io_scene_psk_psa/psk/import_/__init__.py Normal file
185 io_scene_psk_psa/psk/import_/operators.py Normal file
@@ -0,0 +1,185 @@
|
|||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from bpy.props import CollectionProperty, StringProperty
|
||||||
|
from bpy.types import Context, FileHandler, Operator, OperatorFileListElement, UILayout
|
||||||
|
from bpy_extras.io_utils import ImportHelper
|
||||||
|
|
||||||
|
from ..importer import PskImportOptions, import_psk
|
||||||
|
from ..properties import PskImportMixin
|
||||||
|
from ..reader import read_psk
|
||||||
|
|
||||||
|
|
||||||
|
def get_psk_import_options_from_properties(property_group: PskImportMixin):
|
||||||
|
options = PskImportOptions()
|
||||||
|
options.should_import_mesh = property_group.should_import_mesh
|
||||||
|
options.should_import_extra_uvs = property_group.should_import_extra_uvs
|
||||||
|
options.should_import_vertex_colors = property_group.should_import_vertex_colors
|
||||||
|
options.should_import_vertex_normals = property_group.should_import_vertex_normals
|
||||||
|
options.vertex_color_space = property_group.vertex_color_space
|
||||||
|
options.should_import_armature = property_group.should_import_armature
|
||||||
|
options.bone_length = property_group.bone_length
|
||||||
|
options.should_import_materials = property_group.should_import_materials
|
||||||
|
options.should_import_shape_keys = property_group.should_import_shape_keys
|
||||||
|
options.scale = property_group.scale
|
||||||
|
|
||||||
|
if property_group.bdk_repository_id:
|
||||||
|
options.bdk_repository_id = property_group.bdk_repository_id
|
||||||
|
|
||||||
|
return options
|
||||||
|
|
||||||
|
|
||||||
|
def psk_import_draw(layout: UILayout, props: PskImportMixin):
|
||||||
|
row = layout.row()
|
||||||
|
|
||||||
|
col = row.column()
|
||||||
|
col.use_property_split = True
|
||||||
|
col.use_property_decorate = False
|
||||||
|
col.prop(props, 'components')
|
||||||
|
|
||||||
|
if props.should_import_mesh:
|
||||||
|
mesh_header, mesh_panel = layout.panel('mesh_panel_id', default_closed=False)
|
||||||
|
mesh_header.label(text='Mesh', icon='MESH_DATA')
|
||||||
|
|
||||||
|
if mesh_panel:
|
||||||
|
row = mesh_panel.row()
|
||||||
|
col = row.column()
|
||||||
|
col.use_property_split = True
|
||||||
|
col.use_property_decorate = False
|
||||||
|
col.prop(props, 'should_import_extra_uvs', text='Extra UVs')
|
||||||
|
col.prop(props, 'should_import_materials', text='Materials')
|
||||||
|
col.prop(props, 'should_import_vertex_colors', text='Vertex Colors')
|
||||||
|
if props.should_import_vertex_colors:
|
||||||
|
col.prop(props, 'vertex_color_space')
|
||||||
|
col.separator()
|
||||||
|
col.prop(props, 'should_import_vertex_normals', text='Vertex Normals')
|
||||||
|
col.prop(props, 'should_import_shape_keys', text='Shape Keys')
|
||||||
|
|
||||||
|
if props.should_import_armature:
|
||||||
|
armature_header, armature_panel = layout.panel('armature_panel_id', default_closed=False)
|
||||||
|
armature_header.label(text='Armature', icon='OUTLINER_DATA_ARMATURE')
|
||||||
|
|
||||||
|
if armature_panel:
|
||||||
|
row = armature_panel.row()
|
||||||
|
col = row.column()
|
||||||
|
col.use_property_split = True
|
||||||
|
col.use_property_decorate = False
|
||||||
|
col.prop(props, 'bone_length')
|
||||||
|
|
||||||
|
transform_header, transform_panel = layout.panel('transform_panel_id', default_closed=False)
|
||||||
|
transform_header.label(text='Transform')
|
||||||
|
if transform_panel:
|
||||||
|
row = transform_panel.row()
|
||||||
|
col = row.column()
|
||||||
|
col.use_property_split = True
|
||||||
|
col.use_property_decorate = False
|
||||||
|
col.prop(props, 'scale')
|
||||||
|
|
||||||
|
|
||||||
|
class PSK_OT_import(Operator, ImportHelper, PskImportMixin):
|
||||||
|
bl_idname = 'psk.import_file'
|
||||||
|
bl_label = 'Import'
|
||||||
|
bl_options = {'INTERNAL', 'UNDO', 'PRESET'}
|
||||||
|
bl_description = 'Import a PSK file'
|
||||||
|
filename_ext = '.psk'
|
||||||
|
filter_glob: StringProperty(default='*.psk;*.pskx', options={'HIDDEN'})
|
||||||
|
filepath: StringProperty(
|
||||||
|
name='File Path',
|
||||||
|
description='File path used for importing the PSK file',
|
||||||
|
maxlen=1024,
|
||||||
|
default='')
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
try:
|
||||||
|
psk = read_psk(self.filepath)
|
||||||
|
except OSError as e:
|
||||||
|
self.report({'ERROR'}, f'Failed to read "{self.filepath}". The file may be corrupted or not a valid PSK file: {e}')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
name = os.path.splitext(os.path.basename(self.filepath))[0]
|
||||||
|
options = get_psk_import_options_from_properties(self)
|
||||||
|
result = import_psk(psk, context, name, options)
|
||||||
|
|
||||||
|
if len(result.warnings):
|
||||||
|
message = f'PSK imported as "{result.root_object.name}" with {len(result.warnings)} warning(s)\n'
|
||||||
|
message += '\n'.join(result.warnings)
|
||||||
|
self.report({'WARNING'}, message)
|
||||||
|
else:
|
||||||
|
self.report({'INFO'}, f'PSK imported as "{result.root_object.name}"')
|
||||||
|
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
assert self.layout
|
||||||
|
psk_import_draw(self.layout, self)
|
||||||
|
|
||||||
|
|
||||||
|
class PSK_OT_import_drag_and_drop(Operator, PskImportMixin):
|
||||||
|
bl_idname = 'psk.import_drag_and_drop'
|
||||||
|
bl_label = 'Import PSK'
|
||||||
|
bl_options = {'INTERNAL', 'UNDO', 'PRESET'}
|
||||||
|
bl_description = 'Import PSK files by dragging and dropping them onto the 3D view'
|
||||||
|
|
||||||
|
directory: StringProperty(subtype='FILE_PATH', options={'SKIP_SAVE', 'HIDDEN'})
|
||||||
|
files: CollectionProperty(type=OperatorFileListElement, options={'SKIP_SAVE', 'HIDDEN'})
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context) -> bool:
|
||||||
|
return context.area is not None and context.area.type == 'VIEW_3D'
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
assert self.layout
|
||||||
|
psk_import_draw(self.layout, self)
|
||||||
|
|
||||||
|
def invoke(self, context, event):
|
||||||
|
assert context.window_manager
|
||||||
|
context.window_manager.invoke_props_dialog(self)
|
||||||
|
return {'RUNNING_MODAL'}
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
warning_count = 0
|
||||||
|
|
||||||
|
options = get_psk_import_options_from_properties(self)
|
||||||
|
|
||||||
|
for file in self.files:
|
||||||
|
filepath = Path(self.directory) / file.name
|
||||||
|
try:
|
||||||
|
psk = read_psk(filepath)
|
||||||
|
except OSError as e:
|
||||||
|
self.report({'ERROR'}, f'Failed to read "{filepath}". The file may be corrupted or not a valid PSK file: {e}')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
name = os.path.splitext(file.name)[0]
|
||||||
|
result = import_psk(psk, context, name, options)
|
||||||
|
if result.warnings:
|
||||||
|
warning_count += len(result.warnings)
|
||||||
|
|
||||||
|
if warning_count > 0:
|
||||||
|
self.report({'WARNING'}, f'Imported {len(self.files)} PSK file(s) with {warning_count} warning(s)')
|
||||||
|
else:
|
||||||
|
self.report({'INFO'}, f'Imported {len(self.files)} PSK file(s)')
|
||||||
|
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: move to another file
|
||||||
|
class PSK_FH_import(FileHandler):
|
||||||
|
bl_idname = 'PSK_FH_import'
|
||||||
|
bl_label = 'Unreal PSK'
|
||||||
|
bl_import_operator = PSK_OT_import_drag_and_drop.bl_idname
|
||||||
|
bl_export_operator = 'psk.export_collection'
|
||||||
|
bl_file_extensions = '.psk;.pskx'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll_drop(cls, context: Context) -> bool:
|
||||||
|
return context.area is not None and context.area.type == 'VIEW_3D'
|
||||||
|
|
||||||
|
|
||||||
|
_classes = (
|
||||||
|
PSK_OT_import,
|
||||||
|
PSK_OT_import_drag_and_drop,
|
||||||
|
PSK_FH_import,
|
||||||
|
)
|
||||||
|
|
||||||
|
from bpy.utils import register_classes_factory
|
||||||
|
register, unregister = register_classes_factory(_classes)
|
||||||
@@ -1,39 +1,74 @@
|
|||||||
import os
|
|
||||||
import sys
|
|
||||||
from math import inf
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
import bmesh
|
import bmesh
|
||||||
import bpy
|
import bpy
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from bpy.props import BoolProperty, EnumProperty, FloatProperty, StringProperty
|
|
||||||
from bpy.types import Operator, PropertyGroup
|
from bpy.types import Context, Object, VertexGroup
|
||||||
from bpy_extras.io_utils import ImportHelper
|
from mathutils import Matrix, Quaternion, Vector
|
||||||
from mathutils import Quaternion, Vector, Matrix
|
from typing import List, Optional
|
||||||
|
|
||||||
from .data import Psk
|
from .data import Psk
|
||||||
from .reader import PskReader
|
from .properties import poly_flags_to_triangle_type_and_bit_flags
|
||||||
from ..helpers import rgb_to_srgb
|
from ..shared.data import PsxBone
|
||||||
|
from ..shared.helpers import is_bdk_addon_loaded, rgb_to_srgb
|
||||||
|
|
||||||
|
|
||||||
class PskImportOptions(object):
|
class PskImportOptions:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.name = ''
|
self.should_import_mesh = True
|
||||||
|
self.should_reuse_materials = True
|
||||||
self.should_import_vertex_colors = True
|
self.should_import_vertex_colors = True
|
||||||
self.vertex_color_space = 'sRGB'
|
self.vertex_color_space = 'SRGB'
|
||||||
self.should_import_vertex_normals = True
|
self.should_import_vertex_normals = True
|
||||||
self.should_import_extra_uvs = True
|
self.should_import_extra_uvs = True
|
||||||
|
self.should_import_armature = True
|
||||||
|
self.should_import_shape_keys = True
|
||||||
self.bone_length = 1.0
|
self.bone_length = 1.0
|
||||||
|
self.should_import_materials = True
|
||||||
|
self.scale = 1.0
|
||||||
|
self.bdk_repository_id = None
|
||||||
|
|
||||||
|
|
||||||
class PskImporter(object):
|
class ImportBone:
|
||||||
|
"""
|
||||||
|
Intermediate bone type for the purpose of construction.
|
||||||
|
"""
|
||||||
|
def __init__(self, index: int, psk_bone: PsxBone):
|
||||||
|
self.index: int = index
|
||||||
|
self.psk_bone: PsxBone = psk_bone
|
||||||
|
self.parent: Optional[ImportBone] = None
|
||||||
|
self.local_rotation: Quaternion = Quaternion()
|
||||||
|
self.local_translation: Vector = Vector()
|
||||||
|
self.world_rotation_matrix: Matrix = Matrix()
|
||||||
|
self.world_matrix: Matrix = Matrix()
|
||||||
|
self.vertex_group = None
|
||||||
|
self.original_rotation: Quaternion = Quaternion()
|
||||||
|
self.original_location: Vector = Vector()
|
||||||
|
self.post_rotation: Quaternion = Quaternion()
|
||||||
|
|
||||||
|
|
||||||
|
class PskImportResult:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
pass
|
self.warnings: List[str] = []
|
||||||
|
self.armature_object: Optional[Object] = None
|
||||||
|
self.mesh_object: Optional[Object] = None
|
||||||
|
|
||||||
def import_psk(self, psk: Psk, context, options: PskImportOptions):
|
@property
|
||||||
# ARMATURE
|
def root_object(self) -> Object:
|
||||||
armature_data = bpy.data.armatures.new(options.name)
|
return self.armature_object if self.armature_object is not None else self.mesh_object
|
||||||
armature_object = bpy.data.objects.new(options.name, armature_data)
|
|
||||||
|
|
||||||
|
def import_psk(psk: Psk, context: Context, name: str, options: PskImportOptions) -> PskImportResult:
|
||||||
|
result = PskImportResult()
|
||||||
|
armature_object = None
|
||||||
|
mesh_object = None
|
||||||
|
|
||||||
|
assert context.scene
|
||||||
|
assert bpy.context.view_layer
|
||||||
|
|
||||||
|
if options.should_import_armature:
|
||||||
|
# Armature
|
||||||
|
armature_data = bpy.data.armatures.new(name)
|
||||||
|
armature_object = bpy.data.objects.new(name, armature_data)
|
||||||
armature_object.show_in_front = True
|
armature_object.show_in_front = True
|
||||||
|
|
||||||
context.scene.collection.objects.link(armature_object)
|
context.scene.collection.objects.link(armature_object)
|
||||||
@@ -48,21 +83,6 @@ class PskImporter(object):
|
|||||||
|
|
||||||
bpy.ops.object.mode_set(mode='EDIT')
|
bpy.ops.object.mode_set(mode='EDIT')
|
||||||
|
|
||||||
# Intermediate bone type for the purpose of construction.
|
|
||||||
class ImportBone(object):
|
|
||||||
def __init__(self, index: int, psk_bone: Psk.Bone):
|
|
||||||
self.index: int = index
|
|
||||||
self.psk_bone: Psk.Bone = psk_bone
|
|
||||||
self.parent: Optional[ImportBone] = None
|
|
||||||
self.local_rotation: Quaternion = Quaternion()
|
|
||||||
self.local_translation: Vector = Vector()
|
|
||||||
self.world_rotation_matrix: Matrix = Matrix()
|
|
||||||
self.world_matrix: Matrix = Matrix()
|
|
||||||
self.vertex_group = None
|
|
||||||
self.orig_quat: Quaternion = Quaternion()
|
|
||||||
self.orig_loc: Vector = Vector()
|
|
||||||
self.post_quat: Quaternion = Quaternion()
|
|
||||||
|
|
||||||
import_bones = []
|
import_bones = []
|
||||||
|
|
||||||
for bone_index, psk_bone in enumerate(psk.bones):
|
for bone_index, psk_bone in enumerate(psk.bones):
|
||||||
@@ -103,215 +123,182 @@ class PskImporter(object):
|
|||||||
edit_bone_matrix.translation = import_bone.world_matrix.translation
|
edit_bone_matrix.translation = import_bone.world_matrix.translation
|
||||||
edit_bone.matrix = edit_bone_matrix
|
edit_bone.matrix = edit_bone_matrix
|
||||||
|
|
||||||
# Store bind pose information in the bone's custom properties.
|
# Mesh
|
||||||
# This information is used when importing animations from PSA files.
|
if options.should_import_mesh:
|
||||||
edit_bone['orig_quat'] = import_bone.local_rotation
|
mesh_data = bpy.data.meshes.new(name)
|
||||||
edit_bone['orig_loc'] = import_bone.local_translation
|
mesh_object = bpy.data.objects.new(name, mesh_data)
|
||||||
edit_bone['post_quat'] = import_bone.local_rotation.conjugated()
|
|
||||||
|
|
||||||
# MESH
|
# Materials
|
||||||
mesh_data = bpy.data.meshes.new(options.name)
|
if options.should_import_materials:
|
||||||
mesh_object = bpy.data.objects.new(options.name, mesh_data)
|
for material_index, psk_material in enumerate(psk.materials):
|
||||||
|
material_name = psk_material.name.decode('utf-8')
|
||||||
|
material = None
|
||||||
|
|
||||||
# MATERIALS
|
if options.should_reuse_materials and material_name in bpy.data.materials:
|
||||||
for material in psk.materials:
|
# Material already exists, just re-use it.
|
||||||
# TODO: re-use of materials should be an option
|
material = bpy.data.materials[material_name]
|
||||||
bpy_material = bpy.data.materials.new(material.name.decode('utf-8'))
|
elif is_bdk_addon_loaded() and psk.has_material_references:
|
||||||
mesh_data.materials.append(bpy_material)
|
# Material does not yet exist, and we have the BDK addon installed.
|
||||||
|
# Attempt to load it using BDK addon's operator.
|
||||||
|
material_reference = psk.material_references[material_index]
|
||||||
|
repository_id = options.bdk_repository_id if options.bdk_repository_id is not None else ''
|
||||||
|
if material_reference and bpy.ops.bdk.link_material(reference=material_reference, repository_id=repository_id) == {'FINISHED'}:
|
||||||
|
material = bpy.data.materials[material_name]
|
||||||
|
|
||||||
|
if material is None:
|
||||||
|
# Material was unable to be loaded, so just create a blank material.
|
||||||
|
material = bpy.data.materials.new(material_name)
|
||||||
|
mesh_triangle_type, mesh_triangle_bit_flags = poly_flags_to_triangle_type_and_bit_flags(psk_material.poly_flags)
|
||||||
|
material.psk.mesh_triangle_type = mesh_triangle_type
|
||||||
|
material.psk.mesh_triangle_bit_flags = mesh_triangle_bit_flags
|
||||||
|
material.use_nodes = True
|
||||||
|
|
||||||
|
mesh_data.materials.append(material)
|
||||||
|
|
||||||
bm = bmesh.new()
|
bm = bmesh.new()
|
||||||
|
|
||||||
# VERTICES
|
# Vertices
|
||||||
for point in psk.points:
|
for point in psk.points:
|
||||||
bm.verts.new(tuple(point))
|
bm.verts.new(tuple(point))
|
||||||
|
|
||||||
bm.verts.ensure_lookup_table()
|
bm.verts.ensure_lookup_table()
|
||||||
|
|
||||||
degenerate_face_indices = set()
|
# Faces
|
||||||
|
invalid_face_indices = set()
|
||||||
for face_index, face in enumerate(psk.faces):
|
for face_index, face in enumerate(psk.faces):
|
||||||
point_indices = [bm.verts[psk.wedges[i].point_index] for i in reversed(face.wedge_indices)]
|
points = (
|
||||||
|
bm.verts[psk.wedges[face.wedge_indices[2]].point_index],
|
||||||
|
bm.verts[psk.wedges[face.wedge_indices[1]].point_index],
|
||||||
|
bm.verts[psk.wedges[face.wedge_indices[0]].point_index],
|
||||||
|
)
|
||||||
try:
|
try:
|
||||||
bm_face = bm.faces.new(point_indices)
|
bm_face = bm.faces.new(points)
|
||||||
bm_face.material_index = face.material_index
|
bm_face.material_index = face.material_index
|
||||||
except ValueError:
|
except ValueError:
|
||||||
degenerate_face_indices.add(face_index)
|
# This happens for two reasons:
|
||||||
|
# 1. Two or more of the face's points are the same. (i.e, point indices of [0, 0, 1])
|
||||||
|
# 2. The face is a duplicate of another face. (i.e., point indices of [0, 1, 2] and [0, 1, 2])
|
||||||
|
invalid_face_indices.add(face_index)
|
||||||
|
|
||||||
if len(degenerate_face_indices) > 0:
|
# TODO: Handle invalid faces better.
|
||||||
print(f'WARNING: Discarded {len(degenerate_face_indices)} degenerate face(s).')
|
if len(invalid_face_indices) > 0:
|
||||||
|
result.warnings.append(f'Discarded {len(invalid_face_indices)} invalid face(s).')
|
||||||
|
|
||||||
|
face_count = len(bm.faces)
|
||||||
|
|
||||||
bm.to_mesh(mesh_data)
|
bm.to_mesh(mesh_data)
|
||||||
|
|
||||||
# TEXTURE COORDINATES
|
# Texture Coordinates
|
||||||
data_index = 0
|
uv_layer_data_index = 0
|
||||||
uv_layer = mesh_data.uv_layers.new(name='VTXW0000')
|
uv_layer_data = np.zeros((face_count * 3, 2), dtype=np.float32)
|
||||||
for face_index, face in enumerate(psk.faces):
|
for face_index, face in enumerate(psk.faces):
|
||||||
if face_index in degenerate_face_indices:
|
if face_index in invalid_face_indices:
|
||||||
continue
|
continue
|
||||||
face_wedges = [psk.wedges[i] for i in reversed(face.wedge_indices)]
|
for wedge in map(lambda i: psk.wedges[i], reversed(face.wedge_indices)):
|
||||||
for wedge in face_wedges:
|
uv_layer_data[uv_layer_data_index] = wedge.u, 1.0 - wedge.v
|
||||||
uv_layer.data[data_index].uv = wedge.u, 1.0 - wedge.v
|
uv_layer_data_index += 1
|
||||||
data_index += 1
|
uv_layer = mesh_data.uv_layers.new(name='UVMap')
|
||||||
|
uv_layer.uv.foreach_set('vector', uv_layer_data.ravel())
|
||||||
|
|
||||||
# EXTRA UVS
|
# Extra UVs
|
||||||
if psk.has_extra_uvs and options.should_import_extra_uvs:
|
if psk.has_extra_uvs and options.should_import_extra_uvs:
|
||||||
extra_uv_channel_count = int(len(psk.extra_uvs) / len(psk.wedges))
|
extra_uv_channel_count = int(len(psk.extra_uvs) / len(psk.wedges))
|
||||||
wedge_index_offset = 0
|
wedge_index_offset = 0
|
||||||
|
uv_layer_data = np.zeros((face_count * 3, 2), dtype=np.float32)
|
||||||
for extra_uv_index in range(extra_uv_channel_count):
|
for extra_uv_index in range(extra_uv_channel_count):
|
||||||
data_index = 0
|
uv_layer_data_index = 0
|
||||||
uv_layer = mesh_data.uv_layers.new(name=f'EXTRAUV{extra_uv_index}')
|
|
||||||
for face_index, face in enumerate(psk.faces):
|
for face_index, face in enumerate(psk.faces):
|
||||||
if face_index in degenerate_face_indices:
|
if face_index in invalid_face_indices:
|
||||||
continue
|
continue
|
||||||
for wedge_index in reversed(face.wedge_indices):
|
for wedge_index in reversed(face.wedge_indices):
|
||||||
u, v = psk.extra_uvs[wedge_index_offset + wedge_index]
|
u, v = psk.extra_uvs[wedge_index_offset + wedge_index]
|
||||||
uv_layer.data[data_index].uv = u, 1.0 - v
|
uv_layer_data[uv_layer_data_index] = u, 1.0 - v
|
||||||
data_index += 1
|
uv_layer_data_index += 1
|
||||||
wedge_index_offset += len(psk.wedges)
|
wedge_index_offset += len(psk.wedges)
|
||||||
|
uv_layer = mesh_data.uv_layers.new(name=f'EXTRAUV{extra_uv_index}')
|
||||||
|
uv_layer.uv.foreach_set('vector', uv_layer_data.ravel())
|
||||||
|
|
||||||
# VERTEX COLORS
|
# Vertex Colors
|
||||||
if psk.has_vertex_colors and options.should_import_vertex_colors:
|
if psk.has_vertex_colors and options.should_import_vertex_colors:
|
||||||
size = (len(psk.points), 4)
|
psk_vertex_colors = np.zeros((len(psk.vertex_colors), 4))
|
||||||
vertex_colors = np.full(size, inf)
|
for vertex_color_index in range(len(psk.vertex_colors)):
|
||||||
vertex_color_data = mesh_data.vertex_colors.new(name='VERTEXCOLOR')
|
psk_vertex_colors[vertex_color_index] = tuple(psk.vertex_colors[vertex_color_index])
|
||||||
ambiguous_vertex_color_point_indices = []
|
psk_vertex_colors /= 255.0
|
||||||
|
|
||||||
for wedge_index, wedge in enumerate(psk.wedges):
|
|
||||||
point_index = wedge.point_index
|
|
||||||
psk_vertex_color = psk.vertex_colors[wedge_index].normalized()
|
|
||||||
if vertex_colors[point_index, 0] != inf and tuple(vertex_colors[point_index]) != psk_vertex_color:
|
|
||||||
ambiguous_vertex_color_point_indices.append(point_index)
|
|
||||||
else:
|
|
||||||
vertex_colors[point_index] = psk_vertex_color
|
|
||||||
|
|
||||||
|
# Convert vertex colors to sRGB if necessary.
|
||||||
if options.vertex_color_space == 'SRGBA':
|
if options.vertex_color_space == 'SRGBA':
|
||||||
for i in range(vertex_colors.shape[0]):
|
psk_vertex_colors[:, :3] = np.vectorize(rgb_to_srgb)(psk_vertex_colors[:, :3])
|
||||||
vertex_colors[i, :3] = tuple(map(lambda x: rgb_to_srgb(x), vertex_colors[i, :3]))
|
|
||||||
|
|
||||||
for loop_index, loop in enumerate(mesh_data.loops):
|
# Map the PSK vertex colors to the face corners.
|
||||||
vertex_color = vertex_colors[loop.vertex_index]
|
face_count = len(psk.faces) - len(invalid_face_indices)
|
||||||
if vertex_color is not None:
|
face_corner_colors = np.full((face_count * 3, 4), 1.0)
|
||||||
vertex_color_data.data[loop_index].color = vertex_color
|
face_corner_color_index = 0
|
||||||
else:
|
for face_index, face in enumerate(psk.faces):
|
||||||
vertex_color_data.data[loop_index].color = 1.0, 1.0, 1.0, 1.0
|
if face_index in invalid_face_indices:
|
||||||
|
continue
|
||||||
|
for wedge_index in reversed(face.wedge_indices):
|
||||||
|
face_corner_colors[face_corner_color_index] = psk_vertex_colors[wedge_index]
|
||||||
|
face_corner_color_index += 1
|
||||||
|
|
||||||
if len(ambiguous_vertex_color_point_indices) > 0:
|
# Create the vertex color attribute.
|
||||||
print(f'WARNING: {len(ambiguous_vertex_color_point_indices)} vertex(es) with ambiguous vertex colors.')
|
face_corner_color_attribute = mesh_data.attributes.new(name='VERTEXCOLOR', type='FLOAT_COLOR', domain='CORNER')
|
||||||
|
face_corner_color_attribute.data.foreach_set('color', face_corner_colors.ravel())
|
||||||
|
|
||||||
# VERTEX NORMALS
|
# Vertex Normals
|
||||||
if psk.has_vertex_normals and options.should_import_vertex_normals:
|
if psk.has_vertex_normals and options.should_import_vertex_normals:
|
||||||
mesh_data.polygons.foreach_set("use_smooth", [True] * len(mesh_data.polygons))
|
mesh_data.polygons.foreach_set('use_smooth', [True] * len(mesh_data.polygons))
|
||||||
normals = []
|
normals = []
|
||||||
for vertex_normal in psk.vertex_normals:
|
for vertex_normal in psk.vertex_normals:
|
||||||
normals.append(tuple(vertex_normal))
|
normals.append(tuple(vertex_normal))
|
||||||
mesh_data.normals_split_custom_set_from_vertices(normals)
|
mesh_data.normals_split_custom_set_from_vertices(normals)
|
||||||
mesh_data.use_auto_smooth = True
|
else:
|
||||||
|
mesh_data.shade_smooth()
|
||||||
|
|
||||||
bm.normal_update()
|
bm.normal_update()
|
||||||
bm.free()
|
bm.free()
|
||||||
|
|
||||||
|
# Weights
|
||||||
# Get a list of all bones that have weights associated with them.
|
# Get a list of all bones that have weights associated with them.
|
||||||
vertex_group_bone_indices = set(map(lambda weight: weight.bone_index, psk.weights))
|
vertex_group_bone_indices = set(map(lambda weight: weight.bone_index, psk.weights))
|
||||||
for import_bone in map(lambda x: import_bones[x], sorted(list(vertex_group_bone_indices))):
|
vertex_groups: List[Optional[VertexGroup]] = [None] * len(psk.bones)
|
||||||
import_bone.vertex_group = mesh_object.vertex_groups.new(
|
for bone_index, psk_bone in map(lambda x: (x, psk.bones[x]), vertex_group_bone_indices):
|
||||||
name=import_bone.psk_bone.name.decode('windows-1252'))
|
vertex_groups[bone_index] = mesh_object.vertex_groups.new(name=psk_bone.name.decode('windows-1252'))
|
||||||
|
|
||||||
for weight in psk.weights:
|
for weight in psk.weights:
|
||||||
import_bones[weight.bone_index].vertex_group.add((weight.point_index,), weight.weight, 'ADD')
|
vertex_groups[weight.bone_index].add((weight.point_index,), weight.weight, 'ADD')
|
||||||
|
|
||||||
# Add armature modifier to our mesh object.
|
# Morphs (Shape Keys)
|
||||||
armature_modifier = mesh_object.modifiers.new(name='Armature', type='ARMATURE')
|
if options.should_import_shape_keys:
|
||||||
armature_modifier.object = armature_object
|
morph_data_iterator = iter(psk.morph_data)
|
||||||
mesh_object.parent = armature_object
|
|
||||||
|
if psk.has_morph_data:
|
||||||
|
mesh_object.shape_key_add(name='MORPH_BASE', from_mix=False)
|
||||||
|
|
||||||
|
for morph_info in psk.morph_infos:
|
||||||
|
shape_key = mesh_object.shape_key_add(name=morph_info.name.decode('windows-1252'), from_mix=False)
|
||||||
|
|
||||||
|
for _ in range(morph_info.vertex_count):
|
||||||
|
morph_data = next(morph_data_iterator)
|
||||||
|
x, y, z = morph_data.position_delta
|
||||||
|
shape_key.data[morph_data.point_index].co += Vector((x, -y, z))
|
||||||
|
|
||||||
context.scene.collection.objects.link(mesh_object)
|
context.scene.collection.objects.link(mesh_object)
|
||||||
|
|
||||||
try:
|
# Add armature modifier to our mesh object.
|
||||||
bpy.ops.object.mode_set(mode='OBJECT')
|
if options.should_import_armature:
|
||||||
except:
|
armature_modifier = mesh_object.modifiers.new(name='Armature', type='ARMATURE')
|
||||||
pass
|
armature_modifier.object = armature_object
|
||||||
|
mesh_object.parent = armature_object
|
||||||
|
|
||||||
|
root_object = armature_object if options.should_import_armature else mesh_object
|
||||||
|
root_object.scale = (options.scale, options.scale, options.scale)
|
||||||
|
|
||||||
class PskImportPropertyGroup(PropertyGroup):
|
try:
|
||||||
should_import_vertex_colors: BoolProperty(
|
bpy.ops.object.mode_set(mode='OBJECT')
|
||||||
default=True,
|
except:
|
||||||
options=set(),
|
pass
|
||||||
name='Vertex Colors',
|
|
||||||
description='Import vertex colors from PSKX files, if available'
|
|
||||||
)
|
|
||||||
vertex_color_space: EnumProperty(
|
|
||||||
name='Vertex Color Space',
|
|
||||||
options=set(),
|
|
||||||
description='The source vertex color space',
|
|
||||||
default='SRGBA',
|
|
||||||
items=(
|
|
||||||
('LINEAR', 'Linear', ''),
|
|
||||||
('SRGBA', 'sRGBA', ''),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
should_import_vertex_normals: BoolProperty(
|
|
||||||
default=True,
|
|
||||||
name='Vertex Normals',
|
|
||||||
options=set(),
|
|
||||||
description='Import vertex normals from PSKX files, if available'
|
|
||||||
)
|
|
||||||
should_import_extra_uvs: BoolProperty(
|
|
||||||
default=True,
|
|
||||||
name='Extra UVs',
|
|
||||||
options=set(),
|
|
||||||
description='Import extra UV maps from PSKX files, if available'
|
|
||||||
)
|
|
||||||
bone_length: FloatProperty(
|
|
||||||
default=1.0,
|
|
||||||
min=sys.float_info.epsilon,
|
|
||||||
step=100,
|
|
||||||
soft_min=1.0,
|
|
||||||
name='Bone Length',
|
|
||||||
options=set(),
|
|
||||||
description='Length of the bones'
|
|
||||||
)
|
|
||||||
|
|
||||||
|
result.armature_object = armature_object
|
||||||
|
result.mesh_object = mesh_object
|
||||||
|
|
||||||
class PskImportOperator(Operator, ImportHelper):
|
return result
|
||||||
bl_idname = 'import.psk'
|
|
||||||
bl_label = 'Export'
|
|
||||||
bl_options = {'INTERNAL', 'UNDO'}
|
|
||||||
__doc__ = 'Load a PSK file'
|
|
||||||
filename_ext = '.psk'
|
|
||||||
filter_glob: StringProperty(default='*.psk;*.pskx', options={'HIDDEN'})
|
|
||||||
filepath: StringProperty(
|
|
||||||
name='File Path',
|
|
||||||
description='File path used for exporting the PSK file',
|
|
||||||
maxlen=1024,
|
|
||||||
default='')
|
|
||||||
|
|
||||||
def execute(self, context):
|
|
||||||
pg = context.scene.psk_import
|
|
||||||
reader = PskReader()
|
|
||||||
psk = reader.read(self.filepath)
|
|
||||||
options = PskImportOptions()
|
|
||||||
options.name = os.path.splitext(os.path.basename(self.filepath))[0]
|
|
||||||
options.should_import_extra_uvs = pg.should_import_extra_uvs
|
|
||||||
options.should_import_vertex_colors = pg.should_import_vertex_colors
|
|
||||||
options.should_import_vertex_normals = pg.should_import_vertex_normals
|
|
||||||
options.vertex_color_space = pg.vertex_color_space
|
|
||||||
options.bone_length = pg.bone_length
|
|
||||||
PskImporter().import_psk(psk, context, options)
|
|
||||||
return {'FINISHED'}
|
|
||||||
|
|
||||||
def draw(self, context):
|
|
||||||
pg = context.scene.psk_import
|
|
||||||
layout = self.layout
|
|
||||||
layout.use_property_split = True
|
|
||||||
layout.use_property_decorate = False
|
|
||||||
layout.prop(pg, 'should_import_vertex_normals')
|
|
||||||
layout.prop(pg, 'should_import_extra_uvs')
|
|
||||||
layout.prop(pg, 'should_import_vertex_colors')
|
|
||||||
if pg.should_import_vertex_colors:
|
|
||||||
layout.prop(pg, 'vertex_color_space')
|
|
||||||
layout.prop(pg, 'bone_length')
|
|
||||||
|
|
||||||
|
|
||||||
classes = (
|
|
||||||
PskImportOperator,
|
|
||||||
PskImportPropertyGroup,
|
|
||||||
)
154 io_scene_psk_psa/psk/properties.py Normal file
@@ -0,0 +1,154 @@
|
|||||||
|
import sys
|
||||||
|
|
||||||
|
from bpy.props import BoolProperty, EnumProperty, FloatProperty, StringProperty
|
||||||
|
from bpy.types import PropertyGroup
|
||||||
|
|
||||||
|
mesh_triangle_types_items = (
|
||||||
|
('NORMAL', 'Normal', 'Normal one-sided', 0),
|
||||||
|
('NORMAL_TWO_SIDED', 'Normal Two-Sided', 'Normal but two-sided', 1),
|
||||||
|
('TRANSLUCENT', 'Translucent', 'Translucent two-sided', 2),
|
||||||
|
('MASKED', 'Masked', 'Masked two-sided', 3),
|
||||||
|
('MODULATE', 'Modulate', 'Modulation blended two-sided', 4),
|
||||||
|
('PLACEHOLDER', 'Placeholder', 'Placeholder triangle for positioning weapon. Invisible', 8),
|
||||||
|
)
|
||||||
|
|
||||||
|
mesh_triangle_bit_flags_items = (
|
||||||
|
('UNLIT', 'Unlit', 'Full brightness, no lighting', 16),
|
||||||
|
('FLAT', 'Flat', 'Flat surface, don\'t do bMeshCurvy thing', 32),
|
||||||
|
('ENVIRONMENT', 'Environment', 'Environment mapped', 64),
|
||||||
|
('NO_SMOOTH', 'No Smooth', 'No bilinear filtering on this poly\'s texture', 128),
|
||||||
|
)
|
||||||
|
|
||||||
|
class PSX_PG_material(PropertyGroup):
|
||||||
|
mesh_triangle_type: EnumProperty(
|
||||||
|
name='Triangle Type',
|
||||||
|
items=mesh_triangle_types_items
|
||||||
|
)
|
||||||
|
mesh_triangle_bit_flags: EnumProperty(
|
||||||
|
name='Triangle Bit Flags',
|
||||||
|
items=mesh_triangle_bit_flags_items,
|
||||||
|
options={'ENUM_FLAG'}
|
||||||
|
)
|
||||||
|
|
||||||
|
mesh_triangle_types_items_dict = {item[0]: item[3] for item in mesh_triangle_types_items}
|
||||||
|
mesh_triangle_bit_flags_items_dict = {item[0]: item[3] for item in mesh_triangle_bit_flags_items}
|
||||||
|
|
||||||
|
|
||||||

def triangle_type_and_bit_flags_to_poly_flags(mesh_triangle_type: str, mesh_triangle_bit_flags: set[str]) -> int:
    poly_flags = 0
    poly_flags |= mesh_triangle_types_items_dict.get(mesh_triangle_type, 0)
    for flag in mesh_triangle_bit_flags:
        poly_flags |= mesh_triangle_bit_flags_items_dict.get(flag, 0)
    return poly_flags


def poly_flags_to_triangle_type_and_bit_flags(poly_flags: int) -> tuple[str, set[str]]:
    try:
        triangle_type = next(item[0] for item in mesh_triangle_types_items if item[3] == (poly_flags & 15))
    except StopIteration:
        triangle_type = 'NORMAL'
    triangle_bit_flags = {item[0] for item in mesh_triangle_bit_flags_items if item[3] & poly_flags}
    return triangle_type, triangle_bit_flags
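
For reference, a minimal round-trip sketch of the two helpers above (the values come from the item tuples defined earlier in this file: MASKED is 3 and UNLIT is 16):

# Combining MASKED (3) with the UNLIT bit flag (16) yields poly_flags 19; converting back recovers both parts.
poly_flags = triangle_type_and_bit_flags_to_poly_flags('MASKED', {'UNLIT'})
assert poly_flags == 19
triangle_type, bit_flags = poly_flags_to_triangle_type_and_bit_flags(poly_flags)
assert triangle_type == 'MASKED' and bit_flags == {'UNLIT'}
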
def should_import_mesh_get(self):
|
||||||
|
return self.components in {'ALL', 'MESH'}
|
||||||
|
|
||||||
|
|
||||||
|
def should_import_skeleton_get(self):
|
||||||
|
return self.components in {'ALL', 'ARMATURE'}
|
||||||
|
|
||||||
|
|
||||||
|
vertex_color_space_items = (
|
||||||
|
('LINEAR', 'Linear', ''),
|
||||||
|
('SRGBA', 'sRGBA', ''),
|
||||||
|
)
|
||||||
|
|
||||||
|
psk_import_components_items = (
|
||||||
|
('ALL', 'Mesh & Armature', 'Import mesh and armature'),
|
||||||
|
('MESH', 'Mesh Only', 'Import mesh only'),
|
||||||
|
('ARMATURE', 'Armature Only', 'Import armature only'),
|
||||||
|
)
|
||||||
|
|
||||||
|
class PskImportMixin:
|
||||||
|
should_import_vertex_colors: BoolProperty(
|
||||||
|
default=True,
|
||||||
|
options=set(),
|
||||||
|
name='Import Vertex Colors',
|
||||||
|
description='Import vertex colors, if available'
|
||||||
|
)
|
||||||
|
vertex_color_space: EnumProperty(
|
||||||
|
name='Vertex Color Space',
|
||||||
|
options=set(),
|
||||||
|
description='The source vertex color space',
|
||||||
|
default='SRGBA',
|
||||||
|
items=vertex_color_space_items
|
||||||
|
)
|
||||||
|
should_import_vertex_normals: BoolProperty(
|
||||||
|
default=True,
|
||||||
|
name='Import Vertex Normals',
|
||||||
|
options=set(),
|
||||||
|
description='Import vertex normals, if available.\n\nThis is only supported for PSKX files'
|
||||||
|
)
|
||||||
|
should_import_extra_uvs: BoolProperty(
|
||||||
|
default=True,
|
||||||
|
name='Import Extra UVs',
|
||||||
|
options=set(),
|
||||||
|
description='Import extra UV maps, if available'
|
||||||
|
)
|
||||||
|
components: EnumProperty(
|
||||||
|
name='Components',
|
||||||
|
options=set(),
|
||||||
|
description='Which components to import',
|
||||||
|
items=psk_import_components_items,
|
||||||
|
default='ALL'
|
||||||
|
)
|
||||||
|
should_import_mesh: BoolProperty(
|
||||||
|
name='Import Mesh',
|
||||||
|
get=should_import_mesh_get,
|
||||||
|
)
|
||||||
|
should_import_materials: BoolProperty(
|
||||||
|
default=True,
|
||||||
|
name='Import Materials',
|
||||||
|
options=set(),
|
||||||
|
)
|
||||||
|
should_import_armature: BoolProperty(
|
||||||
|
name='Import Skeleton',
|
||||||
|
get=should_import_skeleton_get,
|
||||||
|
)
|
||||||
|
bone_length: FloatProperty(
|
||||||
|
default=1.0,
|
||||||
|
min=sys.float_info.epsilon,
|
||||||
|
step=100,
|
||||||
|
soft_min=1.0,
|
||||||
|
name='Bone Length',
|
||||||
|
options=set(),
|
||||||
|
subtype='DISTANCE',
|
||||||
|
description='Length of the bones'
|
||||||
|
)
|
||||||
|
should_import_shape_keys: BoolProperty(
|
||||||
|
default=True,
|
||||||
|
name='Import Shape Keys',
|
||||||
|
options=set(),
|
||||||
|
description='Import shape keys, if available.\n\nThis is only supported for PSKX files'
|
||||||
|
)
|
||||||
|
scale: FloatProperty(
|
||||||
|
name='Scale',
|
||||||
|
default=1.0,
|
||||||
|
soft_min=0.0,
|
||||||
|
)
|
||||||
|
bdk_repository_id: StringProperty(
|
||||||
|
name='BDK Repository ID',
|
||||||
|
default='',
|
||||||
|
options=set(),
|
||||||
|
description='The ID of the BDK repository to use for loading materials'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
_classes = (
|
||||||
|
PSX_PG_material,
|
||||||
|
)
|
||||||
|
|
||||||
|
from bpy.utils import register_classes_factory
|
||||||
|
register, unregister = register_classes_factory(_classes)
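
The components enum above drives the read-only should_import_mesh and should_import_armature booleans through their getters. A minimal sketch of the equivalent logic (the helper name is hypothetical, not part of the addon):

# 'ALL' enables both, 'MESH' and 'ARMATURE' enable only their respective component.
def derive_component_flags(components: str) -> tuple[bool, bool]:
    return components in {'ALL', 'MESH'}, components in {'ALL', 'ARMATURE'}

assert derive_component_flags('MESH') == (True, False)
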
|
||||||
|
|
||||||
@@ -1,55 +1,95 @@
|
|||||||
import ctypes
|
import ctypes
|
||||||
|
import os
|
||||||
from .data import *
|
import re
|
||||||
|
import warnings
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import List
|
||||||
|
from ..shared.data import Section
|
||||||
|
from .data import Color, Psk, PsxBone, Vector2, Vector3
|
||||||
|
|
||||||
|
|
||||||
class PskReader(object):
|
def _read_types(fp, data_class, section: Section, data):
|
||||||
|
buffer_length = section.data_size * section.data_count
|
||||||
|
buffer = fp.read(buffer_length)
|
||||||
|
offset = 0
|
||||||
|
for _ in range(section.data_count):
|
||||||
|
data.append(data_class.from_buffer_copy(buffer, offset))
|
||||||
|
offset += section.data_size
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
@staticmethod
|
def _read_material_references(path: str) -> List[str]:
|
||||||
def read_types(fp, data_class: ctypes.Structure, section: Section, data):
|
property_file_path = Path(path).with_suffix('.props.txt')
|
||||||
buffer_length = section.data_size * section.data_count
|
if not property_file_path.is_file():
|
||||||
buffer = fp.read(buffer_length)
|
# Property file does not exist.
|
||||||
offset = 0
|
return []
|
||||||
for _ in range(section.data_count):
|
# Do a crude regex match to find the Material list entries.
|
||||||
data.append(data_class.from_buffer_copy(buffer, offset))
|
contents = property_file_path.read_text()
|
||||||
offset += section.data_size
|
pattern = r'Material\s*=\s*([^\s^,]+)'
|
||||||
|
return re.findall(pattern, contents)
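
As an illustration only (the sample line below is an assumption; exact sidecar contents vary by UEViewer version), the regex above captures the right-hand side of each Material entry up to the next space or comma:

# Hypothetical line from a .props.txt sidecar file.
sample = "Material = Texture'MyPackage.Skins.BodyTex'"
assert re.findall(r'Material\s*=\s*([^\s^,]+)', sample) == ["Texture'MyPackage.Skins.BodyTex'"]
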
|
||||||
|
|
||||||
def read(self, path) -> Psk:
|
|
||||||
psk = Psk()
|
def read_psk(path: str) -> Psk:
|
||||||
with open(path, 'rb') as fp:
|
psk = Psk()
|
||||||
while fp.read(1):
|
|
||||||
fp.seek(-1, 1)
|
# Read the PSK file sections.
|
||||||
section = Section.from_buffer_copy(fp.read(ctypes.sizeof(Section)))
|
with open(path, 'rb') as fp:
|
||||||
if section.name == b'ACTRHEAD':
|
while fp.read(1):
|
||||||
|
fp.seek(-1, 1)
|
||||||
|
section = Section.from_buffer_copy(fp.read(ctypes.sizeof(Section)))
|
||||||
|
match section.name:
|
||||||
|
case b'ACTRHEAD':
|
||||||
pass
|
pass
|
||||||
elif section.name == b'PNTS0000':
|
case b'PNTS0000':
|
||||||
PskReader.read_types(fp, Vector3, section, psk.points)
|
_read_types(fp, Vector3, section, psk.points)
|
||||||
elif section.name == b'VTXW0000':
|
case b'VTXW0000':
|
||||||
if section.data_size == ctypes.sizeof(Psk.Wedge16):
|
if section.data_size == ctypes.sizeof(Psk.Wedge16):
|
||||||
PskReader.read_types(fp, Psk.Wedge16, section, psk.wedges)
|
_read_types(fp, Psk.Wedge16, section, psk.wedges)
|
||||||
elif section.data_size == ctypes.sizeof(Psk.Wedge32):
|
elif section.data_size == ctypes.sizeof(Psk.Wedge32):
|
||||||
PskReader.read_types(fp, Psk.Wedge32, section, psk.wedges)
|
_read_types(fp, Psk.Wedge32, section, psk.wedges)
|
||||||
else:
|
else:
|
||||||
raise RuntimeError('Unrecognized wedge format')
|
raise RuntimeError('Unrecognized wedge format')
|
||||||
elif section.name == b'FACE0000':
|
case b'FACE0000':
|
||||||
PskReader.read_types(fp, Psk.Face, section, psk.faces)
|
_read_types(fp, Psk.Face, section, psk.faces)
|
||||||
elif section.name == b'MATT0000':
|
case b'MATT0000':
|
||||||
PskReader.read_types(fp, Psk.Material, section, psk.materials)
|
_read_types(fp, Psk.Material, section, psk.materials)
|
||||||
elif section.name == b'REFSKELT':
|
case b'REFSKELT':
|
||||||
PskReader.read_types(fp, Psk.Bone, section, psk.bones)
|
_read_types(fp, PsxBone, section, psk.bones)
|
||||||
elif section.name == b'RAWWEIGHTS':
|
case b'RAWWEIGHTS':
|
||||||
PskReader.read_types(fp, Psk.Weight, section, psk.weights)
|
_read_types(fp, Psk.Weight, section, psk.weights)
|
||||||
elif section.name == b'FACE3200':
|
case b'FACE3200':
|
||||||
PskReader.read_types(fp, Psk.Face32, section, psk.faces)
|
_read_types(fp, Psk.Face32, section, psk.faces)
|
||||||
elif section.name == b'VERTEXCOLOR':
|
case b'VERTEXCOLOR':
|
||||||
PskReader.read_types(fp, Color, section, psk.vertex_colors)
|
_read_types(fp, Color, section, psk.vertex_colors)
|
||||||
elif section.name.startswith(b'EXTRAUVS'):
|
case b'VTXNORMS':
|
||||||
PskReader.read_types(fp, Vector2, section, psk.extra_uvs)
|
_read_types(fp, Vector3, section, psk.vertex_normals)
|
||||||
elif section.name == b'VTXNORMS':
|
case b'MRPHINFO':
|
||||||
PskReader.read_types(fp, Vector3, section, psk.vertex_normals)
|
_read_types(fp, Psk.MorphInfo, section, psk.morph_infos)
|
||||||
else:
|
case b'MRPHDATA':
|
||||||
raise RuntimeError(f'Unrecognized section "{section.name} at position {15:fp.tell()}"')
|
_read_types(fp, Psk.MorphData, section, psk.morph_data)
|
||||||
return psk
|
case _:
if section.name.startswith(b'EXTRAUV'):
_read_types(fp, Vector2, section, psk.extra_uvs)
else:
# Section is not handled, skip it.
fp.seek(section.data_size * section.data_count, os.SEEK_CUR)
warnings.warn(f'Unrecognized section "{section.name} at position {fp.tell():15}"')

"""
UEViewer exports a sidecar file (*.props.txt) with fully-qualified reference paths for each material
(e.g., Texture'Package.Group.Object').
"""
psk.material_references = _read_material_references(path)

"""
Tools like UEViewer and CUE4Parse write the point index as a 32-bit integer, exploiting the fact that due to struct
alignment, there were 16-bits of padding following the original 16-bit point index in the wedge struct.
However, this breaks compatibility with PSK files that were created with older tools that treated the
point index as a 16-bit integer and might have junk data written to the padding bits.
To work around this, we check if each point is still addressable using a 16-bit index, and if it is, assume the
point index is a 16-bit integer and truncate the high bits.
"""
if len(psk.points) <= 65536:
for wedge in psk.wedges:
wedge.point_index &= 0xFFFF

return psk
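
A minimal usage sketch of read_psk (the file path is hypothetical; material_references comes from the optional .props.txt sidecar and may be empty):

psk = read_psk('C:/exports/MyMesh.pskx')
print(f'{len(psk.points)} points, {len(psk.wedges)} wedges, {len(psk.faces)} faces')
print('materials:', [m.name.decode('utf-8') for m in psk.materials])
print('material references:', psk.material_references)
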
33 io_scene_psk_psa/psk/ui.py Normal file
@@ -0,0 +1,33 @@
from bpy.types import Panel


class PSK_PT_material(Panel):
    bl_label = 'PSK Material'
    bl_idname = 'PSK_PT_material'
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = 'material'
    bl_options = {'DEFAULT_CLOSED'}

    @classmethod
    def poll(cls, context):
        return context.material is not None

    def draw(self, context):
        layout = self.layout
        assert layout is not None
        layout.use_property_split = True
        layout.use_property_decorate = False
        material = context.material
        layout.prop(material.psk, 'mesh_triangle_type')
        col = layout.column()
        col.prop(material.psk, 'mesh_triangle_bit_flags', expand=True, text='Flags')


_classes = (
    PSK_PT_material,
)

from bpy.utils import register_classes_factory

register, unregister = register_classes_factory(_classes)
61 io_scene_psk_psa/psk/writer.py Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
import os
|
||||||
|
from ctypes import Structure, sizeof
|
||||||
|
from typing import Type
|
||||||
|
|
||||||
|
from .data import Psk
|
||||||
|
from ..shared.data import PsxBone, Section, Vector3
|
||||||
|
|
||||||
|
MAX_WEDGE_COUNT = 65536
|
||||||
|
MAX_POINT_COUNT = 4294967296
|
||||||
|
MAX_BONE_COUNT = 2147483647
|
||||||
|
MAX_MATERIAL_COUNT = 256
|
||||||
|
|
||||||
|
|
||||||
|
def _write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None):
|
||||||
|
section = Section()
|
||||||
|
section.name = name
|
||||||
|
if data_type is not None and data is not None:
|
||||||
|
section.data_size = sizeof(data_type)
|
||||||
|
section.data_count = len(data)
|
||||||
|
fp.write(section)
|
||||||
|
if data is not None:
|
||||||
|
for datum in data:
|
||||||
|
fp.write(datum)
|
||||||
|
|
||||||
|
|
||||||
|
def write_psk(psk: Psk, path: str):
|
||||||
|
if len(psk.wedges) > MAX_WEDGE_COUNT:
|
||||||
|
raise RuntimeError(f'Number of wedges ({len(psk.wedges)}) exceeds limit of {MAX_WEDGE_COUNT}')
|
||||||
|
if len(psk.points) > MAX_POINT_COUNT:
|
||||||
|
raise RuntimeError(f'Number of vertices ({len(psk.points)}) exceeds limit of {MAX_POINT_COUNT}')
|
||||||
|
if len(psk.materials) > MAX_MATERIAL_COUNT:
|
||||||
|
raise RuntimeError(f'Number of materials ({len(psk.materials)}) exceeds limit of {MAX_MATERIAL_COUNT}')
|
||||||
|
if len(psk.bones) > MAX_BONE_COUNT:
|
||||||
|
raise RuntimeError(f'Number of bones ({len(psk.bones)}) exceeds limit of {MAX_BONE_COUNT}')
|
||||||
|
if len(psk.bones) == 0:
|
||||||
|
raise RuntimeError(f'At least one bone must be marked for export')
|
||||||
|
|
||||||
|
# Make the directory for the file if it doesn't exist.
|
||||||
|
os.makedirs(os.path.dirname(path), exist_ok=True)
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(path, 'wb') as fp:
|
||||||
|
_write_section(fp, b'ACTRHEAD')
|
||||||
|
_write_section(fp, b'PNTS0000', Vector3, psk.points)
|
||||||
|
|
||||||
|
wedges = []
|
||||||
|
for index, w in enumerate(psk.wedges):
|
||||||
|
wedge = Psk.Wedge16()
|
||||||
|
wedge.material_index = w.material_index
|
||||||
|
wedge.u = w.u
|
||||||
|
wedge.v = w.v
|
||||||
|
wedge.point_index = w.point_index
|
||||||
|
wedges.append(wedge)
|
||||||
|
|
||||||
|
_write_section(fp, b'VTXW0000', Psk.Wedge16, wedges)
|
||||||
|
_write_section(fp, b'FACE0000', Psk.Face, psk.faces)
|
||||||
|
_write_section(fp, b'MATT0000', Psk.Material, psk.materials)
|
||||||
|
_write_section(fp, b'REFSKELT', PsxBone, psk.bones)
|
||||||
|
_write_section(fp, b'RAWWEIGHTS', Psk.Weight, psk.weights)
|
||||||
|
except PermissionError as e:
|
||||||
|
raise RuntimeError(f'The current user "{os.getlogin()}" does not have permission to write to "{path}"') from e
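
A minimal sketch of calling write_psk, assuming a Psk instance has already been populated elsewhere (for example by the builder used by the export operators); the output path is hypothetical:

# write_psk validates the wedge/point/material/bone limits and raises RuntimeError on failure.
try:
    write_psk(psk, 'C:/exports/MyMesh.psk')
except RuntimeError as e:
    print('PSK export failed:', e)
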
0 io_scene_psk_psa/shared/__init__.py Normal file
@@ -1,4 +1,4 @@
|
|||||||
from ctypes import *
|
from ctypes import Structure, c_char, c_int32, c_float, c_ubyte
|
||||||
from typing import Tuple
|
from typing import Tuple
|
||||||
|
|
||||||
|
|
||||||
@@ -17,13 +17,18 @@ class Color(Structure):
|
|||||||
yield self.a
|
yield self.a
|
||||||
|
|
||||||
def __eq__(self, other):
|
def __eq__(self, other):
|
||||||
return all(map(lambda x: x[0] == x[1], zip(self, other)))
|
return self.r == other.r and self.g == other.g and self.b == other.b and self.a == other.a
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return repr(tuple(self))
|
return repr(tuple(self))
|
||||||
|
|
||||||
def normalized(self) -> Tuple:
|
def normalized(self) -> Tuple:
|
||||||
return tuple(map(lambda x: x / 255.0, iter(self)))
|
return (
|
||||||
|
self.r / 255.0,
|
||||||
|
self.g / 255.0,
|
||||||
|
self.b / 255.0,
|
||||||
|
self.a / 255.0
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class Vector2(Structure):
|
class Vector2(Structure):
|
||||||
@@ -82,6 +87,19 @@ class Quaternion(Structure):
|
|||||||
return Quaternion(0, 0, 0, 1)
|
return Quaternion(0, 0, 0, 1)
|
||||||
|
|
||||||
|
|
||||||
|
class PsxBone(Structure):
|
||||||
|
_fields_ = [
|
||||||
|
('name', c_char * 64),
|
||||||
|
('flags', c_int32),
|
||||||
|
('children_count', c_int32),
|
||||||
|
('parent_index', c_int32),
|
||||||
|
('rotation', Quaternion),
|
||||||
|
('location', Vector3),
|
||||||
|
('length', c_float),
|
||||||
|
('size', Vector3)
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class Section(Structure):
|
class Section(Structure):
|
||||||
_fields_ = [
|
_fields_ = [
|
||||||
('name', c_char * 20),
|
('name', c_char * 20),
148 io_scene_psk_psa/shared/dfs.py Normal file
@@ -0,0 +1,148 @@
|
|||||||
|
"""
|
||||||
|
Depth-first object iterator functions for Blender collections and view layers.
|
||||||
|
|
||||||
|
These functions are used to iterate over objects in a collection or view layer in a depth-first manner, including
|
||||||
|
instances. This is useful for exporters that need to traverse the object hierarchy in a predictable order.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Optional, Set, Iterable, List
|
||||||
|
|
||||||
|
from bpy.types import Collection, Object, ViewLayer, LayerCollection
|
||||||
|
from mathutils import Matrix
|
||||||
|
|
||||||
|
|
||||||
|
class DfsObject:
|
||||||
|
"""
|
||||||
|
Represents an object in a depth-first search.
|
||||||
|
"""
|
||||||
|
def __init__(self, obj: Object, instance_objects: List[Object], matrix_world: Matrix):
|
||||||
|
self.obj = obj
|
||||||
|
self.instance_objects = instance_objects
|
||||||
|
self.matrix_world = matrix_world
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_visible(self) -> bool:
|
||||||
|
"""
|
||||||
|
Check if the object is visible.
|
||||||
|
|
||||||
|
@return: True if the object is visible, False otherwise.
|
||||||
|
"""
|
||||||
|
if self.instance_objects:
|
||||||
|
return self.instance_objects[-1].visible_get()
|
||||||
|
return self.obj.visible_get()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_selected(self) -> bool:
|
||||||
|
"""
|
||||||
|
Check if the object is selected.
|
||||||
|
@return: True if the object is selected, False otherwise.
|
||||||
|
"""
|
||||||
|
if self.instance_objects:
|
||||||
|
return self.instance_objects[-1].select_get()
|
||||||
|
return self.obj.select_get()
|
||||||
|
|
||||||
def _dfs_object_children(obj: Object, collection: Collection) -> Iterable[Object]:
    """
    Construct a list of objects in hierarchy order from `collection.objects`, only keeping those that are in the
    collection.

    @param obj: The object to start the search from.
    @param collection: The collection to search in.
    @return: An iterable of objects in hierarchy order.
    """
    yield obj
    for child in obj.children:
        if child.name in collection.objects:
            yield from _dfs_object_children(child, collection)


def dfs_objects_in_collection(collection: Collection) -> Iterable[Object]:
    """
    Returns a depth-first iterator over all objects in a collection, only keeping those that are directly in the
    collection.

    @param collection: The collection to search in.
    @return: An iterable of objects in hierarchy order.
    """
    objects_hierarchy = []
    for obj in collection.objects:
        if obj.parent is None or obj.parent not in set(collection.objects):
            objects_hierarchy.append(obj)
    for obj in objects_hierarchy:
        yield from _dfs_object_children(obj, collection)


def dfs_collection_objects(collection: Collection, visible_only: bool = False) -> Iterable[DfsObject]:
    """
    Depth-first search of objects in a collection, including recursing into instances.

    @param collection: The collection to search in.
    @return: An iterable of tuples containing the object, the instance objects, and the world matrix.
    """
    yield from _dfs_collection_objects_recursive(collection)


def _dfs_collection_objects_recursive(
        collection: Collection,
        instance_objects: Optional[List[Object]] = None,
        matrix_world: Matrix = Matrix.Identity(4),
        visited: Optional[Set[Object]] = None
) -> Iterable[DfsObject]:
    """
    Depth-first search of objects in a collection, including recursing into instances.
    This is a recursive function.

    @param collection: The collection to search in.
    @param instance_objects: The running hierarchy of instance objects.
    @param matrix_world: The world matrix of the current object.
    @param visited: A set of visited object-instance pairs.
    @return: An iterable of tuples containing the object, the instance objects, and the world matrix.
    """

    # We want to also yield the top-level instance object so that callers can inspect the selection status etc.
    if visited is None:
        visited = set()

    if instance_objects is None:
        instance_objects = list()

    # First, yield all objects in child collections.
    for child in collection.children:
        yield from _dfs_collection_objects_recursive(child, instance_objects, matrix_world.copy(), visited)

    # Then, evaluate all objects in this collection.
    for obj in dfs_objects_in_collection(collection):
        visited_pair = (obj, instance_objects[-1] if instance_objects else None)
        if visited_pair in visited:
            continue
        # If this is an instance, we need to recurse into it.
        if obj.instance_collection is not None:
            # Calculate the instance transform.
            instance_offset_matrix = Matrix.Translation(-obj.instance_collection.instance_offset)
            # Recurse into the instance collection.
            yield from _dfs_collection_objects_recursive(obj.instance_collection,
                                                         instance_objects + [obj],
                                                         matrix_world @ (obj.matrix_world @ instance_offset_matrix),
                                                         visited)
        else:
            # Object is not an instance, yield it.
            yield DfsObject(obj, instance_objects, matrix_world @ obj.matrix_world)
            visited.add(visited_pair)


def dfs_view_layer_objects(view_layer: ViewLayer) -> Iterable[DfsObject]:
    """
    Depth-first iterator over all objects in a view layer, including recursing into instances.

    @param view_layer: The view layer to inspect.
    @return: An iterable of tuples containing the object, the instance objects, and the world matrix.
    """
    visited = set()

    def layer_collection_objects_recursive(layer_collection: LayerCollection):
        for child in layer_collection.children:
            yield from layer_collection_objects_recursive(child)
        # Iterate only the top-level objects in this collection first.
        yield from _dfs_collection_objects_recursive(layer_collection.collection, visited=visited)

    yield from layer_collection_objects_recursive(view_layer.layer_collection)
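For orientation, a minimal usage sketch of how these iterators are typically consumed; DfsObject is unpacked here as the (object, instance objects, world matrix) tuple described in the docstrings above, and the collection name 'Collection' is an assumption for illustration:

import bpy

# Walk every object reachable from the current view layer, including collection instances.
for obj, instance_objects, matrix_world in dfs_view_layer_objects(bpy.context.view_layer):
    print(obj.name, matrix_world.to_translation())

# Or walk only the objects directly inside a specific collection, in hierarchy order.
collection = bpy.data.collections.get('Collection')  # assumed collection name
if collection is not None:
    for obj in dfs_objects_in_collection(collection):
        print(obj.name)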
502
io_scene_psk_psa/shared/helpers.py
Normal file
@@ -0,0 +1,502 @@
import bpy
from collections import Counter
from typing import List, Iterable, Optional, Dict, Tuple, cast as typing_cast
from bpy.types import Armature, AnimData, Collection, Context, Object, ArmatureModifier, SpaceProperties
from mathutils import Matrix, Vector, Quaternion as BpyQuaternion
from .data import Vector3, Quaternion
from ..shared.data import PsxBone


def rgb_to_srgb(c: float) -> float:
    if c > 0.0031308:
        return 1.055 * (pow(c, (1.0 / 2.4))) - 0.055
    return 12.92 * c


def get_nla_strips_in_frame_range(animation_data: AnimData, frame_min: float, frame_max: float):
    if animation_data is None:
        return
    for nla_track in animation_data.nla_tracks:
        if nla_track.mute:
            continue
        for strip in nla_track.strips:
            if (strip.frame_start < frame_min and strip.frame_end > frame_max) or \
                    (frame_min <= strip.frame_start < frame_max) or \
                    (frame_min < strip.frame_end <= frame_max):
                yield strip
|
||||||
|
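For reference, a minimal usage sketch of get_nla_strips_in_frame_range, assuming an armature object named 'Armature' with NLA tracks already set up; the yielded strips are exactly the unmuted strips whose frame range overlaps the queried window:

import bpy

armature_object = bpy.data.objects['Armature']  # assumed to exist in the scene
# Collect every unmuted NLA strip that overlaps frames 10..50.
overlapping = list(get_nla_strips_in_frame_range(armature_object.animation_data, 10.0, 50.0))
for strip in overlapping:
    print(strip.name, strip.frame_start, strip.frame_end)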
|
||||||
|
def populate_bone_collection_list(bone_collection_list, armature_objects: Iterable[Object], primary_key: str = 'OBJECT'):
|
||||||
|
"""
|
||||||
|
Updates the bone collection list.
|
||||||
|
|
||||||
|
Selection is preserved between updates unless none of the groups were previously selected.
|
||||||
|
Otherwise, all collections are selected by default.
|
||||||
|
|
||||||
|
The primary key is used to determine how to group the armature objects. For example, if the primary key is
|
||||||
|
'DATA', then all bone collections with the same armature data-block will be under one entry.
|
||||||
|
|
||||||
|
:param bone_collection_list: The list to update.
|
||||||
|
:param armature_objects: The armature objects to populate the collection with.
|
||||||
|
:param primary_key: The primary key to use for the collection (one of 'OBJECT' or 'DATA').
|
||||||
|
:return: None
|
||||||
|
"""
|
||||||
|
has_selected_collections = any([g.is_selected for g in bone_collection_list])
|
||||||
|
unassigned_collection_is_selected, selected_assigned_collection_names = True, []
|
||||||
|
|
||||||
|
if primary_key not in ('OBJECT', 'DATA'):
|
||||||
|
assert False, f'Invalid primary key: {primary_key}'
|
||||||
|
|
||||||
|
if not armature_objects:
|
||||||
|
return
|
||||||
|
|
||||||
|
if has_selected_collections:
|
||||||
|
# Preserve group selections before clearing the list.
|
||||||
|
# We handle selections for the unassigned group separately to cover the edge case
|
||||||
|
# where there might be an actual group with 'Unassigned' as its name.
|
||||||
|
unassigned_collection_idx, unassigned_collection_is_selected = next(iter([
|
||||||
|
(i, g.is_selected) for i, g in enumerate(bone_collection_list) if g.index == -1]), (-1, False))
|
||||||
|
|
||||||
|
selected_assigned_collection_names = [
|
||||||
|
g.name for i, g in enumerate(bone_collection_list) if i != unassigned_collection_idx and g.is_selected]
|
||||||
|
|
||||||
|
|
||||||
|
bone_collection_list.clear()
|
||||||
|
|
||||||
|
unique_armature_data = set()
|
||||||
|
|
||||||
|
for armature_object in armature_objects:
|
||||||
|
armature = typing_cast(Armature, armature_object.data)
|
||||||
|
|
||||||
|
if armature is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if primary_key == 'DATA' and armature_object.data in unique_armature_data:
|
||||||
|
# Skip this armature since we have already added an entry for it and we are using the data as the key.
|
||||||
|
continue
|
||||||
|
|
||||||
|
unique_armature_data.add(armature_object.data)
|
||||||
|
|
||||||
|
item = bone_collection_list.add()
|
||||||
|
item.armature_object_name = armature_object.name
|
||||||
|
item.armature_data_name = armature_object.data.name if armature_object.data else ''
|
||||||
|
item.name = 'Unassigned' # TODO: localize
|
||||||
|
item.index = -1
|
||||||
|
# Count the number of bones without an assigned bone collection
|
||||||
|
item.count = sum(map(lambda bone: 1 if len(bone.collections) == 0 else 0, armature.bones))
|
||||||
|
item.is_selected = unassigned_collection_is_selected
|
||||||
|
|
||||||
|
for bone_collection_index, bone_collection in enumerate(armature.collections_all):
|
||||||
|
item = bone_collection_list.add()
|
||||||
|
item.armature_object_name = armature_object.name
|
||||||
|
item.armature_data_name = armature_object.data.name if armature_object.data else ''
|
||||||
|
item.name = bone_collection.name
|
||||||
|
item.index = bone_collection_index
|
||||||
|
item.count = len(bone_collection.bones)
|
||||||
|
item.is_selected = bone_collection.name in selected_assigned_collection_names if has_selected_collections else True
|
||||||
|
|
||||||
|
|
||||||
|
def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_collection_indices: Iterable[int]) -> List[str]:
|
||||||
|
"""
|
||||||
|
Returns a sorted list of bone indices that should be exported for the given bone filter mode and bone collections.
|
||||||
|
|
||||||
|
Note that the ancestors of bones within the bone collections will also be present in the returned list.
|
||||||
|
|
||||||
|
:param armature_object: Blender object with type `'ARMATURE'`
|
||||||
|
:param bone_filter_mode: One of `['ALL', 'BONE_COLLECTIONS']`
|
||||||
|
:param bone_collection_indices: A list of bone collection indices to export.
|
||||||
|
:return: A sorted list of bone indices that should be exported.
|
||||||
|
"""
|
||||||
|
if armature_object is None or armature_object.type != 'ARMATURE':
|
||||||
|
raise ValueError('An armature object must be supplied')
|
||||||
|
|
||||||
|
armature_data = typing_cast(Armature, armature_object.data)
|
||||||
|
bones = armature_data.bones
|
||||||
|
bone_names = [x.name for x in bones]
|
||||||
|
|
||||||
|
# Get a list of the bone indices that we are explicitly including.
|
||||||
|
bone_index_stack = []
|
||||||
|
is_exporting_unassigned_bone_collections = -1 in bone_collection_indices
|
||||||
|
bone_collections = list(armature_data.collections_all)
|
||||||
|
|
||||||
|
for bone_index, bone in enumerate(bones):
|
||||||
|
# Check if this bone is in any of the collections in the bone collection indices list.
|
||||||
|
this_bone_collection_indices = set(bone_collections.index(x) for x in bone.collections)
|
||||||
|
is_in_exported_bone_collections = len(set(bone_collection_indices).intersection(this_bone_collection_indices)) > 0
|
||||||
|
|
||||||
|
if bone_filter_mode == 'ALL' or \
|
||||||
|
(len(bone.collections) == 0 and is_exporting_unassigned_bone_collections) or \
|
||||||
|
is_in_exported_bone_collections:
|
||||||
|
bone_index_stack.append((bone_index, None))
|
||||||
|
|
||||||
|
# For each bone that is explicitly being added, recursively walk up the hierarchy and ensure that all of
|
||||||
|
# those ancestor bone indices are also in the list.
|
||||||
|
bone_indices = dict()
|
||||||
|
while len(bone_index_stack) > 0:
|
||||||
|
bone_index, instigator_bone_index = bone_index_stack.pop()
|
||||||
|
bone = bones[bone_index]
|
||||||
|
if bone.parent is not None:
|
||||||
|
parent_bone_index = bone_names.index(bone.parent.name)
|
||||||
|
if parent_bone_index not in bone_indices:
|
||||||
|
bone_index_stack.append((parent_bone_index, bone_index))
|
||||||
|
bone_indices[bone_index] = instigator_bone_index
|
||||||
|
|
||||||
|
# Sort the bone index list in-place.
|
||||||
|
bone_indices = [(x[0], x[1]) for x in bone_indices.items()]
|
||||||
|
bone_indices.sort(key=lambda x: x[0])
|
||||||
|
|
||||||
|
# Split out the bone indices and the instigator bone names into separate lists.
|
||||||
|
# We use the bone names for the return values because the bone name is a more universal way of referencing them.
|
||||||
|
# For example, users of this function may modify bone lists, which would invalidate the indices and require an
|
||||||
|
# index mapping scheme to resolve it. Using strings is more comfy and results in less code downstream.
|
||||||
|
instigator_bone_names = [bones[x[1]].name if x[1] is not None else None for x in bone_indices]
|
||||||
|
bone_names = [bones[x[0]].name for x in bone_indices]
|
||||||
|
|
||||||
|
# Ensure that the hierarchy we are sending back has a single root bone.
|
||||||
|
# TODO: This is only relevant if we are exporting a single armature; how should we reorganize this call?
|
||||||
|
bone_indices = [x[0] for x in bone_indices]
|
||||||
|
root_bones = [bones[bone_index] for bone_index in bone_indices if bones[bone_index].parent is None]
|
||||||
|
if len(root_bones) > 1:
|
||||||
|
# There is more than one root bone.
|
||||||
|
# Print out why each root bone was included by linking it to one of the explicitly included bones.
|
||||||
|
root_bone_names = [bone.name for bone in root_bones]
|
||||||
|
for root_bone_name in root_bone_names:
|
||||||
|
bone_name = root_bone_name
|
||||||
|
while True:
|
||||||
|
# Traverse the instigator chain until the end to find the true instigator bone.
|
||||||
|
# TODO: in future, it would be preferential to have a readout of *all* instigator bones.
|
||||||
|
instigator_bone_name = instigator_bone_names[bone_names.index(bone_name)]
|
||||||
|
if instigator_bone_name is None:
|
||||||
|
print(f'Root bone "{root_bone_name}" was included because {bone_name} was marked for export')
|
||||||
|
break
|
||||||
|
bone_name = instigator_bone_name
|
||||||
|
|
||||||
|
raise RuntimeError('Exported bone hierarchy must have a single root bone.\n'
|
||||||
|
f'The bone hierarchy marked for export has {len(root_bones)} root bones: {root_bone_names}.\n'
|
||||||
|
f'Additional debugging information has been written to the console.')
|
||||||
|
|
||||||
|
return bone_names
|
||||||
|
|
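A usage sketch for get_export_bone_names, assuming the active object is an armature; the index -1 stands for the synthetic "Unassigned" collection, matching populate_bone_collection_list above:

import bpy

armature_object = bpy.context.active_object  # assumed to be an object of type 'ARMATURE'

# Export every bone regardless of bone collections.
all_names = get_export_bone_names(armature_object, 'ALL', [])

# Export only bones in the first bone collection (index 0) plus unassigned bones (index -1);
# ancestors of the matched bones are pulled in automatically so the hierarchy stays connected.
filtered_names = get_export_bone_names(armature_object, 'BONE_COLLECTIONS', [0, -1])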
||||||
|
|
||||||
|
def is_bdk_addon_loaded() -> bool:
|
||||||
|
return 'bdk' in dir(bpy.ops)
|
||||||
|
|
||||||
|
|
||||||
|
def convert_string_to_cp1252_bytes(string: str) -> bytes:
|
||||||
|
try:
|
||||||
|
return bytes(string, encoding='windows-1252')
|
||||||
|
except UnicodeEncodeError as e:
|
||||||
|
raise RuntimeError(f'The string "{string}" contains characters that cannot be encoded in the Windows-1252 codepage') from e
|
||||||
|
|
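A small sketch of how convert_string_to_cp1252_bytes behaves; the bone names below are illustrative only:

name_bytes = convert_string_to_cp1252_bytes('Bip01_Spine')   # b'Bip01_Spine'

try:
    convert_string_to_cp1252_bytes('骨盤')  # characters outside Windows-1252
except RuntimeError as e:
    print(e)  # reports that the string cannot be encoded in the Windows-1252 codepage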
||||||
|
|
||||||
|
# TODO: Perhaps export space should just be a transform matrix, since the below is not actually used unless we're using WORLD space.
|
||||||
|
def create_psx_bones_from_blender_bones(
|
||||||
|
bones: List[bpy.types.Bone],
|
||||||
|
export_space: str = 'WORLD',
|
||||||
|
armature_object_matrix_world: Matrix = Matrix.Identity(4),
|
||||||
|
scale = 1.0,
|
||||||
|
forward_axis: str = 'X',
|
||||||
|
up_axis: str = 'Z',
|
||||||
|
root_bone: Optional = None,
|
||||||
|
) -> List[PsxBone]:
|
||||||
|
|
||||||
|
scale_matrix = Matrix.Scale(scale, 4)
|
||||||
|
|
||||||
|
coordinate_system_transform = get_coordinate_system_transform(forward_axis, up_axis)
|
||||||
|
coordinate_system_default_rotation = coordinate_system_transform.to_quaternion()
|
||||||
|
|
||||||
|
psx_bones = []
|
||||||
|
for bone in bones:
|
||||||
|
psx_bone = PsxBone()
|
||||||
|
psx_bone.name = convert_string_to_cp1252_bytes(bone.name)
|
||||||
|
|
||||||
|
try:
|
||||||
|
parent_index = bones.index(bone.parent)
|
||||||
|
psx_bone.parent_index = parent_index
|
||||||
|
psx_bones[parent_index].children_count += 1
|
||||||
|
except ValueError:
|
||||||
|
psx_bone.parent_index = 0
|
||||||
|
|
||||||
|
if bone.parent is not None:
|
||||||
|
rotation = bone.matrix.to_quaternion().conjugated()
|
||||||
|
inverse_parent_rotation = bone.parent.matrix.to_quaternion().inverted()
|
||||||
|
parent_head = inverse_parent_rotation @ bone.parent.head
|
||||||
|
parent_tail = inverse_parent_rotation @ bone.parent.tail
|
||||||
|
location = (parent_tail - parent_head) + bone.head
|
||||||
|
elif bone.parent is None and root_bone is not None:
|
||||||
|
# This is a special case for the root bone when a shared root bone has been generated (e.g. when exporting multiple armatures).
|
||||||
|
# Because the root bone and child bones are in different spaces, we need to treat the root bone of this
|
||||||
|
# armature as though it were a child bone.
|
||||||
|
bone_rotation = bone.matrix.to_quaternion().conjugated()
|
||||||
|
local_rotation = armature_object_matrix_world.to_3x3().to_quaternion().conjugated()
|
||||||
|
rotation = bone_rotation @ local_rotation
|
||||||
|
translation, _, scale = armature_object_matrix_world.decompose()
|
||||||
|
# Invert the scale of the armature object matrix.
|
||||||
|
inverse_scale_matrix = Matrix.Identity(4)
|
||||||
|
inverse_scale_matrix[0][0] = 1.0 / scale.x
|
||||||
|
inverse_scale_matrix[1][1] = 1.0 / scale.y
|
||||||
|
inverse_scale_matrix[2][2] = 1.0 / scale.z
|
||||||
|
|
||||||
|
translation = translation @ inverse_scale_matrix
|
||||||
|
location = translation + bone.head
|
||||||
|
else:
|
||||||
|
def get_armature_local_matrix():
|
||||||
|
match export_space:
|
||||||
|
case 'WORLD':
|
||||||
|
return armature_object_matrix_world
|
||||||
|
case 'ARMATURE':
|
||||||
|
return Matrix.Identity(4)
|
||||||
|
case 'ROOT':
|
||||||
|
return bone.matrix.inverted()
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid export space: {export_space}'
|
||||||
|
|
||||||
|
armature_local_matrix = get_armature_local_matrix()
|
||||||
|
location = armature_local_matrix @ bone.head
|
||||||
|
location = coordinate_system_transform @ location
|
||||||
|
bone_rotation = bone.matrix.to_quaternion().conjugated()
|
||||||
|
local_rotation = armature_local_matrix.to_3x3().to_quaternion().conjugated()
|
||||||
|
rotation = bone_rotation @ local_rotation
|
||||||
|
rotation.conjugate()
|
||||||
|
rotation = coordinate_system_default_rotation @ rotation
|
||||||
|
|
||||||
|
location = scale_matrix @ location
|
||||||
|
|
||||||
|
# If the armature object has been scaled, we need to scale the bone's location to match.
|
||||||
|
_, _, armature_object_scale = armature_object_matrix_world.decompose()
|
||||||
|
location.x *= armature_object_scale.x
|
||||||
|
location.y *= armature_object_scale.y
|
||||||
|
location.z *= armature_object_scale.z
|
||||||
|
|
||||||
|
psx_bone.location.x = location.x
|
||||||
|
psx_bone.location.y = location.y
|
||||||
|
psx_bone.location.z = location.z
|
||||||
|
|
||||||
|
psx_bone.rotation.w = rotation.w
|
||||||
|
psx_bone.rotation.x = rotation.x
|
||||||
|
psx_bone.rotation.y = rotation.y
|
||||||
|
psx_bone.rotation.z = rotation.z
|
||||||
|
|
||||||
|
psx_bones.append(psx_bone)
|
||||||
|
|
||||||
|
return psx_bones
|
||||||
|
|
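A hedged sketch of calling create_psx_bones_from_blender_bones directly for a single armature in world space; in the add-on this is normally driven through create_psx_bones below, and the object name 'Armature' and the bone ordering are assumptions for illustration:

import bpy

armature_object = bpy.data.objects['Armature']  # assumed armature object name
armature_data = armature_object.data

# Assumes the bone list is ordered parent-before-child; the add-on itself builds its
# list from get_export_bone_names() before calling this function.
psx_bones = create_psx_bones_from_blender_bones(
    bones=list(armature_data.bones),
    export_space='WORLD',
    armature_object_matrix_world=armature_object.matrix_world,
    scale=1.0,
    forward_axis='X',
    up_axis='Z',
)
for psx_bone in psx_bones:
    print(psx_bone.name, psx_bone.parent_index)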
||||||
|
|
||||||
|
class PsxBoneCreateResult:
|
||||||
|
def __init__(self,
|
||||||
|
bones: List[Tuple[PsxBone, Optional[Object]]], # List of tuples of (psx_bone, armature_object)
|
||||||
|
armature_object_root_bone_indices: Dict[Object, int],
|
||||||
|
armature_object_bone_names: Dict[Object, List[str]],
|
||||||
|
):
|
||||||
|
self.bones = bones
|
||||||
|
self.armature_object_root_bone_indices = armature_object_root_bone_indices
|
||||||
|
self.armature_object_bone_names = armature_object_bone_names
|
||||||
|
|
||||||
|
@property
|
||||||
|
def has_false_root_bone(self) -> bool:
|
||||||
|
return len(self.bones) > 0 and self.bones[0][1] is None
|
||||||
|
|
||||||
|
|
||||||
|
def convert_bpy_quaternion_to_psx_quaternion(other: BpyQuaternion) -> Quaternion:
|
||||||
|
quaternion = Quaternion()
|
||||||
|
quaternion.x = other.x
|
||||||
|
quaternion.y = other.y
|
||||||
|
quaternion.z = other.z
|
||||||
|
quaternion.w = other.w
|
||||||
|
return quaternion
|
||||||
|
|
||||||
|
|
||||||
|
class PsxBoneCollection:
|
||||||
|
"""
|
||||||
|
Stores the armature's object name, data-block name and bone collection index.
|
||||||
|
"""
|
||||||
|
def __init__(self, armature_object_name: str, armature_data_name: str, index: int):
|
||||||
|
self.armature_object_name = armature_object_name
|
||||||
|
self.armature_data_name = armature_data_name
|
||||||
|
self.index = index
|
||||||
|
|
||||||
|
|
||||||
|
def create_psx_bones(
|
||||||
|
armature_objects: List[Object],
|
||||||
|
export_space: str = 'WORLD',
|
||||||
|
root_bone_name: str = 'ROOT',
|
||||||
|
forward_axis: str = 'X',
|
||||||
|
up_axis: str = 'Z',
|
||||||
|
scale: float = 1.0,
|
||||||
|
bone_filter_mode: str = 'ALL',
|
||||||
|
bone_collection_indices: Optional[List[PsxBoneCollection]] = None,
|
||||||
|
bone_collection_primary_key: str = 'OBJECT',
|
||||||
|
) -> PsxBoneCreateResult:
|
||||||
|
"""
|
||||||
|
Creates a list of PSX bones from the given armature objects and options.
|
||||||
|
This function will throw a RuntimeError if multiple armature objects are passed in and the export space is not WORLD.
|
||||||
|
It will also throw a RuntimeError if the bone names are not unique when compared case-insensitively.
|
||||||
|
"""
|
||||||
|
if bone_collection_indices is None:
|
||||||
|
bone_collection_indices = []
|
||||||
|
|
||||||
|
bones: List[Tuple[PsxBone, Optional[Object]]] = []
|
||||||
|
|
||||||
|
if export_space != 'WORLD' and len(armature_objects) >= 2:
|
||||||
|
armature_object_names = [armature_object.name for armature_object in armature_objects]
|
||||||
|
raise RuntimeError(f'When exporting multiple armatures, the Export Space must be World.\n' \
|
||||||
|
f'The following armatures are attempting to be exported: {armature_object_names}')
|
||||||
|
|
||||||
|
coordinate_system_matrix = get_coordinate_system_transform(forward_axis, up_axis)
|
||||||
|
coordinate_system_default_rotation = coordinate_system_matrix.to_quaternion()
|
||||||
|
|
||||||
|
total_bone_count = sum(len(armature_object.data.bones) for armature_object in armature_objects)
|
||||||
|
|
||||||
|
# Store the bone names to be exported for each armature object.
|
||||||
|
armature_object_bone_names: Dict[Object, List[str]] = dict()
|
||||||
|
for armature_object in armature_objects:
|
||||||
|
armature_bone_collection_indices: List[int] = []
|
||||||
|
match bone_collection_primary_key:
|
||||||
|
case 'OBJECT':
|
||||||
|
armature_bone_collection_indices.extend([x.index for x in bone_collection_indices if x.armature_object_name == armature_object.name])
|
||||||
|
case 'DATA':
|
||||||
|
armature_bone_collection_indices.extend([x.index for x in bone_collection_indices if armature_object.data and x.armature_data_name == armature_object.data.name])
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid primary key: {bone_collection_primary_key}'
|
||||||
|
bone_names = get_export_bone_names(armature_object, bone_filter_mode, armature_bone_collection_indices)
|
||||||
|
armature_object_bone_names[armature_object] = bone_names
|
||||||
|
|
||||||
|
# Store the index of the root bone for each armature object.
|
||||||
|
# We will need this later to correctly assign vertex weights.
|
||||||
|
armature_object_root_bone_indices: Dict[Optional[Object], int] = dict()
|
||||||
|
|
||||||
|
if len(armature_objects) == 0 or total_bone_count == 0:
|
||||||
|
# If the mesh has no armature object or no bones, simply assign it a dummy bone at the root to satisfy the
|
||||||
|
# requirement that a PSK file must have at least one bone.
|
||||||
|
psx_bone = PsxBone()
|
||||||
|
psx_bone.name = convert_string_to_cp1252_bytes(root_bone_name)
|
||||||
|
psx_bone.flags = 0
|
||||||
|
psx_bone.children_count = 0
|
||||||
|
psx_bone.parent_index = 0
|
||||||
|
psx_bone.location = Vector3.zero()
|
||||||
|
psx_bone.rotation = convert_bpy_quaternion_to_psx_quaternion(coordinate_system_default_rotation)
|
||||||
|
bones.append((psx_bone, None))
|
||||||
|
|
||||||
|
armature_object_root_bone_indices[None] = 0
|
||||||
|
else:
|
||||||
|
# If we have multiple armature objects, create a root bone at the world origin.
|
||||||
|
if len(armature_objects) > 1:
|
||||||
|
psx_bone = PsxBone()
|
||||||
|
psx_bone.name = convert_string_to_cp1252_bytes(root_bone_name)
|
||||||
|
psx_bone.flags = 0
|
||||||
|
psx_bone.children_count = total_bone_count
|
||||||
|
psx_bone.parent_index = 0
|
||||||
|
psx_bone.location = Vector3.zero()
|
||||||
|
psx_bone.rotation = convert_bpy_quaternion_to_psx_quaternion(coordinate_system_default_rotation)
|
||||||
|
bones.append((psx_bone, None))
|
||||||
|
|
||||||
|
armature_object_root_bone_indices[None] = 0
|
||||||
|
|
||||||
|
root_bone = bones[0][0] if len(bones) > 0 else None
|
||||||
|
|
||||||
|
for armature_object in armature_objects:
|
||||||
|
bone_names = armature_object_bone_names[armature_object]
|
||||||
|
armature_data = typing_cast(Armature, armature_object.data)
|
||||||
|
armature_bones = [armature_data.bones[bone_name] for bone_name in bone_names]
|
||||||
|
|
||||||
|
armature_psx_bones = create_psx_bones_from_blender_bones(
|
||||||
|
bones=armature_bones,
|
||||||
|
export_space=export_space,
|
||||||
|
armature_object_matrix_world=armature_object.matrix_world,
|
||||||
|
scale=scale,
|
||||||
|
forward_axis=forward_axis,
|
||||||
|
up_axis=up_axis,
|
||||||
|
root_bone=root_bone,
|
||||||
|
)
|
||||||
|
|
||||||
|
# If we are appending these bones to an existing list of bones, we need to adjust the parent indices for
|
||||||
|
# all the non-root bones.
|
||||||
|
if len(bones) > 0:
|
||||||
|
parent_index_offset = len(bones)
|
||||||
|
for bone in armature_psx_bones[1:]:
|
||||||
|
bone.parent_index += parent_index_offset
|
||||||
|
|
||||||
|
armature_object_root_bone_indices[armature_object] = len(bones)
|
||||||
|
|
||||||
|
bones.extend((psx_bone, armature_object) for psx_bone in armature_psx_bones)
|
||||||
|
|
||||||
|
# Check if there are bone name conflicts between armatures.
|
||||||
|
bone_name_counts = Counter(bone[0].name.decode('windows-1252').upper() for bone in bones)
|
||||||
|
for bone_name, count in bone_name_counts.items():
|
||||||
|
if count > 1:
|
||||||
|
error_message = (f'Found {count} bones with the name "{bone_name}". '
                 f'Bone names must be unique when compared case-insensitively.')
|
||||||
|
|
||||||
|
if len(armature_objects) > 1 and bone_name == root_bone_name.upper():
|
||||||
|
error_message += ' This is the name of the automatically generated root bone. Consider changing this name.'
|
||||||
|
raise RuntimeError(error_message)
|
||||||
|
|
||||||
|
return PsxBoneCreateResult(
|
||||||
|
bones=bones,
|
||||||
|
armature_object_root_bone_indices=armature_object_root_bone_indices,
|
||||||
|
armature_object_bone_names=armature_object_bone_names,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_vector_from_axis_identifier(axis_identifier: str) -> Vector:
|
||||||
|
match axis_identifier:
|
||||||
|
case 'X':
|
||||||
|
return Vector((1.0, 0.0, 0.0))
|
||||||
|
case 'Y':
|
||||||
|
return Vector((0.0, 1.0, 0.0))
|
||||||
|
case 'Z':
|
||||||
|
return Vector((0.0, 0.0, 1.0))
|
||||||
|
case '-X':
|
||||||
|
return Vector((-1.0, 0.0, 0.0))
|
||||||
|
case '-Y':
|
||||||
|
return Vector((0.0, -1.0, 0.0))
|
||||||
|
case '-Z':
|
||||||
|
return Vector((0.0, 0.0, -1.0))
|
||||||
|
case _:
|
||||||
|
assert False, f'Invalid axis identifier: {axis_identifier}'
|
||||||
|
|
||||||
|
|
||||||
|
def get_coordinate_system_transform(forward_axis: str = 'X', up_axis: str = 'Z') -> Matrix:
|
||||||
|
forward = get_vector_from_axis_identifier(forward_axis)
|
||||||
|
up = get_vector_from_axis_identifier(up_axis)
|
||||||
|
left = up.cross(forward)
|
||||||
|
return Matrix((
|
||||||
|
(forward.x, forward.y, forward.z, 0.0),
|
||||||
|
(left.x, left.y, left.z, 0.0),
|
||||||
|
(up.x, up.y, up.z, 0.0),
|
||||||
|
(0.0, 0.0, 0.0, 1.0)
|
||||||
|
))
|
||||||
|
|
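A quick worked example of get_coordinate_system_transform; with the default X-forward / Z-up arguments the matrix is the identity, and changing the forward axis re-derives the left axis from up x forward:

from mathutils import Matrix, Vector

assert get_coordinate_system_transform('X', 'Z') == Matrix.Identity(4)

m = get_coordinate_system_transform('Y', 'Z')
print(m @ Vector((0.0, 1.0, 0.0)))  # world +Y maps onto the export forward axis (+X)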
||||||
|
|
||||||
|
def get_armatures_for_mesh_objects(mesh_objects: Iterable[Object]):
|
||||||
|
"""
|
||||||
|
Returns a generator of unique armature objects that are used by the given mesh objects.
|
||||||
|
"""
|
||||||
|
armature_objects: set[Object] = set()
|
||||||
|
for mesh_object in mesh_objects:
|
||||||
|
armature_modifiers = [typing_cast(ArmatureModifier, x) for x in mesh_object.modifiers if x.type == 'ARMATURE']
|
||||||
|
for armature_object in map(lambda x: x.object, armature_modifiers):
|
||||||
|
if armature_object is not None:
|
||||||
|
armature_objects.add(armature_object)
|
||||||
|
yield from armature_objects
|
||||||
|
|
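A small sketch of feeding selected meshes into get_armatures_for_mesh_objects; filtering the current selection down to meshes is an assumption for illustration:

import bpy

mesh_objects = [obj for obj in bpy.context.selected_objects if obj.type == 'MESH']
for armature_object in get_armatures_for_mesh_objects(mesh_objects):
    print(armature_object.name)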
||||||
|
|
||||||
|
def get_collection_from_context(context: Context) -> Optional[Collection]:
|
||||||
|
if context.space_data is None or context.space_data.type != 'PROPERTIES':
|
||||||
|
return None
|
||||||
|
space_data = typing_cast(SpaceProperties, context.space_data)
|
||||||
|
if space_data.use_pin_id:
|
||||||
|
return typing_cast(Collection, space_data.pin_id)
|
||||||
|
else:
|
||||||
|
return context.collection
|
||||||
|
|
||||||
|
|
||||||
|
def get_collection_export_operator_from_context(context: Context) -> Optional[object]:
|
||||||
|
collection = get_collection_from_context(context)
|
||||||
|
if collection is None or collection.active_exporter_index is None:
|
||||||
|
return None
|
||||||
|
if collection.active_exporter_index < 0 or collection.active_exporter_index >= len(collection.exporters):
|
||||||
|
return None
|
||||||
|
exporter = collection.exporters[collection.active_exporter_index]
|
||||||
|
return exporter.export_properties
|
||||||
54
io_scene_psk_psa/shared/semver.py
Normal file
@@ -0,0 +1,54 @@
from typing import Tuple


class SemanticVersion(object):
    def __init__(self, version: Tuple[int, int, int]):
        self.major, self.minor, self.patch = version

    def __iter__(self):
        yield self.major
        yield self.minor
        yield self.patch

    @staticmethod
    def compare(lhs: 'SemanticVersion', rhs: 'SemanticVersion') -> int:
        """
        Compares two semantic versions.

        Returns:
            -1 if lhs < rhs
             0 if lhs == rhs
             1 if lhs > rhs
        """
        for l, r in zip(lhs, rhs):
            if l < r:
                return -1
            if l > r:
                return 1
        return 0

    def __str__(self):
        return f'{self.major}.{self.minor}.{self.patch}'

    def __repr__(self):
        return str(self)

    def __eq__(self, other):
        return self.compare(self, other) == 0

    def __ne__(self, other):
        return not self == other

    def __lt__(self, other):
        return self.compare(self, other) == -1

    def __le__(self, other):
        return self.compare(self, other) <= 0

    def __gt__(self, other):
        return self.compare(self, other) == 1

    def __ge__(self, other):
        return self.compare(self, other) >= 0

    def __hash__(self):
        return hash((self.major, self.minor, self.patch))
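A short usage sketch of SemanticVersion; the version numbers are arbitrary examples:

blender_version = SemanticVersion((4, 2, 1))
minimum_version = SemanticVersion((4, 2, 0))

assert blender_version >= minimum_version
assert str(blender_version) == '4.2.1'
assert SemanticVersion.compare(minimum_version, blender_version) == -1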
166
io_scene_psk_psa/shared/types.py
Normal file
@@ -0,0 +1,166 @@
|
|||||||
|
import bpy
|
||||||
|
from bpy.props import CollectionProperty, EnumProperty, StringProperty, IntProperty, BoolProperty, FloatProperty
|
||||||
|
from bpy.types import PropertyGroup, UIList, UILayout, Context, AnyType, Panel
|
||||||
|
|
||||||
|
|
||||||
|
class PSX_UL_bone_collection_list(UIList):
|
||||||
|
|
||||||
|
def draw_item(self, _context: Context, layout: UILayout, _data: AnyType, item: AnyType, _icon: int,
|
||||||
|
_active_data: AnyType, _active_property: str, _index: int = 0, _flt_flag: int = 0):
|
||||||
|
row = layout.row()
|
||||||
|
|
||||||
|
row.prop(item, 'is_selected', text=getattr(item, 'name'))
|
||||||
|
row.label(text=str(getattr(item, 'count')), icon='BONE_DATA')
|
||||||
|
|
||||||
|
armature_object = bpy.data.objects.get(item.armature_object_name, None)
|
||||||
|
if armature_object is None:
|
||||||
|
row.label(icon='ERROR')
|
||||||
|
else:
|
||||||
|
row.label(text=armature_object.name, icon='ARMATURE_DATA')
|
||||||
|
|
||||||
|
|
||||||
|
class PSX_PG_bone_collection_list_item(PropertyGroup):
|
||||||
|
armature_object_name: StringProperty()
|
||||||
|
armature_data_name: StringProperty()
|
||||||
|
name: StringProperty()
|
||||||
|
index: IntProperty()
|
||||||
|
count: IntProperty()
|
||||||
|
is_selected: BoolProperty(default=False)
|
||||||
|
|
||||||
|
|
||||||
|
class PSX_PG_action_export(PropertyGroup):
|
||||||
|
compression_ratio: FloatProperty(name='Compression Ratio', default=1.0, min=0.0, max=1.0, subtype='FACTOR', description='The key sampling ratio of the exported sequence.\n\nA compression ratio of 1.0 will export all frames, while a compression ratio of 0.5 will export half of the frames')
|
||||||
|
key_quota: IntProperty(name='Key Quota', default=0, min=1, description='The minimum number of frames to be exported')
|
||||||
|
fps: FloatProperty(name='FPS', default=30.0, min=0.0, description='The frame rate of the exported sequence')
|
||||||
|
|
||||||
|
|
||||||
|
class PSX_PT_action(Panel):
|
||||||
|
bl_idname = 'PSX_PT_action'
|
||||||
|
bl_label = 'PSA Export'
|
||||||
|
bl_space_type = 'DOPESHEET_EDITOR'
|
||||||
|
bl_region_type = 'UI'
|
||||||
|
bl_context = 'action'
|
||||||
|
bl_category = 'Action'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context: 'Context'):
|
||||||
|
return context.active_object and context.active_object.type == 'ARMATURE' and context.active_action is not None
|
||||||
|
|
||||||
|
def draw(self, context: 'Context'):
|
||||||
|
action = context.active_action
|
||||||
|
layout = self.layout
|
||||||
|
flow = layout.grid_flow(columns=1)
|
||||||
|
flow.use_property_split = True
|
||||||
|
flow.use_property_decorate = False
|
||||||
|
flow.prop(action.psa_export, 'compression_ratio')
|
||||||
|
flow.prop(action.psa_export, 'key_quota')
|
||||||
|
flow.prop(action.psa_export, 'fps')
|
||||||
|
|
||||||
|
|
||||||
|
bone_filter_mode_items = (
|
||||||
|
('ALL', 'All', 'All bones will be exported'),
|
||||||
|
('BONE_COLLECTIONS', 'Bone Collections', 'Only bones belonging to the selected bone collections and their ancestors will be exported')
|
||||||
|
)
|
||||||
|
|
||||||
|
axis_identifiers = ('X', 'Y', 'Z', '-X', '-Y', '-Z')
|
||||||
|
forward_items = (
|
||||||
|
('X', 'X Forward', ''),
|
||||||
|
('Y', 'Y Forward', ''),
|
||||||
|
('Z', 'Z Forward', ''),
|
||||||
|
('-X', '-X Forward', ''),
|
||||||
|
('-Y', '-Y Forward', ''),
|
||||||
|
('-Z', '-Z Forward', ''),
|
||||||
|
)
|
||||||
|
|
||||||
|
up_items = (
|
||||||
|
('X', 'X Up', ''),
|
||||||
|
('Y', 'Y Up', ''),
|
||||||
|
('Z', 'Z Up', ''),
|
||||||
|
('-X', '-X Up', ''),
|
||||||
|
('-Y', '-Y Up', ''),
|
||||||
|
('-Z', '-Z Up', ''),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def forward_axis_update(self, __context):
|
||||||
|
if self.forward_axis == self.up_axis:
|
||||||
|
# Automatically set the up axis to the next available axis
|
||||||
|
self.up_axis = next((axis for axis in axis_identifiers if axis != self.forward_axis), 'Z')
|
||||||
|
|
||||||
|
|
||||||
|
def up_axis_update(self, __context):
|
||||||
|
if self.up_axis == self.forward_axis:
|
||||||
|
# Automatically set the forward axis to the next available axis
|
||||||
|
self.forward_axis = next((axis for axis in axis_identifiers if axis != self.up_axis), 'X')
|
||||||
|
|
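A hedged sketch of how these update callbacks keep the Forward and Up axes from colliding once the properties are attached to a property group; PSX_PG_example below is a hypothetical property group used only for illustration (the add-on's own groups mix AxisMixin in the same way):

import bpy
from bpy.types import PropertyGroup

class PSX_PG_example(PropertyGroup, AxisMixin):
    pass

bpy.utils.register_class(PSX_PG_example)
bpy.types.Scene.psx_example = bpy.props.PointerProperty(type=PSX_PG_example)

pg = bpy.context.scene.psx_example
pg.forward_axis = 'Z'   # collides with the default up axis ...
print(pg.up_axis)       # ... so up_axis is bumped to the next free axis ('X')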
||||||
|
|
||||||
|
class AxisMixin:
|
||||||
|
forward_axis: EnumProperty(
|
||||||
|
name='Forward',
|
||||||
|
items=forward_items,
|
||||||
|
default='X',
|
||||||
|
update=forward_axis_update
|
||||||
|
)
|
||||||
|
up_axis: EnumProperty(
|
||||||
|
name='Up',
|
||||||
|
items=up_items,
|
||||||
|
default='Z',
|
||||||
|
update=up_axis_update
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TransformMixin(AxisMixin):
|
||||||
|
scale: FloatProperty(
|
||||||
|
name='Scale',
|
||||||
|
default=1.0,
|
||||||
|
description='Scale factor to apply to all location data',
|
||||||
|
soft_min=0.0,
|
||||||
|
soft_max=100.0
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
export_space_items = [
|
||||||
|
('WORLD', 'World', 'Export in world space'),
|
||||||
|
('ARMATURE', 'Armature', 'Export the local space of the armature object'),
|
||||||
|
('ROOT', 'Root', 'Export in the space of the root bone')
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class ExportSpaceMixin:
|
||||||
|
export_space: EnumProperty(
|
||||||
|
name='Export Space',
|
||||||
|
items=export_space_items,
|
||||||
|
default='WORLD'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class PsxBoneExportMixin:
|
||||||
|
bone_filter_mode: EnumProperty(
|
||||||
|
name='Bone Filter',
|
||||||
|
options=set(),
|
||||||
|
description='',
|
||||||
|
items=bone_filter_mode_items,
|
||||||
|
)
|
||||||
|
bone_collection_list: CollectionProperty(type=PSX_PG_bone_collection_list_item)
|
||||||
|
bone_collection_list_index: IntProperty(default=0, name='', description='')
|
||||||
|
root_bone_name: StringProperty(
|
||||||
|
name='Root Bone Name',
|
||||||
|
description='The name of the root bone when exporting a PSK with either no armature or multiple armatures',
|
||||||
|
default='ROOT',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class PSX_PG_scene_export(PropertyGroup, TransformMixin):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
_classes = (
|
||||||
|
PSX_PG_scene_export,
|
||||||
|
PSX_PG_action_export,
|
||||||
|
PSX_PG_bone_collection_list_item,
|
||||||
|
PSX_UL_bone_collection_list,
|
||||||
|
PSX_PT_action,
|
||||||
|
)
|
||||||
|
|
||||||
|
from bpy.utils import register_classes_factory
|
||||||
|
register, unregister = register_classes_factory(_classes)
|
||||||
59
io_scene_psk_psa/shared/ui.py
Normal file
@@ -0,0 +1,59 @@
import bpy
from bpy.types import Context, UILayout, Panel

from .types import bone_filter_mode_items


def is_bone_filter_mode_item_available(pg, identifier):
    if identifier == 'BONE_COLLECTIONS' and len(pg.bone_collection_list) == 0:
        return False
    return True


def draw_bone_filter_mode(layout: UILayout, pg, should_always_show_bone_collections=False):
    row = layout.row(align=True)
    for item_identifier, _, _ in bone_filter_mode_items:
        identifier = item_identifier
        item_layout = row.row(align=True)
        item_layout.prop_enum(pg, 'bone_filter_mode', item_identifier)
        item_layout.enabled = should_always_show_bone_collections or is_bone_filter_mode_item_available(pg, identifier)


class PSX_PT_scene(Panel):
    bl_idname = 'PSX_PT_scene'
    bl_label = 'PSK Export'
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = 'scene'
    bl_category = 'PSK/PSA'

    @classmethod
    def poll(cls, context):
        return context.scene is not None

    def draw(self, context: Context):
        layout = self.layout
        scene = bpy.context.scene
        psx_export = getattr(scene, 'psx_export', None)
        if psx_export is None:
            return

        # Transform
        transform_header, transform_panel = layout.panel('Transform', default_closed=False)
        transform_header.label(text='Transform')
        if transform_panel:
            flow = layout.grid_flow(columns=1)
            flow.use_property_split = True
            flow.use_property_decorate = False
            flow.prop(psx_export, 'scale')
            flow.prop(psx_export, 'forward_axis')
            flow.prop(psx_export, 'up_axis')


_classes = (
    PSX_PT_scene,
)

from bpy.utils import register_classes_factory
register, unregister = register_classes_factory(_classes)

@@ -1,26 +0,0 @@
from bpy.props import StringProperty, IntProperty, BoolProperty
from bpy.types import PropertyGroup, UIList


class PSX_UL_BoneGroupList(UIList):
    def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
        row = layout.row()
        row.prop(item, 'is_selected', text=item.name)
        row.label(text=str(item.count), icon='BONE_DATA')


class BoneGroupListItem(PropertyGroup):
    name: StringProperty()
    index: IntProperty()
    count: IntProperty()
    is_selected: BoolProperty(default=False)

    @property
    def name(self):
        return self.name


classes = (
    BoneGroupListItem,
    PSX_UL_BoneGroupList,
)
15
pyproject.toml
Normal file
@@ -0,0 +1,15 @@
[project]
name = "io_scene_psk_psa"

[pytest]
blender-addons-dirs = "io_scene_psk_psa"
testpaths = "../tests"

[tool.coverage.run]
branch = true

[tool.coverage.report]
ignore_errors = true

[tool.pyright]
reportInvalidTypeForm = false
2
test.sh
Executable file
@@ -0,0 +1,2 @@
#!/usr/bin/env bash
docker run -it --volume ${PWD}:/io_scene_psk_psa --volume ${PWD}/io_scene_psk_psa:/addons/io_scene_psk_psa --volume ${PWD}/tests:/tests $(docker build -q .)
0    tests/__init__.py                        Normal file
BIN  tests/data/Bat.psk                       LFS  Normal file  (Binary file not shown.)
BIN  tests/data/CS_Sarge_S0_Skelmesh.pskx     LFS  Normal file  (Binary file not shown.)
BIN  tests/data/Shrek.psa                     LFS  Normal file  (Binary file not shown.)
BIN  tests/data/Shrek.psk                     LFS  Normal file  (Binary file not shown.)
BIN  tests/data/Slurp_Monster_Axe_LOD0.psk    LFS  Normal file  (Binary file not shown.)
BIN  tests/data/Suzanne.psk                   LFS  Normal file  (Binary file not shown.)
39
tests/psa_import_test.py
Normal file
@@ -0,0 +1,39 @@
import bpy
import pytest

SHREK_PSK_FILEPATH = 'tests/data/Shrek.psk'
SHREK_PSA_FILEPATH = 'tests/data/Shrek.psa'


@pytest.fixture(autouse=True)
def run_before_and_after_Tests(tmpdir):
    # Setup: Run before the tests
    bpy.ops.wm.read_homefile(app_template='')
    yield
    # Teardown: Run after the tests
    pass


def test_psa_import_all():
    assert bpy.ops.psk.import_file(
        filepath=SHREK_PSK_FILEPATH,
        components='ALL',
    ) == {'FINISHED'}, "PSK import failed."

    armature_object = bpy.data.objects.get('Shrek', None)
    assert armature_object is not None, "Armature object not found in the scene."
    assert armature_object.type == 'ARMATURE', "Object is not of type ARMATURE."

    # Select the armature object
    bpy.context.view_layer.objects.active = armature_object
    armature_object.select_set(True)

    # Import the associated PSA file with import_all operator.
    assert bpy.ops.psa.import_all(
        filepath=SHREK_PSA_FILEPATH
    ) == {'FINISHED'}, "PSA import failed."

    # TODO: More thorough tests on the imported data for the animations.
    EXPECTED_ACTION_COUNT = 135
    assert len(bpy.data.actions) == EXPECTED_ACTION_COUNT, \
        f"Expected {EXPECTED_ACTION_COUNT} actions, but found {len(bpy.data.actions)}."
295
tests/psk_import_test.py
Normal file
@@ -0,0 +1,295 @@
|
|||||||
|
import bpy
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
SUZANNE_FILEPATH = 'tests/data/Suzanne.psk'
|
||||||
|
SARGE_FILEPATH = 'tests/data/CS_Sarge_S0_Skelmesh.pskx'
|
||||||
|
SLURP_MONSTER_AXE_FILEPATH = 'tests/data/Slurp_Monster_Axe_LOD0.psk'
|
||||||
|
BAT_FILEPATH = 'tests/data/Bat.psk'
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
|
||||||
|
def run_before_and_after_Tests(tmpdir):
|
||||||
|
# Setup: Run before the tests
|
||||||
|
bpy.ops.wm.read_homefile(app_template='')
|
||||||
|
yield
|
||||||
|
# Teardown: Run after the tests
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def test_psk_import_all():
|
||||||
|
assert bpy.ops.psk.import_file(
|
||||||
|
filepath=SUZANNE_FILEPATH,
|
||||||
|
components='ALL',
|
||||||
|
) == {'FINISHED'}
|
||||||
|
|
||||||
|
armature_object = bpy.data.objects.get('Suzanne', None)
|
||||||
|
|
||||||
|
assert armature_object is not None, "Armature object not found in the scene"
|
||||||
|
assert armature_object.type == 'ARMATURE', "Armature object type should be ARMATURE"
|
||||||
|
assert armature_object is not None, "Armature object not found in the scene"
|
||||||
|
assert len(armature_object.children) == 1, "Armature object should have one child"
|
||||||
|
|
||||||
|
armature_data = armature_object.data
|
||||||
|
|
||||||
|
assert len(armature_data.bones) == 1, "Armature should have one bone"
|
||||||
|
|
||||||
|
mesh_object = bpy.data.objects.get('Suzanne.001', None)
|
||||||
|
assert mesh_object is not None, "Mesh object not found in the scene"
|
||||||
|
|
||||||
|
mesh_data = mesh_object.data
|
||||||
|
|
||||||
|
assert len(mesh_data.vertices) == 507
|
||||||
|
assert len(mesh_data.polygons) == 968
|
||||||
|
|
||||||
|
|
||||||
|
def test_psk_import_armature_only():
|
||||||
|
assert bpy.ops.psk.import_file(
|
||||||
|
filepath=SUZANNE_FILEPATH,
|
||||||
|
components='ARMATURE',
|
||||||
|
) == {'FINISHED'}
|
||||||
|
|
||||||
|
armature_object = bpy.data.objects.get('Suzanne', None)
|
||||||
|
|
||||||
|
assert armature_object.type == 'ARMATURE', "Armature object type should be ARMATURE"
|
||||||
|
assert armature_object is not None, "Armature object not found in the scene"
|
||||||
|
assert len(armature_object.children) == 0, "Armature object should have no children"
|
||||||
|
|
||||||
|
armature_data = armature_object.data
|
||||||
|
|
||||||
|
assert len(armature_data.bones) == 1, "Armature should have one bone"
|
||||||
|
|
||||||
|
|
||||||
|
def test_psk_import_mesh_only():
|
||||||
|
assert bpy.ops.psk.import_file(
|
||||||
|
filepath=SUZANNE_FILEPATH,
|
||||||
|
components='MESH',
|
||||||
|
) == {'FINISHED'}
|
||||||
|
|
||||||
|
mesh_object = bpy.data.objects.get('Suzanne', None)
|
||||||
|
assert mesh_object.type == 'MESH', "Mesh object type should be MESH"
|
||||||
|
assert mesh_object is not None, "Mesh object not found in the scene"
|
||||||
|
|
||||||
|
mesh_data = mesh_object.data
|
||||||
|
|
||||||
|
assert len(mesh_data.vertices) == 507
|
||||||
|
assert len(mesh_data.polygons) == 968
|
||||||
|
|
||||||
|
|
||||||
|
def test_psk_import_scale():
|
||||||
|
"""
|
||||||
|
Test the import of a PSK file with a scale factor of 2.0.
|
||||||
|
The scale factor is applied to the armature object.
|
||||||
|
"""
|
||||||
|
assert bpy.ops.psk.import_file(
|
||||||
|
filepath=SUZANNE_FILEPATH,
|
||||||
|
components='ALL',
|
||||||
|
scale=2.0,
|
||||||
|
) == {'FINISHED'}
|
||||||
|
|
||||||
|
armature_object = bpy.data.objects.get('Suzanne', None)
|
||||||
|
assert armature_object is not None, "Armature object not found in the scene"
|
||||||
|
assert armature_object.type == 'ARMATURE', "Armature object type should be ARMATURE"
|
||||||
|
assert tuple(armature_object.scale) == (2.0, 2.0, 2.0), "Armature object scale should be (2.0, 2.0, 2.0)"
|
||||||
|
|
||||||
|
|
||||||
|
def test_psk_import_bone_length():
|
||||||
|
bone_length = 1.25
|
||||||
|
|
||||||
|
assert bpy.ops.psk.import_file(
|
||||||
|
filepath=SUZANNE_FILEPATH,
|
||||||
|
components='ARMATURE',
|
||||||
|
bone_length=bone_length,
|
||||||
|
) == {'FINISHED'}
|
||||||
|
|
||||||
|
armature_object = bpy.data.objects.get('Suzanne', None)
|
||||||
|
assert armature_object is not None, "Armature object not found in the scene"
|
||||||
|
assert armature_object.type == 'ARMATURE', "Armature object type should be ARMATURE"
|
||||||
|
|
||||||
|
armature_data = armature_object.data
|
||||||
|
assert armature_data is not None, "Armature data not found in the scene"
|
||||||
|
assert len(armature_data.bones) == 1, "Armature should have one bone"
|
||||||
|
assert 'ROOT' in armature_data.bones, "Armature should have a bone named 'ROOT'"
|
||||||
|
|
||||||
|
root_bone = armature_data.bones['ROOT']
|
||||||
|
assert tuple(root_bone.head) == (0.0, 0.0, 0.0), "Bone head should be (0.0, 0.0, 0.0)"
|
||||||
|
assert tuple(root_bone.tail) == (0.0, bone_length, 0.0), f"Bone tail should be (0.0, {bone_length}, 0.0)"
|
||||||
|
|
||||||
|
|
||||||
|
def test_psk_import_with_vertex_normals():
|
||||||
|
assert bpy.ops.psk.import_file(
|
||||||
|
filepath=SARGE_FILEPATH,
|
||||||
|
components='MESH',
|
||||||
|
should_import_vertex_normals=True,
|
||||||
|
) == {'FINISHED'}
|
||||||
|
|
||||||
|
mesh_object = bpy.data.objects.get('CS_Sarge_S0_Skelmesh', None)
|
||||||
|
assert mesh_object is not None, "Mesh object not found in the scene"
|
||||||
|
assert mesh_object.type == 'MESH', "Mesh object type should be MESH"
|
||||||
|
|
||||||
|
mesh_data = mesh_object.data
|
||||||
|
assert mesh_data is not None, "Mesh data not found in the scene"
|
||||||
|
assert mesh_data.has_custom_normals, "Mesh should have custom normals"
|
||||||
|
|
||||||
|
|
||||||
|
def test_psk_import_without_vertex_normals():
|
||||||
|
assert bpy.ops.psk.import_file(
|
||||||
|
filepath=SARGE_FILEPATH,
|
||||||
|
components='MESH',
|
||||||
|
should_import_vertex_normals=False,
|
||||||
|
) == {'FINISHED'}
|
||||||
|
|
||||||
|
mesh_object = bpy.data.objects.get('CS_Sarge_S0_Skelmesh', None)
|
||||||
|
assert mesh_object is not None, "Mesh object not found in the scene"
|
||||||
|
assert mesh_object.type == 'MESH', "Mesh object type should be MESH"
|
||||||
|
|
||||||
|
mesh_data = mesh_object.data
|
||||||
|
assert mesh_data is not None, "Mesh data not found in the scene"
|
||||||
|
assert not mesh_data.has_custom_normals, "Mesh should not have custom normals"
|
||||||
|
|
||||||
|
|
||||||
|
def test_psk_import_with_vertex_colors_srgba():
|
||||||
|
assert bpy.ops.psk.import_file(
|
||||||
|
filepath=SARGE_FILEPATH,
|
||||||
|
components='MESH',
|
||||||
|
should_import_vertex_colors=True,
|
||||||
|
vertex_color_space='SRGBA',
|
||||||
|
) == {'FINISHED'}
|
||||||
|
|
||||||
|
mesh_object = bpy.data.objects.get('CS_Sarge_S0_Skelmesh', None)
|
||||||
|
assert mesh_object is not None, "Mesh object not found in the scene"
|
||||||
|
assert mesh_object.type == 'MESH', "Mesh object type should be MESH"
|
||||||
|
|
||||||
|
mesh_data = mesh_object.data
|
||||||
|
assert mesh_data is not None, "Mesh data not found in the scene"
|
||||||
|
assert len(mesh_data.color_attributes) == 1, "Mesh should have one vertex color layer"
|
||||||
|
assert mesh_data.color_attributes[0].name == 'VERTEXCOLOR', "Vertex color layer should be named 'VERTEXCOLOR'"
|
||||||
|
assert tuple(mesh_data.color_attributes[0].data[3303].color) == (0.34586891531944275, 0.0, 0.0, 1.0), "Unexpected vertex color value"
|
||||||
|
|
||||||
|
|
||||||
|
def test_psk_import_vertex_colors_linear():
|
||||||
|
assert bpy.ops.psk.import_file(
|
||||||
|
filepath=SARGE_FILEPATH,
|
||||||
|
components='MESH',
|
||||||
|
should_import_vertex_colors=True,
|
||||||
|
vertex_color_space='LINEAR',
|
||||||
|
) == {'FINISHED'}
|
||||||
|
|
||||||
|
mesh_object = bpy.data.objects.get('CS_Sarge_S0_Skelmesh', None)
|
||||||
|
assert mesh_object is not None, "Mesh object not found in the scene"
|
||||||
|
assert mesh_object.type == 'MESH', "Mesh object type should be MESH"
|
||||||
|
|
||||||
|
mesh_data = mesh_object.data
|
||||||
|
assert mesh_data is not None, "Mesh data not found in the scene"
|
||||||
|
assert len(mesh_data.color_attributes) == 1, "Mesh should have one vertex color layer"
|
||||||
|
assert mesh_data.color_attributes[0].name == 'VERTEXCOLOR', "Vertex color layer should be named 'VERTEXCOLOR'"
|
||||||
|
assert tuple(mesh_data.color_attributes[0].data[3303].color) == (0.09803921729326248, 0.0, 0.0, 1.0), "Unexpected vertex color value"
|
||||||
|
|
||||||
|
|
||||||
|
def test_psk_import_without_vertex_colors():
|
||||||
|
assert bpy.ops.psk.import_file(
|
||||||
|
filepath=SARGE_FILEPATH,
|
||||||
|
components='MESH',
|
||||||
|
should_import_vertex_colors=False,
|
||||||
|
) == {'FINISHED'}
|
||||||
|
|
||||||
|
mesh_object = bpy.data.objects.get('CS_Sarge_S0_Skelmesh', None)
|
||||||
|
assert mesh_object is not None, "Mesh object not found in the scene"
|
||||||
|
assert mesh_object.type == 'MESH', "Mesh object type should be MESH"
|
||||||
|
|
||||||
|
mesh_data = mesh_object.data
|
||||||
|
assert mesh_data is not None, "Mesh data not found in the scene"
|
||||||
|
assert len(mesh_data.color_attributes) == 0, "Mesh should not have any vertex color layers"
|
||||||
|
|
||||||
|
|
||||||
|
def test_psk_import_extra_uvs():
|
||||||
|
assert bpy.ops.psk.import_file(
|
||||||
|
filepath=SARGE_FILEPATH,
|
||||||
|
components='MESH',
|
||||||
|
should_import_vertex_colors=True,
|
||||||
|
vertex_color_space='LINEAR',
|
||||||
|
) == {'FINISHED'}
|
||||||
|
|
||||||
|
mesh_object = bpy.data.objects.get('CS_Sarge_S0_Skelmesh', None)
|
||||||
|
assert mesh_object is not None, "Mesh object not found in the scene"
|
||||||
|
assert mesh_object.type == 'MESH', "Mesh object type should be MESH"
|
||||||
|
|
||||||
|
mesh_data = mesh_object.data
|
||||||
|
assert mesh_data is not None, "Mesh data not found in the scene"
|
||||||
|
assert len(mesh_data.uv_layers) == 2, "Mesh should have two UV layers"
|
||||||
|
|
||||||
|
assert mesh_data.uv_layers[0].name == 'UVMap', "First UV layer should be named 'UVMap'"
|
||||||
|
assert mesh_data.uv_layers[1].name == 'EXTRAUV0', "Second UV layer should be named 'EXTRAUV0'"
|
||||||
|
|
||||||
|
# Verify that the data is actually different
|
||||||
|
assert mesh_data.uv_layers[0].uv[0].vector.x == 0.92480468750
|
||||||
|
assert mesh_data.uv_layers[0].uv[0].vector.y == 0.90533447265625
|
||||||
|
assert mesh_data.uv_layers[1].uv[0].vector.x == 3.0517578125e-05
|
||||||
|
assert mesh_data.uv_layers[1].uv[0].vector.y == 0.999969482421875
|
||||||
|
|
||||||
|
|
||||||
|
def test_psk_import_materials():
|
||||||
|
assert bpy.ops.psk.import_file(
|
||||||
|
filepath=SARGE_FILEPATH,
|
||||||
|
components='MESH',
|
||||||
|
) == {'FINISHED'}
|
||||||
|
|
||||||
|
mesh_object = bpy.data.objects.get('CS_Sarge_S0_Skelmesh', None)
|
||||||
|
assert mesh_object is not None, "Mesh object not found in the scene"
|
||||||
|
assert mesh_object.type == 'MESH', "Mesh object type should be MESH"
|
||||||
|
|
||||||
|
mesh_data = mesh_object.data
|
||||||
|
|
||||||
|
assert mesh_data is not None, "Mesh data not found in the scene"
|
||||||
|
assert len(mesh_data.materials) == 4, "Mesh should have four materials"
|
||||||
|
material_names = (
|
||||||
|
'CS_Sarge_S0_MI',
|
||||||
|
'TP_Core_Eye_MI',
|
||||||
|
'AB_Sarge_S0_E_StimPack_MI1',
|
||||||
|
'CS_Sarge_S0_MI'
|
||||||
|
)
|
||||||
|
for i, material in enumerate(mesh_data.materials):
|
||||||
|
assert material.name == material_names[i], f"Material {i} name should be {material_names[i]}"
|
||||||
|
|
||||||
|
|
||||||
|
def test_psk_import_shape_keys():
|
||||||
|
assert bpy.ops.psk.import_file(
|
||||||
|
filepath=SLURP_MONSTER_AXE_FILEPATH,
|
||||||
|
components='MESH',
|
||||||
|
) == {'FINISHED'}
|
||||||
|
|
||||||
|
mesh_object = bpy.data.objects.get('Slurp_Monster_Axe_LOD0', None)
|
||||||
|
assert mesh_object is not None, "Mesh object not found in the scene"
|
||||||
|
assert mesh_object.type == 'MESH', "Mesh object type should be MESH"
|
||||||
|
assert mesh_object.data.shape_keys is not None, "Mesh object should have shape keys"
|
||||||
|
|
||||||
|
shape_key_names = (
|
||||||
|
'MORPH_BASE',
|
||||||
|
'pickaxe',
|
||||||
|
'axe',
|
||||||
|
'Blob_03',
|
||||||
|
'Blob02',
|
||||||
|
'Blob01',
|
||||||
|
)
|
||||||
|
shape_keys = mesh_object.data.shape_keys.key_blocks
|
||||||
|
assert len(shape_keys) == 6, "Mesh object should have 6 shape keys"
|
||||||
|
for i, shape_key in enumerate(shape_keys):
|
||||||
|
assert shape_key.name == shape_key_names[i], f"Shape key {i} name should be {shape_key_names[i]}"
|
||||||
|
|
||||||
|
def test_psk_import_without_shape_keys():
|
||||||
|
assert bpy.ops.psk.import_file(
|
||||||
|
filepath=SLURP_MONSTER_AXE_FILEPATH,
|
||||||
|
components='MESH',
|
||||||
|
should_import_shape_keys=False,
|
||||||
|
) == {'FINISHED'}
|
||||||
|
|
||||||
|
mesh_object = bpy.data.objects.get('Slurp_Monster_Axe_LOD0', None)
|
||||||
|
assert mesh_object is not None, "Mesh object not found in the scene"
|
||||||
|
assert mesh_object.type == 'MESH', "Mesh object type should be MESH"
|
||||||
|
assert mesh_object.data.shape_keys is None, "Mesh object should not have shape keys"
|
||||||
|
|
||||||
|
|
||||||
|
def test_psk_import_with_invalid_faces():
|
||||||
|
assert bpy.ops.psk.import_file(
|
||||||
|
filepath=BAT_FILEPATH,
|
||||||
|
components='MESH'
|
||||||
|
) == {'FINISHED'}
|
||||||
1
tests/requirements.txt
Normal file
@@ -0,0 +1 @@
|
|||||||
|
pytest
|
||||||
8
tests/test.sh
Normal file
@@ -0,0 +1,8 @@
#!/usr/bin/env bash
# This file is meant to be executed from inside a Docker container.
# To run tests on the host system, use the `test.sh` script in the root directory.
export BLENDER_EXECUTABLE=$(cat /blender_executable_path)
pytest --cov-report xml --cov=/root/.config/blender -svv tests --blender-executable $BLENDER_EXECUTABLE --blender-addons-dirs ../addons
# Fixes the paths in the coverage report to be relative to the current directory.
sed -i 's|/root/.config/blender||g' coverage.xml
sed -i 's|4.4/scripts/addons/io_scene_psk_psa/||g' coverage.xml