Compare commits: v0.2.0 ... 46-composi (284 commits)
.gitignore (vendored) — 3 changed lines
@@ -13,4 +13,5 @@ multi_user_updater/
 _build

 # ignore generated zip generated from blender_addon_tester
 *.zip
+libs
.gitlab-ci.yml
@@ -2,9 +2,12 @@ stages:
   - test
   - build
   - deploy
+  - doc

 include:
   - local: .gitlab/ci/test.gitlab-ci.yml
   - local: .gitlab/ci/build.gitlab-ci.yml
   - local: .gitlab/ci/deploy.gitlab-ci.yml
+  - local: .gitlab/ci/doc.gitlab-ci.yml
.gitlab/ci/build.gitlab-ci.yml
@@ -1,5 +1,6 @@
 build:
   stage: build
+  needs: ["test"]
   image: debian:stable-slim
   script:
     - rm -rf tests .git .gitignore script
@@ -7,7 +8,5 @@ build:
     name: multi_user
     paths:
       - multi_user
-  only:
-    refs:
-      - master
-      - develop
+  variables:
+    GIT_SUBMODULE_STRATEGY: recursive
.gitlab/ci/deploy.gitlab-ci.yml
@@ -1,9 +1,11 @@
 deploy:
   stage: deploy
+  needs: ["build"]
   image: slumber/docker-python
   variables:
     DOCKER_DRIVER: overlay2
     DOCKER_TLS_CERTDIR: "/certs"
+    GIT_SUBMODULE_STRATEGY: recursive

   services:
     - docker:19.03.12-dind
@@ -15,10 +17,5 @@ deploy:
     - docker build --build-arg replication_version=${RP_VERSION} --build-arg version={VERSION} -t registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} ./scripts/docker_server
     - echo "Pushing to gitlab registry ${VERSION}"
     - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
     - docker tag registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} registry.gitlab.com/slumber/multi-user/multi-user-server:latest
     - docker tag registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} registry.gitlab.com/slumber/multi-user/multi-user-server:${CI_COMMIT_REF_NAME}
     - docker push registry.gitlab.com/slumber/multi-user/multi-user-server
-
-  only:
-    refs:
-      - master
-      - develop
.gitlab/ci/doc.gitlab-ci.yml — new file, 16 lines
@@ -0,0 +1,16 @@
pages:
  stage: doc
  needs: ["deploy"]
  image: python
  script:
    - pip install -U sphinx sphinx_rtd_theme sphinx-material
    - sphinx-build -b html ./docs public
  artifacts:
    paths:
      - public
  only:
    refs:
      - master
      - develop
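For contributors who want to check the documentation render before pushing, the build the `pages` job runs can be reproduced locally. A minimal sketch in Python (assumes `sphinx`, `sphinx_rtd_theme`, and `sphinx-material` from the job's `pip install` line are already installed in the current environment; the `./docs` source and `public` output paths mirror the job script):

```python
# Local equivalent of the CI step `sphinx-build -b html ./docs public`.
from sphinx.cmd.build import build_main

exit_code = build_main(["-b", "html", "./docs", "public"])
print("docs built into ./public" if exit_code == 0 else "sphinx-build failed")
```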
.gitlab/ci/test.gitlab-ci.yml
@@ -3,3 +3,5 @@ test:
   image: slumber/blender-addon-testing:latest
   script:
     - python3 scripts/test_addon.py
+  variables:
+    GIT_SUBMODULE_STRATEGY: recursive
.gitmodules (vendored) — new content, 3 lines
@@ -0,0 +1,3 @@
[submodule "multi_user/libs/replication"]
	path = multi_user/libs/replication
	url = https://gitlab.com/slumber/replication.git
CHANGELOG.md — 31 changed lines
@@ -157,4 +157,33 @@ All notable changes to this project will be documented in this file.
 - Empty and Light object selection highlights
 - Material renaming
 - Default material nodes input parameters
 - blender 2.91 python api compatibility
+
+## [0.3.0] - 2021-04-14
+
+### Added
+
+- Curve material support
+- Cycle visibility settings
+- Session save/load operator
+- Add new scene support
+- Physic initial support
+- Geometry node initial support
+- Blender 2.93 compatibility
+
+### Changed
+
+- Host documentation on Gitlab Page
+- Event driven update (from the blender deps graph)
+
+### Fixed
+
+- Vertex group assignation
+- Parent relation can't be removed
+- Separate object
+- Delete animation
+- Sync missing holdout option for grease pencil material
+- Sync missing `skin_vertices`
+- Exception access violation during Undo/Redo
+- Sync missing armature bone Roll
+- Sync missing driver data_path
+- Constraint replication
README.md — 64 changed lines
@@ -19,44 +19,46 @@ This tool aims to allow multiple users to work on the same scene over the network

 ## Usage

-See the [documentation](https://multi-user.readthedocs.io/en/latest/) for details.
+See the [documentation](https://slumber.gitlab.io/multi-user/index.html) for details.

 ## Troubleshooting

-See the [troubleshooting guide](https://multi-user.readthedocs.io/en/latest/getting_started/troubleshooting.html) for tips on the most common issues.
+See the [troubleshooting guide](https://slumber.gitlab.io/multi-user/getting_started/troubleshooting.html) for tips on the most common issues.

 ## Current development status

 Currently, not all data-blocks are supported for replication over the wire. The following list summarizes the status of each one.

-| Name        | Status |                                    Comment                                   |
-| ----------- | :----: | :--------------------------------------------------------------------------: |
-| action      |   ✔️   |                                                                               |
-| armature    |   ❗   | Not stable                                                                    |
-| camera      |   ✔️   |                                                                               |
-| collection  |   ✔️   |                                                                               |
-| curve       |   ❗   | Nurbs not supported                                                           |
-| gpencil     |   ✔️   | [Airbrush not supported](https://gitlab.com/slumber/multi-user/-/issues/123)  |
-| image       |   ✔️   |                                                                               |
-| mesh        |   ✔️   |                                                                               |
-| material    |   ✔️   |                                                                               |
-| node_groups |   ❗   | Material only                                                                 |
-| metaball    |   ✔️   |                                                                               |
-| object      |   ✔️   |                                                                               |
-| textures    |   ❗   | Supported for modifiers only                                                  |
-| texts       |   ✔️   |                                                                               |
-| scene       |   ✔️   |                                                                               |
-| world       |   ✔️   |                                                                               |
-| lightprobes |   ✔️   |                                                                               |
-| compositing |   ❌   | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46)                  |
-| texts       |   ❌   | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81)                  |
-| nla         |   ❌   |                                                                               |
-| volumes     |   ✔️   |                                                                               |
-| particles   |   ❌   | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24)                 |
-| speakers    |   ❗   | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65)                  |
-| vse         |   ❗   | Mask and Clip not supported yet                                               |
-| physics     |   ❌   | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45)                  |
-| libraries   |   ❗   | Partial                                                                       |
+| Name           | Status |                           Comment                            |
+| -------------- | :----: | :----------------------------------------------------------: |
+| action         |   ✔️   |                                                              |
+| camera         |   ✔️   |                                                              |
+| collection     |   ✔️   |                                                              |
+| gpencil        |   ✔️   |                                                              |
+| image          |   ✔️   |                                                              |
+| mesh           |   ✔️   |                                                              |
+| material       |   ✔️   |                                                              |
+| node_groups    |   ✔️   | Material & Geometry only                                     |
+| geometry nodes |   ✔️   |                                                              |
+| metaball       |   ✔️   |                                                              |
+| object         |   ✔️   |                                                              |
+| texts          |   ✔️   |                                                              |
+| scene          |   ✔️   |                                                              |
+| world          |   ✔️   |                                                              |
+| volumes        |   ✔️   |                                                              |
+| lightprobes    |   ✔️   |                                                              |
+| physics        |   ✔️   |                                                              |
+| curve          |   ❗   | Nurbs surfaces not supported                                 |
+| textures       |   ❗   | Supported for modifiers/materials/geo nodes only             |
+| armature       |   ❗   | Not stable                                                   |
+| particles      |   ❗   | The cache isn't syncing.                                     |
+| speakers       |   ❗   | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
+| vse            |   ❗   | Mask and Clip not supported yet                              |
+| libraries      |   ❗   | Partial                                                      |
+| nla            |   ❌   |                                                              |
+| texts          |   ❌   | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
+| compositing    |   ❌   | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |

 ### Performance issues
@@ -74,7 +76,7 @@ I'm working on it.

 ## Contributing

-See [contributing section](https://multi-user.readthedocs.io/en/latest/ways_to_contribute.html) of the documentation.
+See the [contributing section](https://slumber.gitlab.io/multi-user/ways_to_contribute.html) of the documentation.

 Feel free to [join the discord server](https://discord.gg/aBPvGws) to chat, seek help and contribute.
New binary files (sizes as reported):
- docs/getting_started/img/quickstart_cancel_save_session_data.png (14 KiB)
- docs/getting_started/img/quickstart_import_session_data.png (106 KiB)
- docs/getting_started/img/quickstart_save_session_data.png (17 KiB)
- docs/getting_started/img/quickstart_save_session_data_cancel.png (14 KiB)
- docs/getting_started/img/quickstart_save_session_data_dialog.png (80 KiB)
- docs/getting_started/img/server_preset_exemple.gif (320 KiB)
- docs/getting_started/img/server_preset_image_add.png (7.3 KiB)
- docs/getting_started/img/server_preset_image_admin.png (4.2 KiB)
- docs/getting_started/img/server_preset_image_normal_server.png (9.0 KiB)
- docs/getting_started/img/server_preset_image_report.png (3.2 KiB)
docs/getting_started/quickstart.rst
@@ -108,36 +108,69 @@ Before starting make sure that you have access to the session IP address and port.
-1. Fill in your user information
---------------------------------
-
-Follow the user-info_ section for this step.
+Joining a server
+================

-2. Network setup
-----------------
+--------------
+Network setup
+--------------

 In the network panel, select **JOIN**.
 The **join sub-panel** (see image below) allows you to configure your client to join a
 collaborative session which is already hosted.

-.. figure:: img/quickstart_join.png
+.. figure:: img/server_preset_image_normal_server.png
    :align: center
-   :alt: Connect menu
+   :width: 200px

    Connection panel

 Fill in the fields with your information:

 - **IP**: the host's IP address.
 - **Port**: the host's port number.
 - **Connect as admin**: connect yourself with **admin rights** (see :ref:`admin` ) to the session.

 .. Maybe something more explicit here

 Once you've configured every field, hit the **CONNECT** button to join the session!
 When the :ref:`session-status` is **ONLINE** you are online and ready to start co-creating.

+.. note::
+
+   If you want to have **administrator rights** (see :ref:`admin` ) on the server, just enter the password created by the host in the **Connect as admin** section.
+
+.. figure:: img/server_preset_image_admin.png
+   :align: center
+   :width: 200px
+
+   Admin password
+
+--------------
+Server presets
+--------------
+
+You can save your server presets in a preset list below the 'JOIN' and 'HOST' buttons. This allows you to quickly access and manage your servers.
+
+To add a server, first enter the IP address and the port (plus the password if needed), then click on the + icon to add a name to your preset. To remove a server from the list, select it and click on the - icon.
+
+.. figure:: img/server_preset_exemple.gif
+   :align: center
+   :width: 200px
+
+.. warning:: Be careful: if you don't rename your new preset, or if it has the same name as an existing preset, the old preset will be overwritten.
+
+.. figure:: img/server_preset_image_report.png
+   :align: center
+   :width: 200px
+
+.. note::
+
+   Two presets are already present when the addon is launched:
+
+   - The 'localhost' preset, to host and join a local session quickly
+   - The 'public session' preset, to join the public sessions of the multi-user server (official discord to participate: https://discord.gg/aBPvGws)

 .. note::
    Additional configuration settings can be found in the :ref:`advanced` section.

 .. note::
    When starting a **dedicated server**, the session status screen will take you to the **LOBBY**, awaiting an admin to start the session.
@@ -292,7 +325,7 @@ a connected user or be under :ref:`common-right<**COMMON**>` rights.

 The Repository panel (see image below) allows you to monitor, and change datablock states and rights manually.

-.. figure:: img/quickstart_properties.png
+.. figure:: img/quickstart_save_session_data.png
    :align: center

    Repository panel
@@ -319,6 +352,40 @@ Here is a quick list of available actions:
 | .. image:: img/quickstart_remove.png  | **Delete**        | Remove the data-block from network replication                                       |
 +---------------------------------------+-------------------+------------------------------------------------------------------------------------+

+Save session data
+-----------------
+
+.. danger::
+   This is an experimental feature; until the stable release it is highly recommended to use regular .blend saves.
+
+Save session data allows you to create a backup of the session data.
+
+When you hit the **save session data** button, the following popup dialog will appear.
+It allows you to choose the destination folder and whether you want to run an auto-save.
+
+.. figure:: img/quickstart_save_session_data_dialog.png
+   :align: center
+
+   Save session data dialog.
+
+If you enabled the auto-save option, you can cancel it from the **Cancel auto-save** button.
+
+.. figure:: img/quickstart_save_session_data_cancel.png
+   :align: center
+
+   Cancel session autosave.
+
+To import session data backups, use the following **Multiuser session snapshot** import dialog:
+
+.. figure:: img/quickstart_import_session_data.png
+   :align: center
+
+   Import session data dialog.
+
+.. note::
+   It is not yet possible to start a session directly from a backup.
+
 .. _advanced:

 Advanced settings
@@ -340,15 +407,6 @@ Network

    Advanced network settings

-**IPC Port** is the port used for Inter Process Communication. This port is used
-by the multi-user subprocesses to communicate with each other. If different instances
-of multi-user are using the same IPC port, this will create conflicts!
-
-.. note::
-   You only need to modify this setting if you need to launch multiple clients from the same
-   computer (or if you try to host and join from the same computer). To resolve this, you simply need to enter a different
-   **IPC port** for each blender instance.
-
 **Timeout (in milliseconds)** is the maximum ping authorized before auto-disconnecting.
 You should only increase it if you have a bad connection.
docs/getting_started/hosting_guide.rst
@@ -76,7 +76,7 @@ Hit 'Create a network' (see image below) and go to the network settings.
    :align: center
    :width: 450px

-   Network page
+   Admin password

 Now that the network is created, let's configure it.
@@ -258,33 +258,55 @@ You can check that your container is running, and find its ID and name with:

 .. _docker-logs:

 Viewing logs in a docker container
-----------------------------------

-Logs for the server running in a docker container can be accessed by outputting the following to a log file:
-
-.. code-block:: bash
-
-   docker log your-container-id >& dockerserver.log
-
-.. Note:: If using WSL2 on Windows 10 (Windows Subsystem for Linux), it is preferable to run a dedicated server via the regular command line approach (or the associated startup script) from within Windows - docker desktop for windows 10 usually uses the WSL2 backend where it is available.
-
-.. This may not be true. Need to write up how to locally start a docker container from WSL2
-
-First, you'll need to know your container ID, which you can find by running:
+Logs for the server running in a docker container can be accessed by outputting the container logs to a log file. First, you'll need to know your container ID, which you can find by running:

 .. code-block:: bash

    docker ps

 If you're cloud-hosting with e.g. Google Cloud, your container will be the one associated with the `registry address <https://gitlab.com/slumber/multi-user/container_registry/1174180>`_ where your Docker image was located, e.g. registry.gitlab.com/slumber/multi-user/multi-user-server:0.2.0

-You can either ssh in to your server and then run
+Then, output the container logs to a file:

 .. code-block:: bash

-   cat your-log-name.log
+   docker logs your-container-id >& dockerserver.log

-or view the docker container logs with
+.. Note:: If using WSL2 on Windows 10 (Windows Subsystem for Linux), it is preferable to run a dedicated server via the regular command line approach (or the associated startup script) from within Windows - docker desktop for windows 10 usually uses the WSL2 backend where it is available.

+.. This may not be true. Need to write up how to locally start a docker container from WSL2

+Downloading logs from a docker container on a cloud-hosted server
+-----------------------------------------------------------------
+
+If you'd like to pull the log files from a cloud-hosted server to submit to a developer for review, a simple process using SSH and SCP is as follows:
+
+First, SSH into your instance. You can either open the `VM Instances console <https://console.cloud.google.com/compute/instances>`_ and use the browser terminal provided by Google Cloud (I had the best luck using the Google Chrome browser)... or you can see `here <https://cloud.google.com/compute/docs/instances/connecting-advanced#thirdpartytools>`_ for how to set up your instance for SSH access from your local terminal.
+
+If using SSH from your terminal, first generate SSH keys (setting their access permissions to e.g. chmod 400 level, whereby only the user has permissions) and submit the public key to the cloud-hosted VM instance, storing the private key on your local machine.
+Then, SSH into your cloud server from your local terminal with the following command:
+
+.. code-block:: bash
+
+   ssh -i PATH_TO_PRIVATE_KEY USERNAME@EXTERNAL_IP_ADDRESS
+
+Use the private key which corresponds to the public key you uploaded, and the username associated with that key (visible in the Google Cloud console for your VM Instance). Use the external IP address for the server, available from the `VM Instances console <https://console.cloud.google.com/compute/instances>`_,
+e.g.
+
+.. code-block:: bash
+
+   ssh -i ~/.ssh/id_rsa user@xxx.xxx.xxx.xxx
+
+Once you've connected to the server's secure shell, you can generate a log file from the docker container running the replication server. First, you'll need to know your container ID, which you can find by running:
+
+.. code-block:: bash
+
+   docker ps
+
+If you're cloud-hosting with e.g. Google Cloud, your container will be the one associated with the `registry address <https://gitlab.com/slumber/multi-user/container_registry/1174180>`_ where your Docker image was located, e.g. registry.gitlab.com/slumber/multi-user/multi-user-server:latest
+
+To view the docker container logs, run:
+
+.. code-block:: bash
+
@@ -296,7 +318,29 @@ OR

    docker logs your-container-id

-Note, see these `notes <https://cloud.google.com/compute/docs/containers/deploying-containers?_ga=2.113663175.-1396941296.1606125558#viewing_container_logs>`_ for how to check server logs on Google Cloud.
+To save the output to a file, run:
+
+.. code-block:: bash
+
+   docker logs your-container-id >& dockerserver.log
+
+Now that the server logs are available in a file, we can disconnect from the secure shell (SSH), and then copy the file to the local machine using SCP. In your local terminal, execute the following:
+
+.. code-block:: bash
+
+   scp -i PATH_TO_PRIVATE_KEY USERNAME@EXTERNAL_IP_ADDRESS:"dockerserver.log" LOCAL_PATH_TO_COPY_FILE_TO
+
+e.g.
+
+.. code-block:: bash
+
+   scp -i ~/.ssh/id_rsa user@xxx.xxx.xxx.xxx:"dockerserver.log" .
+
+This copies the file dockerserver.log generated in the previous step to the current directory on the local machine. From there, you can send it to the multi-user maintainers for review.
+
+.. Note:: See these `notes <https://cloud.google.com/compute/docs/containers/deploying-containers?_ga=2.113663175.-1396941296.1606125558#viewing_container_logs>`_ for how to check server logs on Google Cloud using other tools.

.. _serverstartscripts:
multi_user/__init__.py
@@ -19,9 +19,9 @@
 bl_info = {
     "name": "Multi-User",
     "author": "Swann Martinez",
-    "version": (0, 2, 0),
+    "version": (0, 5, 0),
     "description": "Enable real-time collaborative workflow inside blender",
-    "blender": (2, 82, 0),
+    "blender": (2, 93, 0),
     "location": "3D View > Sidebar > Multi-User tab",
     "warning": "Unstable addon, use it at your own risks",
     "category": "Collaboration",
@@ -43,13 +43,10 @@ from bpy.app.handlers import persistent
 from . import environment


 DEPENDENCIES = {
     ("replication", '0.1.17'),
 }

 module_error_msg = "Insufficient rights to install the multi-user \
     dependencies, launch blender with administrator rights."


 def register():
     # Setup logging policy
     logging.basicConfig(
@@ -58,12 +55,7 @@ def register():
         level=logging.INFO)

     try:
         if bpy.app.version[1] >= 91:
             python_binary_path = sys.executable
         else:
             python_binary_path = bpy.app.binary_path_python

         environment.setup(DEPENDENCIES, python_binary_path)
         environment.register()

         from . import presence
         from . import operators
@@ -89,6 +81,8 @@ def register():
         type=preferences.SessionUser
     )
     bpy.types.WindowManager.user_index = bpy.props.IntProperty()
+    bpy.types.TOPBAR_MT_file_import.append(operators.menu_func_import)


 def unregister():
     from . import presence
@@ -97,6 +91,8 @@ def unregister():
     from . import preferences
     from . import addon_updater_ops

+    bpy.types.TOPBAR_MT_file_import.remove(operators.menu_func_import)
+
     presence.unregister()
     addon_updater_ops.unregister()
     ui.unregister()
@@ -107,3 +103,5 @@ def unregister():
     del bpy.types.ID.uuid
     del bpy.types.WindowManager.online_users
     del bpy.types.WindowManager.user_index
+
+    environment.unregister()
multi_user/addon_updater.py
@@ -1688,10 +1688,7 @@ class GitlabEngine(object):
         # Could clash with tag names and if it does, it will
         # download TAG zip instead of branch zip to get
         # direct path, would need.
-        return "{}{}{}".format(
-            self.form_repo_url(updater),
-            "/repository/archive.zip?sha=",
-            branch)
+        return f"https://gitlab.com/slumber/multi-user/-/jobs/artifacts/{branch}/download?job=build"

     def get_zip_url(self, sha, updater):
         return "{base}/repository/archive.zip?sha={sha}".format(
multi_user/addon_updater_ops.py
@@ -122,13 +122,13 @@ class addon_updater_install_popup(bpy.types.Operator):
     # if true, run clean install - ie remove all files before adding new
     # equivalent to deleting the addon and reinstalling, except the
     # updater folder/backup folder remains
-    clean_install = bpy.props.BoolProperty(
+    clean_install: bpy.props.BoolProperty(
         name="Clean install",
         description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
         default=False,
         options={'HIDDEN'}
     )
-    ignore_enum = bpy.props.EnumProperty(
+    ignore_enum: bpy.props.EnumProperty(
         name="Process update",
         description="Decide to install, ignore, or defer new addon update",
         items=[
@@ -264,7 +264,7 @@ class addon_updater_update_now(bpy.types.Operator):
     # if true, run clean install - ie remove all files before adding new
     # equivalent to deleting the addon and reinstalling, except the
     # updater folder/backup folder remains
-    clean_install = bpy.props.BoolProperty(
+    clean_install: bpy.props.BoolProperty(
         name="Clean install",
         description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
         default=False,
@@ -332,7 +332,7 @@ class addon_updater_update_target(bpy.types.Operator):
             i+=1
         return ret

-    target = bpy.props.EnumProperty(
+    target: bpy.props.EnumProperty(
         name="Target version to install",
         description="Select the version to install",
         items=target_version
@@ -341,7 +341,7 @@ class addon_updater_update_target(bpy.types.Operator):
     # if true, run clean install - ie remove all files before adding new
     # equivalent to deleting the addon and reinstalling, except the
     # updater folder/backup folder remains
-    clean_install = bpy.props.BoolProperty(
+    clean_install: bpy.props.BoolProperty(
         name="Clean install",
         description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
         default=False,
@@ -399,7 +399,7 @@ class addon_updater_install_manually(bpy.types.Operator):
     bl_description = "Proceed to manually install update"
     bl_options = {'REGISTER', 'INTERNAL'}

-    error = bpy.props.StringProperty(
+    error: bpy.props.StringProperty(
         name="Error Occurred",
         default="",
         options={'HIDDEN'}
@@ -461,7 +461,7 @@ class addon_updater_updated_successful(bpy.types.Operator):
     bl_description = "Update installation response"
     bl_options = {'REGISTER', 'INTERNAL', 'UNDO'}

-    error = bpy.props.StringProperty(
+    error: bpy.props.StringProperty(
         name="Error Occurred",
         default="",
         options={'HIDDEN'}
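The pattern above — replacing `prop = bpy.props.X()` assignments with `prop: bpy.props.X()` annotations — is the registration style Blender 2.8+ expects for operator and property-group members. A minimal sketch of the annotation style (the operator name and property below are illustrative, not taken from this diff):

```python
import bpy

class SimpleOperator(bpy.types.Operator):
    """Illustrative operator using annotation-style properties."""
    bl_idname = "wm.simple_operator"  # hypothetical idname
    bl_label = "Simple Operator"

    # Annotation syntax: Blender collects this into the operator's
    # properties at class registration time. A plain `=` assignment
    # would be treated as an ordinary class attribute instead.
    my_flag: bpy.props.BoolProperty(name="My flag", default=False)

    def execute(self, context):
        self.report({'INFO'}, f"my_flag = {self.my_flag}")
        return {'FINISHED'}

bpy.utils.register_class(SimpleOperator)
```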
multi_user/bl_types/__init__.py
@@ -28,7 +28,6 @@ __all__ = [
     'bl_light',
     'bl_scene',
     'bl_material',
     'bl_library',
     'bl_armature',
     'bl_action',
     'bl_world',
@@ -39,17 +38,28 @@ __all__ = [
     'bl_font',
     'bl_sound',
     'bl_file',
     'bl_sequencer',
     'bl_node_group',
     'bl_texture',
     "bl_particle",
     # 'bl_compositor',
 ]  # Order here defines execution order

 if bpy.app.version[1] >= 91:
     __all__.append('bl_volume')

 from . import *
-from replication.data import ReplicatedDataFactory
-
-def types_to_register():
-    return __all__
+from replication.protocol import DataTranslationProtocol
+
+def get_data_translation_protocol() -> DataTranslationProtocol:
+    """ Return a data translation protocol from implemented bpy types
+    """
+    bpy_protocol = DataTranslationProtocol()
+    for module_name in __all__:
+        impl = globals().get(module_name)
+        if impl and hasattr(impl, "_type") and hasattr(impl, "_class"):
+            bpy_protocol.register_implementation(impl._type, impl._class)
+    return bpy_protocol
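A sketch of how `get_data_translation_protocol` and the per-module `_type`/`_class` convention fit together, assuming it is run from Blender's Python console with the addon installed (only names visible in this diff are used):

```python
from multi_user import bl_types

# Build the protocol: every module listed in __all__ that exposes a
# `_type` (the bpy type) and a `_class` (its ReplicatedDatablock
# handler) gets registered on the DataTranslationProtocol.
bpy_protocol = bl_types.get_data_translation_protocol()

# Inspect which Blender ID types ended up with an implementation.
for module_name in bl_types.__all__:
    impl = getattr(bl_types, module_name, None)
    if impl is not None and hasattr(impl, '_type') and hasattr(impl, '_class'):
        print(f"{module_name}: {impl._type.__name__} -> {impl._class.__name__}")
```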
multi_user/bl_types/bl_action.py
@@ -25,8 +25,8 @@ from enum import Enum
 from .. import utils
 from .dump_anything import (
     Dumper, Loader, np_dump_collection, np_load_collection, remove_items_from_dict)
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid

 KEYFRAME = [
     'amplitude',
@@ -41,6 +41,66 @@ KEYFRAME = [
     'interpolation',
 ]

+def has_action(datablock):
+    """ Check if the given datablock has an action attached
+    """
+    return (hasattr(datablock, 'animation_data')
+            and datablock.animation_data
+            and datablock.animation_data.action)
+
+
+def has_driver(datablock):
+    """ Check if the given datablock is driven
+    """
+    return (hasattr(datablock, 'animation_data')
+            and datablock.animation_data
+            and datablock.animation_data.drivers)
+
+
+def dump_driver(driver):
+    dumper = Dumper()
+    dumper.depth = 6
+    data = dumper.dump(driver)
+
+    return data
+
+
+def load_driver(target_datablock, src_driver):
+    loader = Loader()
+    drivers = target_datablock.animation_data.drivers
+    src_driver_data = src_driver['driver']
+    new_driver = drivers.new(src_driver['data_path'], index=src_driver['array_index'])
+
+    # Settings
+    new_driver.driver.type = src_driver_data['type']
+    new_driver.driver.expression = src_driver_data['expression']
+    loader.load(new_driver, src_driver)
+
+    # Variables
+    for src_variable in src_driver_data['variables']:
+        src_var_data = src_driver_data['variables'][src_variable]
+        new_var = new_driver.driver.variables.new()
+        new_var.name = src_var_data['name']
+        new_var.type = src_var_data['type']
+
+        for src_target in src_var_data['targets']:
+            src_target_data = src_var_data['targets'][src_target]
+            src_id = src_target_data.get('id')
+            if src_id:
+                new_var.targets[src_target].id = utils.resolve_from_id(src_target_data['id'], src_target_data['id_type'])
+            loader.load(new_var.targets[src_target], src_target_data)
+
+    # Fcurve
+    new_fcurve = new_driver.keyframe_points
+    for p in reversed(new_fcurve):
+        new_fcurve.remove(p, fast=True)
+
+    new_fcurve.add(len(src_driver['keyframe_points']))
+
+    for index, src_point in enumerate(src_driver['keyframe_points']):
+        new_point = new_fcurve[index]
+        loader.load(new_point, src_driver['keyframe_points'][src_point])
+

 def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
     """ Dump a single fcurve to a dict
@@ -61,7 +121,6 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
         points = fcurve.keyframe_points
         fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)
         fcurve_data['keyframe_points'] = np_dump_collection(points, KEYFRAME)
-
     else:  # Legacy method
         dumper = Dumper()
         fcurve_data["keyframe_points"] = []
@@ -71,6 +130,18 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
             dumper.dump(k)
         )

+    if fcurve.modifiers:
+        dumper = Dumper()
+        dumper.exclude_filter = [
+            'is_valid',
+            'active'
+        ]
+        dumped_modifiers = []
+        for modifier in fcurve.modifiers:
+            dumped_modifiers.append(dumper.dump(modifier))
+
+        fcurve_data['modifiers'] = dumped_modifiers
+
     return fcurve_data


@@ -83,7 +154,7 @@ def load_fcurve(fcurve_data, fcurve):
     :type fcurve: bpy.types.FCurve
     """
     use_numpy = fcurve_data.get('use_numpy')

     loader = Loader()
     keyframe_points = fcurve.keyframe_points

     # Remove all keyframe points
@@ -128,30 +199,87 @@ def load_fcurve(fcurve_data, fcurve):

     fcurve.update()

+    dumped_fcurve_modifiers = fcurve_data.get('modifiers', None)
+
+    if dumped_fcurve_modifiers:
+        # clear modifiers
+        for fmod in fcurve.modifiers:
+            fcurve.modifiers.remove(fmod)
+
+        # Load each modifier in order
+        for modifier_data in dumped_fcurve_modifiers:
+            modifier = fcurve.modifiers.new(modifier_data['type'])
+            loader.load(modifier, modifier_data)
+    elif fcurve.modifiers:
+        for fmod in fcurve.modifiers:
+            fcurve.modifiers.remove(fmod)
+
+
+def dump_animation_data(datablock):
+    animation_data = {}
+    if has_action(datablock):
+        animation_data['action'] = datablock.animation_data.action.name
+    if has_driver(datablock):
+        animation_data['drivers'] = []
+        for driver in datablock.animation_data.drivers:
+            animation_data['drivers'].append(dump_driver(driver))
+
+    return animation_data
+
+
+def load_animation_data(animation_data, datablock):
+    # Load animation data
+    if animation_data:
+        if datablock.animation_data is None:
+            datablock.animation_data_create()
+
+        for d in datablock.animation_data.drivers:
+            datablock.animation_data.drivers.remove(d)
+
+        if 'drivers' in animation_data:
+            for driver in animation_data['drivers']:
+                load_driver(datablock, driver)
+
+        if 'action' in animation_data:
+            datablock.animation_data.action = bpy.data.actions[animation_data['action']]
+        elif datablock.animation_data.action:
+            datablock.animation_data.action = None
+
+    # Remove existing animation data if there is nothing more to load
+    elif hasattr(datablock, 'animation_data') and datablock.animation_data:
+        datablock.animation_data_clear()
+
+
+def resolve_animation_dependencies(datablock):
+    if has_action(datablock):
+        return [datablock.animation_data.action]
+    else:
+        return []
+

-class BlAction(BlDatablock):
+class BlAction(ReplicatedDatablock):
     bl_id = "actions"
     bl_class = bpy.types.Action
     bl_delay_refresh = 1
     bl_delay_apply = 1
     bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'ACTION_TWEAK'
     bl_reload_parent = False

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.actions.new(data["name"])

-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         for dumped_fcurve in data["fcurves"]:
             dumped_data_path = dumped_fcurve["data_path"]
             dumped_array_index = dumped_fcurve["dumped_array_index"]

             # create fcurve if needed
-            fcurve = target.fcurves.find(
+            fcurve = datablock.fcurves.find(
                 dumped_data_path, index=dumped_array_index)
             if fcurve is None:
-                fcurve = target.fcurves.new(
+                fcurve = datablock.fcurves.new(
                     dumped_data_path, index=dumped_array_index)

             load_fcurve(dumped_fcurve, fcurve)
@@ -159,9 +287,10 @@ class BlAction(ReplicatedDatablock):
         id_root = data.get('id_root')

         if id_root:
-            target.id_root = id_root
+            datablock.id_root = id_root

-    def _dump_implementation(self, data, instance=None):
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         dumper.exclude_filter = [
             'name_full',
@@ -176,11 +305,23 @@ class BlAction(ReplicatedDatablock):
             'users'
         ]
         dumper.depth = 1
-        data = dumper.dump(instance)
+        data = dumper.dump(datablock)

         data["fcurves"] = []

-        for fcurve in instance.fcurves:
+        for fcurve in datablock.fcurves:
             data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True))

         return data
+
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.actions)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        return []
+
+_type = bpy.types.Action
+_class = BlAction
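Taken together, the static `construct`/`dump`/`load`/`resolve` methods form the new stateless protocol surface that replaces the old `_construct`/`_load_implementation`/`_dump_implementation` instance methods. A minimal sketch of a round trip through `BlAction`'s static API, run from Blender's Python console (the action name and keyframe values are illustrative; the exact keys inside each dumped fcurve come from `dump_fcurve`, which is only partially visible in this diff):

```python
import bpy
from multi_user.bl_types.bl_action import BlAction

# Create a source action with one illustrative fcurve.
src = bpy.data.actions.new("walk")
fc = src.fcurves.new("location", index=0)
fc.keyframe_points.insert(1, 0.0)
fc.keyframe_points.insert(10, 2.5)

# Serialize with the static API...
data = BlAction.dump(src)

# ...and rebuild an independent copy from the dict alone.
copy = BlAction.construct(data)
BlAction.load(data, copy)
assert len(copy.fcurves) == len(src.fcurves)
```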
multi_user/bl_types/bl_armature.py
@@ -22,33 +22,43 @@ import mathutils

 from .dump_anything import Loader, Dumper
 from .. import presence, operators, utils
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
+
+def get_roll(bone: bpy.types.Bone) -> float:
+    """ Compute the actual roll of a bone
+
+    :arg bone: target bone
+    :type bone: bpy.types.Bone
+    :return: float
+    """
+    return bone.AxisRollFromMatrix(bone.matrix_local.to_3x3())[1]
+

-class BlArmature(BlDatablock):
+class BlArmature(ReplicatedDatablock):
     bl_id = "armatures"
     bl_class = bpy.types.Armature
     bl_delay_refresh = 1
     bl_delay_apply = 0
     bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'ARMATURE_DATA'
     bl_reload_parent = False

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.armatures.new(data["name"])

-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         # Load parent object
         parent_object = utils.find_from_attr(
             'uuid',
             data['user'],
             bpy.data.objects
         )

         if parent_object is None:
             parent_object = bpy.data.objects.new(
-                data['user_name'], target)
+                data['user_name'], datablock)
             parent_object.uuid = data['user']

         is_object_in_master = (
@@ -83,10 +93,10 @@ class BlArmature(ReplicatedDatablock):
         bpy.ops.object.mode_set(mode='EDIT')

         for bone in data['bones']:
-            if bone not in target.edit_bones:
-                new_bone = target.edit_bones.new(bone)
+            if bone not in datablock.edit_bones:
+                new_bone = datablock.edit_bones.new(bone)
             else:
-                new_bone = target.edit_bones[bone]
+                new_bone = datablock.edit_bones[bone]

             bone_data = data['bones'].get(bone)

@@ -94,16 +104,16 @@ class BlArmature(ReplicatedDatablock):
             new_bone.head = bone_data['head_local']
             new_bone.tail_radius = bone_data['tail_radius']
             new_bone.head_radius = bone_data['head_radius']
-            # new_bone.roll = bone_data['roll']
+            new_bone.roll = bone_data['roll']

             if 'parent' in bone_data:
-                new_bone.parent = target.edit_bones[data['bones']
-                                                    [bone]['parent']]
+                new_bone.parent = datablock.edit_bones[data['bones']
+                                                       [bone]['parent']]
             new_bone.use_connect = bone_data['use_connect']

             loader = Loader()
             loader.load(new_bone, bone_data)

         if bpy.context.mode != 'OBJECT':
             bpy.ops.object.mode_set(mode='OBJECT')
         bpy.context.view_layer.objects.active = current_active_object
@@ -112,9 +122,10 @@ class BlArmature(ReplicatedDatablock):
         if 'EDIT' in current_mode:
             bpy.ops.object.mode_set(mode='EDIT')

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+        load_animation_data(data.get('animation_data'), datablock)
+
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         dumper.depth = 4
         dumper.include_filter = [
@@ -127,16 +138,15 @@ class BlArmature(ReplicatedDatablock):
             'parent',
             'name',
             'layers',
-            # 'roll',
         ]
-        data = dumper.dump(instance)
+        data = dumper.dump(datablock)

-        for bone in instance.bones:
+        for bone in datablock.bones:
             if bone.parent:
                 data['bones'][bone.name]['parent'] = bone.parent.name
         # get the parent Object
-        object_users = utils.get_datablock_users(instance)[0]
+        # TODO: Use id_data instead
+        object_users = utils.get_datablock_users(datablock)[0]
         data['user'] = object_users.uuid
         data['user_name'] = object_users.name

@@ -146,6 +156,26 @@ class BlArmature(ReplicatedDatablock):
             item.name for item in container_users if isinstance(item, bpy.types.Collection)]
         data['user_scene'] = [
             item.name for item in container_users if isinstance(item, bpy.types.Scene)]
+
+        for bone in datablock.bones:
+            data['bones'][bone.name]['roll'] = get_roll(bone)
+
+        data['animation_data'] = dump_animation_data(datablock)
         return data
+
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        name = data.get('name')
+        datablock = resolve_datablock_from_uuid(uuid, bpy.data.armatures)
+        if datablock is None:
+            datablock = bpy.data.armatures.get(name)
+
+        return datablock
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        return resolve_animation_dependencies(datablock)
+
+_type = bpy.types.Armature
+_class = BlArmature
multi_user/bl_types/bl_camera.py
@@ -20,50 +20,56 @@ import bpy
 import mathutils

 from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


-class BlCamera(BlDatablock):
+class BlCamera(ReplicatedDatablock):
     bl_id = "cameras"
     bl_class = bpy.types.Camera
     bl_delay_refresh = 1
     bl_delay_apply = 1
     bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'CAMERA_DATA'
     bl_reload_parent = False

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.cameras.new(data["name"])

-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)

         dof_settings = data.get('dof')

+        load_animation_data(data.get('animation_data'), datablock)
+
         # DOF settings
         if dof_settings:
-            loader.load(target.dof, dof_settings)
+            loader.load(datablock.dof, dof_settings)

         background_images = data.get('background_images')

-        target.background_images.clear()
+        datablock.background_images.clear()

         if background_images:
             for img_name, img_data in background_images.items():
                 img_id = img_data.get('image')
                 if img_id:
-                    target_img = target.background_images.new()
+                    target_img = datablock.background_images.new()
                     target_img.image = bpy.data.images[img_id]
                     loader.load(target_img, img_data)

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+                    img_user = img_data.get('image_user')
+                    if img_user:
+                        loader.load(target_img.image_user, img_user)
+
+    # TODO: background image support
+
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         dumper.depth = 3
         dumper.include_filter = [
@@ -104,14 +110,37 @@ class BlCamera(ReplicatedDatablock):
             'scale',
             'use_flip_x',
             'use_flip_y',
-            'image'
+            'image_user',
+            'image',
+            'frame_duration',
+            'frame_start',
+            'frame_offset',
+            'use_cyclic',
+            'use_auto_refresh'
         ]
-        return dumper.dump(instance)
-
-    def _resolve_deps_implementation(self):
+        data = dumper.dump(datablock)
+        data['animation_data'] = dump_animation_data(datablock)
+
+        for index, image in enumerate(datablock.background_images):
+            if image.image_user:
+                data['background_images'][index]['image_user'] = dumper.dump(image.image_user)
+        return data
+
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.cameras)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         deps = []
-        for background in self.instance.background_images:
+        for background in datablock.background_images:
             if background.image:
                 deps.append(background.image)

+        deps.extend(resolve_animation_dependencies(datablock))
+
         return deps
+
+_type = bpy.types.Camera
+_class = BlCamera
multi_user/bl_types/bl_collection.py
@@ -19,10 +19,12 @@
 import bpy
 import mathutils

-from .. import utils
-from .bl_datablock import BlDatablock
-from .dump_anything import Loader, Dumper
+from deepdiff import DeepDiff, Delta
+
+from .. import utils
+from replication.protocol import ReplicatedDatablock
+from .dump_anything import Loader, Dumper
+from .bl_datablock import resolve_datablock_from_uuid

 def dump_collection_children(collection):
     collection_children = []
@@ -81,57 +83,82 @@ def resolve_collection_dependencies(collection):

     return deps

-class BlCollection(BlDatablock):
+class BlCollection(ReplicatedDatablock):
     bl_id = "collections"
     bl_icon = 'FILE_FOLDER'
     bl_class = bpy.types.Collection
     bl_delay_refresh = 1
     bl_delay_apply = 1
     bl_automatic_push = True
     bl_check_common = True
     bl_reload_parent = False

-    def _construct(self, data):
-        if self.is_library:
-            with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
-                targetData.collections = [
-                    name for name in sourceData.collections if name == self.data['name']]
-
-            instance = bpy.data.collections[self.data['name']]
-
-            return instance
+    use_delta = True

+    @staticmethod
+    def construct(data: dict) -> object:
         instance = bpy.data.collections.new(data["name"])
         return instance

-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)

         # Objects
-        load_collection_objects(data['objects'], target)
+        load_collection_objects(data['objects'], datablock)

         # Link children
-        load_collection_childrens(data['children'], target)
+        load_collection_childrens(data['children'], datablock)

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+        # FIXME: Find a better way after the replication big refactoring
+        # Keep other users from deleting collection objects by flushing their history
+        utils.flush_history()
+
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         dumper.depth = 1
         dumper.include_filter = [
             "name",
             "instance_offset"
         ]
-        data = dumper.dump(instance)
+        data = dumper.dump(datablock)

         # dump objects
-        data['objects'] = dump_collection_objects(instance)
+        data['objects'] = dump_collection_objects(datablock)

         # dump children collections
-        data['children'] = dump_collection_children(instance)
+        data['children'] = dump_collection_children(datablock)

         return data

-    def _resolve_deps_implementation(self):
-        return resolve_collection_dependencies(self.instance)
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.collections)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        return resolve_collection_dependencies(datablock)
+
+    @staticmethod
+    def compute_delta(last_data: dict, current_data: dict) -> Delta:
+        diff_params = {
+            'ignore_order': True,
+            'report_repetition': True
+        }
+        delta_params = {
+            # 'mutate': True
+        }
+
+        return Delta(
+            DeepDiff(last_data,
+                     current_data,
+                     cache_size=5000,
+                     **diff_params),
+            **delta_params)
+
+_type = bpy.types.Collection
+_class = BlCollection
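With `use_delta = True`, only a diff of the dumped dict has to cross the wire instead of the full dump. A minimal sketch of the DeepDiff/Delta mechanics that `compute_delta` relies on (the collection dicts below are illustrative, not actual dumps):

```python
from deepdiff import DeepDiff, Delta

# Two successive (illustrative) dumps of a collection.
last_data = {"name": "Collection", "objects": ["Cube"], "children": []}
current_data = {"name": "Collection", "objects": ["Cube", "Light"], "children": []}

# Same parameters as compute_delta above: report_repetition is required
# for deltas built from ignore_order diffs to replay accurately.
diff = DeepDiff(last_data, current_data,
                cache_size=5000, ignore_order=True, report_repetition=True)
delta = Delta(diff)

# A receiver holding last_data can replay the change by adding the delta.
assert last_data + delta == current_data
```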
multi_user/bl_types/bl_compositor.py — new file, 81 lines
@@ -0,0 +1,81 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####

import bpy
import mathutils
import logging
import re

from uuid import uuid4

from .dump_anything import Loader, Dumper
from replication.protocol import ReplicatedDatablock

from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
from .node_tree import load_node_tree, dump_node_tree, get_node_tree_dependencies


class BlCompositor(ReplicatedDatablock):
    bl_id = "compositor"
    bl_class = bpy.types.CompositorNodeTree
    bl_check_common = True
    bl_icon = 'COMPOSITOR_NODE'
    bl_reload_parent = False

    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.scenes["Scene"].node_tree  # TODO: resolve_datablock_from_uuid for multiple scenes

    @staticmethod
    def load(data: dict, datablock: object):
        load_animation_data(data.get('animation_data'), datablock)
        loader = Loader()
        loader.load(datablock, data)
        load_node_tree(data['node_tree'], datablock)

    @staticmethod
    def dump(datablock: object) -> dict:
        comp_dumper = Dumper()
        comp_dumper.depth = 1
        comp_dumper.include_filter = [
            'use_nodes',
            'name',
        ]
        data = comp_dumper.dump(datablock)

        data['node_tree'] = dump_node_tree(datablock)

        data['animation_data'] = dump_animation_data(datablock)
        return data

    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.scenes["Scene"].node_tree)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        deps = []

        deps.extend(get_node_tree_dependencies(datablock))

        deps.extend(resolve_animation_dependencies(datablock))

        return deps

_type = bpy.types.CompositorNodeTree
_class = BlCompositor
@@ -21,11 +21,14 @@ import bpy.types as T
 import mathutils
 import logging
 
-from .. import utils
-from .bl_datablock import BlDatablock
+from ..utils import get_preferences
+from replication.protocol import ReplicatedDatablock
 from .dump_anything import (Dumper, Loader,
-                            np_load_collection,
-                            np_dump_collection)
+                            np_load_collection,
+                            np_dump_collection)
+from .bl_material import dump_materials_slots, load_materials_slots
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
 
 
 SPLINE_BEZIER_POINT = [
@@ -68,8 +71,6 @@ CURVE_METADATA = [
     'font_bold',
     'font_bold_italic',
     'font_italic',
-    'make_local',
-    'materials',
     'name',
     'offset',
     'offset_x',
@@ -79,7 +80,6 @@ CURVE_METADATA = [
     'override_create',
     'override_library',
     'path_duration',
-    'preview',
     'render_resolution_u',
     'render_resolution_v',
     'resolution_u',
@@ -113,8 +113,6 @@ CURVE_METADATA = [
 ]
 
 
-
-
 SPLINE_METADATA = [
     'hide',
     'material_index',
@@ -138,58 +136,57 @@ SPLINE_METADATA = [
 ]
 
 
-class BlCurve(BlDatablock):
+class BlCurve(ReplicatedDatablock):
     bl_id = "curves"
     bl_class = bpy.types.Curve
-    bl_delay_refresh = 1
-    bl_delay_apply = 1
-    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'CURVE_DATA'
     bl_reload_parent = False
 
-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.curves.new(data["name"], data["type"])
 
-    def _load_implementation(self, data, target):
-        loader = Loader()
-        loader.load(target, data)
+    @staticmethod
+    def load(data: dict, datablock: object):
+        load_animation_data(data.get('animation_data'), datablock)
 
-        target.splines.clear()
+        loader = Loader()
+        loader.load(datablock, data)
+
+        datablock.splines.clear()
 
         # load splines
         for spline in data['splines'].values():
-            new_spline = target.splines.new(spline['type'])
+            new_spline = datablock.splines.new(spline['type'])
 
             # Load curve geometry data
             if new_spline.type == 'BEZIER':
                 bezier_points = new_spline.bezier_points
                 bezier_points.add(spline['bezier_points_count'])
-                np_load_collection(spline['bezier_points'], bezier_points, SPLINE_BEZIER_POINT)
-
-            if new_spline.type == 'POLY':
-                points = new_spline.points
+                np_load_collection(
+                    spline['bezier_points'],
+                    bezier_points,
+                    SPLINE_BEZIER_POINT)
+
+            if new_spline.type in ['POLY', 'NURBS']:
+                points = new_spline.points
                 points.add(spline['points_count'])
                 np_load_collection(spline['points'], points, SPLINE_POINT)
             # Not working for now...
             # See https://blender.stackexchange.com/questions/7020/create-nurbs-surface-with-python
             if new_spline.type == 'NURBS':
                 logging.error("NURBS not supported.")
                 # new_spline.points.add(len(data['splines'][spline]["points"])-1)
                 # for point_index in data['splines'][spline]["points"]:
                 #     loader.load(
                 #         new_spline.points[point_index], data['splines'][spline]["points"][point_index])
 
             loader.load(new_spline, spline)
 
-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+        # MATERIAL SLOTS
+        src_materials = data.get('materials', None)
+        if src_materials:
+            load_materials_slots(src_materials, datablock.materials)
 
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         # Conflicting attributes
         # TODO: remove them with the NURBS support
         dumper.include_filter = CURVE_METADATA
 
         dumper.exclude_filter = [
             'users',
             'order_u',
@@ -198,38 +195,50 @@ class BlCurve(BlDatablock):
             'point_count_u',
             'active_textbox'
         ]
-        if instance.use_auto_texspace:
+        if datablock.use_auto_texspace:
             dumper.exclude_filter.extend([
                 'texspace_location',
                 'texspace_size'])
-        data = dumper.dump(instance)
+        data = dumper.dump(datablock)
 
+        data['animation_data'] = dump_animation_data(datablock)
         data['splines'] = {}
 
-        for index, spline in enumerate(instance.splines):
+        for index, spline in enumerate(datablock.splines):
            dumper.depth = 2
            dumper.include_filter = SPLINE_METADATA
            spline_data = dumper.dump(spline)
 
            if spline.type == 'POLY':
                spline_data['points_count'] = len(spline.points)-1
-               spline_data['points'] = np_dump_collection(spline.points, SPLINE_POINT)
+               spline_data['points'] = np_dump_collection(
+                   spline.points, SPLINE_POINT)
 
            spline_data['bezier_points_count'] = len(spline.bezier_points)-1
-           spline_data['bezier_points'] = np_dump_collection(spline.bezier_points, SPLINE_BEZIER_POINT)
+           spline_data['bezier_points'] = np_dump_collection(
+               spline.bezier_points, SPLINE_BEZIER_POINT)
            data['splines'][index] = spline_data
 
-        if isinstance(instance, T.SurfaceCurve):
+        if isinstance(datablock, T.SurfaceCurve):
             data['type'] = 'SURFACE'
-        elif isinstance(instance, T.TextCurve):
+        elif isinstance(datablock, T.TextCurve):
             data['type'] = 'FONT'
-        elif isinstance(instance, T.Curve):
+        elif isinstance(datablock, T.Curve):
             data['type'] = 'CURVE'
 
+        data['materials'] = dump_materials_slots(datablock.materials)
+
         return data
 
-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.curves)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
        # TODO: resolve material
        deps = []
-       curve = self.instance
+       curve = datablock
 
        if isinstance(curve, T.TextCurve):
            deps.extend([
@@ -237,5 +246,20 @@ class BlCurve(BlDatablock):
                curve.font_bold,
                curve.font_bold_italic,
                curve.font_italic])
 
+       for material in datablock.materials:
+           if material:
+               deps.append(material)
+
+       deps.extend(resolve_animation_dependencies(datablock))
+
        return deps
+
+    @staticmethod
+    def needs_update(datablock: object, data: dict) -> bool:
+        return 'EDIT' not in bpy.context.mode \
+            or get_preferences().sync_flags.sync_during_editmode
+
+
+_type = [bpy.types.Curve, bpy.types.TextCurve]
+_class = BlCurve
@@ -22,73 +22,11 @@ from collections.abc import Iterable
 import bpy
 import mathutils
-from replication.constants import DIFF_BINARY, DIFF_JSON, UP
-from replication.data import ReplicatedDatablock
+from replication.protocol import ReplicatedDatablock
 
 from .. import utils
 from .dump_anything import Dumper, Loader
 
 
-def has_action(target):
-    """ Check if the target datablock has actions
-    """
-    return (hasattr(target, 'animation_data')
-            and target.animation_data
-            and target.animation_data.action)
-
-
-def has_driver(target):
-    """ Check if the target datablock is driven
-    """
-    return (hasattr(target, 'animation_data')
-            and target.animation_data
-            and target.animation_data.drivers)
-
-
-def dump_driver(driver):
-    dumper = Dumper()
-    dumper.depth = 6
-    data = dumper.dump(driver)
-
-    return data
-
-
-def load_driver(target_datablock, src_driver):
-    loader = Loader()
-    drivers = target_datablock.animation_data.drivers
-    src_driver_data = src_driver['driver']
-    new_driver = drivers.new(src_driver['data_path'])
-
-    # Settings
-    new_driver.driver.type = src_driver_data['type']
-    new_driver.driver.expression = src_driver_data['expression']
-    loader.load(new_driver, src_driver)
-
-    # Variables
-    for src_variable in src_driver_data['variables']:
-        src_var_data = src_driver_data['variables'][src_variable]
-        new_var = new_driver.driver.variables.new()
-        new_var.name = src_var_data['name']
-        new_var.type = src_var_data['type']
-
-        for src_target in src_var_data['targets']:
-            src_target_data = src_var_data['targets'][src_target]
-            new_var.targets[src_target].id = utils.resolve_from_id(
-                src_target_data['id'], src_target_data['id_type'])
-            loader.load(
-                new_var.targets[src_target], src_target_data)
-
-    # Fcurve
-    new_fcurve = new_driver.keyframe_points
-    for p in reversed(new_fcurve):
-        new_fcurve.remove(p, fast=True)
-
-    new_fcurve.add(len(src_driver['keyframe_points']))
-
-    for index, src_point in enumerate(src_driver['keyframe_points']):
-        new_point = new_fcurve[index]
-        loader.load(new_point, src_driver['keyframe_points'][src_point])
-
-
 def get_datablock_from_uuid(uuid, default, ignore=[]):
     if not uuid:
         return default
@@ -100,132 +38,8 @@ def get_datablock_from_uuid(uuid, default, ignore=[]):
             return item
     return default
 
 
-class BlDatablock(ReplicatedDatablock):
-    """BlDatablock
-
-    bl_id : blender internal storage identifier
-    bl_class : blender internal type
-    bl_delay_refresh : refresh rate in second for observers
-    bl_delay_apply : refresh rate in sec for apply
-    bl_automatic_push : boolean
-    bl_icon : type icon (blender icon name)
-    bl_check_common: enable check even in common rights
-    bl_reload_parent: reload parent
-    """
-
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        instance = kwargs.get('instance', None)
-
-        self.preferences = utils.get_preferences()
-
-        # TODO: use is_library_indirect
-        self.is_library = (instance and hasattr(instance, 'library') and
-                           instance.library) or \
-            (hasattr(self, 'data') and self.data and 'library' in self.data)
-
-        if instance and hasattr(instance, 'uuid'):
-            instance.uuid = self.uuid
-
-        if logging.getLogger().level == logging.DEBUG:
-            self.diff_method = DIFF_JSON
-        else:
-            self.diff_method = DIFF_BINARY
-
-    def resolve(self):
-        datablock_ref = None
-        datablock_root = getattr(bpy.data, self.bl_id)
-        datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root)
-
-        if not datablock_ref:
-            try:
-                datablock_ref = datablock_root[self.data['name']]
-            except Exception:
-                name = self.data.get('name')
-                logging.debug(f"Constructing {name}")
-                datablock_ref = self._construct(data=self.data)
-
-        if datablock_ref:
-            setattr(datablock_ref, 'uuid', self.uuid)
-
-        self.instance = datablock_ref
-
-    def remove_instance(self):
-        """
-        Remove instance from blender data
-        """
-        assert(self.instance)
-
-        datablock_root = getattr(bpy.data, self.bl_id)
-        datablock_root.remove(self.instance)
-
-    def _dump(self, instance=None):
-        dumper = Dumper()
-        data = {}
-        # Dump animation data
-        if has_action(instance):
-            dumper = Dumper()
-            dumper.include_filter = ['action']
-            data['animation_data'] = dumper.dump(instance.animation_data)
-
-        if has_driver(instance):
-            dumped_drivers = {'animation_data': {'drivers': []}}
-            for driver in instance.animation_data.drivers:
-                dumped_drivers['animation_data']['drivers'].append(
-                    dump_driver(driver))
-
-            data.update(dumped_drivers)
-
-        if self.is_library:
-            data.update(dumper.dump(instance))
-        else:
-            data.update(self._dump_implementation(data, instance=instance))
-
-        return data
-
-    def _dump_implementation(self, data, target):
-        raise NotImplementedError
-
-    def _load(self, data, target):
-        # Load animation data
-        if 'animation_data' in data.keys():
-            if target.animation_data is None:
-                target.animation_data_create()
-
-            for d in target.animation_data.drivers:
-                target.animation_data.drivers.remove(d)
-
-            if 'drivers' in data['animation_data']:
-                for driver in data['animation_data']['drivers']:
-                    load_driver(target, driver)
-
-            if 'action' in data['animation_data']:
-                target.animation_data.action = bpy.data.actions[data['animation_data']['action']]
-
-        if self.is_library:
-            return
-        else:
-            self._load_implementation(data, target)
-
-    def _load_implementation(self, data, target):
-        raise NotImplementedError
-
-    def resolve_deps(self):
-        dependencies = []
-
-        if has_action(self.instance):
-            dependencies.append(self.instance.animation_data.action)
-
-        if not self.is_library:
-            dependencies.extend(self._resolve_deps_implementation())
-
-        logging.debug(f"{self.instance} dependencies: {dependencies}")
-        return dependencies
-
-    def _resolve_deps_implementation(self):
-        return []
-
-    def is_valid(self):
-        return getattr(bpy.data, self.bl_id).get(self.data['name'])
+def resolve_datablock_from_uuid(uuid, bpy_collection):
+    for item in bpy_collection:
+        if getattr(item, 'uuid', None) == uuid:
+            return item
+    return None
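resolve_datablock_from_uuid is the core of the new stateless lookup: every replicated datablock carries a uuid custom property, so resolution is a scan of the matching bpy.data collection. A usage sketch, assuming the add-on has registered its uuid property on ID types (the uuid string is illustrative):

import bpy

mesh = bpy.data.meshes.new("Cube")
mesh.uuid = "6f8a24f0-4c2a-9f3e-illustrative"  # normally assigned by the add-on

found = resolve_datablock_from_uuid(mesh.uuid, bpy.data.meshes)
assert found is mesh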
@@ -19,14 +19,15 @@
 import logging
 import os
 import sys
-from pathlib import Path
+from pathlib import Path, WindowsPath, PosixPath
 
 import bpy
 import mathutils
-from replication.constants import DIFF_BINARY, UP
-from replication.data import ReplicatedDatablock
+from replication.protocol import ReplicatedDatablock
 
 from .. import utils
+from ..utils import get_preferences
 from .dump_anything import Dumper, Loader
 
 
@@ -54,38 +55,20 @@ class BlFile(ReplicatedDatablock):
     bl_id = 'file'
     bl_name = "file"
     bl_class = Path
-    bl_delay_refresh = 2
-    bl_delay_apply = 1
-    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'FILE'
     bl_reload_parent = True
 
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.instance = kwargs.get('instance', None)
-
-        if self.instance and not self.instance.exists():
-            raise FileNotFoundError(str(self.instance))
-
-        self.preferences = utils.get_preferences()
-        self.diff_method = DIFF_BINARY
+    @staticmethod
+    def construct(data: dict) -> object:
+        return Path(get_filepath(data['name']))
 
-    def resolve(self):
-        if self.data:
-            self.instance = Path(get_filepath(self.data['name']))
+    @staticmethod
+    def resolve(data: dict) -> object:
+        return Path(get_filepath(data['name']))
 
-        if not self.instance.exists():
-            logging.debug("File doesn't exist, loading it.")
-            self._load(self.data, self.instance)
-
-    def push(self, socket, identity=None):
-        super().push(socket, identity=None)
-
-        if self.preferences.clear_memory_filecache:
-            del self.data['file']
-
-    def _dump(self, instance=None):
+    @staticmethod
+    def dump(datablock: object) -> dict:
         """
         Read the file and return a dict as:
         {
@@ -97,44 +80,62 @@ class BlFile(ReplicatedDatablock):
         logging.info("Extracting file metadata")
 
         data = {
-            'name': self.instance.name,
+            'name': datablock.name,
         }
 
-        logging.info(
-            f"Reading {self.instance.name} content: {self.instance.stat().st_size} bytes")
+        logging.info(f"Reading {datablock.name} content: {datablock.stat().st_size} bytes")
 
         try:
-            file = open(self.instance, "rb")
+            file = open(datablock, "rb")
             data['file'] = file.read()
-
-            file.close()
         except IOError:
-            logging.warning(f"{self.instance} doesn't exist, skipping")
+            logging.warning(f"{datablock} doesn't exist, skipping")
+        else:
+            file.close()
 
         return data
 
-    def _load(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         """
         Writing the file
         """
 
         try:
-            file = open(target, "wb")
+            file = open(datablock, "wb")
             file.write(data['file'])
 
-            if self.preferences.clear_memory_filecache:
-                del self.data['file']
+            if get_preferences().clear_memory_filecache:
+                del data['file']
         except IOError:
-            logging.warning(f"{target} doesn't exist, skipping")
+            logging.warning(f"{datablock} doesn't exist, skipping")
         else:
            file.close()
 
-    def diff(self):
-        if self.preferences.clear_memory_filecache:
-            return False
-        else:
-            memory_size = sys.getsizeof(self.data['file'])-33
-            disk_size = self.instance.stat().st_size
-            return memory_size != disk_size
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        return []
+
+    @staticmethod
+    def needs_update(datablock: object, data: dict) -> bool:
+        if get_preferences().clear_memory_filecache:
+            return False
+
+        if not datablock:
+            return None
+
+        if not data:
+            return True
+
+        memory_size = sys.getsizeof(data['file'])-33
+        disk_size = datablock.stat().st_size
+
+        if memory_size != disk_size:
+            return True
+        else:
+            return False
+
+
+_type = [WindowsPath, PosixPath]
+_class = BlFile
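The -33 offsets the CPython object header that sys.getsizeof includes for bytes, so the cached payload can be compared against the on-disk size. A quick check of that assumption (the exact overhead is CPython- and build-specific):

import sys

payload = b'x' * 1024
overhead = sys.getsizeof(payload) - len(payload)
print(overhead)  # ~33 on 64-bit CPython; this is what the code subtracts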
@@ -22,22 +22,20 @@ from pathlib import Path
 
 import bpy
 
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
 from .bl_file import get_filepath, ensure_unpacked
 from .dump_anything import Dumper, Loader
+from .bl_datablock import resolve_datablock_from_uuid
 
 
-class BlFont(BlDatablock):
+class BlFont(ReplicatedDatablock):
     bl_id = "fonts"
     bl_class = bpy.types.VectorFont
-    bl_delay_refresh = 1
-    bl_delay_apply = 1
-    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'FILE_FONT'
     bl_reload_parent = False
 
-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         filename = data.get('filename')
 
         if filename == '<builtin>':
@@ -45,31 +43,43 @@ class BlFont(BlDatablock):
         else:
             return bpy.data.fonts.load(get_filepath(filename))
 
-    def _load(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         pass
 
-    def _dump(self, instance=None):
-        if instance.filepath == '<builtin>':
+    @staticmethod
+    def dump(datablock: object) -> dict:
+        if datablock.filepath == '<builtin>':
             filename = '<builtin>'
         else:
-            filename = Path(instance.filepath).name
+            filename = Path(datablock.filepath).name
 
         if not filename:
-            raise FileExistsError(instance.filepath)
+            raise FileExistsError(datablock.filepath)
 
         return {
             'filename': filename,
-            'name': instance.name
+            'name': datablock.name
         }
 
-    def diff(self):
-        return False
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.fonts)
 
-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         deps = []
-        if self.instance.filepath and self.instance.filepath != '<builtin>':
-            ensure_unpacked(self.instance)
+        if datablock.filepath and datablock.filepath != '<builtin>':
+            ensure_unpacked(datablock)
 
-            deps.append(Path(bpy.path.abspath(self.instance.filepath)))
+            deps.append(Path(bpy.path.abspath(datablock.filepath)))
 
         return deps
 
+    @staticmethod
+    def needs_update(datablock: object, data: dict) -> bool:
+        return False
+
+
+_type = bpy.types.VectorFont
+_class = BlFont
@@ -24,10 +24,11 @@ from .dump_anything import (Dumper,
                             Loader,
                             np_dump_collection,
                             np_load_collection)
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
+from ..utils import get_preferences
 
 # GPencil data api is structured as follows:
 # GP-Object --> GP-Layers --> GP-Frames --> GP-Strokes --> GP-Stroke-Points
 
 STROKE_POINT = [
     'co',
@@ -109,6 +110,9 @@ def load_stroke(stroke_data, stroke):
     stroke.points.add(stroke_data["p_count"])
     np_load_collection(stroke_data['points'], stroke.points, STROKE_POINT)
 
+    # HACK: Temporary fix to trigger a BKE_gpencil_stroke_geometry_update to
+    # fix fill issues
+    stroke.uv_scale = stroke_data["uv_scale"]
 
 
 def dump_frame(frame):
@@ -149,6 +153,7 @@ def load_frame(frame_data, frame):
 
     np_load_collection(frame_data['strokes'], frame.strokes, STROKE)
 
+
 def dump_layer(layer):
     """ Dump a grease pencil layer
 
@@ -204,7 +209,7 @@ def dump_layer(layer):
 
     for frame in layer.frames:
         dumped_layer['frames'].append(dump_frame(frame))
 
     return dumped_layer
 
 
@@ -226,50 +231,58 @@ def load_layer(layer_data, layer):
         load_frame(frame_data, target_frame)
 
 
+def layer_changed(datablock: object, data: dict) -> bool:
+    if datablock.layers.active and \
+            datablock.layers.active.info != data["active_layers"]:
+        return True
+    else:
+        return False
+
+
+def frame_changed(data: dict) -> bool:
+    return bpy.context.scene.frame_current != data["eval_frame"]
+
+
-class BlGpencil(BlDatablock):
+class BlGpencil(ReplicatedDatablock):
     bl_id = "grease_pencils"
     bl_class = bpy.types.GreasePencil
-    bl_delay_refresh = 2
-    bl_delay_apply = 1
-    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'GREASEPENCIL'
     bl_reload_parent = False
 
-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.grease_pencils.new(data["name"])
 
-    def _load_implementation(self, data, target):
-        target.materials.clear()
+    @staticmethod
+    def load(data: dict, datablock: object):
+        datablock.materials.clear()
         if "materials" in data.keys():
             for mat in data['materials']:
-                target.materials.append(bpy.data.materials[mat])
+                datablock.materials.append(bpy.data.materials[mat])
 
         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)
 
         # TODO: reuse existing layer
-        for layer in target.layers:
-            target.layers.remove(layer)
+        for layer in datablock.layers:
+            datablock.layers.remove(layer)
 
         if "layers" in data.keys():
             for layer in data["layers"]:
                 layer_data = data["layers"].get(layer)
 
-                # if layer not in target.layers.keys():
-                target_layer = target.layers.new(data["layers"][layer]["info"])
+                # if layer not in datablock.layers.keys():
+                target_layer = datablock.layers.new(data["layers"][layer]["info"])
                 # else:
                 #     target_layer = target.layers[layer]
                 #     target_layer.clear()
 
                 load_layer(layer_data, target_layer)
 
+        datablock.layers.update()
+
 
-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         dumper.depth = 2
         dumper.include_filter = [
@@ -280,19 +293,37 @@ class BlGpencil(BlDatablock):
             'pixel_factor',
             'stroke_depth_order'
         ]
-        data = dumper.dump(instance)
+        data = dumper.dump(datablock)
 
         data['layers'] = {}
 
-        for layer in instance.layers:
+        for layer in datablock.layers:
             data['layers'][layer.info] = dump_layer(layer)
 
+        data["active_layers"] = datablock.layers.active.info if datablock.layers.active else "None"
+        data["eval_frame"] = bpy.context.scene.frame_current
         return data
 
-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.grease_pencils)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
        deps = []
 
-       for material in self.instance.materials:
+       for material in datablock.materials:
            deps.append(material)
 
        return deps
 
+    @staticmethod
+    def needs_update(datablock: object, data: dict) -> bool:
+        return bpy.context.mode == 'OBJECT' \
+            or layer_changed(datablock, data) \
+            or frame_changed(data) \
+            or get_preferences().sync_flags.sync_during_editmode
+
+
+_type = bpy.types.GreasePencil
+_class = BlGpencil
@@ -24,9 +24,12 @@ import bpy
 import mathutils
 
 from .. import utils
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
 from .dump_anything import Dumper, Loader
 from .bl_file import get_filepath, ensure_unpacked
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
 
 
 format_to_ext = {
     'BMP': 'bmp',
@@ -48,35 +51,36 @@ format_to_ext = {
 }
 
 
-class BlImage(BlDatablock):
+class BlImage(ReplicatedDatablock):
     bl_id = "images"
     bl_class = bpy.types.Image
-    bl_delay_refresh = 2
-    bl_delay_apply = 1
-    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'IMAGE_DATA'
     bl_reload_parent = False
 
-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.images.new(
             name=data['name'],
             width=data['size'][0],
             height=data['size'][1]
         )
 
-    def _load(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         loader = Loader()
-        loader.load(data, target)
+        loader.load(data, datablock)
 
-        target.source = 'FILE'
-        target.filepath_raw = get_filepath(data['filename'])
-        target.colorspace_settings.name = data["colorspace_settings"]["name"]
+        datablock.source = 'FILE'
+        datablock.filepath_raw = get_filepath(data['filename'])
+        color_space_name = data["colorspace_settings"]["name"]
 
-    def _dump(self, instance=None):
-        assert(instance)
+        if color_space_name:
+            datablock.colorspace_settings.name = color_space_name
 
-        filename = Path(instance.filepath).name
+    @staticmethod
+    def dump(datablock: object) -> dict:
+        filename = Path(datablock.filepath).name
 
         data = {
             "filename": filename
@@ -86,41 +90,52 @@ class BlImage(BlDatablock):
         dumper.depth = 2
         dumper.include_filter = [
             "name",
             # 'source',
             'size',
             'height',
             'alpha',
             'float_buffer',
             'alpha_mode',
             'colorspace_settings']
-        data.update(dumper.dump(instance))
+        data.update(dumper.dump(datablock))
         return data
 
-    def diff(self):
-        if self.instance.is_dirty:
-            self.instance.save()
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.images)
 
-        if self.instance and (self.instance.name != self.data['name']):
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        deps = []
+
+        if datablock.packed_file:
+            filename = Path(bpy.path.abspath(datablock.filepath)).name
+            datablock.filepath_raw = get_filepath(filename)
+            datablock.save()
+            # An image can't be unpacked to the modified path
+            # TODO: make a bug report
+            datablock.unpack(method="REMOVE")
+
+        elif datablock.source == "GENERATED":
+            filename = f"{datablock.name}.png"
+            datablock.filepath = get_filepath(filename)
+            datablock.save()
+
+        if datablock.filepath:
+            deps.append(Path(bpy.path.abspath(datablock.filepath)))
+
+        return deps
+
+    @staticmethod
+    def needs_update(datablock: object, data: dict) -> bool:
+        if datablock.is_dirty:
+            datablock.save()
+
+        if not data or (datablock and (datablock.name != data.get('name'))):
             return True
         else:
             return False
 
-    def _resolve_deps_implementation(self):
-        deps = []
-
-        if self.instance.packed_file:
-            filename = Path(bpy.path.abspath(self.instance.filepath)).name
-            self.instance.filepath_raw = get_filepath(filename)
-            self.instance.save()
-            # An image can't be unpacked to the modified path
-            # TODO: make a bug report
-            self.instance.unpack(method="REMOVE")
-
-        elif self.instance.source == "GENERATED":
-            filename = f"{self.instance.name}.png"
-            self.instance.filepath = get_filepath(filename)
-            self.instance.save()
-
-        if self.instance.filepath:
-            deps.append(Path(bpy.path.abspath(self.instance.filepath)))
-
-        return deps
+_type = bpy.types.Image
+_class = BlImage
@@ -20,36 +20,39 @@ import bpy
 import mathutils
 
 from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
 from replication.exception import ContextError
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
 
 POINT = ['co', 'weight_softbody', 'co_deform']
 
 
-class BlLattice(BlDatablock):
+class BlLattice(ReplicatedDatablock):
     bl_id = "lattices"
     bl_class = bpy.types.Lattice
-    bl_delay_refresh = 1
-    bl_delay_apply = 1
-    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'LATTICE_DATA'
     bl_reload_parent = False
 
-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.lattices.new(data["name"])
 
-    def _load_implementation(self, data, target):
-        if target.is_editmode:
+    @staticmethod
+    def load(data: dict, datablock: object):
+        load_animation_data(data.get('animation_data'), datablock)
+        if datablock.is_editmode:
             raise ContextError("lattice is in edit mode")
 
         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)
 
-        np_load_collection(data['points'], target.points, POINT)
+        np_load_collection(data['points'], datablock.points, POINT)
 
-    def _dump_implementation(self, data, instance=None):
-        if instance.is_editmode:
+    @staticmethod
+    def dump(datablock: object) -> dict:
+        if datablock.is_editmode:
             raise ContextError("lattice is in edit mode")
 
         dumper = Dumper()
@@ -65,9 +68,20 @@ class BlLattice(BlDatablock):
             'interpolation_type_w',
             'use_outside'
         ]
-        data = dumper.dump(instance)
-
-        data['points'] = np_dump_collection(instance.points, POINT)
+        data = dumper.dump(datablock)
 
+        data['points'] = np_dump_collection(datablock.points, POINT)
+        data['animation_data'] = dump_animation_data(datablock)
         return data
 
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.lattices)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        return resolve_animation_dependencies(datablock)
+
+
+_type = bpy.types.Lattice
+_class = BlLattice
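np_dump_collection/np_load_collection (from dump_anything.py) flatten a bpy_prop_collection attribute into a flat numpy buffer via Blender's foreach_get/foreach_set. A minimal sketch of the underlying idea for the lattice co_deform attribute — an illustration of the mechanism, not the helpers' exact code:

import bpy
import numpy as np

lattice = bpy.data.lattices.new("Lattice")

# Dump: one flat float32 buffer, 3 components per point
co = np.empty(len(lattice.points) * 3, dtype=np.float32)
lattice.points.foreach_get('co_deform', co)

# Load: write the whole buffer back in a single call
lattice.points.foreach_set('co_deform', co)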
@@ -1,48 +0,0 @@
-# ##### BEGIN GPL LICENSE BLOCK #####
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
-#
-# ##### END GPL LICENSE BLOCK #####
-
-
-import bpy
-import mathutils
-
-from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock
-
-
-class BlLibrary(BlDatablock):
-    bl_id = "libraries"
-    bl_class = bpy.types.Library
-    bl_delay_refresh = 1
-    bl_delay_apply = 1
-    bl_automatic_push = True
-    bl_check_common = False
-    bl_icon = 'LIBRARY_DATA_DIRECT'
-    bl_reload_parent = False
-
-    def _construct(self, data):
-        with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData):
-            targetData = sourceData
-        return sourceData
-
-    def _load(self, data, target):
-        pass
-
-    def _dump(self, instance=None):
-        assert(instance)
-        dumper = Dumper()
-        return dumper.dump(instance)
@@ -20,28 +20,32 @@ import bpy
 import mathutils
 
 from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
 
 
-class BlLight(BlDatablock):
+class BlLight(ReplicatedDatablock):
     bl_id = "lights"
     bl_class = bpy.types.Light
-    bl_delay_refresh = 1
-    bl_delay_apply = 1
-    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'LIGHT_DATA'
     bl_reload_parent = False
 
-    def _construct(self, data):
-        return bpy.data.lights.new(data["name"], data["type"])
+    @staticmethod
+    def construct(data: dict) -> object:
+        instance = bpy.data.lights.new(data["name"], data["type"])
+        instance.uuid = data.get("uuid")
+        return instance
 
-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)
+        load_animation_data(data.get('animation_data'), datablock)
 
-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         dumper.depth = 3
         dumper.include_filter = [
@@ -70,9 +74,23 @@ class BlLight(BlDatablock):
             'spot_size',
             'spot_blend'
         ]
-        data = dumper.dump(instance)
+        data = dumper.dump(datablock)
+        data['animation_data'] = dump_animation_data(datablock)
         return data
 
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.lights)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        deps = []
+
+        deps.extend(resolve_animation_dependencies(datablock))
+
+        return deps
+
+
+_type = [bpy.types.SpotLight, bpy.types.PointLight, bpy.types.AreaLight, bpy.types.SunLight]
+_class = BlLight
@@ -21,20 +21,18 @@ import mathutils
 import logging
 
 from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid
 
 
-class BlLightprobe(BlDatablock):
+class BlLightprobe(ReplicatedDatablock):
     bl_id = "lightprobes"
     bl_class = bpy.types.LightProbe
-    bl_delay_refresh = 1
-    bl_delay_apply = 1
-    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'LIGHTPROBE_GRID'
     bl_reload_parent = False
 
-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type']
         # See https://developer.blender.org/D6396
         if bpy.app.version[1] >= 83:
@@ -42,12 +40,13 @@ class BlLightprobe(BlDatablock):
         else:
             logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
 
-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)
 
-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+    @staticmethod
+    def dump(datablock: object) -> dict:
         if bpy.app.version[1] < 83:
             logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
 
@@ -74,7 +73,16 @@ class BlLightprobe(BlDatablock):
             'visibility_blur'
         ]
 
-        return dumper.dump(instance)
+        return dumper.dump(datablock)
+
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.lightprobes)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        return []
+
 
 _type = bpy.types.LightProbe
 _class = BlLightprobe
@@ -24,334 +24,77 @@ import re
 from uuid import uuid4
 
 from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock, get_datablock_from_uuid
+from replication.protocol import ReplicatedDatablock
 
-NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
+from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
+from .node_tree import load_node_tree, dump_node_tree, get_node_tree_dependencies
 
+
+def dump_materials_slots(materials: bpy.types.bpy_prop_collection) -> list:
+    """ Dump material slots collection
 
-def load_node(node_data, node_tree):
-    """ Load a node into a node_tree from a dict
-
-    :arg node_data: dumped node data
-    :type node_data: dict
-    :arg node_tree: target node_tree
-    :type node_tree: bpy.types.NodeTree
+    :arg materials: material slots collection to dump
+    :type materials: bpy.types.bpy_prop_collection
+    :return: list of tuples (mat_uuid, mat_name)
     """
-    loader = Loader()
-    target_node = node_tree.nodes.new(type=node_data["bl_idname"])
-    target_node.select = False
-    loader.load(target_node, node_data)
-    image_uuid = node_data.get('image_uuid', None)
-    node_tree_uuid = node_data.get('node_tree_uuid', None)
-
-    if image_uuid and not target_node.image:
-        target_node.image = get_datablock_from_uuid(image_uuid, None)
-
-    if node_tree_uuid:
-        target_node.node_tree = get_datablock_from_uuid(node_tree_uuid, None)
-
-    inputs_data = node_data.get('inputs')
-    if inputs_data:
-        inputs = target_node.inputs
-        for idx, inpt in enumerate(inputs_data):
-            if idx < len(inputs) and hasattr(inputs[idx], "default_value"):
-                try:
-                    inputs[idx].default_value = inpt
-                except Exception as e:
-                    logging.warning(f"Node {target_node.name} input {inputs[idx].name} parameter not supported, skipping ({e})")
-            else:
-                logging.warning(f"Node {target_node.name} input length mismatch.")
-
-    outputs_data = node_data.get('outputs')
-    if outputs_data:
-        outputs = target_node.outputs
-        for idx, output in enumerate(outputs_data):
-            if idx < len(outputs) and hasattr(outputs[idx], "default_value"):
-                try:
-                    outputs[idx].default_value = output
-                except Exception as e:
-                    logging.warning(f"Node {target_node.name} output {outputs[idx].name} parameter not supported, skipping ({e})")
-            else:
-                logging.warning(f"Node {target_node.name} output length mismatch.")
+    return [(m.uuid, m.name) for m in materials if m]
 
 
-def load_links(links_data, node_tree):
-    """ Load node_tree links from a list
+def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_collection):
+    """ Load material slots
 
-    :arg links_data: dumped node links
-    :type links_data: list
-    :arg node_tree: node links collection
-    :type node_tree: bpy.types.NodeTree
+    :arg src_materials: dumped material collection (ex: object.materials)
+    :type src_materials: list of tuples (uuid, name)
+    :arg dst_materials: target material collection pointer
+    :type dst_materials: bpy.types.bpy_prop_collection
     """
+    # MATERIAL SLOTS
+    dst_materials.clear()
 
-    for link in links_data:
-        input_socket = node_tree.nodes[link['to_node']
-                                       ].inputs[int(link['to_socket'])]
-        output_socket = node_tree.nodes[link['from_node']].outputs[int(
-            link['from_socket'])]
-        node_tree.links.new(input_socket, output_socket)
+    for mat_uuid, mat_name in src_materials:
+        mat_ref = None
+        if mat_uuid is not None:
+            mat_ref = get_datablock_from_uuid(mat_uuid, None)
+        else:
+            mat_ref = bpy.data.materials[mat_name]
+
+        dst_materials.append(mat_ref)
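The two helpers are mirror images: slots serialize to (uuid, name) tuples, and loading prefers the uuid lookup with a name-based fallback. A small round-trip sketch (names illustrative; uuid is the add-on's registered custom property):

import bpy

mat = bpy.data.materials.new("Illustrative")
mesh = bpy.data.meshes.new("Plane")
mesh.materials.append(mat)

slots = dump_materials_slots(mesh.materials)  # e.g. [(mat.uuid, 'Illustrative')]
load_materials_slots(slots, mesh.materials)   # rebuilds the same slot layout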
-def dump_links(links):
-    """ Dump node_tree links collection to a list
-
-    :arg links: node links collection
-    :type links: bpy.types.NodeLinks
-    :return: list
-    """
-
-    links_data = []
-
-    for link in links:
-        to_socket = NODE_SOCKET_INDEX.search(
-            link.to_socket.path_from_id()).group(1)
-        from_socket = NODE_SOCKET_INDEX.search(
-            link.from_socket.path_from_id()).group(1)
-        links_data.append({
-            'to_node': link.to_node.name,
-            'to_socket': to_socket,
-            'from_node': link.from_node.name,
-            'from_socket': from_socket,
-        })
-
-    return links_data
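NODE_SOCKET_INDEX extracts the numeric index from the RNA path that path_from_id() returns, so links are stored by socket position rather than by display name (which need not be unique). Roughly:

import re

NODE_SOCKET_INDEX = re.compile(r'\[(\d*)\]')

path = 'nodes["Principled BSDF"].inputs[7]'  # shape of path_from_id() output
index = NODE_SOCKET_INDEX.search(path).group(1)
assert index == '7'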
-def dump_node(node):
-    """ Dump a single node to a dict
-
-    :arg node: target node
-    :type node: bpy.types.Node
-    :return: dict
-    """
-
-    node_dumper = Dumper()
-    node_dumper.depth = 1
-    node_dumper.exclude_filter = [
-        "dimensions",
-        "show_expanded",
-        "name_full",
-        "select",
-        "bl_label",
-        "bl_height_min",
-        "bl_height_max",
-        "bl_height_default",
-        "bl_width_min",
-        "bl_width_max",
-        "type",
-        "bl_icon",
-        "bl_width_default",
-        "bl_static_type",
-        "show_tetxure",
-        "is_active_output",
-        "hide",
-        "show_options",
-        "show_preview",
-        "show_texture",
-        "outputs",
-        "width_hidden",
-        "image"
-    ]
-
-    dumped_node = node_dumper.dump(node)
-
-    dump_io_needed = (node.type not in ['REROUTE', 'OUTPUT_MATERIAL'])
-
-    if dump_io_needed:
-        io_dumper = Dumper()
-        io_dumper.depth = 2
-        io_dumper.include_filter = ["default_value"]
-
-        if hasattr(node, 'inputs'):
-            dumped_node['inputs'] = []
-            for idx, inpt in enumerate(node.inputs):
-                if hasattr(inpt, 'default_value'):
-                    dumped_node['inputs'].append(io_dumper.dump(inpt.default_value))
-
-        if hasattr(node, 'outputs'):
-            dumped_node['outputs'] = []
-            for idx, output in enumerate(node.outputs):
-                if hasattr(output, 'default_value'):
-                    dumped_node['outputs'].append(io_dumper.dump(output.default_value))
-
-    if hasattr(node, 'color_ramp'):
-        ramp_dumper = Dumper()
-        ramp_dumper.depth = 4
-        ramp_dumper.include_filter = [
-            'elements',
-            'alpha',
-            'color',
-            'position',
-            'interpolation',
-            'color_mode'
-        ]
-        dumped_node['color_ramp'] = ramp_dumper.dump(node.color_ramp)
-    if hasattr(node, 'mapping'):
-        curve_dumper = Dumper()
-        curve_dumper.depth = 5
-        curve_dumper.include_filter = [
-            'curves',
-            'points',
-            'location'
-        ]
-        dumped_node['mapping'] = curve_dumper.dump(node.mapping)
-    if hasattr(node, 'image') and getattr(node, 'image'):
-        dumped_node['image_uuid'] = node.image.uuid
-    if hasattr(node, 'node_tree') and getattr(node, 'node_tree'):
-        dumped_node['node_tree_uuid'] = node.node_tree.uuid
-    return dumped_node
-
-
-def dump_shader_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict:
-    """ Dump a shader node_tree to a dict including links and nodes
-
-    :arg node_tree: dumped shader node tree
-    :type node_tree: bpy.types.ShaderNodeTree
-    :return: dict
-    """
-    node_tree_data = {
-        'nodes': {node.name: dump_node(node) for node in node_tree.nodes},
-        'links': dump_links(node_tree.links),
-        'name': node_tree.name,
-        'type': type(node_tree).__name__
-    }
-
-    for socket_id in ['inputs', 'outputs']:
-        socket_collection = getattr(node_tree, socket_id)
-        node_tree_data[socket_id] = dump_node_tree_sockets(socket_collection)
-
-    return node_tree_data
-
-
-def dump_node_tree_sockets(sockets: bpy.types.Collection) -> dict:
-    """ dump sockets of a shader_node_tree
-
-    :arg target_node_tree: target node_tree
-    :type target_node_tree: bpy.types.NodeTree
-    :arg socket_id: socket identifier
-    :type socket_id: str
-    :return: dict
-    """
-    sockets_data = []
-    for socket in sockets:
-        try:
-            socket_uuid = socket['uuid']
-        except Exception:
-            socket_uuid = str(uuid4())
-            socket['uuid'] = socket_uuid
-
-        sockets_data.append((socket.name, socket.bl_socket_idname, socket_uuid))
-
-    return sockets_data
-
-
-def load_node_tree_sockets(sockets: bpy.types.Collection,
-                           sockets_data: dict):
-    """ load sockets of a shader_node_tree
-
-    :arg target_node_tree: target node_tree
-    :type target_node_tree: bpy.types.NodeTree
-    :arg socket_id: socket identifier
-    :type socket_id: str
-    :arg socket_data: dumped socket data
-    :type socket_data: dict
-    """
-    # Check for removed sockets
-    for socket in sockets:
-        if not [s for s in sockets_data if socket['uuid'] == s[2]]:
-            sockets.remove(socket)
-
-    # Check for new sockets
-    for idx, socket_data in enumerate(sockets_data):
-        try:
-            checked_socket = sockets[idx]
-            if checked_socket.name != socket_data[0]:
-                checked_socket.name = socket_data[0]
-        except Exception:
-            s = sockets.new(socket_data[1], socket_data[0])
-            s['uuid'] = socket_data[2]
-
-
-def load_shader_node_tree(node_tree_data: dict, target_node_tree: bpy.types.ShaderNodeTree) -> dict:
-    """Load a shader node_tree from dumped data
-
-    :arg node_tree_data: dumped node data
-    :type node_tree_data: dict
-    :arg target_node_tree: target node_tree
-    :type target_node_tree: bpy.types.NodeTree
-    """
-    # TODO: load only required nodes
-    target_node_tree.nodes.clear()
-
-    if not target_node_tree.is_property_readonly('name'):
-        target_node_tree.name = node_tree_data['name']
-
-    if 'inputs' in node_tree_data:
-        socket_collection = getattr(target_node_tree, 'inputs')
-        load_node_tree_sockets(socket_collection, node_tree_data['inputs'])
-
-    if 'outputs' in node_tree_data:
-        socket_collection = getattr(target_node_tree, 'outputs')
-        load_node_tree_sockets(socket_collection, node_tree_data['outputs'])
-
-    # Load nodes
-    for node in node_tree_data["nodes"]:
-        load_node(node_tree_data["nodes"][node], target_node_tree)
-
-    # TODO: load only required nodes links
-    # Load nodes links
-    target_node_tree.links.clear()
-
-    load_links(node_tree_data["links"], target_node_tree)
-
-
-def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
-    has_image = lambda node: (node.type in ['TEX_IMAGE', 'TEX_ENVIRONMENT'] and node.image)
-    has_node_group = lambda node: (hasattr(node, 'node_tree') and node.node_tree)
-
-    deps = []
-
-    for node in node_tree.nodes:
-        if has_image(node):
-            deps.append(node.image)
-        elif has_node_group(node):
-            deps.append(node.node_tree)
-
-    return deps
-
-
-class BlMaterial(BlDatablock):
+class BlMaterial(ReplicatedDatablock):
     bl_id = "materials"
     bl_class = bpy.types.Material
-    bl_delay_refresh = 1
-    bl_delay_apply = 1
-    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'MATERIAL_DATA'
     bl_reload_parent = False
 
-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.materials.new(data["name"])
 
-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
+        load_animation_data(data.get('animation_data'), datablock)
+
         loader = Loader()
 
         is_grease_pencil = data.get('is_grease_pencil')
         use_nodes = data.get('use_nodes')
 
-        loader.load(target, data)
+        loader.load(datablock, data)
 
         if is_grease_pencil:
-            if not target.is_grease_pencil:
-                bpy.data.materials.create_gpencil_data(target)
-            loader.load(target.grease_pencil, data['grease_pencil'])
+            if not datablock.is_grease_pencil:
+                bpy.data.materials.create_gpencil_data(datablock)
+            loader.load(datablock.grease_pencil, data['grease_pencil'])
         elif use_nodes:
-            if target.node_tree is None:
-                target.use_nodes = True
+            if datablock.node_tree is None:
+                datablock.use_nodes = True
 
-            load_shader_node_tree(data['node_tree'], target.node_tree)
+            load_node_tree(data['node_tree'], datablock.node_tree)
 
-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+    @staticmethod
+    def dump(datablock: object) -> dict:
         mat_dumper = Dumper()
         mat_dumper.depth = 2
         mat_dumper.include_filter = [
@@ -377,9 +120,9 @@ class BlMaterial(BlDatablock):
             'line_priority',
             'is_grease_pencil'
         ]
-        data = mat_dumper.dump(instance)
+        data = mat_dumper.dump(datablock)
 
-        if instance.is_grease_pencil:
+        if datablock.is_grease_pencil:
             gp_mat_dumper = Dumper()
             gp_mat_dumper.depth = 3
 
@@ -409,20 +152,32 @@ class BlMaterial(BlDatablock):
                 'fill_style',
                 'gradient_type',
                 # 'fill_image',
+                'use_stroke_holdout',
+                'use_overlap_strokes',
+                'use_fill_holdout',
             ]
-            data['grease_pencil'] = gp_mat_dumper.dump(instance.grease_pencil)
-        elif instance.use_nodes:
-            data['node_tree'] = dump_shader_node_tree(instance.node_tree)
+            data['grease_pencil'] = gp_mat_dumper.dump(datablock.grease_pencil)
+        elif datablock.use_nodes:
+            data['node_tree'] = dump_node_tree(datablock.node_tree)
 
+        data['animation_data'] = dump_animation_data(datablock)
         return data
 
-    def _resolve_deps_implementation(self):
-        # TODO: resolve node group deps
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.materials)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         deps = []
 
-        if self.instance.use_nodes:
-            deps.extend(get_node_tree_dependencies(self.instance.node_tree))
-        if self.is_library:
-            deps.append(self.instance.library)
+        if datablock.use_nodes:
+            deps.extend(get_node_tree_dependencies(datablock.node_tree))
+
+        deps.extend(resolve_animation_dependencies(datablock))
 
         return deps
 
+
 _type = bpy.types.Material
 _class = BlMaterial
@ -25,7 +25,13 @@ import numpy as np

 from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
 from replication.constants import DIFF_BINARY
 from replication.exception import ContextError
-from .bl_datablock import BlDatablock, get_datablock_from_uuid
+from replication.protocol import ReplicatedDatablock
+
+from .bl_datablock import get_datablock_from_uuid
+from .bl_material import dump_materials_slots, load_materials_slots
+from ..utils import get_preferences
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

 VERTICE = ['co']

@ -33,6 +39,8 @@ EDGE = [
     'vertices',
     'crease',
     'bevel_weight',
+    'use_seam',
+    'use_edge_sharp',
 ]
 LOOP = [
     'vertex_index',
@ -46,89 +54,77 @@ POLYGON = [
     'material_index',
 ]

-class BlMesh(BlDatablock):
+class BlMesh(ReplicatedDatablock):
     bl_id = "meshes"
     bl_class = bpy.types.Mesh
-    bl_delay_refresh = 2
-    bl_delay_apply = 1
-    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'MESH_DATA'
-    bl_reload_parent = False
+    bl_reload_parent = True

-    def _construct(self, data):
-        instance = bpy.data.meshes.new(data["name"])
-        instance.uuid = self.uuid
-        return instance
+    @staticmethod
+    def construct(data: dict) -> object:
+        return bpy.data.meshes.new(data.get("name"))

-    def _load_implementation(self, data, target):
-        if not target or target.is_editmode:
+    @staticmethod
+    def load(data: dict, datablock: object):
+        if not datablock or datablock.is_editmode:
             raise ContextError
         else:
+            load_animation_data(data.get('animation_data'), datablock)
+
             loader = Loader()
-            loader.load(target, data)
+            loader.load(datablock, data)

             # MATERIAL SLOTS
-            target.materials.clear()
-
-            for mat_uuid, mat_name in data["material_list"]:
-                mat_ref = None
-                if mat_uuid is not None:
-                    mat_ref = get_datablock_from_uuid(mat_uuid, None)
-                else:
-                    mat_ref = bpy.data.materials.get(mat_name, None)
-
-                if mat_ref is None:
-                    raise Exception("Material doesn't exist")
-
-                target.materials.append(mat_ref)
+            src_materials = data.get('materials', None)
+            if src_materials:
+                load_materials_slots(src_materials, datablock.materials)

             # CLEAR GEOMETRY
-            if target.vertices:
-                target.clear_geometry()
+            if datablock.vertices:
+                datablock.clear_geometry()

-            target.vertices.add(data["vertex_count"])
-            target.edges.add(data["egdes_count"])
-            target.loops.add(data["loop_count"])
-            target.polygons.add(data["poly_count"])
+            datablock.vertices.add(data["vertex_count"])
+            datablock.edges.add(data["egdes_count"])
+            datablock.loops.add(data["loop_count"])
+            datablock.polygons.add(data["poly_count"])

             # LOADING
-            np_load_collection(data['vertices'], target.vertices, VERTICE)
-            np_load_collection(data['edges'], target.edges, EDGE)
-            np_load_collection(data['loops'], target.loops, LOOP)
-            np_load_collection(data["polygons"], target.polygons, POLYGON)
+            np_load_collection(data['vertices'], datablock.vertices, VERTICE)
+            np_load_collection(data['edges'], datablock.edges, EDGE)
+            np_load_collection(data['loops'], datablock.loops, LOOP)
+            np_load_collection(data["polygons"], datablock.polygons, POLYGON)

             # UV Layers
             if 'uv_layers' in data.keys():
                 for layer in data['uv_layers']:
-                    if layer not in target.uv_layers:
-                        target.uv_layers.new(name=layer)
+                    if layer not in datablock.uv_layers:
+                        datablock.uv_layers.new(name=layer)

                     np_load_collection_primitives(
-                        target.uv_layers[layer].data,
+                        datablock.uv_layers[layer].data,
                         'uv',
                         data["uv_layers"][layer]['data'])

             # Vertex color
             if 'vertex_colors' in data.keys():
                 for color_layer in data['vertex_colors']:
-                    if color_layer not in target.vertex_colors:
-                        target.vertex_colors.new(name=color_layer)
+                    if color_layer not in datablock.vertex_colors:
+                        datablock.vertex_colors.new(name=color_layer)

                     np_load_collection_primitives(
-                        target.vertex_colors[color_layer].data,
+                        datablock.vertex_colors[color_layer].data,
                         'color',
                         data["vertex_colors"][color_layer]['data'])

-            target.validate()
-            target.update()
+            datablock.validate()
+            datablock.update()

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
-
-        if instance.is_editmode and not self.preferences.sync_flags.sync_during_editmode:
+    @staticmethod
+    def dump(datablock: object) -> dict:
+        if (datablock.is_editmode or bpy.context.mode == "SCULPT") and not get_preferences().sync_flags.sync_during_editmode:
             raise ContextError("Mesh is in edit mode")
-        mesh = instance
+        mesh = datablock

         dumper = Dumper()
         dumper.depth = 1
@ -142,6 +138,8 @@ class BlMesh(BlDatablock):

         data = dumper.dump(mesh)

+        data['animation_data'] = dump_animation_data(datablock)
+
         # VERTICES
         data["vertex_count"] = len(mesh.vertices)
         data["vertices"] = np_dump_collection(mesh.vertices, VERTICE)
@ -172,16 +170,31 @@ class BlMesh(BlDatablock):
             data['vertex_colors'][color_map.name] = {}
             data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')

-        # Fix material index
-        data['material_list'] = [(m.uuid, m.name) for m in instance.materials if m]
-
+        # Materials
+        data['materials'] = dump_materials_slots(datablock.materials)
         return data

-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         deps = []

-        for material in self.instance.materials:
+        for material in datablock.materials:
             if material:
                 deps.append(material)

+        deps.extend(resolve_animation_dependencies(datablock))
+
         return deps

+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.meshes)
+
+    @staticmethod
+    def needs_update(datablock: object, data: dict) -> bool:
+        return ('EDIT' not in bpy.context.mode and bpy.context.mode != 'SCULPT') \
+            or get_preferences().sync_flags.sync_during_editmode
+
+
+_type = bpy.types.Mesh
+_class = BlMesh
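
The mesh handler round-trips geometry through flat per-attribute arrays: counts are dumped first so load() can pre-size the collections with add(), then np_load_collection() writes each attribute back. A rough illustration of that contract, assuming the add-on's module layout and that the np_* helpers keep the signatures used above:

    import bpy
    from multi_user.bl_types.dump_anything import np_dump_collection, np_load_collection

    VERTICE = ['co']

    src = bpy.data.meshes['Cube']            # any existing mesh
    dumped = np_dump_collection(src.vertices, VERTICE)

    dst = bpy.data.meshes.new('Copy')
    dst.vertices.add(len(src.vertices))      # collections must be pre-sized
    np_load_collection(dumped, dst.vertices, VERTICE)
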
@ -23,7 +23,9 @@ from .dump_anything import (
     Dumper, Loader, np_dump_collection_primitive, np_load_collection_primitives,
     np_dump_collection, np_load_collection)

-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


 ELEMENT = [
@ -62,32 +64,33 @@ def load_metaball_elements(elements_data, elements):
     np_load_collection(elements_data, elements, ELEMENT)


-class BlMetaball(BlDatablock):
+class BlMetaball(ReplicatedDatablock):
     bl_id = "metaballs"
     bl_class = bpy.types.MetaBall
-    bl_delay_refresh = 1
-    bl_delay_apply = 1
-    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'META_BALL'
     bl_reload_parent = False

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.metaballs.new(data["name"])

-    def _load_implementation(self, data, target):
-        loader = Loader()
-        loader.load(target, data)
+    @staticmethod
+    def load(data: dict, datablock: object):
+        load_animation_data(data.get('animation_data'), datablock)

-        target.elements.clear()
+        loader = Loader()
+        loader.load(datablock, data)
+
+        datablock.elements.clear()

         for mtype in data["elements"]['type']:
-            new_element = target.elements.new()
+            new_element = datablock.elements.new()

-        load_metaball_elements(data['elements'], target.elements)
+        load_metaball_elements(data['elements'], datablock.elements)

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         dumper.depth = 1
         dumper.include_filter = [
@ -101,7 +104,24 @@ class BlMetaball(BlDatablock):
             'texspace_size'
         ]

-        data = dumper.dump(instance)
-        data['elements'] = dump_metaball_elements(instance.elements)
+        data = dumper.dump(datablock)
+        data['animation_data'] = dump_animation_data(datablock)
+        data['elements'] = dump_metaball_elements(datablock.elements)

         return data

+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.metaballs)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        deps = []
+
+        deps.extend(resolve_animation_dependencies(datablock))
+
+        return deps
+
+
+_type = bpy.types.MetaBall
+_class = BlMetaball
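
The element loop above relies on np_dump_collection() storing the collection column-wise (one array per attribute), so iterating data["elements"]['type'] yields exactly one entry per element to recreate before the attribute arrays are loaded back. Roughly, the dumped payload looks like this (illustrative values):

    data['elements'] = {
        'type':   ['BALL', 'CAPSULE'],    # one entry per metaball element
        'co':     [[0, 0, 0], [1, 0, 0]],
        'radius': [1.0, 0.5],
        # ... one flat array per name in ELEMENT
    }
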
@ -20,29 +20,43 @@ import bpy
 import mathutils

 from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
-from .bl_datablock import BlDatablock
-from .bl_material import (dump_shader_node_tree,
-                          load_shader_node_tree,
+from replication.protocol import ReplicatedDatablock
+from .bl_material import (dump_node_tree,
+                          load_node_tree,
                           get_node_tree_dependencies)
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

-class BlNodeGroup(BlDatablock):
+class BlNodeGroup(ReplicatedDatablock):
     bl_id = "node_groups"
-    bl_class = bpy.types.ShaderNodeTree
-    bl_delay_refresh = 1
-    bl_delay_apply = 1
-    bl_automatic_push = True
+    bl_class = bpy.types.NodeTree
     bl_check_common = False
     bl_icon = 'NODETREE'
     bl_reload_parent = False

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.node_groups.new(data["name"], data["type"])

-    def _load_implementation(self, data, target):
-        load_shader_node_tree(data, target)
+    @staticmethod
+    def load(data: dict, datablock: object):
+        load_node_tree(data, datablock)

-    def _dump_implementation(self, data, instance=None):
-        return dump_shader_node_tree(instance)
+    @staticmethod
+    def dump(datablock: object) -> dict:
+        return dump_node_tree(datablock)

-    def _resolve_deps_implementation(self):
-        return get_node_tree_dependencies(self.instance)
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.node_groups)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        deps = []
+        deps.extend(get_node_tree_dependencies(datablock))
+        deps.extend(resolve_animation_dependencies(datablock))
+        return deps
+
+
+_type = [bpy.types.ShaderNodeTree, bpy.types.GeometryNodeTree]
+_class = BlNodeGroup
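
_type may now be a list, letting one handler serve several Blender datablock types. A sketch of how a registry could key both node-tree types to the same class; the real registration code lives in the replication protocol and is not part of this diff, so the registry shown here is hypothetical:

    implementations = {}

    def register_implementation(_type, _class):
        # Accept a single bpy type or a list of them.
        types = _type if isinstance(_type, list) else [_type]
        for t in types:
            implementations[t] = _class

    register_implementation(_type, _class)  # BlNodeGroup handles both tree types
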
@ -17,13 +17,153 @@


 import logging

+import re
 import bpy
 import mathutils
 from replication.exception import ContextError

-from .bl_datablock import BlDatablock, get_datablock_from_uuid
-from .dump_anything import Dumper, Loader
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
+from .node_tree import IGNORED_SOCKETS
+from ..utils import get_preferences
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
+from .dump_anything import (
+    Dumper,
+    Loader,
+    np_load_collection,
+    np_dump_collection)


+SKIN_DATA = [
+    'radius',
+    'use_loose',
+    'use_root'
+]
+
+SHAPEKEY_BLOCK_ATTR = [
+    'mute',
+    'value',
+    'slider_min',
+    'slider_max',
+]
+
+
+if bpy.app.version[1] >= 93:
+    SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
+else:
+    SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str)
+    logging.warning("Geometry node Float parameter not supported in \
+                    blender 2.92.")
+
+
+def get_node_group_inputs(node_group):
+    inputs = []
+    for inpt in node_group.inputs:
+        if inpt.type in IGNORED_SOCKETS:
+            continue
+        else:
+            inputs.append(inpt)
+    return inputs
+    # return [inpt.identifer for inpt in node_group.inputs if inpt.type not in IGNORED_SOCKETS]
+
+
+def dump_physics(target: bpy.types.Object) -> dict:
+    """
+    Dump all physics settings from a given object, excluding modifier-related
+    physics settings (such as softbody, cloth, dynapaint and fluid).
+    """
+    dumper = Dumper()
+    dumper.depth = 1
+    physics_data = {}
+
+    # Collisions (collision)
+    if target.collision and target.collision.use:
+        physics_data['collision'] = dumper.dump(target.collision)
+
+    # Field (field)
+    if target.field and target.field.type != "NONE":
+        physics_data['field'] = dumper.dump(target.field)
+
+    # Rigid Body (rigid_body)
+    if target.rigid_body:
+        physics_data['rigid_body'] = dumper.dump(target.rigid_body)
+
+    # Rigid Body constraint (rigid_body_constraint)
+    if target.rigid_body_constraint:
+        physics_data['rigid_body_constraint'] = dumper.dump(target.rigid_body_constraint)
+
+    return physics_data
+
+
+def load_physics(dumped_settings: dict, target: bpy.types.Object):
+    """ Load all physics settings onto a given object, excluding modifier-related
+        physics settings (such as softbody, cloth, dynapaint and fluid).
+    """
+    loader = Loader()
+
+    if 'collision' in dumped_settings:
+        loader.load(target.collision, dumped_settings['collision'])
+
+    if 'field' in dumped_settings:
+        loader.load(target.field, dumped_settings['field'])
+
+    if 'rigid_body' in dumped_settings:
+        if not target.rigid_body:
+            bpy.ops.rigidbody.object_add({"object": target})
+        loader.load(target.rigid_body, dumped_settings['rigid_body'])
+    elif target.rigid_body:
+        bpy.ops.rigidbody.object_remove({"object": target})
+
+    if 'rigid_body_constraint' in dumped_settings:
+        if not target.rigid_body_constraint:
+            bpy.ops.rigidbody.constraint_add({"object": target})
+        loader.load(target.rigid_body_constraint, dumped_settings['rigid_body_constraint'])
+    elif target.rigid_body_constraint:
+        bpy.ops.rigidbody.constraint_remove({"object": target})

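
dump_physics()/load_physics() form a symmetric pair for the non-modifier physics state. A hedged usage sketch, copying physics settings between two objects inside Blender (the object names are hypothetical):

    import bpy

    src = bpy.data.objects['Source']
    dst = bpy.data.objects['Target']

    settings = dump_physics(src)   # dict with optional collision/field/rigid_body keys
    load_physics(settings, dst)    # adds or removes rigid-body state via bpy.ops as needed
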

+def dump_modifier_geometry_node_inputs(modifier: bpy.types.Modifier) -> list:
+    """ Dump geometry node modifier input properties
+
+    :arg modifier: geometry node modifier to dump
+    :type modifier: bpy.types.Modifier
+    """
+    dumped_inputs = []
+    for inpt in get_node_group_inputs(modifier.node_group):
+        input_value = modifier[inpt.identifier]
+
+        dumped_input = None
+        if isinstance(input_value, bpy.types.ID):
+            dumped_input = input_value.uuid
+        elif isinstance(input_value, SUPPORTED_GEOMETRY_NODE_PARAMETERS):
+            dumped_input = input_value
+        elif hasattr(input_value, 'to_list'):
+            dumped_input = input_value.to_list()
+        dumped_inputs.append(dumped_input)
+
+    return dumped_inputs
+
+
+def load_modifier_geometry_node_inputs(dumped_modifier: dict, target_modifier: bpy.types.Modifier):
+    """ Load geometry node modifier inputs
+
+    :arg dumped_modifier: source dumped modifier to load
+    :type dumped_modifier: dict
+    :arg target_modifier: target geometry node modifier
+    :type target_modifier: bpy.types.Modifier
+    """
+    for input_index, inpt in enumerate(get_node_group_inputs(target_modifier.node_group)):
+        dumped_value = dumped_modifier['inputs'][input_index]
+        input_value = target_modifier[inpt.identifier]
+        if isinstance(input_value, SUPPORTED_GEOMETRY_NODE_PARAMETERS):
+            target_modifier[inpt.identifier] = dumped_value
+        elif hasattr(input_value, 'to_list'):
+            for index in range(len(input_value)):
+                input_value[index] = dumped_value[index]
+        elif inpt.type in ['COLLECTION', 'OBJECT']:
+            target_modifier[inpt.identifier] = get_datablock_from_uuid(
+                dumped_value, None)

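
The inputs are dumped positionally, one entry per non-ignored group input in group order, which is why the loader pairs them up with enumerate() instead of names. Illustratively:

    # dump_modifier_geometry_node_inputs(modifier) might return
    # [0.5,                 # float parameter, stored as-is
    #  '3fe6...-uuid',      # bpy.types.ID input, stored by uuid
    #  [1.0, 0.0, 0.0]]     # vector-like input, stored via to_list()
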

 def load_pose(target_bone, data):
@ -64,7 +204,7 @@ def find_data_from_name(name=None):
         logging.warning(
             "Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
     elif bpy.app.version[1] >= 91 and name in bpy.data.volumes.keys():
-        # Only supported since 2.91
+        # Only supported since 2.91
         instance = bpy.data.volumes[name]
     return instance

@ -81,120 +221,362 @@ def _is_editmode(object: bpy.types.Object) -> bool:
             child_data.is_editmode)


-def find_textures_dependencies(collection):
-    """ Check collection
+def find_textures_dependencies(modifiers: bpy.types.bpy_prop_collection) -> [bpy.types.Texture]:
+    """ Find textures lying in a modifier stack
+
+    :arg modifiers: modifiers collection
+    :type modifiers: bpy.types.bpy_prop_collection
+    :return: list of bpy.types.Texture pointers
     """
     textures = []
-    for item in collection:
-        for attr in dir(item):
-            inst = getattr(item, attr)
-            if issubclass(type(inst), bpy.types.Texture) and inst is not None:
-                textures.append(inst)
+    for mod in modifiers:
+        modifier_attributes = [getattr(mod, attr_name)
+                               for attr_name in mod.bl_rna.properties.keys()]
+        for attr in modifier_attributes:
+            if issubclass(type(attr), bpy.types.Texture) and attr is not None:
+                textures.append(attr)

     return textures


-class BlObject(BlDatablock):
+def find_geometry_nodes_dependencies(modifiers: bpy.types.bpy_prop_collection) -> [bpy.types.NodeTree]:
+    """ Find geometry nodes dependencies from a modifier stack
+
+    :arg modifiers: modifiers collection
+    :type modifiers: bpy.types.bpy_prop_collection
+    :return: list of bpy.types.NodeTree pointers
+    """
+    dependencies = []
+    for mod in modifiers:
+        if mod.type == 'NODES' and mod.node_group:
+            dependencies.append(mod.node_group)
+            # for inpt in get_node_group_inputs(mod.node_group):
+            #     parameter = mod.get(inpt.identifier)
+            #     if parameter and isinstance(parameter, bpy.types.ID):
+            #         dependencies.append(parameter)
+
+    return dependencies
+
+
+def dump_vertex_groups(src_object: bpy.types.Object) -> dict:
+    """ Dump an object's vertex groups
+
+    :param src_object: dump vertex groups of this object
+    :type src_object: bpy.types.Object
+    """
+    if isinstance(src_object.data, bpy.types.GreasePencil):
+        logging.warning(
+            "Grease pencil vertex groups are not supported yet. More info: https://gitlab.com/slumber/multi-user/-/issues/161")
+    else:
+        points_attr = 'vertices' if isinstance(
+            src_object.data, bpy.types.Mesh) else 'points'
+        dumped_vertex_groups = {}
+
+        # Vertex group metadata
+        for vg in src_object.vertex_groups:
+            dumped_vertex_groups[vg.index] = {
+                'name': vg.name,
+                'vertices': []
+            }
+
+        # Vertex group assignation
+        for vert in getattr(src_object.data, points_attr):
+            for vg in vert.groups:
+                vertices = dumped_vertex_groups.get(vg.group)['vertices']
+                vertices.append((vert.index, vg.weight))
+
+        return dumped_vertex_groups
+
+
+def load_vertex_groups(dumped_vertex_groups: dict, target_object: bpy.types.Object):
+    """ Load object vertex groups
+
+    :param dumped_vertex_groups: vertex_groups to load
+    :type dumped_vertex_groups: dict
+    :param target_object: object to load the vertex groups into
+    :type target_object: bpy.types.Object
+    """
+    target_object.vertex_groups.clear()
+    for vg in dumped_vertex_groups.values():
+        vertex_group = target_object.vertex_groups.new(name=vg['name'])
+        for index, weight in vg['vertices']:
+            vertex_group.add([index], weight, 'REPLACE')

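
dump_vertex_groups() keys the result by group index and stores sparse (vertex_index, weight) pairs, which load_vertex_groups() replays with add(..., 'REPLACE'). An illustrative payload:

    dumped_vertex_groups = {
        0: {'name': 'Group',     'vertices': [(0, 1.0), (2, 0.25)]},
        1: {'name': 'Group.001', 'vertices': [(1, 0.5)]},
    }
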
+def dump_shape_keys(target_key: bpy.types.Key) -> dict:
+    """ Dump the target shape_keys datablock to a dict using numpy
+
+    :param target_key: target key datablock
+    :type target_key: bpy.types.Key
+    :return: dict
+    """
+    dumped_key_blocks = []
+    dumper = Dumper()
+    dumper.include_filter = [
+        'name',
+        'mute',
+        'value',
+        'slider_min',
+        'slider_max',
+    ]
+    for key in target_key.key_blocks:
+        dumped_key_block = dumper.dump(key)
+        dumped_key_block['data'] = np_dump_collection(key.data, ['co'])
+        dumped_key_block['relative_key'] = key.relative_key.name
+        dumped_key_blocks.append(dumped_key_block)
+
+    return {
+        'reference_key': target_key.reference_key.name,
+        'use_relative': target_key.use_relative,
+        'key_blocks': dumped_key_blocks,
+        'animation_data': dump_animation_data(target_key)
+    }
+
+
+def load_shape_keys(dumped_shape_keys: dict, target_object: bpy.types.Object):
+    """ Load dumped shape_keys data onto a target object using numpy
+
+    :param dumped_shape_keys: src key data
+    :type dumped_shape_keys: dict
+    :param target_object: object used to load the shapekeys data onto
+    :type target_object: bpy.types.Object
+    """
+    loader = Loader()
+    # Remove existing ones
+    target_object.shape_key_clear()
+
+    # Create keys and load vertices coords
+    dumped_key_blocks = dumped_shape_keys.get('key_blocks')
+    for dumped_key_block in dumped_key_blocks:
+        key_block = target_object.shape_key_add(name=dumped_key_block['name'])
+
+        loader.load(key_block, dumped_key_block)
+        np_load_collection(dumped_key_block['data'], key_block.data, ['co'])
+
+    # Load relative key after all
+    for dumped_key_block in dumped_key_blocks:
+        relative_key_name = dumped_key_block.get('relative_key')
+        key_name = dumped_key_block.get('name')
+
+        target_keyblock = target_object.data.shape_keys.key_blocks[key_name]
+        relative_key = target_object.data.shape_keys.key_blocks[relative_key_name]
+
+        target_keyblock.relative_key = relative_key
+
+    # Shape keys animation data
+    anim_data = dumped_shape_keys.get('animation_data')
+
+    if anim_data:
+        load_animation_data(anim_data, target_object.data.shape_keys)

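
load_shape_keys() is deliberately two-pass: relative_key can point at any other key block, so every block must exist before the references are wired up. A minimal sketch of the dumped structure it consumes (values illustrative):

    dumped_shape_keys = {
        'reference_key': 'Basis',
        'use_relative': True,
        'animation_data': None,
        'key_blocks': [
            {'name': 'Basis', 'value': 0.0, 'relative_key': 'Basis',
             'data': {'co': [...]}},   # per-point coordinates via np_dump_collection
            {'name': 'Smile', 'value': 1.0, 'relative_key': 'Basis',
             'data': {'co': [...]}},
        ],
    }
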

+def dump_modifiers(modifiers: bpy.types.bpy_prop_collection) -> list:
+    """ Dump all modifiers of a modifier collection into a list
+
+    :param modifiers: modifiers
+    :type modifiers: bpy.types.bpy_prop_collection
+    :return: list
+    """
+    dumped_modifiers = []
+    dumper = Dumper()
+    dumper.depth = 1
+    dumper.exclude_filter = ['is_active']
+
+    for modifier in modifiers:
+        dumped_modifier = dumper.dump(modifier)
+        # hack to dump geometry nodes inputs
+        if modifier.type == 'NODES':
+            dumped_inputs = dump_modifier_geometry_node_inputs(
+                modifier)
+            dumped_modifier['inputs'] = dumped_inputs
+
+        elif modifier.type == 'PARTICLE_SYSTEM':
+            dumper.exclude_filter = [
+                "is_edited",
+                "is_editable",
+                "is_global_hair"
+            ]
+            dumped_modifier['particle_system'] = dumper.dump(modifier.particle_system)
+            dumped_modifier['particle_system']['settings_uuid'] = modifier.particle_system.settings.uuid
+
+        elif modifier.type in ['SOFT_BODY', 'CLOTH']:
+            dumped_modifier['settings'] = dumper.dump(modifier.settings)
+        elif modifier.type == 'UV_PROJECT':
+            dumped_modifier['projectors'] = [p.object.name for p in modifier.projectors if p and p.object]
+
+        dumped_modifiers.append(dumped_modifier)
+    return dumped_modifiers
+
+
+def dump_constraints(constraints: bpy.types.bpy_prop_collection) -> list:
+    """ Dump all constraints to a list
+
+    :param constraints: constraints
+    :type constraints: bpy.types.bpy_prop_collection
+    :return: list
+    """
+    dumper = Dumper()
+    dumper.depth = 2
+    dumper.include_filter = None
+    dumped_constraints = []
+    for constraint in constraints:
+        dumped_constraints.append(dumper.dump(constraint))
+    return dumped_constraints
+
+
+def load_constraints(dumped_constraints: list, constraints: bpy.types.bpy_prop_collection):
+    """ Load dumped constraints
+
+    :param dumped_constraints: list of constraints to load
+    :type dumped_constraints: list
+    :param constraints: constraints
+    :type constraints: bpy.types.bpy_prop_collection
+    """
+    loader = Loader()
+    constraints.clear()
+    for dumped_constraint in dumped_constraints:
+        constraint_type = dumped_constraint.get('type')
+        new_constraint = constraints.new(constraint_type)
+        loader.load(new_constraint, dumped_constraint)
+
+
+def load_modifiers(dumped_modifiers: list, modifiers: bpy.types.bpy_prop_collection):
+    """ Load dumped modifiers into a modifier collection
+
+    :param dumped_modifiers: list of modifiers to load
+    :type dumped_modifiers: list
+    :param modifiers: modifiers
+    :type modifiers: bpy.types.bpy_prop_collection
+    """
+    loader = Loader()
+    modifiers.clear()
+    for dumped_modifier in dumped_modifiers:
+        name = dumped_modifier.get('name')
+        mtype = dumped_modifier.get('type')
+        loaded_modifier = modifiers.new(name, mtype)
+        loader.load(loaded_modifier, dumped_modifier)
+
+        if loaded_modifier.type == 'NODES':
+            load_modifier_geometry_node_inputs(dumped_modifier, loaded_modifier)
+        elif loaded_modifier.type == 'PARTICLE_SYSTEM':
+            default = loaded_modifier.particle_system.settings
+            dumped_particles = dumped_modifier['particle_system']
+            loader.load(loaded_modifier.particle_system, dumped_particles)
+
+            settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
+            if settings:
+                loaded_modifier.particle_system.settings = settings
+                # Hack to remove the default generated particle settings
+                if not default.uuid:
+                    bpy.data.particles.remove(default)
+        elif loaded_modifier.type in ['SOFT_BODY', 'CLOTH']:
+            loader.load(loaded_modifier.settings, dumped_modifier['settings'])
+        elif loaded_modifier.type == 'UV_PROJECT':
+            for projector_index, projector_object in enumerate(dumped_modifier['projectors']):
+                target_object = bpy.data.objects.get(projector_object)
+                if target_object:
+                    loaded_modifier.projectors[projector_index].object = target_object
+                else:
+                    logging.error(f"Couldn't load projector target object {projector_object}")


+def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_prop_collection):
+    """ Load modifiers custom data not managed by the dump_anything loader
+
+    :param dumped_modifiers: modifiers to load
+    :type dumped_modifiers: dict
+    :param modifiers: target modifiers collection
+    :type modifiers: bpy.types.bpy_prop_collection
+    """
+    loader = Loader()
+
+    for modifier in modifiers:
+        dumped_modifier = dumped_modifiers.get(modifier.name)


-class BlObject(BlDatablock):
+class BlObject(ReplicatedDatablock):
     bl_id = "objects"
     bl_class = bpy.types.Object
-    bl_delay_refresh = 1
-    bl_delay_apply = 1
-    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'OBJECT_DATA'
     bl_reload_parent = False

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
-        instance = None
-
-        if self.is_library:
-            with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
-                targetData.objects = [
-                    name for name in sourceData.objects if name == self.data['name']]
-
-            instance = bpy.data.objects[self.data['name']]
-            instance.uuid = self.uuid
-            return instance
-
         # TODO: refactoring
         object_name = data.get("name")
         data_uuid = data.get("data_uuid")
         data_id = data.get("data")
+        data_type = data.get("type")

         object_data = get_datablock_from_uuid(
             data_uuid,
             find_data_from_name(data_id),
             ignore=['images'])  # TODO: use resolve_from_id
-        instance = bpy.data.objects.new(object_name, object_data)
-        instance.uuid = self.uuid
-
-        return instance
+        if data_type != 'EMPTY' and object_data is None:
+            raise Exception(f"Failed to load object {data['name']}")

-    def _load_implementation(self, data, target):
+        return bpy.data.objects.new(object_name, object_data)
+
+    @staticmethod
+    def load(data: dict, datablock: object):
         loader = Loader()

+        load_animation_data(data.get('animation_data'), datablock)
         data_uuid = data.get("data_uuid")
         data_id = data.get("data")

-        if target.data and (target.data.name != data_id):
-            target.data = get_datablock_from_uuid(
+        if datablock.data and (datablock.data.name != data_id):
+            datablock.data = get_datablock_from_uuid(
                 data_uuid, find_data_from_name(data_id), ignore=['images'])

         # vertex groups
-        if 'vertex_groups' in data:
-            target.vertex_groups.clear()
-            for vg in data['vertex_groups']:
-                vertex_group = target.vertex_groups.new(name=vg['name'])
-                point_attr = 'vertices' if 'vertices' in vg else 'points'
-                for vert in vg[point_attr]:
-                    vertex_group.add(
-                        [vert['index']], vert['weight'], 'REPLACE')
+        vertex_groups = data.get('vertex_groups', None)
+        if vertex_groups:
+            load_vertex_groups(vertex_groups, datablock)

+        object_data = datablock.data
+
         # SHAPE KEYS
-        if 'shape_keys' in data:
-            target.shape_key_clear()
-
-            object_data = target.data
-
-            # Create keys and load vertices coords
-            for key_block in data['shape_keys']['key_blocks']:
-                key_data = data['shape_keys']['key_blocks'][key_block]
-                target.shape_key_add(name=key_block)
-
-                loader.load(
-                    target.data.shape_keys.key_blocks[key_block], key_data)
-                for vert in key_data['data']:
-                    target.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
-
-            # Load relative key after all
-            for key_block in data['shape_keys']['key_blocks']:
-                reference = data['shape_keys']['key_blocks'][key_block]['relative_key']
-
-                target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference]
+        shape_keys = data.get('shape_keys')
+        if shape_keys:
+            load_shape_keys(shape_keys, datablock)

         # Load transformation data
-        loader.load(target, data)
+        loader.load(datablock, data)

-        loader.load(target.display, data['display'])
+        # Object display fields
+        if 'display' in data:
+            loader.load(datablock.display, data['display'])

+        # Parenting
+        parent_id = data.get('parent_uid')
+        if parent_id:
+            parent = get_datablock_from_uuid(parent_id[0], bpy.data.objects[parent_id[1]])
+            # Avoid reloading
+            if datablock.parent != parent and parent is not None:
+                datablock.parent = parent
+        elif datablock.parent:
+            datablock.parent = None

         # Pose
         if 'pose' in data:
-            if not target.pose:
+            if not datablock.pose:
                 raise Exception('No pose data yet (Fixed in a near future)')
             # Bone groups
             for bg_name in data['pose']['bone_groups']:
                 bg_data = data['pose']['bone_groups'].get(bg_name)
-                bg_target = target.pose.bone_groups.get(bg_name)
+                bg_target = datablock.pose.bone_groups.get(bg_name)

                 if not bg_target:
-                    bg_target = target.pose.bone_groups.new(name=bg_name)
+                    bg_target = datablock.pose.bone_groups.new(name=bg_name)

                 loader.load(bg_target, bg_data)
-            # target.pose.bone_groups.get
+            # datablock.pose.bone_groups.get

             # Bones
             for bone in data['pose']['bones']:
-                target_bone = target.pose.bones.get(bone)
+                target_bone = datablock.pose.bones.get(bone)
                 bone_data = data['pose']['bones'].get(bone)

                 if 'constraints' in bone_data.keys():
@ -203,20 +585,50 @@ class BlObject(BlDatablock):
                 load_pose(target_bone, bone_data)

                 if 'bone_group_index' in bone_data.keys():
-                    target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']]
+                    target_bone.bone_group = datablock.pose.bone_groups[bone_data['bone_group_index']]

         # TODO: find another way...
-        if target.empty_display_type == "IMAGE":
+        if datablock.empty_display_type == "IMAGE":
             img_uuid = data.get('data_uuid')
-            if target.data is None and img_uuid:
-                target.data = get_datablock_from_uuid(img_uuid, None)
+            if datablock.data is None and img_uuid:
+                datablock.data = get_datablock_from_uuid(img_uuid, None)

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+        if hasattr(object_data, 'skin_vertices') \
+                and object_data.skin_vertices \
+                and 'skin_vertices' in data:
+            for index, skin_data in enumerate(object_data.skin_vertices):
+                np_load_collection(
+                    data['skin_vertices'][index],
+                    skin_data.data,
+                    SKIN_DATA)

-        if _is_editmode(instance):
-            if self.preferences.sync_flags.sync_during_editmode:
-                instance.update_from_editmode()
+        if hasattr(datablock, 'cycles_visibility') \
+                and 'cycles_visibility' in data:
+            loader.load(datablock.cycles_visibility, data['cycles_visibility'])

+        if hasattr(datablock, 'modifiers'):
+            load_modifiers(data['modifiers'], datablock.modifiers)

+        constraints = data.get('constraints')
+        if constraints:
+            load_constraints(constraints, datablock.constraints)

+        # PHYSICS
+        load_physics(data, datablock)

+        transform = data.get('transforms', None)
+        if transform:
+            datablock.matrix_parent_inverse = mathutils.Matrix(
+                transform['matrix_parent_inverse'])
+            datablock.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
+            datablock.matrix_local = mathutils.Matrix(transform['matrix_local'])

+
+    @staticmethod
+    def dump(datablock: object) -> dict:
+        if _is_editmode(datablock):
+            if get_preferences().sync_flags.sync_during_editmode:
+                datablock.update_from_editmode()
             else:
                 raise ContextError("Object is in edit-mode.")

@ -225,9 +637,7 @@ class BlObject(BlDatablock):
         dumper.include_filter = [
             "name",
             "rotation_mode",
-            "parent",
             "data",
-            "children",
             "library",
             "empty_display_type",
             "empty_display_size",
@ -241,8 +651,6 @@ class BlObject(BlDatablock):
             "color",
             "instance_collection",
             "instance_type",
-            "location",
-            "scale",
             'lock_location',
             'lock_rotation',
             'lock_scale',
@ -257,30 +665,36 @@ class BlObject(BlDatablock):
             'show_texture_space',
             'show_in_front',
             'type',
-            'rotation_quaternion' if instance.rotation_mode == 'QUATERNION' else 'rotation_euler',
+            'parent_type',
+            'parent_bone',
+            'track_axis',
+            'up_axis',
         ]

-        data = dumper.dump(instance)
-
+        data = dumper.dump(datablock)
+        data['animation_data'] = dump_animation_data(datablock)
+        dumper.include_filter = [
+            'matrix_parent_inverse',
+            'matrix_local',
+            'matrix_basis']
+        data['transforms'] = dumper.dump(datablock)
         dumper.include_filter = [
             'show_shadows',
         ]
-        data['display'] = dumper.dump(instance.display)
+        data['display'] = dumper.dump(datablock.display)

-        data['data_uuid'] = getattr(instance.data, 'uuid', None)
-        if self.is_library:
-            return data
+        data['data_uuid'] = getattr(datablock.data, 'uuid', None)

+        # PARENTING
+        if datablock.parent:
+            data['parent_uid'] = (datablock.parent.uuid, datablock.parent.name)

         # MODIFIERS
-        modifiers = getattr(instance,'modifiers', None )
-        if modifiers:
-            dumper.include_filter = None
-            dumper.depth = 1
-            data["modifiers"] = {}
-            for index, modifier in enumerate(modifiers):
-                data["modifiers"][modifier.name] = dumper.dump(modifier)
+        modifiers = getattr(datablock, 'modifiers', None)
+        if hasattr(datablock, 'modifiers'):
+            data['modifiers'] = dump_modifiers(modifiers)

-        gp_modifiers = getattr(instance, 'grease_pencil_modifiers', None)
+        gp_modifiers = getattr(datablock, 'grease_pencil_modifiers', None)

         if gp_modifiers:
             dumper.include_filter = None
@ -301,16 +715,16 @@ class BlObject(BlDatablock):
                         'location']
                     gp_mod_data['curve'] = curve_dumper.dump(modifier.curve)


         # CONSTRAINTS
-        if hasattr(instance, 'constraints'):
-            dumper.depth = 3
-            data["constraints"] = dumper.dump(instance.constraints)
+        if hasattr(datablock, 'constraints'):
+            data["constraints"] = dump_constraints(datablock.constraints)

         # POSE
-        if hasattr(instance, 'pose') and instance.pose:
+        if hasattr(datablock, 'pose') and datablock.pose:
             # BONES
             bones = {}
-            for bone in instance.pose.bones:
+            for bone in datablock.pose.bones:
                 bones[bone.name] = {}
                 dumper.depth = 1
                 rotation = 'rotation_quaternion' if bone.rotation_mode == 'QUATERNION' else 'rotation_euler'
@ -335,7 +749,7 @@ class BlObject(BlDatablock):

             # GROUPS
             bone_groups = {}
-            for group in instance.pose.bone_groups:
+            for group in datablock.pose.bone_groups:
                 dumper.depth = 3
                 dumper.include_filter = [
                     'name',
@ -344,87 +758,75 @@ class BlObject(BlDatablock):
                 bone_groups[group.name] = dumper.dump(group)
             data['pose']['bone_groups'] = bone_groups

-        # CHILDS
-        if len(instance.children) > 0:
-            childs = []
-            for child in instance.children:
-                childs.append(child.name)
-
-            data["children"] = childs
-
-        # VERTEx GROUP
-        if len(instance.vertex_groups) > 0:
-            points_attr = 'vertices' if isinstance(
-                instance.data, bpy.types.Mesh) else 'points'
-            vg_data = []
-            for vg in instance.vertex_groups:
-                vg_idx = vg.index
-                dumped_vg = {}
-                dumped_vg['name'] = vg.name
-
-                vertices = []
-
-                for i, v in enumerate(getattr(instance.data, points_attr)):
-                    for vg in v.groups:
-                        if vg.group == vg_idx:
-                            vertices.append({
-                                'index': i,
-                                'weight': vg.weight
-                            })
-
-                dumped_vg['vertices'] = vertices
-
-                vg_data.append(dumped_vg)
-
-            data['vertex_groups'] = vg_data
+        # VERTEX GROUPS
+        if len(datablock.vertex_groups) > 0:
+            data['vertex_groups'] = dump_vertex_groups(datablock)

         # SHAPE KEYS
-        object_data = instance.data
+        object_data = datablock.data
         if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
-            dumper = Dumper()
-            dumper.depth = 2
+            data['shape_keys'] = dump_shape_keys(object_data.shape_keys)

+        # SKIN VERTICES
+        if hasattr(object_data, 'skin_vertices') and object_data.skin_vertices:
+            skin_vertices = list()
+            for skin_data in object_data.skin_vertices:
+                skin_vertices.append(
+                    np_dump_collection(skin_data.data, SKIN_DATA))
+            data['skin_vertices'] = skin_vertices
+
+        # CYCLE SETTINGS
+        if hasattr(datablock, 'cycles_visibility'):
             dumper.include_filter = [
-                'reference_key',
-                'use_relative'
+                'camera',
+                'diffuse',
+                'glossy',
+                'transmission',
+                'scatter',
+                'shadow',
             ]
-            data['shape_keys'] = dumper.dump(object_data.shape_keys)
-            data['shape_keys']['reference_key'] = object_data.shape_keys.reference_key.name
-            key_blocks = {}
-            for key in object_data.shape_keys.key_blocks:
-                dumper.depth = 3
-                dumper.include_filter = [
-                    'name',
-                    'data',
-                    'mute',
-                    'value',
-                    'slider_min',
-                    'slider_max',
-                    'data',
-                    'co'
-                ]
-                key_blocks[key.name] = dumper.dump(key)
-                key_blocks[key.name]['relative_key'] = key.relative_key.name
-            data['shape_keys']['key_blocks'] = key_blocks
+            data['cycles_visibility'] = dumper.dump(datablock.cycles_visibility)

+        # PHYSICS
+        data.update(dump_physics(datablock))

         return data

-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         deps = []

         # Avoid Empty case
-        if self.instance.data:
-            deps.append(self.instance.data)
-        if len(self.instance.children) > 0:
-            deps.extend(list(self.instance.children))
+        if datablock.data:
+            deps.append(datablock.data)

-        if self.is_library:
-            deps.append(self.instance.library)
+        # Particle systems
+        for particle_slot in datablock.particle_systems:
+            deps.append(particle_slot.settings)

-        if self.instance.instance_type == 'COLLECTION':
+        if datablock.parent:
+            deps.append(datablock.parent)
+
+        if datablock.instance_type == 'COLLECTION':
             # TODO: uuid based
-            deps.append(self.instance.instance_collection)
+            deps.append(datablock.instance_collection)

-        if self.instance.modifiers:
-            deps.extend(find_textures_dependencies(self.instance.modifiers))
+        if datablock.modifiers:
+            deps.extend(find_textures_dependencies(datablock.modifiers))
+            deps.extend(find_geometry_nodes_dependencies(datablock.modifiers))

+        if hasattr(datablock.data, 'shape_keys') and datablock.data.shape_keys:
+            deps.extend(resolve_animation_dependencies(datablock.data.shape_keys))

+        deps.extend(resolve_animation_dependencies(datablock))

         return deps


+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.objects)

+
+_type = bpy.types.Object
+_class = BlObject
multi_user/bl_types/bl_particle.py (new file, 103 lines)
@ -0,0 +1,103 @@
+import bpy
+import mathutils
+
+from . import dump_anything
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
+
+
+def dump_textures_slots(texture_slots: bpy.types.bpy_prop_collection) -> list:
+    """ Dump every texture slot collection as the form:
+        [(index, slot_texture_uuid, slot_texture_name), (), ...]
+    """
+    dumped_slots = []
+    for index, slot in enumerate(texture_slots):
+        if slot and slot.texture:
+            dumped_slots.append((index, slot.texture.uuid, slot.texture.name))
+
+    return dumped_slots
+
+
+def load_texture_slots(dumped_slots: list, target_slots: bpy.types.bpy_prop_collection):
+    """
+    """
+    for index, slot in enumerate(target_slots):
+        if slot:
+            target_slots.clear(index)
+
+    for index, slot_uuid, slot_name in dumped_slots:
+        target_slots.create(index).texture = get_datablock_from_uuid(
+            slot_uuid, slot_name
+        )
+
+
+IGNORED_ATTR = [
+    "is_embedded_data",
+    "is_evaluated",
+    "is_fluid",
+    "is_library_indirect",
+    "users"
+]
+
+
+class BlParticle(ReplicatedDatablock):
+    bl_id = "particles"
+    bl_class = bpy.types.ParticleSettings
+    bl_icon = "PARTICLES"
+    bl_check_common = False
+    bl_reload_parent = False
+
+    @staticmethod
+    def construct(data: dict) -> object:
+        return bpy.data.particles.new(data["name"])
+
+    @staticmethod
+    def load(data: dict, datablock: object):
+        load_animation_data(data.get('animation_data'), datablock)
+        dump_anything.load(datablock, data)
+
+        dump_anything.load(datablock.effector_weights, data["effector_weights"])
+
+        # Force field
+        force_field_1 = data.get("force_field_1", None)
+        if force_field_1:
+            dump_anything.load(datablock.force_field_1, force_field_1)
+
+        force_field_2 = data.get("force_field_2", None)
+        if force_field_2:
+            dump_anything.load(datablock.force_field_2, force_field_2)
+
+        # Texture slots
+        load_texture_slots(data["texture_slots"], datablock.texture_slots)
+
+    @staticmethod
+    def dump(datablock: object) -> dict:
+        dumper = dump_anything.Dumper()
+        dumper.depth = 1
+        dumper.exclude_filter = IGNORED_ATTR
+        data = dumper.dump(datablock)
+
+        # Particle effectors
+        data["effector_weights"] = dumper.dump(datablock.effector_weights)
+        if datablock.force_field_1:
+            data["force_field_1"] = dumper.dump(datablock.force_field_1)
+        if datablock.force_field_2:
+            data["force_field_2"] = dumper.dump(datablock.force_field_2)
+
+        # Texture slots
+        data["texture_slots"] = dump_textures_slots(datablock.texture_slots)
+        data['animation_data'] = dump_animation_data(datablock)
+        return data
+
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.particles)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        deps = [t.texture for t in datablock.texture_slots if t and t.texture]
+        deps.extend(resolve_animation_dependencies(datablock))
+        return deps
+
+
+_type = bpy.types.ParticleSettings
+_class = BlParticle
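
Texture slots are dumped as sparse (index, uuid, name) tuples, so empty slots cost nothing and load_texture_slots() can recreate each slot at its original index. Illustratively:

    data['texture_slots'] = [
        (0, 'a1b2...-uuid', 'NoiseTex'),   # slot 0 occupied
        (5, 'c3d4...-uuid', 'CloudsTex'),  # slots 1-4 were empty and are skipped
    ]
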
@ -17,16 +17,26 @@


 import logging
+from pathlib import Path
+from uuid import uuid4
+import re

 import bpy
 import mathutils
-from deepdiff import DeepDiff
+from deepdiff import DeepDiff, Delta
 from replication.constants import DIFF_JSON, MODIFIED
+from replication.protocol import ReplicatedDatablock

+from ..utils import flush_history, get_preferences
+from .bl_action import (dump_animation_data, load_animation_data,
+                        resolve_animation_dependencies)
+from .node_tree import (get_node_tree_dependencies, load_node_tree,
+                        dump_node_tree)
 from .bl_collection import (dump_collection_children, dump_collection_objects,
                             load_collection_childrens, load_collection_objects,
                             resolve_collection_dependencies)
-from .bl_datablock import BlDatablock
+from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
+from .bl_file import get_filepath
 from .dump_anything import Dumper, Loader

 RENDER_SETTINGS = [
@ -265,76 +275,199 @@ VIEW_SETTINGS = [
 ]


+def dump_sequence(sequence: bpy.types.Sequence) -> dict:
+    """ Dump a sequence to a dict
+
+    :arg sequence: sequence to dump
+    :type sequence: bpy.types.Sequence
+    :return dict:
+    """
+    dumper = Dumper()
+    dumper.exclude_filter = [
+        'lock',
+        'select',
+        'select_left_handle',
+        'select_right_handle',
+        'strobe'
+    ]
+    dumper.depth = 1
+    data = dumper.dump(sequence)
+
+    # TODO: Support multiple images
+    if sequence.type == 'IMAGE':
+        data['filenames'] = [e.filename for e in sequence.elements]
+
+    # Effect strip inputs
+    input_count = getattr(sequence, 'input_count', None)
+    if input_count:
+        for n in range(input_count):
+            input_name = f"input_{n+1}"
+            data[input_name] = getattr(sequence, input_name).name
+
+    return data

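
Effect strips reference their inputs by name: for a strip with input_count == 2 the dump carries input_1/input_2, which load_sequence() below maps onto the seq1/seq2 keyword arguments of sequences.new_effect(). An illustrative dump of a cross effect strip:

    {
        'name': 'Cross',
        'type': 'CROSS',
        'channel': 3,
        'frame_start': 1,
        'input_count': 2,
        'input_1': 'Clip A',   # names of the two source strips
        'input_2': 'Clip B',
    }
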
+def load_sequence(sequence_data: dict,
+                  sequence_editor: bpy.types.SequenceEditor):
+    """ Load a sequence from dumped data
+
+    :arg sequence_data: sequence to load
+    :type sequence_data: dict
+    :arg sequence_editor: root sequence editor
+    :type sequence_editor: bpy.types.SequenceEditor
+    """
+    strip_type = sequence_data.get('type')
+    strip_name = sequence_data.get('name')
+    strip_channel = sequence_data.get('channel')
+    strip_frame_start = sequence_data.get('frame_start')
+
+    sequence = sequence_editor.sequences_all.get(strip_name, None)
+
+    if sequence is None:
+        if strip_type == 'SCENE':
+            strip_scene = bpy.data.scenes.get(sequence_data.get('scene'))
+            sequence = sequence_editor.sequences.new_scene(strip_name,
+                                                           strip_scene,
+                                                           strip_channel,
+                                                           strip_frame_start)
+        elif strip_type == 'MOVIE':
+            filepath = get_filepath(Path(sequence_data['filepath']).name)
+            sequence = sequence_editor.sequences.new_movie(strip_name,
+                                                           filepath,
+                                                           strip_channel,
+                                                           strip_frame_start)
+        elif strip_type == 'SOUND':
+            filepath = bpy.data.sounds[sequence_data['sound']].filepath
+            sequence = sequence_editor.sequences.new_sound(strip_name,
+                                                           filepath,
+                                                           strip_channel,
+                                                           strip_frame_start)
+        elif strip_type == 'IMAGE':
+            images_name = sequence_data.get('filenames')
+            filepath = get_filepath(images_name[0])
+            sequence = sequence_editor.sequences.new_image(strip_name,
+                                                           filepath,
+                                                           strip_channel,
+                                                           strip_frame_start)
+            # load other images
+            if len(images_name) > 1:
+                for img_idx in range(1, len(images_name)):
+                    sequence.elements.append(images_name[img_idx])
+        else:
+            seq = {}
+
+            for i in range(sequence_data['input_count']):
+                seq[f"seq{i+1}"] = sequence_editor.sequences_all.get(
+                    sequence_data.get(f"input_{i+1}", None))
+
+            sequence = sequence_editor.sequences.new_effect(name=strip_name,
+                                                            type=strip_type,
+                                                            channel=strip_channel,
+                                                            frame_start=strip_frame_start,
+                                                            frame_end=sequence_data['frame_final_end'],
+                                                            **seq)
+
+    loader = Loader()

+    loader.exclude_filter = ['filepath', 'sound', 'filenames', 'fps']
+    loader.load(sequence, sequence_data)
+    sequence.select = False

+class BlScene(ReplicatedDatablock):
+    is_root = True
+    use_delta = True

-class BlScene(BlDatablock):
     bl_id = "scenes"
     bl_class = bpy.types.Scene
-    bl_delay_refresh = 1
-    bl_delay_apply = 1
-    bl_automatic_push = True
     bl_check_common = True
     bl_icon = 'SCENE_DATA'
     bl_reload_parent = False

-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
+    @staticmethod
+    def construct(data: dict) -> object:
+        return bpy.data.scenes.new(data["name"])

-        self.diff_method = DIFF_JSON
+    @staticmethod
+    def load(data: dict, datablock: object):
+        load_animation_data(data.get('animation_data'), datablock)

-    def _construct(self, data):
-        instance = bpy.data.scenes.new(data["name"])
-        return instance
-
-    def _load_implementation(self, data, target):
         # Load other meshes metadata
         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)

         # Load master collection
         load_collection_objects(
-            data['collection']['objects'], target.collection)
+            data['collection']['objects'], datablock.collection)
         load_collection_childrens(
-            data['collection']['children'], target.collection)
+            data['collection']['children'], datablock.collection)

         if 'world' in data.keys():
-            target.world = bpy.data.worlds[data['world']]
+            datablock.world = bpy.data.worlds[data['world']]

         # Annotation
         if 'grease_pencil' in data.keys():
-            target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
+            datablock.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]

-        if self.preferences.sync_flags.sync_render_settings:
+        if get_preferences().sync_flags.sync_render_settings:
             if 'eevee' in data.keys():
-                loader.load(target.eevee, data['eevee'])
+                loader.load(datablock.eevee, data['eevee'])

             if 'cycles' in data.keys():
-                loader.load(target.cycles, data['cycles'])
+                loader.load(datablock.cycles, data['cycles'])

             if 'render' in data.keys():
-                loader.load(target.render, data['render'])
+                loader.load(datablock.render, data['render'])

-            if 'view_settings' in data.keys():
-                loader.load(target.view_settings, data['view_settings'])
-                if target.view_settings.use_curve_mapping and \
-                        'curve_mapping' in data['view_settings']:
+            view_settings = data.get('view_settings')
+            if view_settings:
+                loader.load(datablock.view_settings, view_settings)
+                if datablock.view_settings.use_curve_mapping and \
+                        'curve_mapping' in view_settings:
                     # TODO: change this ugly fix
-                    target.view_settings.curve_mapping.white_level = data[
-                        'view_settings']['curve_mapping']['white_level']
-                    target.view_settings.curve_mapping.black_level = data[
-                        'view_settings']['curve_mapping']['black_level']
-                    target.view_settings.curve_mapping.update()
+                    datablock.view_settings.curve_mapping.white_level = view_settings['curve_mapping']['white_level']
+                    datablock.view_settings.curve_mapping.black_level = view_settings['curve_mapping']['black_level']
+                    datablock.view_settings.curve_mapping.update()

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+        # Sequencer
+        sequences = data.get('sequences')
+
+        if sequences:
+            # Create sequencer data
+            datablock.sequence_editor_create()
+            vse = datablock.sequence_editor
+
+            # Clear removed sequences
+            for seq in vse.sequences_all:
+                if seq.name not in sequences:
+                    vse.sequences.remove(seq)
+            # Load existing sequences
+            for seq_data in sequences.values():
+                load_sequence(seq_data, vse)
+        # If the sequence is no longer used, clear it
+        elif datablock.sequence_editor and not sequences:
+            datablock.sequence_editor_clear()

+        # FIXME: Find a better way after the replication big refactoring
+        # Keep other users from deleting collection objects by flushing their history

+        # Compositor
+        if data["use_nodes"]:
+            if datablock.node_tree is None:
+                datablock.use_nodes = True
+
+            load_node_tree(data['node_tree'], datablock.node_tree)
+
+        flush_history()
+
+    @staticmethod
+    def dump(datablock: object) -> dict:
+        data = {}
+        data['animation_data'] = dump_animation_data(datablock)

         # Metadata
         scene_dumper = Dumper()
         scene_dumper.depth = 1
         scene_dumper.include_filter = [
+            'use_nodes',
             'name',
             'world',
             'id',
@ -343,40 +476,40 @@ class BlScene(BlDatablock):
             'frame_end',
             'frame_step',
         ]
-        if self.preferences.sync_flags.sync_active_camera:
+        if get_preferences().sync_flags.sync_active_camera:
             scene_dumper.include_filter.append('camera')

-        data.update(scene_dumper.dump(instance))
+        data.update(scene_dumper.dump(datablock))

         # Master collection
         data['collection'] = {}
         data['collection']['children'] = dump_collection_children(
-            instance.collection)
+            datablock.collection)
         data['collection']['objects'] = dump_collection_objects(
-            instance.collection)
+            datablock.collection)

         scene_dumper.depth = 1
         scene_dumper.include_filter = None

         # Render settings
-        if self.preferences.sync_flags.sync_render_settings:
+        if get_preferences().sync_flags.sync_render_settings:
             scene_dumper.include_filter = RENDER_SETTINGS

-            data['render'] = scene_dumper.dump(instance.render)
+            data['render'] = scene_dumper.dump(datablock.render)

-            if instance.render.engine == 'BLENDER_EEVEE':
+            if datablock.render.engine == 'BLENDER_EEVEE':
                 scene_dumper.include_filter = EVEE_SETTINGS
-                data['eevee'] = scene_dumper.dump(instance.eevee)
-            elif instance.render.engine == 'CYCLES':
+                data['eevee'] = scene_dumper.dump(datablock.eevee)
+            elif datablock.render.engine == 'CYCLES':
                 scene_dumper.include_filter = CYCLES_SETTINGS
-                data['cycles'] = scene_dumper.dump(instance.cycles)
+                data['cycles'] = scene_dumper.dump(datablock.cycles)

             scene_dumper.include_filter = VIEW_SETTINGS
-            data['view_settings'] = scene_dumper.dump(instance.view_settings)
+            data['view_settings'] = scene_dumper.dump(datablock.view_settings)

-            if instance.view_settings.use_curve_mapping:
+            if datablock.view_settings.use_curve_mapping:
                 data['view_settings']['curve_mapping'] = scene_dumper.dump(
-                    instance.view_settings.curve_mapping)
+                    datablock.view_settings.curve_mapping)
                 scene_dumper.depth = 5
                 scene_dumper.include_filter = [
                     'curves',
@ -384,46 +517,101 @@ class BlScene(BlDatablock):
                     'location',
                 ]
                 data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(
-                    instance.view_settings.curve_mapping.curves)
+                    datablock.view_settings.curve_mapping.curves)

-        if instance.sequence_editor:
-            data['has_sequence'] = True
-        else:
-            data['has_sequence'] = False
+        # Sequence
+        vse = datablock.sequence_editor
+        if vse:
+            dumped_sequences = {}
+            for seq in vse.sequences_all:
+                dumped_sequences[seq.name] = dump_sequence(seq)
+            data['sequences'] = dumped_sequences

+        # Compositor
+        if datablock.use_nodes:
+            data['node_tree'] = dump_node_tree(datablock.node_tree)
+        data['animation_data'] = dump_animation_data(datablock)

         return data

-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         deps = []

         # Master Collection
-        deps.extend(resolve_collection_dependencies(self.instance.collection))
+        deps.extend(resolve_collection_dependencies(datablock.collection))

         # world
-        if self.instance.world:
-            deps.append(self.instance.world)
+        if datablock.world:
+            deps.append(datablock.world)

         # annotations
-        if self.instance.grease_pencil:
-            deps.append(self.instance.grease_pencil)
+        if datablock.grease_pencil:
+            deps.append(datablock.grease_pencil)

+        deps.extend(resolve_animation_dependencies(datablock))

         # Sequences
-        # deps.extend(list(self.instance.sequence_editor.sequences_all))
-        if self.instance.sequence_editor:
-            deps.append(self.instance.sequence_editor)
+        vse = datablock.sequence_editor
+        if vse:
+            for sequence in vse.sequences_all:
+                if sequence.type == 'MOVIE' and sequence.filepath:
+                    deps.append(Path(bpy.path.abspath(sequence.filepath)))
+                elif sequence.type == 'SOUND' and sequence.sound:
+                    deps.append(sequence.sound)
+                elif sequence.type == 'IMAGE':
+                    for elem in sequence.elements:
||||
sequence.append(
|
||||
Path(bpy.path.abspath(sequence.directory),
|
||||
elem.filename))
|
||||
|
||||
# Compositor
|
||||
if datablock.use_nodes:
|
||||
deps.extend(get_node_tree_dependencies(datablock.node_tree))
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
def diff(self):
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
name = data.get('name')
|
||||
datablock = resolve_datablock_from_uuid(uuid, bpy.data.scenes)
|
||||
if datablock is None:
|
||||
datablock = bpy.data.scenes.get(name)
|
||||
|
||||
return datablock
|
||||
|
||||
@staticmethod
|
||||
def compute_delta(last_data: dict, current_data: dict) -> Delta:
|
||||
exclude_path = []
|
||||
|
||||
if not self.preferences.sync_flags.sync_render_settings:
|
||||
if not get_preferences().sync_flags.sync_render_settings:
|
||||
exclude_path.append("root['eevee']")
|
||||
exclude_path.append("root['cycles']")
|
||||
exclude_path.append("root['view_settings']")
|
||||
exclude_path.append("root['render']")
|
||||
|
||||
if not self.preferences.sync_flags.sync_active_camera:
|
||||
if not get_preferences().sync_flags.sync_active_camera:
|
||||
exclude_path.append("root['camera']")
|
||||
|
||||
return DeepDiff(self.data, self._dump(instance=self.instance), exclude_paths=exclude_path)
|
||||
diff_params = {
|
||||
'exclude_paths': exclude_path,
|
||||
'ignore_order': True,
|
||||
'report_repetition': True
|
||||
}
|
||||
delta_params = {
|
||||
# 'mutate': True
|
||||
}
|
||||
|
||||
return Delta(
|
||||
DeepDiff(last_data,
|
||||
current_data,
|
||||
cache_size=5000,
|
||||
**diff_params),
|
||||
**delta_params)
|
||||
|
||||
|
||||
_type = bpy.types.Scene
|
||||
_class = BlScene
|
||||
|
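For context on `compute_delta` above: a minimal, self-contained sketch of how DeepDiff's `exclude_paths` keeps unsynced scene settings out of the delta. This is not part of the changeset; the dictionaries are illustrative.

import bpy  # not needed here; shown standalone
from deepdiff import DeepDiff, Delta

last_data = {'name': 'Scene', 'camera': 'Camera.001', 'render': {'fps': 24}}
current_data = {'name': 'Scene', 'camera': 'Camera.002', 'render': {'fps': 30}}

# With camera and render sync disabled, both paths are excluded,
# so the resulting diff (and therefore the Delta) is empty.
diff = DeepDiff(last_data, current_data,
                exclude_paths=["root['camera']", "root['render']"],
                ignore_order=True,
                report_repetition=True)

print(diff)         # {} -> nothing to replicate
print(Delta(diff))  # an empty, applicable delta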
@@ -1,198 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####


import bpy
import mathutils
from pathlib import Path
import logging

from .bl_file import get_filepath
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock, get_datablock_from_uuid


def dump_sequence(sequence: bpy.types.Sequence) -> dict:
    """ Dump a sequence to a dict

        :arg sequence: sequence to dump
        :type sequence: bpy.types.Sequence
        :return dict:
    """
    dumper = Dumper()
    dumper.exclude_filter = [
        'lock',
        'select',
        'select_left_handle',
        'select_right_handle',
        'strobe'
    ]
    dumper.depth = 1
    data = dumper.dump(sequence)

    # TODO: Support multiple images
    if sequence.type == 'IMAGE':
        data['filenames'] = [e.filename for e in sequence.elements]

    # Effect strip inputs
    input_count = getattr(sequence, 'input_count', None)
    if input_count:
        for n in range(input_count):
            input_name = f"input_{n+1}"
            data[input_name] = getattr(sequence, input_name).name

    return data


def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor):
    """ Load a sequence from dumped data

        :arg sequence_data: dumped sequence data
        :type sequence_data: dict
        :arg sequence_editor: root sequence editor
        :type sequence_editor: bpy.types.SequenceEditor
    """
    strip_type = sequence_data.get('type')
    strip_name = sequence_data.get('name')
    strip_channel = sequence_data.get('channel')
    strip_frame_start = sequence_data.get('frame_start')

    sequence = sequence_editor.sequences_all.get(strip_name, None)

    if sequence is None:
        if strip_type == 'SCENE':
            strip_scene = bpy.data.scenes.get(sequence_data.get('scene'))
            sequence = sequence_editor.sequences.new_scene(strip_name,
                                                           strip_scene,
                                                           strip_channel,
                                                           strip_frame_start)
        elif strip_type == 'MOVIE':
            filepath = get_filepath(Path(sequence_data['filepath']).name)
            sequence = sequence_editor.sequences.new_movie(strip_name,
                                                           filepath,
                                                           strip_channel,
                                                           strip_frame_start)
        elif strip_type == 'SOUND':
            filepath = bpy.data.sounds[sequence_data['sound']].filepath
            sequence = sequence_editor.sequences.new_sound(strip_name,
                                                           filepath,
                                                           strip_channel,
                                                           strip_frame_start)
        elif strip_type == 'IMAGE':
            images_name = sequence_data.get('filenames')
            filepath = get_filepath(images_name[0])
            sequence = sequence_editor.sequences.new_image(strip_name,
                                                           filepath,
                                                           strip_channel,
                                                           strip_frame_start)
            # load other images
            if len(images_name) > 1:
                for img_idx in range(1, len(images_name)):
                    sequence.elements.append((images_name[img_idx]))
        else:
            seq = {}

            for i in range(sequence_data['input_count']):
                seq[f"seq{i+1}"] = sequence_editor.sequences_all.get(sequence_data.get(f"input_{i+1}", None))

            sequence = sequence_editor.sequences.new_effect(name=strip_name,
                                                            type=strip_type,
                                                            channel=strip_channel,
                                                            frame_start=strip_frame_start,
                                                            frame_end=sequence_data['frame_final_end'],
                                                            **seq)

    loader = Loader()
    loader.load(sequence, sequence_data)
    sequence.select = False


class BlSequencer(BlDatablock):
    bl_id = "scenes"
    bl_class = bpy.types.SequenceEditor
    bl_delay_refresh = 1
    bl_delay_apply = 1
    bl_automatic_push = True
    bl_check_common = True
    bl_icon = 'SEQUENCE'
    bl_reload_parent = False

    def _construct(self, data):
        # Get the scene
        scene_id = data.get('name')
        scene = bpy.data.scenes.get(scene_id, None)

        # Create sequencer data
        scene.sequence_editor_clear()
        scene.sequence_editor_create()

        return scene.sequence_editor

    def resolve(self):
        scene = bpy.data.scenes.get(self.data['name'], None)
        if scene:
            if scene.sequence_editor is None:
                self.instance = self._construct(self.data)
            else:
                self.instance = scene.sequence_editor
        else:
            logging.warning("Sequencer editor scene not found")

    def _load_implementation(self, data, target):
        loader = Loader()
        # Sequencer
        sequences = data.get('sequences')
        if sequences:
            for seq in target.sequences_all:
                if seq.name not in sequences:
                    target.sequences.remove(seq)
            for seq_name, seq_data in sequences.items():
                load_sequence(seq_data, target)

    def _dump_implementation(self, data, instance=None):
        assert(instance)
        sequence_dumper = Dumper()
        sequence_dumper.depth = 1
        sequence_dumper.include_filter = [
            'proxy_storage',
        ]
        data = {}  # sequence_dumper.dump(instance)
        # Sequencer
        sequences = {}

        for seq in instance.sequences_all:
            sequences[seq.name] = dump_sequence(seq)

        data['sequences'] = sequences
        data['name'] = instance.id_data.name

        return data

    def _resolve_deps_implementation(self):
        deps = []

        for seq in self.instance.sequences_all:
            if seq.type == 'MOVIE' and seq.filepath:
                deps.append(Path(bpy.path.abspath(seq.filepath)))
            elif seq.type == 'SOUND' and seq.sound:
                deps.append(seq.sound)
            elif seq.type == 'IMAGE':
                for e in seq.elements:
                    deps.append(Path(bpy.path.abspath(seq.directory), e.filename))
        return deps
@@ -23,48 +23,59 @@ from pathlib import Path
import bpy

from .bl_file import get_filepath, ensure_unpacked
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
from .dump_anything import Dumper, Loader
+from .bl_datablock import resolve_datablock_from_uuid


-class BlSound(BlDatablock):
+class BlSound(ReplicatedDatablock):
    bl_id = "sounds"
    bl_class = bpy.types.Sound
    bl_delay_refresh = 1
    bl_delay_apply = 1
    bl_automatic_push = True
    bl_check_common = False
    bl_icon = 'SOUND'
    bl_reload_parent = False

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
        filename = data.get('filename')

        return bpy.data.sounds.load(get_filepath(filename))

-    def _load(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
        loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)

-    def diff(self):
-        return False
-
-    def _dump(self, instance=None):
-        filename = Path(instance.filepath).name
+    @staticmethod
+    def dump(datablock: object) -> dict:
+        filename = Path(datablock.filepath).name

        if not filename:
-            raise FileExistsError(instance.filepath)
+            raise FileExistsError(datablock.filepath)

        return {
            'filename': filename,
-            'name': instance.name
+            'name': datablock.name
        }

-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
        deps = []
-        if self.instance.filepath and self.instance.filepath != '<builtin>':
-            ensure_unpacked(self.instance)
-
-            deps.append(Path(bpy.path.abspath(self.instance.filepath)))
+        if datablock.filepath and datablock.filepath != '<builtin>':
+            ensure_unpacked(datablock)
+
+            deps.append(Path(bpy.path.abspath(datablock.filepath)))

        return deps

+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.sounds)
+
+    @staticmethod
+    def needs_update(datablock: object, data: dict) -> bool:
+        return False
+
+_type = bpy.types.Sound
+_class = BlSound
@@ -20,29 +20,29 @@ import bpy
import mathutils

from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


-class BlSpeaker(BlDatablock):
+class BlSpeaker(ReplicatedDatablock):
    bl_id = "speakers"
    bl_class = bpy.types.Speaker
    bl_delay_refresh = 1
    bl_delay_apply = 1
    bl_automatic_push = True
    bl_check_common = False
    bl_icon = 'SPEAKER'
    bl_reload_parent = False

-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
        loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)
+        load_animation_data(data.get('animation_data'), datablock)

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
        return bpy.data.speakers.new(data["name"])

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
-
+    @staticmethod
+    def dump(datablock: object) -> dict:
        dumper = Dumper()
        dumper.depth = 1
        dumper.include_filter = [
@@ -61,17 +61,27 @@ class BlSpeaker(BlDatablock):
            'cone_volume_outer'
        ]

-        return dumper.dump(instance)
+        data = dumper.dump(datablock)
+        data['animation_data'] = dump_animation_data(datablock)
+        return data

-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.speakers)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
        # TODO: resolve material
        deps = []

-        sound = self.instance.sound
+        sound = datablock.sound

        if sound:
            deps.append(sound)

        deps.extend(resolve_animation_dependencies(datablock))
        return deps


_type = bpy.types.Speaker
_class = BlSpeaker
@@ -20,28 +20,30 @@ import bpy
import mathutils

from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
import bpy.types as T


-class BlTexture(BlDatablock):
+class BlTexture(ReplicatedDatablock):
    bl_id = "textures"
    bl_class = bpy.types.Texture
    bl_delay_refresh = 1
    bl_delay_apply = 1
    bl_automatic_push = True
    bl_check_common = False
    bl_icon = 'TEXTURE'
    bl_reload_parent = False

-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
        loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)
        load_animation_data(data.get('animation_data'), datablock)

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
        return bpy.data.textures.new(data["name"], data["type"])

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+    @staticmethod
+    def dump(datablock: object) -> dict:

        dumper = Dumper()
        dumper.depth = 1
@@ -55,24 +57,39 @@ class BlTexture(BlDatablock):
            'name_full'
        ]

-        data = dumper.dump(instance)
-        color_ramp = getattr(instance, 'color_ramp', None)
+        data = dumper.dump(datablock)
+
+        color_ramp = getattr(datablock, 'color_ramp', None)

        if color_ramp:
            dumper.depth = 4
            data['color_ramp'] = dumper.dump(color_ramp)

        data['animation_data'] = dump_animation_data(datablock)
        return data

-    def _resolve_deps_implementation(self):
-        # TODO: resolve material
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.textures)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
        deps = []

-        image = getattr(self.instance, "image", None)
+        image = getattr(datablock, "image", None)

        if image:
            deps.append(image)

        deps.extend(resolve_animation_dependencies(datablock))

        return deps


_type = [T.WoodTexture, T.VoronoiTexture,
         T.StucciTexture, T.NoiseTexture,
         T.MusgraveTexture, T.MarbleTexture,
         T.MagicTexture, T.ImageTexture,
         T.DistortedNoiseTexture, T.CloudsTexture,
         T.BlendTexture]
_class = BlTexture
@@ -21,45 +21,24 @@ import mathutils
from pathlib import Path

from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock, get_datablock_from_uuid
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
+from .bl_material import dump_materials_slots, load_materials_slots
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


-class BlVolume(BlDatablock):
+class BlVolume(ReplicatedDatablock):
    bl_id = "volumes"
    bl_class = bpy.types.Volume
    bl_delay_refresh = 1
    bl_delay_apply = 1
    bl_automatic_push = True
    bl_check_common = False
    bl_icon = 'VOLUME_DATA'
    bl_reload_parent = False

-    def _load_implementation(self, data, target):
-        loader = Loader()
-        loader.load(target, data)
-        loader.load(target.display, data['display'])
-
-        # MATERIAL SLOTS
-        target.materials.clear()
-
-        for mat_uuid, mat_name in data["material_list"]:
-            mat_ref = None
-            if mat_uuid is not None:
-                mat_ref = get_datablock_from_uuid(mat_uuid, None)
-            else:
-                mat_ref = bpy.data.materials.get(mat_name, None)
-
-            if mat_ref is None:
-                raise Exception("Material doesn't exist")
-
-            target.materials.append(mat_ref)
-
-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
        return bpy.data.volumes.new(data["name"])

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
-
+    @staticmethod
+    def dump(datablock: object) -> dict:
        dumper = Dumper()
        dumper.depth = 1
        dumper.exclude_filter = [
@@ -73,27 +52,48 @@ class BlVolume(BlDatablock):
            'use_fake_user'
        ]

-        data = dumper.dump(instance)
+        data = dumper.dump(datablock)

-        data['display'] = dumper.dump(instance.display)
+        data['display'] = dumper.dump(datablock.display)

        # Fix material index
-        data['material_list'] = [(m.uuid, m.name) for m in instance.materials if m]
-
+        data['materials'] = dump_materials_slots(datablock.materials)
+        data['animation_data'] = dump_animation_data(datablock)
        return data

-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def load(data: dict, datablock: object):
+        load_animation_data(data.get('animation_data'), datablock)
+        loader = Loader()
+        loader.load(datablock, data)
+        loader.load(datablock.display, data['display'])
+
+        # MATERIAL SLOTS
+        src_materials = data.get('materials', None)
+        if src_materials:
+            load_materials_slots(src_materials, datablock.materials)
+
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.volumes)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
        # TODO: resolve material
        deps = []

-        external_vdb = Path(bpy.path.abspath(self.instance.filepath))
+        external_vdb = Path(bpy.path.abspath(datablock.filepath))
        if external_vdb.exists() and not external_vdb.is_dir():
            deps.append(external_vdb)

-        for material in self.instance.materials:
+        for material in datablock.materials:
            if material:
                deps.append(material)

        deps.extend(resolve_animation_dependencies(datablock))

        return deps


_type = bpy.types.Volume
_class = BlVolume
@@ -20,38 +20,40 @@ import bpy
import mathutils

from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock
-from .bl_material import (load_shader_node_tree,
-                          dump_shader_node_tree,
+from replication.protocol import ReplicatedDatablock
+from .node_tree import (load_node_tree,
+                        dump_node_tree,
                         get_node_tree_dependencies)

+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

-class BlWorld(BlDatablock):
+
+class BlWorld(ReplicatedDatablock):
    bl_id = "worlds"
    bl_class = bpy.types.World
    bl_delay_refresh = 1
    bl_delay_apply = 1
    bl_automatic_push = True
    bl_check_common = True
    bl_icon = 'WORLD_DATA'
    bl_reload_parent = False

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
        return bpy.data.worlds.new(data["name"])

-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
+        load_animation_data(data.get('animation_data'), datablock)
        loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)

        if data["use_nodes"]:
-            if target.node_tree is None:
-                target.use_nodes = True
+            if datablock.node_tree is None:
+                datablock.use_nodes = True

-            load_shader_node_tree(data['node_tree'], target.node_tree)
-
-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+            load_node_tree(data['node_tree'], datablock.node_tree)

+    @staticmethod
+    def dump(datablock: object) -> dict:
        world_dumper = Dumper()
        world_dumper.depth = 1
        world_dumper.include_filter = [
@@ -59,17 +61,27 @@ class BlWorld(BlDatablock):
            "name",
            "color"
        ]
-        data = world_dumper.dump(instance)
-        if instance.use_nodes:
-            data['node_tree'] = dump_shader_node_tree(instance.node_tree)
+        data = world_dumper.dump(datablock)
+        if datablock.use_nodes:
+            data['node_tree'] = dump_node_tree(datablock.node_tree)

        data['animation_data'] = dump_animation_data(datablock)
        return data

    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.worlds)

-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
        deps = []

-        if self.instance.use_nodes:
-            deps.extend(get_node_tree_dependencies(self.instance.node_tree))
-        if self.is_library:
-            deps.append(self.instance.library)
+        if datablock.use_nodes:
+            deps.extend(get_node_tree_dependencies(datablock.node_tree))

        deps.extend(resolve_animation_dependencies(datablock))
        return deps

_type = bpy.types.World
_class = BlWorld
@@ -465,6 +465,7 @@ class Loader:
        self.type_subset = self.match_subset_all
        self.occlude_read_only = False
        self.order = ['*']
        self.exclude_filter = []

    def load(self, dst_data, src_dumped_data):
        self._load_any(
@@ -475,7 +476,8 @@ class Loader:

    def _load_any(self, any, dump):
        for filter_function, load_function in self.type_subset:
-            if filter_function(any):
+            if filter_function(any) and \
+                    any.sub_element_name not in self.exclude_filter:
                load_function(any, dump)
                return

@@ -505,16 +507,12 @@ class Loader:
        _constructors = {
            T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]),
            T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []),
            T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"]),
            T.GpencilModifier: (CONSTRUCTOR_NEW, ["name", "type"]),
            T.Constraint: (CONSTRUCTOR_NEW, ["type"]),
        }

        destructors = {
            T.ColorRampElement: DESTRUCTOR_REMOVE,
            T.Modifier: DESTRUCTOR_CLEAR,
            T.GpencilModifier: DESTRUCTOR_CLEAR,
            T.Constraint: CONSTRUCTOR_NEW,
        }
        element_type = element.bl_rna_property.fixed_type

@@ -529,7 +527,13 @@ class Loader:
        if destructor:
            if destructor == DESTRUCTOR_REMOVE:
                collection = element.read()
-                for i in range(len(collection)-1):
+                elems_to_remove = len(collection)
+
+                # Color ramps don't allow removing all elements
+                if type(element_type) == T.ColorRampElement:
+                    elems_to_remove -= 1
+
+                for i in range(elems_to_remove):
                    collection.remove(collection[0])
            else:
                getattr(element.read(), DESTRUCTOR_CLEAR)()
@@ -588,6 +592,8 @@ class Loader:
            instance.write(bpy.data.textures.get(dump))
        elif isinstance(rna_property_type, T.ColorRamp):
            self._load_default(instance, dump)
+        elif isinstance(rna_property_type, T.NodeTree):
+            instance.write(bpy.data.node_groups.get(dump))
        elif isinstance(rna_property_type, T.Object):
            instance.write(bpy.data.objects.get(dump))
        elif isinstance(rna_property_type, T.Mesh):
@@ -600,6 +606,8 @@ class Loader:
            instance.write(bpy.data.fonts.get(dump))
        elif isinstance(rna_property_type, T.Sound):
            instance.write(bpy.data.sounds.get(dump))
+        # elif isinstance(rna_property_type, T.ParticleSettings):
+        #     instance.write(bpy.data.particles.get(dump))

    def _load_matrix(self, matrix, dump):
        matrix.write(mathutils.Matrix(dump))
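The `elems_to_remove` change above works around a Blender constraint: a color ramp must always keep at least one element, so clearing it for a reload has to leave a single stop behind. A hedged illustration (run inside Blender's Python console; the texture name is arbitrary):

import bpy

tex = bpy.data.textures.new("ramp_demo", type='BLEND')
tex.use_color_ramp = True
elements = tex.color_ramp.elements

# Removing down to a single element succeeds...
while len(elements) > 1:
    elements.remove(elements[0])

# ...but removing the last one raises, which is why the loader
# subtracts one from elems_to_remove for T.ColorRampElement.
try:
    elements.remove(elements[0])
except RuntimeError as e:
    print("expected:", e)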
multi_user/bl_types/node_tree.py (new file, 362 lines)
@@ -0,0 +1,362 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####


import bpy
import mathutils
import logging
import re

from uuid import uuid4

from .dump_anything import Loader, Dumper

from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid

IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM']
NODE_SOCKET_INDEX = re.compile(r'\[(\d*)\]')


def load_node(node_data: dict, node_tree: bpy.types.NodeTree):
    """ Load a node into a node_tree from a dict

        :arg node_data: dumped node data
        :type node_data: dict
        :arg node_tree: target node_tree
        :type node_tree: bpy.types.NodeTree
    """
    loader = Loader()
    target_node = node_tree.nodes.new(type=node_data["bl_idname"])
    target_node.select = False
    loader.load(target_node, node_data)
    image_uuid = node_data.get('image_uuid', None)
    node_tree_uuid = node_data.get('node_tree_uuid', None)

    if image_uuid and not target_node.image:
        image = resolve_datablock_from_uuid(image_uuid, bpy.data.images)
        if image is None:
            logging.error(f"Failed to find material image from uuid {image_uuid}")
        else:
            target_node.image = image

    if node_tree_uuid:
        target_node.node_tree = get_datablock_from_uuid(node_tree_uuid, None)

    inputs_data = node_data.get('inputs')
    if inputs_data:
        inputs = [i for i in target_node.inputs if i.type not in IGNORED_SOCKETS]
        for idx, inpt in enumerate(inputs):
            if idx < len(inputs_data) and hasattr(inpt, "default_value"):
                loaded_input = inputs_data[idx]
                try:
                    if inpt.type in ['OBJECT', 'COLLECTION']:
                        inpt.default_value = get_datablock_from_uuid(loaded_input, None)
                    else:
                        inpt.default_value = loaded_input
                except Exception as e:
                    logging.warning(f"Node {target_node.name} input {inpt.name} parameter not supported, skipping ({e})")
            else:
                logging.warning(f"Node {target_node.name} input length mismatch.")

    outputs_data = node_data.get('outputs')
    if outputs_data:
        outputs = [o for o in target_node.outputs if o.type not in IGNORED_SOCKETS]
        for idx, output in enumerate(outputs):
            if idx < len(outputs_data) and hasattr(output, "default_value"):
                loaded_output = outputs_data[idx]
                try:
                    if output.type in ['OBJECT', 'COLLECTION']:
                        output.default_value = get_datablock_from_uuid(loaded_output, None)
                    else:
                        output.default_value = loaded_output
                except Exception as e:
                    logging.warning(
                        f"Node {target_node.name} output {output.name} parameter not supported, skipping ({e})")
            else:
                logging.warning(
                    f"Node {target_node.name} output length mismatch.")


def dump_node(node: bpy.types.Node) -> dict:
    """ Dump a single node to a dict

        :arg node: target node
        :type node: bpy.types.Node
        :return: dict
    """

    node_dumper = Dumper()
    node_dumper.depth = 1
    node_dumper.exclude_filter = [
        "dimensions",
        "show_expanded",
        "name_full",
        "select",
        "bl_label",
        "bl_height_min",
        "bl_height_max",
        "bl_height_default",
        "bl_width_min",
        "bl_width_max",
        "type",
        "bl_icon",
        "bl_width_default",
        "bl_static_type",
        "is_active_output",
        "hide",
        "show_options",
        "show_preview",
        "show_texture",
        "outputs",
        "width_hidden",
        "image"
    ]

    dumped_node = node_dumper.dump(node)

    if node.parent:
        dumped_node['parent'] = node.parent.name

    dump_io_needed = (node.type not in ['REROUTE', 'OUTPUT_MATERIAL'])

    if dump_io_needed:
        io_dumper = Dumper()
        io_dumper.depth = 2
        io_dumper.include_filter = ["default_value"]

        if hasattr(node, 'inputs'):
            dumped_node['inputs'] = []
            inputs = [i for i in node.inputs if i.type not in IGNORED_SOCKETS]
            for idx, inpt in enumerate(inputs):
                if hasattr(inpt, 'default_value'):
                    if isinstance(inpt.default_value, bpy.types.ID):
                        dumped_input = inpt.default_value.uuid
                    else:
                        dumped_input = io_dumper.dump(inpt.default_value)

                    dumped_node['inputs'].append(dumped_input)

        if hasattr(node, 'outputs'):
            dumped_node['outputs'] = []
            for idx, output in enumerate(node.outputs):
                if output.type not in IGNORED_SOCKETS:
                    if hasattr(output, 'default_value'):
                        dumped_node['outputs'].append(
                            io_dumper.dump(output.default_value))

    if hasattr(node, 'color_ramp'):
        ramp_dumper = Dumper()
        ramp_dumper.depth = 4
        ramp_dumper.include_filter = [
            'elements',
            'alpha',
            'color',
            'position',
            'interpolation',
            'hue_interpolation',
            'color_mode'
        ]
        dumped_node['color_ramp'] = ramp_dumper.dump(node.color_ramp)
    if hasattr(node, 'mapping'):
        curve_dumper = Dumper()
        curve_dumper.depth = 5
        curve_dumper.include_filter = [
            'curves',
            'points',
            'location'
        ]
        dumped_node['mapping'] = curve_dumper.dump(node.mapping)
    if hasattr(node, 'image') and getattr(node, 'image'):
        dumped_node['image_uuid'] = node.image.uuid
    if hasattr(node, 'node_tree') and getattr(node, 'node_tree'):
        dumped_node['node_tree_uuid'] = node.node_tree.uuid
    return dumped_node


def load_links(links_data, node_tree):
    """ Load node_tree links from a list

        :arg links_data: dumped node links
        :type links_data: list
        :arg node_tree: node links collection
        :type node_tree: bpy.types.NodeTree
    """

    for link in links_data:
        input_socket = node_tree.nodes[link['to_node']
                                       ].inputs[int(link['to_socket'])]
        output_socket = node_tree.nodes[link['from_node']].outputs[int(
            link['from_socket'])]
        node_tree.links.new(input_socket, output_socket)


def dump_links(links):
    """ Dump a node_tree links collection to a list

        :arg links: node links collection
        :type links: bpy.types.NodeLinks
        :return: list
    """

    links_data = []

    for link in links:
        to_socket = NODE_SOCKET_INDEX.search(
            link.to_socket.path_from_id()).group(1)
        from_socket = NODE_SOCKET_INDEX.search(
            link.from_socket.path_from_id()).group(1)
        links_data.append({
            'to_node': link.to_node.name,
            'to_socket': to_socket,
            'from_node': link.from_node.name,
            'from_socket': from_socket,
        })

    return links_data
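A quick sketch of the `NODE_SOCKET_INDEX` trick used by `dump_links` above: `path_from_id()` yields an RNA path such as `nodes["Mix.001"].inputs[2]`, and the regex skips the quoted node name (no digits between the brackets) and captures the trailing socket index. The sample path is illustrative.

import re

NODE_SOCKET_INDEX = re.compile(r'\[(\d*)\]')

path = 'nodes["Mix.001"].inputs[2]'  # what path_from_id() typically returns
match = NODE_SOCKET_INDEX.search(path)
print(match.group(1))  # '2' -- ["Mix.001"] does not match, [2] does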
def dump_node_tree(node_tree: bpy.types.NodeTree) -> dict:
    """ Dump a node_tree to a dict, including links and nodes

        :arg node_tree: dumped node tree
        :type node_tree: bpy.types.NodeTree
        :return: dict
    """
    node_tree_data = {
        'nodes': {node.name: dump_node(node) for node in node_tree.nodes},
        'links': dump_links(node_tree.links),
        'name': node_tree.name,
        'type': type(node_tree).__name__
    }

    for socket_id in ['inputs', 'outputs']:
        socket_collection = getattr(node_tree, socket_id)
        node_tree_data[socket_id] = dump_node_tree_sockets(socket_collection)

    return node_tree_data


def dump_node_tree_sockets(sockets: bpy.types.Collection) -> dict:
    """ Dump the sockets of a node_tree

        :arg sockets: socket collection to dump
        :type sockets: bpy.types.Collection
        :return: list of (name, bl_socket_idname, uuid) tuples
    """
    sockets_data = []
    for socket in sockets:
        try:
            socket_uuid = socket['uuid']
        except Exception:
            socket_uuid = str(uuid4())
            socket['uuid'] = socket_uuid

        sockets_data.append((socket.name, socket.bl_socket_idname, socket_uuid))

    return sockets_data


def load_node_tree_sockets(sockets: bpy.types.Collection,
                           sockets_data: dict):
    """ Load the sockets of a node_tree

        :arg sockets: target socket collection
        :type sockets: bpy.types.Collection
        :arg sockets_data: dumped socket data
        :type sockets_data: dict
    """
    # Check for removed sockets
    for socket in sockets:
        if not [s for s in sockets_data if 'uuid' in socket and socket['uuid'] == s[2]]:
            sockets.remove(socket)

    # Check for new sockets
    for idx, socket_data in enumerate(sockets_data):
        try:
            checked_socket = sockets[idx]
            if checked_socket.name != socket_data[0]:
                checked_socket.name = socket_data[0]
        except Exception:
            s = sockets.new(socket_data[1], socket_data[0])
            s['uuid'] = socket_data[2]


def load_node_tree(node_tree_data: dict, target_node_tree: bpy.types.NodeTree) -> dict:
    """ Load a node_tree from dumped data

        :arg node_tree_data: dumped node data
        :type node_tree_data: dict
        :arg target_node_tree: target node_tree
        :type target_node_tree: bpy.types.NodeTree
    """
    # TODO: load only required nodes
    target_node_tree.nodes.clear()

    if not target_node_tree.is_property_readonly('name'):
        target_node_tree.name = node_tree_data['name']

    if 'inputs' in node_tree_data:
        socket_collection = getattr(target_node_tree, 'inputs')
        load_node_tree_sockets(socket_collection, node_tree_data['inputs'])

    if 'outputs' in node_tree_data:
        socket_collection = getattr(target_node_tree, 'outputs')
        load_node_tree_sockets(socket_collection, node_tree_data['outputs'])

    # Load nodes
    for node in node_tree_data["nodes"]:
        load_node(node_tree_data["nodes"][node], target_node_tree)

    for node_id, node_data in node_tree_data["nodes"].items():
        target_node = target_node_tree.nodes.get(node_id, None)
        if target_node is None:
            continue
        elif 'parent' in node_data:
            target_node.parent = target_node_tree.nodes[node_data['parent']]
        else:
            target_node.parent = None
    # TODO: load only required node links
    # Load node links
    target_node_tree.links.clear()

    load_links(node_tree_data["links"], target_node_tree)
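Taken together, the dump/load pair above round-trips a whole tree. A hedged usage sketch (inside Blender, with this module imported; the material names are illustrative):

import bpy

src = bpy.data.materials.new("src_mat")
dst = bpy.data.materials.new("dst_mat")
src.use_nodes = True
dst.use_nodes = True

data = dump_node_tree(src.node_tree)  # plain dict: nodes, links, sockets
load_node_tree(data, dst.node_tree)   # clears dst, rebuilds nodes, then relinks

assert len(dst.node_tree.nodes) == len(src.node_tree.nodes)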
def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
    def has_image(node): return (
        node.type in ['TEX_IMAGE', 'TEX_ENVIRONMENT', 'IMAGE', 'R_LAYER'] and node.image)

    def has_node_group(node): return (
        hasattr(node, 'node_tree') and node.node_tree)

    def has_texture(node): return (
        node.type in ['ATTRIBUTE_SAMPLE_TEXTURE', 'TEXTURE'] and node.texture)

    deps = []

    for node in node_tree.nodes:
        if has_image(node):
            deps.append(node.image)
        elif has_node_group(node):
            deps.append(node.node_tree)
        elif has_texture(node):
            deps.append(node.texture)

    return deps
@@ -24,20 +24,25 @@ import sys
from pathlib import Path
import socket
import re
import bpy

VERSION_EXPR = re.compile(r'\d+.\d+.\d+')

THIRD_PARTY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
DEFAULT_CACHE_DIR = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), "cache")
REPLICATION_DEPENDENCIES = {
    "zmq",
    "deepdiff"
}
LIBS = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
REPLICATION = os.path.join(LIBS, "replication")

PYTHON_PATH = None
SUBPROCESS_DIR = None


rtypes = []


-def module_can_be_imported(name):
+def module_can_be_imported(name: str) -> bool:
    try:
        __import__(name)
        return True
@@ -50,7 +55,7 @@ def install_pip():
    subprocess.run([str(PYTHON_PATH), "-m", "ensurepip"])


-def install_package(name, version):
+def install_package(name: str, install_dir: str):
    logging.info(f"installing {name}...")
    env = os.environ
    if "PIP_REQUIRE_VIRTUALENV" in env:
@@ -60,12 +65,13 @@ def install_package(name, version):
        # env var for the subprocess.
        env = os.environ.copy()
        del env["PIP_REQUIRE_VIRTUALENV"]
-    subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}=={version}"], env=env)
+    subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}", "-t", install_dir], env=env)

    if name in sys.modules:
        del sys.modules[name]


-def check_package_version(name, required_version):
+def check_package_version(name: str, required_version: str):
    logging.info(f"Checking {name} version...")
    out = subprocess.run([str(PYTHON_PATH), "-m", "pip", "show", name], capture_output=True)

@@ -77,6 +83,7 @@ def check_package_version(name, required_version):
        logging.info(f"{name} needs an update")
        return False


def get_ip():
    """
    Retrieve the main network interface IP.
@@ -94,7 +101,25 @@ def check_dir(dir):
        os.makedirs(dir)


-def setup(dependencies, python_path):
+def setup_paths(paths: list):
+    """ Add missing paths to sys.path
+    """
+    for path in paths:
+        if path not in sys.path:
+            logging.debug(f"Adding {path} dir to the path.")
+            sys.path.insert(0, path)
+
+
+def remove_paths(paths: list):
+    """ Remove a list of paths from sys.path
+    """
+    for path in paths:
+        if path in sys.path:
+            logging.debug(f"Removing {path} dir from the path.")
+            sys.path.remove(path)
+
+
+def install_modules(dependencies: list, python_path: str, install_dir: str):
    global PYTHON_PATH, SUBPROCESS_DIR

    PYTHON_PATH = Path(python_path)
@@ -103,9 +128,23 @@ def setup(dependencies, python_path):
    if not module_can_be_imported("pip"):
        install_pip()

-    for package_name, package_version in dependencies:
+    for package_name in dependencies:
        if not module_can_be_imported(package_name):
-            install_package(package_name, package_version)
+            install_package(package_name, install_dir=install_dir)
            module_can_be_imported(package_name)
-        elif not check_package_version(package_name, package_version):
-            install_package(package_name, package_version)
+
+def register():
+    if bpy.app.version[1] >= 91:
+        python_binary_path = sys.executable
+    else:
+        python_binary_path = bpy.app.binary_path_python
+
+    for module_name in list(sys.modules.keys()):
+        if 'replication' in module_name:
+            del sys.modules[module_name]
+
+    setup_paths([LIBS, REPLICATION])
+    install_modules(REPLICATION_DEPENDENCIES, python_binary_path, install_dir=LIBS)
+
+def unregister():
+    remove_paths([REPLICATION, LIBS])
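The switch from `pip install name==version` to `pip install name -t <dir>` vendors dependencies into the add-on's own `libs` folder instead of Blender's site-packages. A standalone sketch of the same mechanism (the target path is hypothetical):

import subprocess
import sys

LIBS = "/path/to/addon/libs"  # hypothetical vendor directory

# Install into a private directory rather than site-packages...
subprocess.run([sys.executable, "-m", "pip", "install", "deepdiff", "-t", LIBS],
               check=True)

# ...then make it importable, mirroring setup_paths() above.
if LIBS not in sys.path:
    sys.path.insert(0, LIBS)

import deepdiff  # resolved from the vendored directory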
multi_user/libs/replication (new submodule)
@ -17,6 +17,8 @@
|
||||
|
||||
|
||||
import asyncio
|
||||
import copy
|
||||
import gzip
|
||||
import logging
|
||||
import os
|
||||
import queue
|
||||
@ -25,27 +27,41 @@ import shutil
|
||||
import string
|
||||
import sys
|
||||
import time
|
||||
from datetime import datetime
|
||||
from operator import itemgetter
|
||||
from pathlib import Path
|
||||
from queue import Queue
|
||||
from time import gmtime, strftime
|
||||
import traceback
|
||||
|
||||
from bpy.props import FloatProperty
|
||||
|
||||
try:
|
||||
import _pickle as pickle
|
||||
except ImportError:
|
||||
import pickle
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
from bpy.app.handlers import persistent
|
||||
from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE,
|
||||
from bpy_extras.io_utils import ExportHelper, ImportHelper
|
||||
from replication.constants import (COMMITED, FETCHED, RP_COMMON, STATE_ACTIVE,
|
||||
STATE_INITIAL, STATE_SYNCING, UP)
|
||||
from replication.data import ReplicatedDataFactory
|
||||
from replication.exception import NonAuthorizedOperationError
|
||||
from replication.protocol import DataTranslationProtocol
|
||||
from replication.exception import ContextError, NonAuthorizedOperationError
|
||||
from replication.interface import session
|
||||
from replication import porcelain
|
||||
from replication.repository import Repository
|
||||
from replication.objects import Node
|
||||
|
||||
from . import bl_types, delayable, environment, ui, utils
|
||||
from . import bl_types, environment, timers, ui, utils
|
||||
from .presence import SessionStatusWidget, renderer, view3d_find
|
||||
from .timers import registry
|
||||
|
||||
background_execution_queue = Queue()
|
||||
deleyables = []
|
||||
stop_modal_executor = False
|
||||
|
||||
|
||||
def session_callback(name):
|
||||
""" Session callback wrapper
|
||||
|
||||
@ -67,26 +83,36 @@ def initialize_session():
|
||||
settings = utils.get_preferences()
|
||||
runtime_settings = bpy.context.window_manager.session
|
||||
|
||||
# Step 1: Constrect nodes
|
||||
for node in session._graph.list_ordered():
|
||||
node_ref = session.get(node)
|
||||
if node_ref.state == FETCHED:
|
||||
node_ref.resolve()
|
||||
if not runtime_settings.is_host:
|
||||
logging.info("Intializing the scene")
|
||||
# Step 1: Constrect nodes
|
||||
logging.info("Instantiating nodes")
|
||||
for node in session.repository.index_sorted:
|
||||
node_ref = session.repository.graph.get(node)
|
||||
if node_ref is None:
|
||||
logging.error(f"Can't construct node {node}")
|
||||
elif node_ref.state == FETCHED:
|
||||
node_ref.instance = session.repository.rdp.resolve(node_ref.data)
|
||||
if node_ref.instance is None:
|
||||
node_ref.instance = session.repository.rdp.construct(node_ref.data)
|
||||
node_ref.instance.uuid = node_ref.uuid
|
||||
|
||||
# Step 2: Load nodes
|
||||
for node in session._graph.list_ordered():
|
||||
node_ref = session.get(node)
|
||||
if node_ref.state == FETCHED:
|
||||
node_ref.apply()
|
||||
# Step 2: Load nodes
|
||||
logging.info("Applying nodes")
|
||||
for node in session.repository.index_sorted:
|
||||
porcelain.apply(session.repository, node)
|
||||
|
||||
logging.info("Registering timers")
|
||||
# Step 4: Register blender timers
|
||||
for d in deleyables:
|
||||
d.register()
|
||||
|
||||
if settings.update_method == 'DEPSGRAPH':
|
||||
bpy.app.handlers.depsgraph_update_post.append(depsgraph_evaluation)
|
||||
|
||||
bpy.ops.session.apply_armature_operator('INVOKE_DEFAULT')
|
||||
# Step 5: Clearing history
|
||||
utils.flush_history()
|
||||
|
||||
# Step 6: Launch deps graph update handling
|
||||
bpy.app.handlers.depsgraph_update_post.append(depsgraph_evaluation)
|
||||
|
||||
|
||||
@session_callback('on_exit')
|
||||
@ -106,9 +132,8 @@ def on_connection_end(reason="none"):
|
||||
|
||||
stop_modal_executor = True
|
||||
|
||||
if settings.update_method == 'DEPSGRAPH':
|
||||
bpy.app.handlers.depsgraph_update_post.remove(
|
||||
depsgraph_evaluation)
|
||||
if depsgraph_evaluation in bpy.app.handlers.depsgraph_update_post:
|
||||
bpy.app.handlers.depsgraph_update_post.remove(depsgraph_evaluation)
|
||||
|
||||
# Step 3: remove file handled
|
||||
logger = logging.getLogger()
|
||||
@ -137,8 +162,8 @@ class SessionStartOperator(bpy.types.Operator):
|
||||
settings = utils.get_preferences()
|
||||
runtime_settings = context.window_manager.session
|
||||
users = bpy.data.window_managers['WinMan'].online_users
|
||||
admin_pass = runtime_settings.password
|
||||
use_extern_update = settings.update_method == 'DEPSGRAPH'
|
||||
admin_pass = settings.password
|
||||
|
||||
users.clear()
|
||||
deleyables.clear()
|
||||
|
||||
@ -149,9 +174,10 @@ class SessionStartOperator(bpy.types.Operator):
|
||||
datefmt='%H:%M:%S'
|
||||
)
|
||||
|
||||
start_time = datetime.now().strftime('%Y_%m_%d_%H-%M-%S')
|
||||
log_directory = os.path.join(
|
||||
settings.cache_directory,
|
||||
"multiuser_client.log")
|
||||
f"multiuser_{start_time}.log")
|
||||
|
||||
os.makedirs(settings.cache_directory, exist_ok=True)
|
||||
|
||||
@ -164,53 +190,26 @@ class SessionStartOperator(bpy.types.Operator):
|
||||
|
||||
handler.setFormatter(formatter)
|
||||
|
||||
bpy_factory = ReplicatedDataFactory()
|
||||
supported_bl_types = []
|
||||
bpy_protocol = bl_types.get_data_translation_protocol()
|
||||
|
||||
# init the factory with supported types
|
||||
for type in bl_types.types_to_register():
|
||||
type_module = getattr(bl_types, type)
|
||||
name = [e.capitalize() for e in type.split('_')[1:]]
|
||||
type_impl_name = 'Bl'+''.join(name)
|
||||
type_module_class = getattr(type_module, type_impl_name)
|
||||
|
||||
supported_bl_types.append(type_module_class.bl_id)
|
||||
|
||||
if type_impl_name not in settings.supported_datablocks:
|
||||
logging.info(f"{type_impl_name} not found, \
|
||||
# Check if supported_datablocks are up to date before starting the
|
||||
# the session
|
||||
for dcc_type_id in bpy_protocol.implementations.keys():
|
||||
if dcc_type_id not in settings.supported_datablocks:
|
||||
logging.info(f"{dcc_type_id} not found, \
|
||||
regenerate type settings...")
|
||||
settings.generate_supported_types()
|
||||
|
||||
type_local_config = settings.supported_datablocks[type_impl_name]
|
||||
|
||||
bpy_factory.register_type(
|
||||
type_module_class.bl_class,
|
||||
type_module_class,
|
||||
timer=type_local_config.bl_delay_refresh*1000,
|
||||
automatic=type_local_config.auto_push,
|
||||
check_common=type_module_class.bl_check_common)
|
||||
|
||||
if settings.update_method == 'DEFAULT':
|
||||
if type_local_config.bl_delay_apply > 0:
|
||||
deleyables.append(
|
||||
delayable.ApplyTimer(
|
||||
timout=type_local_config.bl_delay_apply,
|
||||
target_type=type_module_class))
|
||||
|
||||
if bpy.app.version[1] >= 91:
|
||||
python_binary_path = sys.executable
|
||||
else:
|
||||
python_binary_path = bpy.app.binary_path_python
|
||||
|
||||
session.configure(
|
||||
factory=bpy_factory,
|
||||
python_path=python_binary_path,
|
||||
external_update_handling=use_extern_update)
|
||||
|
||||
if settings.update_method == 'DEPSGRAPH':
|
||||
deleyables.append(delayable.ApplyTimer(
|
||||
settings.depsgraph_update_rate/1000))
|
||||
|
||||
repo = Repository(
|
||||
rdp=bpy_protocol,
|
||||
username=settings.username)
|
||||
|
||||
# Host a session
|
||||
if self.host:
|
||||
if settings.init_method == 'EMPTY':
|
||||
@ -220,13 +219,19 @@ class SessionStartOperator(bpy.types.Operator):
|
||||
runtime_settings.internet_ip = environment.get_ip()
|
||||
|
||||
try:
|
||||
# Init repository
|
||||
for scene in bpy.data.scenes:
|
||||
session.add(scene)
|
||||
porcelain.add(repo, scene)
|
||||
|
||||
porcelain.remote_add(
|
||||
repo,
|
||||
'origin',
|
||||
'127.0.0.1',
|
||||
settings.port,
|
||||
admin_password=admin_pass)
|
||||
session.host(
|
||||
id=settings.username,
|
||||
port=settings.port,
|
||||
ipc_port=settings.ipc_port,
|
||||
repository= repo,
|
||||
remote='origin',
|
||||
timeout=settings.connection_timeout,
|
||||
password=admin_pass,
|
||||
cache_directory=settings.cache_directory,
|
||||
@ -236,7 +241,6 @@ class SessionStartOperator(bpy.types.Operator):
|
||||
except Exception as e:
|
||||
self.report({'ERROR'}, repr(e))
|
||||
logging.error(f"Error: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
# Join a session
|
||||
else:
|
||||
@ -246,11 +250,14 @@ class SessionStartOperator(bpy.types.Operator):
|
||||
admin_pass = None
|
||||
|
||||
try:
|
||||
porcelain.remote_add(
|
||||
repo,
|
||||
'origin',
|
||||
settings.ip,
|
||||
settings.port,
|
||||
admin_password=admin_pass)
|
||||
session.connect(
|
||||
id=settings.username,
|
||||
address=settings.ip,
|
||||
port=settings.port,
|
||||
ipc_port=settings.ipc_port,
|
||||
repository= repo,
|
||||
timeout=settings.connection_timeout,
|
||||
password=admin_pass
|
||||
)
|
||||
@ -259,14 +266,17 @@ class SessionStartOperator(bpy.types.Operator):
|
||||
logging.error(str(e))
|
||||
|
||||
# Background client updates service
|
||||
deleyables.append(delayable.ClientUpdate())
|
||||
deleyables.append(delayable.DynamicRightSelectTimer())
|
||||
deleyables.append(timers.ClientUpdate())
|
||||
deleyables.append(timers.DynamicRightSelectTimer())
|
||||
deleyables.append(timers.ApplyTimer(timeout=settings.depsgraph_update_rate))
|
||||
|
||||
session_update = delayable.SessionStatusUpdate()
|
||||
session_user_sync = delayable.SessionUserSync()
|
||||
session_background_executor = delayable.MainThreadExecutor(
|
||||
session_update = timers.SessionStatusUpdate()
|
||||
session_user_sync = timers.SessionUserSync()
|
||||
session_background_executor = timers.MainThreadExecutor(
|
||||
execution_queue=background_execution_queue)
|
||||
session_listen = timers.SessionListenTimer(timeout=0.001)
|
||||
|
||||
session_listen.register()
|
||||
session_update.register()
|
||||
session_user_sync.register()
|
||||
session_background_executor.register()
|
||||
@ -274,12 +284,8 @@ class SessionStartOperator(bpy.types.Operator):
|
||||
deleyables.append(session_background_executor)
|
||||
deleyables.append(session_update)
|
||||
deleyables.append(session_user_sync)
|
||||
deleyables.append(session_listen)
|
||||
|
||||
|
||||
|
||||
self.report(
|
||||
{'INFO'},
|
||||
f"connecting to tcp://{settings.ip}:{settings.port}")
|
||||
return {"FINISHED"}
|
||||
|
||||
|
||||
@ -315,9 +321,10 @@ class SessionInitOperator(bpy.types.Operator):
|
||||
utils.clean_scene()
|
||||
|
||||
for scene in bpy.data.scenes:
|
||||
session.add(scene)
|
||||
porcelain.add(session.repository, scene)
|
||||
|
||||
session.init()
|
||||
context.window_manager.session.is_host = True
|
||||
|
||||
return {"FINISHED"}
|
||||
|
||||
@ -337,7 +344,7 @@ class SessionStopOperator(bpy.types.Operator):
|
||||
|
||||
if session:
|
||||
try:
|
||||
session.disconnect()
|
||||
session.disconnect(reason='user')
|
||||
|
||||
except Exception as e:
|
||||
self.report({'ERROR'}, repr(e))
|
||||
@ -364,7 +371,7 @@ class SessionKickOperator(bpy.types.Operator):
|
||||
assert(session)
|
||||
|
||||
try:
|
||||
session.kick(self.user)
|
||||
porcelain.kick(session.repository, self.user)
|
||||
except Exception as e:
|
||||
self.report({'ERROR'}, repr(e))
|
||||
|
||||
@ -393,7 +400,7 @@ class SessionPropertyRemoveOperator(bpy.types.Operator):
|
||||
|
||||
def execute(self, context):
|
||||
try:
|
||||
session.remove(self.property_path)
|
||||
porcelain.rm(session.repository, self.property_path)
|
||||
|
||||
return {"FINISHED"}
|
||||
except: # NonAuthorizedOperationError:
|
||||
@ -435,10 +442,17 @@ class SessionPropertyRightOperator(bpy.types.Operator):
|
||||
runtime_settings = context.window_manager.session
|
||||
|
||||
if session:
|
||||
session.change_owner(self.key,
|
||||
runtime_settings.clients,
|
||||
if runtime_settings.clients == RP_COMMON:
|
||||
porcelain.unlock(session.repository,
|
||||
self.key,
|
||||
ignore_warnings=True,
|
||||
affect_dependencies=self.recursive)
|
||||
else:
|
||||
porcelain.lock(session.repository,
|
||||
self.key,
|
||||
runtime_settings.clients,
|
||||
ignore_warnings=True,
|
||||
affect_dependencies=self.recursive)
|
||||
|
||||
return {"FINISHED"}
|
||||
|
||||
@@ -553,7 +567,7 @@ class SessionSnapTimeOperator(bpy.types.Operator):

    def modal(self, context, event):
        is_running = context.window_manager.session.user_snap_running
        if event.type in {'RIGHTMOUSE', 'ESC'} or not is_running:
        if not is_running:
            self.cancel(context)
            return {'CANCELLED'}

@@ -586,12 +600,23 @@ class SessionApply(bpy.types.Operator):
    def execute(self, context):
        logging.debug(f"Running apply on {self.target}")
        try:
            session.apply(self.target,
                          force=True,
                          force_dependencies=self.reset_dependencies)
            node_ref = session.repository.graph.get(self.target)
            porcelain.apply(session.repository,
                            self.target,
                            force=True,
                            force_dependencies=self.reset_dependencies)
            impl = session.repository.rdp.get_implementation(node_ref.instance)
            if impl.bl_reload_parent:
                for parent in session.repository.graph.get_parents(self.target):
                    logging.debug(f"Refresh parent {parent}")

                    porcelain.apply(session.repository,
                                    parent.uuid,
                                    force=True)
        except Exception as e:
            self.report({'ERROR'}, repr(e))
            return {"CANCELED"}
            traceback.print_exc()
            return {"CANCELLED"}

        return {"FINISHED"}

@@ -610,54 +635,12 @@ class SessionCommit(bpy.types.Operator):

    def execute(self, context):
        try:
            session.commit(uuid=self.target)
            session.push(self.target)
            porcelain.commit(session.repository, self.target)
            porcelain.push(session.repository, 'origin', self.target)
            return {"FINISHED"}
        except Exception as e:
            self.report({'ERROR'}, repr(e))
            return {"CANCELED"}

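The commit hunks above consistently swap the stateful `session.commit`/`session.push` pair for repository-scoped porcelain calls, with `'origin'` as the remote name. The publish path for a single node therefore reduces to the following sketch, assuming an active session (`node_uuid` is illustrative):

# Illustrative sketch, not part of the changeset.
from replication import porcelain
from replication.interface import session

def publish(node_uuid):
    porcelain.commit(session.repository, node_uuid)           # record local changes
    porcelain.push(session.repository, 'origin', node_uuid)   # send them to the server
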
class ApplyArmatureOperator(bpy.types.Operator):
    """Operator which runs its self from a timer"""
    bl_idname = "session.apply_armature_operator"
    bl_label = "Modal Executor Operator"

    _timer = None

    def modal(self, context, event):
        global stop_modal_executor, modal_executor_queue
        if stop_modal_executor:
            self.cancel(context)
            return {'CANCELLED'}

        if event.type == 'TIMER':
            if session and session.state['STATE'] == STATE_ACTIVE:
                nodes = session.list(filter=bl_types.bl_armature.BlArmature)

                for node in nodes:
                    node_ref = session.get(uuid=node)

                    if node_ref.state == FETCHED:
                        try:
                            session.apply(node)
                        except Exception as e:
                            logging.error("Fail to apply armature: {e}")

        return {'PASS_THROUGH'}

    def execute(self, context):
        wm = context.window_manager
        self._timer = wm.event_timer_add(2, window=context.window)
        wm.modal_handler_add(self)
        return {'RUNNING_MODAL'}

    def cancel(self, context):
        global stop_modal_executor

        wm = context.window_manager
        wm.event_timer_remove(self._timer)

        stop_modal_executor = False
        return {"CANCELLED"}


class SessionClearCache(bpy.types.Operator):
@@ -688,6 +671,32 @@ class SessionClearCache(bpy.types.Operator):
        row = self.layout
        row.label(text=f" Do you really want to remove local cache ? ")


class SessionPurgeOperator(bpy.types.Operator):
    "Remove node with lost references"
    bl_idname = "session.purge"
    bl_label = "Purge session data"

    @classmethod
    def poll(cls, context):
        return True

    def execute(self, context):
        try:
            sanitize_deps_graph(remove_nodes=True)
        except Exception as e:
            self.report({'ERROR'}, repr(e))

        return {"FINISHED"}

    def invoke(self, context, event):
        return context.window_manager.invoke_props_dialog(self)

    def draw(self, context):
        row = self.layout
        row.label(text=f" Do you really want to remove local cache ? ")


class SessionNotifyOperator(bpy.types.Operator):
    """Dialog only operator"""
    bl_idname = "session.notify"
@@ -712,6 +721,182 @@ class SessionNotifyOperator(bpy.types.Operator):
        return context.window_manager.invoke_props_dialog(self)


class SessionSaveBackupOperator(bpy.types.Operator, ExportHelper):
    bl_idname = "session.save"
    bl_label = "Save session data"
    bl_description = "Save a snapshot of the collaborative session"

    # ExportHelper mixin class uses this
    filename_ext = ".db"

    filter_glob: bpy.props.StringProperty(
        default="*.db",
        options={'HIDDEN'},
        maxlen=255,  # Max internal buffer length, longer would be clamped.
    )

    enable_autosave: bpy.props.BoolProperty(
        name="Auto-save",
        description="Enable session auto-save",
        default=True,
    )
    save_interval: bpy.props.FloatProperty(
        name="Auto save interval",
        description="auto-save interval (seconds)",
        default=10,
    )

    def execute(self, context):
        if self.enable_autosave:
            recorder = timers.SessionBackupTimer(
                filepath=self.filepath,
                timeout=self.save_interval)
            recorder.register()
            deleyables.append(recorder)
        else:
            session.repository.dumps(self.filepath)

        return {'FINISHED'}

    @classmethod
    def poll(cls, context):
        return session.state == STATE_ACTIVE


class SessionStopAutoSaveOperator(bpy.types.Operator):
    bl_idname = "session.cancel_autosave"
    bl_label = "Cancel auto-save"
    bl_description = "Cancel session auto-save"

    @classmethod
    def poll(cls, context):
        return (session.state == STATE_ACTIVE and 'SessionBackupTimer' in registry)

    def execute(self, context):
        autosave_timer = registry.get('SessionBackupTimer')
        autosave_timer.unregister()

        return {'FINISHED'}


class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
    bl_idname = "session.load"
    bl_label = "Load session save"
    bl_description = "Load a Multi-user session save"
    bl_options = {'REGISTER', 'UNDO'}

    # ExportHelper mixin class uses this
    filename_ext = ".db"

    filter_glob: bpy.props.StringProperty(
        default="*.db",
        options={'HIDDEN'},
        maxlen=255,  # Max internal buffer length, longer would be clamped.
    )

    def execute(self, context):
        from replication.repository import Repository

        # init the factory with supported types
        bpy_protocol = bl_types.get_data_translation_protocol()
        repo = Repository(bpy_protocol)
        repo.loads(self.filepath)
        utils.clean_scene()

        nodes = [repo.graph.get(n) for n in repo.index_sorted]

        # Step 1: Construct nodes
        for node in nodes:
            node.instance = bpy_protocol.resolve(node.data)
            if node.instance is None:
                node.instance = bpy_protocol.construct(node.data)
                node.instance.uuid = node.uuid

        # Step 2: Load nodes
        for node in nodes:
            porcelain.apply(repo, node.uuid)

        return {'FINISHED'}

    @classmethod
    def poll(cls, context):
        return True

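The save/load operators above make the snapshot format symmetric: `Repository.dumps(filepath)` writes the replication graph to a `.db` file, and a fresh `Repository` plus `loads` followed by construct/apply rebuilds the scene. Condensed outside the operator boilerplate, the round trip could look like this sketch (the import path and file path are assumptions, not taken from the diff):

# Illustrative sketch, not part of the changeset.
from replication.repository import Repository
from replication import porcelain
from multi_user import bl_types  # assumed import path for the addon package

bpy_protocol = bl_types.get_data_translation_protocol()
repo = Repository(bpy_protocol)
repo.loads("/tmp/session_snapshot.db")  # assumed path

# Rebuild every datablock in dependency order, as SessionLoadSaveOperator does.
for node in (repo.graph.get(n) for n in repo.index_sorted):
    node.instance = bpy_protocol.resolve(node.data)
    if node.instance is None:
        node.instance = bpy_protocol.construct(node.data)
        node.instance.uuid = node.uuid
    porcelain.apply(repo, node.uuid)
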
class SessionPresetServerAdd(bpy.types.Operator):
    """Add a server to the server list preset"""
    bl_idname = "session.preset_server_add"
    bl_label = "add server preset"
    bl_description = "add the current server to the server preset list"
    bl_options = {"REGISTER"}

    name : bpy.props.StringProperty(default="server_preset")

    @classmethod
    def poll(cls, context):
        return True

    def invoke(self, context, event):
        assert(context)
        return context.window_manager.invoke_props_dialog(self)

    def draw(self, context):
        layout = self.layout

        col = layout.column()
        settings = utils.get_preferences()

        col.prop(settings, "server_name", text="server name")

    def execute(self, context):
        assert(context)

        settings = utils.get_preferences()

        existing_preset = settings.server_preset.get(settings.server_name)

        new_server = existing_preset if existing_preset else settings.server_preset.add()
        new_server.name = settings.server_name
        new_server.server_ip = settings.ip
        new_server.server_port = settings.port
        new_server.server_password = settings.password

        settings.server_preset_interface = settings.server_name

        if new_server == existing_preset :
            self.report({'INFO'}, "Server '" + settings.server_name + "' override")
        else :
            self.report({'INFO'}, "New '" + settings.server_name + "' server preset")

        return {'FINISHED'}


class SessionPresetServerRemove(bpy.types.Operator):
    """Remove a server to the server list preset"""
    bl_idname = "session.preset_server_remove"
    bl_label = "remove server preset"
    bl_description = "remove the current server from the server preset list"
    bl_options = {"REGISTER"}

    @classmethod
    def poll(cls, context):
        return True

    def execute(self, context):
        assert(context)

        settings = utils.get_preferences()

        settings.server_preset.remove(settings.server_preset.find(settings.server_preset_interface))

        return {'FINISHED'}


def menu_func_import(self, context):
    self.layout.operator(SessionLoadSaveOperator.bl_idname, text='Multi-user session snapshot (.db)')


classes = (
    SessionStartOperator,
    SessionStopOperator,
@@ -721,99 +906,147 @@ classes = (
    SessionPropertyRightOperator,
    SessionApply,
    SessionCommit,
    ApplyArmatureOperator,
    SessionKickOperator,
    SessionInitOperator,
    SessionClearCache,
    SessionNotifyOperator,
    SessionNotifyOperator,
    SessionSaveBackupOperator,
    SessionLoadSaveOperator,
    SessionStopAutoSaveOperator,
    SessionPurgeOperator,
    SessionPresetServerAdd,
    SessionPresetServerRemove,
)

def update_external_dependencies():
    nodes_ids = [n.uuid for n in session.repository.graph.values() if n.data['type_id'] in ['WindowsPath', 'PosixPath']]
    for node_id in nodes_ids:
        node = session.repository.graph.get(node_id)
        if node and node.owner in [session.repository.username, RP_COMMON]:
            porcelain.commit(session.repository, node_id)
            porcelain.push(session.repository, 'origin', node_id)


def sanitize_deps_graph(remove_nodes: bool = False):
    """ Cleanup the replication graph
    """
    if session and session.state == STATE_ACTIVE:
        start = utils.current_milli_time()
        rm_cpt = 0
        for node in session.repository.graph.values():
            node.instance = session.repository.rdp.resolve(node.data)
            if node is None \
                    or (node.state == UP and not node.instance):
                if remove_nodes:
                    try:
                        porcelain.rm(session.repository,
                                     node.uuid,
                                     remove_dependencies=False)
                        logging.info(f"Removing {node.uuid}")
                        rm_cpt += 1
                    except NonAuthorizedOperationError:
                        continue
        logging.info(f"Sanitize took { utils.current_milli_time()-start} ms, removed {rm_cpt} nodes")


@persistent
def sanitize_deps_graph(dummy):
    """sanitize deps graph
def resolve_deps_graph(dummy):
    """Resolve deps graph

    Temporary solution to resolve each node pointers after a Undo.
    A future solution should be to avoid storing dataclock reference...

    """
    if session and session.state['STATE'] == STATE_ACTIVE:
        for node_key in session.list():
            session.get(node_key).resolve()
    if session and session.state == STATE_ACTIVE:
        sanitize_deps_graph(remove_nodes=True)


@persistent
def load_pre_handler(dummy):
    if session and session.state['STATE'] in [STATE_ACTIVE, STATE_SYNCING]:
    if session and session.state in [STATE_ACTIVE, STATE_SYNCING]:
        bpy.ops.session.stop()


@persistent
def update_client_frame(scene):
    if session and session.state['STATE'] == STATE_ACTIVE:
        session.update_user_metadata({
    if session and session.state == STATE_ACTIVE:
        porcelain.update_user_metadata(session.repository, {
            'frame_current': scene.frame_current
        })


@persistent
def depsgraph_evaluation(scene):
    if session and session.state['STATE'] == STATE_ACTIVE:
    if session and session.state == STATE_ACTIVE:
        context = bpy.context
        blender_depsgraph = bpy.context.view_layer.depsgraph
        dependency_updates = [u for u in blender_depsgraph.updates]
        settings = utils.get_preferences()

        # NOTE: maybe we don't need to check each update but only the first
        update_external_dependencies()

        is_internal = [u for u in dependency_updates if u.is_updated_geometry or u.is_updated_shading or u.is_updated_transform]

        # NOTE: maybe we don't need to check each update but only the first
        if not is_internal:
            return
        for update in reversed(dependency_updates):
            # Is the object tracked ?
            if update.id.uuid:
                # Retrieve local version
                node = session.get(update.id.uuid)

                node = session.repository.graph.get(update.id.uuid)
                check_common = session.repository.rdp.get_implementation(update.id).bl_check_common
                # Check our right on this update:
                #   - if its ours or ( under common and diff), launch the
                #     update process
                #   - if its to someone else, ignore the update (go deeper ?)
                if node and node.owner in [session.id, RP_COMMON] and node.state == UP:
                    # Avoid slow geometry update
                    if 'EDIT' in context.mode and \
                            not settings.sync_flags.sync_during_editmode:
                        break

                    session.stash(node.uuid)
                #   - if its to someone else, ignore the update
                if node and (node.owner == session.repository.username or check_common):
                    if node.state == UP:
                        try:
                            porcelain.commit(session.repository, node.uuid)
                            porcelain.push(session.repository, 'origin', node.uuid)
                        except ReferenceError:
                            logging.debug(f"Reference error {node.uuid}")
                        except ContextError as e:
                            logging.debug(e)
                        except Exception as e:
                            logging.error(e)
                else:
                    # Distant update
                    continue
            # else:
            #     # New items !
            #     logger.error("UPDATE: ADD")

            # A new scene is created
            elif isinstance(update.id, bpy.types.Scene):
                ref = session.repository.get_node_by_datablock(update.id)
                if ref:
                    pass
                else:
                    scn_uuid = porcelain.add(session.repository, update.id)
                    porcelain.commit(session.node_id, scn_uuid)
                    porcelain.push(session.repository, 'origin', scn_uuid)
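All of the handlers above share one shape: a `@persistent` function that checks the session state before doing anything, appended to one of the `bpy.app.handlers` lists in `register()` and removed in `unregister()`. The bare pattern, stripped of the session logic (the handler name is illustrative):

# Illustrative sketch, not part of the changeset.
import bpy
from bpy.app.handlers import persistent

@persistent  # keeps the handler alive across .blend file loads
def on_frame_change(scene):
    print(f"frame changed: {scene.frame_current}")

def register():
    bpy.app.handlers.frame_change_pre.append(on_frame_change)

def unregister():
    bpy.app.handlers.frame_change_pre.remove(on_frame_change)
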
def register():
    from bpy.utils import register_class

    for cls in classes:
    for cls in classes:
        register_class(cls)

    bpy.app.handlers.undo_post.append(sanitize_deps_graph)
    bpy.app.handlers.redo_post.append(sanitize_deps_graph)

    bpy.app.handlers.undo_post.append(resolve_deps_graph)
    bpy.app.handlers.redo_post.append(resolve_deps_graph)

    bpy.app.handlers.load_pre.append(load_pre_handler)
    bpy.app.handlers.frame_change_pre.append(update_client_frame)


def unregister():
    if session and session.state['STATE'] == STATE_ACTIVE:
    if session and session.state == STATE_ACTIVE:
        session.disconnect()

    from bpy.utils import unregister_class
    for cls in reversed(classes):
        unregister_class(cls)

    bpy.app.handlers.undo_post.remove(sanitize_deps_graph)
    bpy.app.handlers.redo_post.remove(sanitize_deps_graph)
    bpy.app.handlers.undo_post.remove(resolve_deps_graph)
    bpy.app.handlers.redo_post.remove(resolve_deps_graph)

    bpy.app.handlers.load_pre.remove(load_pre_handler)
    bpy.app.handlers.frame_change_pre.remove(update_client_frame)

@@ -33,6 +33,19 @@ from replication.interface import session
IP_REGEX = re.compile("^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$")
HOSTNAME_REGEX = re.compile("^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$")

DEFAULT_PRESETS = {
    "localhost" : {
        "server_ip": "localhost",
        "server_port": 5555,
        "server_password": "admin"
    },
    "public session" : {
        "server_ip": "51.75.71.183",
        "server_port": 5555,
        "server_password": ""
    },
}

def randomColor():
    """Generate a random color """
    r = random.random()
@@ -65,16 +78,11 @@ def update_ip(self, context):
        logging.error("Wrong IP format")
        self['ip'] = "127.0.0.1"


def update_port(self, context):
    max_port = self.port + 3

    if self.ipc_port < max_port and \
            self['ipc_port'] >= self.port:
        logging.error(
            "IPC Port in conflict with the port, assigning a random value")
        self['ipc_port'] = random.randrange(self.port+4, 10000)

def update_server_preset_interface(self, context):
    self.server_name = self.server_preset.get(self.server_preset_interface).name
    self.ip = self.server_preset.get(self.server_preset_interface).server_ip
    self.port = self.server_preset.get(self.server_preset_interface).server_port
    self.password = self.server_preset.get(self.server_preset_interface).server_password

def update_directory(self, context):
    new_dir = Path(self.cache_directory)
@@ -97,12 +105,14 @@ def get_log_level(self):
class ReplicatedDatablock(bpy.types.PropertyGroup):
    type_name: bpy.props.StringProperty()
    bl_name: bpy.props.StringProperty()
    bl_delay_refresh: bpy.props.FloatProperty()
    bl_delay_apply: bpy.props.FloatProperty()
    use_as_filter: bpy.props.BoolProperty(default=True)
    auto_push: bpy.props.BoolProperty(default=True)
    icon: bpy.props.StringProperty()

class ServerPreset(bpy.types.PropertyGroup):
    server_ip: bpy.props.StringProperty()
    server_port: bpy.props.IntProperty(default=5555)
    server_password: bpy.props.StringProperty(default="admin", subtype = "PASSWORD")

def set_sync_render_settings(self, value):
    self['sync_render_settings'] = value
@@ -155,7 +165,7 @@ class SessionPrefs(bpy.types.AddonPreferences):
    ip: bpy.props.StringProperty(
        name="ip",
        description='Distant host ip',
        default="127.0.0.1",
        default="localhost",
        update=update_ip)
    username: bpy.props.StringProperty(
        name="Username",
@@ -170,18 +180,23 @@ class SessionPrefs(bpy.types.AddonPreferences):
        description='Distant host port',
        default=5555
    )
    server_name: bpy.props.StringProperty(
        name="server_name",
        description="Custom name of the server",
        default='localhost',
    )
    password: bpy.props.StringProperty(
        name="password",
        default=random_string_digits(),
        description='Session password',
        subtype='PASSWORD'
    )
    sync_flags: bpy.props.PointerProperty(
        type=ReplicationFlags
    )
    supported_datablocks: bpy.props.CollectionProperty(
        type=ReplicatedDatablock,
    )
    ipc_port: bpy.props.IntProperty(
        name="ipc_port",
        description='internal ttl port(only useful for multiple local instances)',
        default=random.randrange(5570, 70000),
        update=update_port,
    )
    init_method: bpy.props.EnumProperty(
        name='init_method',
        description='Init repo',
@@ -197,22 +212,13 @@ class SessionPrefs(bpy.types.AddonPreferences):
    connection_timeout: bpy.props.IntProperty(
        name='connection timeout',
        description='connection timeout before disconnection',
        default=1000
    )
    update_method: bpy.props.EnumProperty(
        name='update method',
        description='replication update method',
        items=[
            ('DEFAULT', "Default", "Default: Use threads to monitor databloc changes"),
            ('DEPSGRAPH', "Depsgraph",
             "Experimental: Use the blender dependency graph to trigger updates"),
        ],
        default=5000
    )
    # Replication update settings
    depsgraph_update_rate: bpy.props.IntProperty(
        name='depsgraph update rate',
        description='Dependency graph uppdate rate (milliseconds)',
        default=1000
    depsgraph_update_rate: bpy.props.FloatProperty(
        name='depsgraph update rate (s)',
        description='Dependency graph uppdate rate (s)',
        default=1
    )
    clear_memory_filecache: bpy.props.BoolProperty(
        name="Clear memory filecache",
@@ -282,11 +288,6 @@ class SessionPrefs(bpy.types.AddonPreferences):
        description="Rights",
        default=False
    )
    conf_session_timing_expanded: bpy.props.BoolProperty(
        name="timings",
        description="timings",
        default=False
    )
    conf_session_cache_expanded: bpy.props.BoolProperty(
        name="Cache",
        description="cache",
@@ -351,6 +352,25 @@ class SessionPrefs(bpy.types.AddonPreferences):
        max=59
    )

    # Server preset
    def server_list_callback(scene, context):
        settings = get_preferences()
        enum = []
        for i in settings.server_preset:
            enum.append((i.name, i.name, ""))
        return enum

    server_preset: bpy.props.CollectionProperty(
        name="server preset",
        type=ServerPreset,
    )
    server_preset_interface: bpy.props.EnumProperty(
        name="servers",
        description="servers enum",
        items=server_list_callback,
        update=update_server_preset_interface,
    )

    # Custom panel
    panel_category: bpy.props.StringProperty(
        description="Choose a name for the category of the panel",
@@ -390,28 +410,7 @@ class SessionPrefs(bpy.types.AddonPreferences):
        row = box.row()
        row.label(text="Init the session from:")
        row.prop(self, "init_method", text="")
        row = box.row()
        row.label(text="Update method:")
        row.prop(self, "update_method", text="")

        table = box.box()
        table.row().prop(
            self, "conf_session_timing_expanded", text="Refresh rates",
            icon=get_expanded_icon(self.conf_session_timing_expanded),
            emboss=False)

        if self.conf_session_timing_expanded:
            line = table.row()
            line.label(text=" ")
            line.separator()
            line.label(text="refresh (sec)")
            line.label(text="apply (sec)")

            for item in self.supported_datablocks:
                line = table.row(align=True)
                line.label(text="", icon=item.icon)
                line.prop(item, "bl_delay_refresh", text="")
                line.prop(item, "bl_delay_apply", text="")
        # HOST SETTINGS
        box = grid.box()
        box.prop(
@@ -458,21 +457,30 @@ class SessionPrefs(bpy.types.AddonPreferences):
    def generate_supported_types(self):
        self.supported_datablocks.clear()

        for type in bl_types.types_to_register():
        bpy_protocol = bl_types.get_data_translation_protocol()

        # init the factory with supported types
        for dcc_type_id, impl in bpy_protocol.implementations.items():
            new_db = self.supported_datablocks.add()

            type_module = getattr(bl_types, type)
            name = [e.capitalize() for e in type.split('_')[1:]]
            type_impl_name = 'Bl'+''.join(name)
            type_module_class = getattr(type_module, type_impl_name)
            new_db.name = type_impl_name
            new_db.type_name = type_impl_name
            new_db.bl_delay_refresh = type_module_class.bl_delay_refresh
            new_db.bl_delay_apply = type_module_class.bl_delay_apply
            new_db.name = dcc_type_id
            new_db.type_name = dcc_type_id
            new_db.use_as_filter = True
            new_db.icon = type_module_class.bl_icon
            new_db.auto_push = type_module_class.bl_automatic_push
            new_db.bl_name = type_module_class.bl_id
            new_db.icon = impl.bl_icon
            new_db.bl_name = impl.bl_id


    # custom at launch server preset
    def generate_default_presets(self):
        for preset_name, preset_data in DEFAULT_PRESETS.items():
            existing_preset = self.server_preset.get(preset_name)
            if existing_preset :
                continue
            new_server = self.server_preset.add()
            new_server.name = preset_name
            new_server.server_ip = preset_data.get('server_ip')
            new_server.server_port = preset_data.get('server_port')
            new_server.server_password = preset_data.get('server_password',None)


def client_list_callback(scene, context):
@@ -545,17 +553,16 @@ class SessionProps(bpy.types.PropertyGroup):
        description='Show only owned datablocks',
        default=True
    )
    filter_name: bpy.props.StringProperty(
        name="filter_name",
        default="",
        description='Node name filter',
    )
    admin: bpy.props.BoolProperty(
        name="admin",
        description='Connect as admin',
        default=False
    )
    password: bpy.props.StringProperty(
        name="password",
        default=random_string_digits(),
        description='Session password',
        subtype='PASSWORD'
    )
    internet_ip: bpy.props.StringProperty(
        name="internet ip",
        default="no found",
@@ -577,6 +584,7 @@ classes = (
    SessionProps,
    ReplicationFlags,
    ReplicatedDatablock,
    ServerPreset,
    SessionPrefs,
)

@@ -591,6 +599,10 @@ def register():
    if len(prefs.supported_datablocks) == 0:
        logging.debug('Generating bl_types preferences')
        prefs.generate_supported_types()

    # at launch server presets
    prefs.generate_default_presets()


def unregister():

@@ -30,7 +30,7 @@ import mathutils
from bpy_extras import view3d_utils
from gpu_extras.batch import batch_for_shader
from replication.constants import (STATE_ACTIVE, STATE_AUTH, STATE_CONFIG,
                                   STATE_INITIAL, STATE_LAUNCHING_SERVICES,
                                   STATE_INITIAL, CONNECTING,
                                   STATE_LOBBY, STATE_QUITTING, STATE_SRV_SYNC,
                                   STATE_SYNCING, STATE_WAITING)
from replication.interface import session
@@ -302,9 +302,10 @@ class UserSelectionWidget(Widget):
            return

        vertex_pos = bbox_from_obj(ob, 1.0)
        vertex_indices = ((0, 1), (0, 2), (1, 3), (2, 3),
                          (4, 5), (4, 6), (5, 7), (6, 7),
                          (0, 4), (1, 5), (2, 6), (3, 7))
        vertex_indices = (
            (0, 1), (1, 2), (2, 3), (0, 3),
            (4, 5), (5, 6), (6, 7), (4, 7),
            (0, 4), (1, 5), (2, 6), (3, 7))

        if ob.instance_collection:
            for obj in ob.instance_collection.objects:
@@ -399,7 +400,7 @@ class SessionStatusWidget(Widget):
        text_scale = self.preferences.presence_hud_scale
        ui_scale = bpy.context.preferences.view.ui_scale
        color = [1, 1, 0, 1]
        state = session.state.get('STATE')
        state = session.state
        state_str = f"{get_state_str(state)}"

        if state == STATE_ACTIVE:

@@ -16,68 +16,49 @@
# ##### END GPL LICENSE BLOCK #####

import logging

import sys
import traceback
import bpy

from . import utils
from .presence import (renderer,
                       UserFrustumWidget,
                       UserNameWidget,
                       UserSelectionWidget,
                       refresh_3d_view,
                       generate_user_camera,
                       get_view_matrix,
                       refresh_sidebar_view)
from . import operators
from replication.constants import (FETCHED,
                                   UP,
                                   RP_COMMON,
                                   STATE_INITIAL,
                                   STATE_QUITTING,
                                   STATE_ACTIVE,
                                   STATE_SYNCING,
                                   STATE_LOBBY,
                                   STATE_SRV_SYNC)

from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE,
                                   STATE_INITIAL, STATE_LOBBY, STATE_QUITTING,
                                   STATE_SRV_SYNC, STATE_SYNCING, UP)
from replication.exception import NonAuthorizedOperationError, ContextError
from replication.interface import session
from replication.exception import NonAuthorizedOperationError
from replication import porcelain

from . import operators, utils
from .presence import (UserFrustumWidget, UserNameWidget, UserSelectionWidget,
                       generate_user_camera, get_view_matrix, refresh_3d_view,
                       refresh_sidebar_view, renderer)

this = sys.modules[__name__]

# Registered timers
this.registry = dict()

def is_annotating(context: bpy.types.Context):
    """ Check if the annotate mode is enabled
    """
    return bpy.context.workspace.tools.from_space_view3d_mode('OBJECT', create=False).idname == 'builtin.annotate'

class Delayable():
    """Delayable task interface
    """

    def register(self):
        raise NotImplementedError

    def execute(self):
        raise NotImplementedError

    def unregister(self):
        raise NotImplementedError


class Timer(Delayable):
class Timer(object):
    """Timer binder interface for blender

    Run a bpy.app.Timer in the background looping at the given rate
    """

    def __init__(self, duration=1):
        super().__init__()
        self._timeout = duration
    def __init__(self, timeout=10, id=None):
        self._timeout = timeout
        self.is_running = False
        self.id = id if id else self.__class__.__name__

    def register(self):
        """Register the timer into the blender timer system
        """

        if not self.is_running:
            this.registry[self.id] = self
            bpy.app.timers.register(self.main)
            self.is_running = True
            logging.debug(f"Register {self.__class__.__name__}")
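The refactor above folds the old `Delayable` hierarchy into a single `Timer` that wraps `bpy.app.timers` and records each instance in the module-level `registry` under its class name, which is what lets the autosave operator find and cancel `SessionBackupTimer` later. A custom timer in this style could look like the following sketch, assuming the `Timer` base class defined above (the subclass is illustrative):

# Illustrative sketch, not part of the changeset.
class HeartbeatTimer(Timer):
    def execute(self):
        # Runs every `timeout` seconds while registered.
        print("session heartbeat")

beat = HeartbeatTimer(timeout=2)
beat.register()      # starts the bpy.app.timers loop and adds itself to `registry`
# ... later ...
beat.unregister()    # stops the loop and removes the registry entry
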
@@ -91,7 +72,8 @@ class Timer(Delayable):
        except Exception as e:
            logging.error(e)
            self.unregister()
            session.disconnect()
            traceback.print_exc()
            session.disconnect(reason=f"Error during timer {self.id} execution")
        else:
            if self.is_running:
                return self._timeout
@@ -105,43 +87,50 @@ class Timer(Delayable):
        """Unnegister the timer of the blender timer system
        """
        if bpy.app.timers.is_registered(self.main):
            logging.info(f"Unregistering {self.id}")
            bpy.app.timers.unregister(self.main)

        del this.registry[self.id]
        self.is_running = False

class SessionBackupTimer(Timer):
    def __init__(self, timeout=10, filepath=None):
        self._filepath = filepath
        super().__init__(timeout)

class ApplyTimer(Timer):
    def __init__(self, timout=1, target_type=None):
        self._type = target_type
        super().__init__(timout)

    def execute(self):
        if session and session.state['STATE'] == STATE_ACTIVE:
            if self._type:
                nodes = session.list(filter=self._type)
            else:
                nodes = session.list()
        session.repository.dumps(self._filepath)

            for node in nodes:
                node_ref = session.get(uuid=node)
class SessionListenTimer(Timer):
    def execute(self):
        session.listen()

class ApplyTimer(Timer):
    def execute(self):
        if session and session.state == STATE_ACTIVE:
            for node in session.repository.graph.keys():
                node_ref = session.repository.graph.get(node)

                if node_ref.state == FETCHED:
                    try:
                        session.apply(node)
                        porcelain.apply(session.repository, node)
                    except Exception as e:
                        logging.error(f"Fail to apply {node_ref.uuid}: {e}")
                        logging.error(f"Fail to apply {node_ref.uuid}")
                        traceback.print_exc()
                    else:
                        if self._type.bl_reload_parent:
                            parents = []
                        impl = session.repository.rdp.get_implementation(node_ref.instance)
                        if impl.bl_reload_parent:
                            for parent in session.repository.graph.get_parents(node):
                                logging.debug("Refresh parent {node}")
                                porcelain.apply(session.repository,
                                                parent.uuid,
                                                force=True)

                            for n in session.list():
                                deps = session.get(uuid=n).dependencies
                                if deps and node in deps:
                                    session.apply(n, force=True)

class DynamicRightSelectTimer(Timer):
    def __init__(self, timout=.1):
        super().__init__(timout)
    def __init__(self, timeout=.1):
        super().__init__(timeout)
        self._last_selection = []
        self._user = None
        self._annotating = False
@@ -149,7 +138,7 @@ class DynamicRightSelectTimer(Timer):
    def execute(self):
        settings = utils.get_preferences()

        if session and session.state['STATE'] == STATE_ACTIVE:
        if session and session.state == STATE_ACTIVE:
            # Find user
            if self._user is None:
                self._user = session.online_users.get(settings.username)
@@ -158,26 +147,33 @@ class DynamicRightSelectTimer(Timer):
                ctx = bpy.context
                annotation_gp = ctx.scene.grease_pencil

                if annotation_gp and not annotation_gp.uuid:
                    ctx.scene.update_tag()

                # if an annotation exist and is tracked
                if annotation_gp and annotation_gp.uuid:
                    registered_gp = session.get(uuid=annotation_gp.uuid)
                    registered_gp = session.repository.graph.get(annotation_gp.uuid)
                    if is_annotating(bpy.context):
                        # try to get the right on it
                        if registered_gp.owner == RP_COMMON:
                            self._annotating = True
                            logging.debug(
                                "Getting the right on the annotation GP")
                            session.change_owner(
                                registered_gp.uuid,
                                settings.username,
                                ignore_warnings=True,
                                affect_dependencies=False)
                            porcelain.lock(session.repository,
                                           registered_gp.uuid,
                                           ignore_warnings=True,
                                           affect_dependencies=False)

                        if registered_gp.owner == settings.username:
                            gp_node = session.repository.graph.get(annotation_gp.uuid)
                            porcelain.commit(session.repository, gp_node.uuid)
                            porcelain.push(session.repository, 'origin', gp_node.uuid)

                    elif self._annotating:
                        session.change_owner(
                            registered_gp.uuid,
                            RP_COMMON,
                            ignore_warnings=True,
                            affect_dependencies=False)
                        porcelain.unlock(session.repository,
                                         registered_gp.uuid,
                                         ignore_warnings=True,
                                         affect_dependencies=False)

                current_selection = utils.get_selected_objects(
                    bpy.context.scene,
@@ -191,25 +187,24 @@ class DynamicRightSelectTimer(Timer):

                    # change old selection right to common
                    for obj in obj_common:
                        node = session.get(uuid=obj)
                        node = session.repository.graph.get(obj)

                        if node and (node.owner == settings.username or node.owner == RP_COMMON):
                            recursive = True
                            if node.data and 'instance_type' in node.data.keys():
                                recursive = node.data['instance_type'] != 'COLLECTION'
                            try:
                                session.change_owner(
                                    node.uuid,
                                    RP_COMMON,
                                    ignore_warnings=True,
                                    affect_dependencies=recursive)
                                porcelain.unlock(session.repository,
                                                 node.uuid,
                                                 ignore_warnings=True,
                                                 affect_dependencies=recursive)
                            except NonAuthorizedOperationError:
                                logging.warning(
                                    f"Not authorized to change {node} owner")

                    # change new selection to our
                    for obj in obj_ours:
                        node = session.get(uuid=obj)
                        node = session.repository.graph.get(obj)

                        if node and node.owner == RP_COMMON:
                            recursive = True
@@ -217,11 +212,10 @@ class DynamicRightSelectTimer(Timer):
                                recursive = node.data['instance_type'] != 'COLLECTION'

                            try:
                                session.change_owner(
                                    node.uuid,
                                    settings.username,
                                    ignore_warnings=True,
                                    affect_dependencies=recursive)
                                porcelain.lock(session.repository,
                                               node.uuid,
                                               ignore_warnings=True,
                                               affect_dependencies=recursive)
                            except NonAuthorizedOperationError:
                                logging.warning(
                                    f"Not authorized to change {node} owner")
@@ -234,21 +228,19 @@ class DynamicRightSelectTimer(Timer):
                        'selected_objects': current_selection
                    }

                    session.update_user_metadata(user_metadata)
                    porcelain.update_user_metadata(session.repository, user_metadata)
                    logging.debug("Update selection")

                    # Fix deselection until right managment refactoring (with Roles concepts)
                    if len(current_selection) == 0 :
                        owned_keys = session.list(
                            filter_owner=settings.username)
                        owned_keys = [k for k, v in session.repository.graph.items() if v.owner==settings.username]
                        for key in owned_keys:
                            node = session.get(uuid=key)
                            node = session.repository.graph.get(key)
                            try:
                                session.change_owner(
                                    key,
                                    RP_COMMON,
                                    ignore_warnings=True,
                                    affect_dependencies=recursive)
                                porcelain.unlock(session.repository,
                                                 key,
                                                 ignore_warnings=True,
                                                 affect_dependencies=True)
                            except NonAuthorizedOperationError:
                                logging.warning(
                                    f"Not authorized to change {key} owner")
@@ -256,14 +248,14 @@ class DynamicRightSelectTimer(Timer):
            for obj in bpy.data.objects:
                object_uuid = getattr(obj, 'uuid', None)
                if object_uuid:
                    is_selectable = not session.is_readonly(object_uuid)
                    is_selectable = not session.repository.is_node_readonly(object_uuid)
                    if obj.hide_select != is_selectable:
                        obj.hide_select = is_selectable


class ClientUpdate(Timer):
    def __init__(self, timout=.1):
        super().__init__(timout)
    def __init__(self, timeout=.1):
        super().__init__(timeout)
        self.handle_quit = False
        self.users_metadata = {}

@@ -271,7 +263,7 @@ class ClientUpdate(Timer):
        settings = utils.get_preferences()

        if session and renderer:
            if session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]:
            if session.state in [STATE_ACTIVE, STATE_LOBBY]:
                local_user = session.online_users.get(
                    settings.username)

@@ -310,31 +302,31 @@ class ClientUpdate(Timer):
                        'frame_current': bpy.context.scene.frame_current,
                        'scene_current': scene_current
                    }
                    session.update_user_metadata(metadata)
                    porcelain.update_user_metadata(session.repository, metadata)

                # Update client representation
                # Update client current scene
                elif scene_current != local_user_metadata['scene_current']:
                    local_user_metadata['scene_current'] = scene_current
                    session.update_user_metadata(local_user_metadata)
                    porcelain.update_user_metadata(session.repository, local_user_metadata)
                elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']:
                    local_user_metadata['view_corners'] = current_view_corners
                    local_user_metadata['view_matrix'] = get_view_matrix(
                    )
                    session.update_user_metadata(local_user_metadata)
                    porcelain.update_user_metadata(session.repository, local_user_metadata)


class SessionStatusUpdate(Timer):
    def __init__(self, timout=1):
        super().__init__(timout)
    def __init__(self, timeout=1):
        super().__init__(timeout)

    def execute(self):
        refresh_sidebar_view()


class SessionUserSync(Timer):
    def __init__(self, timout=1):
        super().__init__(timout)
    def __init__(self, timeout=1):
        super().__init__(timeout)
        self.settings = utils.get_preferences()

    def execute(self):
@@ -367,12 +359,12 @@ class SessionUserSync(Timer):


class MainThreadExecutor(Timer):
    def __init__(self, timout=1, execution_queue=None):
        super().__init__(timout)
    def __init__(self, timeout=1, execution_queue=None):
        super().__init__(timeout)
        self.execution_queue = execution_queue

    def execute(self):
        while not self.execution_queue.empty():
            function, kwargs = self.execution_queue.get()
            logging.debug(f"Executing {function.__name__}")
            function(**kwargs)
            function(**kwargs)
multi_user/ui.py
@@ -26,9 +26,10 @@ from replication.constants import (ADDED, ERROR, FETCHED,
                                   STATE_INITIAL, STATE_SRV_SYNC,
                                   STATE_WAITING, STATE_QUITTING,
                                   STATE_LOBBY,
                                   STATE_LAUNCHING_SERVICES)
                                   CONNECTING)
from replication import __version__
from replication.interface import session
from .timers import registry

ICONS_PROP_STATES = ['TRIA_DOWN',  # ADDED
                     'TRIA_UP',  # COMMITED
@@ -70,9 +71,9 @@ class SESSION_PT_settings(bpy.types.Panel):

    def draw_header(self, context):
        layout = self.layout
        if session and session.state['STATE'] != STATE_INITIAL:
        if session and session.state != STATE_INITIAL:
            cli_state = session.state
            state = session.state.get('STATE')
            state = session.state
            connection_icon = "KEYTYPE_MOVING_HOLD_VEC"

            if state == STATE_ACTIVE:
@@ -80,7 +81,7 @@ class SESSION_PT_settings(bpy.types.Panel):
            else:
                connection_icon = 'PROP_CON'

            layout.label(text=f"Session - {get_state_str(cli_state['STATE'])}", icon=connection_icon)
            layout.label(text=f"Session - {get_state_str(cli_state)}", icon=connection_icon)
        else:
            layout.label(text=f"Session - v{__version__}",icon="PROP_OFF")

@@ -93,13 +94,13 @@ class SESSION_PT_settings(bpy.types.Panel):
        if hasattr(context.window_manager, 'session'):
            # STATE INITIAL
            if not session \
                    or (session and session.state['STATE'] == STATE_INITIAL):
                    or (session and session.state == STATE_INITIAL):
                pass
            else:
                cli_state = session.state
                progress = session.state_progress
                row = layout.row()

                current_state = cli_state['STATE']
                current_state = session.state
                info_msg = None

                if current_state in [STATE_ACTIVE]:
@@ -123,8 +124,8 @@ class SESSION_PT_settings(bpy.types.Panel):
                if current_state in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]:
                    info_box = row.box()
                    info_box.row().label(text=printProgressBar(
                        cli_state['CURRENT'],
                        cli_state['TOTAL'],
                        progress['current'],
                        progress['total'],
                        length=16
                    ))

@@ -140,7 +141,7 @@ class SESSION_PT_settings_network(bpy.types.Panel):
    @classmethod
    def poll(cls, context):
        return not session \
            or (session and session.state['STATE'] == 0)
            or (session and session.state == 0)

    def draw_header(self, context):
        self.layout.label(text="", icon='URL')
@@ -155,7 +156,13 @@ class SESSION_PT_settings_network(bpy.types.Panel):
        row = layout.row()
        row.prop(runtime_settings, "session_mode", expand=True)
        row = layout.row()

        col = row.row(align=True)
        col.prop(settings, "server_preset_interface", text="")
        col.operator("session.preset_server_add", icon='ADD', text="")
        col.operator("session.preset_server_remove", icon='REMOVE', text="")

        row = layout.row()
        box = row.box()

        if runtime_settings.session_mode == 'HOST':
@@ -167,7 +174,7 @@ class SESSION_PT_settings_network(bpy.types.Panel):
            row.prop(settings, "init_method", text="")
            row = box.row()
            row.label(text="Admin password:")
            row.prop(runtime_settings, "password", text="")
            row.prop(settings, "password", text="")
            row = box.row()
            row.operator("session.start", text="HOST").host = True
        else:
@@ -183,11 +190,10 @@ class SESSION_PT_settings_network(bpy.types.Panel):
            if runtime_settings.admin:
                row = box.row()
                row.label(text="Password:")
                row.prop(runtime_settings, "password", text="")
                row.prop(settings, "password", text="")
            row = box.row()
            row.operator("session.start", text="CONNECT").host = False


class SESSION_PT_settings_user(bpy.types.Panel):
    bl_idname = "MULTIUSER_SETTINGS_USER_PT_panel"
    bl_label = "User info"
@@ -198,7 +204,7 @@ class SESSION_PT_settings_user(bpy.types.Panel):
    @classmethod
    def poll(cls, context):
        return not session \
            or (session and session.state['STATE'] == 0)
            or (session and session.state == 0)

    def draw_header(self, context):
        self.layout.label(text="", icon='USER')
@@ -229,7 +235,7 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
    @classmethod
    def poll(cls, context):
        return not session \
            or (session and session.state['STATE'] == 0)
            or (session and session.state == 0)

    def draw_header(self, context):
        self.layout.label(text="", icon='PREFERENCES')
@@ -250,9 +256,6 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
            emboss=False)

        if settings.sidebar_advanced_net_expanded:
            net_section_row = net_section.row()
            net_section_row.label(text="IPC Port:")
            net_section_row.prop(settings, "ipc_port", text="")
            net_section_row = net_section.row()
            net_section_row.label(text="Timeout (ms):")
            net_section_row.prop(settings, "connection_timeout", text="")
@@ -268,7 +271,6 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
        if settings.sidebar_advanced_rep_expanded:
            replication_section_row = replication_section.row()

            replication_section_row.label(text="Sync flags", icon='COLLECTION_NEW')
            replication_section_row = replication_section.row()
            replication_section_row.prop(settings.sync_flags, "sync_render_settings")
            replication_section_row = replication_section.row()
@@ -281,34 +283,8 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
                warning = replication_section_row.box()
                warning.label(text="Don't use this with heavy meshes !", icon='ERROR')
            replication_section_row = replication_section.row()
            replication_section_row.prop(settings, "depsgraph_update_rate", text="Apply delay")

            replication_section_row.label(text="Update method", icon='RECOVER_LAST')
            replication_section_row = replication_section.row()
            replication_section_row.prop(settings, "update_method", expand=True)
            replication_section_row = replication_section.row()
            replication_timers = replication_section_row.box()
            replication_timers.label(text="Replication timers", icon='TIME')
            if settings.update_method == "DEFAULT":
                replication_timers = replication_timers.row()
                # Replication frequencies
                flow = replication_timers.grid_flow(
                    row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
                line = flow.row(align=True)
                line.label(text=" ")
                line.separator()
                line.label(text="refresh (sec)")
                line.label(text="apply (sec)")

                for item in settings.supported_datablocks:
                    line = flow.row(align=True)
                    line.prop(item, "auto_push", text="", icon=item.icon)
                    line.separator()
                    line.prop(item, "bl_delay_refresh", text="")
                    line.prop(item, "bl_delay_apply", text="")
            else:
                replication_timers = replication_timers.row()
                replication_timers.label(text="Update rate (ms):")
                replication_timers.prop(settings, "depsgraph_update_rate", text="")

        cache_section = layout.row().box()
        cache_section.prop(
@@ -348,7 +324,7 @@ class SESSION_PT_user(bpy.types.Panel):

    @classmethod
    def poll(cls, context):
        return session and session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]
        return session and session.state in [STATE_ACTIVE, STATE_LOBBY]

    def draw_header(self, context):
        self.layout.label(text="", icon='USER')
@@ -379,7 +355,7 @@ class SESSION_PT_user(bpy.types.Panel):
        if active_user != 0 and active_user.username != settings.username:
            row = layout.row()
            user_operations = row.split()
            if session.state['STATE'] == STATE_ACTIVE:
            if session.state == STATE_ACTIVE:

                user_operations.alert = context.window_manager.session.time_snap_running
                user_operations.operator(
@@ -437,7 +413,7 @@ class SESSION_PT_presence(bpy.types.Panel):
    @classmethod
    def poll(cls, context):
        return not session \
            or (session and session.state['STATE'] in [STATE_INITIAL, STATE_ACTIVE])
            or (session and session.state in [STATE_INITIAL, STATE_ACTIVE])

    def draw_header(self, context):
        self.layout.prop(context.window_manager.session,
@@ -467,8 +443,8 @@ class SESSION_PT_presence(bpy.types.Panel):
def draw_property(context, parent, property_uuid, level=0):
    settings = get_preferences()
    runtime_settings = context.window_manager.session
    item = session.get(uuid=property_uuid)

    item = session.repository.graph.get(property_uuid)
    type_id = item.data.get('type_id')
    area_msg = parent.row(align=True)

    if item.state == ERROR:
@@ -479,11 +455,10 @@ def draw_property(context, parent, property_uuid, level=0):
    line = area_msg.box()

    name = item.data['name'] if item.data else item.uuid

    icon = settings.supported_datablocks[type_id].icon if type_id else 'ERROR'
    detail_item_box = line.row(align=True)

    detail_item_box.label(text="",
                          icon=settings.supported_datablocks[item.str_type].icon)
    detail_item_box.label(text="", icon=icon)
    detail_item_box.label(text=f"{name}")

    # Operations
@@ -545,8 +520,8 @@ class SESSION_PT_repository(bpy.types.Panel):
            admin = usr['admin']
        return hasattr(context.window_manager, 'session') and \
            session and \
            (session.state['STATE'] == STATE_ACTIVE or \
             session.state['STATE'] == STATE_LOBBY and admin)
            (session.state == STATE_ACTIVE or \
             session.state == STATE_LOBBY and admin)

    def draw_header(self, context):
        self.layout.label(text="", icon='OUTLINER_OB_GROUP_INSTANCE')
@@ -562,43 +537,42 @@ class SESSION_PT_repository(bpy.types.Panel):

        row = layout.row()

        if session.state['STATE'] == STATE_ACTIVE:
            flow = layout.grid_flow(
                row_major=True,
                columns=0,
                even_columns=True,
                even_rows=False,
                align=True)
        if session.state == STATE_ACTIVE:
            if 'SessionBackupTimer' in registry:
                row.alert = True
                row.operator('session.cancel_autosave', icon="CANCEL")
                row.alert = False
            else:
                row.operator('session.save', icon="FILE_TICK")

            for item in settings.supported_datablocks:
                col = flow.column(align=True)
                col.prop(item, "use_as_filter", text="", icon=item.icon)

            row = layout.row(align=True)
            row.prop(runtime_settings, "filter_owned", text="Show only owned")

            row = layout.row(align=True)
            box = layout.box()
            row = box.row()
            row.prop(runtime_settings, "filter_owned", text="Show only owned Nodes", icon_only=True, icon="DECORATE_UNLOCKED")
            row = box.row()
            row.prop(runtime_settings, "filter_name", text="Filter")
            row = box.row()

            # Properties
            types_filter = [t.type_name for t in settings.supported_datablocks
                            if t.use_as_filter]
            owned_nodes = [k for k, v in session.repository.graph.items() if v.owner==settings.username]

            key_to_filter = session.list(
                filter_owner=settings.username) if runtime_settings.filter_owned else session.list()
            filtered_node = owned_nodes if runtime_settings.filter_owned else session.repository.graph.keys()

            client_keys = [key for key in key_to_filter
                           if session.get(uuid=key).str_type
                           in types_filter]
            if runtime_settings.filter_name:
                for node_id in filtered_node:
                    node_instance = session.repository.graph.get(node_id)
                    name = node_instance.data.get('name')
                    if runtime_settings.filter_name not in name:
                        filtered_node.remove(node_id)

            if client_keys:
            if filtered_node:
                col = layout.column(align=True)
                for key in client_keys:
                for key in filtered_node:
                    draw_property(context, col, key)

            else:
                row.label(text="Empty")
                layout.row().label(text="Empty")

        elif session.state['STATE'] == STATE_LOBBY and usr and usr['admin']:
        elif session.state == STATE_LOBBY and usr and usr['admin']:
            row.operator("session.init", icon='TOOL_SETTINGS', text="Init")
        else:
            row.label(text="Waiting to start")

@@ -36,7 +36,7 @@ from replication.constants import (STATE_ACTIVE, STATE_AUTH,
                                   STATE_INITIAL, STATE_SRV_SYNC,
                                   STATE_WAITING, STATE_QUITTING,
                                   STATE_LOBBY,
                                   STATE_LAUNCHING_SERVICES)
                                   CONNECTING)


def find_from_attr(attr_name, attr_value, list):
@@ -65,6 +65,15 @@ def get_datablock_users(datablock):
    return users


def flush_history():
    try:
        logging.debug("Flushing history")
        for i in range(bpy.context.preferences.edit.undo_steps+1):
            bpy.ops.ed.undo_push(message="Multiuser history flush")
    except RuntimeError:
        logging.error("Fail to overwrite history")


def get_state_str(state):
    state_str = 'UNKOWN'
    if state == STATE_WAITING:
@@ -83,7 +92,7 @@ def get_state_str(state):
        state_str = 'OFFLINE'
    elif state == STATE_QUITTING:
        state_str = 'QUITTING'
    elif state == STATE_LAUNCHING_SERVICES:
    elif state == CONNECTING:
        state_str = 'LAUNCHING SERVICES'
    elif state == STATE_LOBBY:
        state_str = 'LOBBY'
@@ -92,11 +101,17 @@ def get_state_str(state):


def clean_scene():
    for type_name in dir(bpy.data):
    to_delete = [f for f in dir(bpy.data) if f not in ['brushes', 'palettes']]
    for type_name in to_delete:
        try:
            sub_collection_to_avoid = [bpy.data.linestyles['LineStyle'], bpy.data.materials['Dots Stroke']]
            type_collection = getattr(bpy.data, type_name)
            for item in type_collection:
                type_collection.remove(item)
            items_to_remove = [i for i in type_collection if i not in sub_collection_to_avoid]
            for item in items_to_remove:
                try:
                    type_collection.remove(item)
                except:
                    continue
        except:
            continue

@@ -1,4 +1,4 @@
import re

init_py = open("multi_user/__init__.py").read()
init_py = open("multi_user/libs/replication/replication/__init__.py").read()
print(re.search("\d+\.\d+\.\d+\w\d+|\d+\.\d+\.\d+", init_py).group(0))

@@ -13,7 +13,7 @@ def main():
    if len(sys.argv) > 2:
        blender_rev = sys.argv[2]
    else:
        blender_rev = "2.91.0"
        blender_rev = "2.93.0"

    try:
        exit_val = BAT.test_blender_addon(addon_path=addon, blender_revision=blender_rev)

@@ -8,6 +8,7 @@ import random
from multi_user.bl_types.bl_action import BlAction

INTERPOLATION = ['CONSTANT', 'LINEAR', 'BEZIER', 'SINE', 'QUAD', 'CUBIC', 'QUART', 'QUINT', 'EXPO', 'CIRC', 'BACK', 'BOUNCE', 'ELASTIC']
FMODIFIERS = ['GENERATOR', 'FNGENERATOR', 'ENVELOPE', 'CYCLES', 'NOISE', 'LIMITS', 'STEPPED']

# @pytest.mark.parametrize('blendname', ['test_action.blend'])
def test_action(clear_blend):
@@ -22,17 +23,20 @@ def test_action(clear_blend):
        point.co[1] = random.randint(-10,10)
        point.interpolation = INTERPOLATION[random.randint(0, len(INTERPOLATION)-1)]

    for mod_type in FMODIFIERS:
        fcurve_sample.modifiers.new(mod_type)

    bpy.ops.mesh.primitive_plane_add()
    bpy.data.objects[0].animation_data_create()
    bpy.data.objects[0].animation_data.action = datablock

    # Test
    implementation = BlAction()
    expected = implementation._dump(datablock)
    expected = implementation.dump(datablock)
    bpy.data.actions.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)
    test = implementation.construct(expected)
    implementation.load(expected, test)
    result = implementation.dump(test)

    assert not DeepDiff(expected, result)

|
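This hunk sets the pattern for every bl_types test below: the same mechanical rename of the replication interface (`_dump`/`_load`/`_construct` becoming `dump`/`load`/`construct`) applied to one shared round-trip check, condensed here for reference:

```python
# Shared round-trip shape of the bl_types tests (BlAction shown):
implementation = BlAction()
expected = implementation.dump(datablock)   # serialize the datablock
bpy.data.actions.remove(datablock)          # discard the original
test = implementation.construct(expected)   # rebuild an empty datablock
implementation.load(expected, test)         # restore its contents
result = implementation.dump(test)
assert not DeepDiff(expected, result)       # the round trip must be lossless
```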
@@ -12,11 +12,11 @@ def test_armature(clear_blend):
     datablock = bpy.data.armatures[0]

     implementation = BlArmature()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.armatures.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -15,11 +15,11 @@ def test_camera(clear_blend, camera_type):
     datablock.type = camera_type

     camera_dumper = BlCamera()
-    expected = camera_dumper._dump(datablock)
+    expected = camera_dumper.dump(datablock)
     bpy.data.cameras.remove(datablock)

-    test = camera_dumper._construct(expected)
-    camera_dumper._load(expected, test)
-    result = camera_dumper._dump(test)
+    test = camera_dumper.construct(expected)
+    camera_dumper.load(expected, test)
+    result = camera_dumper.dump(test)

     assert not DeepDiff(expected, result)
@@ -23,11 +23,11 @@ def test_collection(clear_blend):

     # Test
     implementation = BlCollection()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.collections.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -19,11 +19,11 @@ def test_curve(clear_blend, curve_type):
     datablock = bpy.data.curves[0]

     implementation = BlCurve()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.curves.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -13,11 +13,11 @@ def test_gpencil(clear_blend):
     datablock = bpy.data.grease_pencils[0]

     implementation = BlGpencil()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.grease_pencils.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -13,11 +13,11 @@ def test_lattice(clear_blend):
     datablock = bpy.data.lattices[0]

     implementation = BlLattice()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.lattices.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -14,11 +14,11 @@ def test_lightprobes(clear_blend, lightprobe_type):

     blender_light = bpy.data.lightprobes[0]
     lightprobe_dumper = BlLightprobe()
-    expected = lightprobe_dumper._dump(blender_light)
+    expected = lightprobe_dumper.dump(blender_light)
     bpy.data.lightprobes.remove(blender_light)

-    test = lightprobe_dumper._construct(expected)
-    lightprobe_dumper._load(expected, test)
-    result = lightprobe_dumper._dump(test)
+    test = lightprobe_dumper.construct(expected)
+    lightprobe_dumper.load(expected, test)
+    result = lightprobe_dumper.dump(test)

     assert not DeepDiff(expected, result)
@@ -13,11 +13,11 @@ def test_light(clear_blend, light_type):

     blender_light = bpy.data.lights[0]
     light_dumper = BlLight()
-    expected = light_dumper._dump(blender_light)
+    expected = light_dumper.dump(blender_light)
     bpy.data.lights.remove(blender_light)

-    test = light_dumper._construct(expected)
-    light_dumper._load(expected, test)
-    result = light_dumper._dump(test)
+    test = light_dumper.construct(expected)
+    light_dumper.load(expected, test)
+    result = light_dumper.dump(test)

     assert not DeepDiff(expected, result)
@@ -8,7 +8,7 @@ from multi_user.bl_types.bl_material import BlMaterial


 def test_material_nodes(clear_blend):
-    nodes_types = [node.bl_rna.identifier for node in bpy.types.ShaderNode.__subclasses__()]
+    nodes_types = [node.bl_rna.identifier for node in bpy.types.ShaderNode.__subclasses__()]  # do it a bit like here

     datablock = bpy.data.materials.new("test")
     datablock.use_nodes = True
@@ -17,12 +17,12 @@ def test_material_nodes(clear_blend):
         datablock.node_tree.nodes.new(ntype)

     implementation = BlMaterial()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.materials.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)

@@ -32,11 +32,11 @@ def test_material_gpencil(clear_blend):
     bpy.data.materials.create_gpencil_data(datablock)

     implementation = BlMaterial()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.materials.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -18,11 +18,11 @@ def test_mesh(clear_blend, mesh_type):

     # Test
     implementation = BlMesh()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.meshes.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -13,11 +13,11 @@ def test_metaball(clear_blend, metaballs_type):

     datablock = bpy.data.metaballs[0]
     dumper = BlMetaball()
-    expected = dumper._dump(datablock)
+    expected = dumper.dump(datablock)
     bpy.data.metaballs.remove(datablock)

-    test = dumper._construct(expected)
-    dumper._load(expected, test)
-    result = dumper._dump(test)
+    test = dumper.construct(expected)
+    dumper.load(expected, test)
+    result = dumper.dump(test)

     assert not DeepDiff(expected, result)
@@ -7,7 +7,7 @@ import bpy
 import random
 from multi_user.bl_types.bl_object import BlObject

-# Removed 'BUILD' modifier because the seed doesn't seems to be
+# Removed 'BUILD', 'SOFT_BODY' modifier because the seed doesn't seems to be
 # correctly initialized (#TODO: report the bug)
 MOFIFIERS_TYPES = [
     'DATA_TRANSFER', 'MESH_CACHE', 'MESH_SEQUENCE_CACHE',
@@ -22,8 +22,7 @@ MOFIFIERS_TYPES = [
     'MESH_DEFORM', 'SHRINKWRAP', 'SIMPLE_DEFORM', 'SMOOTH',
     'CORRECTIVE_SMOOTH', 'LAPLACIANSMOOTH', 'SURFACE_DEFORM',
     'WARP', 'WAVE', 'CLOTH', 'COLLISION', 'DYNAMIC_PAINT',
-    'EXPLODE', 'FLUID', 'OCEAN', 'PARTICLE_INSTANCE',
-    'SOFT_BODY', 'SURFACE']
+    'EXPLODE', 'FLUID', 'OCEAN', 'PARTICLE_INSTANCE', 'SURFACE']

 GP_MODIFIERS_TYPE = [
     'GP_ARRAY', 'GP_BUILD', 'GP_MIRROR', 'GP_MULTIPLY',
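`SOFT_BODY` joins `BUILD` on the excluded list for the same seed-initialization issue flagged in the comment above. The list is presumably consumed along these lines (a sketch; the actual loop sits outside the visible hunks):

```python
# Hypothetical reconstruction of how test_object exercises the list.
for mod_type in MOFIFIERS_TYPES:
    datablock.modifiers.new(mod_type.lower(), mod_type)
```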
@@ -66,11 +65,11 @@ def test_object(clear_blend):
     datablock.shape_key_add(name='shape2')

     implementation = BlObject()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.objects.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
-
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)
+    print(DeepDiff(expected, result))
     assert not DeepDiff(expected, result)
@@ -11,15 +11,16 @@ from multi_user.utils import get_preferences
 def test_scene(clear_blend):
     get_preferences().sync_flags.sync_render_settings = True

-    datablock = bpy.data.scenes.new("toto")
+    # datablock = bpy.data.scenes.new("toto") # TODO: find the datablock -> enable compositing 'Use nodes'
+    datablock = bpy.data.scenes["Scene"].use_nodes
     datablock.view_settings.use_curve_mapping = True
     # Test
     implementation = BlScene()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.scenes.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -12,11 +12,11 @@ def test_speaker(clear_blend):
     datablock = bpy.data.speakers[0]

     implementation = BlSpeaker()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.speakers.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -14,11 +14,11 @@ def test_texture(clear_blend, texture_type):
     datablock = bpy.data.textures.new('test', texture_type)

     implementation = BlTexture()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.textures.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -11,11 +11,11 @@ def test_volume(clear_blend):
     datablock = bpy.data.volumes.new("Test")

     implementation = BlVolume()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.volumes.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -12,11 +12,11 @@ def test_world(clear_blend):
     datablock.use_nodes = True

     implementation = BlWorld()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.worlds.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)