Compare commits
218-ui-ux-...234-user-i (223 commits)
.gitignore
@@ -13,4 +13,5 @@ multi_user_updater/
 _build
 
 # ignore generated zip generated from blender_addon_tester
 *.zip
+libs
@@ -8,3 +8,5 @@ build:
     name: multi_user
     paths:
       - multi_user
+  variables:
+    GIT_SUBMODULE_STRATEGY: recursive

@@ -5,6 +5,7 @@ deploy:
   variables:
     DOCKER_DRIVER: overlay2
     DOCKER_TLS_CERTDIR: "/certs"
+    GIT_SUBMODULE_STRATEGY: recursive
 
   services:
     - docker:19.03.12-dind

@@ -3,3 +3,5 @@ test:
   image: slumber/blender-addon-testing:latest
   script:
     - python3 scripts/test_addon.py
+  variables:
+    GIT_SUBMODULE_STRATEGY: recursive
.gitmodules
@@ -0,0 +1,3 @@
+[submodule "multi_user/libs/replication"]
+	path = multi_user/libs/replication
+	url = https://gitlab.com/slumber/replication.git
CHANGELOG.md
@@ -186,4 +186,34 @@ All notable changes to this project will be documented in this file.
 - Exception access violation during Undo/Redo
 - Sync missing armature bone Roll
 - Sync missing driver data_path
 - Constraint replication
+
+## [0.4.0] - 2021-07-20
+
+### Added
+
+- Connection preset system (@Kysios)
+- Display connected users active mode (users pannel and viewport) (@Kysios)
+- Delta-based replication
+- Sync timeline marker
+- Sync images settings (@Kysios)
+- Sync parent relation type (@Kysios)
+- Sync uv project modifier
+- Sync FCurves modifiers
+
+### Changed
+
+- User selection optimizations (draw and sync) (@Kysios)
+- Improved shapekey syncing performances
+- Improved gpencil syncing performances
+- Integrate replication as a submodule
+- The dependencies are now installed in a folder(blender addon folder) that no longer requires administrative rights
+- Presence overlay UI optimization (@Kysios)
+
+### Fixed
+
+- User selection bounding box glitches for non-mesh objects (@Kysios)
+- Transforms replication for animated objects
+- GPencil fill stroke
+- Sculpt and GPencil brushes deleted when joining a session (@Kysios)
+- Auto-updater doesn't work for master and develop builds
README.md
@@ -11,9 +11,8 @@ This tool aims to allow multiple users to work on the same scene over the networ
 
 ## Quick installation
 
-1. Download latest release [multi_user.zip](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build).
-2. Run blender as administrator (dependencies installation).
-3. Install last_version.zip from your addon preferences.
+1. Download [latest build](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/develop/download?job=build) or [stable build](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build).
+2. Install last_version.zip from your addon preferences.
 
 [Dependencies](#dependencies) will be automatically added to your blender python during installation.
 
@@ -29,35 +28,35 @@ See the [troubleshooting guide](https://slumber.gitlab.io/multi-user/getting_sta
 
 Currently, not all data-block are supported for replication over the wire. The following list summarizes the status for each ones.
 
 | Name | Status | Comment |
-| -------------- | :----: | :----------------------------------------------------------: |
+| -------------- | :----: | :---------------------------------------------------------------------: |
 | action | ✔️ | |
-| armature | ❗ | Not stable |
-| camera | ✔️ | |
-| collection | ✔️ | |
-| curve | ❗ | Nurbs surfaces not supported |
-| gpencil | ✔️ | |
-| image | ✔️ | |
-| mesh | ✔️ | |
-| material | ✔️ | |
-| node_groups | ❗ | Material & Geometry only |
-| geometry nodes | ✔️ | |
-| metaball | ✔️ | |
-| object | ✔️ | |
-| textures | ❗ | Supported for modifiers/materials/geo nodes only |
-| texts | ✔️ | |
-| scene | ✔️ | |
-| world | ✔️ | |
-| lightprobes | ✔️ | |
-| compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
-| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
-| nla | ❌ | |
-| volumes | ✔️ | |
-| particles | ❗ | The cache isn't syncing. |
-| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
-| vse | ❗ | Mask and Clip not supported yet |
-| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
-| libraries | ❗ | Partial |
+| camera | ✔️ | |
+| collection | ✔️ | |
+| gpencil | ✔️ | |
+| image | ✔️ | |
+| mesh | ✔️ | |
+| material | ✔️ | |
+| node_groups | ✔️ | Material & Geometry only |
+| geometry nodes | ✔️ | |
+| metaball | ✔️ | |
+| object | ✔️ | |
+| texts | ✔️ | |
+| scene | ✔️ | |
+| world | ✔️ | |
+| volumes | ✔️ | |
+| lightprobes | ✔️ | |
+| physics | ✔️ | |
+| textures | ✔️ | |
+| curve | ❗ | Nurbs surfaces not supported |
+| armature | ❗ | Only for Mesh. [Planned for GPencil](https://gitlab.com/slumber/multi-user/-/issues/161). Not stable yet |
+| particles | ❗ | The cache isn't syncing. |
+| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
+| vse | ❗ | Mask and Clip not supported yet |
+| libraries | ❌ | |
+| nla | ❌ | |
+| texts | ❌ | [Planned for v0.5.0](https://gitlab.com/slumber/multi-user/-/issues/81) |
+| compositing | ❌ | [Planned for v0.5.0](https://gitlab.com/slumber/multi-user/-/issues/46) |
 
 
@@ -19,10 +19,10 @@ import sys
 
 project = 'multi-user'
 copyright = '2020, Swann Martinez'
-author = 'Swann Martinez, with contributions from Poochy'
+author = 'Swann Martinez, Poochy, Fabian'
 
 # The full version, including alpha/beta/rc tags
-release = '0.2.0'
+release = '0.5.0-develop'
 
 
 # -- General configuration ---------------------------------------------------
Binary image changes: six existing documentation images updated (12→15 KiB, 15→22 KiB, 17→18 KiB, 14→20 KiB, 70→365 KiB, 18→26 KiB).
New files:
docs/getting_started/img/server_preset_exemple.gif (320 KiB)
docs/getting_started/img/server_preset_image_add.png (7.3 KiB)
docs/getting_started/img/server_preset_image_admin.png (4.2 KiB)
docs/getting_started/img/server_preset_image_normal_server.png (9.0 KiB)
docs/getting_started/img/server_preset_image_report.png (3.2 KiB)
@@ -108,36 +108,69 @@ Before starting make sure that you have access to the session IP address and por
 1. Fill in your user information
 --------------------------------
 
-Follow the user-info_ section for this step.
+Joining a server
+=======================
 
-----------------
-2. Network setup
-----------------
+--------------
+Network setup
+--------------
 
 In the network panel, select **JOIN**.
 The **join sub-panel** (see image below) allows you to configure your client to join a
 collaborative session which is already hosted.
 
-.. figure:: img/quickstart_join.png
+.. figure:: img/server_preset_image_normal_server.png
    :align: center
-   :alt: Connect menu
+   :width: 200px
 
-   Connection panel
+   Connection pannel
 
 Fill in the fields with your information:
 
 - **IP**: the host's IP address.
 - **Port**: the host's port number.
-- **Connect as admin**: connect yourself with **admin rights** (see :ref:`admin` ) to the session.
-
-.. Maybe something more explicit here
-
-.. note::
-   Additional configuration settings can be found in the :ref:`advanced` section.
 
 Once you've configured every field, hit the button **CONNECT** to join the session !
 When the :ref:`session-status` is **ONLINE** you are online and ready to start co-creating.
 
+.. note::
+
+   If you want to have **administrator rights** (see :ref:`admin` ) on the server, just enter the password created by the host in the **Connect as admin** section
+
+.. figure:: img/server_preset_image_admin.png
+   :align: center
+   :width: 200px
+
+   Admin password
+
+---------------
+Server presets
+---------------
+
+You can save your server presets in a preset list below the 'JOIN' and 'HOST' buttons. This allows you to quickly access and manage your servers.
+
+To add a server, first enter the ip address and the port (plus the password if needed), then click on the + icon to add a name to your preset. To remove a server from the list, select it and click on the - icon.
+
+.. figure:: img/server_preset_exemple.gif
+   :align: center
+   :width: 200px
+
+.. warning:: Be careful, if you don't rename your new preset, or if it has the same name as an existing preset, the old preset will be overwritten.
+
+.. figure:: img/server_preset_image_report.png
+   :align: center
+   :width: 200px
+
+.. note::
+
+   Two presets are already present when the addon is launched:
+
+   - The 'localhost' preset, to host and join a local session quickly
+   - The 'public session' preset, to join the public sessions of the multi-user server (official discord to participate : https://discord.gg/aBPvGws)
+
+.. note::
+   Additional configuration settings can be found in the :ref:`advanced` section.
+
 .. note::
    When starting a **dedicated server**, the session status screen will take you to the **LOBBY**, awaiting an admin to start the session.
 

@@ -182,8 +215,10 @@ One of the most vital tools is the **Online user panel**. It lists all connected
 users' information including your own:
 
 * **Role** : if a user is an admin or a regular user.
-* **Location**: Where the user is actually working.
+* **Username** : Name of the user.
+* **Mode** : User's active editing mode (edit_mesh, paint,etc.).
 * **Frame**: When (on which frame) the user is working.
+* **Location**: Where the user is actually working.
 * **Ping**: user's connection delay in milliseconds
 
 .. figure:: img/quickstart_users.png

@@ -240,6 +275,7 @@ it draw users' related information in your viewport such as:
 
 * Username
 * User point of view
+* User active mode
 * User selection
 
 .. figure:: img/quickstart_presence.png

@@ -374,15 +410,6 @@ Network
 
 Advanced network settings
 
-**IPC Port** is the port used for Inter Process Communication. This port is used
-by the multi-user subprocesses to communicate with each other. If different instances
-of multi-user are using the same IPC port, this will create conflict !
-
-.. note::
-   You only need to modify this setting if you need to launch multiple clients from the same
-   computer (or if you try to host and join from the same computer). To resolve this, you simply need to enter a different
-   **IPC port** for each blender instance.
-
 **Timeout (in milliseconds)** is the maximum ping authorized before auto-disconnecting.
 You should only increase it if you have a bad connection.
 
@@ -76,7 +76,7 @@ Hit 'Create a network'(see image below) and go to the network settings.
    :align: center
    :width: 450px
 
-   Network page
+   Admin password
 
 Now that the network is created, let's configure it.
 

@@ -212,14 +212,14 @@ You can run the dedicated server on any platform by following these steps:
 
 .. code-block:: bash
 
-   replication.server
+   replication.serve
 
 .. hint::
    You can also specify a custom **port** (-p), **timeout** (-t), **admin password** (-pwd), **log level (ERROR, WARNING, INFO or DEBUG)** (-l) and **log file** (-lf) with the following optional arguments
 
 .. code-block:: bash
 
-   replication.server -p 5555 -pwd admin -t 5000 -l INFO -lf server.log
+   replication.serve -p 5555 -pwd admin -t 5000 -l INFO -lf server.log
 
 Here, for example, a server is instantiated on port 5555, with password 'admin', a 5 second timeout, and logging enabled.
 

@@ -562,7 +562,7 @@ The default Docker image essentially runs the equivalent of:
 
 .. code-block:: bash
 
-   replication.server -pwd admin -p 5555 -t 5000 -l DEBUG -lf multiuser_server.log
+   replication.serve -pwd admin -p 5555 -t 5000 -l DEBUG -lf multiuser_server.log
 
 This means the server will be launched with 'admin' as the administrator password, run on ports 5555:5558, use a timeout of 5 seconds, verbose 'DEBUG' log level, and with log files written to 'multiuser_server.log'. See :ref:`cmd-line` for a description of optional parameters.
 

@@ -572,7 +572,7 @@ For example, I would like to launch my server with a different administrator pas
 
 .. code-block:: bash
 
-   python3 -m replication.server -pwd supersecretpassword -p 5555 -t 3000 -l DEBUG -lf logname.log
+   replication.serve -pwd supersecretpassword -p 5555 -t 3000 -l DEBUG -lf logname.log
 
 Now, my configuration should look like this:
 

@@ -691,7 +691,7 @@ We're finally ready to launch the server. Simply run:
 
 .. code-block:: bash
 
-   python3 -m replication.server -p 5555 -pwd admin -t 5000 -l INFO -lf server.log
+   replication.serve -p 5555 -pwd admin -t 5000 -l INFO -lf server.log
 
 See :ref:`cmd-line` for a description of optional parameters
 
@@ -19,7 +19,7 @@
 bl_info = {
     "name": "Multi-User",
     "author": "Swann Martinez",
-    "version": (0, 3, 0),
+    "version": (0, 4, 0),
     "description": "Enable real-time collaborative workflow inside blender",
     "blender": (2, 82, 0),
     "location": "3D View > Sidebar > Multi-User tab",

@@ -43,13 +43,10 @@ from bpy.app.handlers import persistent
 from . import environment
 
 
-DEPENDENCIES = {
-    ("replication", '0.1.26'),
-}
 
 
 module_error_msg = "Insufficient rights to install the multi-user \
     dependencies, aunch blender with administrator rights."
 
 
 def register():
     # Setup logging policy
     logging.basicConfig(

@@ -58,16 +55,13 @@ def register():
         level=logging.INFO)
 
     try:
-        if bpy.app.version[1] >= 91:
-            python_binary_path = sys.executable
-        else:
-            python_binary_path = bpy.app.binary_path_python
-
-        environment.setup(DEPENDENCIES, python_binary_path)
+        environment.register()
 
         from . import presence
         from . import operators
+        from . import handlers
         from . import ui
+        from . import icons
         from . import preferences
         from . import addon_updater_ops
 

@@ -75,7 +69,9 @@ def register():
         addon_updater_ops.register(bl_info)
         presence.register()
         operators.register()
+        handlers.register()
         ui.register()
+        icons.register()
     except ModuleNotFoundError as e:
         raise Exception(module_error_msg)
         logging.error(module_error_msg)

@@ -89,21 +85,28 @@ def register():
         type=preferences.SessionUser
     )
     bpy.types.WindowManager.user_index = bpy.props.IntProperty()
+    bpy.types.WindowManager.server_index = bpy.props.IntProperty()
     bpy.types.TOPBAR_MT_file_import.append(operators.menu_func_import)
+    bpy.types.TOPBAR_MT_file_export.append(operators.menu_func_export)
 
 
 def unregister():
     from . import presence
     from . import operators
+    from . import handlers
     from . import ui
+    from . import icons
     from . import preferences
     from . import addon_updater_ops
 
     bpy.types.TOPBAR_MT_file_import.remove(operators.menu_func_import)
+    bpy.types.TOPBAR_MT_file_export.remove(operators.menu_func_export)
 
     presence.unregister()
     addon_updater_ops.unregister()
     ui.unregister()
+    icons.unregister()
+    handlers.unregister()
     operators.unregister()
     preferences.unregister()
 

@@ -111,3 +114,6 @@ def unregister():
     del bpy.types.ID.uuid
     del bpy.types.WindowManager.online_users
     del bpy.types.WindowManager.user_index
+    del bpy.types.WindowManager.server_index
+
+    environment.unregister()
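The register() path above drops the old in-place dependency bootstrap (environment.setup with a pinned replication version) in favour of environment.register(); per the CHANGELOG entry earlier in this compare, dependencies now land in the addon folder rather than Blender's own Python. That module is not part of this diff, so the following is only a hedged sketch of what an addon-local install can look like, not the actual environment.py:

    # Hedged sketch only: one way to vendor a dependency next to the addon so
    # that no administrator rights are needed (the real environment.py is not
    # shown in this compare; the function name is illustrative).
    import os
    import subprocess
    import sys

    def install_addon_local(package: str, version: str) -> None:
        # Install into a "libs" folder beside this file instead of Blender's
        # site-packages.
        target = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
        # Blender 2.91+ exposes its bundled interpreter as sys.executable.
        subprocess.run(
            [sys.executable, "-m", "pip", "install",
             f"{package}=={version}", "--target", target],
            check=True,
        )
        if target not in sys.path:
            sys.path.insert(0, target)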
@@ -1688,10 +1688,7 @@ class GitlabEngine(object):
         # Could clash with tag names and if it does, it will
         # download TAG zip instead of branch zip to get
         # direct path, would need.
-        return "{}{}{}".format(
-            self.form_repo_url(updater),
-            "/repository/archive.zip?sha=",
-            branch)
+        return f"https://gitlab.com/slumber/multi-user/-/jobs/artifacts/{branch}/download?job=build"
 
     def get_zip_url(self, sha, updater):
         return "{base}/repository/archive.zip?sha={sha}".format(
@@ -28,7 +28,6 @@ __all__ = [
     'bl_light',
     'bl_scene',
     'bl_material',
-    'bl_library',
     'bl_armature',
     'bl_action',
     'bl_world',

@@ -39,7 +38,6 @@ __all__ = [
     'bl_font',
     'bl_sound',
     'bl_file',
-    # 'bl_sequencer',
     'bl_node_group',
     'bl_texture',
     "bl_particle",

@@ -49,8 +47,18 @@ if bpy.app.version[1] >= 91:
     __all__.append('bl_volume')
 
 from . import *
-from replication.data import ReplicatedDataFactory
 
 def types_to_register():
     return __all__
 
+from replication.protocol import DataTranslationProtocol
+
+def get_data_translation_protocol()-> DataTranslationProtocol:
+    """ Return a data translation protocol from implemented bpy types
+    """
+    bpy_protocol = DataTranslationProtocol()
+    for module_name in __all__:
+        impl = globals().get(module_name)
+        if impl and hasattr(impl, "_type") and hasattr(impl, "_type"):
+            bpy_protocol.register_implementation(impl._type, impl._class)
+    return bpy_protocol
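The new factory above pairs each bl_* module's module-level _type / _class attributes (added at the bottom of the datablock implementations later in this compare) with the replication protocol. A minimal, self-contained sketch of that registration pattern, using a stand-in module and a stand-in protocol class so it runs outside Blender; only register_implementation is taken from what this diff shows:

    # Sketch of the pattern get_data_translation_protocol() relies on.
    # FakeProtocol and bl_example are invented stand-ins, not project code.
    from types import SimpleNamespace

    class FakeProtocol:
        """Stand-in for replication.protocol.DataTranslationProtocol (assumed API)."""
        def __init__(self):
            self.implementations = {}

        def register_implementation(self, bpy_type, impl_class):
            self.implementations[bpy_type] = impl_class

    # Every bl_* module in this diff now ends with module-level `_type` and `_class`.
    bl_example = SimpleNamespace(_type="bpy.types.Action", _class="BlAction")

    protocol = FakeProtocol()
    for module in (bl_example,):
        if hasattr(module, "_type"):
            protocol.register_implementation(module._type, module._class)

    print(protocol.implementations)  # {'bpy.types.Action': 'BlAction'}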
@@ -25,8 +25,8 @@ from enum import Enum
 from .. import utils
 from .dump_anything import (
     Dumper, Loader, np_dump_collection, np_load_collection, remove_items_from_dict)
-from .bl_datablock import BlDatablock
-
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid
 
 KEYFRAME = [
     'amplitude',

@@ -41,6 +41,66 @@ KEYFRAME = [
     'interpolation',
 ]
 
+
+def has_action(datablock):
+    """ Check if the datablock datablock has actions
+    """
+    return (hasattr(datablock, 'animation_data')
+            and datablock.animation_data
+            and datablock.animation_data.action)
+
+
+def has_driver(datablock):
+    """ Check if the datablock datablock is driven
+    """
+    return (hasattr(datablock, 'animation_data')
+            and datablock.animation_data
+            and datablock.animation_data.drivers)
+
+
+def dump_driver(driver):
+    dumper = Dumper()
+    dumper.depth = 6
+    data = dumper.dump(driver)
+
+    return data
+
+
+def load_driver(target_datablock, src_driver):
+    loader = Loader()
+    drivers = target_datablock.animation_data.drivers
+    src_driver_data = src_driver['driver']
+    new_driver = drivers.new(src_driver['data_path'], index=src_driver['array_index'])
+
+    # Settings
+    new_driver.driver.type = src_driver_data['type']
+    new_driver.driver.expression = src_driver_data['expression']
+    loader.load(new_driver, src_driver)
+
+    # Variables
+    for src_variable in src_driver_data['variables']:
+        src_var_data = src_driver_data['variables'][src_variable]
+        new_var = new_driver.driver.variables.new()
+        new_var.name = src_var_data['name']
+        new_var.type = src_var_data['type']
+
+        for src_target in src_var_data['targets']:
+            src_target_data = src_var_data['targets'][src_target]
+            src_id = src_target_data.get('id')
+            if src_id:
+                new_var.targets[src_target].id = utils.resolve_from_id(src_target_data['id'], src_target_data['id_type'])
+            loader.load(new_var.targets[src_target], src_target_data)
+
+    # Fcurve
+    new_fcurve = new_driver.keyframe_points
+    for p in reversed(new_fcurve):
+        new_fcurve.remove(p, fast=True)
+
+    new_fcurve.add(len(src_driver['keyframe_points']))
+
+    for index, src_point in enumerate(src_driver['keyframe_points']):
+        new_point = new_fcurve[index]
+        loader.load(new_point, src_driver['keyframe_points'][src_point])
+
+
 def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
     """ Dump a sigle curve to a dict

@@ -61,7 +121,6 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
         points = fcurve.keyframe_points
         fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)
         fcurve_data['keyframe_points'] = np_dump_collection(points, KEYFRAME)
-
     else:  # Legacy method
         dumper = Dumper()
         fcurve_data["keyframe_points"] = []

@@ -71,6 +130,18 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
                 dumper.dump(k)
             )
 
+    if fcurve.modifiers:
+        dumper = Dumper()
+        dumper.exclude_filter = [
+            'is_valid',
+            'active'
+        ]
+        dumped_modifiers = []
+        for modfifier in fcurve.modifiers:
+            dumped_modifiers.append(dumper.dump(modfifier))
+
+        fcurve_data['modifiers'] = dumped_modifiers
+
     return fcurve_data
 
 

@@ -83,7 +154,7 @@ def load_fcurve(fcurve_data, fcurve):
     :type fcurve: bpy.types.FCurve
     """
     use_numpy = fcurve_data.get('use_numpy')
+    loader = Loader()
     keyframe_points = fcurve.keyframe_points
-
     # Remove all keyframe points

@@ -128,27 +199,91 @@ def load_fcurve(fcurve_data, fcurve):
 
     fcurve.update()
 
+    dumped_fcurve_modifiers = fcurve_data.get('modifiers', None)
+
+    if dumped_fcurve_modifiers:
+        # clear modifiers
+        for fmod in fcurve.modifiers:
+            fcurve.modifiers.remove(fmod)
+
+        # Load each modifiers in order
+        for modifier_data in dumped_fcurve_modifiers:
+            modifier = fcurve.modifiers.new(modifier_data['type'])
+
+            loader.load(modifier, modifier_data)
+    elif fcurve.modifiers:
+        for fmod in fcurve.modifiers:
+            fcurve.modifiers.remove(fmod)
+
+
+def dump_animation_data(datablock):
+    animation_data = {}
+    if has_action(datablock):
+        animation_data['action'] = datablock.animation_data.action.uuid
+    if has_driver(datablock):
+        animation_data['drivers'] = []
+        for driver in datablock.animation_data.drivers:
+            animation_data['drivers'].append(dump_driver(driver))
+
+    return animation_data
+
+
+def load_animation_data(animation_data, datablock):
+    # Load animation data
+    if animation_data:
+        if datablock.animation_data is None:
+            datablock.animation_data_create()
+
+        for d in datablock.animation_data.drivers:
+            datablock.animation_data.drivers.remove(d)
+
+        if 'drivers' in animation_data:
+            for driver in animation_data['drivers']:
+                load_driver(datablock, driver)
+
+        action = animation_data.get('action')
+        if action:
+            action = resolve_datablock_from_uuid(action, bpy.data.actions)
+            datablock.animation_data.action = action
+        elif datablock.animation_data.action:
+            datablock.animation_data.action = None
+
+    # Remove existing animation data if there is not more to load
+    elif hasattr(datablock, 'animation_data') and datablock.animation_data:
+        datablock.animation_data_clear()
+
+
+def resolve_animation_dependencies(datablock):
+    if has_action(datablock):
+        return [datablock.animation_data.action]
+    else:
+        return []
+
+
+class BlAction(ReplicatedDatablock):
+    use_delta = True
 
-class BlAction(BlDatablock):
     bl_id = "actions"
     bl_class = bpy.types.Action
     bl_check_common = False
     bl_icon = 'ACTION_TWEAK'
     bl_reload_parent = False
 
-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.actions.new(data["name"])
 
-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         for dumped_fcurve in data["fcurves"]:
             dumped_data_path = dumped_fcurve["data_path"]
             dumped_array_index = dumped_fcurve["dumped_array_index"]
 
             # create fcurve if needed
-            fcurve = target.fcurves.find(
+            fcurve = datablock.fcurves.find(
                 dumped_data_path, index=dumped_array_index)
             if fcurve is None:
-                fcurve = target.fcurves.new(
+                fcurve = datablock.fcurves.new(
                     dumped_data_path, index=dumped_array_index)
 
             load_fcurve(dumped_fcurve, fcurve)

@@ -156,9 +291,10 @@ class BlAction(BlDatablock):
         id_root = data.get('id_root')
 
         if id_root:
-            target.id_root = id_root
+            datablock.id_root = id_root
 
-    def _dump_implementation(self, data, instance=None):
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         dumper.exclude_filter = [
             'name_full',

@@ -173,11 +309,23 @@ class BlAction(BlDatablock):
             'users'
         ]
         dumper.depth = 1
-        data = dumper.dump(instance)
+        data = dumper.dump(datablock)
 
         data["fcurves"] = []
 
-        for fcurve in instance.fcurves:
+        for fcurve in datablock.fcurves:
             data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True))
 
         return data
+
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.actions)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        return []
+
+_type = bpy.types.Action
+_class = BlAction
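The dump_animation_data / load_animation_data pair added above is the shared animation path that the armature, camera and curve implementations below call from their own dump(), load() and resolve_deps(). A rough sketch of the payload shape it produces, inferred only from the code in this hunk; the concrete values are invented stand-ins and anything Dumper(depth=6) serialises beyond the keys read back by load_driver() is not guaranteed:

    # Rough sketch of the structure dump_animation_data() returns, based solely
    # on the hunk above; values are illustrative placeholders.
    example_animation_data = {
        "action": "a1b2c3-uuid-of-the-action",  # only present when has_action() is true
        "drivers": [                            # only present when has_driver() is true
            {
                "data_path": "location",
                "array_index": 0,
                "driver": {"type": "SCRIPTED", "expression": "frame / 24", "variables": {}},
                "keyframe_points": {},
            },
        ],
    }

    # load_animation_data(example_animation_data, datablock) then recreates the
    # drivers and re-links the action by uuid via resolve_datablock_from_uuid().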
@@ -22,8 +22,9 @@ import mathutils
 
 from .dump_anything import Loader, Dumper
 from .. import presence, operators, utils
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
 
 def get_roll(bone: bpy.types.Bone) -> float:
     """ Compute the actuall roll of a pose bone

@@ -35,17 +36,21 @@ def get_roll(bone: bpy.types.Bone) -> float:
     return bone.AxisRollFromMatrix(bone.matrix_local.to_3x3())[1]
 
 
-class BlArmature(BlDatablock):
+class BlArmature(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "armatures"
     bl_class = bpy.types.Armature
     bl_check_common = False
     bl_icon = 'ARMATURE_DATA'
     bl_reload_parent = False
 
-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.armatures.new(data["name"])
 
-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         # Load parent object
         parent_object = utils.find_from_attr(
             'uuid',

@@ -55,7 +60,7 @@ class BlArmature(BlDatablock):
 
         if parent_object is None:
             parent_object = bpy.data.objects.new(
-                data['user_name'], target)
+                data['user_name'], datablock)
             parent_object.uuid = data['user']
 
         is_object_in_master = (

@@ -90,10 +95,10 @@ class BlArmature(BlDatablock):
         bpy.ops.object.mode_set(mode='EDIT')
 
         for bone in data['bones']:
-            if bone not in target.edit_bones:
-                new_bone = target.edit_bones.new(bone)
+            if bone not in datablock.edit_bones:
+                new_bone = datablock.edit_bones.new(bone)
             else:
-                new_bone = target.edit_bones[bone]
+                new_bone = datablock.edit_bones[bone]
 
             bone_data = data['bones'].get(bone)
 

@@ -104,7 +109,7 @@ class BlArmature(BlDatablock):
             new_bone.roll = bone_data['roll']
 
             if 'parent' in bone_data:
-                new_bone.parent = target.edit_bones[data['bones']
+                new_bone.parent = datablock.edit_bones[data['bones']
                                                     [bone]['parent']]
             new_bone.use_connect = bone_data['use_connect']
 

@@ -119,9 +124,10 @@ class BlArmature(BlDatablock):
         if 'EDIT' in current_mode:
             bpy.ops.object.mode_set(mode='EDIT')
 
-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+        load_animation_data(data.get('animation_data'), datablock)
+
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         dumper.depth = 4
         dumper.include_filter = [

@@ -135,14 +141,14 @@ class BlArmature(BlDatablock):
             'name',
             'layers',
         ]
-        data = dumper.dump(instance)
+        data = dumper.dump(datablock)
 
-        for bone in instance.bones:
+        for bone in datablock.bones:
             if bone.parent:
                 data['bones'][bone.name]['parent'] = bone.parent.name
         # get the parent Object
         # TODO: Use id_data instead
-        object_users = utils.get_datablock_users(instance)[0]
+        object_users = utils.get_datablock_users(datablock)[0]
         data['user'] = object_users.uuid
         data['user_name'] = object_users.name
 

@@ -153,7 +159,25 @@ class BlArmature(BlDatablock):
         data['user_scene'] = [
             item.name for item in container_users if isinstance(item, bpy.types.Scene)]
 
-        for bone in instance.bones:
+        for bone in datablock.bones:
             data['bones'][bone.name]['roll'] = get_roll(bone)
 
+        data['animation_data'] = dump_animation_data(datablock)
         return data
+
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        name = data.get('name')
+        datablock = resolve_datablock_from_uuid(uuid, bpy.data.armatures)
+        if datablock is None:
+            datablock = bpy.data.armatures.get(name)
+
+        return datablock
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        return resolve_animation_dependencies(datablock)
+
+_type = bpy.types.Armature
+_class = BlArmature
@@ -20,47 +20,58 @@ import bpy
 import mathutils
 
 from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
 
 
-class BlCamera(BlDatablock):
+class BlCamera(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "cameras"
     bl_class = bpy.types.Camera
     bl_check_common = False
     bl_icon = 'CAMERA_DATA'
     bl_reload_parent = False
 
-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.cameras.new(data["name"])
 
-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)
 
         dof_settings = data.get('dof')
 
+        load_animation_data(data.get('animation_data'), datablock)
+
         # DOF settings
         if dof_settings:
-            loader.load(target.dof, dof_settings)
+            loader.load(datablock.dof, dof_settings)
 
         background_images = data.get('background_images')
 
-        target.background_images.clear()
+        datablock.background_images.clear()
+        # TODO: Use image uuid
         if background_images:
             for img_name, img_data in background_images.items():
                 img_id = img_data.get('image')
                 if img_id:
-                    target_img = target.background_images.new()
+                    target_img = datablock.background_images.new()
                     target_img.image = bpy.data.images[img_id]
                     loader.load(target_img, img_data)
 
-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
-
-        # TODO: background image support
+                    img_user = img_data.get('image_user')
+                    if img_user:
+                        loader.load(target_img.image_user, img_user)
+
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         dumper.depth = 3
         dumper.include_filter = [

@@ -101,14 +112,37 @@ class BlCamera(BlDatablock):
             'scale',
             'use_flip_x',
             'use_flip_y',
-            'image'
+            'image_user',
+            'image',
+            'frame_duration',
+            'frame_start',
+            'frame_offset',
+            'use_cyclic',
+            'use_auto_refresh'
         ]
-        return dumper.dump(instance)
+        data = dumper.dump(datablock)
+        data['animation_data'] = dump_animation_data(datablock)
 
-    def _resolve_deps_implementation(self):
+        for index, image in enumerate(datablock.background_images):
+            if image.image_user:
+                data['background_images'][index]['image_user'] = dumper.dump(image.image_user)
+        return data
+
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.cameras)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         deps = []
-        for background in self.instance.background_images:
+        for background in datablock.background_images:
             if background.image:
                 deps.append(background.image)
 
+        deps.extend(resolve_animation_dependencies(datablock))
+
         return deps
+
+_type = bpy.types.Camera
+_class = BlCamera
@@ -19,10 +19,12 @@
 import bpy
 import mathutils
 
-from .. import utils
-from .bl_datablock import BlDatablock
-from .dump_anything import Loader, Dumper
+from deepdiff import DeepDiff, Delta
+
+from .. import utils
+from replication.protocol import ReplicatedDatablock
+from .dump_anything import Loader, Dumper
+from .bl_datablock import resolve_datablock_from_uuid
 
 def dump_collection_children(collection):
     collection_children = []

@@ -81,58 +83,82 @@ def resolve_collection_dependencies(collection):
 
     return deps
 
-class BlCollection(BlDatablock):
+class BlCollection(ReplicatedDatablock):
     bl_id = "collections"
     bl_icon = 'FILE_FOLDER'
     bl_class = bpy.types.Collection
     bl_check_common = True
     bl_reload_parent = False
 
-    def _construct(self, data):
-        if self.is_library:
-            with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
-                targetData.collections = [
-                    name for name in sourceData.collections if name == self.data['name']]
-
-            instance = bpy.data.collections[self.data['name']]
-
-            return instance
-
+    use_delta = True
+
+    @staticmethod
+    def construct(data: dict) -> object:
         instance = bpy.data.collections.new(data["name"])
         return instance
 
-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)
 
         # Objects
-        load_collection_objects(data['objects'], target)
+        load_collection_objects(data['objects'], datablock)
 
         # Link childrens
-        load_collection_childrens(data['children'], target)
+        load_collection_childrens(data['children'], datablock)
 
         # FIXME: Find a better way after the replication big refacotoring
         # Keep other user from deleting collection object by flushing their history
         utils.flush_history()
 
-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
-
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         dumper.depth = 1
         dumper.include_filter = [
             "name",
             "instance_offset"
         ]
-        data = dumper.dump(instance)
+        data = dumper.dump(datablock)
 
         # dump objects
-        data['objects'] = dump_collection_objects(instance)
+        data['objects'] = dump_collection_objects(datablock)
 
         # dump children collections
-        data['children'] = dump_collection_children(instance)
+        data['children'] = dump_collection_children(datablock)
 
         return data
 
-    def _resolve_deps_implementation(self):
-        return resolve_collection_dependencies(self.instance)
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.collections)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        return resolve_collection_dependencies(datablock)
+
+    @staticmethod
+    def compute_delta(last_data: dict, current_data: dict) -> Delta:
+        diff_params = {
+            'ignore_order': True,
+            'report_repetition': True
+        }
+        delta_params = {
+            # 'mutate': True
+        }
+
+        return Delta(
+            DeepDiff(last_data,
+                     current_data,
+                     cache_size=5000,
+                     **diff_params),
+            **delta_params)
+
+_type = bpy.types.Collection
+_class = BlCollection
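compute_delta above is where this compare wires the CHANGELOG's "delta-based replication" to deepdiff. A minimal, self-contained sketch of the DeepDiff/Delta round trip it relies on; the two dictionaries are invented stand-ins for successive BlCollection.dump() snapshots:

    # Minimal sketch of the deepdiff round trip used by compute_delta().
    from deepdiff import DeepDiff, Delta

    last_data = {"name": "Collection", "objects": ["Cube"], "children": []}
    current_data = {"name": "Collection", "objects": ["Cube", "Light"], "children": []}

    delta = Delta(DeepDiff(last_data, current_data,
                           ignore_order=True, report_repetition=True))

    # Applying the delta to the previous snapshot reproduces the new one, so only
    # the delta has to travel over the wire instead of the full dump.
    assert last_data + delta == current_data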
```diff
@@ -21,13 +21,15 @@ import bpy.types as T
 import mathutils
 import logging

-from .. import utils
+from ..utils import get_preferences
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
 from .dump_anything import (Dumper, Loader,
                             np_load_collection,
                             np_dump_collection)
-from .bl_datablock import get_datablock_from_uuid
 from .bl_material import dump_materials_slots, load_materials_slots
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


 SPLINE_BEZIER_POINT = [
     # "handle_left_type",
@@ -134,25 +136,31 @@ SPLINE_METADATA = [
 ]


-class BlCurve(BlDatablock):
+class BlCurve(ReplicatedDatablock):
+    use_delta = True

     bl_id = "curves"
     bl_class = bpy.types.Curve
     bl_check_common = False
     bl_icon = 'CURVE_DATA'
     bl_reload_parent = False

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.curves.new(data["name"], data["type"])

-    def _load_implementation(self, data, target):
-        loader = Loader()
-        loader.load(target, data)
-
-        target.splines.clear()
+    @staticmethod
+    def load(data: dict, datablock: object):
+        load_animation_data(data.get('animation_data'), datablock)
+
+        loader = Loader()
+        loader.load(datablock, data)
+
+        datablock.splines.clear()

         # load splines
         for spline in data['splines'].values():
-            new_spline = target.splines.new(spline['type'])
+            new_spline = datablock.splines.new(spline['type'])

             # Load curve geometry data
             if new_spline.type == 'BEZIER':
@@ -173,15 +181,14 @@ class BlCurve(BlDatablock):
         # MATERIAL SLOTS
         src_materials = data.get('materials', None)
         if src_materials:
-            load_materials_slots(src_materials, target.materials)
+            load_materials_slots(src_materials, datablock.materials)

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         # Conflicting attributes
         # TODO: remove them with the NURBS support
         dumper.include_filter = CURVE_METADATA

         dumper.exclude_filter = [
             'users',
             'order_u',
@@ -190,14 +197,16 @@ class BlCurve(BlDatablock):
             'point_count_u',
             'active_textbox'
         ]
-        if instance.use_auto_texspace:
+        if datablock.use_auto_texspace:
             dumper.exclude_filter.extend([
                 'texspace_location',
                 'texspace_size'])
-        data = dumper.dump(instance)
+        data = dumper.dump(datablock)

+        data['animation_data'] = dump_animation_data(datablock)
         data['splines'] = {}

-        for index, spline in enumerate(instance.splines):
+        for index, spline in enumerate(datablock.splines):
             dumper.depth = 2
             dumper.include_filter = SPLINE_METADATA
             spline_data = dumper.dump(spline)
@@ -211,21 +220,27 @@ class BlCurve(BlDatablock):
                 spline.bezier_points, SPLINE_BEZIER_POINT)
             data['splines'][index] = spline_data

-        if isinstance(instance, T.SurfaceCurve):
+        if isinstance(datablock, T.SurfaceCurve):
             data['type'] = 'SURFACE'
-        elif isinstance(instance, T.TextCurve):
+        elif isinstance(datablock, T.TextCurve):
             data['type'] = 'FONT'
-        elif isinstance(instance, T.Curve):
+        elif isinstance(datablock, T.Curve):
             data['type'] = 'CURVE'

-        data['materials'] = dump_materials_slots(instance.materials)
+        data['materials'] = dump_materials_slots(datablock.materials)

         return data

-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.curves)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         # TODO: resolve material
         deps = []
-        curve = self.instance
+        curve = datablock

         if isinstance(curve, T.TextCurve):
             deps.extend([
@@ -234,15 +249,19 @@ class BlCurve(BlDatablock):
                 curve.font_bold_italic,
                 curve.font_italic])

-        for material in self.instance.materials:
+        for material in datablock.materials:
             if material:
                 deps.append(material)

+        deps.extend(resolve_animation_dependencies(datablock))

         return deps

-    def diff(self):
-        if 'EDIT' in bpy.context.mode \
-                and not self.preferences.sync_flags.sync_during_editmode:
-            return False
-        else:
-            return super().diff()
+    @staticmethod
+    def needs_update(datablock: object, data: dict) -> bool:
+        return 'EDIT' not in bpy.context.mode \
+            or get_preferences().sync_flags.sync_during_editmode
+
+_type = [bpy.types.Curve, bpy.types.TextCurve]
+_class = BlCurve
```
```diff
@@ -22,73 +22,11 @@ from collections.abc import Iterable
 import bpy
 import mathutils
 from replication.constants import DIFF_BINARY, DIFF_JSON, UP
-from replication.data import ReplicatedDatablock
+from replication.protocol import ReplicatedDatablock

 from .. import utils
 from .dump_anything import Dumper, Loader


-def has_action(target):
-    """ Check if the target datablock has actions
-    """
-    return (hasattr(target, 'animation_data')
-            and target.animation_data
-            and target.animation_data.action)
-
-
-def has_driver(target):
-    """ Check if the target datablock is driven
-    """
-    return (hasattr(target, 'animation_data')
-            and target.animation_data
-            and target.animation_data.drivers)
-
-
-def dump_driver(driver):
-    dumper = Dumper()
-    dumper.depth = 6
-    data = dumper.dump(driver)
-
-    return data
-
-
-def load_driver(target_datablock, src_driver):
-    loader = Loader()
-    drivers = target_datablock.animation_data.drivers
-    src_driver_data = src_driver['driver']
-    new_driver = drivers.new(src_driver['data_path'], index=src_driver['array_index'])
-
-    # Settings
-    new_driver.driver.type = src_driver_data['type']
-    new_driver.driver.expression = src_driver_data['expression']
-    loader.load(new_driver, src_driver)
-
-    # Variables
-    for src_variable in src_driver_data['variables']:
-        src_var_data = src_driver_data['variables'][src_variable]
-        new_var = new_driver.driver.variables.new()
-        new_var.name = src_var_data['name']
-        new_var.type = src_var_data['type']
-
-        for src_target in src_var_data['targets']:
-            src_target_data = src_var_data['targets'][src_target]
-            new_var.targets[src_target].id = utils.resolve_from_id(
-                src_target_data['id'], src_target_data['id_type'])
-            loader.load(
-                new_var.targets[src_target], src_target_data)
-
-    # Fcurve
-    new_fcurve = new_driver.keyframe_points
-    for p in reversed(new_fcurve):
-        new_fcurve.remove(p, fast=True)
-
-    new_fcurve.add(len(src_driver['keyframe_points']))
-
-    for index, src_point in enumerate(src_driver['keyframe_points']):
-        new_point = new_fcurve[index]
-        loader.load(new_point, src_driver['keyframe_points'][src_point])
-
-
 def get_datablock_from_uuid(uuid, default, ignore=[]):
     if not uuid:
         return default
@@ -100,132 +38,8 @@ def get_datablock_from_uuid(uuid, default, ignore=[]):
             return item
     return default

+def resolve_datablock_from_uuid(uuid, bpy_collection):
+    for item in bpy_collection:
+        if getattr(item, 'uuid', None) == uuid:
+            return item
+    return None

-class BlDatablock(ReplicatedDatablock):
-    """BlDatablock
-
-    bl_id : blender internal storage identifier
-    bl_class : blender internal type
-    bl_icon : type icon (blender icon name)
-    bl_check_common: enable check even in common rights
-    bl_reload_parent: reload parent
-    """
-
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        instance = kwargs.get('instance', None)
-
-        self.preferences = utils.get_preferences()
-
-        # TODO: use is_library_indirect
-        self.is_library = (instance and hasattr(instance, 'library') and
-                           instance.library) or \
-            (hasattr(self,'data') and self.data and 'library' in self.data)
-
-        if instance and hasattr(instance, 'uuid'):
-            instance.uuid = self.uuid
-
-    def resolve(self, construct = True):
-        datablock_root = getattr(bpy.data, self.bl_id)
-        datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root)
-
-        if not datablock_ref:
-            try:
-                datablock_ref = datablock_root[self.data['name']]
-            except Exception:
-                pass
-
-        if construct and not datablock_ref:
-            name = self.data.get('name')
-            logging.debug(f"Constructing {name}")
-            datablock_ref = self._construct(data=self.data)
-
-        if datablock_ref is not None:
-            setattr(datablock_ref, 'uuid', self.uuid)
-            self.instance = datablock_ref
-            return True
-        else:
-            return False
-
-    def remove_instance(self):
-        """
-        Remove instance from blender data
-        """
-        assert(self.instance)
-
-        datablock_root = getattr(bpy.data, self.bl_id)
-        datablock_root.remove(self.instance)
-
-    def _dump(self, instance=None):
-        dumper = Dumper()
-        data = {}
-        # Dump animation data
-        if has_action(instance):
-            dumper = Dumper()
-            dumper.include_filter = ['action']
-            data['animation_data'] = dumper.dump(instance.animation_data)
-
-        if has_driver(instance):
-            dumped_drivers = {'animation_data': {'drivers': []}}
-            for driver in instance.animation_data.drivers:
-                dumped_drivers['animation_data']['drivers'].append(
-                    dump_driver(driver))
-
-            data.update(dumped_drivers)
-
-        if self.is_library:
-            data.update(dumper.dump(instance))
-        else:
-            data.update(self._dump_implementation(data, instance=instance))
-
-        return data
-
-    def _dump_implementation(self, data, target):
-        raise NotImplementedError
-
-    def _load(self, data, target):
-        # Load animation data
-        if 'animation_data' in data.keys():
-            if target.animation_data is None:
-                target.animation_data_create()
-
-            for d in target.animation_data.drivers:
-                target.animation_data.drivers.remove(d)
-
-            if 'drivers' in data['animation_data']:
-                for driver in data['animation_data']['drivers']:
-                    load_driver(target, driver)
-
-            if 'action' in data['animation_data']:
-                target.animation_data.action = bpy.data.actions[data['animation_data']['action']]
-        # Remove existing animation data if there is not more to load
-        elif hasattr(target, 'animation_data') and target.animation_data:
-            target.animation_data_clear()
-
-        if self.is_library:
-            return
-        else:
-            self._load_implementation(data, target)
-
-    def _load_implementation(self, data, target):
-        raise NotImplementedError
-
-    def resolve_deps(self):
-        dependencies = []
-
-        if has_action(self.instance):
-            dependencies.append(self.instance.animation_data.action)
-
-        if not self.is_library:
-            dependencies.extend(self._resolve_deps_implementation())
-
-        logging.debug(f"{self.instance} dependencies: {dependencies}")
-        return dependencies
-
-    def _resolve_deps_implementation(self):
-        return []
-
-    def is_valid(self):
-        return getattr(bpy.data, self.bl_id).get(self.data['name'])
```
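Taken together, the removals above retire the stateful `BlDatablock` base class (per-instance `resolve`, the `_dump`/`_load` wrappers and the driver helpers) in favour of the stateless `ReplicatedDatablock` protocol imported from `replication.protocol`: each implementation now exposes `construct`, `load`, `dump`, `resolve`, `resolve_deps` and optionally `needs_update`/`compute_delta` as static methods, and registers itself through module-level `_type`/`_class` pairs. A minimal sketch of the shape an implementation takes after this refactor; `BlDummy` and the choice of `bpy.data.texts` are illustrative only and do not appear in the changeset:

```python
# Hedged sketch of the static-method protocol the refactor moves to.
# ReplicatedDatablock, resolve_datablock_from_uuid, Dumper and Loader are the
# names used in the diffs; BlDummy and the texts datablock are made up here.
import bpy
from replication.protocol import ReplicatedDatablock

from .bl_datablock import resolve_datablock_from_uuid
from .dump_anything import Dumper, Loader


class BlDummy(ReplicatedDatablock):
    bl_id = "texts"
    bl_class = bpy.types.Text

    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.texts.new(data["name"])

    @staticmethod
    def load(data: dict, datablock: object):
        Loader().load(datablock, data)

    @staticmethod
    def dump(datablock: object) -> dict:
        dumper = Dumper()
        dumper.include_filter = ["name"]
        return dumper.dump(datablock)

    @staticmethod
    def resolve(data: dict) -> object:
        return resolve_datablock_from_uuid(data.get("uuid"), bpy.data.texts)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        return []


_type = bpy.types.Text
_class = BlDummy
```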
```diff
@@ -19,14 +19,15 @@
 import logging
 import os
 import sys
-from pathlib import Path
+from pathlib import Path, WindowsPath, PosixPath

 import bpy
 import mathutils
 from replication.constants import DIFF_BINARY, UP
-from replication.data import ReplicatedDatablock
+from replication.protocol import ReplicatedDatablock

 from .. import utils
+from ..utils import get_preferences
 from .dump_anything import Dumper, Loader


@@ -58,33 +59,16 @@ class BlFile(ReplicatedDatablock):
     bl_icon = 'FILE'
     bl_reload_parent = True

-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.instance = kwargs.get('instance', None)
-
-        if self.instance and not self.instance.exists():
-            raise FileNotFoundError(str(self.instance))
-
-        self.preferences = utils.get_preferences()
-
-    def resolve(self, construct = True):
-        self.instance = Path(get_filepath(self.data['name']))
-
-        file_exists = self.instance.exists()
-        if not file_exists:
-            logging.debug("File don't exist, loading it.")
-            self._load(self.data, self.instance)
-
-        return file_exists
-
-    def push(self, socket, identity=None, check_data=False):
-        super().push(socket, identity=None, check_data=False)
-
-        if self.preferences.clear_memory_filecache:
-            del self.data['file']
-
-    def _dump(self, instance=None):
+    @staticmethod
+    def construct(data: dict) -> object:
+        return Path(get_filepath(data['name']))
+
+    @staticmethod
+    def resolve(data: dict) -> object:
+        return Path(get_filepath(data['name']))
+
+    @staticmethod
+    def dump(datablock: object) -> dict:
         """
         Read the file and return a dict as:
         {
@@ -96,44 +80,62 @@ class BlFile(ReplicatedDatablock):
         logging.info(f"Extracting file metadata")

         data = {
-            'name': self.instance.name,
+            'name': datablock.name,
         }

-        logging.info(
-            f"Reading {self.instance.name} content: {self.instance.stat().st_size} bytes")
+        logging.info(f"Reading {datablock.name} content: {datablock.stat().st_size} bytes")

         try:
-            file = open(self.instance, "rb")
+            file = open(datablock, "rb")
             data['file'] = file.read()

             file.close()
         except IOError:
-            logging.warning(f"{self.instance} doesn't exist, skipping")
+            logging.warning(f"{datablock} doesn't exist, skipping")
         else:
             file.close()

         return data

-    def _load(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         """
         Writing the file
         """
         try:
-            file = open(target, "wb")
+            file = open(datablock, "wb")
             file.write(data['file'])

-            if self.preferences.clear_memory_filecache:
-                del self.data['file']
+            if get_preferences().clear_memory_filecache:
+                del data['file']
         except IOError:
-            logging.warning(f"{target} doesn't exist, skipping")
+            logging.warning(f"{datablock} doesn't exist, skipping")
         else:
             file.close()

-    def diff(self):
-        if self.preferences.clear_memory_filecache:
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        return []
+
+    @staticmethod
+    def needs_update(datablock: object, data:dict)-> bool:
+        if get_preferences().clear_memory_filecache:
             return False
         else:
-            memory_size = sys.getsizeof(self.data['file'])-33
-            disk_size = self.instance.stat().st_size
-            return memory_size != disk_size
+            if not datablock:
+                return None
+
+            if not data:
+                return True
+
+            memory_size = sys.getsizeof(data['file'])-33
+            disk_size = datablock.stat().st_size
+
+            if memory_size != disk_size:
+                return True
+            else:
+                return False
+
+_type = [WindowsPath, PosixPath]
+_class = BlFile
```
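A note on the size check in the new `BlFile.needs_update` above: `sys.getsizeof` on a `bytes` payload includes CPython's object header, and subtracting 33 (the size of an empty `bytes` object on 64-bit builds) leaves the raw byte count, which is then compared against the on-disk size. A small illustration; the numbers assume a 64-bit CPython:

```python
# Illustration of the "-33" in BlFile.needs_update: on 64-bit CPython an empty
# bytes object weighs 33 bytes, so getsizeof(payload) - 33 == len(payload).
import sys

payload = b"hello world"
print(sys.getsizeof(b""))           # 33: header overhead only
print(sys.getsizeof(payload) - 33)  # 11, i.e. len(payload)
print(len(payload))                 # matches, so the heuristic compares raw sizes
```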
```diff
@@ -22,19 +22,20 @@ from pathlib import Path

 import bpy

-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
 from .bl_file import get_filepath, ensure_unpacked
 from .dump_anything import Dumper, Loader
+from .bl_datablock import resolve_datablock_from_uuid


-class BlFont(BlDatablock):
+class BlFont(ReplicatedDatablock):
     bl_id = "fonts"
     bl_class = bpy.types.VectorFont
     bl_check_common = False
     bl_icon = 'FILE_FONT'
     bl_reload_parent = False

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         filename = data.get('filename')

         if filename == '<builtin>':
@@ -42,31 +43,43 @@ class BlFont(BlDatablock):
         else:
             return bpy.data.fonts.load(get_filepath(filename))

-    def _load(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         pass

-    def _dump(self, instance=None):
-        if instance.filepath == '<builtin>':
+    @staticmethod
+    def dump(datablock: object) -> dict:
+        if datablock.filepath == '<builtin>':
             filename = '<builtin>'
         else:
-            filename = Path(instance.filepath).name
+            filename = Path(datablock.filepath).name

         if not filename:
-            raise FileExistsError(instance.filepath)
+            raise FileExistsError(datablock.filepath)

         return {
             'filename': filename,
-            'name': instance.name
+            'name': datablock.name
         }

-    def diff(self):
-        return False
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.fonts)

-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         deps = []
-        if self.instance.filepath and self.instance.filepath != '<builtin>':
-            ensure_unpacked(self.instance)
+        if datablock.filepath and datablock.filepath != '<builtin>':
+            ensure_unpacked(datablock)

-            deps.append(Path(bpy.path.abspath(self.instance.filepath)))
+            deps.append(Path(bpy.path.abspath(datablock.filepath)))

         return deps

+    @staticmethod
+    def needs_update(datablock: object, data:dict)-> bool:
+        return False
+
+_type = bpy.types.VectorFont
+_class = BlFont
```
```diff
@@ -24,10 +24,12 @@ from .dump_anything import (Dumper,
                             Loader,
                             np_dump_collection,
                             np_load_collection)
-from .bl_datablock import BlDatablock
-# GPencil data api is structured as it follow:
-# GP-Object --> GP-Layers --> GP-Frames --> GP-Strokes --> GP-Stroke-Points
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
+from ..utils import get_preferences
+from ..timers import is_annotating
+from .bl_material import load_materials_slots, dump_materials_slots

 STROKE_POINT = [
     'co',
@@ -64,36 +66,9 @@ def dump_stroke(stroke):

     :param stroke: target grease pencil stroke
     :type stroke: bpy.types.GPencilStroke
-    :return: dict
+    :return: (p_count, p_data)
     """
-    assert(stroke)
-
-    dumper = Dumper()
-    dumper.include_filter = [
-        "aspect",
-        "display_mode",
-        "draw_cyclic",
-        "end_cap_mode",
-        "hardeness",
-        "line_width",
-        "material_index",
-        "start_cap_mode",
-        "uv_rotation",
-        "uv_scale",
-        "uv_translation",
-        "vertex_color_fill",
-    ]
-    dumped_stroke = dumper.dump(stroke)
-
-    # Stoke points
-    p_count = len(stroke.points)
-    dumped_stroke['p_count'] = p_count
-    dumped_stroke['points'] = np_dump_collection(stroke.points, STROKE_POINT)
-
-    # TODO: uv_factor, uv_rotation
-
-    return dumped_stroke
+    return (len(stroke.points), np_dump_collection(stroke.points, STROKE_POINT))


 def load_stroke(stroke_data, stroke):
@@ -106,12 +81,13 @@ def load_stroke(stroke_data, stroke):
     """
     assert(stroke and stroke_data)

-    stroke.points.add(stroke_data["p_count"])
-    np_load_collection(stroke_data['points'], stroke.points, STROKE_POINT)
+    stroke.points.add(stroke_data[0])
+    np_load_collection(stroke_data[1], stroke.points, STROKE_POINT)

     # HACK: Temporary fix to trigger a BKE_gpencil_stroke_geometry_update to
     # fix fill issues
-    stroke.uv_scale = stroke_data["uv_scale"]
+    stroke.uv_scale = 1.0


 def dump_frame(frame):
     """ Dump a grease pencil frame to a dict
@@ -145,12 +121,15 @@ def load_frame(frame_data, frame):

     assert(frame and frame_data)

+    # Load stroke points
     for stroke_data in frame_data['strokes_points']:
         target_stroke = frame.strokes.new()
         load_stroke(stroke_data, target_stroke)

+    # Load stroke metadata
     np_load_collection(frame_data['strokes'], frame.strokes, STROKE)


 def dump_layer(layer):
     """ Dump a grease pencil layer

@@ -167,7 +146,6 @@ def dump_layer(layer):
         'opacity',
         'channel_color',
         'color',
-        # 'thickness', #TODO: enabling only for annotation
         'tint_color',
         'tint_factor',
         'vertex_paint_opacity',
@@ -184,7 +162,7 @@ def dump_layer(layer):
         'hide',
         'annotation_hide',
         'lock',
-        # 'lock_frame',
+        'lock_frame',
         # 'lock_material',
         # 'use_mask_layer',
         'use_lights',
@@ -192,12 +170,13 @@ def dump_layer(layer):
         'select',
         'show_points',
         'show_in_front',
+        # 'thickness'
         # 'parent',
         # 'parent_type',
         # 'parent_bone',
         # 'matrix_inverse',
     ]
-    if layer.id_data.is_annotation:
+    if layer.thickness != 0:
         dumper.include_filter.append('thickness')

     dumped_layer = dumper.dump(layer)
@@ -228,87 +207,99 @@ def load_layer(layer_data, layer):
         load_frame(frame_data, target_frame)


-class BlGpencil(BlDatablock):
+def layer_changed(datablock: object, data: dict) -> bool:
+    if datablock.layers.active and \
+            datablock.layers.active.info != data["active_layers"]:
+        return True
+    else:
+        return False
+
+
+def frame_changed(data: dict) -> bool:
+    return bpy.context.scene.frame_current != data["eval_frame"]
+
+
+class BlGpencil(ReplicatedDatablock):
     bl_id = "grease_pencils"
     bl_class = bpy.types.GreasePencil
     bl_check_common = False
     bl_icon = 'GREASEPENCIL'
     bl_reload_parent = False

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.grease_pencils.new(data["name"])

-    def _load_implementation(self, data, target):
-        target.materials.clear()
-        if "materials" in data.keys():
-            for mat in data['materials']:
-                target.materials.append(bpy.data.materials[mat])
+    @staticmethod
+    def load(data: dict, datablock: object):
+        # MATERIAL SLOTS
+        src_materials = data.get('materials', None)
+        if src_materials:
+            load_materials_slots(src_materials, datablock.materials)

         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)

         # TODO: reuse existing layer
-        for layer in target.layers:
-            target.layers.remove(layer)
+        for layer in datablock.layers:
+            datablock.layers.remove(layer)

         if "layers" in data.keys():
             for layer in data["layers"]:
                 layer_data = data["layers"].get(layer)

-                # if layer not in target.layers.keys():
-                target_layer = target.layers.new(data["layers"][layer]["info"])
+                # if layer not in datablock.layers.keys():
+                target_layer = datablock.layers.new(data["layers"][layer]["info"])
                 # else:
                 #     target_layer = target.layers[layer]
                 #     target_layer.clear()

                 load_layer(layer_data, target_layer)

-        target.layers.update()
+        datablock.layers.update()

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         dumper.depth = 2
         dumper.include_filter = [
-            'materials',
             'name',
             'zdepth_offset',
             'stroke_thickness_space',
             'pixel_factor',
             'stroke_depth_order'
         ]
-        data = dumper.dump(instance)
+        data = dumper.dump(datablock)
+        data['materials'] = dump_materials_slots(datablock.materials)
         data['layers'] = {}

-        for layer in instance.layers:
+        for layer in datablock.layers:
             data['layers'][layer.info] = dump_layer(layer)

-        data["active_layers"] = instance.layers.active.info
+        data["active_layers"] = datablock.layers.active.info if datablock.layers.active else "None"
         data["eval_frame"] = bpy.context.scene.frame_current
         return data

-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.grease_pencils)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         deps = []

-        for material in self.instance.materials:
+        for material in datablock.materials:
             deps.append(material)

         return deps

-    def layer_changed(self):
-        return self.instance.layers.active.info != self.data["active_layers"]
-
-    def frame_changed(self):
-        return bpy.context.scene.frame_current != self.data["eval_frame"]
-
-    def diff(self):
-        if self.layer_changed() \
-                or self.frame_changed() \
-                or bpy.context.mode == 'OBJECT' \
-                or self.preferences.sync_flags.sync_during_editmode:
-            return super().diff()
-        else:
-            return False
+    @staticmethod
+    def needs_update(datablock: object, data: dict) -> bool:
+        return bpy.context.mode == 'OBJECT' \
+            or layer_changed(datablock, data) \
+            or frame_changed(data) \
+            or get_preferences().sync_flags.sync_during_editmode \
+            or is_annotating(bpy.context)
+
+_type = bpy.types.GreasePencil
+_class = BlGpencil
```
```diff
@@ -24,9 +24,12 @@ import bpy
 import mathutils

 from .. import utils
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
 from .dump_anything import Dumper, Loader
 from .bl_file import get_filepath, ensure_unpacked
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


 format_to_ext = {
     'BMP': 'bmp',
@@ -48,32 +51,37 @@ format_to_ext = {
 }


-class BlImage(BlDatablock):
+class BlImage(ReplicatedDatablock):
     bl_id = "images"
     bl_class = bpy.types.Image
     bl_check_common = False
     bl_icon = 'IMAGE_DATA'
     bl_reload_parent = False

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.images.new(
             name=data['name'],
             width=data['size'][0],
             height=data['size'][1]
         )

-    def _load(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         loader = Loader()
-        loader.load(data, target)
+        loader.load(datablock, data)

-        target.source = 'FILE'
-        target.filepath_raw = get_filepath(data['filename'])
-        target.colorspace_settings.name = data["colorspace_settings"]["name"]
+        # datablock.name = data.get('name')
+        datablock.source = 'FILE'
+        datablock.filepath_raw = get_filepath(data['filename'])
+        color_space_name = data.get("colorspace")

-    def _dump(self, instance=None):
-        assert(instance)
+        if color_space_name:
+            datablock.colorspace_settings.name = color_space_name

-        filename = Path(instance.filepath).name
+    @staticmethod
+    def dump(datablock: object) -> dict:
+        filename = Path(datablock.filepath).name

         data = {
             "filename": filename
@@ -83,41 +91,47 @@ class BlImage(BlDatablock):
         dumper.depth = 2
         dumper.include_filter = [
             "name",
+            # 'source',
             'size',
-            'height',
-            'alpha',
-            'float_buffer',
-            'alpha_mode',
-            'colorspace_settings']
-        data.update(dumper.dump(instance))
+            'alpha_mode']
+        data.update(dumper.dump(datablock))
+        data['colorspace'] = datablock.colorspace_settings.name
         return data

-    def diff(self):
-        if self.instance.is_dirty:
-            self.instance.save()
-
-        if self.instance and (self.instance.name != self.data['name']):
-            return True
-        else:
-            return False
-
-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.images)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         deps = []

-        if self.instance.packed_file:
-            filename = Path(bpy.path.abspath(self.instance.filepath)).name
-            self.instance.filepath_raw = get_filepath(filename)
-            self.instance.save()
+        if datablock.packed_file:
+            filename = Path(bpy.path.abspath(datablock.filepath)).name
+            datablock.filepath_raw = get_filepath(filename)
+            datablock.save()
             # An image can't be unpacked to the modified path
             # TODO: make a bug report
-            self.instance.unpack(method="REMOVE")
+            datablock.unpack(method="REMOVE")

-        elif self.instance.source == "GENERATED":
-            filename = f"{self.instance.name}.png"
-            self.instance.filepath = get_filepath(filename)
-            self.instance.save()
+        elif datablock.source == "GENERATED":
+            filename = f"{datablock.name}.png"
+            datablock.filepath = get_filepath(filename)
+            datablock.save()

-        if self.instance.filepath:
-            deps.append(Path(bpy.path.abspath(self.instance.filepath)))
+        if datablock.filepath:
+            deps.append(Path(bpy.path.abspath(datablock.filepath)))

         return deps

+    @staticmethod
+    def needs_update(datablock: object, data:dict)-> bool:
+        if datablock.is_dirty:
+            datablock.save()
+
+        return True
+
+_type = bpy.types.Image
+_class = BlImage
```
```diff
@@ -20,33 +20,41 @@ import bpy
 import mathutils

 from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
 from replication.exception import ContextError
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

 POINT = ['co', 'weight_softbody', 'co_deform']


-class BlLattice(BlDatablock):
+class BlLattice(ReplicatedDatablock):
+    use_delta = True

     bl_id = "lattices"
     bl_class = bpy.types.Lattice
     bl_check_common = False
     bl_icon = 'LATTICE_DATA'
     bl_reload_parent = False

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.lattices.new(data["name"])

-    def _load_implementation(self, data, target):
-        if target.is_editmode:
+    @staticmethod
+    def load(data: dict, datablock: object):
+        load_animation_data(data.get('animation_data'), datablock)
+        if datablock.is_editmode:
             raise ContextError("lattice is in edit mode")

         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)

-        np_load_collection(data['points'], target.points, POINT)
+        np_load_collection(data['points'], datablock.points, POINT)

-    def _dump_implementation(self, data, instance=None):
-        if instance.is_editmode:
+    @staticmethod
+    def dump(datablock: object) -> dict:
+        if datablock.is_editmode:
             raise ContextError("lattice is in edit mode")

         dumper = Dumper()
@@ -62,9 +70,20 @@ class BlLattice(BlDatablock):
             'interpolation_type_w',
             'use_outside'
         ]
-        data = dumper.dump(instance)
-
-        data['points'] = np_dump_collection(instance.points, POINT)
+        data = dumper.dump(datablock)

+        data['points'] = np_dump_collection(datablock.points, POINT)
+        data['animation_data'] = dump_animation_data(datablock)
         return data

+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.lattices)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        return resolve_animation_dependencies(datablock)
+
+_type = bpy.types.Lattice
+_class = BlLattice
```
```diff
@@ -20,25 +20,34 @@ import bpy
 import mathutils

 from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


-class BlLight(BlDatablock):
+class BlLight(ReplicatedDatablock):
+    use_delta = True

     bl_id = "lights"
     bl_class = bpy.types.Light
     bl_check_common = False
     bl_icon = 'LIGHT_DATA'
     bl_reload_parent = False

-    def _construct(self, data):
-        return bpy.data.lights.new(data["name"], data["type"])
+    @staticmethod
+    def construct(data: dict) -> object:
+        instance = bpy.data.lights.new(data["name"], data["type"])
+        instance.uuid = data.get("uuid")
+        return instance

-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)
+        load_animation_data(data.get('animation_data'), datablock)

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         dumper.depth = 3
         dumper.include_filter = [
@@ -67,9 +76,23 @@ class BlLight(BlDatablock):
             'spot_size',
             'spot_blend'
         ]
-        data = dumper.dump(instance)
+        data = dumper.dump(datablock)
+        data['animation_data'] = dump_animation_data(datablock)
         return data

+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.lights)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        deps = []
+
+        deps.extend(resolve_animation_dependencies(datablock))
+
+        return deps
+
+
+_type = [bpy.types.SpotLight, bpy.types.PointLight, bpy.types.AreaLight, bpy.types.SunLight]
+_class = BlLight
```
```diff
@@ -21,17 +21,20 @@ import mathutils
 import logging

 from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid


-class BlLightprobe(BlDatablock):
+class BlLightprobe(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "lightprobes"
     bl_class = bpy.types.LightProbe
     bl_check_common = False
     bl_icon = 'LIGHTPROBE_GRID'
     bl_reload_parent = False

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type']
         # See https://developer.blender.org/D6396
         if bpy.app.version[1] >= 83:
@@ -39,12 +42,13 @@ class BlLightprobe(BlDatablock):
         else:
             logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")

-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+    @staticmethod
+    def dump(datablock: object) -> dict:
         if bpy.app.version[1] < 83:
             logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")

@@ -71,7 +75,16 @@
             'visibility_blur'
         ]

-        return dumper.dump(instance)
+        return dumper.dump(datablock)
+
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.lightprobes)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        return []
+
+_type = bpy.types.LightProbe
+_class = BlLightprobe
```
```diff
@@ -24,7 +24,10 @@ import re
 from uuid import uuid4

 from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock, get_datablock_from_uuid
+from replication.protocol import ReplicatedDatablock

+from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

 NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
 IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM']
@@ -45,7 +48,11 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
     node_tree_uuid = node_data.get('node_tree_uuid', None)

     if image_uuid and not target_node.image:
-        target_node.image = get_datablock_from_uuid(image_uuid, None)
+        image = resolve_datablock_from_uuid(image_uuid, bpy.data.images)
+        if image is None:
+            logging.error(f"Fail to find material image from uuid {image_uuid}")
+        else:
+            target_node.image = image

     if node_tree_uuid:
         target_node.node_tree = get_datablock_from_uuid(node_tree_uuid, None)
@@ -117,8 +124,7 @@ def dump_node(node: bpy.types.ShaderNode) -> dict:
         "show_preview",
         "show_texture",
         "outputs",
-        "width_hidden",
-        "image"
+        "width_hidden"
     ]

     dumped_node = node_dumper.dump(node)
@@ -381,44 +387,50 @@ def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_

     for mat_uuid, mat_name in src_materials:
         mat_ref = None
-        if mat_uuid is not None:
+        if mat_uuid:
             mat_ref = get_datablock_from_uuid(mat_uuid, None)
         else:
             mat_ref = bpy.data.materials[mat_name]

         dst_materials.append(mat_ref)


-class BlMaterial(BlDatablock):
+class BlMaterial(ReplicatedDatablock):
+    use_delta = True

     bl_id = "materials"
     bl_class = bpy.types.Material
     bl_check_common = False
     bl_icon = 'MATERIAL_DATA'
     bl_reload_parent = False
+    bl_reload_child = True

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.materials.new(data["name"])

-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         loader = Loader()

         is_grease_pencil = data.get('is_grease_pencil')
         use_nodes = data.get('use_nodes')

-        loader.load(target, data)
+        loader.load(datablock, data)

         if is_grease_pencil:
-            if not target.is_grease_pencil:
-                bpy.data.materials.create_gpencil_data(target)
-            loader.load(target.grease_pencil, data['grease_pencil'])
+            if not datablock.is_grease_pencil:
+                bpy.data.materials.create_gpencil_data(datablock)
+            loader.load(datablock.grease_pencil, data['grease_pencil'])
         elif use_nodes:
-            if target.node_tree is None:
-                target.use_nodes = True
+            if datablock.node_tree is None:
+                datablock.use_nodes = True

-            load_node_tree(data['node_tree'], target.node_tree)
+            load_node_tree(data['node_tree'], datablock.node_tree)
+            load_animation_data(data.get('nodes_animation_data'), datablock.node_tree)
+        load_animation_data(data.get('animation_data'), datablock)

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+    @staticmethod
+    def dump(datablock: object) -> dict:
         mat_dumper = Dumper()
         mat_dumper.depth = 2
         mat_dumper.include_filter = [
@@ -444,9 +456,9 @@ class BlMaterial(BlDatablock):
             'line_priority',
             'is_grease_pencil'
         ]
-        data = mat_dumper.dump(instance)
+        data = mat_dumper.dump(datablock)

-        if instance.is_grease_pencil:
+        if datablock.is_grease_pencil:
             gp_mat_dumper = Dumper()
             gp_mat_dumper.depth = 3

@@ -480,19 +492,30 @@ class BlMaterial(BlDatablock):
                 'use_overlap_strokes',
                 'use_fill_holdout',
             ]
-            data['grease_pencil'] = gp_mat_dumper.dump(instance.grease_pencil)
-        elif instance.use_nodes:
-            data['node_tree'] = dump_node_tree(instance.node_tree)
+            data['grease_pencil'] = gp_mat_dumper.dump(datablock.grease_pencil)
+        elif datablock.use_nodes:
+            data['node_tree'] = dump_node_tree(datablock.node_tree)
+            data['nodes_animation_data'] = dump_animation_data(datablock.node_tree)

+        data['animation_data'] = dump_animation_data(datablock)

         return data

-    def _resolve_deps_implementation(self):
-        # TODO: resolve node group deps
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.materials)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         deps = []

-        if self.instance.use_nodes:
-            deps.extend(get_node_tree_dependencies(self.instance.node_tree))
-        if self.is_library:
-            deps.append(self.instance.library)
+        if datablock.use_nodes:
+            deps.extend(get_node_tree_dependencies(datablock.node_tree))
+            deps.extend(resolve_animation_dependencies(datablock.node_tree))
+        deps.extend(resolve_animation_dependencies(datablock))

         return deps

+_type = bpy.types.Material
+_class = BlMaterial
```
@ -25,8 +25,13 @@ import numpy as np
|
|||||||
from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
|
from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
|
||||||
from replication.constants import DIFF_BINARY
|
from replication.constants import DIFF_BINARY
|
||||||
from replication.exception import ContextError
|
from replication.exception import ContextError
|
||||||
from .bl_datablock import BlDatablock, get_datablock_from_uuid
|
from replication.protocol import ReplicatedDatablock
|
||||||
|
|
||||||
|
from .bl_datablock import get_datablock_from_uuid
|
||||||
from .bl_material import dump_materials_slots, load_materials_slots
|
from .bl_material import dump_materials_slots, load_materials_slots
|
||||||
|
from ..utils import get_preferences
|
||||||
|
from .bl_datablock import resolve_datablock_from_uuid
|
||||||
|
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||||
|
|
||||||
VERTICE = ['co']
|
VERTICE = ['co']
|
||||||
|
|
||||||
@ -49,76 +54,79 @@ POLYGON = [
|
|||||||
'material_index',
|
'material_index',
|
||||||
]
|
]
|
||||||
|
|
||||||
class BlMesh(BlDatablock):
|
class BlMesh(ReplicatedDatablock):
|
||||||
|
use_delta = True
|
||||||
|
|
||||||
bl_id = "meshes"
|
bl_id = "meshes"
|
||||||
bl_class = bpy.types.Mesh
|
bl_class = bpy.types.Mesh
|
||||||
bl_check_common = False
|
bl_check_common = False
|
||||||
bl_icon = 'MESH_DATA'
|
bl_icon = 'MESH_DATA'
|
||||||
bl_reload_parent = True
|
bl_reload_parent = True
|
||||||
|
|
||||||
def _construct(self, data):
|
@staticmethod
|
||||||
instance = bpy.data.meshes.new(data["name"])
|
def construct(data: dict) -> object:
|
||||||
instance.uuid = self.uuid
|
return bpy.data.meshes.new(data.get("name"))
|
||||||
return instance
|
|
||||||
|
|
||||||
def _load_implementation(self, data, target):
|
@staticmethod
|
||||||
if not target or target.is_editmode:
|
def load(data: dict, datablock: object):
|
||||||
|
if not datablock or datablock.is_editmode:
|
||||||
raise ContextError
|
raise ContextError
|
||||||
else:
|
else:
|
||||||
|
load_animation_data(data.get('animation_data'), datablock)
|
||||||
|
|
||||||
loader = Loader()
|
loader = Loader()
|
||||||
loader.load(target, data)
|
loader.load(datablock, data)
|
||||||
|
|
||||||
# MATERIAL SLOTS
|
# MATERIAL SLOTS
|
||||||
src_materials = data.get('materials', None)
|
src_materials = data.get('materials', None)
|
||||||
if src_materials:
|
if src_materials:
|
||||||
load_materials_slots(src_materials, target.materials)
|
load_materials_slots(src_materials, datablock.materials)
|
||||||
|
|
||||||
# CLEAR GEOMETRY
|
# CLEAR GEOMETRY
|
||||||
if target.vertices:
|
if datablock.vertices:
|
||||||
target.clear_geometry()
|
datablock.clear_geometry()
|
||||||
|
|
||||||
target.vertices.add(data["vertex_count"])
|
datablock.vertices.add(data["vertex_count"])
|
||||||
target.edges.add(data["egdes_count"])
|
datablock.edges.add(data["egdes_count"])
|
||||||
target.loops.add(data["loop_count"])
|
datablock.loops.add(data["loop_count"])
|
||||||
target.polygons.add(data["poly_count"])
|
datablock.polygons.add(data["poly_count"])
|
||||||
|
|
||||||
# LOADING
|
# LOADING
|
||||||
np_load_collection(data['vertices'], target.vertices, VERTICE)
|
np_load_collection(data['vertices'], datablock.vertices, VERTICE)
|
||||||
np_load_collection(data['edges'], target.edges, EDGE)
|
np_load_collection(data['edges'], datablock.edges, EDGE)
|
||||||
np_load_collection(data['loops'], target.loops, LOOP)
|
np_load_collection(data['loops'], datablock.loops, LOOP)
|
||||||
np_load_collection(data["polygons"],target.polygons, POLYGON)
|
np_load_collection(data["polygons"],datablock.polygons, POLYGON)
|
||||||
|
|
||||||
# UV Layers
|
# UV Layers
|
||||||
if 'uv_layers' in data.keys():
|
if 'uv_layers' in data.keys():
|
||||||
for layer in data['uv_layers']:
|
for layer in data['uv_layers']:
|
||||||
if layer not in target.uv_layers:
|
if layer not in datablock.uv_layers:
|
||||||
target.uv_layers.new(name=layer)
|
datablock.uv_layers.new(name=layer)
|
||||||
|
|
||||||
np_load_collection_primitives(
|
np_load_collection_primitives(
|
||||||
target.uv_layers[layer].data,
|
datablock.uv_layers[layer].data,
|
||||||
'uv',
|
'uv',
|
||||||
data["uv_layers"][layer]['data'])
|
data["uv_layers"][layer]['data'])
|
||||||
|
|
||||||
# Vertex color
|
# Vertex color
|
||||||
if 'vertex_colors' in data.keys():
|
if 'vertex_colors' in data.keys():
|
||||||
for color_layer in data['vertex_colors']:
|
for color_layer in data['vertex_colors']:
|
||||||
if color_layer not in target.vertex_colors:
|
if color_layer not in datablock.vertex_colors:
|
||||||
target.vertex_colors.new(name=color_layer)
|
datablock.vertex_colors.new(name=color_layer)
|
||||||
|
|
||||||
np_load_collection_primitives(
|
np_load_collection_primitives(
|
||||||
target.vertex_colors[color_layer].data,
|
datablock.vertex_colors[color_layer].data,
|
||||||
'color',
|
'color',
|
||||||
data["vertex_colors"][color_layer]['data'])
|
data["vertex_colors"][color_layer]['data'])
|
||||||
|
|
||||||
target.validate()
|
datablock.validate()
|
||||||
target.update()
|
datablock.update()
|
||||||
|
|
||||||
def _dump_implementation(self, data, instance=None):
|
@staticmethod
|
||||||
assert(instance)
|
def dump(datablock: object) -> dict:
|
||||||
|
if (datablock.is_editmode or bpy.context.mode == "SCULPT") and not get_preferences().sync_flags.sync_during_editmode:
|
||||||
if (instance.is_editmode or bpy.context.mode == "SCULPT") and not self.preferences.sync_flags.sync_during_editmode:
|
|
||||||
raise ContextError("Mesh is in edit mode")
|
raise ContextError("Mesh is in edit mode")
|
||||||
mesh = instance
|
mesh = datablock
|
||||||
|
|
||||||
dumper = Dumper()
|
dumper = Dumper()
|
||||||
dumper.depth = 1
|
dumper.depth = 1
|
||||||
@@ -132,6 +140,8 @@ class BlMesh(BlDatablock):
|
|||||||
|
|
||||||
data = dumper.dump(mesh)
|
data = dumper.dump(mesh)
|
||||||
|
|
||||||
|
data['animation_data'] = dump_animation_data(datablock)
|
||||||
|
|
||||||
# VERTICES
|
# VERTICES
|
||||||
data["vertex_count"] = len(mesh.vertices)
|
data["vertex_count"] = len(mesh.vertices)
|
||||||
data["vertices"] = np_dump_collection(mesh.vertices, VERTICE)
|
data["vertices"] = np_dump_collection(mesh.vertices, VERTICE)
|
||||||
@@ -163,21 +173,30 @@ class BlMesh(BlDatablock):
|
|||||||
data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')
|
data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')
|
||||||
|
|
||||||
# Materials
|
# Materials
|
||||||
data['materials'] = dump_materials_slots(instance.materials)
|
data['materials'] = dump_materials_slots(datablock.materials)
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def _resolve_deps_implementation(self):
|
@staticmethod
|
||||||
|
def resolve_deps(datablock: object) -> [object]:
|
||||||
deps = []
|
deps = []
|
||||||
|
|
||||||
for material in self.instance.materials:
|
for material in datablock.materials:
|
||||||
if material:
|
if material:
|
||||||
deps.append(material)
|
deps.append(material)
|
||||||
|
|
||||||
|
deps.extend(resolve_animation_dependencies(datablock))
|
||||||
|
|
||||||
return deps
|
return deps
|
||||||
|
|
||||||
def diff(self):
|
@staticmethod
|
||||||
if 'EDIT' in bpy.context.mode \
|
def resolve(data: dict) -> object:
|
||||||
and not self.preferences.sync_flags.sync_during_editmode:
|
uuid = data.get('uuid')
|
||||||
return False
|
return resolve_datablock_from_uuid(uuid, bpy.data.meshes)
|
||||||
else:
|
|
||||||
return super().diff()
|
@staticmethod
|
||||||
|
def needs_update(datablock: object, data: dict) -> bool:
|
||||||
|
return ('EDIT' not in bpy.context.mode and bpy.context.mode != 'SCULPT') \
|
||||||
|
or get_preferences().sync_flags.sync_during_editmode
|
||||||
|
|
||||||
|
_type = bpy.types.Mesh
|
||||||
|
_class = BlMesh
|
||||||
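The BlMesh refactor keeps the geometry transport unchanged: the VERTICE/EDGE/LOOP/POLYGON attribute lists drive np_dump_collection / np_load_collection, which move whole bpy collections as flat numpy buffers. A minimal sketch of that idea, assuming the helpers wrap bpy's foreach_get/foreach_set (the real dump_anything implementation may differ):

```python
# Sketch only: flat numpy (de)serialization of one attribute of a bpy collection,
# assuming np_dump_collection/np_load_collection are built on foreach_get/foreach_set.
import numpy as np

def dump_attr(collection, attr: str, size: int) -> bytes:
    # One float32 slot per item and per component (e.g. 'co' has size 3).
    buffer = np.empty(len(collection) * size, dtype=np.float32)
    collection.foreach_get(attr, buffer)
    return buffer.tobytes()

def load_attr(collection, attr: str, raw: bytes):
    # The target collection must already hold the right number of items
    # (hence the vertices.add()/edges.add()/... calls in BlMesh.load above).
    collection.foreach_set(attr, np.frombuffer(raw, dtype=np.float32))

# Round-trip for vertex coordinates:
#   raw = dump_attr(mesh.vertices, 'co', 3)
#   other_mesh.vertices.add(len(mesh.vertices))
#   load_attr(other_mesh.vertices, 'co', raw)
```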
|
@@ -23,7 +23,9 @@ from .dump_anything import (
|
|||||||
Dumper, Loader, np_dump_collection_primitive, np_load_collection_primitives,
|
Dumper, Loader, np_dump_collection_primitive, np_load_collection_primitives,
|
||||||
np_dump_collection, np_load_collection)
|
np_dump_collection, np_load_collection)
|
||||||
|
|
||||||
from .bl_datablock import BlDatablock
|
from replication.protocol import ReplicatedDatablock
|
||||||
|
from .bl_datablock import resolve_datablock_from_uuid
|
||||||
|
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||||
|
|
||||||
|
|
||||||
ELEMENT = [
|
ELEMENT = [
|
||||||
@@ -62,29 +64,35 @@ def load_metaball_elements(elements_data, elements):
|
|||||||
np_load_collection(elements_data, elements, ELEMENT)
|
np_load_collection(elements_data, elements, ELEMENT)
|
||||||
|
|
||||||
|
|
||||||
class BlMetaball(BlDatablock):
|
class BlMetaball(ReplicatedDatablock):
|
||||||
|
use_delta = True
|
||||||
|
|
||||||
bl_id = "metaballs"
|
bl_id = "metaballs"
|
||||||
bl_class = bpy.types.MetaBall
|
bl_class = bpy.types.MetaBall
|
||||||
bl_check_common = False
|
bl_check_common = False
|
||||||
bl_icon = 'META_BALL'
|
bl_icon = 'META_BALL'
|
||||||
bl_reload_parent = False
|
bl_reload_parent = False
|
||||||
|
|
||||||
def _construct(self, data):
|
@staticmethod
|
||||||
|
def construct(data: dict) -> object:
|
||||||
return bpy.data.metaballs.new(data["name"])
|
return bpy.data.metaballs.new(data["name"])
|
||||||
|
|
||||||
def _load_implementation(self, data, target):
|
@staticmethod
|
||||||
loader = Loader()
|
def load(data: dict, datablock: object):
|
||||||
loader.load(target, data)
|
load_animation_data(data.get('animation_data'), datablock)
|
||||||
|
|
||||||
target.elements.clear()
|
loader = Loader()
|
||||||
|
loader.load(datablock, data)
|
||||||
|
|
||||||
|
datablock.elements.clear()
|
||||||
|
|
||||||
for mtype in data["elements"]['type']:
|
for mtype in data["elements"]['type']:
|
||||||
new_element = target.elements.new()
|
new_element = datablock.elements.new()
|
||||||
|
|
||||||
load_metaball_elements(data['elements'], target.elements)
|
load_metaball_elements(data['elements'], datablock.elements)
|
||||||
|
|
||||||
def _dump_implementation(self, data, instance=None):
|
@staticmethod
|
||||||
assert(instance)
|
def dump(datablock: object) -> dict:
|
||||||
dumper = Dumper()
|
dumper = Dumper()
|
||||||
dumper.depth = 1
|
dumper.depth = 1
|
||||||
dumper.include_filter = [
|
dumper.include_filter = [
|
||||||
@@ -98,7 +106,24 @@ class BlMetaball(BlDatablock):
|
|||||||
'texspace_size'
|
'texspace_size'
|
||||||
]
|
]
|
||||||
|
|
||||||
data = dumper.dump(instance)
|
data = dumper.dump(datablock)
|
||||||
data['elements'] = dump_metaball_elements(instance.elements)
|
data['animation_data'] = dump_animation_data(datablock)
|
||||||
|
data['elements'] = dump_metaball_elements(datablock.elements)
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def resolve(data: dict) -> object:
|
||||||
|
uuid = data.get('uuid')
|
||||||
|
return resolve_datablock_from_uuid(uuid, bpy.data.metaballs)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def resolve_deps(datablock: object) -> [object]:
|
||||||
|
deps = []
|
||||||
|
|
||||||
|
deps.extend(resolve_animation_dependencies(datablock))
|
||||||
|
|
||||||
|
return deps
|
||||||
|
|
||||||
|
_type = bpy.types.MetaBall
|
||||||
|
_class = BlMetaball
|
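As with BlMesh, BlMetaball now finds its local datablock through resolve() and the uuid stored in the dumped dict. The helper imported from bl_datablock is not shown in this diff; a plausible shape of it is sketched below purely as an assumption:

```python
# Hypothetical sketch of a uuid lookup; the real resolve_datablock_from_uuid in
# bl_datablock.py may differ. Assumes replicated datablocks expose a 'uuid' property.
import bpy

def resolve_datablock_from_uuid(uuid, collection):
    if not uuid:
        return None
    for datablock in collection:
        if getattr(datablock, 'uuid', None) == uuid:
            return datablock
    return None

# Mirrors BlMetaball.resolve():
#   metaball = resolve_datablock_from_uuid(data.get('uuid'), bpy.data.metaballs)
```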
@@ -20,26 +20,45 @@ import bpy
|
|||||||
import mathutils
|
import mathutils
|
||||||
|
|
||||||
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
|
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
|
||||||
from .bl_datablock import BlDatablock
|
from replication.protocol import ReplicatedDatablock
|
||||||
from .bl_material import (dump_node_tree,
|
from .bl_material import (dump_node_tree,
|
||||||
load_node_tree,
|
load_node_tree,
|
||||||
get_node_tree_dependencies)
|
get_node_tree_dependencies)
|
||||||
|
from .bl_datablock import resolve_datablock_from_uuid
|
||||||
|
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||||
|
|
||||||
|
class BlNodeGroup(ReplicatedDatablock):
|
||||||
|
use_delta = True
|
||||||
|
|
||||||
class BlNodeGroup(BlDatablock):
|
|
||||||
bl_id = "node_groups"
|
bl_id = "node_groups"
|
||||||
bl_class = bpy.types.NodeTree
|
bl_class = bpy.types.NodeTree
|
||||||
bl_check_common = False
|
bl_check_common = False
|
||||||
bl_icon = 'NODETREE'
|
bl_icon = 'NODETREE'
|
||||||
bl_reload_parent = False
|
bl_reload_parent = False
|
||||||
|
|
||||||
def _construct(self, data):
|
@staticmethod
|
||||||
|
def construct(data: dict) -> object:
|
||||||
return bpy.data.node_groups.new(data["name"], data["type"])
|
return bpy.data.node_groups.new(data["name"], data["type"])
|
||||||
|
|
||||||
def _load_implementation(self, data, target):
|
@staticmethod
|
||||||
load_node_tree(data, target)
|
def load(data: dict, datablock: object):
|
||||||
|
load_node_tree(data, datablock)
|
||||||
|
|
||||||
def _dump_implementation(self, data, instance=None):
|
@staticmethod
|
||||||
return dump_node_tree(instance)
|
def dump(datablock: object) -> dict:
|
||||||
|
return dump_node_tree(datablock)
|
||||||
|
|
||||||
def _resolve_deps_implementation(self):
|
@staticmethod
|
||||||
return get_node_tree_dependencies(self.instance)
|
def resolve(data: dict) -> object:
|
||||||
|
uuid = data.get('uuid')
|
||||||
|
return resolve_datablock_from_uuid(uuid, bpy.data.node_groups)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def resolve_deps(datablock: object) -> [object]:
|
||||||
|
deps = []
|
||||||
|
deps.extend(get_node_tree_dependencies(datablock))
|
||||||
|
deps.extend(resolve_animation_dependencies(datablock))
|
||||||
|
return deps
|
||||||
|
|
||||||
|
_type = [bpy.types.ShaderNodeTree, bpy.types.GeometryNodeTree]
|
||||||
|
_class = BlNodeGroup
|
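BlNodeGroup follows the same pattern as the other converted types: every hook is now a staticmethod on a ReplicatedDatablock subclass, and the module ends with a `_type`/`_class` pair. A hedged sketch of how a stateless implementation like this can be driven (the actual replication.protocol API is not shown in this diff):

```python
# Sketch of driving a stateless ReplicatedDatablock implementation; the function
# names here are illustrative, not the replication library's real entry points.
def commit(impl, datablock):
    """Serialize a datablock and list what must be replicated before it."""
    data = impl.dump(datablock)            # plain dict, safe to send over the wire
    deps = impl.resolve_deps(datablock)    # other datablocks this one depends on
    return data, deps

def apply(impl, data):
    """Find the local datablock by uuid, create it if missing, then load state."""
    datablock = impl.resolve(data) or impl.construct(data)
    impl.load(data, datablock)
    return datablock
```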
@@ -22,8 +22,11 @@ import bpy
|
|||||||
import mathutils
|
import mathutils
|
||||||
from replication.exception import ContextError
|
from replication.exception import ContextError
|
||||||
|
|
||||||
from .bl_datablock import BlDatablock, get_datablock_from_uuid
|
from replication.protocol import ReplicatedDatablock
|
||||||
|
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
|
||||||
from .bl_material import IGNORED_SOCKETS
|
from .bl_material import IGNORED_SOCKETS
|
||||||
|
from ..utils import get_preferences
|
||||||
|
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||||
from .dump_anything import (
|
from .dump_anything import (
|
||||||
Dumper,
|
Dumper,
|
||||||
Loader,
|
Loader,
|
||||||
@@ -37,6 +40,14 @@ SKIN_DATA = [
|
|||||||
'use_root'
|
'use_root'
|
||||||
]
|
]
|
||||||
|
|
||||||
|
SHAPEKEY_BLOCK_ATTR = [
|
||||||
|
'mute',
|
||||||
|
'value',
|
||||||
|
'slider_min',
|
||||||
|
'slider_max',
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
if bpy.app.version[1] >= 93:
|
if bpy.app.version[1] >= 93:
|
||||||
SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
|
SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
|
||||||
else:
|
else:
|
||||||
@@ -44,6 +55,7 @@ else:
|
|||||||
logging.warning("Geometry node Float parameter not supported in \
|
logging.warning("Geometry node Float parameter not supported in \
|
||||||
blender 2.92.")
|
blender 2.92.")
|
||||||
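The guard above keys only on the minor version number, which is enough for the 2.9x series this code targets; for reference, a full version-tuple comparison expresses the same gate in a form that also survives a major version bump (shown as an alternative sketch, not what the diff does):

```python
# Alternative sketch: compare the whole version tuple instead of version[1].
import bpy

GEOMETRY_NODE_FLOAT_SUPPORTED = bpy.app.version >= (2, 93, 0)
```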
|
|
||||||
|
|
||||||
def get_node_group_inputs(node_group):
|
def get_node_group_inputs(node_group):
|
||||||
inputs = []
|
inputs = []
|
||||||
for inpt in node_group.inputs:
|
for inpt in node_group.inputs:
|
||||||
@@ -82,6 +94,7 @@ def dump_physics(target: bpy.types.Object)->dict:
|
|||||||
|
|
||||||
return physics_data
|
return physics_data
|
||||||
|
|
||||||
|
|
||||||
def load_physics(dumped_settings: dict, target: bpy.types.Object):
|
def load_physics(dumped_settings: dict, target: bpy.types.Object):
|
||||||
""" Load all physics settings from a given object excluding modifier
|
""" Load all physics settings from a given object excluding modifier
|
||||||
related physics settings (such as softbody, cloth, dynapaint and fluid)
|
related physics settings (such as softbody, cloth, dynapaint and fluid)
|
||||||
@@ -107,7 +120,8 @@ def load_physics(dumped_settings: dict, target: bpy.types.Object):
|
|||||||
loader.load(target.rigid_body_constraint, dumped_settings['rigid_body_constraint'])
|
loader.load(target.rigid_body_constraint, dumped_settings['rigid_body_constraint'])
|
||||||
elif target.rigid_body_constraint:
|
elif target.rigid_body_constraint:
|
||||||
bpy.ops.rigidbody.constraint_remove({"object": target})
|
bpy.ops.rigidbody.constraint_remove({"object": target})
|
||||||
|
|
||||||
|
|
||||||
def dump_modifier_geometry_node_inputs(modifier: bpy.types.Modifier) -> list:
|
def dump_modifier_geometry_node_inputs(modifier: bpy.types.Modifier) -> list:
|
||||||
""" Dump geometry node modifier input properties
|
""" Dump geometry node modifier input properties
|
||||||
|
|
||||||
@@ -289,115 +303,282 @@ def load_vertex_groups(dumped_vertex_groups: dict, target_object: bpy.types.Obje
|
|||||||
vertex_group.add([index], weight, 'REPLACE')
|
vertex_group.add([index], weight, 'REPLACE')
|
||||||
|
|
||||||
|
|
||||||
class BlObject(BlDatablock):
|
def dump_shape_keys(target_key: bpy.types.Key)->dict:
|
||||||
|
""" Dump the target shape_keys datablock to a dict using numpy
|
||||||
|
|
||||||
|
:param target_key: target key datablock
|
||||||
|
:type target_key: bpy.types.Key
|
||||||
|
:return: dict
|
||||||
|
"""
|
||||||
|
|
||||||
|
dumped_key_blocks = []
|
||||||
|
dumper = Dumper()
|
||||||
|
dumper.include_filter = [
|
||||||
|
'name',
|
||||||
|
'mute',
|
||||||
|
'value',
|
||||||
|
'slider_min',
|
||||||
|
'slider_max',
|
||||||
|
]
|
||||||
|
for key in target_key.key_blocks:
|
||||||
|
dumped_key_block = dumper.dump(key)
|
||||||
|
dumped_key_block['data'] = np_dump_collection(key.data, ['co'])
|
||||||
|
dumped_key_block['relative_key'] = key.relative_key.name
|
||||||
|
dumped_key_blocks.append(dumped_key_block)
|
||||||
|
|
||||||
|
return {
|
||||||
|
'reference_key': target_key.reference_key.name,
|
||||||
|
'use_relative': target_key.use_relative,
|
||||||
|
'key_blocks': dumped_key_blocks,
|
||||||
|
'animation_data': dump_animation_data(target_key)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def load_shape_keys(dumped_shape_keys: dict, target_object: bpy.types.Object):
|
||||||
|
""" Load the target shape_keys datablock to a dict using numpy
|
||||||
|
|
||||||
|
:param dumped_shape_keys: dumped shape keys data
|
||||||
|
:type dumped_shape_keys: dict
|
||||||
|
:param target_object: object used to load the shapekeys data onto
|
||||||
|
:type target_object: bpy.types.Object
|
||||||
|
"""
|
||||||
|
loader = Loader()
|
||||||
|
# Remove existing ones
|
||||||
|
target_object.shape_key_clear()
|
||||||
|
|
||||||
|
# Create keys and load vertices coords
|
||||||
|
dumped_key_blocks = dumped_shape_keys.get('key_blocks')
|
||||||
|
for dumped_key_block in dumped_key_blocks:
|
||||||
|
key_block = target_object.shape_key_add(name=dumped_key_block['name'])
|
||||||
|
|
||||||
|
loader.load(key_block, dumped_key_block)
|
||||||
|
np_load_collection(dumped_key_block['data'], key_block.data, ['co'])
|
||||||
|
|
||||||
|
# Load relative key after all
|
||||||
|
for dumped_key_block in dumped_key_blocks:
|
||||||
|
relative_key_name = dumped_key_block.get('relative_key')
|
||||||
|
key_name = dumped_key_block.get('name')
|
||||||
|
|
||||||
|
target_keyblock = target_object.data.shape_keys.key_blocks[key_name]
|
||||||
|
relative_key = target_object.data.shape_keys.key_blocks[relative_key_name]
|
||||||
|
|
||||||
|
target_keyblock.relative_key = relative_key
|
||||||
|
|
||||||
|
# Shape keys animation data
|
||||||
|
anim_data = dumped_shape_keys.get('animation_data')
|
||||||
|
|
||||||
|
if anim_data:
|
||||||
|
load_animation_data(anim_data, target_object.data.shape_keys)
|
||||||
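dump_shape_keys and load_shape_keys replace the old hand-rolled per-vertex loop in BlObject: block settings go through the Dumper include filter, vertex coordinates travel as numpy buffers, and relative keys are re-linked in a second pass once every block exists. A short usage sketch; the object names are placeholders, not part of the diff:

```python
# Usage sketch for the two helpers above ('Source' and 'Target' are placeholders).
import bpy

source = bpy.data.objects['Source']
target = bpy.data.objects['Target']

if source.data.shape_keys:
    dumped = dump_shape_keys(source.data.shape_keys)  # dict of key blocks + coords
    load_shape_keys(dumped, target)                   # clears and rebuilds target's keys
```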
|
|
||||||
|
|
||||||
|
def dump_modifiers(modifiers: bpy.types.bpy_prop_collection) -> list:
|
||||||
|
""" Dump all modifiers of a modifier collection into a dict
|
||||||
|
|
||||||
|
:param modifiers: modifiers
|
||||||
|
:type modifiers: bpy.types.bpy_prop_collection
|
||||||
|
:return: list
|
||||||
|
"""
|
||||||
|
dumped_modifiers = []
|
||||||
|
dumper = Dumper()
|
||||||
|
dumper.depth = 1
|
||||||
|
dumper.exclude_filter = ['is_active']
|
||||||
|
|
||||||
|
for modifier in modifiers:
|
||||||
|
dumped_modifier = dumper.dump(modifier)
|
||||||
|
# hack to dump geometry nodes inputs
|
||||||
|
if modifier.type == 'NODES':
|
||||||
|
dumped_inputs = dump_modifier_geometry_node_inputs(
|
||||||
|
modifier)
|
||||||
|
dumped_modifier['inputs'] = dumped_inputs
|
||||||
|
|
||||||
|
elif modifier.type == 'PARTICLE_SYSTEM':
|
||||||
|
dumper.exclude_filter = [
|
||||||
|
"is_edited",
|
||||||
|
"is_editable",
|
||||||
|
"is_global_hair"
|
||||||
|
]
|
||||||
|
dumped_modifier['particle_system'] = dumper.dump(modifier.particle_system)
|
||||||
|
dumped_modifier['particle_system']['settings_uuid'] = modifier.particle_system.settings.uuid
|
||||||
|
|
||||||
|
elif modifier.type in ['SOFT_BODY', 'CLOTH']:
|
||||||
|
dumped_modifier['settings'] = dumper.dump(modifier.settings)
|
||||||
|
elif modifier.type == 'UV_PROJECT':
|
||||||
|
dumped_modifier['projectors'] = [p.object.name for p in modifier.projectors if p and p.object]
|
||||||
|
|
||||||
|
dumped_modifiers.append(dumped_modifier)
|
||||||
|
return dumped_modifiers
|
||||||
|
|
||||||
|
def dump_constraints(constraints: bpy.types.bpy_prop_collection)->list:
|
||||||
|
"""Dump all constraints to a list
|
||||||
|
|
||||||
|
:param constraints: constraints
|
||||||
|
:type constraints: bpy.types.bpy_prop_collection
|
||||||
|
:return: list
|
||||||
|
"""
|
||||||
|
dumper = Dumper()
|
||||||
|
dumper.depth = 2
|
||||||
|
dumper.include_filter = None
|
||||||
|
dumped_constraints = []
|
||||||
|
for constraint in constraints:
|
||||||
|
dumped_constraints.append(dumper.dump(constraint))
|
||||||
|
return dumped_constraints
|
||||||
|
|
||||||
|
def load_constraints(dumped_constraints: list, constraints: bpy.types.bpy_prop_collection):
|
||||||
|
""" Load dumped constraints
|
||||||
|
|
||||||
|
:param dumped_constraints: list of constraints to load
|
||||||
|
:type dumped_constraints: list
|
||||||
|
:param constraints: constraints
|
||||||
|
:type constraints: bpy.types.bpy_prop_collection
|
||||||
|
"""
|
||||||
|
loader = Loader()
|
||||||
|
constraints.clear()
|
||||||
|
for dumped_constraint in dumped_constraints:
|
||||||
|
constraint_type = dumped_constraint.get('type')
|
||||||
|
new_constraint = constraints.new(constraint_type)
|
||||||
|
loader.load(new_constraint, dumped_constraint)
|
||||||
|
|
||||||
|
def load_modifiers(dumped_modifiers: list, modifiers: bpy.types.bpy_prop_collection):
|
||||||
|
""" Dump all modifiers of a modifier collection into a dict
|
||||||
|
|
||||||
|
:param dumped_modifiers: list of modifiers to load
|
||||||
|
:type dumped_modifiers: list
|
||||||
|
:param modifiers: modifiers
|
||||||
|
:type modifiers: bpy.types.bpy_prop_collection
|
||||||
|
"""
|
||||||
|
loader = Loader()
|
||||||
|
modifiers.clear()
|
||||||
|
for dumped_modifier in dumped_modifiers:
|
||||||
|
name = dumped_modifier.get('name')
|
||||||
|
mtype = dumped_modifier.get('type')
|
||||||
|
loaded_modifier = modifiers.new(name, mtype)
|
||||||
|
loader.load(loaded_modifier, dumped_modifier)
|
||||||
|
|
||||||
|
if loaded_modifier.type == 'NODES':
|
||||||
|
load_modifier_geometry_node_inputs(dumped_modifier, loaded_modifier)
|
||||||
|
elif loaded_modifier.type == 'PARTICLE_SYSTEM':
|
||||||
|
default = loaded_modifier.particle_system.settings
|
||||||
|
dumped_particles = dumped_modifier['particle_system']
|
||||||
|
loader.load(loaded_modifier.particle_system, dumped_particles)
|
||||||
|
|
||||||
|
settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
|
||||||
|
if settings:
|
||||||
|
loaded_modifier.particle_system.settings = settings
|
||||||
|
# Hack to remove the default generated particle settings
|
||||||
|
if not default.uuid:
|
||||||
|
bpy.data.particles.remove(default)
|
||||||
|
elif loaded_modifier.type in ['SOFT_BODY', 'CLOTH']:
|
||||||
|
loader.load(loaded_modifier.settings, dumped_modifier['settings'])
|
||||||
|
elif loaded_modifier.type == 'UV_PROJECT':
|
||||||
|
for projector_index, projector_object in enumerate(dumped_modifier['projectors']):
|
||||||
|
target_object = bpy.data.objects.get(projector_object)
|
||||||
|
if target_object:
|
||||||
|
loaded_modifier.projectors[projector_index].object = target_object
|
||||||
|
else:
|
||||||
|
logging.error("Could't load projector target object {projector_object}")
|
||||||
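dump_modifiers / load_modifiers pull the per-type special cases (geometry-node inputs, particle systems, soft-body/cloth settings, UV-project projectors) out of BlObject into reusable helpers. A short usage sketch with placeholder object names:

```python
# Usage sketch for the modifier helpers above ('Source'/'Target' are placeholders).
import bpy

source = bpy.data.objects['Source']
target = bpy.data.objects['Target']

dumped = dump_modifiers(source.modifiers)   # list of per-modifier dicts
load_modifiers(dumped, target.modifiers)    # clears the target stack and rebuilds it
```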
|
|
||||||
|
|
||||||
|
def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_prop_collection):
|
||||||
|
""" Load modifiers custom data not managed by the dump_anything loader
|
||||||
|
|
||||||
|
:param dumped_modifiers: modifiers to load
|
||||||
|
:type dumped_modifiers: dict
|
||||||
|
:param modifiers: target modifiers collection
|
||||||
|
:type modifiers: bpy.types.bpy_prop_collection
|
||||||
|
"""
|
||||||
|
loader = Loader()
|
||||||
|
|
||||||
|
for modifier in modifiers:
|
||||||
|
dumped_modifier = dumped_modifiers.get(modifier.name)
|
||||||
|
|
||||||
|
|
||||||
|
class BlObject(ReplicatedDatablock):
|
||||||
|
use_delta = True
|
||||||
|
|
||||||
bl_id = "objects"
|
bl_id = "objects"
|
||||||
bl_class = bpy.types.Object
|
bl_class = bpy.types.Object
|
||||||
bl_check_common = False
|
bl_check_common = False
|
||||||
bl_icon = 'OBJECT_DATA'
|
bl_icon = 'OBJECT_DATA'
|
||||||
bl_reload_parent = False
|
bl_reload_parent = False
|
||||||
|
|
||||||
def _construct(self, data):
|
@staticmethod
|
||||||
|
def construct(data: dict) -> object:
|
||||||
instance = None
|
instance = None
|
||||||
|
|
||||||
if self.is_library:
|
|
||||||
with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
|
|
||||||
targetData.objects = [
|
|
||||||
name for name in sourceData.objects if name == self.data['name']]
|
|
||||||
|
|
||||||
instance = bpy.data.objects[self.data['name']]
|
|
||||||
instance.uuid = self.uuid
|
|
||||||
return instance
|
|
||||||
|
|
||||||
# TODO: refactoring
|
# TODO: refactoring
|
||||||
object_name = data.get("name")
|
object_name = data.get("name")
|
||||||
data_uuid = data.get("data_uuid")
|
data_uuid = data.get("data_uuid")
|
||||||
data_id = data.get("data")
|
data_id = data.get("data")
|
||||||
|
data_type = data.get("type")
|
||||||
|
|
||||||
object_data = get_datablock_from_uuid(
|
object_data = get_datablock_from_uuid(
|
||||||
data_uuid,
|
data_uuid,
|
||||||
find_data_from_name(data_id),
|
find_data_from_name(data_id),
|
||||||
ignore=['images']) # TODO: use resolve_from_id
|
ignore=['images']) # TODO: use resolve_from_id
|
||||||
|
|
||||||
if object_data is None and data_uuid:
|
if data_type != 'EMPTY' and object_data is None:
|
||||||
raise Exception(f"Fail to load object {data['name']}({self.uuid})")
|
raise Exception(f"Fail to load object {data['name']})")
|
||||||
|
|
||||||
instance = bpy.data.objects.new(object_name, object_data)
|
return bpy.data.objects.new(object_name, object_data)
|
||||||
instance.uuid = self.uuid
|
|
||||||
|
|
||||||
return instance
|
@staticmethod
|
||||||
|
def load(data: dict, datablock: object):
|
||||||
def _load_implementation(self, data, target):
|
|
||||||
loader = Loader()
|
loader = Loader()
|
||||||
|
load_animation_data(data.get('animation_data'), datablock)
|
||||||
data_uuid = data.get("data_uuid")
|
data_uuid = data.get("data_uuid")
|
||||||
data_id = data.get("data")
|
data_id = data.get("data")
|
||||||
|
|
||||||
if target.data and (target.data.name != data_id):
|
if datablock.data and (datablock.data.name != data_id):
|
||||||
target.data = get_datablock_from_uuid(
|
datablock.data = get_datablock_from_uuid(
|
||||||
data_uuid, find_data_from_name(data_id), ignore=['images'])
|
data_uuid, find_data_from_name(data_id), ignore=['images'])
|
||||||
|
|
||||||
# vertex groups
|
# vertex groups
|
||||||
vertex_groups = data.get('vertex_groups', None)
|
vertex_groups = data.get('vertex_groups', None)
|
||||||
if vertex_groups:
|
if vertex_groups:
|
||||||
load_vertex_groups(vertex_groups, target)
|
load_vertex_groups(vertex_groups, datablock)
|
||||||
|
|
||||||
object_data = target.data
|
object_data = datablock.data
|
||||||
|
|
||||||
# SHAPE KEYS
|
# SHAPE KEYS
|
||||||
if 'shape_keys' in data:
|
shape_keys = data.get('shape_keys')
|
||||||
target.shape_key_clear()
|
if shape_keys:
|
||||||
|
load_shape_keys(shape_keys, datablock)
|
||||||
# Create keys and load vertices coords
|
|
||||||
for key_block in data['shape_keys']['key_blocks']:
|
|
||||||
key_data = data['shape_keys']['key_blocks'][key_block]
|
|
||||||
target.shape_key_add(name=key_block)
|
|
||||||
|
|
||||||
loader.load(
|
|
||||||
target.data.shape_keys.key_blocks[key_block], key_data)
|
|
||||||
for vert in key_data['data']:
|
|
||||||
target.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
|
|
||||||
|
|
||||||
# Load relative key after all
|
|
||||||
for key_block in data['shape_keys']['key_blocks']:
|
|
||||||
reference = data['shape_keys']['key_blocks'][key_block]['relative_key']
|
|
||||||
|
|
||||||
target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference]
|
|
||||||
|
|
||||||
# Load transformation data
|
# Load transformation data
|
||||||
loader.load(target, data)
|
loader.load(datablock, data)
|
||||||
|
|
||||||
# Object display fields
|
# Object display fields
|
||||||
if 'display' in data:
|
if 'display' in data:
|
||||||
loader.load(target.display, data['display'])
|
loader.load(datablock.display, data['display'])
|
||||||
|
|
||||||
# Parenting
|
# Parenting
|
||||||
parent_id = data.get('parent_uid')
|
parent_id = data.get('parent_uid')
|
||||||
if parent_id:
|
if parent_id:
|
||||||
parent = get_datablock_from_uuid(parent_id[0], bpy.data.objects[parent_id[1]])
|
parent = get_datablock_from_uuid(parent_id[0], bpy.data.objects[parent_id[1]])
|
||||||
# Avoid reloading
|
# Avoid reloading
|
||||||
if target.parent != parent and parent is not None:
|
if datablock.parent != parent and parent is not None:
|
||||||
target.parent = parent
|
datablock.parent = parent
|
||||||
elif target.parent:
|
elif datablock.parent:
|
||||||
target.parent = None
|
datablock.parent = None
|
||||||
|
|
||||||
# Pose
|
# Pose
|
||||||
if 'pose' in data:
|
if 'pose' in data:
|
||||||
if not target.pose:
|
if not datablock.pose:
|
||||||
raise Exception('No pose data yet (Fixed in a near future)')
|
raise Exception('No pose data yet (Fixed in a near future)')
|
||||||
# Bone groups
|
# Bone groups
|
||||||
for bg_name in data['pose']['bone_groups']:
|
for bg_name in data['pose']['bone_groups']:
|
||||||
bg_data = data['pose']['bone_groups'].get(bg_name)
|
bg_data = data['pose']['bone_groups'].get(bg_name)
|
||||||
bg_target = target.pose.bone_groups.get(bg_name)
|
bg_target = datablock.pose.bone_groups.get(bg_name)
|
||||||
|
|
||||||
if not bg_target:
|
if not bg_target:
|
||||||
bg_target = target.pose.bone_groups.new(name=bg_name)
|
bg_target = datablock.pose.bone_groups.new(name=bg_name)
|
||||||
|
|
||||||
loader.load(bg_target, bg_data)
|
loader.load(bg_target, bg_data)
|
||||||
# target.pose.bone_groups.get
|
# datablock.pose.bone_groups.get
|
||||||
|
|
||||||
# Bones
|
# Bones
|
||||||
for bone in data['pose']['bones']:
|
for bone in data['pose']['bones']:
|
||||||
target_bone = target.pose.bones.get(bone)
|
target_bone = datablock.pose.bones.get(bone)
|
||||||
bone_data = data['pose']['bones'].get(bone)
|
bone_data = data['pose']['bones'].get(bone)
|
||||||
|
|
||||||
if 'constraints' in bone_data.keys():
|
if 'constraints' in bone_data.keys():
|
||||||
@@ -406,13 +587,13 @@ class BlObject(BlDatablock):
|
|||||||
load_pose(target_bone, bone_data)
|
load_pose(target_bone, bone_data)
|
||||||
|
|
||||||
if 'bone_group_index' in bone_data.keys():
|
if 'bone_group_index' in bone_data.keys():
|
||||||
target_bone.bone_group = target.pose.bone_groups[bone_data['bone_group_index']]
|
target_bone.bone_group = datablock.pose.bone_groups[bone_data['bone_group_index']]
|
||||||
|
|
||||||
# TODO: find another way...
|
# TODO: find another way...
|
||||||
if target.empty_display_type == "IMAGE":
|
if datablock.empty_display_type == "IMAGE":
|
||||||
img_uuid = data.get('data_uuid')
|
img_uuid = data.get('data_uuid')
|
||||||
if target.data is None and img_uuid:
|
if datablock.data is None and img_uuid:
|
||||||
target.data = get_datablock_from_uuid(img_uuid, None)
|
datablock.data = get_datablock_from_uuid(img_uuid, None)
|
||||||
|
|
||||||
if hasattr(object_data, 'skin_vertices') \
|
if hasattr(object_data, 'skin_vertices') \
|
||||||
and object_data.skin_vertices\
|
and object_data.skin_vertices\
|
||||||
@@ -423,56 +604,31 @@ class BlObject(BlDatablock):
|
|||||||
skin_data.data,
|
skin_data.data,
|
||||||
SKIN_DATA)
|
SKIN_DATA)
|
||||||
|
|
||||||
if hasattr(target, 'cycles_visibility') \
|
if hasattr(datablock, 'cycles_visibility') \
|
||||||
and 'cycles_visibility' in data:
|
and 'cycles_visibility' in data:
|
||||||
loader.load(target.cycles_visibility, data['cycles_visibility'])
|
loader.load(datablock.cycles_visibility, data['cycles_visibility'])
|
||||||
|
|
||||||
# TODO: handle geometry nodes input from dump_anything
|
if hasattr(datablock, 'modifiers'):
|
||||||
if hasattr(target, 'modifiers'):
|
load_modifiers(data['modifiers'], datablock.modifiers)
|
||||||
nodes_modifiers = [
|
|
||||||
mod for mod in target.modifiers if mod.type == 'NODES']
|
|
||||||
for modifier in nodes_modifiers:
|
|
||||||
load_modifier_geometry_node_inputs(
|
|
||||||
data['modifiers'][modifier.name], modifier)
|
|
||||||
|
|
||||||
particles_modifiers = [
|
constraints = data.get('constraints')
|
||||||
mod for mod in target.modifiers if mod.type == 'PARTICLE_SYSTEM']
|
if constraints:
|
||||||
|
load_constraints(constraints, datablock.constraints)
|
||||||
for mod in particles_modifiers:
|
|
||||||
default = mod.particle_system.settings
|
|
||||||
dumped_particles = data['modifiers'][mod.name]['particle_system']
|
|
||||||
loader.load(mod.particle_system, dumped_particles)
|
|
||||||
|
|
||||||
settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
|
|
||||||
if settings:
|
|
||||||
mod.particle_system.settings = settings
|
|
||||||
# Hack to remove the default generated particle settings
|
|
||||||
if not default.uuid:
|
|
||||||
bpy.data.particles.remove(default)
|
|
||||||
|
|
||||||
phys_modifiers = [
|
|
||||||
mod for mod in target.modifiers if mod.type in ['SOFT_BODY', 'CLOTH']]
|
|
||||||
|
|
||||||
for mod in phys_modifiers:
|
|
||||||
loader.load(mod.settings, data['modifiers'][mod.name]['settings'])
|
|
||||||
|
|
||||||
# PHYSICS
|
# PHYSICS
|
||||||
load_physics(data, target)
|
load_physics(data, datablock)
|
||||||
|
|
||||||
transform = data.get('transforms', None)
|
transform = data.get('transforms', None)
|
||||||
if transform:
|
if transform:
|
||||||
target.matrix_parent_inverse = mathutils.Matrix(
|
datablock.matrix_parent_inverse = mathutils.Matrix(transform['matrix_parent_inverse'])
|
||||||
transform['matrix_parent_inverse'])
|
datablock.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
|
||||||
target.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
|
|
||||||
target.matrix_local = mathutils.Matrix(transform['matrix_local'])
|
|
||||||
|
|
||||||
|
|
||||||
def _dump_implementation(self, data, instance=None):
|
@staticmethod
|
||||||
assert(instance)
|
def dump(datablock: object) -> dict:
|
||||||
|
if _is_editmode(datablock):
|
||||||
if _is_editmode(instance):
|
if get_preferences().sync_flags.sync_during_editmode:
|
||||||
if self.preferences.sync_flags.sync_during_editmode:
|
datablock.update_from_editmode()
|
||||||
instance.update_from_editmode()
|
|
||||||
else:
|
else:
|
||||||
raise ContextError("Object is in edit-mode.")
|
raise ContextError("Object is in edit-mode.")
|
||||||
|
|
||||||
@@ -508,60 +664,37 @@ class BlObject(BlDatablock):
|
|||||||
'show_all_edges',
|
'show_all_edges',
|
||||||
'show_texture_space',
|
'show_texture_space',
|
||||||
'show_in_front',
|
'show_in_front',
|
||||||
'type'
|
'type',
|
||||||
|
'parent_type',
|
||||||
|
'parent_bone',
|
||||||
|
'track_axis',
|
||||||
|
'up_axis',
|
||||||
]
|
]
|
||||||
|
|
||||||
data = dumper.dump(instance)
|
data = dumper.dump(datablock)
|
||||||
|
data['animation_data'] = dump_animation_data(datablock)
|
||||||
dumper.include_filter = [
|
dumper.include_filter = [
|
||||||
'matrix_parent_inverse',
|
'matrix_parent_inverse',
|
||||||
'matrix_local',
|
'matrix_local',
|
||||||
'matrix_basis']
|
'matrix_basis']
|
||||||
data['transforms'] = dumper.dump(instance)
|
data['transforms'] = dumper.dump(datablock)
|
||||||
dumper.include_filter = [
|
dumper.include_filter = [
|
||||||
'show_shadows',
|
'show_shadows',
|
||||||
]
|
]
|
||||||
data['display'] = dumper.dump(instance.display)
|
data['display'] = dumper.dump(datablock.display)
|
||||||
|
|
||||||
data['data_uuid'] = getattr(instance.data, 'uuid', None)
|
data['data_uuid'] = getattr(datablock.data, 'uuid', None)
|
||||||
if self.is_library:
|
|
||||||
return data
|
|
||||||
|
|
||||||
# PARENTING
|
# PARENTING
|
||||||
if instance.parent:
|
if datablock.parent:
|
||||||
data['parent_uid'] = (instance.parent.uuid, instance.parent.name)
|
data['parent_uid'] = (datablock.parent.uuid, datablock.parent.name)
|
||||||
|
|
||||||
# MODIFIERS
|
# MODIFIERS
|
||||||
if hasattr(instance, 'modifiers'):
|
modifiers = getattr(datablock, 'modifiers', None)
|
||||||
data["modifiers"] = {}
|
if hasattr(datablock, 'modifiers'):
|
||||||
modifiers = getattr(instance, 'modifiers', None)
|
data['modifiers'] = dump_modifiers(modifiers)
|
||||||
if modifiers:
|
|
||||||
dumper.include_filter = None
|
|
||||||
dumper.depth = 1
|
|
||||||
dumper.exclude_filter = ['is_active']
|
|
||||||
for index, modifier in enumerate(modifiers):
|
|
||||||
dumped_modifier = dumper.dump(modifier)
|
|
||||||
# hack to dump geometry nodes inputs
|
|
||||||
if modifier.type == 'NODES':
|
|
||||||
dumped_inputs = dump_modifier_geometry_node_inputs(
|
|
||||||
modifier)
|
|
||||||
dumped_modifier['inputs'] = dumped_inputs
|
|
||||||
|
|
||||||
elif modifier.type == 'PARTICLE_SYSTEM':
|
gp_modifiers = getattr(datablock, 'grease_pencil_modifiers', None)
|
||||||
dumper.exclude_filter = [
|
|
||||||
"is_edited",
|
|
||||||
"is_editable",
|
|
||||||
"is_global_hair"
|
|
||||||
]
|
|
||||||
dumped_modifier['particle_system'] = dumper.dump(modifier.particle_system)
|
|
||||||
dumped_modifier['particle_system']['settings_uuid'] = modifier.particle_system.settings.uuid
|
|
||||||
|
|
||||||
elif modifier.type in ['SOFT_BODY', 'CLOTH']:
|
|
||||||
dumped_modifier['settings'] = dumper.dump(modifier.settings)
|
|
||||||
|
|
||||||
data["modifiers"][modifier.name] = dumped_modifier
|
|
||||||
|
|
||||||
gp_modifiers = getattr(instance, 'grease_pencil_modifiers', None)
|
|
||||||
|
|
||||||
if gp_modifiers:
|
if gp_modifiers:
|
||||||
dumper.include_filter = None
|
dumper.include_filter = None
|
||||||
@@ -584,16 +717,14 @@ class BlObject(BlDatablock):
|
|||||||
|
|
||||||
|
|
||||||
# CONSTRAINTS
|
# CONSTRAINTS
|
||||||
if hasattr(instance, 'constraints'):
|
if hasattr(datablock, 'constraints'):
|
||||||
dumper.include_filter = None
|
data["constraints"] = dump_constraints(datablock.constraints)
|
||||||
dumper.depth = 3
|
|
||||||
data["constraints"] = dumper.dump(instance.constraints)
|
|
||||||
|
|
||||||
# POSE
|
# POSE
|
||||||
if hasattr(instance, 'pose') and instance.pose:
|
if hasattr(datablock, 'pose') and datablock.pose:
|
||||||
# BONES
|
# BONES
|
||||||
bones = {}
|
bones = {}
|
||||||
for bone in instance.pose.bones:
|
for bone in datablock.pose.bones:
|
||||||
bones[bone.name] = {}
|
bones[bone.name] = {}
|
||||||
dumper.depth = 1
|
dumper.depth = 1
|
||||||
rotation = 'rotation_quaternion' if bone.rotation_mode == 'QUATERNION' else 'rotation_euler'
|
rotation = 'rotation_quaternion' if bone.rotation_mode == 'QUATERNION' else 'rotation_euler'
|
||||||
@@ -618,7 +749,7 @@ class BlObject(BlDatablock):
|
|||||||
|
|
||||||
# GROUPS
|
# GROUPS
|
||||||
bone_groups = {}
|
bone_groups = {}
|
||||||
for group in instance.pose.bone_groups:
|
for group in datablock.pose.bone_groups:
|
||||||
dumper.depth = 3
|
dumper.depth = 3
|
||||||
dumper.include_filter = [
|
dumper.include_filter = [
|
||||||
'name',
|
'name',
|
||||||
@@ -628,36 +759,13 @@ class BlObject(BlDatablock):
|
|||||||
data['pose']['bone_groups'] = bone_groups
|
data['pose']['bone_groups'] = bone_groups
|
||||||
|
|
||||||
# VERTEX GROUPS
|
# VERTEX GROUPS
|
||||||
if len(instance.vertex_groups) > 0:
|
if len(datablock.vertex_groups) > 0:
|
||||||
data['vertex_groups'] = dump_vertex_groups(instance)
|
data['vertex_groups'] = dump_vertex_groups(datablock)
|
||||||
|
|
||||||
# SHAPE KEYS
|
# SHAPE KEYS
|
||||||
object_data = instance.data
|
object_data = datablock.data
|
||||||
if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
|
if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
|
||||||
dumper = Dumper()
|
data['shape_keys'] = dump_shape_keys(object_data.shape_keys)
|
||||||
dumper.depth = 2
|
|
||||||
dumper.include_filter = [
|
|
||||||
'reference_key',
|
|
||||||
'use_relative'
|
|
||||||
]
|
|
||||||
data['shape_keys'] = dumper.dump(object_data.shape_keys)
|
|
||||||
data['shape_keys']['reference_key'] = object_data.shape_keys.reference_key.name
|
|
||||||
key_blocks = {}
|
|
||||||
for key in object_data.shape_keys.key_blocks:
|
|
||||||
dumper.depth = 3
|
|
||||||
dumper.include_filter = [
|
|
||||||
'name',
|
|
||||||
'data',
|
|
||||||
'mute',
|
|
||||||
'value',
|
|
||||||
'slider_min',
|
|
||||||
'slider_max',
|
|
||||||
'data',
|
|
||||||
'co'
|
|
||||||
]
|
|
||||||
key_blocks[key.name] = dumper.dump(key)
|
|
||||||
key_blocks[key.name]['relative_key'] = key.relative_key.name
|
|
||||||
data['shape_keys']['key_blocks'] = key_blocks
|
|
||||||
|
|
||||||
# SKIN VERTICES
|
# SKIN VERTICES
|
||||||
if hasattr(object_data, 'skin_vertices') and object_data.skin_vertices:
|
if hasattr(object_data, 'skin_vertices') and object_data.skin_vertices:
|
||||||
@@ -668,7 +776,7 @@ class BlObject(BlDatablock):
|
|||||||
data['skin_vertices'] = skin_vertices
|
data['skin_vertices'] = skin_vertices
|
||||||
|
|
||||||
# CYCLE SETTINGS
|
# CYCLE SETTINGS
|
||||||
if hasattr(instance, 'cycles_visibility'):
|
if hasattr(datablock, 'cycles_visibility'):
|
||||||
dumper.include_filter = [
|
dumper.include_filter = [
|
||||||
'camera',
|
'camera',
|
||||||
'diffuse',
|
'diffuse',
|
||||||
@@ -677,36 +785,48 @@ class BlObject(BlDatablock):
|
|||||||
'scatter',
|
'scatter',
|
||||||
'shadow',
|
'shadow',
|
||||||
]
|
]
|
||||||
data['cycles_visibility'] = dumper.dump(instance.cycles_visibility)
|
data['cycles_visibility'] = dumper.dump(datablock.cycles_visibility)
|
||||||
|
|
||||||
# PHYSICS
|
# PHYSICS
|
||||||
data.update(dump_physics(instance))
|
data.update(dump_physics(datablock))
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def _resolve_deps_implementation(self):
|
@staticmethod
|
||||||
|
def resolve_deps(datablock: object) -> [object]:
|
||||||
deps = []
|
deps = []
|
||||||
|
|
||||||
# Avoid Empty case
|
# Avoid Empty case
|
||||||
if self.instance.data:
|
if datablock.data:
|
||||||
deps.append(self.instance.data)
|
deps.append(datablock.data)
|
||||||
|
|
||||||
# Particle systems
|
# Particle systems
|
||||||
for particle_slot in self.instance.particle_systems:
|
for particle_slot in datablock.particle_systems:
|
||||||
deps.append(particle_slot.settings)
|
deps.append(particle_slot.settings)
|
||||||
|
|
||||||
if self.is_library:
|
if datablock.parent:
|
||||||
deps.append(self.instance.library)
|
deps.append(datablock.parent)
|
||||||
|
|
||||||
if self.instance.parent:
|
if datablock.instance_type == 'COLLECTION':
|
||||||
deps.append(self.instance.parent)
|
|
||||||
|
|
||||||
if self.instance.instance_type == 'COLLECTION':
|
|
||||||
# TODO: uuid based
|
# TODO: uuid based
|
||||||
deps.append(self.instance.instance_collection)
|
deps.append(datablock.instance_collection)
|
||||||
|
|
||||||
if self.instance.modifiers:
|
if datablock.modifiers:
|
||||||
deps.extend(find_textures_dependencies(self.instance.modifiers))
|
deps.extend(find_textures_dependencies(datablock.modifiers))
|
||||||
deps.extend(find_geometry_nodes_dependencies(self.instance.modifiers))
|
deps.extend(find_geometry_nodes_dependencies(datablock.modifiers))
|
||||||
|
|
||||||
|
if hasattr(datablock.data, 'shape_keys') and datablock.data.shape_keys:
|
||||||
|
deps.extend(resolve_animation_dependencies(datablock.data.shape_keys))
|
||||||
|
|
||||||
|
deps.extend(resolve_animation_dependencies(datablock))
|
||||||
|
|
||||||
return deps
|
return deps
|
||||||
|
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def resolve(data: dict) -> object:
|
||||||
|
uuid = data.get('uuid')
|
||||||
|
return resolve_datablock_from_uuid(uuid, bpy.data.objects)
|
||||||
|
|
||||||
|
_type = bpy.types.Object
|
||||||
|
_class = BlObject
|
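BlObject.resolve_deps now reports the object's data, particle settings, parent, instanced collection, modifier textures and geometry-node groups, plus animation dependencies. The replication layer presumably walks these so that dependencies are committed before the object itself; a hedged sketch of such a walk (the real traversal lives in the replication library and may differ):

```python
# Hypothetical dependency walk; `registry` maps a Blender type to its
# ReplicatedDatablock implementation (e.g. bpy.types.Object -> BlObject).
def collect_dependencies(registry: dict, root) -> list:
    """Depth-first, duplicate-free list of datablocks, dependencies first."""
    ordered, seen = [], set()

    def visit(datablock):
        if datablock is None or id(datablock) in seen:
            return
        seen.add(id(datablock))
        impl = registry.get(type(datablock))
        if impl is not None:
            for dep in impl.resolve_deps(datablock):
                visit(dep)
        ordered.append(datablock)

    visit(root)
    return ordered
```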
@@ -2,7 +2,10 @@ import bpy
|
|||||||
import mathutils
|
import mathutils
|
||||||
|
|
||||||
from . import dump_anything
|
from . import dump_anything
|
||||||
from .bl_datablock import BlDatablock, get_datablock_from_uuid
|
from replication.protocol import ReplicatedDatablock
|
||||||
|
from .bl_datablock import get_datablock_from_uuid
|
||||||
|
from .bl_datablock import resolve_datablock_from_uuid
|
||||||
|
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||||
|
|
||||||
|
|
||||||
def dump_textures_slots(texture_slots: bpy.types.bpy_prop_collection) -> list:
|
def dump_textures_slots(texture_slots: bpy.types.bpy_prop_collection) -> list:
|
||||||
@@ -37,54 +40,67 @@ IGNORED_ATTR = [
|
|||||||
"users"
|
"users"
|
||||||
]
|
]
|
||||||
|
|
||||||
class BlParticle(BlDatablock):
|
class BlParticle(ReplicatedDatablock):
|
||||||
|
use_delta = True
|
||||||
|
|
||||||
bl_id = "particles"
|
bl_id = "particles"
|
||||||
bl_class = bpy.types.ParticleSettings
|
bl_class = bpy.types.ParticleSettings
|
||||||
bl_icon = "PARTICLES"
|
bl_icon = "PARTICLES"
|
||||||
bl_check_common = False
|
bl_check_common = False
|
||||||
bl_reload_parent = False
|
bl_reload_parent = False
|
||||||
|
|
||||||
def _construct(self, data):
|
@staticmethod
|
||||||
instance = bpy.data.particles.new(data["name"])
|
def construct(data: dict) -> object:
|
||||||
instance.uuid = self.uuid
|
return bpy.data.particles.new(data["name"])
|
||||||
return instance
|
|
||||||
|
|
||||||
def _load_implementation(self, data, target):
|
@staticmethod
|
||||||
dump_anything.load(target, data)
|
def load(data: dict, datablock: object):
|
||||||
|
load_animation_data(data.get('animation_data'), datablock)
|
||||||
|
dump_anything.load(datablock, data)
|
||||||
|
|
||||||
dump_anything.load(target.effector_weights, data["effector_weights"])
|
dump_anything.load(datablock.effector_weights, data["effector_weights"])
|
||||||
|
|
||||||
# Force field
|
# Force field
|
||||||
force_field_1 = data.get("force_field_1", None)
|
force_field_1 = data.get("force_field_1", None)
|
||||||
if force_field_1:
|
if force_field_1:
|
||||||
dump_anything.load(target.force_field_1, force_field_1)
|
dump_anything.load(datablock.force_field_1, force_field_1)
|
||||||
|
|
||||||
force_field_2 = data.get("force_field_2", None)
|
force_field_2 = data.get("force_field_2", None)
|
||||||
if force_field_2:
|
if force_field_2:
|
||||||
dump_anything.load(target.force_field_2, force_field_2)
|
dump_anything.load(datablock.force_field_2, force_field_2)
|
||||||
|
|
||||||
# Texture slots
|
# Texture slots
|
||||||
load_texture_slots(data["texture_slots"], target.texture_slots)
|
load_texture_slots(data["texture_slots"], datablock.texture_slots)
|
||||||
|
|
||||||
def _dump_implementation(self, data, instance=None):
|
|
||||||
assert instance
|
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def dump(datablock: object) -> dict:
|
||||||
dumper = dump_anything.Dumper()
|
dumper = dump_anything.Dumper()
|
||||||
dumper.depth = 1
|
dumper.depth = 1
|
||||||
dumper.exclude_filter = IGNORED_ATTR
|
dumper.exclude_filter = IGNORED_ATTR
|
||||||
data = dumper.dump(instance)
|
data = dumper.dump(datablock)
|
||||||
|
|
||||||
# Particle effectors
|
# Particle effectors
|
||||||
data["effector_weights"] = dumper.dump(instance.effector_weights)
|
data["effector_weights"] = dumper.dump(datablock.effector_weights)
|
||||||
if instance.force_field_1:
|
if datablock.force_field_1:
|
||||||
data["force_field_1"] = dumper.dump(instance.force_field_1)
|
data["force_field_1"] = dumper.dump(datablock.force_field_1)
|
||||||
if instance.force_field_2:
|
if datablock.force_field_2:
|
||||||
data["force_field_2"] = dumper.dump(instance.force_field_2)
|
data["force_field_2"] = dumper.dump(datablock.force_field_2)
|
||||||
|
|
||||||
# Texture slots
|
# Texture slots
|
||||||
data["texture_slots"] = dump_textures_slots(instance.texture_slots)
|
data["texture_slots"] = dump_textures_slots(datablock.texture_slots)
|
||||||
|
data['animation_data'] = dump_animation_data(datablock)
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def _resolve_deps_implementation(self):
|
@staticmethod
|
||||||
return [t.texture for t in self.instance.texture_slots if t and t.texture]
|
def resolve(data: dict) -> object:
|
||||||
|
uuid = data.get('uuid')
|
||||||
|
return resolve_datablock_from_uuid(uuid, bpy.data.particles)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def resolve_deps(datablock: object) -> [object]:
|
||||||
|
deps = [t.texture for t in datablock.texture_slots if t and t.texture]
|
||||||
|
deps.extend(resolve_animation_dependencies(datablock))
|
||||||
|
return deps
|
||||||
|
|
||||||
|
_type = bpy.types.ParticleSettings
|
||||||
|
_class = BlParticle
|
@@ -18,17 +18,21 @@
|
|||||||
|
|
||||||
import logging
|
import logging
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
import mathutils
|
import mathutils
|
||||||
from deepdiff import DeepDiff
|
from deepdiff import DeepDiff, Delta
|
||||||
from replication.constants import DIFF_JSON, MODIFIED
|
from replication.constants import DIFF_JSON, MODIFIED
|
||||||
|
from replication.protocol import ReplicatedDatablock
|
||||||
|
|
||||||
from ..utils import flush_history
|
from ..utils import flush_history, get_preferences
|
||||||
|
from .bl_action import (dump_animation_data, load_animation_data,
|
||||||
|
resolve_animation_dependencies)
|
||||||
from .bl_collection import (dump_collection_children, dump_collection_objects,
|
from .bl_collection import (dump_collection_children, dump_collection_objects,
|
||||||
load_collection_childrens, load_collection_objects,
|
load_collection_childrens, load_collection_objects,
|
||||||
resolve_collection_dependencies)
|
resolve_collection_dependencies)
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import resolve_datablock_from_uuid
|
||||||
from .bl_file import get_filepath
|
from .bl_file import get_filepath
|
||||||
from .dump_anything import Dumper, Loader
|
from .dump_anything import Dumper, Loader
|
||||||
|
|
||||||
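The new Delta import pairs with the use_delta = True flags added on the classes above: instead of shipping the full dumped dict on every commit, only the difference from the last known state has to travel. A minimal deepdiff sketch, independent of the replication wire format (which this diff does not show):

```python
# Minimal delta sketch with deepdiff; the dictionaries stand in for two dumps
# of the same scene at different times.
from deepdiff import DeepDiff, Delta

last_state = {'name': 'Scene', 'frame_start': 1, 'frame_end': 250}
new_state = {'name': 'Scene', 'frame_start': 1, 'frame_end': 300}

diff = DeepDiff(last_state, new_state)
if diff:                                     # only send something when the dump changed
    delta = Delta(diff)                      # compact, serializable description of the change
    assert last_state + delta == new_state   # Delta applies itself with '+'
```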
@@ -286,12 +290,10 @@ def dump_sequence(sequence: bpy.types.Sequence) -> dict:
|
|||||||
dumper.depth = 1
|
dumper.depth = 1
|
||||||
data = dumper.dump(sequence)
|
data = dumper.dump(sequence)
|
||||||
|
|
||||||
|
|
||||||
# TODO: Support multiple images
|
# TODO: Support multiple images
|
||||||
if sequence.type == 'IMAGE':
|
if sequence.type == 'IMAGE':
|
||||||
data['filenames'] = [e.filename for e in sequence.elements]
|
data['filenames'] = [e.filename for e in sequence.elements]
|
||||||
|
|
||||||
|
|
||||||
# Effect strip inputs
|
# Effect strip inputs
|
||||||
input_count = getattr(sequence, 'input_count', None)
|
input_count = getattr(sequence, 'input_count', None)
|
||||||
if input_count:
|
if input_count:
|
||||||
@@ -302,7 +304,8 @@ def dump_sequence(sequence: bpy.types.Sequence) -> dict:
|
|||||||
return data
|
return data
|
||||||
|
|
||||||
|
|
||||||
def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor):
|
def load_sequence(sequence_data: dict,
|
||||||
|
sequence_editor: bpy.types.SequenceEditor):
|
||||||
""" Load sequence from dumped data
|
""" Load sequence from dumped data
|
||||||
|
|
||||||
:arg sequence_data: sequence data to load
|
:arg sequence_data: sequence data to load
|
||||||
@@ -321,129 +324,145 @@ def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor
|
|||||||
if strip_type == 'SCENE':
|
if strip_type == 'SCENE':
|
||||||
strip_scene = bpy.data.scenes.get(sequence_data.get('scene'))
|
strip_scene = bpy.data.scenes.get(sequence_data.get('scene'))
|
||||||
sequence = sequence_editor.sequences.new_scene(strip_name,
|
sequence = sequence_editor.sequences.new_scene(strip_name,
|
||||||
strip_scene,
|
strip_scene,
|
||||||
strip_channel,
|
strip_channel,
|
||||||
strip_frame_start)
|
strip_frame_start)
|
||||||
elif strip_type == 'MOVIE':
|
elif strip_type == 'MOVIE':
|
||||||
filepath = get_filepath(Path(sequence_data['filepath']).name)
|
filepath = get_filepath(Path(sequence_data['filepath']).name)
|
||||||
sequence = sequence_editor.sequences.new_movie(strip_name,
|
sequence = sequence_editor.sequences.new_movie(strip_name,
|
||||||
filepath,
|
filepath,
|
||||||
strip_channel,
|
strip_channel,
|
||||||
strip_frame_start)
|
strip_frame_start)
|
||||||
elif strip_type == 'SOUND':
|
elif strip_type == 'SOUND':
|
||||||
filepath = bpy.data.sounds[sequence_data['sound']].filepath
|
filepath = bpy.data.sounds[sequence_data['sound']].filepath
|
||||||
sequence = sequence_editor.sequences.new_sound(strip_name,
|
sequence = sequence_editor.sequences.new_sound(strip_name,
|
||||||
filepath,
|
filepath,
|
||||||
strip_channel,
|
strip_channel,
|
||||||
strip_frame_start)
|
strip_frame_start)
|
||||||
elif strip_type == 'IMAGE':
|
elif strip_type == 'IMAGE':
|
||||||
images_name = sequence_data.get('filenames')
|
images_name = sequence_data.get('filenames')
|
||||||
filepath = get_filepath(images_name[0])
|
filepath = get_filepath(images_name[0])
|
||||||
sequence = sequence_editor.sequences.new_image(strip_name,
|
sequence = sequence_editor.sequences.new_image(strip_name,
|
||||||
filepath,
|
filepath,
|
||||||
strip_channel,
|
strip_channel,
|
||||||
strip_frame_start)
|
strip_frame_start)
|
||||||
# load other images
|
# load other images
|
||||||
if len(images_name)>1:
|
if len(images_name) > 1:
|
||||||
for img_idx in range(1,len(images_name)):
|
for img_idx in range(1, len(images_name)):
|
||||||
sequence.elements.append((images_name[img_idx]))
|
sequence.elements.append((images_name[img_idx]))
|
||||||
else:
|
else:
|
||||||
seq = {}
|
seq = {}
|
||||||
|
|
||||||
for i in range(sequence_data['input_count']):
|
for i in range(sequence_data['input_count']):
|
||||||
seq[f"seq{i+1}"] = sequence_editor.sequences_all.get(sequence_data.get(f"input_{i+1}", None))
|
seq[f"seq{i+1}"] = sequence_editor.sequences_all.get(
|
||||||
|
sequence_data.get(f"input_{i+1}", None))
|
||||||
|
|
||||||
sequence = sequence_editor.sequences.new_effect(name=strip_name,
|
sequence = sequence_editor.sequences.new_effect(name=strip_name,
|
||||||
type=strip_type,
|
type=strip_type,
|
||||||
channel=strip_channel,
|
channel=strip_channel,
|
||||||
frame_start=strip_frame_start,
|
frame_start=strip_frame_start,
|
||||||
frame_end=sequence_data['frame_final_end'],
|
frame_end=sequence_data['frame_final_end'],
|
||||||
**seq)
|
**seq)
|
||||||
|
|
||||||
loader = Loader()
|
loader = Loader()
|
||||||
# TODO: Support filepath updates
|
|
||||||
loader.exclure_filter = ['filepath', 'sound', 'filenames','fps']
|
loader.exclure_filter = ['filepath', 'sound', 'filenames', 'fps']
|
||||||
loader.load(sequence, sequence_data)
|
loader.load(sequence, sequence_data)
|
||||||
sequence.select = False
|
sequence.select = False
|
||||||
|
|
||||||
|
|
||||||
class BlScene(BlDatablock):
|
class BlScene(ReplicatedDatablock):
|
||||||
|
is_root = True
|
||||||
|
use_delta = True
|
||||||
|
|
||||||
bl_id = "scenes"
|
bl_id = "scenes"
|
||||||
bl_class = bpy.types.Scene
|
bl_class = bpy.types.Scene
|
||||||
bl_check_common = True
|
bl_check_common = True
|
||||||
bl_icon = 'SCENE_DATA'
|
bl_icon = 'SCENE_DATA'
|
||||||
     bl_reload_parent = False

-    def _construct(self, data):
-        instance = bpy.data.scenes.new(data["name"])
-        instance.uuid = self.uuid
-        return instance
+    @staticmethod
+    def construct(data: dict) -> object:
+        return bpy.data.scenes.new(data["name"])

-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
+        load_animation_data(data.get('animation_data'), datablock)
+
         # Load other meshes metadata
         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)

         # Load master collection
         load_collection_objects(
-            data['collection']['objects'], target.collection)
+            data['collection']['objects'], datablock.collection)
         load_collection_childrens(
-            data['collection']['children'], target.collection)
+            data['collection']['children'], datablock.collection)

         if 'world' in data.keys():
-            target.world = bpy.data.worlds[data['world']]
+            datablock.world = bpy.data.worlds[data['world']]

         # Annotation
-        if 'grease_pencil' in data.keys():
-            target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
+        gpencil_uid = data.get('grease_pencil')
+        if gpencil_uid:
+            datablock.grease_pencil = resolve_datablock_from_uuid(gpencil_uid, bpy.data.grease_pencils)

-        if self.preferences.sync_flags.sync_render_settings:
+        if get_preferences().sync_flags.sync_render_settings:
             if 'eevee' in data.keys():
-                loader.load(target.eevee, data['eevee'])
+                loader.load(datablock.eevee, data['eevee'])

             if 'cycles' in data.keys():
-                loader.load(target.cycles, data['cycles'])
+                loader.load(datablock.cycles, data['cycles'])

             if 'render' in data.keys():
-                loader.load(target.render, data['render'])
+                loader.load(datablock.render, data['render'])

-            if 'view_settings' in data.keys():
-                loader.load(target.view_settings, data['view_settings'])
-                if target.view_settings.use_curve_mapping and \
-                        'curve_mapping' in data['view_settings']:
+            view_settings = data.get('view_settings')
+            if view_settings:
+                loader.load(datablock.view_settings, view_settings)
+                if datablock.view_settings.use_curve_mapping and \
+                        'curve_mapping' in view_settings:
                     # TODO: change this ugly fix
-                    target.view_settings.curve_mapping.white_level = data[
-                        'view_settings']['curve_mapping']['white_level']
-                    target.view_settings.curve_mapping.black_level = data[
-                        'view_settings']['curve_mapping']['black_level']
-                    target.view_settings.curve_mapping.update()
+                    datablock.view_settings.curve_mapping.white_level = view_settings['curve_mapping']['white_level']
+                    datablock.view_settings.curve_mapping.black_level = view_settings['curve_mapping']['black_level']
+                    datablock.view_settings.curve_mapping.update()

         # Sequencer
         sequences = data.get('sequences')

         if sequences:
             # Create sequencer data
-            target.sequence_editor_create()
-            vse = target.sequence_editor
+            datablock.sequence_editor_create()
+            vse = datablock.sequence_editor

             # Clear removed sequences
             for seq in vse.sequences_all:
                 if seq.name not in sequences:
                     vse.sequences.remove(seq)
             # Load existing sequences
-            for seq_name, seq_data in sequences.items():
+            for seq_data in sequences.value():
                 load_sequence(seq_data, vse)
             # If the sequence is no longer used, clear it
-        elif target.sequence_editor and not sequences:
-            target.sequence_editor_clear()
+        elif datablock.sequence_editor and not sequences:
+            datablock.sequence_editor_clear()

+        # Timeline markers
+        markers = data.get('timeline_markers')
+        if markers:
+            datablock.timeline_markers.clear()
+            for name, frame, camera in markers:
+                marker = datablock.timeline_markers.new(name, frame=frame)
+                if camera:
+                    marker.camera = resolve_datablock_from_uuid(camera, bpy.data.objects)
+                marker.select = False
         # FIXME: Find a better way after the replication big refacotoring
         # Keep other user from deleting collection object by flushing their history
         flush_history()

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+    @staticmethod
+    def dump(datablock: object) -> dict:
+        data = {}
+        data['animation_data'] = dump_animation_data(datablock)

         # Metadata
         scene_dumper = Dumper()
@ -452,45 +471,44 @@ class BlScene(BlDatablock):
             'name',
             'world',
             'id',
-            'grease_pencil',
             'frame_start',
             'frame_end',
             'frame_step',
         ]
-        if self.preferences.sync_flags.sync_active_camera:
+        if get_preferences().sync_flags.sync_active_camera:
             scene_dumper.include_filter.append('camera')

-        data.update(scene_dumper.dump(instance))
+        data.update(scene_dumper.dump(datablock))

         # Master collection
         data['collection'] = {}
         data['collection']['children'] = dump_collection_children(
-            instance.collection)
+            datablock.collection)
         data['collection']['objects'] = dump_collection_objects(
-            instance.collection)
+            datablock.collection)

         scene_dumper.depth = 1
         scene_dumper.include_filter = None

         # Render settings
-        if self.preferences.sync_flags.sync_render_settings:
+        if get_preferences().sync_flags.sync_render_settings:
             scene_dumper.include_filter = RENDER_SETTINGS

-            data['render'] = scene_dumper.dump(instance.render)
+            data['render'] = scene_dumper.dump(datablock.render)

-            if instance.render.engine == 'BLENDER_EEVEE':
+            if datablock.render.engine == 'BLENDER_EEVEE':
                 scene_dumper.include_filter = EVEE_SETTINGS
-                data['eevee'] = scene_dumper.dump(instance.eevee)
-            elif instance.render.engine == 'CYCLES':
+                data['eevee'] = scene_dumper.dump(datablock.eevee)
+            elif datablock.render.engine == 'CYCLES':
                 scene_dumper.include_filter = CYCLES_SETTINGS
-                data['cycles'] = scene_dumper.dump(instance.cycles)
+                data['cycles'] = scene_dumper.dump(datablock.cycles)

             scene_dumper.include_filter = VIEW_SETTINGS
-            data['view_settings'] = scene_dumper.dump(instance.view_settings)
+            data['view_settings'] = scene_dumper.dump(datablock.view_settings)

-            if instance.view_settings.use_curve_mapping:
+            if datablock.view_settings.use_curve_mapping:
                 data['view_settings']['curve_mapping'] = scene_dumper.dump(
-                    instance.view_settings.curve_mapping)
+                    datablock.view_settings.curve_mapping)
                 scene_dumper.depth = 5
                 scene_dumper.include_filter = [
                     'curves',
@ -498,35 +516,44 @@ class BlScene(BlDatablock):
                     'location',
                 ]
                 data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(
-                    instance.view_settings.curve_mapping.curves)
+                    datablock.view_settings.curve_mapping.curves)

         # Sequence
-        vse = instance.sequence_editor
+        vse = datablock.sequence_editor
         if vse:
             dumped_sequences = {}
             for seq in vse.sequences_all:
                 dumped_sequences[seq.name] = dump_sequence(seq)
             data['sequences'] = dumped_sequences

+        # Timeline markers
+        if datablock.timeline_markers:
+            data['timeline_markers'] = [(m.name, m.frame, getattr(m.camera, 'uuid', None)) for m in datablock.timeline_markers]
+
+        if datablock.grease_pencil:
+            data['grease_pencil'] = datablock.grease_pencil.uuid
+
         return data

-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         deps = []

         # Master Collection
-        deps.extend(resolve_collection_dependencies(self.instance.collection))
+        deps.extend(resolve_collection_dependencies(datablock.collection))

         # world
-        if self.instance.world:
-            deps.append(self.instance.world)
+        if datablock.world:
+            deps.append(datablock.world)

         # annotations
-        if self.instance.grease_pencil:
-            deps.append(self.instance.grease_pencil)
+        if datablock.grease_pencil:
+            deps.append(datablock.grease_pencil)

+        deps.extend(resolve_animation_dependencies(datablock))

         # Sequences
-        vse = self.instance.sequence_editor
+        vse = datablock.sequence_editor
         if vse:
             for sequence in vse.sequences_all:
                 if sequence.type == 'MOVIE' and sequence.filepath:
@ -541,16 +568,45 @@ class BlScene(BlDatablock):

         return deps

-    def diff(self):
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        name = data.get('name')
+        datablock = resolve_datablock_from_uuid(uuid, bpy.data.scenes)
+        if datablock is None:
+            datablock = bpy.data.scenes.get(name)
+
+        return datablock
+
+    @staticmethod
+    def compute_delta(last_data: dict, current_data: dict) -> Delta:
         exclude_path = []

-        if not self.preferences.sync_flags.sync_render_settings:
+        if not get_preferences().sync_flags.sync_render_settings:
             exclude_path.append("root['eevee']")
             exclude_path.append("root['cycles']")
             exclude_path.append("root['view_settings']")
             exclude_path.append("root['render']")

-        if not self.preferences.sync_flags.sync_active_camera:
+        if not get_preferences().sync_flags.sync_active_camera:
             exclude_path.append("root['camera']")

-        return DeepDiff(self.data, self._dump(instance=self.instance), exclude_paths=exclude_path)
+        diff_params = {
+            'exclude_paths': exclude_path,
+            'ignore_order': True,
+            'report_repetition': True
+        }
+        delta_params = {
+            # 'mutate': True
+        }
+
+        return Delta(
+            DeepDiff(last_data,
+                     current_data,
+                     cache_size=5000,
+                     **diff_params),
+            **delta_params)
+
+
+_type = bpy.types.Scene
+_class = BlScene
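Editor's note: compute_delta() above wraps a DeepDiff result in a deepdiff Delta. A minimal sketch, outside the add-on and with made-up dictionary values, of how such a delta round-trips with the deepdiff library:

# Not part of the diff; illustrates the DeepDiff/Delta pattern used by compute_delta().
from deepdiff import DeepDiff, Delta

last_data = {'name': 'Scene', 'frame_start': 1, 'frame_end': 250}
current_data = {'name': 'Scene', 'frame_start': 1, 'frame_end': 300}

diff = DeepDiff(last_data, current_data, ignore_order=True, report_repetition=True)
delta = Delta(diff)

# Applying the delta to the old state reproduces the new one on the receiving side.
patched = last_data + delta
assert patched == current_data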
@ -23,45 +23,59 @@ from pathlib import Path
 import bpy

 from .bl_file import get_filepath, ensure_unpacked
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
 from .dump_anything import Dumper, Loader
+from .bl_datablock import resolve_datablock_from_uuid


-class BlSound(BlDatablock):
+class BlSound(ReplicatedDatablock):
     bl_id = "sounds"
     bl_class = bpy.types.Sound
     bl_check_common = False
     bl_icon = 'SOUND'
     bl_reload_parent = False

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         filename = data.get('filename')

         return bpy.data.sounds.load(get_filepath(filename))

-    def _load(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)

-    def diff(self):
-        return False
-
-    def _dump(self, instance=None):
-        filename = Path(instance.filepath).name
+    @staticmethod
+    def dump(datablock: object) -> dict:
+        filename = Path(datablock.filepath).name

         if not filename:
-            raise FileExistsError(instance.filepath)
+            raise FileExistsError(datablock.filepath)

         return {
             'filename': filename,
-            'name': instance.name
+            'name': datablock.name
         }

-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         deps = []
-        if self.instance.filepath and self.instance.filepath != '<builtin>':
-            ensure_unpacked(self.instance)
+        if datablock.filepath and datablock.filepath != '<builtin>':
+            ensure_unpacked(datablock)

-            deps.append(Path(bpy.path.abspath(self.instance.filepath)))
+            deps.append(Path(bpy.path.abspath(datablock.filepath)))

         return deps
+
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.sounds)
+
+    @staticmethod
+    def needs_update(datablock: object, data:dict)-> bool:
+        return False
+
+
+_type = bpy.types.Sound
+_class = BlSound
@ -20,26 +20,31 @@ import bpy
 import mathutils

 from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


-class BlSpeaker(BlDatablock):
+class BlSpeaker(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "speakers"
     bl_class = bpy.types.Speaker
     bl_check_common = False
     bl_icon = 'SPEAKER'
     bl_reload_parent = False

-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)
+        load_animation_data(data.get('animation_data'), datablock)

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.speakers.new(data["name"])

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         dumper.depth = 1
         dumper.include_filter = [
@ -58,17 +63,27 @@ class BlSpeaker(BlDatablock):
             'cone_volume_outer'
         ]

-        return dumper.dump(instance)
+        data = dumper.dump(datablock)
+        data['animation_data'] = dump_animation_data(datablock)
+        return data

-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.speakers)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         # TODO: resolve material
         deps = []

-        sound = self.instance.sound
+        sound = datablock.sound

         if sound:
             deps.append(sound)

+        deps.extend(resolve_animation_dependencies(datablock))
         return deps
+
+
+_type = bpy.types.Speaker
+_class = BlSpeaker
@ -20,25 +20,32 @@ import bpy
 import mathutils

 from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
+import bpy.types as T


-class BlTexture(BlDatablock):
+class BlTexture(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "textures"
     bl_class = bpy.types.Texture
     bl_check_common = False
     bl_icon = 'TEXTURE'
     bl_reload_parent = False

-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)
+        load_animation_data(data.get('animation_data'), datablock)

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.textures.new(data["name"], data["type"])

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         dumper.depth = 1
@ -52,24 +59,39 @@ class BlTexture(BlDatablock):
             'name_full'
         ]

-        data = dumper.dump(instance)
-        color_ramp = getattr(instance, 'color_ramp', None)
+        data = dumper.dump(datablock)
+
+        color_ramp = getattr(datablock, 'color_ramp', None)

         if color_ramp:
             dumper.depth = 4
             data['color_ramp'] = dumper.dump(color_ramp)

+        data['animation_data'] = dump_animation_data(datablock)
         return data

-    def _resolve_deps_implementation(self):
-        # TODO: resolve material
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.textures)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         deps = []

-        image = getattr(self.instance,"image", None)
+        image = getattr(datablock,"image", None)

         if image:
             deps.append(image)

+        deps.extend(resolve_animation_dependencies(datablock))
+
         return deps
+
+
+_type = [T.WoodTexture, T.VoronoiTexture,
+         T.StucciTexture, T.NoiseTexture,
+         T.MusgraveTexture, T.MarbleTexture,
+         T.MagicTexture, T.ImageTexture,
+         T.DistortedNoiseTexture, T.CloudsTexture,
+         T.BlendTexture]
+_class = BlTexture
@ -21,32 +21,26 @@ import mathutils
 from pathlib import Path

 from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock, get_datablock_from_uuid
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
 from .bl_material import dump_materials_slots, load_materials_slots
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


-class BlVolume(BlDatablock):
+class BlVolume(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "volumes"
     bl_class = bpy.types.Volume
     bl_check_common = False
     bl_icon = 'VOLUME_DATA'
     bl_reload_parent = False

-    def _load_implementation(self, data, target):
-        loader = Loader()
-        loader.load(target, data)
-        loader.load(target.display, data['display'])
-
-        # MATERIAL SLOTS
-        src_materials = data.get('materials', None)
-        if src_materials:
-            load_materials_slots(src_materials, target.materials)
-
-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.volumes.new(data["name"])

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
+    @staticmethod
+    def dump(datablock: object) -> dict:
         dumper = Dumper()
         dumper.depth = 1
         dumper.exclude_filter = [
@ -60,27 +54,48 @@ class BlVolume(BlDatablock):
             'use_fake_user'
         ]

-        data = dumper.dump(instance)
+        data = dumper.dump(datablock)

-        data['display'] = dumper.dump(instance.display)
+        data['display'] = dumper.dump(datablock.display)

         # Fix material index
-        data['materials'] = dump_materials_slots(instance.materials)
+        data['materials'] = dump_materials_slots(datablock.materials)
+        data['animation_data'] = dump_animation_data(datablock)
         return data

-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def load(data: dict, datablock: object):
+        load_animation_data(data.get('animation_data'), datablock)
+        loader = Loader()
+        loader.load(datablock, data)
+        loader.load(datablock.display, data['display'])
+
+        # MATERIAL SLOTS
+        src_materials = data.get('materials', None)
+        if src_materials:
+            load_materials_slots(src_materials, datablock.materials)
+
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.volumes)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         # TODO: resolve material
         deps = []

-        external_vdb = Path(bpy.path.abspath(self.instance.filepath))
+        external_vdb = Path(bpy.path.abspath(datablock.filepath))
         if external_vdb.exists() and not external_vdb.is_dir():
             deps.append(external_vdb)

-        for material in self.instance.materials:
+        for material in datablock.materials:
             if material:
                 deps.append(material)

+        deps.extend(resolve_animation_dependencies(datablock))
+
         return deps
+
+
+_type = bpy.types.Volume
+_class = BlVolume
@ -20,35 +20,42 @@ import bpy
 import mathutils

 from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
 from .bl_material import (load_node_tree,
                           dump_node_tree,
                           get_node_tree_dependencies)

+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


-class BlWorld(BlDatablock):
+class BlWorld(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "worlds"
     bl_class = bpy.types.World
     bl_check_common = True
     bl_icon = 'WORLD_DATA'
     bl_reload_parent = False

-    def _construct(self, data):
+    @staticmethod
+    def construct(data: dict) -> object:
         return bpy.data.worlds.new(data["name"])

-    def _load_implementation(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
+        load_animation_data(data.get('animation_data'), datablock)
         loader = Loader()
-        loader.load(target, data)
+        loader.load(datablock, data)

         if data["use_nodes"]:
-            if target.node_tree is None:
-                target.use_nodes = True
+            if datablock.node_tree is None:
+                datablock.use_nodes = True

-            load_node_tree(data['node_tree'], target.node_tree)
+            load_node_tree(data['node_tree'], datablock.node_tree)

-    def _dump_implementation(self, data, instance=None):
-        assert(instance)
-
+    @staticmethod
+    def dump(datablock: object) -> dict:
         world_dumper = Dumper()
         world_dumper.depth = 1
         world_dumper.include_filter = [
@ -56,17 +63,27 @@ class BlWorld(BlDatablock):
             "name",
             "color"
         ]
-        data = world_dumper.dump(instance)
-        if instance.use_nodes:
-            data['node_tree'] = dump_node_tree(instance.node_tree)
+        data = world_dumper.dump(datablock)
+        if datablock.use_nodes:
+            data['node_tree'] = dump_node_tree(datablock.node_tree)

+        data['animation_data'] = dump_animation_data(datablock)
         return data

-    def _resolve_deps_implementation(self):
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.worlds)
+
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
         deps = []

-        if self.instance.use_nodes:
-            deps.extend(get_node_tree_dependencies(self.instance.node_tree))
-        if self.is_library:
-            deps.append(self.instance.library)
+        if datablock.use_nodes:
+            deps.extend(get_node_tree_dependencies(datablock.node_tree))
+        deps.extend(resolve_animation_dependencies(datablock))
         return deps
+
+
+_type = bpy.types.World
+_class = BlWorld
@ -507,16 +507,12 @@ class Loader:
         _constructors = {
             T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]),
             T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []),
-            T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"]),
             T.GpencilModifier: (CONSTRUCTOR_NEW, ["name", "type"]),
-            T.Constraint: (CONSTRUCTOR_NEW, ["type"]),
         }

         destructors = {
             T.ColorRampElement: DESTRUCTOR_REMOVE,
-            T.Modifier: DESTRUCTOR_CLEAR,
             T.GpencilModifier: DESTRUCTOR_CLEAR,
-            T.Constraint: DESTRUCTOR_REMOVE,
         }
         element_type = element.bl_rna_property.fixed_type
@ -24,20 +24,25 @@ import sys
 from pathlib import Path
 import socket
 import re
+import bpy

 VERSION_EXPR = re.compile('\d+.\d+.\d+')

-THIRD_PARTY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
 DEFAULT_CACHE_DIR = os.path.join(
     os.path.dirname(os.path.abspath(__file__)), "cache")
+REPLICATION_DEPENDENCIES = {
+    "zmq",
+    "deepdiff"
+}
+LIBS = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
+REPLICATION = os.path.join(LIBS,"replication")

 PYTHON_PATH = None
 SUBPROCESS_DIR = None

 rtypes = []


-def module_can_be_imported(name):
+def module_can_be_imported(name: str) -> bool:
     try:
         __import__(name)
         return True
@ -50,7 +55,7 @@ def install_pip():
     subprocess.run([str(PYTHON_PATH), "-m", "ensurepip"])


-def install_package(name, version):
+def install_package(name: str, install_dir: str):
     logging.info(f"installing {name} version...")
     env = os.environ
     if "PIP_REQUIRE_VIRTUALENV" in env:
@ -60,12 +65,13 @@ def install_package(name, version):
         # env var for the subprocess.
         env = os.environ.copy()
         del env["PIP_REQUIRE_VIRTUALENV"]
-    subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}=={version}"], env=env)
+    subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}", "-t", install_dir], env=env)

     if name in sys.modules:
         del sys.modules[name]

-def check_package_version(name, required_version):
+
+def check_package_version(name: str, required_version: str):
     logging.info(f"Checking {name} version...")
     out = subprocess.run([str(PYTHON_PATH), "-m", "pip", "show", name], capture_output=True)

@ -77,6 +83,7 @@ def check_package_version(name, required_version):
         logging.info(f"{name} need an update")
         return False


 def get_ip():
     """
     Retrieve the main network interface IP.
@ -94,7 +101,25 @@ def check_dir(dir):
         os.makedirs(dir)


-def setup(dependencies, python_path):
+def setup_paths(paths: list):
+    """ Add missing path to sys.path
+    """
+    for path in paths:
+        if path not in sys.path:
+            logging.debug(f"Adding {path} dir to the path.")
+            sys.path.insert(0, path)
+
+
+def remove_paths(paths: list):
+    """ Remove list of path from sys.path
+    """
+    for path in paths:
+        if path in sys.path:
+            logging.debug(f"Removing {path} dir from the path.")
+            sys.path.remove(path)
+
+
+def install_modules(dependencies: list, python_path: str, install_dir: str):
     global PYTHON_PATH, SUBPROCESS_DIR

     PYTHON_PATH = Path(python_path)
@ -103,9 +128,23 @@ def setup(dependencies, python_path):
     if not module_can_be_imported("pip"):
         install_pip()

-    for package_name, package_version in dependencies:
+    for package_name in dependencies:
         if not module_can_be_imported(package_name):
-            install_package(package_name, package_version)
+            install_package(package_name, install_dir=install_dir)
             module_can_be_imported(package_name)
-        elif not check_package_version(package_name, package_version):
-            install_package(package_name, package_version)
+
+
+def register():
+    if bpy.app.version[1] >= 91:
+        python_binary_path = sys.executable
+    else:
+        python_binary_path = bpy.app.binary_path_python
+
+    for module_name in list(sys.modules.keys()):
+        if 'replication' in module_name:
+            del sys.modules[module_name]
+
+    setup_paths([LIBS, REPLICATION])
+    install_modules(REPLICATION_DEPENDENCIES, python_binary_path, install_dir=LIBS)
+
+
+def unregister():
+    remove_paths([REPLICATION, LIBS])
multi_user/handlers.py
Normal file
@ -0,0 +1,155 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
from bpy.app.handlers import persistent
|
||||||
|
from replication import porcelain
|
||||||
|
from replication.constants import RP_COMMON, STATE_ACTIVE, STATE_SYNCING, UP
|
||||||
|
from replication.exception import ContextError, NonAuthorizedOperationError
|
||||||
|
from replication.interface import session
|
||||||
|
|
||||||
|
from . import shared_data, utils
|
||||||
|
|
||||||
|
|
||||||
|
def sanitize_deps_graph(remove_nodes: bool = False):
|
||||||
|
""" Cleanup the replication graph
|
||||||
|
"""
|
||||||
|
if session and session.state == STATE_ACTIVE:
|
||||||
|
start = utils.current_milli_time()
|
||||||
|
rm_cpt = 0
|
||||||
|
for node in session.repository.graph.values():
|
||||||
|
node.instance = session.repository.rdp.resolve(node.data)
|
||||||
|
if node is None \
|
||||||
|
or (node.state == UP and not node.instance):
|
||||||
|
if remove_nodes:
|
||||||
|
try:
|
||||||
|
porcelain.rm(session.repository,
|
||||||
|
node.uuid,
|
||||||
|
remove_dependencies=False)
|
||||||
|
logging.info(f"Removing {node.uuid}")
|
||||||
|
rm_cpt += 1
|
||||||
|
except NonAuthorizedOperationError:
|
||||||
|
continue
|
||||||
|
logging.info(f"Sanitize took { utils.current_milli_time()-start} ms, removed {rm_cpt} nodes")
|
||||||
|
|
||||||
|
|
||||||
|
def update_external_dependencies():
|
||||||
|
"""Force external dependencies(files such as images) evaluation
|
||||||
|
"""
|
||||||
|
external_types = ['WindowsPath', 'PosixPath', 'Image']
|
||||||
|
nodes_ids = [n.uuid for n in session.repository.graph.values() if n.data['type_id'] in external_types]
|
||||||
|
for node_id in nodes_ids:
|
||||||
|
node = session.repository.graph.get(node_id)
|
||||||
|
if node and node.owner in [session.repository.username, RP_COMMON]:
|
||||||
|
porcelain.commit(session.repository, node_id)
|
||||||
|
porcelain.push(session.repository, 'origin', node_id)
|
||||||
|
|
||||||
|
|
||||||
|
@persistent
|
||||||
|
def on_scene_update(scene):
|
||||||
|
"""Forward blender depsgraph update to replication
|
||||||
|
"""
|
||||||
|
if session and session.state == STATE_ACTIVE:
|
||||||
|
context = bpy.context
|
||||||
|
blender_depsgraph = bpy.context.view_layer.depsgraph
|
||||||
|
dependency_updates = [u for u in blender_depsgraph.updates]
|
||||||
|
settings = utils.get_preferences()
|
||||||
|
incoming_updates = shared_data.session.applied_updates
|
||||||
|
|
||||||
|
distant_update = [getattr(u.id, 'uuid', None) for u in dependency_updates if getattr(u.id, 'uuid', None) in incoming_updates]
|
||||||
|
if distant_update:
|
||||||
|
for u in distant_update:
|
||||||
|
shared_data.session.applied_updates.remove(u)
|
||||||
|
logging.debug(f"Ignoring distant update of {dependency_updates[0].id.name}")
|
||||||
|
return
|
||||||
|
|
||||||
|
# NOTE: maybe we don't need to check each update but only the first
|
||||||
|
for update in reversed(dependency_updates):
|
||||||
|
update_uuid = getattr(update.id, 'uuid', None)
|
||||||
|
if update_uuid:
|
||||||
|
node = session.repository.graph.get(update.id.uuid)
|
||||||
|
check_common = session.repository.rdp.get_implementation(update.id).bl_check_common
|
||||||
|
|
||||||
|
if node and (node.owner == session.repository.username or check_common):
|
||||||
|
logging.debug(f"Evaluate {update.id.name}")
|
||||||
|
if node.state == UP:
|
||||||
|
try:
|
||||||
|
porcelain.commit(session.repository, node.uuid)
|
||||||
|
porcelain.push(session.repository,
|
||||||
|
'origin', node.uuid)
|
||||||
|
except ReferenceError:
|
||||||
|
logging.debug(f"Reference error {node.uuid}")
|
||||||
|
except ContextError as e:
|
||||||
|
logging.debug(e)
|
||||||
|
except Exception as e:
|
||||||
|
logging.error(e)
|
||||||
|
else:
|
||||||
|
continue
|
||||||
|
elif isinstance(update.id, bpy.types.Scene):
|
||||||
|
scene = bpy.data.scenes.get(update.id.name)
|
||||||
|
scn_uuid = porcelain.add(session.repository, scene)
|
||||||
|
porcelain.commit(session.repository, scn_uuid)
|
||||||
|
porcelain.push(session.repository, 'origin', scn_uuid)
|
||||||
|
|
||||||
|
scene_graph_changed = [u for u in reversed(dependency_updates) if getattr(u.id, 'uuid', None) and isinstance(u.id,(bpy.types.Scene,bpy.types.Collection))]
|
||||||
|
if scene_graph_changed:
|
||||||
|
porcelain.purge_orphan_nodes(session.repository)
|
||||||
|
|
||||||
|
update_external_dependencies()
|
||||||
|
|
||||||
|
@persistent
|
||||||
|
def resolve_deps_graph(dummy):
|
||||||
|
"""Resolve deps graph
|
||||||
|
|
||||||
|
Temporary solution to resolve each node pointers after a Undo.
|
||||||
|
A future solution should be to avoid storing dataclock reference...
|
||||||
|
|
||||||
|
"""
|
||||||
|
if session and session.state == STATE_ACTIVE:
|
||||||
|
sanitize_deps_graph(remove_nodes=True)
|
||||||
|
|
||||||
|
|
||||||
|
@persistent
|
||||||
|
def load_pre_handler(dummy):
|
||||||
|
if session and session.state in [STATE_ACTIVE, STATE_SYNCING]:
|
||||||
|
bpy.ops.session.stop()
|
||||||
|
|
||||||
|
|
||||||
|
@persistent
|
||||||
|
def update_client_frame(scene):
|
||||||
|
if session and session.state == STATE_ACTIVE:
|
||||||
|
porcelain.update_user_metadata(session.repository, {
|
||||||
|
'frame_current': scene.frame_current
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
def register():
|
||||||
|
bpy.app.handlers.undo_post.append(resolve_deps_graph)
|
||||||
|
bpy.app.handlers.redo_post.append(resolve_deps_graph)
|
||||||
|
|
||||||
|
bpy.app.handlers.load_pre.append(load_pre_handler)
|
||||||
|
bpy.app.handlers.frame_change_pre.append(update_client_frame)
|
||||||
|
|
||||||
|
|
||||||
|
def unregister():
|
||||||
|
bpy.app.handlers.undo_post.remove(resolve_deps_graph)
|
||||||
|
bpy.app.handlers.redo_post.remove(resolve_deps_graph)
|
||||||
|
|
||||||
|
bpy.app.handlers.load_pre.remove(load_pre_handler)
|
||||||
|
bpy.app.handlers.frame_change_pre.remove(update_client_frame)
|
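Editor's note: handlers.register() above wires the undo/redo, load and frame-change hooks, but not the depsgraph hook for on_scene_update; where that attachment happens is not shown in this diff. A hedged sketch of how such a persistent depsgraph handler is typically attached in Blender (module path assumed):

import bpy
from multi_user import handlers  # import path assumed for illustration

# on_scene_update is @persistent, so it survives loading new .blend files once appended.
if handlers.on_scene_update not in bpy.app.handlers.depsgraph_update_post:
    bpy.app.handlers.depsgraph_update_post.append(handlers.on_scene_update)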
@ -15,31 +15,31 @@
 #
 # ##### END GPL LICENSE BLOCK #####


 import bpy
-import mathutils
+import os

-from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock
+from pathlib import Path
+import bpy.utils.previews


-class BlLibrary(BlDatablock):
-    bl_id = "libraries"
-    bl_class = bpy.types.Library
-    bl_check_common = False
-    bl_icon = 'LIBRARY_DATA_DIRECT'
-    bl_reload_parent = False
-
-    def _construct(self, data):
-        with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData):
-            targetData = sourceData
-            return sourceData
-
-    def _load(self, data, target):
-        pass
-
-    def _dump(self, instance=None):
-        assert(instance)
-        dumper = Dumper()
-        return dumper.dump(instance)
+def register():
+    global icons_col
+
+    pcoll = bpy.utils.previews.new()
+    icons_dir = os.path.join(os.path.dirname(__file__), ".")
+    for png in Path(icons_dir).rglob("*.png"):
+        pcoll.load(png.stem, str(png), "IMAGE")
+
+    icons_col = pcoll
+
+
+def unregister():
+    global icons_col
+
+    try:
+        bpy.utils.previews.remove(icons_col)
+    except Exception:
+        pass
+
+    icons_col = None
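Editor's note: register() above fills a bpy.utils.previews collection from the bundled PNGs listed below. A hedged sketch of how one of those previews could be referenced from a panel; the import path and the chosen icon name are assumptions matching the PNG stems:

# Inside some Panel.draw(); 'session_status_online' matches one of the bundled PNG stems.
from . import icons  # import path assumed

def draw(self, context):
    layout = self.layout
    layout.label(text="Online",
                 icon_value=icons.icons_col["session_status_online"].icon_id)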
New binary files:
  BIN  multi_user/icons/repository_merge.png        After: 6.5 KiB
  BIN  multi_user/icons/repository_push.png         After: 4.2 KiB
  BIN  multi_user/icons/server_offline.png          After: 6.7 KiB
  BIN  multi_user/icons/server_online.png           After: 9.5 KiB
  BIN  multi_user/icons/session_status_offline.png  After: 9.4 KiB
  BIN  multi_user/icons/session_status_online.png   After: 13 KiB
  BIN  multi_user/icons/session_status_waiting.png  After: 11 KiB
New submodule:
  multi_user/libs/replication  (1 line)
@ -17,6 +17,7 @@
|
|||||||
|
|
||||||
import random
|
import random
|
||||||
import logging
|
import logging
|
||||||
|
from uuid import uuid4
|
||||||
import bpy
|
import bpy
|
||||||
import string
|
import string
|
||||||
import re
|
import re
|
||||||
@ -25,7 +26,7 @@ import os
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from . import bl_types, environment, addon_updater_ops, presence, ui
|
from . import bl_types, environment, addon_updater_ops, presence, ui
|
||||||
from .utils import get_preferences, get_expanded_icon
|
from .utils import get_preferences, get_expanded_icon, get_folder_size
|
||||||
from replication.constants import RP_COMMON
|
from replication.constants import RP_COMMON
|
||||||
from replication.interface import session
|
from replication.interface import session
|
||||||
|
|
||||||
@ -33,6 +34,25 @@ from replication.interface import session
|
|||||||
IP_REGEX = re.compile("^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$")
|
IP_REGEX = re.compile("^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$")
|
||||||
HOSTNAME_REGEX = re.compile("^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$")
|
HOSTNAME_REGEX = re.compile("^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$")
|
||||||
|
|
||||||
|
#SERVER PRESETS AT LAUNCH
|
||||||
|
DEFAULT_PRESETS = {
|
||||||
|
"localhost" : {
|
||||||
|
"server_name": "localhost",
|
||||||
|
"ip": "localhost",
|
||||||
|
"port": 5555,
|
||||||
|
"use_admin_password": True,
|
||||||
|
"admin_password": "admin",
|
||||||
|
"server_password": ""
|
||||||
|
},
|
||||||
|
"public session" : {
|
||||||
|
"server_name": "public session",
|
||||||
|
"ip": "51.75.71.183",
|
||||||
|
"port": 5555,
|
||||||
|
"admin_password": "",
|
||||||
|
"server_password": ""
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
def randomColor():
|
def randomColor():
|
||||||
"""Generate a random color """
|
"""Generate a random color """
|
||||||
r = random.random()
|
r = random.random()
|
||||||
@ -66,16 +86,6 @@ def update_ip(self, context):
|
|||||||
self['ip'] = "127.0.0.1"
|
self['ip'] = "127.0.0.1"
|
||||||
|
|
||||||
|
|
||||||
def update_port(self, context):
|
|
||||||
max_port = self.port + 3
|
|
||||||
|
|
||||||
if self.ipc_port < max_port and \
|
|
||||||
self['ipc_port'] >= self.port:
|
|
||||||
logging.error(
|
|
||||||
"IPC Port in conflict with the port, assigning a random value")
|
|
||||||
self['ipc_port'] = random.randrange(self.port+4, 10000)
|
|
||||||
|
|
||||||
|
|
||||||
def update_directory(self, context):
|
def update_directory(self, context):
|
||||||
new_dir = Path(self.cache_directory)
|
new_dir = Path(self.cache_directory)
|
||||||
if new_dir.exists() and any(Path(self.cache_directory).iterdir()):
|
if new_dir.exists() and any(Path(self.cache_directory).iterdir()):
|
||||||
@ -101,6 +111,16 @@ class ReplicatedDatablock(bpy.types.PropertyGroup):
|
|||||||
auto_push: bpy.props.BoolProperty(default=True)
|
auto_push: bpy.props.BoolProperty(default=True)
|
||||||
icon: bpy.props.StringProperty()
|
icon: bpy.props.StringProperty()
|
||||||
|
|
||||||
|
class ServerPreset(bpy.types.PropertyGroup):
|
||||||
|
server_name: bpy.props.StringProperty(default="")
|
||||||
|
ip: bpy.props.StringProperty(default="127.0.0.1", update=update_ip)
|
||||||
|
port: bpy.props.IntProperty(default=5555)
|
||||||
|
use_server_password: bpy.props.BoolProperty(default=False)
|
||||||
|
server_password: bpy.props.StringProperty(default="", subtype = "PASSWORD")
|
||||||
|
use_admin_password: bpy.props.BoolProperty(default=False)
|
||||||
|
admin_password: bpy.props.StringProperty(default="", subtype = "PASSWORD")
|
||||||
|
is_online: bpy.props.BoolProperty(default=False)
|
||||||
|
is_private: bpy.props.BoolProperty(default=False)
|
||||||
|
|
||||||
def set_sync_render_settings(self, value):
|
def set_sync_render_settings(self, value):
|
||||||
self['sync_render_settings'] = value
|
self['sync_render_settings'] = value
|
||||||
@ -150,36 +170,66 @@ class ReplicationFlags(bpy.types.PropertyGroup):
|
|||||||
class SessionPrefs(bpy.types.AddonPreferences):
|
class SessionPrefs(bpy.types.AddonPreferences):
|
||||||
bl_idname = __package__
|
bl_idname = __package__
|
||||||
|
|
||||||
ip: bpy.props.StringProperty(
|
# User settings
|
||||||
name="ip",
|
|
||||||
description='Distant host ip',
|
|
||||||
default="127.0.0.1",
|
|
||||||
update=update_ip)
|
|
||||||
username: bpy.props.StringProperty(
|
username: bpy.props.StringProperty(
|
||||||
name="Username",
|
name="Username",
|
||||||
default=f"user_{random_string_digits()}"
|
default=f"user_{random_string_digits()}"
|
||||||
)
|
)
|
||||||
client_color: bpy.props.FloatVectorProperty(
|
client_color: bpy.props.FloatVectorProperty(
|
||||||
name="client_instance_color",
|
name="client_instance_color",
|
||||||
|
description='User color',
|
||||||
subtype='COLOR',
|
subtype='COLOR',
|
||||||
default=randomColor())
|
default=randomColor()
|
||||||
port: bpy.props.IntProperty(
|
)
|
||||||
name="port",
|
# Current server settings
|
||||||
|
server_name: bpy.props.StringProperty(
|
||||||
|
name="server_name",
|
||||||
|
description="Custom name of the server",
|
||||||
|
default='localhost',
|
||||||
|
)
|
||||||
|
server_index: bpy.props.IntProperty(
|
||||||
|
name="server_index",
|
||||||
|
description="index of the server",
|
||||||
|
)
|
||||||
|
# User host session settings
|
||||||
|
host_port: bpy.props.IntProperty(
|
||||||
|
name="host_port",
|
||||||
description='Distant host port',
|
description='Distant host port',
|
||||||
default=5555
|
default=5555
|
||||||
)
|
)
|
||||||
|
host_use_server_password: bpy.props.BoolProperty(
|
||||||
|
name="use_server_password",
|
||||||
|
description='Use session password',
|
||||||
|
default=False
|
||||||
|
)
|
||||||
|
host_server_password: bpy.props.StringProperty(
|
||||||
|
name="server_password",
|
||||||
|
description='Session password',
|
||||||
|
subtype='PASSWORD'
|
||||||
|
)
|
||||||
|
host_use_admin_password: bpy.props.BoolProperty(
|
||||||
|
name="use_admin_password",
|
||||||
|
description='Use admin password',
|
||||||
|
default=True
|
||||||
|
)
|
||||||
|
host_admin_password: bpy.props.StringProperty(
|
||||||
|
name="admin_password",
|
||||||
|
description='Admin password',
|
||||||
|
subtype='PASSWORD',
|
||||||
|
default='admin'
|
||||||
|
)
|
||||||
|
# Other
|
||||||
|
is_first_launch: bpy.props.BoolProperty(
|
||||||
|
name="is_fnirst_launch",
|
||||||
|
description="First time lauching the addon",
|
||||||
|
default=True
|
||||||
|
)
|
||||||
sync_flags: bpy.props.PointerProperty(
|
sync_flags: bpy.props.PointerProperty(
|
||||||
type=ReplicationFlags
|
type=ReplicationFlags
|
||||||
)
|
)
|
||||||
supported_datablocks: bpy.props.CollectionProperty(
|
supported_datablocks: bpy.props.CollectionProperty(
|
||||||
type=ReplicatedDatablock,
|
type=ReplicatedDatablock,
|
||||||
)
|
)
|
||||||
ipc_port: bpy.props.IntProperty(
|
|
||||||
name="ipc_port",
|
|
||||||
description='internal ttl port(only useful for multiple local instances)',
|
|
||||||
default=random.randrange(5570, 70000),
|
|
||||||
update=update_port,
|
|
||||||
)
|
|
||||||
init_method: bpy.props.EnumProperty(
|
init_method: bpy.props.EnumProperty(
|
||||||
name='init_method',
|
name='init_method',
|
||||||
description='Init repo',
|
description='Init repo',
|
||||||
@ -197,6 +247,11 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
|||||||
description='connection timeout before disconnection',
|
description='connection timeout before disconnection',
|
||||||
default=5000
|
default=5000
|
||||||
)
|
)
|
||||||
|
ping_timeout: bpy.props.IntProperty(
|
||||||
|
name='ping timeout',
|
||||||
|
description='check if servers are online',
|
||||||
|
default=500
|
||||||
|
)
|
||||||
# Replication update settings
|
# Replication update settings
|
||||||
depsgraph_update_rate: bpy.props.FloatProperty(
|
depsgraph_update_rate: bpy.props.FloatProperty(
|
||||||
name='depsgraph update rate (s)',
|
name='depsgraph update rate (s)',
|
||||||
@ -208,11 +263,12 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
|||||||
description="Remove filecache from memory",
|
description="Remove filecache from memory",
|
||||||
default=False
|
default=False
|
||||||
)
|
)
|
||||||
# for UI
|
# For UI
|
||||||
category: bpy.props.EnumProperty(
|
category: bpy.props.EnumProperty(
|
||||||
name="Category",
|
name="Category",
|
||||||
description="Preferences Category",
|
description="Preferences Category",
|
||||||
items=[
|
items=[
|
||||||
|
('PREF', "Preferences", "Preferences of this add-on"),
|
||||||
('CONFIG', "Configuration", "Configuration of this add-on"),
|
('CONFIG', "Configuration", "Configuration of this add-on"),
|
||||||
('UPDATE', "Update", "Update this add-on"),
|
('UPDATE', "Update", "Update this add-on"),
|
||||||
],
|
],
|
||||||
@ -256,31 +312,58 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
|||||||
step=1,
|
step=1,
|
||||||
subtype='PERCENTAGE',
|
subtype='PERCENTAGE',
|
||||||
)
|
)
|
||||||
|
presence_text_distance: bpy.props.FloatProperty(
|
||||||
|
name="Distance text visibilty",
|
||||||
|
description="Adjust the distance visibilty of user's mode/name",
|
||||||
|
min=0.1,
|
||||||
|
max=10000,
|
||||||
|
default=100,
|
||||||
|
)
|
||||||
conf_session_identity_expanded: bpy.props.BoolProperty(
|
conf_session_identity_expanded: bpy.props.BoolProperty(
|
||||||
name="Identity",
|
name="Identity",
|
||||||
description="Identity",
|
description="Identity",
|
||||||
default=True
|
default=False
|
||||||
)
|
)
|
||||||
conf_session_net_expanded: bpy.props.BoolProperty(
|
conf_session_net_expanded: bpy.props.BoolProperty(
|
||||||
name="Net",
|
name="Net",
|
||||||
description="net",
|
description="net",
|
||||||
default=True
|
default=False
|
||||||
)
|
)
|
||||||
conf_session_hosting_expanded: bpy.props.BoolProperty(
|
conf_session_hosting_expanded: bpy.props.BoolProperty(
|
||||||
name="Rights",
|
name="Rights",
|
||||||
description="Rights",
|
description="Rights",
|
||||||
default=False
|
default=False
|
||||||
)
|
)
|
||||||
|
conf_session_rep_expanded: bpy.props.BoolProperty(
|
||||||
|
name="Replication",
|
||||||
|
description="Replication",
|
||||||
|
default=False
|
||||||
|
)
|
||||||
conf_session_cache_expanded: bpy.props.BoolProperty(
|
conf_session_cache_expanded: bpy.props.BoolProperty(
|
||||||
name="Cache",
|
name="Cache",
|
||||||
description="cache",
|
description="cache",
|
||||||
default=False
|
default=False
|
||||||
)
|
)
|
||||||
|
conf_session_log_expanded: bpy.props.BoolProperty(
|
||||||
|
name="conf_session_log_expanded",
|
||||||
|
description="conf_session_log_expanded",
|
||||||
|
default=False
|
||||||
|
)
|
||||||
conf_session_ui_expanded: bpy.props.BoolProperty(
|
conf_session_ui_expanded: bpy.props.BoolProperty(
|
||||||
name="Interface",
|
name="Interface",
|
||||||
description="Interface",
|
description="Interface",
|
||||||
default=False
|
default=False
|
||||||
)
|
)
|
||||||
|
sidebar_repository_shown: bpy.props.BoolProperty(
|
||||||
|
name="sidebar_repository_shown",
|
||||||
|
description="sidebar_repository_shown",
|
||||||
|
default=False
|
||||||
|
)
|
||||||
|
sidebar_advanced_shown: bpy.props.BoolProperty(
|
||||||
|
name="sidebar_advanced_shown",
|
||||||
|
description="sidebar_advanced_shown",
|
||||||
|
default=False
|
||||||
|
)
|
||||||
sidebar_advanced_rep_expanded: bpy.props.BoolProperty(
|
sidebar_advanced_rep_expanded: bpy.props.BoolProperty(
|
||||||
name="sidebar_advanced_rep_expanded",
|
name="sidebar_advanced_rep_expanded",
|
||||||
description="sidebar_advanced_rep_expanded",
|
description="sidebar_advanced_rep_expanded",
|
||||||
@ -291,6 +374,11 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
|||||||
description="sidebar_advanced_log_expanded",
|
description="sidebar_advanced_log_expanded",
|
||||||
default=False
|
default=False
|
||||||
)
|
)
|
||||||
|
sidebar_advanced_uinfo_expanded: bpy.props.BoolProperty(
|
||||||
|
name="sidebar_advanced_uinfo_expanded",
|
||||||
|
description="sidebar_advanced_uinfo_expanded",
|
||||||
|
default=False
|
||||||
|
)
|
||||||
sidebar_advanced_net_expanded: bpy.props.BoolProperty(
|
sidebar_advanced_net_expanded: bpy.props.BoolProperty(
|
||||||
name="sidebar_advanced_net_expanded",
|
name="sidebar_advanced_net_expanded",
|
||||||
description="sidebar_advanced_net_expanded",
|
description="sidebar_advanced_net_expanded",
|
||||||
@ -335,6 +423,19 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
|||||||
max=59
|
max=59
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Server preset
|
||||||
|
def server_list_callback(scene, context):
|
||||||
|
settings = get_preferences()
|
||||||
|
enum = []
|
||||||
|
for i in settings.server_preset:
|
||||||
|
enum.append((i.name, i.name, ""))
|
||||||
|
return enum
|
||||||
|
|
||||||
|
server_preset: bpy.props.CollectionProperty(
|
||||||
|
name="server preset",
|
||||||
|
type=ServerPreset,
|
||||||
|
)
|
||||||
|
|
||||||
# Custom panel
|
# Custom panel
|
||||||
panel_category: bpy.props.StringProperty(
|
panel_category: bpy.props.StringProperty(
|
||||||
description="Choose a name for the category of the panel",
|
description="Choose a name for the category of the panel",
|
||||||
@@ -343,38 +444,28 @@ class SessionPrefs(bpy.types.AddonPreferences):

     def draw(self, context):
         layout = self.layout

         layout.row().prop(self, "category", expand=True)

+        if self.category == 'PREF':
+            grid = layout.column()
+
+            box = grid.box()
+            row = box.row()
+            # USER SETTINGS
+            split = row.split(factor=0.7, align=True)
+            split.prop(self, "username", text="User")
+            split.prop(self, "client_color", text="")
+
+            row = box.row()
+            row.label(text="Hide settings:")
+            row = box.row()
+            row.prop(self, "sidebar_advanced_shown", text="Hide “Advanced” settings in side pannel (Not in session)")
+            row = box.row()
+            row.prop(self, "sidebar_repository_shown", text="Hide “Repository” settings in side pannel (In session)")
+
         if self.category == 'CONFIG':
             grid = layout.column()

-            # USER INFORMATIONS
-            box = grid.box()
-            box.prop(
-                self, "conf_session_identity_expanded", text="User information",
-                icon=get_expanded_icon(self.conf_session_identity_expanded),
-                emboss=False)
-            if self.conf_session_identity_expanded:
-                box.row().prop(self, "username", text="name")
-                box.row().prop(self, "client_color", text="color")
-
-            # NETWORK SETTINGS
-            box = grid.box()
-            box.prop(
-                self, "conf_session_net_expanded", text="Networking",
-                icon=get_expanded_icon(self.conf_session_net_expanded),
-                emboss=False)
-
-            if self.conf_session_net_expanded:
-                box.row().prop(self, "ip", text="Address")
-                row = box.row()
-                row.label(text="Port:")
-                row.prop(self, "port", text="")
-                row = box.row()
-                row.label(text="Init the session from:")
-                row.prop(self, "init_method", text="")
-
             # HOST SETTINGS
             box = grid.box()
             box.prop(
@@ -382,9 +473,57 @@ class SessionPrefs(bpy.types.AddonPreferences):
                 icon=get_expanded_icon(self.conf_session_hosting_expanded),
                 emboss=False)
             if self.conf_session_hosting_expanded:
+                row = box.row()
+                row.prop(self, "host_port", text="Port: ")
                 row = box.row()
                 row.label(text="Init the session from:")
                 row.prop(self, "init_method", text="")
+                row = box.row()
+                col = row.column()
+                col.prop(self, "host_use_server_password", text="Server password:")
+                col = row.column()
+                col.enabled = True if self.host_use_server_password else False
+                col.prop(self, "host_server_password", text="")
+                row = box.row()
+                col = row.column()
+                col.prop(self, "host_use_admin_password", text="Admin password:")
+                col = row.column()
+                col.enabled = True if self.host_use_admin_password else False
+                col.prop(self, "host_admin_password", text="")
+
+            # NETWORKING
+            box = grid.box()
+            box.prop(
+                self, "conf_session_net_expanded", text="Network",
+                icon=get_expanded_icon(self.conf_session_net_expanded),
+                emboss=False)
+            if self.conf_session_net_expanded:
+                row = box.row()
+                row.label(text="Timeout (ms):")
+                row.prop(self, "connection_timeout", text="")
+                row = box.row()
+                row.label(text="Server ping (ms):")
+                row.prop(self, "ping_timeout", text="")
+
+            # REPLICATION
+            box = grid.box()
+            box.prop(
+                self, "conf_session_rep_expanded", text="Replication",
+                icon=get_expanded_icon(self.conf_session_rep_expanded),
+                emboss=False)
+            if self.conf_session_rep_expanded:
+                row = box.row()
+                row.prop(self.sync_flags, "sync_render_settings")
+                row = box.row()
+                row.prop(self.sync_flags, "sync_active_camera")
+                row = box.row()
+                row.prop(self.sync_flags, "sync_during_editmode")
+                row = box.row()
+                if self.sync_flags.sync_during_editmode:
+                    warning = row.box()
+                    warning.label(text="Don't use this with heavy meshes !", icon='ERROR')
+                row = box.row()
+                row.prop(self, "depsgraph_update_rate", text="Apply delay")
+
             # CACHE SETTINGS
             box = grid.box()
@@ -395,24 +534,18 @@ class SessionPrefs(bpy.types.AddonPreferences):
             if self.conf_session_cache_expanded:
                 box.row().prop(self, "cache_directory", text="Cache directory")
                 box.row().prop(self, "clear_memory_filecache", text="Clear memory filecache")
+                box.row().operator('session.clear_cache', text=f"Clear cache ({get_folder_size(self.cache_directory)})")

-            # INTERFACE SETTINGS
+            # LOGGING
             box = grid.box()
             box.prop(
-                self, "conf_session_ui_expanded", text="Interface",
-                icon=get_expanded_icon(self.conf_session_ui_expanded),
+                self, "conf_session_log_expanded", text="Logging",
+                icon=get_expanded_icon(self.conf_session_log_expanded),
                 emboss=False)
-            if self.conf_session_ui_expanded:
-                box.row().prop(self, "panel_category", text="Panel category", expand=True)
+            if self.conf_session_log_expanded:
                 row = box.row()
-                row.label(text="Session widget:")
-                col = box.column(align=True)
-                col.prop(self, "presence_hud_scale", expand=True)
-                col.prop(self, "presence_hud_hpos", expand=True)
-                col.prop(self, "presence_hud_vpos", expand=True)
+                row.label(text="Log level:")
+                row.prop(self, 'logging_level', text="")

         if self.category == 'UPDATE':
             from . import addon_updater_ops
@@ -421,18 +554,43 @@ class SessionPrefs(bpy.types.AddonPreferences):
     def generate_supported_types(self):
         self.supported_datablocks.clear()

-        for type in bl_types.types_to_register():
+        bpy_protocol = bl_types.get_data_translation_protocol()
+
+        # init the factory with supported types
+        for dcc_type_id, impl in bpy_protocol.implementations.items():
             new_db = self.supported_datablocks.add()

-            type_module = getattr(bl_types, type)
-            name = [e.capitalize() for e in type.split('_')[1:]]
-            type_impl_name = 'Bl'+''.join(name)
-            type_module_class = getattr(type_module, type_impl_name)
-            new_db.name = type_impl_name
-            new_db.type_name = type_impl_name
+            new_db.name = dcc_type_id
+            new_db.type_name = dcc_type_id
             new_db.use_as_filter = True
-            new_db.icon = type_module_class.bl_icon
-            new_db.bl_name = type_module_class.bl_id
+            new_db.icon = impl.bl_icon
+            new_db.bl_name = impl.bl_id
+
+    # Get a server preset through its name
+    def get_server_preset(self, name):
+        existing_preset = None
+
+        for server_preset in self.server_preset :
+            if server_preset.server_name == name :
+                existing_preset = server_preset
+
+        return existing_preset
+
+    # Custom at launch server preset
+    def generate_default_presets(self):
+        for preset_name, preset_data in DEFAULT_PRESETS.items():
+            existing_preset = self.get_server_preset(preset_name)
+            if existing_preset :
+                continue
+            new_server = self.server_preset.add()
+            new_server.name = str(uuid4())
+            new_server.server_name = preset_data.get('server_name')
+            new_server.ip = preset_data.get('ip')
+            new_server.port = preset_data.get('port')
+            new_server.use_server_password = preset_data.get('use_server_password',False)
+            new_server.server_password = preset_data.get('server_password',None)
+            new_server.use_admin_password = preset_data.get('use_admin_password',False)
+            new_server.admin_password = preset_data.get('admin_password',None)


 def client_list_callback(scene, context):
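A minimal standalone sketch of the preset bootstrap added above. The DEFAULT_PRESETS shape shown here is an assumption inferred from the keys read by generate_default_presets(); the real table and the ServerPreset property group live elsewhere in the addon.

from uuid import uuid4

# Hypothetical preset table; only the keys consumed above are assumed.
DEFAULT_PRESETS = {
    "localhost": {
        "server_name": "localhost",
        "ip": "127.0.0.1",
        "port": 5555,
        "use_admin_password": True,
        "admin_password": "admin",
    },
}

def build_presets(existing_names):
    """Mimic generate_default_presets(): skip names that already have a preset."""
    created = []
    for preset_name, preset_data in DEFAULT_PRESETS.items():
        if preset_name in existing_names:
            continue
        created.append({
            "name": str(uuid4()),  # internal key, like new_server.name above
            "server_name": preset_data.get("server_name"),
            "ip": preset_data.get("ip"),
            "port": preset_data.get("port"),
            "use_server_password": preset_data.get("use_server_password", False),
        })
    return created

print(build_presets(existing_names=set()))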
@@ -490,6 +648,11 @@ class SessionProps(bpy.types.PropertyGroup):
         description='Enable user overlay ',
         default=True,
     )
+    presence_show_mode: bpy.props.BoolProperty(
+        name="Show users current mode",
+        description='Enable user mode overlay ',
+        default=False,
+    )
     presence_show_far_user: bpy.props.BoolProperty(
         name="Show users on different scenes",
         description="Show user on different scenes",
@@ -505,22 +668,16 @@ class SessionProps(bpy.types.PropertyGroup):
         description='Show only owned datablocks',
         default=True
     )
+    filter_name: bpy.props.StringProperty(
+        name="filter_name",
+        default="",
+        description='Node name filter',
+    )
     admin: bpy.props.BoolProperty(
         name="admin",
         description='Connect as admin',
         default=False
     )
-    password: bpy.props.StringProperty(
-        name="password",
-        default=random_string_digits(),
-        description='Session password',
-        subtype='PASSWORD'
-    )
-    internet_ip: bpy.props.StringProperty(
-        name="internet ip",
-        default="no found",
-        description='Internet interface ip',
-    )
     user_snap_running: bpy.props.BoolProperty(
         default=False
     )
@@ -537,6 +694,7 @@ classes = (
     SessionProps,
     ReplicationFlags,
     ReplicatedDatablock,
+    ServerPreset,
     SessionPrefs,
 )

@@ -551,6 +709,10 @@ def register():
     if len(prefs.supported_datablocks) == 0:
         logging.debug('Generating bl_types preferences')
         prefs.generate_supported_types()
+
+    # at launch server presets
+    prefs.generate_default_presets()


 def unregister():
@@ -30,7 +30,7 @@ import mathutils
 from bpy_extras import view3d_utils
 from gpu_extras.batch import batch_for_shader
 from replication.constants import (STATE_ACTIVE, STATE_AUTH, STATE_CONFIG,
-                                   STATE_INITIAL, STATE_LAUNCHING_SERVICES,
+                                   STATE_INITIAL, CONNECTING,
                                    STATE_LOBBY, STATE_QUITTING, STATE_SRV_SYNC,
                                    STATE_SYNCING, STATE_WAITING)
 from replication.interface import session
@@ -94,15 +94,41 @@ def project_to_viewport(region: bpy.types.Region, rv3d: bpy.types.RegionView3D,
     return [target.x, target.y, target.z]


-def bbox_from_obj(obj: bpy.types.Object, radius: float) -> list:
+def bbox_from_obj(obj: bpy.types.Object, index: int = 1) -> list:
     """ Generate a bounding box for a given object by using its world matrix

     :param obj: target object
     :type obj: bpy.types.Object
-    :param radius: bounding box radius
-    :type radius: float
-    :return: list of 8 points [(x,y,z),...]
+    :param index: indice offset
+    :type index: int
+    :return: list of 8 points [(x,y,z),...], list of 12 link between these points [(1,2),...]
     """
+    radius = 1.0  # Radius of the bounding box
+    index = 8*index
+    vertex_indices = (
+        (0+index, 1+index), (0+index, 2+index), (1+index, 3+index), (2+index, 3+index),
+        (4+index, 5+index), (4+index, 6+index), (5+index, 7+index), (6+index, 7+index),
+        (0+index, 4+index), (1+index, 5+index), (2+index, 6+index), (3+index, 7+index))
+
+    if obj.type == 'EMPTY':
+        radius = obj.empty_display_size
+    elif obj.type == 'LIGHT':
+        radius = obj.data.shadow_soft_size
+    elif obj.type == 'LIGHT_PROBE':
+        radius = obj.data.influence_distance
+    elif obj.type == 'CAMERA':
+        radius = obj.data.display_size
+    elif hasattr(obj, 'bound_box'):
+        vertex_indices = (
+            (0+index, 1+index), (1+index, 2+index),
+            (2+index, 3+index), (0+index, 3+index),
+            (4+index, 5+index), (5+index, 6+index),
+            (6+index, 7+index), (4+index, 7+index),
+            (0+index, 4+index), (1+index, 5+index),
+            (2+index, 6+index), (3+index, 7+index))
+        vertex_pos = get_bb_coords_from_obj(obj)
+        return vertex_pos, vertex_indices
+
     coords = [
         (-radius, -radius, -radius), (+radius, -radius, -radius),
         (-radius, +radius, -radius), (+radius, +radius, -radius),
@@ -112,9 +138,32 @@ def bbox_from_obj(obj: bpy.types.Object, radius: float) -> list:
     base = obj.matrix_world
     bbox_corners = [base @ mathutils.Vector(corner) for corner in coords]

-    return [(point.x, point.y, point.z)
-            for point in bbox_corners]
+    vertex_pos = [(point.x, point.y, point.z) for point in bbox_corners]
+
+    return vertex_pos, vertex_indices
+
+
+def bbox_from_instance_collection(ic: bpy.types.Object, index: int = 0) -> list:
+    """ Generate a bounding box for a given instance collection by using its objects
+
+    :param ic: target instance collection
+    :type ic: bpy.types.Object
+    :param index: indice offset
+    :type index: int
+    :return: list of 8*objs points [(x,y,z),...], tuple of 12*objs link between these points [(1,2),...]
+    """
+    vertex_pos = []
+    vertex_indices = ()
+
+    for obj_index, obj in enumerate(ic.instance_collection.objects):
+        vertex_pos_temp, vertex_indices_temp = bbox_from_obj(obj, index=index+obj_index)
+        vertex_pos += vertex_pos_temp
+        vertex_indices += vertex_indices_temp
+
+    bbox_corners = [ic.matrix_world @ mathutils.Vector(vertex) for vertex in vertex_pos]
+
+    vertex_pos = [(point.x, point.y, point.z) for point in bbox_corners]
+
+    return vertex_pos, vertex_indices


 def generate_user_camera() -> list:
     """ Generate a basic camera represention of the user point of view
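A rough standalone sketch of the index-offset scheme the new bbox helpers rely on: each object contributes 8 vertices, so its edge indices are shifted by 8*index before the per-object lists are concatenated into a single LINES batch. The names below are illustrative, not the addon's API.

def box_edges(index=0):
    """Edge list for one box, shifted by 8 vertices per preceding object."""
    offset = 8 * index
    return tuple((a + offset, b + offset) for a, b in
                 ((0, 1), (0, 2), (1, 3), (2, 3),
                  (4, 5), (4, 6), (5, 7), (6, 7),
                  (0, 4), (1, 5), (2, 6), (3, 7)))

def unit_box(radius=1.0):
    """Eight corners of an axis-aligned cube around the origin."""
    return [(x, y, z) for x in (-radius, radius)
            for y in (-radius, radius)
            for z in (-radius, radius)]

# Concatenate two objects into one vertex/index pair, as the selection widget does.
vertex_pos, vertex_ind = [], ()
for i in range(2):
    vertex_pos += unit_box()
    vertex_ind += box_edges(index=i)

assert max(v for edge in vertex_ind for v in edge) == len(vertex_pos) - 1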
@@ -175,7 +224,7 @@ def get_bb_coords_from_obj(object: bpy.types.Object, instance: bpy.types.Object

     bbox_corners = [base @ mathutils.Vector(
         corner) for corner in object.bound_box]

     return [(point.x, point.y, point.z) for point in bbox_corners]

@@ -203,6 +252,13 @@ class Widget(object):
         """
         return True

+    def configure_bgl(self):
+        bgl.glLineWidth(2.)
+        bgl.glEnable(bgl.GL_DEPTH_TEST)
+        bgl.glEnable(bgl.GL_BLEND)
+        bgl.glEnable(bgl.GL_LINE_SMOOTH)
+
+
     def draw(self):
         """How to draw the widget
         """
@@ -256,11 +312,6 @@ class UserFrustumWidget(Widget):
             {"pos": positions},
             indices=self.indices)

-        bgl.glLineWidth(2.)
-        bgl.glEnable(bgl.GL_DEPTH_TEST)
-        bgl.glEnable(bgl.GL_BLEND)
-        bgl.glEnable(bgl.GL_LINE_SMOOTH)
-
         shader.bind()
         shader.uniform_float("color", self.data.get('color'))
         batch.draw(shader)
@@ -272,6 +323,8 @@ class UserSelectionWidget(Widget):
             username):
         self.username = username
         self.settings = bpy.context.window_manager.session
+        self.current_selection_ids = []
+        self.current_selected_objects = []

     @property
     def data(self):
@@ -281,6 +334,15 @@ class UserSelectionWidget(Widget):
         else:
             return None

+    @property
+    def selected_objects(self):
+        user_selection = self.data.get('selected_objects')
+        if self.current_selection_ids != user_selection:
+            self.current_selected_objects = [find_from_attr("uuid", uid, bpy.data.objects) for uid in user_selection]
+            self.current_selection_ids = user_selection
+
+        return self.current_selected_objects
+
     def poll(self):
         if self.data is None:
             return False
@@ -295,48 +357,31 @@ class UserSelectionWidget(Widget):
             self.settings.enable_presence

     def draw(self):
-        user_selection = self.data.get('selected_objects')
-        for select_ob in user_selection:
-            ob = find_from_attr("uuid", select_ob, bpy.data.objects)
-            if not ob:
-                return
-
-            vertex_pos = bbox_from_obj(ob, 1.0)
-            vertex_indices = ((0, 1), (0, 2), (1, 3), (2, 3),
-                              (4, 5), (4, 6), (5, 7), (6, 7),
-                              (0, 4), (1, 5), (2, 6), (3, 7))
-
-            if ob.instance_collection:
-                for obj in ob.instance_collection.objects:
-                    if obj.type == 'MESH' and hasattr(obj, 'bound_box'):
-                        vertex_pos = get_bb_coords_from_obj(obj, instance=ob)
-                        break
-            elif ob.type == 'EMPTY':
-                vertex_pos = bbox_from_obj(ob, ob.empty_display_size)
-            elif ob.type == 'LIGHT':
-                vertex_pos = bbox_from_obj(ob, ob.data.shadow_soft_size)
-            elif ob.type == 'LIGHT_PROBE':
-                vertex_pos = bbox_from_obj(ob, ob.data.influence_distance)
-            elif ob.type == 'CAMERA':
-                vertex_pos = bbox_from_obj(ob, ob.data.display_size)
-            elif hasattr(ob, 'bound_box'):
-                vertex_indices = (
-                    (0, 1), (1, 2), (2, 3), (0, 3),
-                    (4, 5), (5, 6), (6, 7), (4, 7),
-                    (0, 4), (1, 5), (2, 6), (3, 7))
-                vertex_pos = get_bb_coords_from_obj(ob)
-
-            shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
-            batch = batch_for_shader(
-                shader,
-                'LINES',
-                {"pos": vertex_pos},
-                indices=vertex_indices)
-
-            shader.bind()
-            shader.uniform_float("color", self.data.get('color'))
-            batch.draw(shader)
+        vertex_pos = []
+        vertex_ind = []
+        collection_offset = 0
+        for obj_index, obj in enumerate(self.selected_objects):
+            if obj is None:
+                continue
+            obj_index+=collection_offset
+            if hasattr(obj, 'instance_collection') and obj.instance_collection:
+                bbox_pos, bbox_ind = bbox_from_instance_collection(obj, index=obj_index)
+                collection_offset+=len(obj.instance_collection.objects)-1
+            else :
+                bbox_pos, bbox_ind = bbox_from_obj(obj, index=obj_index)
+            vertex_pos += bbox_pos
+            vertex_ind += bbox_ind
+
+        shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
+        batch = batch_for_shader(
+            shader,
+            'LINES',
+            {"pos": vertex_pos},
+            indices=vertex_ind)
+
+        shader.bind()
+        shader.uniform_float("color", self.data.get('color'))
+        batch.draw(shader)


 class UserNameWidget(Widget):
     draw_type = 'POST_PIXEL'
@@ -380,6 +425,62 @@ class UserNameWidget(Widget):
             blf.color(0, color[0], color[1], color[2], color[3])
             blf.draw(0, self.username)

+
+class UserModeWidget(Widget):
+    draw_type = 'POST_PIXEL'
+
+    def __init__(
+            self,
+            username):
+        self.username = username
+        self.settings = bpy.context.window_manager.session
+        self.preferences = get_preferences()
+
+    @property
+    def data(self):
+        user = session.online_users.get(self.username)
+        if user:
+            return user.get('metadata')
+        else:
+            return None
+
+    def poll(self):
+        if self.data is None:
+            return False
+
+        scene_current = self.data.get('scene_current')
+        mode_current = self.data.get('mode_current')
+        user_selection = self.data.get('selected_objects')
+
+        return (scene_current == bpy.context.scene.name or
+                mode_current == bpy.context.mode or
+                self.settings.presence_show_far_user) and \
+            user_selection and \
+            self.settings.presence_show_mode and \
+            self.settings.enable_presence
+
+    def draw(self):
+        user_selection = self.data.get('selected_objects')
+        area, region, rv3d = view3d_find()
+        viewport_coord = project_to_viewport(region, rv3d, (0, 0))
+
+        obj = find_from_attr("uuid", user_selection[0], bpy.data.objects)
+        if not obj:
+            return
+        mode_current = self.data.get('mode_current')
+        color = self.data.get('color')
+        origin_coord = project_to_screen(obj.location)
+
+        distance_viewport_object = math.sqrt((viewport_coord[0]-obj.location[0])**2+(viewport_coord[1]-obj.location[1])**2+(viewport_coord[2]-obj.location[2])**2)
+
+        if distance_viewport_object > self.preferences.presence_mode_distance :
+            return
+
+        if origin_coord :
+            blf.position(0, origin_coord[0]+8, origin_coord[1]-15, 0)
+            blf.size(0, 16, 72)
+            blf.color(0, color[0], color[1], color[2], color[3])
+            blf.draw(0, mode_current)
+
+
 class SessionStatusWidget(Widget):
     draw_type = 'POST_PIXEL'
@@ -399,7 +500,7 @@ class SessionStatusWidget(Widget):
         text_scale = self.preferences.presence_hud_scale
         ui_scale = bpy.context.preferences.view.ui_scale
         color = [1, 1, 0, 1]
-        state = session.state.get('STATE')
+        state = session.state
         state_str = f"{get_state_str(state)}"

         if state == STATE_ACTIVE:
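Across these hunks the session state switches from a dict lookup (session.state['STATE']) to a plain constant returned by a property, matching the SessionData holder introduced further down. A minimal sketch of the new access pattern, with placeholder constant values:

STATE_INITIAL, STATE_ACTIVE, STATE_LOBBY = 0, 2, 6  # placeholder values only

class _Remote:
    connection_status = STATE_ACTIVE

class _Session:
    remote = _Remote()

    @property
    def state(self):
        # New style: a bare constant, no more session.state['STATE']
        return self.remote.connection_status if self.remote else STATE_INITIAL

session = _Session()
assert session.state in (STATE_ACTIVE, STATE_LOBBY)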
@@ -462,6 +563,7 @@ class DrawFactory(object):
         try:
             for widget in self.widgets.values():
                 if widget.draw_type == 'POST_VIEW' and widget.poll():
+                    widget.configure_bgl()
                     widget.draw()
         except Exception as e:
             logging.error(
@@ -471,6 +573,7 @@ class DrawFactory(object):
         try:
             for widget in self.widgets.values():
                 if widget.draw_type == 'POST_PIXEL' and widget.poll():
+                    widget.configure_bgl()
                     widget.draw()
         except Exception as e:
             logging.error(
@@ -483,6 +586,7 @@ this.renderer = DrawFactory()

 def register():
     this.renderer.register_handlers()

     this.renderer.add_widget("session_status", SessionStatusWidget())

multi_user/shared_data.py (new file, 48 lines)
@@ -0,0 +1,48 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+from replication.constants import STATE_INITIAL
+
+
+class SessionData():
+    """ A structure to share easily the current session data across the addon
+    modules.
+    This object will completely replace the Singleton lying in replication
+    interface module.
+    """
+
+    def __init__(self):
+        self.repository = None  # The current repository
+        self.remote = None  # The active remote
+        self.server = None
+        self.applied_updates = []
+
+    @property
+    def state(self):
+        if self.remote is None:
+            return STATE_INITIAL
+        else:
+            return self.remote.connection_status
+
+    def clear(self):
+        self.remote = None
+        self.repository = None
+        self.server = None
+        self.applied_updates = []
+
+
+session = SessionData()
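A hedged usage sketch for the new shared SessionData holder: consumers read the state property and the applied_updates list instead of poking the replication singleton. The handler-style use below is an assumption for illustration; it is not shown in this diff.

STATE_ACTIVE = 2  # placeholder for replication.constants.STATE_ACTIVE

class _FakeRemote:
    connection_status = STATE_ACTIVE

class _FakeSharedSession:
    remote = _FakeRemote()
    applied_updates = ["outdated-uuid"]

    @property
    def state(self):
        return self.remote.connection_status

session = _FakeSharedSession()

def should_commit(datablock_uuid):
    """Skip echoes of updates the ApplyTimer just applied locally."""
    if session.state != STATE_ACTIVE:
        return False
    if datablock_uuid in session.applied_updates:
        session.applied_updates.remove(datablock_uuid)
        return False
    return True

assert should_commit("outdated-uuid") is False
assert should_commit("fresh-uuid") is True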
@@ -24,12 +24,15 @@ from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE,
                                    STATE_SRV_SYNC, STATE_SYNCING, UP)
 from replication.exception import NonAuthorizedOperationError, ContextError
 from replication.interface import session
+from replication import porcelain
+
 from . import operators, utils
-from .presence import (UserFrustumWidget, UserNameWidget, UserSelectionWidget,
+from .presence import (UserFrustumWidget, UserNameWidget, UserModeWidget, UserSelectionWidget,
                        generate_user_camera, get_view_matrix, refresh_3d_view,
                        refresh_sidebar_view, renderer)

+from . import shared_data
+
 this = sys.modules[__name__]

 # Registered timers
@@ -38,7 +41,8 @@ this.registry = dict()
 def is_annotating(context: bpy.types.Context):
     """ Check if the annotate mode is enabled
     """
-    return bpy.context.workspace.tools.from_space_view3d_mode('OBJECT', create=False).idname == 'builtin.annotate'
+    active_tool = bpy.context.workspace.tools.from_space_view3d_mode('OBJECT', create=False)
+    return (active_tool and active_tool.idname == 'builtin.annotate')


 class Timer(object):
@@ -71,7 +75,8 @@ class Timer(object):
             except Exception as e:
                 logging.error(e)
                 self.unregister()
-                session.disconnect()
+                traceback.print_exc()
+                session.disconnect(reason=f"Error during timer {self.id} execution")
             else:
                 if self.is_running:
                     return self._timeout
@@ -87,7 +92,7 @@ class Timer(object):
         if bpy.app.timers.is_registered(self.main):
             logging.info(f"Unregistering {self.id}")
             bpy.app.timers.unregister(self.main)

         del this.registry[self.id]
         self.is_running = False

@@ -98,127 +103,135 @@ class SessionBackupTimer(Timer):

     def execute(self):
-        session.save(self._filepath)
+        session.repository.dumps(self._filepath)
+
+
+class SessionListenTimer(Timer):
+    def execute(self):
+        session.listen()


 class ApplyTimer(Timer):
     def execute(self):
-        if session and session.state['STATE'] == STATE_ACTIVE:
-            nodes = session.list()
-
-            for node in nodes:
-                node_ref = session.get(uuid=node)
+        if session and session.state == STATE_ACTIVE:
+            for node in session.repository.graph.keys():
+                node_ref = session.repository.graph.get(node)

                 if node_ref.state == FETCHED:
                     try:
-                        session.apply(node)
+                        shared_data.session.applied_updates.append(node)
+                        porcelain.apply(session.repository, node)
                     except Exception as e:
                         logging.error(f"Fail to apply {node_ref.uuid}")
                         traceback.print_exc()
                     else:
-                        if node_ref.bl_reload_parent:
-                            for parent in session._graph.find_parents(node):
-                                logging.debug("Refresh parent {node}")
-                                session.apply(parent, force=True)
+                        impl = session.repository.rdp.get_implementation(node_ref.instance)
+                        if impl.bl_reload_parent:
+                            for parent in session.repository.graph.get_parents(node):
+                                logging.debug("Refresh parent {node}")
+                                porcelain.apply(session.repository,
+                                                parent.uuid,
+                                                force=True)
+                        if hasattr(impl, 'bl_reload_child') and impl.bl_reload_child:
+                            for dep in node_ref.dependencies:
+                                porcelain.apply(session.repository,
+                                                dep,
+                                                force=True)
+
+
+class AnnotationUpdates(Timer):
+    def __init__(self, timeout=1):
+        self._annotating = False
+        self._settings = utils.get_preferences()
+
+        super().__init__(timeout)
+
+    def execute(self):
+        if session and session.state == STATE_ACTIVE:
+            ctx = bpy.context
+            annotation_gp = ctx.scene.grease_pencil
+
+            if annotation_gp and not annotation_gp.uuid:
+                ctx.scene.update_tag()
+
+            # if an annotation exist and is tracked
+            if annotation_gp and annotation_gp.uuid:
+                registered_gp = session.repository.graph.get(annotation_gp.uuid)
+                if is_annotating(bpy.context):
+                    # try to get the right on it
+                    if registered_gp.owner == RP_COMMON:
+                        self._annotating = True
+                        logging.debug(
+                            "Getting the right on the annotation GP")
+                        porcelain.lock(session.repository,
+                                       [registered_gp.uuid],
+                                       ignore_warnings=True,
+                                       affect_dependencies=False)
+
+                    if registered_gp.owner == self._settings.username:
+                        porcelain.commit(session.repository, annotation_gp.uuid)
+                        porcelain.push(session.repository, 'origin', annotation_gp.uuid)
+
+                elif self._annotating:
+                    porcelain.unlock(session.repository,
+                                     [registered_gp.uuid],
+                                     ignore_warnings=True,
+                                     affect_dependencies=False)
+                    self._annotating = False


 class DynamicRightSelectTimer(Timer):
     def __init__(self, timeout=.1):
         super().__init__(timeout)
-        self._last_selection = []
+        self._last_selection = set()
         self._user = None
-        self._annotating = False

     def execute(self):
         settings = utils.get_preferences()

-        if session and session.state['STATE'] == STATE_ACTIVE:
+        if session and session.state == STATE_ACTIVE:
             # Find user
             if self._user is None:
                 self._user = session.online_users.get(settings.username)

             if self._user:
-                ctx = bpy.context
-                annotation_gp = ctx.scene.grease_pencil
-
-                if annotation_gp and not annotation_gp.uuid:
-                    ctx.scene.update_tag()
-
-                # if an annotation exist and is tracked
-                if annotation_gp and annotation_gp.uuid:
-                    registered_gp = session.get(uuid=annotation_gp.uuid)
-                    if is_annotating(bpy.context):
-                        # try to get the right on it
-                        if registered_gp.owner == RP_COMMON:
-                            self._annotating = True
-                            logging.debug(
-                                "Getting the right on the annotation GP")
-                            session.change_owner(
-                                registered_gp.uuid,
-                                settings.username,
-                                ignore_warnings=True,
-                                affect_dependencies=False)
-
-                        if registered_gp.owner == settings.username:
-                            gp_node = session.get(uuid=annotation_gp.uuid)
-                            if gp_node.has_changed():
-                                session.commit(gp_node.uuid)
-                                session.push(gp_node.uuid, check_data=False)
-
-                    elif self._annotating:
-                        session.change_owner(
-                            registered_gp.uuid,
-                            RP_COMMON,
-                            ignore_warnings=True,
-                            affect_dependencies=False)
-
-                current_selection = utils.get_selected_objects(
+                current_selection = set(utils.get_selected_objects(
                     bpy.context.scene,
                     bpy.data.window_managers['WinMan'].windows[0].view_layer
-                )
+                ))
                 if current_selection != self._last_selection:
-                    obj_common = [
-                        o for o in self._last_selection if o not in current_selection]
-                    obj_ours = [
-                        o for o in current_selection if o not in self._last_selection]
-
-                    # change old selection right to common
-                    for obj in obj_common:
-                        node = session.get(uuid=obj)
-
-                        if node and (node.owner == settings.username or node.owner == RP_COMMON):
-                            recursive = True
-                            if node.data and 'instance_type' in node.data.keys():
-                                recursive = node.data['instance_type'] != 'COLLECTION'
-                            try:
-                                session.change_owner(
-                                    node.uuid,
-                                    RP_COMMON,
-                                    ignore_warnings=True,
-                                    affect_dependencies=recursive)
-                            except NonAuthorizedOperationError:
-                                logging.warning(
-                                    f"Not authorized to change {node} owner")
-
-                    # change new selection to our
-                    for obj in obj_ours:
-                        node = session.get(uuid=obj)
-
-                        if node and node.owner == RP_COMMON:
-                            recursive = True
-                            if node.data and 'instance_type' in node.data.keys():
-                                recursive = node.data['instance_type'] != 'COLLECTION'
-
-                            try:
-                                session.change_owner(
-                                    node.uuid,
-                                    settings.username,
-                                    ignore_warnings=True,
-                                    affect_dependencies=recursive)
-                            except NonAuthorizedOperationError:
-                                logging.warning(
-                                    f"Not authorized to change {node} owner")
-                        else:
-                            return
+                    to_lock = list(current_selection.difference(self._last_selection))
+                    to_release = list(self._last_selection.difference(current_selection))
+                    instances_to_lock = list()
+
+                    for node_id in to_lock:
+                        node = session.repository.graph.get(node_id)
+                        instance_mode = node.data.get('instance_type')
+                        if instance_mode and instance_mode == 'COLLECTION':
+                            to_lock.remove(node_id)
+                            instances_to_lock.append(node_id)
+                    if instances_to_lock:
+                        try:
+                            porcelain.lock(session.repository,
+                                           instances_to_lock,
+                                           ignore_warnings=True,
+                                           affect_dependencies=False)
+                        except NonAuthorizedOperationError as e:
+                            logging.warning(e)
+
+                    if to_release:
+                        try:
+                            porcelain.unlock(session.repository,
+                                             to_release,
+                                             ignore_warnings=True,
+                                             affect_dependencies=True)
+                        except NonAuthorizedOperationError as e:
+                            logging.warning(e)
+                    if to_lock:
+                        try:
+                            porcelain.lock(session.repository,
+                                           to_lock,
+                                           ignore_warnings=True,
+                                           affect_dependencies=True)
+                        except NonAuthorizedOperationError as e:
+                            logging.warning(e)

                     self._last_selection = current_selection

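The reworked selection timer keeps the previous selection as a set, so the nodes to lock and to release fall straight out of two set differences. A small standalone illustration of that bookkeeping (names are illustrative):

last_selection = {"a", "b", "c"}
current_selection = {"b", "c", "d"}

to_lock = sorted(current_selection - last_selection)     # newly selected -> lock
to_release = sorted(last_selection - current_selection)  # deselected -> unlock

assert to_lock == ["d"]
assert to_release == ["a"]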
@@ -226,31 +239,29 @@ class DynamicRightSelectTimer(Timer):
                         'selected_objects': current_selection
                     }

-                    session.update_user_metadata(user_metadata)
+                    porcelain.update_user_metadata(session.repository, user_metadata)
                     logging.debug("Update selection")

                     # Fix deselection until right managment refactoring (with Roles concepts)
                     if len(current_selection) == 0 :
-                        owned_keys = session.list(
-                            filter_owner=settings.username)
-                        for key in owned_keys:
-                            node = session.get(uuid=key)
+                        owned_keys = [k for k, v in session.repository.graph.items() if v.owner==settings.username]
+                        if owned_keys:
                             try:
-                                session.change_owner(
-                                    key,
-                                    RP_COMMON,
-                                    ignore_warnings=True,
-                                    affect_dependencies=recursive)
-                            except NonAuthorizedOperationError:
-                                logging.warning(
-                                    f"Not authorized to change {key} owner")
+                                porcelain.unlock(session.repository,
+                                                 owned_keys,
+                                                 ignore_warnings=True,
+                                                 affect_dependencies=True)
+                            except NonAuthorizedOperationError as e:
+                                logging.warning(e)

+            # Objects selectability
             for obj in bpy.data.objects:
                 object_uuid = getattr(obj, 'uuid', None)
                 if object_uuid:
-                    is_selectable = not session.is_readonly(object_uuid)
+                    is_selectable = not session.repository.is_node_readonly(object_uuid)
                     if obj.hide_select != is_selectable:
                         obj.hide_select = is_selectable
+                        shared_data.session.applied_updates.append(object_uuid)


 class ClientUpdate(Timer):
@@ -263,7 +274,7 @@ class ClientUpdate(Timer):
         settings = utils.get_preferences()

         if session and renderer:
-            if session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]:
+            if session.state in [STATE_ACTIVE, STATE_LOBBY]:
                 local_user = session.online_users.get(
                     settings.username)

@@ -300,20 +311,24 @@ class ClientUpdate(Timer):
                         settings.client_color.b,
                         1),
                     'frame_current': bpy.context.scene.frame_current,
-                    'scene_current': scene_current
+                    'scene_current': scene_current,
+                    'mode_current': bpy.context.mode
                 }
-                session.update_user_metadata(metadata)
+                porcelain.update_user_metadata(session.repository, metadata)

                 # Update client representation
                 # Update client current scene
                 elif scene_current != local_user_metadata['scene_current']:
                     local_user_metadata['scene_current'] = scene_current
-                    session.update_user_metadata(local_user_metadata)
+                    porcelain.update_user_metadata(session.repository, local_user_metadata)
                 elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']:
                     local_user_metadata['view_corners'] = current_view_corners
                     local_user_metadata['view_matrix'] = get_view_matrix(
                     )
-                    session.update_user_metadata(local_user_metadata)
+                    porcelain.update_user_metadata(session.repository, local_user_metadata)
+                elif bpy.context.mode != local_user_metadata['mode_current']:
+                    local_user_metadata['mode_current'] = bpy.context.mode
+                    porcelain.update_user_metadata(session.repository, local_user_metadata)


 class SessionStatusUpdate(Timer):
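The user metadata pushed through porcelain.update_user_metadata() now also carries the active mode. A hedged sketch of the payload shape implied by these hunks; field names are taken from the diff, the values here are purely illustrative:

metadata = {
    'frame_current': 42,          # bpy.context.scene.frame_current in the addon
    'scene_current': "Scene",     # name of the active scene
    'mode_current': "OBJECT",     # bpy.context.mode, newly added by this change
    'color': (0.8, 0.2, 0.2, 1),  # user color taken from the preferences
}
# porcelain.update_user_metadata(session.repository, metadata)  # as in the hunk above
print(metadata)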
@@ -341,6 +356,7 @@ class SessionUserSync(Timer):
                 renderer.remove_widget(f"{user.username}_cam")
                 renderer.remove_widget(f"{user.username}_select")
                 renderer.remove_widget(f"{user.username}_name")
+                renderer.remove_widget(f"{user.username}_mode")
                 ui_users.remove(index)
                 break

@@ -356,6 +372,8 @@ class SessionUserSync(Timer):
                     f"{user}_select", UserSelectionWidget(user))
                 renderer.add_widget(
                     f"{user}_name", UserNameWidget(user))
+                renderer.add_widget(
+                    f"{user}_mode", UserModeWidget(user))


 class MainThreadExecutor(Timer):

multi_user/ui.py (593 changed lines)
@@ -16,7 +16,9 @@
 # ##### END GPL LICENSE BLOCK #####


+from logging import log
 import bpy
+import bpy.utils.previews

 from .utils import get_preferences, get_expanded_icon, get_folder_size, get_state_str
 from replication.constants import (ADDED, ERROR, FETCHED,
@@ -26,7 +28,7 @@ from replication.constants import (ADDED, ERROR, FETCHED,
                                    STATE_INITIAL, STATE_SRV_SYNC,
                                    STATE_WAITING, STATE_QUITTING,
                                    STATE_LOBBY,
-                                   STATE_LAUNCHING_SERVICES)
+                                   CONNECTING)
 from replication import __version__
 from replication.interface import session
 from .timers import registry
@@ -71,157 +73,132 @@ class SESSION_PT_settings(bpy.types.Panel):

     def draw_header(self, context):
         layout = self.layout
-        if session and session.state['STATE'] != STATE_INITIAL:
+        settings = get_preferences()
+
+        from multi_user import icons
+        offline_icon = icons.icons_col["session_status_offline"]
+        waiting_icon = icons.icons_col["session_status_waiting"]
+        online_icon = icons.icons_col["session_status_online"]
+
+        if session and session.state != STATE_INITIAL:
             cli_state = session.state
-            state = session.state.get('STATE')
-            connection_icon = "KEYTYPE_MOVING_HOLD_VEC"
+            state = session.state
+            connection_icon = offline_icon

             if state == STATE_ACTIVE:
-                connection_icon = 'PROP_ON'
+                connection_icon = online_icon
             else:
-                connection_icon = 'PROP_CON'
+                connection_icon = waiting_icon

-            layout.label(text=f"Session - {get_state_str(cli_state['STATE'])}", icon=connection_icon)
+            layout.label(text=f"{str(settings.server_name)} - {get_state_str(cli_state)}", icon_value=connection_icon.icon_id)
         else:
-            layout.label(text=f"Session - v{__version__}",icon="PROP_OFF")
+            layout.label(text=f"Multi-user - v{__version__}", icon="ANTIALIASED")

     def draw(self, context):
         layout = self.layout
-        row = layout.row()
         runtime_settings = context.window_manager.session
         settings = get_preferences()

-        if hasattr(context.window_manager, 'session'):
-            # STATE INITIAL
-            if not session \
-                    or (session and session.state['STATE'] == STATE_INITIAL):
-                pass
-            else:
-                cli_state = session.state
-                row = layout.row()
-
-                current_state = cli_state['STATE']
-                info_msg = None
-
-                if current_state in [STATE_ACTIVE]:
-                    row = row.grid_flow(row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
-                    row.prop(settings.sync_flags, "sync_render_settings",text="",icon_only=True, icon='SCENE')
-                    row.prop(settings.sync_flags, "sync_during_editmode", text="",icon_only=True, icon='EDITMODE_HLT')
-                    row.prop(settings.sync_flags, "sync_active_camera", text="",icon_only=True, icon='OBJECT_DATAMODE')
-
-                row= layout.row()
-
-                if current_state in [STATE_ACTIVE] and runtime_settings.is_host:
-                    info_msg = f"LAN: {runtime_settings.internet_ip}"
-                if current_state == STATE_LOBBY:
-                    info_msg = "Waiting for the session to start."
-
-                if info_msg:
-                    info_box = row.box()
-                    info_box.row().label(text=info_msg,icon='INFO')
-
-                # Progress bar
-                if current_state in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]:
-                    info_box = row.box()
-                    info_box.row().label(text=printProgressBar(
-                        cli_state['CURRENT'],
-                        cli_state['TOTAL'],
-                        length=16
-                    ))
-
-                layout.row().operator("session.stop", icon='QUIT', text="Exit")
+        if settings.is_first_launch:
+            # USER SETTINGS
+            row = layout.row()
+            row.label(text="1. Enter your username and color:")
+            row = layout.row()
+            split = row.split(factor=0.7, align=True)
+            split.prop(settings, "username", text="")
+            split.prop(settings, "client_color", text="")
+
+            # DOC
+            row = layout.row()
+            row.label(text="2. New here ? See the doc:")
+            row = layout.row()
+            row.operator("doc.get", text="Documentation", icon="HELP")
+
+            # START
+            row = layout.row()
+            row.label(text="3: Start the Multi-user:")
+            row = layout.row()
+            row.scale_y = 2
+            row.operator("firstlaunch.verify", text="Continue")
+
+        if not settings.is_first_launch:
+            if hasattr(context.window_manager, 'session'):
+                # STATE INITIAL
+                if not session \
+                        or (session and session.state == STATE_INITIAL):
+                    layout = self.layout
+                    settings = get_preferences()
+                    server_preset = settings.server_preset
+                    selected_server = context.window_manager.server_index if context.window_manager.server_index<=len(server_preset)-1 else 0
+                    active_server_name = server_preset[selected_server].name if len(server_preset)>=1 else ""
+                    is_server_selected = True if active_server_name else False
+
+                    # SERVER LIST
+                    row = layout.row()
+                    box = row.box()
+                    box.scale_y = 0.7
+                    split = box.split(factor=0.7)
+                    split.label(text="Server")
+                    split.label(text="Online")
+
+                    col = row.column(align=True)
+                    col.operator("session.get_info", icon="FILE_REFRESH", text="")
+
+                    row = layout.row()
+                    col = row.column(align=True)
+                    col.template_list("SESSION_UL_network", "", settings, "server_preset", context.window_manager, "server_index")
+                    col.separator()
+                    connectOp = col.row()
+                    connectOp.enabled =is_server_selected
+                    connectOp.operator("session.connect", text="Connect")
+
+                    col = row.column(align=True)
+                    col.operator("session.preset_server_add", icon="ADD", text="") # TODO : add conditions (need a name, etc..)
+                    row_visible = col.row(align=True)
+                    col_visible = row_visible.column(align=True)
+                    col_visible.enabled = is_server_selected
+                    col_visible.operator("session.preset_server_remove", icon="REMOVE", text="").target_server_name = active_server_name
+                    col_visible.separator()
+                    col_visible.operator("session.preset_server_edit", icon="GREASEPENCIL", text="").target_server_name = active_server_name
+
+                else:
+                    exitbutton = layout.row()
+                    exitbutton.scale_y = 1.5
+                    exitbutton.operator("session.stop", icon='QUIT', text="Disconnect")
+
+                    progress = session.state_progress
+                    current_state = session.state
+                    info_msg = None
+
+                    if current_state == STATE_LOBBY:
+                        usr = session.online_users.get(settings.username)
+                        row= layout.row()
+                        info_msg = "Waiting for the session to start."
+                        if usr and usr['admin']:
+                            info_msg = "Init the session to start."
+                            info_box = layout.row()
+                            info_box.label(text=info_msg,icon='INFO')
+                            init_row = layout.row()
+                            init_row.operator("session.init", icon='TOOL_SETTINGS', text="Init")
+                        else:
+                            info_box = layout.row()
+                            info_box.row().label(text=info_msg,icon='INFO')
+
+                    # PROGRESS BAR
+                    if current_state in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]:
+                        row= layout.row()
+                        row.label(text=f"Status: {get_state_str(current_state)}")
+                        row= layout.row()
+                        info_box = row.box()
+                        info_box.label(text=printProgressBar(
+                            progress['current'],
+                            progress['total'],
+                            length=16
+                        ))

-class SESSION_PT_settings_network(bpy.types.Panel):
-    bl_idname = "MULTIUSER_SETTINGS_NETWORK_PT_panel"
-    bl_label = "Network"
-    bl_space_type = 'VIEW_3D'
-    bl_region_type = 'UI'
-    bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
-
-    @classmethod
-    def poll(cls, context):
-        return not session \
-            or (session and session.state['STATE'] == 0)
-
-    def draw_header(self, context):
-        self.layout.label(text="", icon='URL')
-
-    def draw(self, context):
-        layout = self.layout
-
-        runtime_settings = context.window_manager.session
-        settings = get_preferences()
-
-        # USER SETTINGS
-        row = layout.row()
-        row.prop(runtime_settings, "session_mode", expand=True)
-        row = layout.row()
-
-        box = row.box()
-
-        if runtime_settings.session_mode == 'HOST':
-            row = box.row()
-            row.label(text="Port:")
-            row.prop(settings, "port", text="")
-            row = box.row()
-            row.label(text="Start from:")
-            row.prop(settings, "init_method", text="")
-            row = box.row()
-            row.label(text="Admin password:")
-            row.prop(runtime_settings, "password", text="")
-            row = box.row()
-            row.operator("session.start", text="HOST").host = True
-        else:
-            row = box.row()
-            row.prop(settings, "ip", text="IP")
-            row = box.row()
-            row.label(text="Port:")
-            row.prop(settings, "port", text="")
-
-            row = box.row()
-            row.prop(runtime_settings, "admin", text='Connect as admin', icon='DISCLOSURE_TRI_DOWN' if runtime_settings.admin
-                     else 'DISCLOSURE_TRI_RIGHT')
-            if runtime_settings.admin:
-                row = box.row()
-                row.label(text="Password:")
-                row.prop(runtime_settings, "password", text="")
-            row = box.row()
-            row.operator("session.start", text="CONNECT").host = False
-
-
-class SESSION_PT_settings_user(bpy.types.Panel):
-    bl_idname = "MULTIUSER_SETTINGS_USER_PT_panel"
-    bl_label = "User info"
-    bl_space_type = 'VIEW_3D'
-    bl_region_type = 'UI'
-    bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
-
-    @classmethod
-    def poll(cls, context):
-        return not session \
-            or (session and session.state['STATE'] == 0)
-
-    def draw_header(self, context):
-        self.layout.label(text="", icon='USER')
-
-    def draw(self, context):
-        layout = self.layout
-
-        runtime_settings = context.window_manager.session
-        settings = get_preferences()
-
-        row = layout.row()
-        # USER SETTINGS
-        row.prop(settings, "username", text="name")
-
-        row = layout.row()
-        row.prop(settings, "client_color", text="color")
-        row = layout.row()
-
-
-class SESSION_PT_advanced_settings(bpy.types.Panel):
-    bl_idname = "MULTIUSER_SETTINGS_REPLICATION_PT_panel"
-    bl_label = "Advanced"
+
+class SESSION_PT_host_settings(bpy.types.Panel):
+    bl_idname = "MULTIUSER_SETTINGS_HOST_PT_panel"
+    bl_label = "Hosting"
     bl_space_type = 'VIEW_3D'
     bl_region_type = 'UI'
     bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
@@ -229,19 +206,82 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):

     @classmethod
     def poll(cls, context):
+        settings = get_preferences()
         return not session \
-            or (session and session.state['STATE'] == 0)
+            or (session and session.state == 0) \
+            and not settings.sidebar_advanced_shown \
+            and not settings.is_first_launch
+
+    def draw_header(self, context):
+        self.layout.label(text="", icon='NETWORK_DRIVE')
+
+    def draw(self, context):
+        layout = self.layout
+        settings = get_preferences()
+
+        #HOST
+        host_selection = layout.row().box()
+        host_selection_row = host_selection.row()
+        host_selection_row.label(text="Init the session from:")
+        host_selection_row.prop(settings, "init_method", text="")
+        host_selection_row = host_selection.row()
+        host_selection_row.label(text="Port:")
+        host_selection_row.prop(settings, "host_port", text="")
+        host_selection_row = host_selection.row()
+        host_selection_col = host_selection_row.column()
+        host_selection_col.prop(settings, "host_use_server_password", text="Server password:")
+        host_selection_col = host_selection_row.column()
+        host_selection_col.enabled = True if settings.host_use_server_password else False
+        host_selection_col.prop(settings, "host_server_password", text="")
+        host_selection_row = host_selection.row()
+        host_selection_col = host_selection_row.column()
+        host_selection_col.prop(settings, "host_use_admin_password", text="Admin password:")
+        host_selection_col = host_selection_row.column()
+        host_selection_col.enabled = True if settings.host_use_admin_password else False
+        host_selection_col.prop(settings, "host_admin_password", text="")
+
+        host_selection = layout.column()
+        host_selection.operator("session.host", text="Host")
+
+
+class SESSION_PT_advanced_settings(bpy.types.Panel):
+    bl_idname = "MULTIUSER_SETTINGS_REPLICATION_PT_panel"
+    bl_label = "General Settings"
+    bl_space_type = 'VIEW_3D'
+    bl_region_type = 'UI'
+    bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
+    bl_options = {'DEFAULT_CLOSED'}
+
+    @classmethod
+    def poll(cls, context):
+        settings = get_preferences()
+        return not session \
+            or (session and session.state == 0) \
+            and not settings.sidebar_advanced_shown \
+            and not settings.is_first_launch

     def draw_header(self, context):
         self.layout.label(text="", icon='PREFERENCES')

     def draw(self, context):
         layout = self.layout
-        runtime_settings = context.window_manager.session
         settings = get_preferences()

+        #ADVANCED USER INFO
+        uinfo_section = layout.row().box()
+        uinfo_section.prop(
+            settings,
+            "sidebar_advanced_uinfo_expanded",
+            text="User Info",
+            icon=get_expanded_icon(settings.sidebar_advanced_uinfo_expanded),
+            emboss=False)
+        if settings.sidebar_advanced_uinfo_expanded:
+            uinfo_section_row = uinfo_section.row()
+            uinfo_section_split = uinfo_section_row.split(factor=0.7, align=True)
+            uinfo_section_split.prop(settings, "username", text="")
+            uinfo_section_split.prop(settings, "client_color", text="")
+
+        #ADVANCED NET
         net_section = layout.row().box()
         net_section.prop(
             settings,
@@ -249,15 +289,15 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
             text="Network",
             icon=get_expanded_icon(settings.sidebar_advanced_net_expanded),
|
icon=get_expanded_icon(settings.sidebar_advanced_net_expanded),
|
||||||
emboss=False)
|
emboss=False)
|
||||||
|
|
||||||
if settings.sidebar_advanced_net_expanded:
|
if settings.sidebar_advanced_net_expanded:
|
||||||
net_section_row = net_section.row()
|
|
||||||
net_section_row.label(text="IPC Port:")
|
|
||||||
net_section_row.prop(settings, "ipc_port", text="")
|
|
||||||
net_section_row = net_section.row()
|
net_section_row = net_section.row()
|
||||||
net_section_row.label(text="Timeout (ms):")
|
net_section_row.label(text="Timeout (ms):")
|
||||||
net_section_row.prop(settings, "connection_timeout", text="")
|
net_section_row.prop(settings, "connection_timeout", text="")
|
||||||
|
net_section_row = net_section.row()
|
||||||
|
net_section_row.label(text="Server ping (ms):")
|
||||||
|
net_section_row.prop(settings, "ping_timeout", text="")
|
||||||
|
|
||||||
|
#ADVANCED REPLICATION
|
||||||
replication_section = layout.row().box()
|
replication_section = layout.row().box()
|
||||||
replication_section.prop(
|
replication_section.prop(
|
||||||
settings,
|
settings,
|
||||||
@ -265,16 +305,12 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
|
|||||||
text="Replication",
|
text="Replication",
|
||||||
icon=get_expanded_icon(settings.sidebar_advanced_rep_expanded),
|
icon=get_expanded_icon(settings.sidebar_advanced_rep_expanded),
|
||||||
emboss=False)
|
emboss=False)
|
||||||
|
|
||||||
if settings.sidebar_advanced_rep_expanded:
|
if settings.sidebar_advanced_rep_expanded:
|
||||||
replication_section_row = replication_section.row()
|
|
||||||
|
|
||||||
replication_section_row = replication_section.row()
|
replication_section_row = replication_section.row()
|
||||||
replication_section_row.prop(settings.sync_flags, "sync_render_settings")
|
replication_section_row.prop(settings.sync_flags, "sync_render_settings")
|
||||||
replication_section_row = replication_section.row()
|
replication_section_row = replication_section.row()
|
||||||
replication_section_row.prop(settings.sync_flags, "sync_active_camera")
|
replication_section_row.prop(settings.sync_flags, "sync_active_camera")
|
||||||
replication_section_row = replication_section.row()
|
replication_section_row = replication_section.row()
|
||||||
|
|
||||||
replication_section_row.prop(settings.sync_flags, "sync_during_editmode")
|
replication_section_row.prop(settings.sync_flags, "sync_during_editmode")
|
||||||
replication_section_row = replication_section.row()
|
replication_section_row = replication_section.row()
|
||||||
if settings.sync_flags.sync_during_editmode:
|
if settings.sync_flags.sync_during_editmode:
|
||||||
@ -283,7 +319,7 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
|
|||||||
replication_section_row = replication_section.row()
|
replication_section_row = replication_section.row()
|
||||||
replication_section_row.prop(settings, "depsgraph_update_rate", text="Apply delay")
|
replication_section_row.prop(settings, "depsgraph_update_rate", text="Apply delay")
|
||||||
|
|
||||||
|
#ADVANCED CACHE
|
||||||
cache_section = layout.row().box()
|
cache_section = layout.row().box()
|
||||||
cache_section.prop(
|
cache_section.prop(
|
||||||
settings,
|
settings,
|
||||||
@ -301,6 +337,8 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
|
|||||||
cache_section_row.prop(settings, "clear_memory_filecache", text="")
|
cache_section_row.prop(settings, "clear_memory_filecache", text="")
|
||||||
cache_section_row = cache_section.row()
|
cache_section_row = cache_section.row()
|
||||||
cache_section_row.operator('session.clear_cache', text=f"Clear cache ({get_folder_size(settings.cache_directory)})")
|
cache_section_row.operator('session.clear_cache', text=f"Clear cache ({get_folder_size(settings.cache_directory)})")
|
||||||
|
|
||||||
|
#ADVANCED LOG
|
||||||
log_section = layout.row().box()
|
log_section = layout.row().box()
|
||||||
log_section.prop(
|
log_section.prop(
|
||||||
settings,
|
settings,
|
||||||
@ -308,11 +346,11 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
|
|||||||
text="Logging",
|
text="Logging",
|
||||||
icon=get_expanded_icon(settings.sidebar_advanced_log_expanded),
|
icon=get_expanded_icon(settings.sidebar_advanced_log_expanded),
|
||||||
emboss=False)
|
emboss=False)
|
||||||
|
|
||||||
if settings.sidebar_advanced_log_expanded:
|
if settings.sidebar_advanced_log_expanded:
|
||||||
log_section_row = log_section.row()
|
log_section_row = log_section.row()
|
||||||
log_section_row.label(text="Log level:")
|
log_section_row.label(text="Log level:")
|
||||||
log_section_row.prop(settings, 'logging_level', text="")
|
log_section_row.prop(settings, 'logging_level', text="")
|
||||||
|
|
||||||
class SESSION_PT_user(bpy.types.Panel):
|
class SESSION_PT_user(bpy.types.Panel):
|
||||||
bl_idname = "MULTIUSER_USER_PT_panel"
|
bl_idname = "MULTIUSER_USER_PT_panel"
|
||||||
bl_label = "Online users"
|
bl_label = "Online users"
|
||||||
@ -322,7 +360,8 @@ class SESSION_PT_user(bpy.types.Panel):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def poll(cls, context):
|
def poll(cls, context):
|
||||||
return session and session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]
|
return session \
|
||||||
|
and session.state in [STATE_ACTIVE, STATE_LOBBY]
|
||||||
|
|
||||||
def draw_header(self, context):
|
def draw_header(self, context):
|
||||||
self.layout.label(text="", icon='USER')
|
self.layout.label(text="", icon='USER')
|
||||||
@ -334,26 +373,27 @@ class SESSION_PT_user(bpy.types.Panel):
|
|||||||
settings = get_preferences()
|
settings = get_preferences()
|
||||||
active_user = online_users[selected_user] if len(
|
active_user = online_users[selected_user] if len(
|
||||||
online_users)-1 >= selected_user else 0
|
online_users)-1 >= selected_user else 0
|
||||||
runtime_settings = context.window_manager.session
|
|
||||||
|
|
||||||
# Create a simple row.
|
#USER LIST
|
||||||
row = layout.row()
|
row = layout.row()
|
||||||
box = row.box()
|
box = row.box()
|
||||||
split = box.split(factor=0.35)
|
split = box.split(factor=0.35)
|
||||||
split.label(text="user")
|
split.label(text="user")
|
||||||
split = split.split(factor=0.5)
|
split = split.split(factor=0.3)
|
||||||
split.label(text="location")
|
split.label(text="mode")
|
||||||
split.label(text="frame")
|
split.label(text="frame")
|
||||||
|
split.label(text="location")
|
||||||
split.label(text="ping")
|
split.label(text="ping")
|
||||||
|
|
||||||
row = layout.row()
|
row = layout.row()
|
||||||
layout.template_list("SESSION_UL_users", "", context.window_manager,
|
layout.template_list("SESSION_UL_users", "", context.window_manager,
|
||||||
"online_users", context.window_manager, "user_index")
|
"online_users", context.window_manager, "user_index")
|
||||||
|
|
||||||
|
#OPERATOR ON USER
|
||||||
if active_user != 0 and active_user.username != settings.username:
|
if active_user != 0 and active_user.username != settings.username:
|
||||||
row = layout.row()
|
row = layout.row()
|
||||||
user_operations = row.split()
|
user_operations = row.split()
|
||||||
if session.state['STATE'] == STATE_ACTIVE:
|
if session.state == STATE_ACTIVE:
|
||||||
|
|
||||||
user_operations.alert = context.window_manager.session.time_snap_running
|
user_operations.alert = context.window_manager.session.time_snap_running
|
||||||
user_operations.operator(
|
user_operations.operator(
|
||||||
@ -381,6 +421,8 @@ class SESSION_UL_users(bpy.types.UIList):
|
|||||||
ping = '-'
|
ping = '-'
|
||||||
frame_current = '-'
|
frame_current = '-'
|
||||||
scene_current = '-'
|
scene_current = '-'
|
||||||
|
mode_current = '-'
|
||||||
|
mode_icon = 'BLANK1'
|
||||||
status_icon = 'BLANK1'
|
status_icon = 'BLANK1'
|
||||||
if session:
|
if session:
|
||||||
user = session.online_users.get(item.username)
|
user = session.online_users.get(item.username)
|
||||||
@ -390,59 +432,51 @@ class SESSION_UL_users(bpy.types.UIList):
|
|||||||
if metadata and 'frame_current' in metadata:
|
if metadata and 'frame_current' in metadata:
|
||||||
frame_current = str(metadata.get('frame_current','-'))
|
frame_current = str(metadata.get('frame_current','-'))
|
||||||
scene_current = metadata.get('scene_current','-')
|
scene_current = metadata.get('scene_current','-')
|
||||||
|
mode_current = metadata.get('mode_current','-')
|
||||||
|
if mode_current == "OBJECT" :
|
||||||
|
mode_icon = "OBJECT_DATAMODE"
|
||||||
|
elif mode_current == "EDIT_MESH" :
|
||||||
|
mode_icon = "EDITMODE_HLT"
|
||||||
|
elif mode_current == 'EDIT_CURVE':
|
||||||
|
mode_icon = "CURVE_DATA"
|
||||||
|
elif mode_current == 'EDIT_SURFACE':
|
||||||
|
mode_icon = "SURFACE_DATA"
|
||||||
|
elif mode_current == 'EDIT_TEXT':
|
||||||
|
mode_icon = "FILE_FONT"
|
||||||
|
elif mode_current == 'EDIT_ARMATURE':
|
||||||
|
mode_icon = "ARMATURE_DATA"
|
||||||
|
elif mode_current == 'EDIT_METABALL':
|
||||||
|
mode_icon = "META_BALL"
|
||||||
|
elif mode_current == 'EDIT_LATTICE':
|
||||||
|
mode_icon = "LATTICE_DATA"
|
||||||
|
elif mode_current == 'POSE':
|
||||||
|
mode_icon = "POSE_HLT"
|
||||||
|
elif mode_current == 'SCULPT':
|
||||||
|
mode_icon = "SCULPTMODE_HLT"
|
||||||
|
elif mode_current == 'PAINT_WEIGHT':
|
||||||
|
mode_icon = "WPAINT_HLT"
|
||||||
|
elif mode_current == 'PAINT_VERTEX':
|
||||||
|
mode_icon = "VPAINT_HLT"
|
||||||
|
elif mode_current == 'PAINT_TEXTURE':
|
||||||
|
mode_icon = "TPAINT_HLT"
|
||||||
|
elif mode_current == 'PARTICLE':
|
||||||
|
mode_icon = "PARTICLES"
|
||||||
|
elif mode_current == 'PAINT_GPENCIL' or mode_current =='EDIT_GPENCIL' or mode_current =='SCULPT_GPENCIL' or mode_current =='WEIGHT_GPENCIL' or mode_current =='VERTEX_GPENCIL':
|
||||||
|
mode_icon = "GREASEPENCIL"
|
||||||
if user['admin']:
|
if user['admin']:
|
||||||
status_icon = 'FAKE_USER_ON'
|
status_icon = 'FAKE_USER_ON'
|
||||||
split = layout.split(factor=0.35)
|
split = layout.split(factor=0.35)
|
||||||
split.label(text=item.username, icon=status_icon)
|
split.label(text=item.username, icon=status_icon)
|
||||||
split = split.split(factor=0.5)
|
split = split.split(factor=0.3)
|
||||||
split.label(text=scene_current)
|
split.label(icon=mode_icon)
|
||||||
split.label(text=frame_current)
|
split.label(text=frame_current)
|
||||||
|
split.label(text=scene_current)
|
||||||
split.label(text=ping)
|
split.label(text=ping)
|
||||||
|
|
||||||
|
|
||||||
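Note: the elif chain just above maps each user's mode_current string to a Blender icon identifier. A dictionary lookup expresses the same mapping more compactly; the sketch below is only an illustration of that alternative, reusing the icon names from the diff above (the helper name mode_to_icon is hypothetical, not part of the addon):

    # Hypothetical helper: map a Blender context.mode string to an icon name.
    MODE_ICONS = {
        'OBJECT': 'OBJECT_DATAMODE',
        'EDIT_MESH': 'EDITMODE_HLT',
        'EDIT_CURVE': 'CURVE_DATA',
        'EDIT_SURFACE': 'SURFACE_DATA',
        'EDIT_TEXT': 'FILE_FONT',
        'EDIT_ARMATURE': 'ARMATURE_DATA',
        'EDIT_METABALL': 'META_BALL',
        'EDIT_LATTICE': 'LATTICE_DATA',
        'POSE': 'POSE_HLT',
        'SCULPT': 'SCULPTMODE_HLT',
        'PAINT_WEIGHT': 'WPAINT_HLT',
        'PAINT_VERTEX': 'VPAINT_HLT',
        'PAINT_TEXTURE': 'TPAINT_HLT',
        'PARTICLE': 'PARTICLES',
    }

    def mode_to_icon(mode_current: str) -> str:
        # Every grease-pencil sub-mode shares one icon in the user list.
        if mode_current.endswith('_GPENCIL'):
            return 'GREASEPENCIL'
        return MODE_ICONS.get(mode_current, 'BLANK1')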
-class SESSION_PT_presence(bpy.types.Panel):
-    bl_idname = "MULTIUSER_MODULE_PT_panel"
-    bl_label = "Presence overlay"
-    bl_space_type = 'VIEW_3D'
-    bl_region_type = 'UI'
-    bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
-    bl_options = {'DEFAULT_CLOSED'}
-
-    @classmethod
-    def poll(cls, context):
-        return not session \
-            or (session and session.state['STATE'] in [STATE_INITIAL, STATE_ACTIVE])
-
-    def draw_header(self, context):
-        self.layout.prop(context.window_manager.session,
-                         "enable_presence", text="",icon='OVERLAY')
-
-    def draw(self, context):
-        layout = self.layout
-
-        settings = context.window_manager.session
-        pref = get_preferences()
-        layout.active = settings.enable_presence
-        col = layout.column()
-        col.prop(settings, "presence_show_session_status")
-        row = col.column()
-        row.active = settings.presence_show_session_status
-        row.prop(pref, "presence_hud_scale", expand=True)
-        row = col.column(align=True)
-        row.active = settings.presence_show_session_status
-        row.prop(pref, "presence_hud_hpos", expand=True)
-        row.prop(pref, "presence_hud_vpos", expand=True)
-        col.prop(settings, "presence_show_selected")
-        col.prop(settings, "presence_show_user")
-        row = layout.column()
-        row.active = settings.presence_show_user
-        row.prop(settings, "presence_show_far_user")
-
-
 def draw_property(context, parent, property_uuid, level=0):
     settings = get_preferences()
-    runtime_settings = context.window_manager.session
-    item = session.get(uuid=property_uuid)
+    item = session.repository.graph.get(property_uuid)
+    type_id = item.data.get('type_id')

     area_msg = parent.row(align=True)

     if item.state == ERROR:
@@ -453,23 +487,25 @@ def draw_property(context, parent, property_uuid, level=0):
         line = area_msg.box()

     name = item.data['name'] if item.data else item.uuid
+    icon = settings.supported_datablocks[type_id].icon if type_id else 'ERROR'
     detail_item_box = line.row(align=True)

-    detail_item_box.label(text="",
-                          icon=settings.supported_datablocks[item.str_type].icon)
+    detail_item_box.label(text="", icon=icon)
     detail_item_box.label(text=f"{name}")

     # Operations

     have_right_to_modify = (item.owner == settings.username or \
         item.owner == RP_COMMON) and item.state != ERROR

+    from multi_user import icons
+    sync_status = icons.icons_col["repository_push"] #TODO: Link all icons to the right sync (push/merge/issue). For issue use "UNLINKED" for icon
+    # sync_status = icons.icons_col["repository_merge"]
+
     if have_right_to_modify:
         detail_item_box.operator(
             "session.commit",
             text="",
-            icon='TRIA_UP').target = item.uuid
+            icon_value=sync_status.icon_id).target = item.uuid
         detail_item_box.separator()

         if item.state in [FETCHED, UP]:
@@ -501,12 +537,40 @@ def draw_property(context, parent, property_uuid, level=0):
     else:
         detail_item_box.label(text="", icon="DECORATE_LOCKED")

+
+class SESSION_PT_sync(bpy.types.Panel):
+    bl_idname = "MULTIUSER_SYNC_PT_panel"
+    bl_label = "Synchronize"
+    bl_space_type = 'VIEW_3D'
+    bl_region_type = 'UI'
+    bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
+    bl_options = {'DEFAULT_CLOSED'}
+
+    @classmethod
+    def poll(cls, context):
+        return session \
+            and session.state in [STATE_ACTIVE]
+
+    def draw_header(self, context):
+        self.layout.label(text="", icon='UV_SYNC_SELECT')
+
+    def draw(self, context):
+        layout = self.layout
+        settings = get_preferences()
+
+        row= layout.row()
+        row = row.grid_flow(row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
+        row.prop(settings.sync_flags, "sync_render_settings",text="",icon_only=True, icon='SCENE')
+        row.prop(settings.sync_flags, "sync_during_editmode", text="",icon_only=True, icon='EDITMODE_HLT')
+        row.prop(settings.sync_flags, "sync_active_camera", text="",icon_only=True, icon='VIEW_CAMERA')
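The new Synchronize panel above exposes the three sync flags as icon-only toggles packed into a grid_flow. Below is a minimal, self-contained sketch of the same layout idea; the panel name and the scene properties it toggles are placeholders chosen only because they exist on every Blender scene, not the addon's own settings:

    import bpy

    class EXAMPLE_PT_sync_toggles(bpy.types.Panel):
        """Sketch: a row of icon-only toggle buttons built with grid_flow."""
        bl_idname = "EXAMPLE_PT_sync_toggles"
        bl_label = "Sync toggles"
        bl_space_type = 'VIEW_3D'
        bl_region_type = 'UI'
        bl_category = "Example"

        def draw(self, context):
            flow = self.layout.row().grid_flow(row_major=True, columns=0,
                                               even_columns=True, even_rows=False,
                                               align=True)
            # Each prop is drawn as a bare icon button, mirroring the sync flags above.
            flow.prop(context.scene, "use_nodes", text="", icon_only=True, icon='SCENE')
            flow.prop(context.scene, "use_gravity", text="", icon_only=True, icon='EDITMODE_HLT')
            flow.prop(context.scene.render, "use_motion_blur", text="", icon_only=True, icon='VIEW_CAMERA')

Registering it with bpy.utils.register_class(EXAMPLE_PT_sync_toggles) from Blender's scripting console is enough to see the toggle row in the 3D viewport sidebar.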
 class SESSION_PT_repository(bpy.types.Panel):
     bl_idname = "MULTIUSER_PROPERTIES_PT_panel"
     bl_label = "Repository"
     bl_space_type = 'VIEW_3D'
     bl_region_type = 'UI'
     bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
+    bl_options = {'DEFAULT_CLOSED'}

     @classmethod
     def poll(cls, context):
@@ -519,8 +583,8 @@ class SESSION_PT_repository(bpy.types.Panel):
             admin = usr['admin']
         return hasattr(context.window_manager, 'session') and \
             session and \
-            (session.state['STATE'] == STATE_ACTIVE or \
-             session.state['STATE'] == STATE_LOBBY and admin)
+            session.state == STATE_ACTIVE and \
+            not settings.sidebar_repository_shown

     def draw_header(self, context):
         self.layout.label(text="", icon='OUTLINER_OB_GROUP_INSTANCE')
@@ -534,55 +598,37 @@ class SESSION_PT_repository(bpy.types.Panel):

         usr = session.online_users.get(settings.username)

-        row = layout.row()
-
-        if session.state['STATE'] == STATE_ACTIVE:
+        if session.state == STATE_ACTIVE:
             if 'SessionBackupTimer' in registry:
+                row = layout.row()
                 row.alert = True
                 row.operator('session.cancel_autosave', icon="CANCEL")
                 row.alert = False
-            else:
-                row.operator('session.save', icon="FILE_TICK")
+            # else:
+            #     row.operator('session.save', icon="FILE_TICK")

-            flow = layout.grid_flow(
-                row_major=True,
-                columns=0,
-                even_columns=True,
-                even_rows=False,
-                align=True)
-
-            for item in settings.supported_datablocks:
-                col = flow.column(align=True)
-                col.prop(item, "use_as_filter", text="", icon=item.icon)
-
-            row = layout.row(align=True)
-            row.prop(runtime_settings, "filter_owned", text="Show only owned")
-
-            row = layout.row(align=True)
+            box = layout.box()
+            row = box.row()
+            row.prop(runtime_settings, "filter_owned", text="Only show owned data blocks", icon_only=True, icon="DECORATE_UNLOCKED")
+            row = box.row()
+            row.prop(runtime_settings, "filter_name", text="Filter")
+            row = box.row()

             # Properties
-            types_filter = [t.type_name for t in settings.supported_datablocks
-                            if t.use_as_filter]
-
-            key_to_filter = session.list(
-                filter_owner=settings.username) if runtime_settings.filter_owned else session.list()
-
-            client_keys = [key for key in key_to_filter
-                           if session.get(uuid=key).str_type
-                           in types_filter]
+            owned_nodes = [k for k, v in session.repository.graph.items() if v.owner==settings.username]

-            if client_keys:
+            filtered_node = owned_nodes if runtime_settings.filter_owned else list(session.repository.graph.keys())
+
+            if runtime_settings.filter_name:
+                filtered_node = [n for n in filtered_node if runtime_settings.filter_name.lower() in session.repository.graph.get(n).data.get('name').lower()]
+
+            if filtered_node:
                 col = layout.column(align=True)
-                for key in client_keys:
+                for key in filtered_node:
                     draw_property(context, col, key)
             else:
-                row.label(text="Empty")
-
-        elif session.state['STATE'] == STATE_LOBBY and usr and usr['admin']:
-            row.operator("session.init", icon='TOOL_SETTINGS', text="Init")
-        else:
-            row.label(text="Waiting to start")
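The rewritten Repository panel above filters the keys of session.repository.graph in two passes: first by owner, then by a case-insensitive name substring. The same logic can be exercised outside Blender with plain dictionaries; in the sketch below the node class and graph content are made-up stand-ins for the replication types, reduced to what the filter needs:

    from dataclasses import dataclass, field

    @dataclass
    class FakeNode:
        # Stand-in for a replication graph node: an owner plus a data dict with a name.
        owner: str
        data: dict = field(default_factory=dict)

    graph = {
        "uuid-1": FakeNode("alice", {"name": "Cube"}),
        "uuid-2": FakeNode("bob", {"name": "Camera"}),
        "uuid-3": FakeNode("alice", {"name": "CubeMaterial"}),
    }

    def filter_nodes(graph, username, filter_owned=True, filter_name=""):
        owned = [k for k, v in graph.items() if v.owner == username]
        nodes = owned if filter_owned else list(graph.keys())
        if filter_name:
            nodes = [k for k in nodes
                     if filter_name.lower() in graph[k].data.get("name", "").lower()]
        return nodes

    print(filter_nodes(graph, "alice", filter_name="cube"))  # ['uuid-1', 'uuid-3']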
 class VIEW3D_PT_overlay_session(bpy.types.Panel):
     bl_space_type = 'VIEW_3D'
@@ -597,37 +643,74 @@ class VIEW3D_PT_overlay_session(bpy.types.Panel):
     def draw(self, context):
         layout = self.layout

-        view = context.space_data
-        overlay = view.overlay
-        display_all = overlay.show_overlays
-
-        col = layout.column()
-
-        row = col.row(align=True)
         settings = context.window_manager.session
+        pref = get_preferences()
         layout.active = settings.enable_presence
-        col = layout.column()
-        col.prop(settings, "presence_show_session_status")
-        col.prop(settings, "presence_show_selected")
-        col.prop(settings, "presence_show_user")

-        row = layout.column()
-        row.active = settings.presence_show_user
-        row.prop(settings, "presence_show_far_user")
+        row = layout.row()
+        row.prop(settings, "enable_presence",text="Presence Overlay")
+
+        row = layout.row()
+        row.prop(settings, "presence_show_selected",text="Selected Objects")
+
+        row = layout.row(align=True)
+        row.prop(settings, "presence_show_user", text="Users camera")
+        row.prop(settings, "presence_show_mode", text="Users mode")
+
+        col = layout.column()
+        if settings.presence_show_mode or settings.presence_show_user:
+            row = col.column()
+            row.prop(pref, "presence_text_distance", expand=True)
+
+        row = col.column()
+        row.prop(settings, "presence_show_far_user", text="Users on different scenes")
+
+        col.prop(settings, "presence_show_session_status")
+        if settings.presence_show_session_status :
+            split = layout.split()
+            text_pos = split.column(align=True)
+            text_pos.active = settings.presence_show_session_status
+            text_pos.prop(pref, "presence_hud_hpos", expand=True)
+            text_pos.prop(pref, "presence_hud_vpos", expand=True)
+            text_scale = split.column()
+            text_scale.active = settings.presence_show_session_status
+            text_scale.prop(pref, "presence_hud_scale", expand=True)
+
+
+class SESSION_UL_network(bpy.types.UIList):
+    def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index, flt_flag):
+        settings = get_preferences()
+        server_name = '-'
+        server_status = 'BLANK1'
+        server_private = 'BLANK1'
+
+        server_name = item.server_name
+
+        split = layout.split(factor=0.7)
+        if item.is_private:
+            server_private = 'LOCKED'
+            split.label(text=server_name, icon=server_private)
+        else:
+            split.label(text=server_name)
+
+        from multi_user import icons
+        server_status = icons.icons_col["server_offline"]
+        if item.is_online:
+            server_status = icons.icons_col["server_online"]
+        split.label(icon_value=server_status.icon_id)


 classes = (
     SESSION_UL_users,
+    SESSION_UL_network,
     SESSION_PT_settings,
-    SESSION_PT_settings_user,
-    SESSION_PT_settings_network,
-    SESSION_PT_presence,
+    SESSION_PT_host_settings,
     SESSION_PT_advanced_settings,
     SESSION_PT_user,
+    SESSION_PT_sync,
     SESSION_PT_repository,
     VIEW3D_PT_overlay_session,
 )


 register, unregister = bpy.utils.register_classes_factory(classes)

 if __name__ == "__main__":
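All of the panels touched in this part of the diff share the same bpy.types.Panel skeleton: an optional poll() classmethod that decides whether the panel is shown, draw_header() for the header icon, draw() for the body, and a single register/unregister pair produced by bpy.utils.register_classes_factory, exactly as in the classes tuple above. A minimal, hedged sketch of that skeleton (the class name, category and poll condition are illustrative only, not the addon's):

    import bpy

    class EXAMPLE_PT_skeleton(bpy.types.Panel):
        """Sketch of the poll/draw_header/draw structure used by the sidebar panels."""
        bl_idname = "EXAMPLE_PT_skeleton"
        bl_label = "Example panel"
        bl_space_type = 'VIEW_3D'
        bl_region_type = 'UI'
        bl_category = "Example"

        @classmethod
        def poll(cls, context):
            # Hide the panel entirely when there is no active object.
            return context.active_object is not None

        def draw_header(self, context):
            self.layout.label(text="", icon='PREFERENCES')

        def draw(self, context):
            self.layout.row().label(text=f"Active: {context.active_object.name}")

    classes = (EXAMPLE_PT_skeleton,)
    register, unregister = bpy.utils.register_classes_factory(classes)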
@@ -36,8 +36,16 @@ from replication.constants import (STATE_ACTIVE, STATE_AUTH,
                                    STATE_INITIAL, STATE_SRV_SYNC,
                                    STATE_WAITING, STATE_QUITTING,
                                    STATE_LOBBY,
-                                   STATE_LAUNCHING_SERVICES)
+                                   CONNECTING)

+CLEARED_DATABLOCKS = ['actions', 'armatures', 'cache_files', 'cameras',
+                      'collections', 'curves', 'filepath', 'fonts',
+                      'grease_pencils', 'images', 'lattices', 'libraries',
+                      'lightprobes', 'lights', 'linestyles', 'masks',
+                      'materials', 'meshes', 'metaballs', 'movieclips',
+                      'node_groups', 'objects', 'paint_curves', 'particles',
+                      'scenes', 'shape_keys', 'sounds', 'speakers', 'texts',
+                      'textures', 'volumes', 'worlds']

 def find_from_attr(attr_name, attr_value, list):
     for item in list:
@@ -92,7 +100,7 @@ def get_state_str(state):
         state_str = 'OFFLINE'
     elif state == STATE_QUITTING:
         state_str = 'QUITTING'
-    elif state == STATE_LAUNCHING_SERVICES:
+    elif state == CONNECTING:
         state_str = 'LAUNCHING SERVICES'
     elif state == STATE_LOBBY:
         state_str = 'LOBBY'
@@ -101,17 +109,25 @@ def get_state_str(state):


 def clean_scene():
-    for type_name in dir(bpy.data):
-        try:
-            type_collection = getattr(bpy.data, type_name)
-            for item in type_collection:
+    for type_name in CLEARED_DATABLOCKS:
+        sub_collection_to_avoid = [
+            bpy.data.linestyles.get('LineStyle'),
+            bpy.data.materials.get('Dots Stroke')
+        ]
+
+        type_collection = getattr(bpy.data, type_name)
+        items_to_remove = [i for i in type_collection if i not in sub_collection_to_avoid]
+        for item in items_to_remove:
+            try:
                 type_collection.remove(item)
-        except:
-            continue
+                logging.info(item.name)
+            except:
+                continue

     # Clear sequencer
     bpy.context.scene.sequence_editor_clear()


 def get_selected_objects(scene, active_view_layer):
     return [obj.uuid for obj in scene.objects if obj.select_get(view_layer=active_view_layer)]
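The reworked clean_scene() above replaces the blind walk over dir(bpy.data) with the explicit CLEARED_DATABLOCKS whitelist and skips Blender's built-in 'LineStyle' linestyle and 'Dots Stroke' material. The condensed sketch below restates that removal loop so it can be pasted into Blender's Python console on its own; it is a simplification of the diff, not the addon's exact code, and the whitelist here is deliberately shortened:

    import logging
    import bpy

    # Shortened whitelist; the diff above lists every datablock collection to clear.
    CLEARED_DATABLOCKS = ['meshes', 'materials', 'objects', 'collections']

    def clean_scene():
        # Datablocks Blender creates by default must survive the purge.
        keep = [bpy.data.linestyles.get('LineStyle'),
                bpy.data.materials.get('Dots Stroke')]
        for type_name in CLEARED_DATABLOCKS:
            collection = getattr(bpy.data, type_name)
            for item in [i for i in collection if i not in keep]:
                try:
                    collection.remove(item)
                except Exception:
                    logging.info("skipping a datablock that could not be removed")
                    continue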
@@ -1,7 +1,7 @@
 # Download base image debian jessie
 FROM python:slim

-ARG replication_version=0.1.13
+ARG replication_version=0.9.1
 ARG version=0.1.1

 # Infos
@@ -22,4 +22,4 @@ RUN pip install replication==$replication_version

 # Run the server with parameters
 ENTRYPOINT ["/bin/sh", "-c"]
-CMD ["python3 -m replication.server -pwd ${password} -p ${port} -t ${timeout} -l ${log_level} -lf ${log_file}"]
+CMD ["replication.serve -apwd ${password} -spwd '' -p ${port} -t ${timeout} -l ${log_level} -lf ${log_file}"]
@@ -1,4 +1,4 @@
 import re

-init_py = open("multi_user/__init__.py").read()
+init_py = open("multi_user/libs/replication/replication/__init__.py").read()
 print(re.search("\d+\.\d+\.\d+\w\d+|\d+\.\d+\.\d+", init_py).group(0))
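The change above only points the version-extraction script at the replication submodule's __init__.py; the regular expression itself is untouched. A standalone illustration of what that regex extracts, run against an in-memory stand-in for the file contents:

    import re

    # Stand-in text; the real script reads an __init__.py from disk.
    init_py = '__version__ = "0.9.1"'

    match = re.search(r"\d+\.\d+\.\d+\w\d+|\d+\.\d+\.\d+", init_py)
    print(match.group(0) if match else "no version found")  # -> 0.9.1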
@@ -13,7 +13,7 @@ def main():
     if len(sys.argv) > 2:
         blender_rev = sys.argv[2]
     else:
-        blender_rev = "2.92.0"
+        blender_rev = "2.93.0"

     try:
         exit_val = BAT.test_blender_addon(addon_path=addon, blender_revision=blender_rev)
@@ -8,6 +8,7 @@ import random
 from multi_user.bl_types.bl_action import BlAction

 INTERPOLATION = ['CONSTANT', 'LINEAR', 'BEZIER', 'SINE', 'QUAD', 'CUBIC', 'QUART', 'QUINT', 'EXPO', 'CIRC', 'BACK', 'BOUNCE', 'ELASTIC']
+FMODIFIERS = ['GENERATOR', 'FNGENERATOR', 'ENVELOPE', 'CYCLES', 'NOISE', 'LIMITS', 'STEPPED']

 # @pytest.mark.parametrize('blendname', ['test_action.blend'])
 def test_action(clear_blend):
@@ -22,17 +23,20 @@ def test_action(clear_blend):
         point.co[1] = random.randint(-10,10)
         point.interpolation = INTERPOLATION[random.randint(0, len(INTERPOLATION)-1)]

+    for mod_type in FMODIFIERS:
+        fcurve_sample.modifiers.new(mod_type)
+
     bpy.ops.mesh.primitive_plane_add()
     bpy.data.objects[0].animation_data_create()
     bpy.data.objects[0].animation_data.action = datablock

     # Test
     implementation = BlAction()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.actions.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
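Every datablock test that follows applies the same roundtrip: dump the datablock, delete it, construct a new one from the dump, load the dump into it, dump again and compare with DeepDiff; the only real change in this part of the diff is the rename from the underscore-prefixed _dump/_construct/_load to the public dump/construct/load methods. A hedged, generic version of that roundtrip (the helper name is illustrative, it is not part of the test suite):

    from deepdiff import DeepDiff

    def roundtrip(implementation, datablock, collection):
        """Serialize a datablock, delete it, rebuild it, and compare the two dumps."""
        expected = implementation.dump(datablock)
        collection.remove(datablock)

        rebuilt = implementation.construct(expected)
        implementation.load(expected, rebuilt)
        result = implementation.dump(rebuilt)

        # An empty DeepDiff means the rebuilt datablock serializes identically.
        assert not DeepDiff(expected, result)

For example, roundtrip(BlCamera(), bpy.data.cameras[0], bpy.data.cameras) would reproduce the body of test_camera below.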
@@ -12,11 +12,11 @@ def test_armature(clear_blend):
     datablock = bpy.data.armatures[0]

     implementation = BlArmature()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.armatures.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -15,11 +15,11 @@ def test_camera(clear_blend, camera_type):
     datablock.type = camera_type

     camera_dumper = BlCamera()
-    expected = camera_dumper._dump(datablock)
+    expected = camera_dumper.dump(datablock)
     bpy.data.cameras.remove(datablock)

-    test = camera_dumper._construct(expected)
-    camera_dumper._load(expected, test)
-    result = camera_dumper._dump(test)
+    test = camera_dumper.construct(expected)
+    camera_dumper.load(expected, test)
+    result = camera_dumper.dump(test)

     assert not DeepDiff(expected, result)
@@ -23,11 +23,11 @@ def test_collection(clear_blend):

     # Test
     implementation = BlCollection()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.collections.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -19,11 +19,11 @@ def test_curve(clear_blend, curve_type):
     datablock = bpy.data.curves[0]

     implementation = BlCurve()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.curves.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -13,11 +13,11 @@ def test_gpencil(clear_blend):
     datablock = bpy.data.grease_pencils[0]

     implementation = BlGpencil()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.grease_pencils.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -13,11 +13,11 @@ def test_lattice(clear_blend):
     datablock = bpy.data.lattices[0]

     implementation = BlLattice()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.lattices.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -14,11 +14,11 @@ def test_lightprobes(clear_blend, lightprobe_type):

     blender_light = bpy.data.lightprobes[0]
     lightprobe_dumper = BlLightprobe()
-    expected = lightprobe_dumper._dump(blender_light)
+    expected = lightprobe_dumper.dump(blender_light)
     bpy.data.lightprobes.remove(blender_light)

-    test = lightprobe_dumper._construct(expected)
-    lightprobe_dumper._load(expected, test)
-    result = lightprobe_dumper._dump(test)
+    test = lightprobe_dumper.construct(expected)
+    lightprobe_dumper.load(expected, test)
+    result = lightprobe_dumper.dump(test)

     assert not DeepDiff(expected, result)
@@ -13,11 +13,11 @@ def test_light(clear_blend, light_type):

     blender_light = bpy.data.lights[0]
     light_dumper = BlLight()
-    expected = light_dumper._dump(blender_light)
+    expected = light_dumper.dump(blender_light)
     bpy.data.lights.remove(blender_light)

-    test = light_dumper._construct(expected)
-    light_dumper._load(expected, test)
-    result = light_dumper._dump(test)
+    test = light_dumper.construct(expected)
+    light_dumper.load(expected, test)
+    result = light_dumper.dump(test)

     assert not DeepDiff(expected, result)
@@ -17,12 +17,12 @@ def test_material_nodes(clear_blend):
         datablock.node_tree.nodes.new(ntype)

     implementation = BlMaterial()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.materials.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)

@@ -32,11 +32,11 @@ def test_material_gpencil(clear_blend):
     bpy.data.materials.create_gpencil_data(datablock)

     implementation = BlMaterial()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.materials.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -18,11 +18,11 @@ def test_mesh(clear_blend, mesh_type):

     # Test
     implementation = BlMesh()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.meshes.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -13,11 +13,11 @@ def test_metaball(clear_blend, metaballs_type):

     datablock = bpy.data.metaballs[0]
     dumper = BlMetaball()
-    expected = dumper._dump(datablock)
+    expected = dumper.dump(datablock)
     bpy.data.metaballs.remove(datablock)

-    test = dumper._construct(expected)
-    dumper._load(expected, test)
-    result = dumper._dump(test)
+    test = dumper.construct(expected)
+    dumper.load(expected, test)
+    result = dumper.dump(test)

     assert not DeepDiff(expected, result)
@@ -65,11 +65,11 @@ def test_object(clear_blend):
     datablock.shape_key_add(name='shape2')

     implementation = BlObject()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.objects.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)
     print(DeepDiff(expected, result))
     assert not DeepDiff(expected, result)
@@ -12,14 +12,16 @@ def test_scene(clear_blend):
     get_preferences().sync_flags.sync_render_settings = True

     datablock = bpy.data.scenes.new("toto")
+    datablock.timeline_markers.new('toto', frame=10)
+    datablock.timeline_markers.new('tata', frame=1)
     datablock.view_settings.use_curve_mapping = True
     # Test
     implementation = BlScene()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.scenes.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -12,11 +12,11 @@ def test_speaker(clear_blend):
     datablock = bpy.data.speakers[0]

     implementation = BlSpeaker()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.speakers.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -14,11 +14,11 @@ def test_texture(clear_blend, texture_type):
     datablock = bpy.data.textures.new('test', texture_type)

     implementation = BlTexture()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.textures.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -11,11 +11,11 @@ def test_volume(clear_blend):
     datablock = bpy.data.volumes.new("Test")

     implementation = BlVolume()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.volumes.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -12,11 +12,11 @@ def test_world(clear_blend):
     datablock.use_nodes = True

     implementation = BlWorld()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
     bpy.data.worlds.remove(datablock)

-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

     assert not DeepDiff(expected, result)
@@ -1,20 +0,0 @@
-import os
-
-import pytest
-from deepdiff import DeepDiff
-
-import bpy
-import random
-
-
-def test_start_session():
-    result = bpy.ops.session.start()
-
-    assert 'FINISHED' in result
-
-
-def test_stop_session():
-
-    result = bpy.ops.session.stop()
-
-    assert 'FINISHED' in result