Compare commits


2 Commits

Author SHA1 Message Date
761fa26ed7 Revert "Merge branch '45-vse-support' into 'master'"
This reverts merge request !68
2020-11-11 19:31:06 +00:00
d42ed789e7 Merge branch '45-vse-support' into 'master'
Resolve "VSE Support"

Closes #45

See merge request slumber/multi-user!68
2020-11-11 19:30:37 +00:00
118 changed files with 1980 additions and 4652 deletions

.gitignore

@@ -14,4 +14,3 @@ _build
 # ignore generated zip generated from blender_addon_tester
 *.zip
-libs

@@ -2,12 +2,9 @@ stages:
   - test
   - build
   - deploy
-  - doc

 include:
   - local: .gitlab/ci/test.gitlab-ci.yml
   - local: .gitlab/ci/build.gitlab-ci.yml
   - local: .gitlab/ci/deploy.gitlab-ci.yml
-  - local: .gitlab/ci/doc.gitlab-ci.yml

@@ -1,6 +1,5 @@
 build:
   stage: build
-  needs: ["test"]
   image: debian:stable-slim
   script:
     - rm -rf tests .git .gitignore script
@@ -8,5 +7,7 @@ build:
     name: multi_user
     paths:
       - multi_user
-  variables:
-    GIT_SUBMODULE_STRATEGY: recursive
+  only:
+    refs:
+      - master
+      - develop

@@ -1,11 +1,9 @@
 deploy:
   stage: deploy
-  needs: ["build"]
   image: slumber/docker-python
   variables:
     DOCKER_DRIVER: overlay2
     DOCKER_TLS_CERTDIR: "/certs"
-    GIT_SUBMODULE_STRATEGY: recursive

   services:
     - docker:19.03.12-dind
@@ -17,5 +15,9 @@ deploy:
     - docker build --build-arg replication_version=${RP_VERSION} --build-arg version={VERSION} -t registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} ./scripts/docker_server
     - echo "Pushing to gitlab registry ${VERSION}"
    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
-    - docker tag registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} registry.gitlab.com/slumber/multi-user/multi-user-server:${CI_COMMIT_REF_NAME}
-    - docker push registry.gitlab.com/slumber/multi-user/multi-user-server
+    - docker push registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION}
+  only:
+    refs:
+      - master
+      - develop
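An aside on the build step recorded above: both sides pass ``--build-arg version={VERSION}`` without a ``$``, so Docker receives the literal string ``{VERSION}``; that looks like a latent bug in the file itself rather than a diff artifact. A hedged sketch of reproducing the build locally, assuming example values for the two variables the pipeline normally supplies:

.. code-block:: bash

   # Hypothetical local dry-run of the deploy job's build step.
   # VERSION and RP_VERSION are assumed example values.
   export VERSION=0.1.0 RP_VERSION=0.0.21a15
   docker build \
       --build-arg replication_version=${RP_VERSION} \
       --build-arg version=${VERSION} \
       -t registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} \
       ./scripts/docker_server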

@@ -1,16 +0,0 @@
-pages:
-  stage: doc
-  needs: ["deploy"]
-  image: python
-  script:
-    - pip install -U sphinx sphinx_rtd_theme sphinx-material
-    - sphinx-build -b html ./docs public
-  artifacts:
-    paths:
-      - public
-  only:
-    refs:
-      - master
-      - develop

@@ -3,5 +3,3 @@ test:
   image: slumber/blender-addon-testing:latest
   script:
     - python3 scripts/test_addon.py
-  variables:
-    GIT_SUBMODULE_STRATEGY: recursive

.gitmodules

@@ -1,3 +0,0 @@
-[submodule "multi_user/libs/replication"]
-	path = multi_user/libs/replication
-	url = https://gitlab.com/slumber/replication.git
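This revert drops the replication submodule (together with the ``GIT_SUBMODULE_STRATEGY: recursive`` CI variables removed above). Anyone checking out the pre-revert tree therefore has to populate the submodule explicitly; a sketch using standard git commands:

.. code-block:: bash

   # Fresh clone with multi_user/libs/replication populated
   git clone --recurse-submodules https://gitlab.com/slumber/multi-user.git

   # Or inside an existing checkout of the pre-revert tree
   git submodule update --init --recursive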

@@ -125,65 +125,3 @@ All notable changes to this project will be documented in this file.
 - Packed image save error
 - Material replication
 - UI spelling errors (@brybalicious)
-
-## [0.2.0] - 2020-12-17
-
-### Added
-
-- Documentation `Troubleshouting` section (@brybalicious)
-- Documentation `Update` section (@brybalicious)
-- Documentation `Cloud Hosting Walkthrough` (@brybalicious)
-- Support DNS name
-- Sync annotations
-- Sync volume objects
-- Sync material node_goups
-- Sync VSE
-- Sync grease pencil modifiers
-- Sync textures (modifier only)
-- Session status widget
-- Disconnection popup
-- Popup with disconnection reason
-
-### Changed
-
-- Improved GPencil performances
-
-### Fixed
-
-- Texture paint update
-- Various documentation fixes section (@brybalicious)
-- Empty and Light object selection highlights
-- Material renaming
-- Default material nodes input parameters
-- blender 2.91 python api compatibility
-
-## [0.3.0] - 2021-04-14
-
-### Added
-
-- Curve material support
-- Cycle visibility settings
-- Session save/load operator
-- Add new scene support
-- Physic initial support
-- Geometry node initial support
-- Blender 2.93 compatibility
-
-### Changed
-
-- Host documentation on Gitlab Page
-- Event driven update (from the blender deps graph)
-
-### Fixed
-
-- Vertex group assignation
-- Parent relation can't be removed
-- Separate object
-- Delete animation
-- Sync missing holdout option for grease pencil material
-- Sync missing `skin_vertices`
-- Exception access violation during Undo/Redo
-- Sync missing armature bone Roll
-- Sync missing driver data_path
-- Constraint replication

@@ -19,46 +19,38 @@ This tool aims to allow multiple users to work on the same scene over the network

 ## Usage

-See the [documentation](https://slumber.gitlab.io/multi-user/index.html) for details.
+See the [documentation](https://multi-user.readthedocs.io/en/latest/) for details.

-## Troubleshooting
-
-See the [troubleshooting guide](https://slumber.gitlab.io/multi-user/getting_started/troubleshooting.html) for tips on the most common issues.
-
 ## Current development status

 Currently, not all data-block are supported for replication over the wire. The following list summarizes the status for each ones.

 | Name | Status | Comment |
 | --- | :---: | :---: |
 | action | ✔️ | |
+| armature | ❗ | Not stable |
 | camera | ✔️ | |
 | collection | ✔️ | |
-| gpencil | ✔️ | |
+| curve | | Nurbs not supported |
+| gpencil | ✔️ | [Airbrush not supported](https://gitlab.com/slumber/multi-user/-/issues/123) |
 | image | ✔️ | |
 | mesh | ✔️ | |
 | material | ✔️ | |
-| node_groups | ✔️ | Material & Geometry only |
-| geometry nodes | ✔️ | |
 | metaball | ✔️ | |
 | object | ✔️ | |
 | texts | ✔️ | |
 | scene | ✔️ | |
 | world | ✔️ | |
-| volumes | ✔️ | |
 | lightprobes | ✔️ | |
-| physics | ✔️ | |
-| curve | ❗ | Nurbs surfaces not supported |
-| textures | ❗ | Supported for modifiers/materials/geo nodes only |
-| armature | ❗ | Not stable |
-| particles | ❗ | The cache isn't syncing. |
-| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
-| vse | ❗ | Mask and Clip not supported yet |
-| libraries | ❗ | Partial |
-| nla | ❌ | |
-| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
 | compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
+| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
+| nla | ❌ | |
+| volumes | ❌ | |
+| particles | ❌ | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24) |
+| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
+| vse | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
+| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
+| libraries | ❗ | Partial |

 ### Performance issues

@@ -76,7 +68,7 @@ I'm working on it.

 ## Contributing

-See [contributing section](https://slumber.gitlab.io/multi-user/ways_to_contribute.html) of the documentation.
+See [contributing section](https://multi-user.readthedocs.io/en/latest/ways_to_contribute.html) of the documentation.

 Feel free to [join the discord server](https://discord.gg/aBPvGws) to chat, seek help and contribute.

@@ -19,10 +19,10 @@ import sys

 project = 'multi-user'
 copyright = '2020, Swann Martinez'
-author = 'Swann Martinez, with contributions from Poochy'
+author = 'Swann Martinez'

 # The full version, including alpha/beta/rc tags
-release = '0.2.0'
+release = '0.1.0'

 # -- General configuration ---------------------------------------------------

@@ -9,14 +9,14 @@ Glossary

    administrator

-      *A session administrator can manage users (kick) and hold write access on
-      each datablock. They can also init a dedicated server repository.*
+      *A session administrator can manage users (kick) and have a write access on
+      each datablock. He could also init a dedicated server repository.*

    .. _session-status:

    session status

-      *Located in the title of the multi-user panel, the session status shows
-      you the connection state.*
+      *Located in the title of the multi-user panel, the session status show
+      you the connection state.*

       .. figure:: img/quickstart_session_status.png
@@ -24,7 +24,7 @@ Glossary

          Session status in panel title bar

-      All possible connection states are listed here with their meaning:*
+      All possible state are listed here with their meaning:*

       +--------------------+---------------------------------------------------------------------------------------------+
       | State              | Description                                                                                 |
@@ -33,7 +33,7 @@ Glossary
       +--------------------+---------------------------------------------------------------------------------------------+
       | FETCHING           | Dowloading snapshot from the server                                                         |
       +--------------------+---------------------------------------------------------------------------------------------+
-      | AUTHENTICATION     | Initial server authentication                                                               |
+      | AUTHENTIFICATION   | Initial server authentication                                                               |
       +--------------------+---------------------------------------------------------------------------------------------+
       | ONLINE             | Connected to the session                                                                    |
       +--------------------+---------------------------------------------------------------------------------------------+
@@ -55,5 +55,5 @@ Glossary

    common right

-      When a data block is under common right, it is available to everyone for modification.
-      The rights will be given to the user that selects it first.
+      When a data block is under common right, it is available for everyone to modification.
+      The rights will be given to the user that select it first.

Binary files not shown (16 documentation images): 15 deleted (sizes ranging from 3.2 KiB to 320 KiB) and 1 replaced (12 KiB → 9.7 KiB).

@@ -8,5 +8,4 @@ Getting started

    install
    quickstart
-   troubleshooting
    glossary

@@ -5,54 +5,9 @@ Installation

 .. hint::
     The process is the same for linux, mac and windows.

-1. Download `LATEST build <https://gitlab.com/slumber/multi-user/-/jobs/artifacts/develop/download?job=build>`_ or `STABLE build <https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build>`_.
+1. Download latest `release <https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build>`_ or `develop (unstable !) <https://gitlab.com/slumber/multi-user/-/jobs/artifacts/develop/download?job=build>`_ build.
 2. Run blender as administrator (to allow python dependencies auto-installation).
 3. Install **multi-user.zip** from your addon preferences.

 Once the addon is succesfully installed, I strongly recommend you to follow the :ref:`quickstart`
 tutorial.
-
-.. _update-version:
-
-Updates
-=======
-
-Multi-User has a built-in auto-update function.
-
-1. Navigate to Edit >> Preferences pane in Blender, and go to the 'Add-ons' section.
-2. Search 'multi-user', select the 'Update' tab, click 'Auto-check for Update' and choose the frequency you'd like.
-3. Make sure to click the three bars in the bottom-left, and save this to your preferences (userpref.blend).
-
-Sometimes you'd like to perform manual update, or even side-grade or rollback your multi-user version. Perhaps you are trying out new features from the 'develop' branch in a test session.
-
-1. Click on 'Check now for multiuser update'. Multi-user will now find new versions
-
-   .. figure:: img/update_1.jpg
-      :align: center
-      :width: 300px
-
-      Check for updates
-
-2. Select 'Install latest master / old version'
-
-   .. figure:: img/update_2.jpg
-      :align: center
-      :width: 300px
-
-      Install
-
-3. In most cases, select 'master' branch for the latest stable release. The unstable 'develop' branch and older releases are available
-
-   .. figure:: img/update_3.jpg
-      :align: center
-      :width: 300px
-
-      Select version
-
-4. Finally, restart blender to use the updated version
-
-   .. figure:: img/update_4.jpg
-      :align: center
-      :width: 300px
-
-      Restart blender

@@ -5,10 +5,10 @@ Quick start
 ===========

 .. hint::
-    *All session-related settings are located under: `View3D -> Sidebar -> Multiuser panel`*
+    *All session related settings are located under: `View3D -> Sidebar -> Multiuser panel`*

-The multi-user addon provides a session management system.
-In this guide, you will quickly learn how to use the collaborative session management system in three parts:
+The multi-user is based on a session management system.
+In this this guide you will quickly learn how to use the collaborative session system in three part:

 - :ref:`how-to-host`
 - :ref:`how-to-join`
@@ -19,22 +19,22 @@ In this guide, you will quickly learn how to use the collaborative session manag
 How to host a session
 =====================

-The multi-user add-on relies on a Client-Server architecture.
-The server is the heart of the collaborative session.
-It is what allows user's blender instances to communicate with each other.
+The multi-user add-on rely on a Client-Server architecture.
+The server is the heart of the collaborative session,
+it will allow each users to communicate with each others.
 In simple terms, *Hosting a session* means *run a local server and connect the local client to it*.
-When I say **local server** I mean a server which is accessible from the LAN (Local Area Network) without requiring an internet connection.
-However, there are times when you will need to host a session over the internet.
-In this case, I strongly recommend that you read the :ref:`internet-guide` tutorial.
+When I said **local server** I mean accessible from the LAN (Local Area Network).
+However sometime you will need to host a session over the internet,
+in this case I strongly recommand you to read the :ref:`internet-guide` tutorial.

 .. _user-info:

---------------------------------
-1. Fill in your user information
---------------------------------
+-----------------------------
+1. Fill your user information
+-----------------------------

-The **User Info** panel (See image below) allows you to customise your online identity.
+The **User Info** panel (See image below) allow you to constomize your online identity.

 .. figure:: img/quickstart_user_info.png
    :align: center
@@ -42,38 +42,38 @@ The **User Info** panel (See image below) allows you to customise your online id

    User info panel

-Let's fill in those two fields:
+Let's fill those tow field:

 - **name**: your online name.
-- **color**: a color used to represent you in other users' workspaces (see image below).
+- **color**: a color used to represent you into other user workspace(see image below).

-During online sessions, other users will see your selected object and camera highlighted in your profile color.
+During online sessions, other users will see your selected object and camera hilghlited in your profile color.

 .. _user-representation:
 .. figure:: img/quickstart_user_representation.png
    :align: center

-   User viewport representation aka 'User Presence'
+   User viewport representation

----------------------
-2. Set up the network
----------------------
+--------------------
+2. Setup the network
+--------------------

-When the hosting process starts, the multi-user addon will launch a local server instance.
-In the network panel, select **HOST**.
-The **Host sub-panel** (see image below) allows you to configure the server according to:
+When the hosting process will start, the multi-user addon will lauch a local server instance.
+In the nerwork panel select **HOST**.
+The **Host sub-panel** (see image below) allow you to configure the server according to:

-* **Port**: Port on which the server is listening.
+* **Port**: Port on wich the server is listening.
 * **Start from**: The session initialisation method.

-   * **current scenes**: Start with the data loaded in the current blend file.
-   * **an empty scene**: Clear the blend file's data and start over.
+   * **current scenes**: Start with the current blendfile datas.
+   * **an empty scene**: Clear a data and start over.

 .. danger::
-    By starting from an empty scene, all of the blend data will be removed!
-    Be sure to save your existing work before launching the session.
+    By starting from an empty, all of the blend data will be removed !
+    Ensure to save your existing work before launching the session.

 * **Admin password**: The session administration password.
@@ -84,16 +84,16 @@ The **Host sub-panel** (see image below) allows you to configure the server acco

    Host network panel

-.. note:: Additional configuration setting can be found in the :ref:`advanced` section.
+.. note:: Additionnal configuration setting can be found in the :ref:`advanced` section.

-Once everything is set up, you can hit the **HOST** button to launch the session!
-This will do two things:
+Once everything is setup you can hit the **HOST** button to launch the session !
+It will do two things:

 * Start a local server
 * Connect you to it as an :ref:`admin`

-During an online session, various actions are available to you, go to :ref:`how-to-manage` section to
+During online session, various actions are available to you, go to :ref:`how-to-manage` section to
 learn more about them.

 .. _how-to-join:
@@ -101,88 +101,55 @@ learn more about them.
 How to join a session
 =====================

-This section describes how join a launched session.
-Before starting make sure that you have access to the session IP address and port number.
+This section describe how join a launched session.
+Before starting make sure that you have access to the session ip and port.

---------------------------------
-1. Fill in your user information
---------------------------------
+-----------------------------
+1. Fill your user information
+-----------------------------

-Joining a server
-=======================
+Follow the user-info_ section for this step.

---------------
-Network setup
---------------
+----------------
+2. Network setup
+----------------

-In the network panel, select **JOIN**.
-The **join sub-panel** (see image below) allows you to configure your client to join a
-collaborative session which is already hosted.
+In the nerwork panel select **JOIN**.
+The **join sub-panel** (see image below) allow you configure the client to join a
+collaborative session.

-.. figure:: img/server_preset_image_normal_server.png
+.. figure:: img/quickstart_join.png
    :align: center
-   :width: 200px
+   :alt: Connect menu

-   Connection pannel
+   Connection panel

-Fill in the fields with your information:
+Fill those field with your information:

-- **IP**: the host's IP address.
-- **Port**: the host's port number.
+- **IP**: the host ip.
+- **Port**: the host port.
+- **Connect as admin**: connect you with **admin rights** (see :ref:`admin` ) to the session.

-Once you've configured every field, hit the button **CONNECT** to join the session !
-When the :ref:`session-status` is **ONLINE** you are online and ready to start co-creating.
+.. Maybe something more explicit here

 .. note::
-    If you want to have **administrator rights** (see :ref:`admin` ) on the server, just enter the password created by the host in the **Connect as admin** section
+    Additionnal configuration setting can be found in the :ref:`advanced` section.

-.. figure:: img/server_preset_image_admin.png
-   :align: center
-   :width: 200px
-
-   Admin password
-
----------------
-Server presets
----------------
-
-You can save your server presets in a preset list below the 'JOIN' and 'HOST' buttons. This allows you to quickly access and manage your servers.
-To add a server, first enter the ip address and the port (plus the password if needed), then click on the + icon to add a name to your preset. To remove a server from the list, select it and click on the - icon.
-
-.. figure:: img/server_preset_exemple.gif
-   :align: center
-   :width: 200px
-
-.. warning:: Be careful, if you don't rename your new preset, or if it has the same name as an existing preset, the old preset will be overwritten.
-
-.. figure:: img/server_preset_image_report.png
-   :align: center
-   :width: 200px
-
-.. note::
-    Two presets are already present when the addon is launched:
-
-    - The 'localhost' preset, to host and join a local session quickly
-    - The 'public session' preset, to join the public sessions of the multi-user server (official discord to participate : https://discord.gg/aBPvGws)
-
-.. note::
-    Additional configuration settings can be found in the :ref:`advanced` section.
+Once you've set every field, hit the button **CONNECT** to join the session !
+When the :ref:`session-status` is **ONLINE** you are online and ready to start to collaborate.

 .. note::
-    When starting a **dedicated server**, the session status screen will take you to the **LOBBY**, awaiting an admin to start the session.
-    If the session status is set to **LOBBY** and you are a regular user, you need to wait for the admin to launch the scene.
-    If you are the admin, you just need to initialise the repository to start the session (see image below).
+    On the **dedicated server** startup, the session status will get you to the **LOBBY** waiting a admin to start it.
+    If the session status is set to **LOBBY** and you are a regular user, you need to wait that an admin launch it.
+    If you are the admin, you just need to init the repository to start the session (see image below).

 .. figure:: img/quickstart_session_init.png
    :align: center

    Session initialisation for dedicated server

-During an online session, various actions are available to you. Go to :ref:`how-to-manage` to
+During online session, various actions are available to you, go to :ref:`how-to-manage` section to
 learn more about them.

 .. _how-to-manage:
@@ -190,17 +157,17 @@ learn more about them.
 How to manage a session
 =======================

-The quality of a collaborative session directly depends on the quality of the network connection, and the communication between the users. This section describes
-various tools which have been made in an effort to ease the communication between your fellow creators.
-Feel free to suggest any ideas for communication tools `here <https://gitlab.com/slumber/multi-user/-/issues/75>`_ .
+The collaboration quality directly depend on the communication quality. This section describes
+various tools made in an effort to ease the communication between the different session users.
+Feel free to suggest any idea for communication tools `here <https://gitlab.com/slumber/multi-user/-/issues/75>`_ .

 ---------------------------
 Change replication behavior
 ---------------------------

-During a session, multi-user will replicate all of your local modifications to the scene, to all other users' blender instances.
-In order to avoid annoying other users when you are experimenting, you can flag some of your local modifications to be ignored via
-various flags present at the top of the panel (see red area in the image below). Those flags are explained in the :ref:`replication` section.
+During a session, the multi-user will replicate your modifications to other instances.
+In order to avoid annoying other users when you are experimenting, some of those modifications can be ignored via
+various flags present at the top of the panel (see red area in the image bellow). Those flags are explained in the :ref:`replication` section.

 .. figure:: img/quickstart_replication.png
    :align: center
@@ -211,31 +178,32 @@ various flags present at the top of the panel (see red area in the image below).

 Monitor online users
 --------------------

-One of the most vital tools is the **Online user panel**. It lists all connected
-users' information including your own:
+One of the most vital tool is the **Online user panel**. It list all connected
+users information's including yours such as :

-* **Role** : if a user is an admin or a regular user.
+* **Role** : if user is an admin or a regular user.
 * **Location**: Where the user is actually working.
-* **Frame**: When (on which frame) the user is working.
-* **Ping**: user's connection delay in milliseconds
+* **Frame**: When (in frame) the user working.
+* **Ping**: user connection delay in milliseconds

 .. figure:: img/quickstart_users.png
    :align: center

    Online user panel

-By selecting a user in the list you'll have access to different users' related **actions**.
-Those operators allow you to experience the selected user's state in two different dimensions: **SPACE** and **TIME**.
+By selecting a user in the list you'll have access to different user related **actions**.
+Those operators allow you reach the selected user state in tow different dimensions: **SPACE** and **TIME**.

 Snapping in space
------------------
+----------------

-The **CAMERA button** (Also called **snap view** operator) allow you to snap to
-the user's viewpoint. To disable the snap, click on the button once again. This action
-serves different purposes such as easing the review process, and working together on a large or populated world.
+The **CAMERA button** (Also called **snap view** operator) allow you to snap on
+the user viewpoint. To disable the snap, click back on the button. This action
+served different purposes such as easing the review process, working together on
+wide world.

 .. hint::
-   If the target user is located in another scene, the **snap view** operator will send you to their scene.
+   If the target user is localized on another scene, the **snap view** operator will send you to his scene.

 .. figure:: img/quickstart_snap_view.gif
    :align: center
@@ -243,11 +211,11 @@ serves different purposes such as easing the review process, and working togethe

    Snap view in action

 Snapping in time
-----------------
+---------------

-The **CLOCK button** (Also called **snap time** operator) allows you to snap to
-the user's time (current frame). To disable the snap, click on the button once again.
-This action helps various multiple creators to work in the same time-frame
+The **CLOCK button** (Also called **snap time** operator) allow you to snap on
+the user time (current frame). To disable the snap, click back on the button.
+This action is built to help various actors to work on the same temporality
 (for instance multiple animators).

 .. figure:: img/quickstart_snap_time.gif
@@ -262,14 +230,14 @@ Kick a user

 .. warning:: Only available for :ref:`admin` !

-The **CROSS button** (Also called **kick** operator) allows the administrator to kick the selected user. This can be helpful if a user is acting unruly, but more importantly, if they are experiencing a high ping which is slowing down the scene. Meanwhile, in the target user's world, the session will properly disconnect.
+The **CROSS button** (Also called **kick** operator) allow the admin to kick the selected user. On the target user side, the session will properly disconnect.

 Change users display
 --------------------

-Presence is the multi-user module responsible for displaying user presence. During the session,
-it draw users' related information in your viewport such as:
+Presence is the multi-user module responsible for users display. During the session,
+it draw users related information in your viewport such as:

 * Username
 * User point of view
@@ -280,19 +248,11 @@ it draw users related information in your viewport such as:

    Presence show flags

-The presence overlay panel (see image above) allows you to enable/disable
+The presence overlay panel (see image above) allow you to enable/disable
 various drawn parts via the following flags:

-- **Show session status**: display the session status in the viewport
-
-  .. figure:: img/quickstart_status.png
-     :align: center
-
-  - **Text scale**: session status text size
-  - **Vertical/Horizontal position**: session position in the viewport
-
-- **Show selected objects**: display other users' current selections
-- **Show users**: display users' current viewpoint
+- **Show selected objects**: display other users current selection
+- **Show users**: display users current viewpoint
 - **Show different scenes**: display users working on other scenes
@@ -301,40 +261,40 @@ various drawn parts via the following flags:

 Manage data
 -----------

-In order to understand replication data managment, a quick introduction to the multi-user data workflow is in order.
+In order to understand replication data managment, a quick introduction to the multi-user data workflow is required.

-The first thing to know: until now, the addon relies on data-based replication. In simple words, it means that it replicates
-the resultant output of a user's actions.
+First thing to know: until now, the addon rely on a data-based replication. In simple words, it means that it replicate
+user's action results.

-To replicate datablocks between clients, multi-user relies on a standard distributed architecture:
+To replicate datablocks between clients the multi-user rely on what tends to be a distributed architecture:

-- The server stores the "master" version of the work.
-- Each client has a local version of the work.
+- The server store the "master" version of the work.
+- Each client have a local version of the work.

-When an artist modifies something in the scene, here is what is happening in the background:
+When an artist modified something in the scene, here is what is happening in the background:

 1. Modified data are **COMMITTED** to the local repository.
 2. Once committed locally, they are **PUSHED** to the server
-3. As soon as the server receives updates, they are stored locally and pushed to every other client
+3. As soon as the server is getting updates, they are stored locally and pushed to every other clients

-At the top of this data management system, a rights management system prevents
-multiple users from modifying the same data at the same time. A datablock may belong to
+At the top of this data management system, a right management system prevent
+multiple users from modifying same data at same time. A datablock may belong to
 a connected user or be under :ref:`common-right<**COMMON**>` rights.

 .. note::
-   In a near future, the rights management system will support roles to allow multiple users to
-   work on different aspects of the same datablock.
+   In a near future, the right management system will support roles to allow multiple users to
+   work on different aspect of the same datablock.

-The Repository panel (see image below) allows you to monitor, change datablock states and rights manually.
+The Repository panel (see image below) allow you to monitor, change datablock states and right manually.

-.. figure:: img/quickstart_save_session_data.png
+.. figure:: img/quickstart_properties.png
    :align: center

    Repository panel

-The **show only owned** flag allows you to see which datablocks you are currently modifying.
+The **show only owned** flag allow you to see which datablocks you are currently modifying.

 .. warning::
-   If you are editing a datablock not listed with this flag enabled, it means that you have not been granted the rights to modify it.
-   So, it won't be updated to other clients!
+   If you are editing a datablock not listed with this fag enabled, it means that you do
+   not have right granted to modify it. So it won't be updated to other client !

 Here is a quick list of available actions:
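Both versions of the passage above describe the same commit/push/pull cycle. As a reading aid, the flow is analogous to a per-datablock git loop; this is purely an illustration of the described workflow, not the add-on's actual code:

.. code-block:: bash

   # Illustrative analogy only -- the add-on performs these steps automatically.
   git commit -am "local edit"   # 1. modified data are COMMITTED to the local repository
   git push origin master        # 2. committed data are PUSHED to the server
   # 3. the server stores the update and distributes it; every other
   #    client then effectively runs:
   git pull origin master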
@@ -352,40 +312,6 @@ Here is a quick list of available actions:
 | .. image:: img/quickstart_remove.png  | **Delete**        | Remove the data-block from network replication                                     |
 +---------------------------------------+-------------------+------------------------------------------------------------------------------------+

-Save session data
------------------
-
-.. danger::
-    This is an experimental feature, until the stable release it is highly recommended to use regular .blend save.
-
-The save session data allows you to create a backup of the session data.
-
-When you hit the **save session data** button, the following popup dialog will appear.
-It allows you to choose the destination folder and if you want to run an auto-save.
-
-.. figure:: img/quickstart_save_session_data_dialog.png
-   :align: center
-
-   Save session data dialog.
-
-If you enabled the auto-save option, you can cancel it from the **Cancel auto-save** button.
-
-.. figure:: img/quickstart_save_session_data_cancel.png
-   :align: center
-
-   Cancel session autosave.
-
-To import session data backups, use the following **Multiuser session snapshot** import dialog
-
-.. figure:: img/quickstart_import_session_data.png
-   :align: center
-
-   Import session data dialog.
-
-.. note::
-    It is not yet possible to start a session directly from a backup.
-
 .. _advanced:

 Advanced settings
@@ -407,6 +333,15 @@ Network

    Advanced network settings

+**IPC Port** is the port used for Inter Process Communication. This port is used
+by the multi-users subprocesses to communicate with each others. If different instances
+of the multi-user are using the same IPC port it will create conflict !
+
+.. note::
+    You only need to modify it if you need to launch multiple clients from the same
+    computer(or if you try to host and join on the same computer). You should just enter a different
+    **IPC port** for each blender instance.
+
 **Timeout (in milliseconds)** is the maximum ping authorized before auto-disconnecting.
 You should only increase it if you have a bad connection.
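To make the restored IPC note concrete: the conflict only arises when several Blender processes share one machine. A hypothetical single-machine host-and-join test (the IPC port is changed in each instance's add-on network preferences, not on the command line; the port numbers are examples):

.. code-block:: bash

   # Two independent Blender instances on one computer.
   blender &   # instance 1: set IPC port to e.g. 5561 in the add-on preferences, then HOST
   blender &   # instance 2: set IPC port to e.g. 5571 in the add-on preferences, then JOIN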
@@ -421,20 +356,20 @@ Replication

    Advanced replication settings

-**Synchronize render settings** (only host) enable replication of EEVEE and CYCLES render settings to match renders between clients.
+**Synchronize render settings** (only host) enable replication of EEVEE and CYCLES render settings to match render between clients.

-**Synchronize active camera** sync the scene's active camera.
+**Synchronize active camera** sync the scene active camera.

-**Edit Mode Updates** enable objects to update while you are in Edit_Mode.
+**Edit Mode Updates** enable objects update while you are in Edit_Mode.

-.. warning:: Edit Mode Updates kills the session's performance with complex objects (heavy meshes, gpencil, etc...).
+.. warning:: Edit Mode Updates kill performances with complex objects (heavy meshes, gpencil, etc...).

-**Update method** allows you to change how replication updates are triggered. Until now, two update methods are implemented:
+**Update method** allow you to change how replication update are triggered. Until now two update methode are implemented:

-- **Default**: Use external threads to monitor datablocks changes. Slower and less accurate.
+- **Default**: Use external threads to monitor datablocks changes, slower and less accurate.
 - **Despgraph ⚠️**: Use the blender dependency graph to trigger updates. Faster but experimental and unstable !

-**Properties frequency grid** set a custom replication frequency for each type of data-block:
+**Properties frequency gird** allow to set a custom replication frequency for each type of data-block:

 - **Refresh**: pushed data update rate (in second)
 - **Apply**: pulled data update rate (in second)
@@ -443,21 +378,21 @@ Replication

 Cache
 -----

-Multi-user allows you to replicate external dependencies such as images (textures, hdris, etc...), movies, and sounds.
-On each client, the files will be stored in the multi-user cache folder.
+The multi-user allows to replicate external blend dependencies such as images, movies sounds.
+On each client, those files are stored into the cache folder.

 .. figure:: img/quickstart_advanced_cache.png
    :align: center

    Advanced cache settings

-**cache_directory** choose where cached files (images, sound, movies) will be saved.
+**cache_directory** allows to choose where cached files (images, sound, movies) will be saved.

-**Clear memory filecache** will save memory space at runtime by removing the file content from memory as soon as it has been written to the disk.
+**Clear memory filecache** will save memory space at runtime by removing the file content from memory as soon as it have been written to the disk.

-**Clear cache** will remove all files from the cache folder.
+**Clear cache** will remove all file from the cache folder.

-.. warning:: Clearing the cache could break your scene images/movies/sounds if they are used in a blend file! Try saving the blend file and choosing 'Pack all into blend' before clearing the cache.
+.. warning:: Clear cash could break your scene image/movie/sound if they are used into the blend !

 ---
 Log
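The warning deleted above recommends packing external files before clearing the cache. A hedged sketch of doing that headlessly with standard ``bpy`` operators (the file name is a placeholder):

.. code-block:: bash

   # Pack all external images/sounds/movies into the .blend and save it,
   # so clearing the multi-user cache cannot break the file's references.
   blender -b scene.blend --python-expr \
       "import bpy; bpy.ops.file.pack_all(); bpy.ops.wm.save_mainfile()"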
@@ -468,16 +403,16 @@ Log

    Advanced log settings

-**log level** allows you to set the level of detail captured in multi-user's logging output. Here is a brief description on the level of detail for each value of the logging parameter:
+**log level** allow to set the logging level of detail. Here is the detail for each values:

 +-----------+-----------------------------------------------+
 | Log level | Description                                   |
 +===========+===============================================+
-| ERROR     | Shows only critical errors                    |
+| ERROR     | Shows only critical error                     |
 +-----------+-----------------------------------------------+
-| WARNING   | Shows only errors (of all kinds)              |
+| WARNING   | Shows only errors (all kind)                  |
 +-----------+-----------------------------------------------+
-| INFO      | Shows only status-related messages and errors |
+| INFO      | Shows only status related messages and errors |
 +-----------+-----------------------------------------------+
-| DEBUG     | Shows all possible information                |
+| DEBUG     | Shows every possible information.             |
 +-----------+-----------------------------------------------+

@@ -1,19 +0,0 @@
-.. _troubleshooting:
-
-===============
-Troubleshooting
-===============
-
-The majority of issues new users experience when first using Multi-User can be solved with a few quick checks.
-
-- Run Blender in Administrator mode
-- Update the multi-user addon to the latest version
-- Make sure to allow Blender through your firewall
-
-  .. hint:: Your firewall may have additional settings like Ransomware protection, or you may need to enable both Blender and Python on private and/or public Networks
-
-- Solve problems with your connection quality
-- Minimise the use of large textures or file sizes
-- Avoid using 'Undo'. Use 'delete' instead
-
-Use the #support channel on the multi-user `discord server <https://discord.gg/aBPvGws>`_ to chat, seek help and contribute.

@@ -49,7 +49,6 @@ Documentation is organized into the following sections:

    getting_started/install
    getting_started/quickstart
    getting_started/glossary
-   getting_started/troubleshooting

 .. toctree::
    :maxdepth: 1

@@ -1,51 +1,36 @@
 .. _internet-guide:

-=======================
-Hosting on the internet
-=======================
+===================
+Hosting on internet
+===================

 .. warning::
-    Until now, those communications are not encrypted but are planned to be in a mid-term future (`status <https://gitlab.com/slumber/multi-user/issues/62>`_).
+    Until now, those communications are not encrypted but are planned to be in a mid-term future (`Status <https://gitlab.com/slumber/multi-user/issues/62>`_).

-This tutorial aims to guide you toward hosting a collaborative multi-user session on the internet.
-Hosting a session can be achieved in several ways:
+This tutorial aims to guide you to host a collaborative Session on internet.
+Hosting a session can be done is several ways:

 - :ref:`host-blender`: hosting a session directly from the blender add-on panel.
 - :ref:`host-dedicated`: hosting a session directly from the command line interface on a computer without blender.
-- :ref:`host-cloud`: hosting a session on a dedicated cloud server such as Google Cloud's free tier.

 .. _host-blender:

---------------------
+-------------
 From blender
---------------------
+-------------

 By default your router doesn't allow anyone to share you connection.
-In order grant the server access to people from internet you have two main option:
+In order grant server access to people from internet you have tow main option:

 * The :ref:`connection-sharing`: the easiest way.
-* The :ref:`port-forwarding`: this way is the most unsecure. If you have no networking knowledge, you should definitely follow :ref:`connection-sharing`.
+* The :ref:`port-forwarding`: this one is the most unsecure, if you have no networking knowledge, you should definitively go to :ref:`connection-sharing`.

 .. _connection-sharing:

 Using a connection sharing solution
 -----------------------------------

-You can either follow `Pierre Schiller's <https://www.youtube.com/c/activemotionpictures/featured>`_ excellent video tutorial or jump to the `text tutorial <zt-installation_>`_.
-
-.. raw:: html
-
-    <p>
-    <iframe width="560" height="315" src="https://www.youtube.com/embed/xV4R5AukkVw" frameborder="0" allow="accelerometer; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
-    </p>
-
 Many third party software like `ZEROTIER <https://www.zerotier.com/download/>`_ (Free) or `HAMACHI <https://vpn.net/>`_ (Free until 5 users) allow you to share your private network with other people.
-For the example I'm gonna use ZeroTier because it's free and open source.
-
-.. _zt-installation:
+For the example I'm gonna use ZeroTier because its free and open source.

 1. Installation
 ^^^^^^^^^^^^^^^
@@ -62,7 +47,7 @@ To create a ZeroTier private network you need to register a ZeroTier account `on
 (click on **login** then register on the bottom)

 Once you account it activated, you can connect to `my.zerotier.com <https://my.zerotier.com/login>`_.
-Head up to the **Network** section (highlighted in red in the image below).
+Head up to the **Network** section(highlighted in red in the image below).

 .. figure:: img/hosting_guide_head_network.png
    :align: center
@@ -76,7 +61,7 @@ Hit 'Create a network'(see image below) and go to the network settings.
    :align: center
    :width: 450px

-   Admin password
+   Network page

 Now that the network is created, let's configure it.
@@ -101,7 +86,7 @@ Now let's connect everyone.
 3. Network authorization
 ^^^^^^^^^^^^^^^^^^^^^^^^

-Since your ZeroTier network is Private, you will need to authorize each new user
+Since your ZeroTier network is Private, you will need to authorize each new users
 to connect to it.

 For each user you want to add, do the following step:
@@ -119,7 +104,7 @@ For each user you want to add, do the following step:
    :align: center
    :width: 450px

-   Add the client to network-authorized users
+   Add the client to network authorized users

 4. Network connection
 ^^^^^^^^^^^^^^^^^^^^^
@@ -168,12 +153,12 @@ This is it for the ZeroTier network setup. Now everything should be setup to use
 Using port-forwarding
 ---------------------

-The port forwarding method consists of configuring your network router to deny most traffic with a firewall, but to then allow particular internet traffic (like a multiuser connection) through the firewall on specified ports.
-In order to know which ports are used by the add-on, please check the :ref:`port-setup` section.
+The port forwarding method consist to configure you Network route to allow internet trafic throught specific ports.
+In order to know which port are used by the add-on, check the :ref:`port-setup` section.
 To set up port forwarding for each port you can follow this `guide <https://www.wikihow.com/Set-Up-Port-Forwarding-on-a-Router>`_ for example.

-Once you have set up the network you can follow the :ref:`quickstart` guide to begin using the multi-user add-on !
+Once you have set up the network you can follow the :ref:`quickstart` guide to start using the multi-user add-on !

 .. _host-dedicated:
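Whichever wording survives, once forwarding is configured it is worth verifying the port actually answers from outside the LAN before inviting users. A small sketch using netcat (the address and port are placeholders; the add-on's real port list is in the :ref:`port-setup` section):

.. code-block:: bash

   # Run from a machine OUTSIDE your network.
   nc -zv YOUR_PUBLIC_IP 5555   # success means the forwarded TCP port is reachable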
@@ -182,10 +167,11 @@ From the dedicated server
 --------------------------

 .. warning::
-    The dedicated server is developed to run directly on an internet server (like a VPS (Virtual Private Server)). You can also run it at home on a LAN but for internet hosting you need to follow the :ref:`port-forwarding` setup first. Please see :ref:`host-cloud` for a detailed walkthrough of cloud hosting using Google Cloud.
+    The dedicated server is developed to run directly on internet server (like VPS). You can also
+    run it at home for LAN but for internet hosting you need to follow the :ref:`port-forwarding` setup first.

-The dedicated server allows you to host a session with simplicity from any location.
-It was developed to improve internet hosting performance (for example poor latency).
+The dedicated server allow you to host a session with simplicity from any location.
+It was developed to improve internet hosting performance.

 The dedicated server can be run in two ways:
@@ -206,28 +192,29 @@ You can run the dedicated server on any platform by following these steps:

 .. code-block:: bash

-   python -m pip install replication==0.1.13
+   python -m pip install replication==0.0.21a15

 4. Launch the server with:

 .. code-block:: bash

-   replication.server
+   replication.serve

 .. hint::
    You can also specify a custom **port** (-p), **timeout** (-t), **admin password** (-pwd), **log level (ERROR, WARNING, INFO or DEBUG)** (-l) and **log file** (-lf) with the following optional arguments

 .. code-block:: bash

-   replication.server -p 5555 -pwd admin -t 5000 -l INFO -lf server.log
+   replication.serve -p 5555 -pwd admin -t 1000 -l INFO -lf server.log

-Here, for example, a server is instantiated on port 5555, with password 'admin', a 5 second timeout, and logging enabled.
+Here, for example, a server is instantiated on port 5555, with password 'admin', a 1 second timeout, and logging enabled.

 As soon as the dedicated server is running, you can connect to it from blender by following :ref:`how-to-join`.

 .. hint::
-   Some server commands are available to enable administrators to manage a multi-user session. Check :ref:`dedicated-management` to learn more.
+   Some commands are available to enable an administrator to manage the session. Check :ref:`dedicated-management` to learn more.

 .. _docker:
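Since the two sides of this diff pin different ``replication`` releases (0.1.13 before the revert, 0.0.21a15 after), installing into a disposable virtual environment avoids clobbering whichever version another checkout expects. A sketch using the pre-revert pin and launch command:

.. code-block:: bash

   python -m venv replication-env              # isolated environment for the server
   source replication-env/bin/activate
   python -m pip install replication==0.1.13   # or 0.0.21a15 (with replication.serve) for the reverted docs
   replication.server -p 5555 -pwd admin -t 5000 -l INFO -lf server.log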
@ -244,103 +231,25 @@ Launching the dedicated server from a docker server is simple as running:
-e port=5555 \ -e port=5555 \
-e log_level=DEBUG \ -e log_level=DEBUG \
-e password=admin \ -e password=admin \
-e timeout=5000 \ -e timeout=1000 \
registry.gitlab.com/slumber/multi-user/multi-user-server:latest registry.gitlab.com/slumber/multi-user/multi-user-server:0.1.0
Please use the :latest tag, or otherwise use the URL of the most recent container available in the `multi-user container registry <https://gitlab.com/slumber/multi-user/container_registry/1174180>`_. As soon as the dedicated server is running, you can connect to it from blender by following :ref:`how-to-join`. As soon as the dedicated server is running, you can connect to it from blender by following :ref:`how-to-join`.
You can check that your container is running, and find its ID and name with: You can check your container is running, and find its ID with:
.. code-block:: bash .. code-block:: bash
docker ps docker ps
.. _docker-logs: Logs for the server running in the docker container can be accessed by outputting the following to a log file:
Viewing logs in a docker container
----------------------------------
Logs for the server running in a docker container can be accessed by outputting the container logs to a log file. First, you'll need to know your container ID, which you can find by running:
.. code-block:: bash .. code-block:: bash
docker ps docker log your-container-id >& dockerserver.log
Then, output the container logs to a file:
.. code-block:: bash
docker logs your-container-id >& dockerserver.log
.. Note:: If using WSL2 on Windows 10 (Windows Subsystem for Linux), it is preferable to run a dedicated server via regular command line approach (or the associated startup script) from within Windows - docker desktop for windows 10 usually uses the WSL2 backend where it is available. .. Note:: If using WSL2 on Windows 10 (Windows Subsystem for Linux), it is preferable to run a dedicated server via regular command line approach (or the associated startup script) from within Windows - docker desktop for windows 10 usually uses the WSL2 backend where it is available.
.. This may not be true. Need to write up how to locally start a docker container from WSL2
Downloading logs from a docker container on a cloud-hosted server
-----------------------------------------------------------------
If you'd like to pull the log files from a cloud-hosted server to submit to a developer for review, a simple process using SSH and SCP is as follows:
First SSH into your instance. You can either open the `VM Instances console <https://console.cloud.google.com/compute/instances>`_ and use the browser terminal provided by Google Cloud (I had the best luck using the Google Chrome browser)... or you can see `here <https://cloud.google.com/compute/docs/instances/connecting-advanced#thirdpartytools>`_ for how to set up your instance for SSH access from your local terminal.
If using SSH from your terminal, first generate SSH keys (setting their access permissions to e.g. chmod 400 level whereby only the user has permissions) and submit the public key to the cloud-hosted VM instance, storing the private key on your local machine.
Then, SSH into your cloud server from your local terminal, with the following command:
.. code-block:: bash
ssh -i PATH_TO_PRIVATE_KEY USERNAME@EXTERNAL_IP_ADDRESS
Use the private key which corresponds to the public key you uploaded, and the username associated with that key (visible in the Google Cloud console for your VM Instance). Use the external IP address for the server, available from the `VM Instances console <https://console.cloud.google.com/compute/instances>`_.
e.g.
.. code-block:: bash
ssh -i ~/.ssh/id_rsa user@xxx.xxx.xxx.xxx
Once you've connected to the server's secure shell, you can generate a log file from the docker container running the replication server. First, you'll need to know your container ID, which you can find by running:
.. code-block:: bash
docker ps
If you're cloud-hosting with e.g. Google Cloud, your container will be the one associated with the `registry address <https://gitlab.com/slumber/multi-user/container_registry/1174180>`_ where your Docker image was located, e.g. registry.gitlab.com/slumber/multi-user/multi-user-server:latest
To view the docker container logs, run:
.. code-block:: bash
docker logs your-container-name
OR
.. code-block:: bash
docker logs your-container-id
To save the output to a file, run:
.. code-block:: bash
docker logs your-container-id >& dockerserver.log
Now that the server logs are available in a file, we can disconnect from the secure shell (SSH), and then copy the file to the local machine using SCP. In your local terminal, execute the following:
.. code-block:: bash
scp -i PATH_TO_PRIVATE_KEY USERNAME@EXTERNAL_IP_ADDRESS:"dockerserver.log" LOCAL_PATH_TO_COPY_FILE_TO
e.g.
.. code-block:: bash
scp -i ~/.ssh/id_rsa user@xxx.xxx.xxx.xxx:"dockerserver.log" .
This copies the file dockerserver.log generated in the previous step to the current directory on the local machine. From there, you can send it to the multi-user maintainers for review.
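Alternatively, you can skip the intermediate file on the server entirely and stream the container logs straight to a local file over SSH (a sketch using the same key, address and container-id placeholders as above):

.. code-block:: bash

   ssh -i ~/.ssh/id_rsa user@xxx.xxx.xxx.xxx "docker logs your-container-id" > dockerserver.log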
.. Note:: See these `notes <https://cloud.google.com/compute/docs/containers/deploying-containers?_ga=2.113663175.-1396941296.1606125558#viewing_container_logs>`_ for how to check server logs on Google Cloud using other tools.
.. _serverstartscripts:
@ -371,30 +280,11 @@ Dedicated server management
Here is the list of available commands from the dedicated server:

- ``help`` or ``?``: show all commands, or use ``help <command>`` to learn more about a specific command
- ``exit`` or ``Ctrl+C``: stop the server.
- ``kick username``: kick the provided user.
- ``users``: list all online users.
Also, see :ref:`how-to-manage` for more details on managing a server.
.. _cloud-dockermanage:
Managing a docker server from the command line
----------------------------------------------
If you want to be able to manage a server running within a docker container, open the terminal on the host machine (or SSH in, if you are using cloud hosting), and then run
.. code-block:: bash
docker ps
to find your container id, and then
.. code-block:: bash
docker attach your-container-id
to attach to the STDOUT from the container. There, you can issue the server management commands detailed in :ref:`dedicated-management`. Type ``?`` and hit return/enter to see the available commands.
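Be aware that pressing ``Ctrl+C`` in an attached session stops the server itself. To leave the session without stopping the container, use docker's default detach sequence instead (this assumes the container was started with a pseudo-TTY, as recommended in :ref:`cloud-container`):

.. code-block:: bash

   docker attach your-container-id
   # issue management commands, e.g. 'users', then detach
   # without stopping the server by pressing: Ctrl+P, then Ctrl+Q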
.. _port-setup:
@ -402,14 +292,14 @@ to attach to the STDOUT from the container. There, you can issue the server mana
Port setup
----------

The multi-user network architecture is based on a client-server model. The communication protocol uses four ports to communicate with clients:

* Commands: command transmission (such as **snapshots**, **change_rights**, etc.) [user-nominated port]
* Subscriber: pull data [Commands port + 1]
* Publisher: push data [Commands port + 2]
* TTL (time to live): used to ping each client [Commands port + 3]

To know which ports will be used, you just have to read the Commands port in your preferences.
.. figure:: img/hosting_guide_port.png
   :align: center
@ -417,315 +307,11 @@ To know which ports will be used, you just have to read the port in your prefere
   :width: 200px

   Port in host settings
In the picture above we have set up our port to **5555**, so the four ports will be:

* Commands: **5555** (5555)
* Subscriber: **5556** (5555 +1)
* Publisher: **5557** (5555 +2)
* TTL: **5558** (5555 +3)

Those four ports need to be accessible from the client, otherwise multi-user won't work at all!
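To verify from a client machine that all four ports are actually reachable, you can run a quick scan with netcat (a minimal sketch, assuming netcat is installed and substituting your server's address for ``your.server.ip``):

.. code-block:: bash

   for port in 5555 5556 5557 5558; do
       nc -zv your.server.ip "$port"
   done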
.. _host-cloud:
-------------------------
Cloud Hosting Walkthrough
-------------------------
The following is a walkthrough for how to set up a multi-user dedicated server instance on a cloud hosting provider - in this case, `Google Cloud <https://www.cloud.google.com>`_. Google Cloud is a powerful hosting service with a worldwide network of servers. It offers a free trial which provides free cloud hosting for 90 days, and then a free tier which runs indefinitely thereafter, so long as you stay within the `usage limits <https://cloud.google.com/free/docs/gcp-free-tier#free-tier-usage-limits>`_. Thanks to community member @NotFood for the tip!
Cloud hosting is a little more complicated to set up, but it can be valuable if you are trying to host a session with multiple friends scattered about planet earth. This can resolve issues with data replication or slowdowns due to poor latency of some users (high ping). This guide may seem technical, but if you follow the steps, you should be able to succeed in hosting an internet server to co-create with other multi-user creators around the world.
Setup Process
-------------
1. Sign Up for Google Cloud
^^^^^^^^^^^^^^^^^^^^^^^^^^^
Let's start by activating an account with Google Cloud. Go to https://www.cloud.google.com and click 'Get Started For Free'.
.. figure:: img/hosting_guide_gcloud_1.jpg
:align: center
:width: 450px
Google will ask you to login/signup, and to set up a billing account (don't worry - you will not be charged unless you explicitly enable billing and then run over your `free credit allowance <https://cloud.google.com/free/docs/gcp-free-tier>`_). You will need to choose a billing country (relevant for `tax purposes <https://cloud.google.com/billing/docs/resources/vat-overview>`_). You will choose your server location at a later step.
2. Enable Billing and Compute Engine API
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
From here on, we will mostly stick to the instructions provided `here <https://cloud.google.com/compute/docs/quickstart-linux>`_. Nevertheless, the instructions for multi-user specifically are as follows.
In order to set up a Virtual Machine (VM) to host your server, you will need to enable the billing account which was created during your signup process. From your `console <https://console.cloud.google.com/getting-started>`_, click on 'Go to Checklist' and then 'Create a Billing Account', following the prompts to choose the billing account that was created for you upon signup.
.. figure:: img/hosting_guide_gcloud_2.jpg
:align: center
:width: 300px
.. figure:: img/hosting_guide_gcloud_3.jpg
:align: center
:width: 300px
.. figure:: img/hosting_guide_gcloud_4.jpg
:align: center
:width: 300px
.. figure:: img/hosting_guide_gcloud_5.jpg
:align: center
:width: 300px
.. figure:: img/hosting_guide_gcloud_6.jpg
:align: center
:width: 300px
Now hit 'Set Account', and go back to your `console <https://console.cloud.google.com/getting-started>`_.
Now enable the Compute Engine API. Click `here <https://console.cloud.google.com/apis/api/compute.googleapis.com/overview>`_ to enable.
.. figure:: img/hosting_guide_gcloud_7.jpg
:align: center
:width: 300px
3. Create a Linux Virtual Machine Instance
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Continue following the `instructions <https://cloud.google.com/compute/docs/quickstart-linux#create_a_virtual_machine_instance>`_ to create a VM instance. However, once you've finished step 2 of 'Create a virtual machine instance', use the settings and steps for multi-user as follows.
.. _server-location:
3.1 Choose a Server Location
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The most important settings which you will need to choose for your specific case are the server Region and Zone. You must choose a location which will provide the best ping for all of your fellow creators.
All you need to know is that you'll probably want to choose a location near to where most of your collaborators are located. If your friends are spread out, somewhere in the middle which distributes the ping evenly to all users is best.
You can use `this map <https://cloud.google.com/about/locations/>`_ to make a rough guess of the best server location, if you know your friends' locations.
.. figure:: img/hosting_guide_gcloud_9.jpg
:align: center
:width: 450px
A much better approach is to have your users run a ping test for Google Cloud's servers at https://www.gcping.com/
Have your collaborators open this webpage from their fastest browser, and press the play button. The play button turns to a stop icon while the ping test is running. When it is complete, the play button returns. You may need to refresh your browser to get this to work. You can replay the test to add more server locations to the scan, and stop when you are satisfied that the results are consistent.
Now, gather your friends' data, and work down each user's list from the top, until you find the first location which gives roughly the same ping for all users.
In general, global (using load balancing) will provide the best results, but beyond that, the US Central servers, e.g. Iowa, generally turn out best for a globally distributed bunch of creators. When in doubt, choose between the servers offered under the `free tier <https://cloud.google.com/free/docs/gcp-free-tier>`_:
- Oregon: *us-west1*
- Iowa: *us-central1*
- South Carolina: *us-east1*
For the following example, the server which gave the most balanced and lowest average ping between two friends based in Europe and Australia was in Iowa. Salt Lake City would also be an excellent choice.
.. figure:: img/hosting_guide_gcloud_10.jpg
:align: center
:width: 450px
Left - European User | Right - Australian User
Now, input this server location in the 'Region' field for your instance, and leave the default zone which is then populated.
.. Note:: You can read `here <https://cloud.google.com/solutions/best-practices-compute-engine-region-selection>`_ for a deeper understanding about how to choose a good server location.
3.2 Configure the VM
^^^^^^^^^^^^^^^^^^^^
You can deploy the replication server to your VM in either of the ways mentioned at :ref:`host-dedicated`. That is, you can set it up :ref:`cmd-line` or :ref:`docker`. We will go through both options in this walkthrough. See :ref:`container_v_direct` for more details on how to choose. Deploying a container is the recommended approach.
.. _cloud-container:
Option 1 - Deploy a container
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If you are familiar with Docker, you'll appreciate that it makes life a little simpler for us. While configuring your instance, you can check **Deploy a container to this VM instance** and copy the URL of the latest docker image available from the `multi-user container registry <https://gitlab.com/slumber/multi-user/container_registry/1174180>`_ into the *Container image* field, or use the tag ``:latest``.
.. figure:: img/hosting_guide_gcloud_8b.jpg
:align: center
:width: 450px
Your configuration with Docker should look like this
Make sure to choose the amount of memory you'd like your server to be able to handle (how much memory does your blender scene require?). In this example, I've chosen 4GB of RAM.
Click on **Advanced container options** and turn on *Allocate a buffer for STDIN* and *Allocate a pseudo-TTY* just in case you want to run an interactive shell in your container.
.. _cloud-optional-parameters:
Optional server parameters
^^^^^^^^^^^^^^^^^^^^^^^^^^
The default Docker image essentially runs the equivalent of:
.. code-block:: bash
replication.server -pwd admin -p 5555 -t 5000 -l DEBUG -lf multiuser_server.log
This means the server will be launched with 'admin' as the administrator password, will run on ports 5555:5558, use a timeout of 5 seconds (5000 ms), log verbosely at the 'DEBUG' level, and write log files to 'multiuser_server.log'. See :ref:`cmd-line` for a description of the optional parameters.
.. Note:: If you'd like to configure different server options from the default docker configuration, you can insert your options here by expanding 'Advanced container options'
For example, I would like to launch my server with a different administrator password than the default, my own log filename, and a shorter 3-second (3000ms) timeout. I'll click *Add argument* under **Command arguments** and paste the following command with options into the "command arguments" field:
.. code-block:: bash
python3 -m replication.server -pwd supersecretpassword -p 5555 -t 3000 -l DEBUG -lf logname.log
Now, my configuration should look like this:
.. figure:: img/hosting_guide_gcloud_8c.jpg
:align: center
:width: 450px
The rest of the settings are now complete. Hit **Create** and your instance will go live. If you've taken this approach, you're already almost there! Skip to :ref:`cloud-firewall`.
.. hint:: You can find further information on configuration options `here <https://cloud.google.com/compute/docs/containers/configuring-options-to-run-containers>`_. Also, see these `notes <https://cloud.google.com/compute/docs/containers/deploying-containers?_ga=2.113663175.-1396941296.1606125558#viewing_container_logs>`_ for other options when deploying your server inside a container, including how to access the server's logs.
.. _cloud-direct:
Option 2 - Over SSH
^^^^^^^^^^^^^^^^^^^
Otherwise, we can run the dedicated server ourselves from the command-line over SSH.
While creating your instance, keep the default settings mentioned in the `guide <https://cloud.google.com/compute/docs/quickstart-linux#create_a_virtual_machine_instance>`_; however, at step 4, choose Debian version 10. Also, there is no need to enable HTTP, so skip step 6.
.. figure:: img/hosting_guide_gcloud_8a.jpg
:align: center
:width: 450px
Your configuration should look like this
Make sure to choose the amount of memory you'd like your server to be able to handle (how much memory does your blender scene require?). In this example, I've chosen 4GB of RAM.
Now, finally, click 'Create' to generate your Virtual Machine Instance.
.. _cloud-firewall:
4. Setting up Firewall and opening Ports
----------------------------------------
Now that your VM is instanced, you'll need to set up firewall rules, and open the ports required by multi-user. The documentation for VM firewalls on google cloud is `here <https://cloud.google.com/vpc/docs/using-firewalls#listing-rules-vm>`_.
First, go to the dashboard showing your `VM instances <https://console.cloud.google.com/compute/instances>`_ and note the 'External IP' address for later. This is the address of your server. Then, click 'Set up Firewall Rules'.
.. figure:: img/hosting_guide_gcloud_11.jpg
:align: center
:width: 450px
Note down your External IP
Now you will need to create two rules: one to enable communication inbound to your server (ingress), and another to enable outbound communication from your server (egress). Click 'Create Firewall'.
.. figure:: img/hosting_guide_gcloud_12.jpg
:align: center
:width: 450px
Now create a rule exactly as in the image below for the outbound communication (egress).
.. figure:: img/hosting_guide_gcloud_13.jpg
:align: center
:width: 450px
Egress
.. Note:: If you set a different port number in :ref:`cloud-optional-parameters`, then use the ports indicated in :ref:`port-setup`
And another rule exactly as in the image below for the inbound communication (ingress).
.. figure:: img/hosting_guide_gcloud_14.jpg
:align: center
:width: 450px
Ingress
Finally, your firewall configuration should look like this.
.. figure:: img/hosting_guide_gcloud_15.jpg
:align: center
:width: 450px
Final Firewall Configuration
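If you prefer the command line to the web console, roughly equivalent rules can be created with the ``gcloud`` CLI (a sketch only; the rule names are placeholders of my choosing, and the port range assumes the default base port 5555):

.. code-block:: bash

   gcloud compute firewall-rules create multi-user-ingress \
       --direction=INGRESS --action=ALLOW --rules=tcp:5555-5558 \
       --source-ranges=0.0.0.0/0

   gcloud compute firewall-rules create multi-user-egress \
       --direction=EGRESS --action=ALLOW --rules=tcp:5555-5558 \
       --destination-ranges=0.0.0.0/0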
5. Install Replication Server into Virtual Machine
--------------------------------------------------
.. Note:: Skip to :ref:`initialise-server` if you've opted to launch the server by deploying a container. Your server is already live!
Now that we have set up our Virtual Machine instance, we can SSH into it, and install the Replication Server. Open the `VM Instances console <https://console.cloud.google.com/compute/instances>`_ once more, and SSH into your instance. It's easiest to use the browser terminal provided by Google Cloud (I had the best luck using the Google Chrome browser), but you can also see `here <https://cloud.google.com/compute/docs/instances/connecting-advanced#thirdpartytools>`_ for how to set up your instance for SSH access from your terminal.
.. figure:: img/hosting_guide_gcloud_16.jpg
:align: center
:width: 450px
Now, a terminal window should pop up in a new browser window looking something like this:
.. figure:: img/hosting_guide_gcloud_17.jpg
:align: center
:width: 450px
Remember, you set up the VM with Debian 10, which comes with Python 3.7.3 already installed. The only missing dependency is pip3. So, run:
.. code-block:: bash
sudo apt install python3-pip
.. figure:: img/hosting_guide_gcloud_18.jpg
:align: center
:width: 450px
And now let's install the replication package:
.. code-block:: bash
sudo pip3 install replication==0.1.13
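To confirm that the package was installed correctly, you can ask pip for its details (a quick sanity check):

.. code-block:: bash

   pip3 show replication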
6. Launch Replication Server on VM Instance
-------------------------------------------
We're finally ready to launch the server. Simply run:
.. code-block:: bash
python3 -m replication.server -p 5555 -pwd admin -t 5000 -l INFO -lf server.log
See :ref:`cmd-line` for a description of optional parameters.
And your replication server is live! It should stay running in the terminal window until you close it. Copy the external IP that you noted down earlier, available `here <https://console.cloud.google.com/networking/addresses/list>`_, and now you can open Blender and connect to your server!
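Since the server stops when the terminal (and therefore your SSH session) closes, you may want to launch it inside a persistent session instead (a sketch using ``nohup``; ``screen`` or ``tmux`` would work equally well):

.. code-block:: bash

   nohup python3 -m replication.server -p 5555 -pwd admin -t 5000 -l INFO -lf server.log &

   # follow the log output at any time with:
   tail -f server.log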
.. _initialise-server:
7. Initialise your Server in Blender
------------------------------------
Once in Blender, make sure your multi-user addon is updated to the latest version (see :ref:`update-version`). Then, follow the instructions from :ref:`how-to-join` and connect as an admin user, using the password you launched the server with. Input your external IP, and make sure you're set to JOIN the server. Then, click CONNECT.
.. figure:: img/hosting_guide_gcloud_19.jpg
:align: center
:width: 200px
Now, as the admin user, you can choose whether to initialise the server with a preloaded scene, or an empty scene.
.. figure:: img/hosting_guide_gcloud_20.jpg
:align: center
:width: 200px
Press OK, and now your session is live!
If you made it this far, congratulations! You can now go ahead and share the external IP address with your friends and co-creators and have fun with real-time collaboration in Blender!
Hopefully, your cloud server setup has improved your group's overall ping readings, and you're in for a smooth and trouble-free co-creation session.
.. Note:: If you should so desire, pay attention to your credit and follow the steps `here <https://cloud.google.com/compute/docs/quickstart-linux#clean-up>`_ to close your instance at your discretion.
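The clean-up can also be scripted with the ``gcloud`` CLI (a sketch; substitute your actual instance name and zone):

.. code-block:: bash

   # pause the instance (it can be restarted later):
   gcloud compute instances stop your-instance-name --zone=your-zone

   # or remove it entirely:
   gcloud compute instances delete your-instance-name --zone=your-zone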
.. _container_v_direct:
Should I deploy a Docker Container or launch a server from Linux VM command-line?
----------------------------------------------------------------------------------
- Directly from Linux VM - This approach gives you more direct control over your session. However, your server may time out once your SSH link to the server is interrupted (for example, if the admin's computer goes to sleep).
- Deploy a Docker Container - This is the recommended approach, and is better for leaving a session running without supervision. It can, however, be more complicated to manage. Use this approach if you'd like a consistent experience with others in the multi-user community, pulling from the most up-to-date docker image maintained by @swann in the multi-user container registry.
(22 binary image files not shown - removed hosting guide screenshots, 51 KiB to 757 KiB each)
View File
@ -77,7 +77,6 @@ The following example suggests how to contribute a feature.
.. code-block:: bash

   git checkout -b feature/yourfeaturename

...where 'feature/' designates a feature branch, and 'yourfeaturename' is a name of your choosing

9. Add and commit your changes, including a commit message:
@ -101,8 +100,7 @@ The following example suggests how to contribute a feature.
.. Hint:: The ``-u`` option sets up your locally created new branch to follow a remote branch, which is now created with the same name on your remote repository.

11. Finally, create a new Pull/Merge Request on Gitlab to merge the remote version of this new branch with committed updates, back into the upstream 'develop' branch, finalising the integration of the new feature.
Make sure to set the target branch to 'develop' for features and 'master' for hotfixes. Also, include any relevant milestones, labels, and assignees. By default, the Merge option to 'delete source branch when merge request is accepted' will be checked.
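As a convenience, GitLab can also open the merge request for you at push time via push options (a sketch, assuming a reasonably recent GitLab and the branch name from the earlier steps):

.. code-block:: bash

   git push -u origin feature/yourfeaturename \
       -o merge_request.create \
       -o merge_request.target=develop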
12. Thanks for contributing!
View File
@ -19,7 +19,7 @@
bl_info = {
    "name": "Multi-User",
    "author": "Swann Martinez",
    "version": (0, 5, 0),
    "description": "Enable real-time collaborative workflow inside blender",
    "blender": (2, 82, 0),
    "location": "3D View > Sidebar > Multi-User tab",
@ -43,10 +43,13 @@ from bpy.app.handlers import persistent
from . import environment


module_error_msg = "Insufficient rights to install the multi-user \
dependencies, launch blender with administrator rights."
def register():
    # Setup logging policy
    logging.basicConfig(
@ -55,11 +58,10 @@ def register():
        level=logging.INFO)

    try:
        environment.register()

        from . import presence
        from . import operators
        from . import handlers
        from . import ui
        from . import preferences
        from . import addon_updater_ops
@ -68,7 +70,6 @@ def register():
        addon_updater_ops.register(bl_info)
        presence.register()
        operators.register()
        handlers.register()
        ui.register()
    except ModuleNotFoundError as e:
        raise Exception(module_error_msg)
@ -83,23 +84,17 @@ def register():
        type=preferences.SessionUser
    )
    bpy.types.WindowManager.user_index = bpy.props.IntProperty()

    bpy.types.TOPBAR_MT_file_import.append(operators.menu_func_import)

def unregister():
    from . import presence
    from . import operators
    from . import handlers
    from . import ui
    from . import preferences
    from . import addon_updater_ops
    bpy.types.TOPBAR_MT_file_import.remove(operators.menu_func_import)
    presence.unregister()
    addon_updater_ops.unregister()
    ui.unregister()
    handlers.unregister()
    operators.unregister()
    preferences.unregister()
@ -107,5 +102,3 @@ def unregister():
    del bpy.types.ID.uuid
    del bpy.types.WindowManager.online_users
    del bpy.types.WindowManager.user_index

    environment.unregister()
View File
@ -1688,7 +1688,10 @@ class GitlabEngine(object):
        # Could clash with tag names and if it does, it will
        # download TAG zip instead of branch zip to get
        # direct path, would need.
return f"https://gitlab.com/slumber/multi-user/-/jobs/artifacts/{branch}/download?job=build" return "{}{}{}".format(
self.form_repo_url(updater),
"/repository/archive.zip?sha=",
branch)
    def get_zip_url(self, sha, updater):
        return "{base}/repository/archive.zip?sha={sha}".format(
View File
@ -122,13 +122,13 @@ class addon_updater_install_popup(bpy.types.Operator):
    # if true, run clean install - ie remove all files before adding new
    # equivalent to deleting the addon and reinstalling, except the
    # updater folder/backup folder remains
    clean_install: bpy.props.BoolProperty(
        name="Clean install",
        description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
        default=False,
        options={'HIDDEN'}
    )
    ignore_enum: bpy.props.EnumProperty(
        name="Process update",
        description="Decide to install, ignore, or defer new addon update",
        items=[
@ -264,7 +264,7 @@ class addon_updater_update_now(bpy.types.Operator):
    # if true, run clean install - ie remove all files before adding new
    # equivalent to deleting the addon and reinstalling, except the
    # updater folder/backup folder remains
    clean_install: bpy.props.BoolProperty(
        name="Clean install",
        description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
        default=False,
@ -332,7 +332,7 @@ class addon_updater_update_target(bpy.types.Operator):
            i+=1
        return ret

    target: bpy.props.EnumProperty(
        name="Target version to install",
        description="Select the version to install",
        items=target_version
@ -341,7 +341,7 @@ class addon_updater_update_target(bpy.types.Operator):
    # if true, run clean install - ie remove all files before adding new
    # equivalent to deleting the addon and reinstalling, except the
    # updater folder/backup folder remains
    clean_install: bpy.props.BoolProperty(
        name="Clean install",
        description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
        default=False,
@ -399,7 +399,7 @@ class addon_updater_install_manually(bpy.types.Operator):
bl_description = "Proceed to manually install update" bl_description = "Proceed to manually install update"
bl_options = {'REGISTER', 'INTERNAL'} bl_options = {'REGISTER', 'INTERNAL'}
error: bpy.props.StringProperty( error = bpy.props.StringProperty(
name="Error Occurred", name="Error Occurred",
default="", default="",
options={'HIDDEN'} options={'HIDDEN'}
@ -461,7 +461,7 @@ class addon_updater_updated_successful(bpy.types.Operator):
bl_description = "Update installation response" bl_description = "Update installation response"
bl_options = {'REGISTER', 'INTERNAL', 'UNDO'} bl_options = {'REGISTER', 'INTERNAL', 'UNDO'}
error: bpy.props.StringProperty( error = bpy.props.StringProperty(
name="Error Occurred", name="Error Occurred",
default="", default="",
options={'HIDDEN'} options={'HIDDEN'}
View File
@ -15,7 +15,6 @@
#
# ##### END GPL LICENSE BLOCK #####

import bpy
__all__ = [
    'bl_object',
@ -28,6 +27,7 @@ __all__ = [
    'bl_light',
    'bl_scene',
    'bl_material',
    'bl_armature',
    'bl_action',
    'bl_world',
@ -37,28 +37,12 @@ __all__ = [
    'bl_speaker',
    'bl_font',
    'bl_sound',
    'bl_file',
    'bl_node_group',
    'bl_texture',
    "bl_particle",
]  # Order here defines execution order

if bpy.app.version[1] >= 91:
    __all__.append('bl_volume')

from . import *


def types_to_register():
    return __all__
from replication.protocol import DataTranslationProtocol


def get_data_translation_protocol() -> DataTranslationProtocol:
    """ Return a data translation protocol from implemented bpy types
    """
    bpy_protocol = DataTranslationProtocol()
    for module_name in __all__:
        impl = globals().get(module_name)
        if impl and hasattr(impl, "_type") and hasattr(impl, "_class"):
            bpy_protocol.register_implementation(impl._type, impl._class)
    return bpy_protocol
View File
@ -25,8 +25,8 @@ from enum import Enum
from .. import utils
from .dump_anything import (
    Dumper, Loader, np_dump_collection, np_load_collection, remove_items_from_dict)
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
KEYFRAME = [
    'amplitude',
@ -41,68 +41,8 @@ KEYFRAME = [
    'interpolation',
]
def has_action(datablock):
    """ Check if the datablock has actions
    """
    return (hasattr(datablock, 'animation_data')
            and datablock.animation_data
            and datablock.animation_data.action)
def has_driver(datablock):
    """ Check if the datablock is driven
    """
    return (hasattr(datablock, 'animation_data')
            and datablock.animation_data
            and datablock.animation_data.drivers)
def dump_driver(driver):
    dumper = Dumper()
    dumper.depth = 6
    data = dumper.dump(driver)

    return data
def load_driver(target_datablock, src_driver):
    loader = Loader()
    drivers = target_datablock.animation_data.drivers
    src_driver_data = src_driver['driver']
    new_driver = drivers.new(src_driver['data_path'], index=src_driver['array_index'])

    # Settings
    new_driver.driver.type = src_driver_data['type']
    new_driver.driver.expression = src_driver_data['expression']
    loader.load(new_driver, src_driver)

    # Variables
    for src_variable in src_driver_data['variables']:
        src_var_data = src_driver_data['variables'][src_variable]
        new_var = new_driver.driver.variables.new()
        new_var.name = src_var_data['name']
        new_var.type = src_var_data['type']

        for src_target in src_var_data['targets']:
            src_target_data = src_var_data['targets'][src_target]
            src_id = src_target_data.get('id')
            if src_id:
                new_var.targets[src_target].id = utils.resolve_from_id(src_target_data['id'], src_target_data['id_type'])
            loader.load(new_var.targets[src_target], src_target_data)

    # Fcurve
    new_fcurve = new_driver.keyframe_points
    for p in reversed(new_fcurve):
        new_fcurve.remove(p, fast=True)

    new_fcurve.add(len(src_driver['keyframe_points']))
    for index, src_point in enumerate(src_driver['keyframe_points']):
        new_point = new_fcurve[index]
        loader.load(new_point, src_driver['keyframe_points'][src_point])
def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
""" Dump a sigle curve to a dict """ Dump a sigle curve to a dict
:arg fcurve: fcurve to dump :arg fcurve: fcurve to dump
@ -121,6 +61,7 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
        points = fcurve.keyframe_points
        fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)
        fcurve_data['keyframe_points'] = np_dump_collection(points, KEYFRAME)
    else:  # Legacy method
        dumper = Dumper()
        fcurve_data["keyframe_points"] = []
@ -130,18 +71,6 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
                dumper.dump(k)
            )
    if fcurve.modifiers:
        dumper = Dumper()
        dumper.exclude_filter = [
            'is_valid',
            'active'
        ]
        dumped_modifiers = []
        for modifier in fcurve.modifiers:
            dumped_modifiers.append(dumper.dump(modifier))

        fcurve_data['modifiers'] = dumped_modifiers
    return fcurve_data
@ -154,7 +83,7 @@ def load_fcurve(fcurve_data, fcurve):
    :type fcurve: bpy.types.FCurve
    """
    use_numpy = fcurve_data.get('use_numpy')
    loader = Loader()
    keyframe_points = fcurve.keyframe_points

    # Remove all keyframe points
@ -163,8 +92,7 @@ def load_fcurve(fcurve_data, fcurve):
    if use_numpy:
        keyframe_points.add(fcurve_data['keyframes_count'])
        np_load_collection(
            fcurve_data["keyframe_points"], keyframe_points, KEYFRAME)
    else:
        # paste dumped keyframes
@ -199,102 +127,35 @@ def load_fcurve(fcurve_data, fcurve):
    fcurve.update()
    dumped_fcurve_modifiers = fcurve_data.get('modifiers', None)

    if dumped_fcurve_modifiers:
        # clear modifiers
        for fmod in fcurve.modifiers:
            fcurve.modifiers.remove(fmod)

        # Load each modifier in order
        for modifier_data in dumped_fcurve_modifiers:
            modifier = fcurve.modifiers.new(modifier_data['type'])
            loader.load(modifier, modifier_data)
    elif fcurve.modifiers:
        for fmod in fcurve.modifiers:
            fcurve.modifiers.remove(fmod)
def dump_animation_data(datablock):
    animation_data = {}
    if has_action(datablock):
        animation_data['action'] = datablock.animation_data.action.uuid
    if has_driver(datablock):
        animation_data['drivers'] = []
        for driver in datablock.animation_data.drivers:
            animation_data['drivers'].append(dump_driver(driver))

    return animation_data
def load_animation_data(animation_data, datablock):
    # Load animation data
    if animation_data:
        if datablock.animation_data is None:
            datablock.animation_data_create()

        for d in datablock.animation_data.drivers:
            datablock.animation_data.drivers.remove(d)

        if 'drivers' in animation_data:
            for driver in animation_data['drivers']:
                load_driver(datablock, driver)

        action = animation_data.get('action')
        if action:
            action = resolve_datablock_from_uuid(action, bpy.data.actions)
            datablock.animation_data.action = action
        elif datablock.animation_data.action:
            datablock.animation_data.action = None
    # Remove existing animation data if there is no more to load
    elif hasattr(datablock, 'animation_data') and datablock.animation_data:
        datablock.animation_data_clear()
def resolve_animation_dependencies(datablock):
    if has_action(datablock):
        return [datablock.animation_data.action]
    else:
        return []
class BlAction(ReplicatedDatablock):
    use_delta = True

bl_id = "actions" bl_id = "actions"
bl_class = bpy.types.Action bl_class = bpy.types.Action
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False bl_check_common = False
bl_icon = 'ACTION_TWEAK' bl_icon = 'ACTION_TWEAK'
bl_reload_parent = False
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.actions.new(data["name"])
    @staticmethod
    def load(data: dict, datablock: object):
        for dumped_fcurve in data["fcurves"]:
            dumped_data_path = dumped_fcurve["data_path"]
            dumped_array_index = dumped_fcurve["dumped_array_index"]

            # create fcurve if needed
            fcurve = datablock.fcurves.find(
                dumped_data_path, index=dumped_array_index)
            if fcurve is None:
                fcurve = datablock.fcurves.new(
                    dumped_data_path, index=dumped_array_index)

            load_fcurve(dumped_fcurve, fcurve)
        id_root = data.get('id_root')
        if id_root:
            datablock.id_root = id_root

    @staticmethod
    def dump(datablock: object) -> dict:
        dumper = Dumper()
        dumper.exclude_filter = [
            'name_full',
@ -309,23 +170,11 @@ class BlAction(ReplicatedDatablock):
            'users'
        ]
        dumper.depth = 1
        data = dumper.dump(datablock)

        data["fcurves"] = []
        for fcurve in datablock.fcurves:
            data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True))

        return data
    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.actions)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        return []


_type = bpy.types.Action
_class = BlAction
View File
@ -22,35 +22,22 @@ import mathutils
from .dump_anything import Loader, Dumper
from .. import presence, operators, utils
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
def get_roll(bone: bpy.types.Bone) -> float:
    """ Compute the actual roll of a bone

    :arg bone: target bone
    :type bone: bpy.types.Bone
    :return: float
    """
    return bone.AxisRollFromMatrix(bone.matrix_local.to_3x3())[1]
class BlArmature(ReplicatedDatablock):
    use_delta = True

    bl_id = "armatures"
    bl_class = bpy.types.Armature
    bl_check_common = False
    bl_icon = 'ARMATURE_DATA'
    bl_reload_parent = False
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.armatures.new(data["name"])

    @staticmethod
    def load(data: dict, datablock: object):
        # Load parent object
        parent_object = utils.find_from_attr(
            'uuid',
@ -60,7 +47,7 @@ class BlArmature(ReplicatedDatablock):
        if parent_object is None:
            parent_object = bpy.data.objects.new(
                data['user_name'], datablock)
            parent_object.uuid = data['user']

        is_object_in_master = (
@ -95,10 +82,10 @@ class BlArmature(ReplicatedDatablock):
        bpy.ops.object.mode_set(mode='EDIT')

        for bone in data['bones']:
            if bone not in datablock.edit_bones:
                new_bone = datablock.edit_bones.new(bone)
            else:
                new_bone = datablock.edit_bones[bone]

            bone_data = data['bones'].get(bone)
@ -106,10 +93,10 @@ class BlArmature(ReplicatedDatablock):
            new_bone.head = bone_data['head_local']
            new_bone.tail_radius = bone_data['tail_radius']
            new_bone.head_radius = bone_data['head_radius']
            new_bone.roll = bone_data['roll']

            if 'parent' in bone_data:
                new_bone.parent = datablock.edit_bones[data['bones']
                                                       [bone]['parent']]
            new_bone.use_connect = bone_data['use_connect']
@ -124,10 +111,9 @@ class BlArmature(ReplicatedDatablock):
        if 'EDIT' in current_mode:
            bpy.ops.object.mode_set(mode='EDIT')

        load_animation_data(data.get('animation_data'), datablock)

    @staticmethod
    def dump(datablock: object) -> dict:
        dumper = Dumper()
        dumper.depth = 4
        dumper.include_filter = [
@ -140,15 +126,16 @@ class BlArmature(ReplicatedDatablock):
            'parent',
            'name',
            'layers',
        ]
        data = dumper.dump(datablock)

        for bone in datablock.bones:
            if bone.parent:
                data['bones'][bone.name]['parent'] = bone.parent.name

        # get the parent Object
        # TODO: Use id_data instead
        object_users = utils.get_datablock_users(datablock)[0]
        data['user'] = object_users.uuid
        data['user_name'] = object_users.name
@ -158,26 +145,6 @@ class BlArmature(ReplicatedDatablock):
            item.name for item in container_users if isinstance(item, bpy.types.Collection)]
        data['user_scene'] = [
            item.name for item in container_users if isinstance(item, bpy.types.Scene)]
        for bone in datablock.bones:
            data['bones'][bone.name]['roll'] = get_roll(bone)

        data['animation_data'] = dump_animation_data(datablock)

        return data
    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        name = data.get('name')
        datablock = resolve_datablock_from_uuid(uuid, bpy.data.armatures)
        if datablock is None:
            datablock = bpy.data.armatures.get(name)

        return datablock

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        return resolve_animation_dependencies(datablock)


_type = bpy.types.Armature
_class = BlArmature
View File
@ -20,58 +20,46 @@ import bpy
import mathutils

from .dump_anything import Loader, Dumper
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlCamera(ReplicatedDatablock):
    use_delta = True

    bl_id = "cameras"
    bl_class = bpy.types.Camera
    bl_check_common = False
    bl_icon = 'CAMERA_DATA'
    bl_reload_parent = False
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.cameras.new(data["name"])

    @staticmethod
    def load(data: dict, datablock: object):
        loader = Loader()
        loader.load(datablock, data)

        dof_settings = data.get('dof')

        load_animation_data(data.get('animation_data'), datablock)
        # DOF settings
        if dof_settings:
            loader.load(datablock.dof, dof_settings)

        background_images = data.get('background_images')

        datablock.background_images.clear()

        if background_images:
            for img_name, img_data in background_images.items():
                img_id = img_data.get('image')
                if img_id:
                    target_img = datablock.background_images.new()
                    target_img.image = bpy.data.images[img_id]
                    loader.load(target_img, img_data)

                    img_user = img_data.get('image_user')
                    if img_user:
                        loader.load(target_img.image_user, img_user)
    @staticmethod
    def dump(datablock: object) -> dict:
        dumper = Dumper()
        dumper.depth = 3
        dumper.include_filter = [
@ -112,37 +100,14 @@ class BlCamera(ReplicatedDatablock):
            'scale',
            'use_flip_x',
            'use_flip_y',
            'image_user',
            'image',
            'frame_duration',
            'frame_start',
            'frame_offset',
            'use_cyclic',
            'use_auto_refresh'
        ]
        data = dumper.dump(datablock)
        data['animation_data'] = dump_animation_data(datablock)
        for index, image in enumerate(datablock.background_images):
            if image.image_user:
                data['background_images'][index]['image_user'] = dumper.dump(image.image_user)
        return data
    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.cameras)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        deps = []
        for background in datablock.background_images:
            if background.image:
                deps.append(background.image)

        deps.extend(resolve_animation_dependencies(datablock))

        return deps


_type = bpy.types.Camera
_class = BlCamera
View File
@ -19,12 +19,10 @@
import bpy
import mathutils

from deepdiff import DeepDiff, Delta

from .. import utils
from replication.protocol import ReplicatedDatablock
from .dump_anything import Loader, Dumper
from .bl_datablock import resolve_datablock_from_uuid
def dump_collection_children(collection):
    collection_children = []
@ -73,92 +71,64 @@ def load_collection_childrens(dumped_childrens, collection):
        if child_collection.uuid not in dumped_childrens:
            collection.children.unlink(child_collection)
def resolve_collection_dependencies(collection):
    deps = []

    for child in collection.children:
        deps.append(child)
    for object in collection.objects:
        deps.append(object)

    return deps


class BlCollection(ReplicatedDatablock):
bl_id = "collections" bl_id = "collections"
bl_icon = 'FILE_FOLDER' bl_icon = 'FILE_FOLDER'
bl_class = bpy.types.Collection bl_class = bpy.types.Collection
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = True bl_check_common = True
bl_reload_parent = False
use_delta = True def _construct(self, data):
    @staticmethod
    def construct(data: dict) -> object:
        instance = bpy.data.collections.new(data["name"])
        return instance
    @staticmethod
    def load(data: dict, datablock: object):
        loader = Loader()
        loader.load(datablock, data)

        # Objects
        load_collection_objects(data['objects'], datablock)

        # Link childrens
        load_collection_childrens(data['children'], datablock)
        # FIXME: Find a better way after the replication big refactoring
        # Keep other user from deleting collection object by flushing their history
        utils.flush_history()

    @staticmethod
    def dump(datablock: object) -> dict:
        dumper = Dumper()
        dumper.depth = 1
        dumper.include_filter = [
            "name",
            "instance_offset"
        ]
        data = dumper.dump(datablock)

        # dump objects
        data['objects'] = dump_collection_objects(datablock)

        # dump children collections
        data['children'] = dump_collection_children(datablock)

        return data
    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.collections)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        return resolve_collection_dependencies(datablock)

    @staticmethod
    def compute_delta(last_data: dict, current_data: dict) -> Delta:
        diff_params = {
            'ignore_order': True,
            'report_repetition': True
        }
        delta_params = {
            # 'mutate': True
        }

        return Delta(
            DeepDiff(last_data,
                     current_data,
                     cache_size=5000,
                     **diff_params),
            **delta_params)


_type = bpy.types.Collection
_class = BlCollection
View File
@ -21,14 +21,11 @@ import bpy.types as T
import mathutils
import logging

from ..utils import get_preferences
from replication.protocol import ReplicatedDatablock
from .dump_anything import (Dumper, Loader,
                            np_load_collection,
                            np_dump_collection)
from .bl_material import dump_materials_slots, load_materials_slots
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
SPLINE_BEZIER_POINT = [
@ -71,6 +68,8 @@ CURVE_METADATA = [
    'font_bold',
    'font_bold_italic',
    'font_italic',
    'name',
    'offset',
    'offset_x',
@ -80,6 +79,7 @@ CURVE_METADATA = [
    'override_create',
    'override_library',
    'path_duration',
    'render_resolution_u',
    'render_resolution_v',
    'resolution_u',
@ -113,6 +113,8 @@ CURVE_METADATA = [
]

SPLINE_METADATA = [
    'hide',
    'material_index',
@ -136,59 +138,57 @@ SPLINE_METADATA = [
]
class BlCurve(ReplicatedDatablock):
    use_delta = True

    bl_id = "curves"
    bl_class = bpy.types.Curve
    bl_check_common = False
    bl_icon = 'CURVE_DATA'
    bl_reload_parent = False
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.curves.new(data["name"], data["type"])

    @staticmethod
    def load(data: dict, datablock: object):
        load_animation_data(data.get('animation_data'), datablock)

        loader = Loader()
        loader.load(datablock, data)

        datablock.splines.clear()

        # load splines
        for spline in data['splines'].values():
            new_spline = datablock.splines.new(spline['type'])

            # Load curve geometry data
            if new_spline.type == 'BEZIER':
                bezier_points = new_spline.bezier_points
                bezier_points.add(spline['bezier_points_count'])
                np_load_collection(
                    spline['bezier_points'],
                    bezier_points,
                    SPLINE_BEZIER_POINT)

            if new_spline.type in ['POLY', 'NURBS']:
                points = new_spline.points
                points.add(spline['points_count'])
                np_load_collection(spline['points'], points, SPLINE_POINT)

            loader.load(new_spline, spline)
# MATERIAL SLOTS def _dump_implementation(self, data, instance=None):
src_materials = data.get('materials', None) assert(instance)
if src_materials:
load_materials_slots(src_materials, datablock.materials)
@staticmethod
def dump(datablock: object) -> dict:
dumper = Dumper() dumper = Dumper()
# Conflicting attributes # Conflicting attributes
# TODO: remove them with the NURBS support # TODO: remove them with the NURBS support
dumper.include_filter = CURVE_METADATA dumper.include_filter = CURVE_METADATA
dumper.exclude_filter = [ dumper.exclude_filter = [
'users', 'users',
'order_u', 'order_u',
@ -197,50 +197,38 @@ class BlCurve(ReplicatedDatablock):
             'point_count_u',
             'active_textbox'
         ]
-        if datablock.use_auto_texspace:
+        if instance.use_auto_texspace:
             dumper.exclude_filter.extend([
                 'texspace_location',
                 'texspace_size'])
-        data = dumper.dump(datablock)
-        data['animation_data'] = dump_animation_data(datablock)
+        data = dumper.dump(instance)
         data['splines'] = {}

-        for index, spline in enumerate(datablock.splines):
+        for index, spline in enumerate(instance.splines):
             dumper.depth = 2
             dumper.include_filter = SPLINE_METADATA
             spline_data = dumper.dump(spline)

-            if spline.type == 'POLY':
-                spline_data['points_count'] = len(spline.points)-1
-                spline_data['points'] = np_dump_collection(
-                    spline.points, SPLINE_POINT)
-
-            spline_data['bezier_points_count'] = len(spline.bezier_points)-1
-            spline_data['bezier_points'] = np_dump_collection(
-                spline.bezier_points, SPLINE_BEZIER_POINT)
+            spline_data['points_count'] = len(spline.points)-1
+            spline_data['points'] = np_dump_collection(spline.points, SPLINE_POINT)
+
+            spline_data['bezier_points_count'] = len(spline.bezier_points)-1
+            spline_data['bezier_points'] = np_dump_collection(spline.bezier_points, SPLINE_BEZIER_POINT)
             data['splines'][index] = spline_data

-        if isinstance(datablock, T.SurfaceCurve):
+        if isinstance(instance, T.SurfaceCurve):
             data['type'] = 'SURFACE'
-        elif isinstance(datablock, T.TextCurve):
+        elif isinstance(instance, T.TextCurve):
             data['type'] = 'FONT'
-        elif isinstance(datablock, T.Curve):
+        elif isinstance(instance, T.Curve):
             data['type'] = 'CURVE'

-        data['materials'] = dump_materials_slots(datablock.materials)
         return data

-    @staticmethod
-    def resolve(data: dict) -> object:
-        uuid = data.get('uuid')
-        return resolve_datablock_from_uuid(uuid, bpy.data.curves)
-
-    @staticmethod
-    def resolve_deps(datablock: object) -> [object]:
+    def _resolve_deps_implementation(self):
         # TODO: resolve material
         deps = []
-        curve = datablock
+        curve = self.instance
         if isinstance(curve, T.TextCurve):
             deps.extend([
@ -249,19 +237,4 @@ class BlCurve(ReplicatedDatablock):
                 curve.font_bold_italic,
                 curve.font_italic])

-        for material in datablock.materials:
-            if material:
-                deps.append(material)
-
-        deps.extend(resolve_animation_dependencies(datablock))
         return deps

-    @staticmethod
-    def needs_update(datablock: object, data: dict) -> bool:
-        return 'EDIT' not in bpy.context.mode \
-            or get_preferences().sync_flags.sync_during_editmode
-
-_type = [bpy.types.Curve, bpy.types.TextCurve]
-_class = BlCurve
View File
@ -21,15 +21,78 @@ from collections.abc import Iterable
 import bpy
 import mathutils

-from replication.constants import DIFF_BINARY, DIFF_JSON, UP
-from replication.protocol import ReplicatedDatablock
+from replication.constants import DIFF_BINARY, UP
+from replication.data import ReplicatedDatablock

 from .. import utils
 from .dump_anything import Dumper, Loader
def has_action(target):
""" Check if the target datablock has actions
"""
return (hasattr(target, 'animation_data')
and target.animation_data
and target.animation_data.action)
def has_driver(target):
""" Check if the target datablock is driven
"""
return (hasattr(target, 'animation_data')
and target.animation_data
and target.animation_data.drivers)
def dump_driver(driver):
dumper = Dumper()
dumper.depth = 6
data = dumper.dump(driver)
return data
def load_driver(target_datablock, src_driver):
loader = Loader()
drivers = target_datablock.animation_data.drivers
src_driver_data = src_driver['driver']
new_driver = drivers.new(src_driver['data_path'])
# Settings
new_driver.driver.type = src_driver_data['type']
new_driver.driver.expression = src_driver_data['expression']
loader.load(new_driver, src_driver)
# Variables
for src_variable in src_driver_data['variables']:
src_var_data = src_driver_data['variables'][src_variable]
new_var = new_driver.driver.variables.new()
new_var.name = src_var_data['name']
new_var.type = src_var_data['type']
for src_target in src_var_data['targets']:
src_target_data = src_var_data['targets'][src_target]
new_var.targets[src_target].id = utils.resolve_from_id(
src_target_data['id'], src_target_data['id_type'])
loader.load(
new_var.targets[src_target], src_target_data)
# Fcurve
new_fcurve = new_driver.keyframe_points
for p in reversed(new_fcurve):
new_fcurve.remove(p, fast=True)
new_fcurve.add(len(src_driver['keyframe_points']))
for index, src_point in enumerate(src_driver['keyframe_points']):
new_point = new_fcurve[index]
loader.load(new_point, src_driver['keyframe_points'][src_point])
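For orientation, a usage sketch of the driver helpers above (run inside Blender; the object names and the presence of an existing driver are assumptions):

    import bpy

    src_ob = bpy.data.objects['Cube']            # hypothetical: carries a driver
    dumped = dump_driver(src_ob.animation_data.drivers[0])

    dst_ob = bpy.data.objects['Cube.001']        # hypothetical target
    if dst_ob.animation_data is None:
        dst_ob.animation_data_create()
    load_driver(dst_ob, dumped)                  # recreates fcurve, variables, keyframes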
def get_datablock_from_uuid(uuid, default, ignore=[]): def get_datablock_from_uuid(uuid, default, ignore=[]):
if not uuid: if not uuid:
return default return default
for category in dir(bpy.data): for category in dir(bpy.data):
root = getattr(bpy.data, category) root = getattr(bpy.data, category)
if isinstance(root, Iterable) and category not in ignore: if isinstance(root, Iterable) and category not in ignore:
@ -38,8 +101,127 @@ def get_datablock_from_uuid(uuid, default, ignore=[]):
return item return item
return default return default
-def resolve_datablock_from_uuid(uuid, bpy_collection):
-    for item in bpy_collection:
-        if getattr(item, 'uuid', None) == uuid:
-            return item
-    return None
+class BlDatablock(ReplicatedDatablock):
+    """BlDatablock
+
+    bl_id : blender internal storage identifier
+    bl_class : blender internal type
+    bl_delay_refresh : refresh rate in second for observers
+    bl_delay_apply : refresh rate in sec for apply
+    bl_automatic_push : boolean
+    bl_icon : type icon (blender icon name)
+    bl_check_common: enable check even in common rights
+    """
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None)
self.preferences = utils.get_preferences()
# TODO: use is_library_indirect
self.is_library = (instance and hasattr(instance, 'library') and
instance.library) or \
(self.data and 'library' in self.data)
if instance and hasattr(instance, 'uuid'):
instance.uuid = self.uuid
self.diff_method = DIFF_BINARY
def resolve(self):
datablock_ref = None
datablock_root = getattr(bpy.data, self.bl_id)
datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root)
if not datablock_ref:
try:
datablock_ref = datablock_root[self.data['name']]
except Exception:
name = self.data.get('name')
logging.debug(f"Constructing {name}")
datablock_ref = self._construct(data=self.data)
if datablock_ref:
setattr(datablock_ref, 'uuid', self.uuid)
self.instance = datablock_ref
def remove_instance(self):
"""
Remove instance from blender data
"""
assert(self.instance)
datablock_root = getattr(bpy.data, self.bl_id)
datablock_root.remove(self.instance)
def _dump(self, instance=None):
dumper = Dumper()
data = {}
# Dump animation data
if has_action(instance):
dumper = Dumper()
dumper.include_filter = ['action']
data['animation_data'] = dumper.dump(instance.animation_data)
if has_driver(instance):
dumped_drivers = {'animation_data': {'drivers': []}}
for driver in instance.animation_data.drivers:
dumped_drivers['animation_data']['drivers'].append(
dump_driver(driver))
data.update(dumped_drivers)
if self.is_library:
data.update(dumper.dump(instance))
else:
data.update(self._dump_implementation(data, instance=instance))
return data
def _dump_implementation(self, data, target):
raise NotImplementedError
def _load(self, data, target):
# Load animation data
if 'animation_data' in data.keys():
if target.animation_data is None:
target.animation_data_create()
for d in target.animation_data.drivers:
target.animation_data.drivers.remove(d)
if 'drivers' in data['animation_data']:
for driver in data['animation_data']['drivers']:
load_driver(target, driver)
if 'action' in data['animation_data']:
target.animation_data.action = bpy.data.actions[data['animation_data']['action']]
if self.is_library:
return
else:
self._load_implementation(data, target)
def _load_implementation(self, data, target):
raise NotImplementedError
def resolve_deps(self):
dependencies = []
if has_action(self.instance):
dependencies.append(self.instance.animation_data.action)
if not self.is_library:
dependencies.extend(self._resolve_deps_implementation())
logging.debug(f"{self.instance.name} dependencies: {dependencies}")
return dependencies
def _resolve_deps_implementation(self):
return []
def is_valid(self):
return getattr(bpy.data, self.bl_id).get(self.data['name'])
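The class above is a template: concrete types only fill in the _construct/_load_implementation/_dump_implementation hooks while BlDatablock handles animation data, libraries and dependency resolution. A minimal sketch of a subclass under the restored API (BlTexture and its filters are illustrative, not part of this diff):

    import bpy
    from .dump_anything import Dumper, Loader

    class BlTexture(BlDatablock):
        bl_id = "textures"
        bl_class = bpy.types.Texture
        bl_check_common = False
        bl_icon = 'TEXTURE'

        def _construct(self, data):
            return bpy.data.textures.new(data['name'], data['type'])

        def _load_implementation(self, data, target):
            Loader().load(target, data)

        def _dump_implementation(self, data, instance=None):
            dumper = Dumper()
            dumper.include_filter = ['name', 'type']
            return dumper.dump(instance)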
View File
@ -19,15 +19,14 @@
import logging import logging
import os import os
import sys import sys
-from pathlib import Path, WindowsPath, PosixPath
+from pathlib import Path

 import bpy
 import mathutils
 from replication.constants import DIFF_BINARY, UP
-from replication.protocol import ReplicatedDatablock
+from replication.data import ReplicatedDatablock

 from .. import utils
-from ..utils import get_preferences
 from .dump_anything import Dumper, Loader
@ -55,20 +54,37 @@ class BlFile(ReplicatedDatablock):
     bl_id = 'file'
     bl_name = "file"
     bl_class = Path
+    bl_delay_refresh = 0
+    bl_delay_apply = 1
+    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'FILE'
-    bl_reload_parent = True

-    @staticmethod
-    def construct(data: dict) -> object:
-        return Path(get_filepath(data['name']))
-
-    @staticmethod
-    def resolve(data: dict) -> object:
-        return Path(get_filepath(data['name']))
-
-    @staticmethod
-    def dump(datablock: object) -> dict:
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.instance = kwargs.get('instance', None)
+        if self.instance and not self.instance.exists():
+            raise FileNotFoundError(str(self.instance))
+
+        self.preferences = utils.get_preferences()
+        self.diff_method = DIFF_BINARY
+
+    def resolve(self):
+        if self.data:
+            self.instance = Path(get_filepath(self.data['name']))
+            if not self.instance.exists():
+                logging.debug("File don't exist, loading it.")
+                self._load(self.data, self.instance)
+
+    def push(self, socket, identity=None):
+        super().push(socket, identity=None)
+        if self.preferences.clear_memory_filecache:
+            del self.data['file']
+
+    def _dump(self, instance=None):
""" """
Read the file and return a dict as: Read the file and return a dict as:
{ {
@ -80,62 +96,48 @@ class BlFile(ReplicatedDatablock):
logging.info(f"Extracting file metadata") logging.info(f"Extracting file metadata")
data = { data = {
'name': datablock.name, 'name': self.instance.name,
} }
logging.info(f"Reading {datablock.name} content: {datablock.stat().st_size} bytes") logging.info(
f"Reading {self.instance.name} content: {self.instance.stat().st_size} bytes")
try: try:
file = open(datablock, "rb") file = open(self.instance, "rb")
data['file'] = file.read() data['file'] = file.read()
file.close() file.close()
except IOError: except IOError:
logging.warning(f"{datablock} doesn't exist, skipping") logging.warning(f"{self.instance} doesn't exist, skipping")
else: else:
file.close() file.close()
return data return data
@staticmethod def _load(self, data, target):
def load(data: dict, datablock: object):
""" """
Writing the file Writing the file
""" """
# TODO: check for empty data
if target.exists() and not self.diff():
logging.info(f"{data['name']} already on the disk, skipping.")
return
try: try:
file = open(datablock, "wb") file = open(target, "wb")
file.write(data['file']) file.write(data['file'])
if get_preferences().clear_memory_filecache: if self.preferences.clear_memory_filecache:
del data['file'] del self.data['file']
except IOError: except IOError:
logging.warning(f"{datablock} doesn't exist, skipping") logging.warning(f"{target} doesn't exist, skipping")
else: else:
file.close() file.close()
-    @staticmethod
-    def resolve_deps(datablock: object) -> [object]:
-        return []
-
-    @staticmethod
-    def needs_update(datablock: object, data:dict)-> bool:
-        if get_preferences().clear_memory_filecache:
-            return False
-        else:
-            if not datablock:
-                return None
-            if not data:
-                return True
-            memory_size = sys.getsizeof(data['file'])-33
-            disk_size = datablock.stat().st_size
-            if memory_size != disk_size:
-                return True
-            else:
-                return False
-
-_type = [WindowsPath, PosixPath]
-_class = BlFile
+    def diff(self):
+        if self.preferences.clear_memory_filecache:
+            return False
+        else:
+            memory_size = sys.getsizeof(self.data['file'])-33
+            disk_size = self.instance.stat().st_size
+            return memory_size == disk_size
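The -33 in both size checks is not arbitrary: on a 64-bit CPython build, sys.getsizeof() on a bytes object counts the object header as well as the payload, and an empty bytes object reports 33 bytes, so subtracting it approximates the raw file content held in memory before comparing it with the size on disk. A quick sanity check of that assumption:

    import sys

    payload = b"some file content"
    assert sys.getsizeof(b"") == 33                    # 64-bit CPython builds
    assert sys.getsizeof(payload) - 33 == len(payload)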
View File
@ -22,20 +22,21 @@ from pathlib import Path
 import bpy

-from replication.protocol import ReplicatedDatablock
+from .bl_datablock import BlDatablock
 from .bl_file import get_filepath, ensure_unpacked
 from .dump_anything import Dumper, Loader
-from .bl_datablock import resolve_datablock_from_uuid


-class BlFont(ReplicatedDatablock):
+class BlFont(BlDatablock):
     bl_id = "fonts"
     bl_class = bpy.types.VectorFont
+    bl_delay_refresh = 1
+    bl_delay_apply = 1
+    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'FILE_FONT'
-    bl_reload_parent = False

-    @staticmethod
-    def construct(data: dict) -> object:
+    def _construct(self, data):
         filename = data.get('filename')

         if filename == '<builtin>':
@ -43,43 +44,31 @@ class BlFont(ReplicatedDatablock):
         else:
             return bpy.data.fonts.load(get_filepath(filename))

-    @staticmethod
-    def load(data: dict, datablock: object):
+    def _load(self, data, target):
         pass

-    @staticmethod
-    def dump(datablock: object) -> dict:
-        if datablock.filepath == '<builtin>':
+    def _dump(self, instance=None):
+        if instance.filepath == '<builtin>':
             filename = '<builtin>'
         else:
-            filename = Path(datablock.filepath).name
+            filename = Path(instance.filepath).name

         if not filename:
-            raise FileExistsError(datablock.filepath)
+            raise FileExistsError(instance.filepath)

         return {
             'filename': filename,
-            'name': datablock.name
+            'name': instance.name
         }

-    @staticmethod
-    def resolve(data: dict) -> object:
-        uuid = data.get('uuid')
-        return resolve_datablock_from_uuid(uuid, bpy.data.fonts)
-
-    @staticmethod
-    def resolve_deps(datablock: object) -> [object]:
-        deps = []
-        if datablock.filepath and datablock.filepath != '<builtin>':
-            ensure_unpacked(datablock)
-            deps.append(Path(bpy.path.abspath(datablock.filepath)))
-        return deps
-
-    @staticmethod
-    def needs_update(datablock: object, data:dict)-> bool:
+    def diff(self):
         return False

-_type = bpy.types.VectorFont
-_class = BlFont
+    def _resolve_deps_implementation(self):
+        deps = []
+        if self.instance.filepath and self.instance.filepath != '<builtin>':
+            ensure_unpacked(self.instance)
+            deps.append(Path(bpy.path.abspath(self.instance.filepath)))
+        return deps
View File
@ -24,11 +24,10 @@ from .dump_anything import (Dumper,
                             Loader,
                             np_dump_collection,
                             np_load_collection)
-from replication.protocol import ReplicatedDatablock
-from .bl_datablock import resolve_datablock_from_uuid
-from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
-from ..utils import get_preferences
+from .bl_datablock import BlDatablock

-# GPencil data api is structured as it follow:
-#  GP-Object --> GP-Layers --> GP-Frames --> GP-Strokes --> GP-Stroke-Points
STROKE_POINT = [ STROKE_POINT = [
'co', 'co',
@ -39,24 +38,6 @@ STROKE_POINT = [
] ]
-STROKE = [
-    "aspect",
-    "display_mode",
-    "end_cap_mode",
-    "hardness",
-    "line_width",
-    "material_index",
-    "start_cap_mode",
-    "uv_rotation",
-    "uv_scale",
-    "uv_translation",
-    "vertex_color_fill",
-]
-
-if bpy.app.version[1] >= 91:
-    STROKE.append('use_cyclic')
-else:
-    STROKE.append('draw_cyclic')
if bpy.app.version[1] >= 83: if bpy.app.version[1] >= 83:
STROKE_POINT.append('vertex_color') STROKE_POINT.append('vertex_color')
@ -107,12 +88,12 @@ def load_stroke(stroke_data, stroke):
""" """
assert(stroke and stroke_data) assert(stroke and stroke_data)
stroke.points.add(stroke_data["p_count"]) loader = Loader()
np_load_collection(stroke_data['points'], stroke.points, STROKE_POINT) loader.load(stroke, stroke_data)
# HACK: Temporary fix to trigger a BKE_gpencil_stroke_geometry_update to stroke.points.add(stroke_data["p_count"])
# fix fill issues
stroke.uv_scale = stroke_data["uv_scale"] np_load_collection(stroke_data['points'], stroke.points, STROKE_POINT)
def dump_frame(frame): def dump_frame(frame):
@ -127,11 +108,11 @@ def dump_frame(frame):
     dumped_frame = dict()
     dumped_frame['frame_number'] = frame.frame_number
-    dumped_frame['strokes'] = np_dump_collection(frame.strokes, STROKE)
-    dumped_frame['strokes_points'] = []
+    dumped_frame['strokes'] = []

     # TODO: took existing strokes in account
     for stroke in frame.strokes:
-        dumped_frame['strokes_points'].append(dump_stroke(stroke))
+        dumped_frame['strokes'].append(dump_stroke(stroke))

     return dumped_frame

@ -147,12 +128,14 @@ def load_frame(frame_data, frame):

     assert(frame and frame_data)

-    for stroke_data in frame_data['strokes_points']:
+    # frame.frame_number = frame_data['frame_number']
+
+    # TODO: took existing stroke in account
+    for stroke_data in frame_data['strokes']:
         target_stroke = frame.strokes.new()
         load_stroke(stroke_data, target_stroke)
-
-    np_load_collection(frame_data['strokes'], frame.strokes, STROKE)
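A round-trip sketch for the stroke helpers (run inside Blender; the grease pencil datablock and its first layer/frame/stroke are assumptions, and dump_stroke is defined earlier in this file):

    import bpy

    gp = bpy.data.grease_pencils['GP']           # hypothetical datablock
    frame = gp.layers[0].frames[0]

    dumped = dump_stroke(frame.strokes[0])       # {'p_count': ..., 'points': ...}

    replayed = frame.strokes.new()
    load_stroke(dumped, replayed)                # same point data restored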
def dump_layer(layer): def dump_layer(layer):
""" Dump a grease pencil layer """ Dump a grease pencil layer
@ -200,9 +183,6 @@ def dump_layer(layer):
# 'parent_bone', # 'parent_bone',
# 'matrix_inverse', # 'matrix_inverse',
] ]
-    if layer.id_data.is_annotation:
-        dumper.include_filter.append('thickness')
dumped_layer = dumper.dump(layer) dumped_layer = dumper.dump(layer)
dumped_layer['frames'] = [] dumped_layer['frames'] = []
@ -231,58 +211,50 @@ def load_layer(layer_data, layer):
load_frame(frame_data, target_frame) load_frame(frame_data, target_frame)
-def layer_changed(datablock: object, data: dict) -> bool:
-    if datablock.layers.active and \
-            datablock.layers.active.info != data["active_layers"]:
-        return True
-    else:
-        return False
-
-
-def frame_changed(data: dict) -> bool:
-    return bpy.context.scene.frame_current != data["eval_frame"]
-
-
-class BlGpencil(ReplicatedDatablock):
+class BlGpencil(BlDatablock):
     bl_id = "grease_pencils"
     bl_class = bpy.types.GreasePencil
+    bl_delay_refresh = 2
+    bl_delay_apply = 1
+    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'GREASEPENCIL'
-    bl_reload_parent = False

-    @staticmethod
-    def construct(data: dict) -> object:
+    def _construct(self, data):
         return bpy.data.grease_pencils.new(data["name"])

-    @staticmethod
-    def load(data: dict, datablock: object):
-        datablock.materials.clear()
+    def _load_implementation(self, data, target):
+        target.materials.clear()
         if "materials" in data.keys():
             for mat in data['materials']:
-                datablock.materials.append(bpy.data.materials[mat])
+                target.materials.append(bpy.data.materials[mat])

         loader = Loader()
-        loader.load(datablock, data)
+        loader.load(target, data)

         # TODO: reuse existing layer
-        for layer in datablock.layers:
-            datablock.layers.remove(layer)
+        for layer in target.layers:
+            target.layers.remove(layer)

         if "layers" in data.keys():
             for layer in data["layers"]:
                 layer_data = data["layers"].get(layer)

-                # if layer not in datablock.layers.keys():
-                target_layer = datablock.layers.new(data["layers"][layer]["info"])
+                # if layer not in target.layers.keys():
+                target_layer = target.layers.new(data["layers"][layer]["info"])
                 # else:
                 #     target_layer = target.layers[layer]
                 #     target_layer.clear()

                 load_layer(layer_data, target_layer)

-        datablock.layers.update()
-
-    @staticmethod
-    def dump(datablock: object) -> dict:
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
dumper = Dumper() dumper = Dumper()
dumper.depth = 2 dumper.depth = 2
dumper.include_filter = [ dumper.include_filter = [
@ -293,37 +265,19 @@ class BlGpencil(ReplicatedDatablock):
'pixel_factor', 'pixel_factor',
'stroke_depth_order' 'stroke_depth_order'
] ]
-        data = dumper.dump(datablock)
+        data = dumper.dump(instance)
         data['layers'] = {}

-        for layer in datablock.layers:
+        for layer in instance.layers:
             data['layers'][layer.info] = dump_layer(layer)

-        data["active_layers"] = datablock.layers.active.info if datablock.layers.active else "None"
-        data["eval_frame"] = bpy.context.scene.frame_current
         return data

-    @staticmethod
-    def resolve(data: dict) -> object:
-        uuid = data.get('uuid')
-        return resolve_datablock_from_uuid(uuid, bpy.data.grease_pencils)
-
-    @staticmethod
-    def resolve_deps(datablock: object) -> [object]:
+    def _resolve_deps_implementation(self):
         deps = []
-        for material in datablock.materials:
+        for material in self.instance.materials:
             deps.append(material)

         return deps
-
-    @staticmethod
-    def needs_update(datablock: object, data: dict) -> bool:
-        return bpy.context.mode == 'OBJECT' \
-            or layer_changed(datablock, data) \
-            or frame_changed(data) \
-            or get_preferences().sync_flags.sync_during_editmode
-
-_type = bpy.types.GreasePencil
-_class = BlGpencil
View File
@ -24,12 +24,9 @@ import bpy
import mathutils import mathutils
from .. import utils from .. import utils
-from replication.protocol import ReplicatedDatablock
+from .bl_datablock import BlDatablock
 from .dump_anything import Dumper, Loader
 from .bl_file import get_filepath, ensure_unpacked
-from .bl_datablock import resolve_datablock_from_uuid
-from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
format_to_ext = { format_to_ext = {
'BMP': 'bmp', 'BMP': 'bmp',
@ -51,36 +48,35 @@ format_to_ext = {
} }
-class BlImage(ReplicatedDatablock):
+class BlImage(BlDatablock):
     bl_id = "images"
     bl_class = bpy.types.Image
+    bl_delay_refresh = 1
+    bl_delay_apply = 1
+    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'IMAGE_DATA'
-    bl_reload_parent = False

-    @staticmethod
-    def construct(data: dict) -> object:
+    def _construct(self, data):
         return bpy.data.images.new(
             name=data['name'],
             width=data['size'][0],
             height=data['size'][1]
         )

-    @staticmethod
-    def load(data: dict, datablock: object):
+    def _load(self, data, target):
         loader = Loader()
-        loader.load(data, datablock)
-        datablock.source = 'FILE'
-        datablock.filepath_raw = get_filepath(data['filename'])
-        color_space_name = data["colorspace_settings"]["name"]
-
-        if color_space_name:
-            datablock.colorspace_settings.name = color_space_name
+        loader.load(data, target)
+        target.source = 'FILE'
+        target.filepath_raw = get_filepath(data['filename'])
+        target.colorspace_settings.name = data["colorspace_settings"]["name"]

-    @staticmethod
-    def dump(datablock: object) -> dict:
-        filename = Path(datablock.filepath).name
+    def _dump(self, instance=None):
+        assert(instance)
+
+        filename = Path(instance.filepath).name

         data = {
             "filename": filename
@ -90,52 +86,38 @@ class BlImage(ReplicatedDatablock):
dumper.depth = 2 dumper.depth = 2
dumper.include_filter = [ dumper.include_filter = [
"name", "name",
-            # 'source',
'size', 'size',
'height', 'height',
'alpha', 'alpha',
'float_buffer', 'float_buffer',
'alpha_mode', 'alpha_mode',
'colorspace_settings'] 'colorspace_settings']
-        data.update(dumper.dump(datablock))
+        data.update(dumper.dump(instance))
         return data

-    @staticmethod
-    def resolve(data: dict) -> object:
-        uuid = data.get('uuid')
-        return resolve_datablock_from_uuid(uuid, bpy.data.images)
-
-    @staticmethod
-    def resolve_deps(datablock: object) -> [object]:
-        deps = []
-        if datablock.packed_file:
-            filename = Path(bpy.path.abspath(datablock.filepath)).name
-            datablock.filepath_raw = get_filepath(filename)
-            datablock.save()
-            # An image can't be unpacked to the modified path
-            # TODO: make a bug report
-            datablock.unpack(method="REMOVE")
-        elif datablock.source == "GENERATED":
-            filename = f"{datablock.name}.png"
-            datablock.filepath = get_filepath(filename)
-            datablock.save()
-        if datablock.filepath:
-            deps.append(Path(bpy.path.abspath(datablock.filepath)))
-        return deps
-
-    @staticmethod
-    def needs_update(datablock: object, data:dict)-> bool:
-        if datablock.is_dirty:
-            datablock.save()
-        if not data or (datablock and (datablock.name != data.get('name'))):
+    def diff(self):
+        if self.instance and (self.instance.name != self.data['name']):
             return True
         else:
             return False

-_type = bpy.types.Image
-_class = BlImage
+    def _resolve_deps_implementation(self):
+        deps = []
+        if self.instance.filepath:
+            if self.instance.packed_file:
+                filename = Path(bpy.path.abspath(self.instance.filepath)).name
+                self.instance.filepath_raw = get_filepath(filename)
+                self.instance.save()
+                # An image can't be unpacked to the modified path
+                # TODO: make a bug report
+                self.instance.unpack(method="REMOVE")
+            elif self.instance.source == "GENERATED":
+                filename = f"{self.instance.name}.png"
+                self.instance.filepath = get_filepath(filename)
+                self.instance.save()
+            deps.append(Path(bpy.path.abspath(self.instance.filepath)))
+        return deps
View File
@ -20,41 +20,35 @@ import bpy
import mathutils import mathutils
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
-from replication.protocol import ReplicatedDatablock
+from .bl_datablock import BlDatablock
 from replication.exception import ContextError
-from .bl_datablock import resolve_datablock_from_uuid
-from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
POINT = ['co', 'weight_softbody', 'co_deform'] POINT = ['co', 'weight_softbody', 'co_deform']
-class BlLattice(ReplicatedDatablock):
-    use_delta = True
+class BlLattice(BlDatablock):
     bl_id = "lattices"
     bl_class = bpy.types.Lattice
+    bl_delay_refresh = 1
+    bl_delay_apply = 1
+    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'LATTICE_DATA'
-    bl_reload_parent = False

-    @staticmethod
-    def construct(data: dict) -> object:
+    def _construct(self, data):
         return bpy.data.lattices.new(data["name"])

-    @staticmethod
-    def load(data: dict, datablock: object):
-        load_animation_data(data.get('animation_data'), datablock)
-        if datablock.is_editmode:
+    def _load_implementation(self, data, target):
+        if target.is_editmode:
             raise ContextError("lattice is in edit mode")

         loader = Loader()
-        loader.load(datablock, data)
+        loader.load(target, data)

-        np_load_collection(data['points'], datablock.points, POINT)
+        np_load_collection(data['points'], target.points, POINT)

-    @staticmethod
-    def dump(datablock: object) -> dict:
-        if datablock.is_editmode:
+    def _dump_implementation(self, data, instance=None):
+        if instance.is_editmode:
             raise ContextError("lattice is in edit mode")
dumper = Dumper() dumper = Dumper()
@ -70,20 +64,9 @@ class BlLattice(ReplicatedDatablock):
'interpolation_type_w', 'interpolation_type_w',
'use_outside' 'use_outside'
] ]
-        data = dumper.dump(datablock)
-
-        data['points'] = np_dump_collection(datablock.points, POINT)
-        data['animation_data'] = dump_animation_data(datablock)
+        data = dumper.dump(instance)
+        data['points'] = np_dump_collection(instance.points, POINT)
         return data

-    @staticmethod
-    def resolve(data: dict) -> object:
-        uuid = data.get('uuid')
-        return resolve_datablock_from_uuid(uuid, bpy.data.lattices)
-
-    @staticmethod
-    def resolve_deps(datablock: object) -> [object]:
-        return resolve_animation_dependencies(datablock)
-
-_type = bpy.types.Lattice
-_class = BlLattice
View File
@ -0,0 +1,47 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
import mathutils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
class BlLibrary(BlDatablock):
bl_id = "libraries"
bl_class = bpy.types.Library
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'LIBRARY_DATA_DIRECT'
def _construct(self, data):
with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData):
targetData = sourceData
return sourceData
def _load(self, data, target):
pass
def _dump(self, instance=None):
assert(instance)
dumper = Dumper()
return dumper.dump(instance)
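The _construct above leans on Blender's link/append context manager; a standalone sketch of that idiom (the library path is hypothetical):

    import bpy

    # data_from lists what the .blend on disk contains, data_to what gets
    # brought in; assigning one to the other links every object.
    with bpy.data.libraries.load("//assets/props.blend", link=True) as (sourceData, targetData):
        targetData.objects = sourceData.objects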
View File
@ -20,34 +20,27 @@ import bpy
import mathutils import mathutils
from .dump_anything import Loader, Dumper from .dump_anything import Loader, Dumper
-from replication.protocol import ReplicatedDatablock
-from .bl_datablock import resolve_datablock_from_uuid
-from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
+from .bl_datablock import BlDatablock
-class BlLight(ReplicatedDatablock):
-    use_delta = True
+class BlLight(BlDatablock):
     bl_id = "lights"
     bl_class = bpy.types.Light
+    bl_delay_refresh = 1
+    bl_delay_apply = 1
+    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'LIGHT_DATA'
-    bl_reload_parent = False

-    @staticmethod
-    def construct(data: dict) -> object:
-        instance = bpy.data.lights.new(data["name"], data["type"])
-        instance.uuid = data.get("uuid")
-        return instance
+    def _construct(self, data):
+        return bpy.data.lights.new(data["name"], data["type"])

-    @staticmethod
-    def load(data: dict, datablock: object):
+    def _load_implementation(self, data, target):
         loader = Loader()
-        loader.load(datablock, data)
-        load_animation_data(data.get('animation_data'), datablock)
+        loader.load(target, data)

-    @staticmethod
-    def dump(datablock: object) -> dict:
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
         dumper = Dumper()
         dumper.depth = 3
         dumper.include_filter = [
@ -76,23 +69,9 @@ class BlLight(ReplicatedDatablock):
             'spot_size',
             'spot_blend'
         ]

-        data = dumper.dump(datablock)
-        data['animation_data'] = dump_animation_data(datablock)
+        data = dumper.dump(instance)
         return data

-    @staticmethod
-    def resolve(data: dict) -> object:
-        uuid = data.get('uuid')
-        return resolve_datablock_from_uuid(uuid, bpy.data.lights)
-
-    @staticmethod
-    def resolve_deps(datablock: object) -> [object]:
-        deps = []
-        deps.extend(resolve_animation_dependencies(datablock))
-        return deps
-
-_type = [bpy.types.SpotLight, bpy.types.PointLight, bpy.types.AreaLight, bpy.types.SunLight]
-_class = BlLight
View File
@ -21,20 +21,19 @@ import mathutils
import logging import logging
from .dump_anything import Loader, Dumper from .dump_anything import Loader, Dumper
-from replication.protocol import ReplicatedDatablock
-from .bl_datablock import resolve_datablock_from_uuid
+from .bl_datablock import BlDatablock


-class BlLightprobe(ReplicatedDatablock):
-    use_delta = True
+class BlLightprobe(BlDatablock):
     bl_id = "lightprobes"
     bl_class = bpy.types.LightProbe
+    bl_delay_refresh = 1
+    bl_delay_apply = 1
+    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'LIGHTPROBE_GRID'
-    bl_reload_parent = False

-    @staticmethod
-    def construct(data: dict) -> object:
+    def _construct(self, data):
         type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type']
         # See https://developer.blender.org/D6396
         if bpy.app.version[1] >= 83:
@ -42,13 +41,12 @@ class BlLightprobe(ReplicatedDatablock):
         else:
             logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")

-    @staticmethod
-    def load(data: dict, datablock: object):
+    def _load_implementation(self, data, target):
         loader = Loader()
-        loader.load(datablock, data)
+        loader.load(target, data)

-    @staticmethod
-    def dump(datablock: object) -> dict:
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
         if bpy.app.version[1] < 83:
             logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
@ -75,16 +73,7 @@ class BlLightprobe(ReplicatedDatablock):
             'visibility_blur'
         ]

-        return dumper.dump(datablock)
+        return dumper.dump(instance)

-    @staticmethod
-    def resolve(data: dict) -> object:
-        uuid = data.get('uuid')
-        return resolve_datablock_from_uuid(uuid, bpy.data.lightprobes)
-
-    @staticmethod
-    def resolve_deps(datablock: object) -> [object]:
-        return []
-
-_type = bpy.types.LightProbe
-_class = BlLightprobe
View File
@ -21,18 +21,13 @@ import mathutils
import logging import logging
import re import re
-from uuid import uuid4

 from .dump_anything import Loader, Dumper
-from replication.protocol import ReplicatedDatablock
-from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
-from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
+from .bl_datablock import BlDatablock, get_datablock_from_uuid

 NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
-IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM']
-def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
+def load_node(node_data, node_tree):
     """ Load a node into a node_tree from a dict

     :arg node_data: dumped node data
@ -42,152 +37,28 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
""" """
loader = Loader() loader = Loader()
target_node = node_tree.nodes.new(type=node_data["bl_idname"]) target_node = node_tree.nodes.new(type=node_data["bl_idname"])
target_node.select = False
loader.load(target_node, node_data) loader.load(target_node, node_data)
image_uuid = node_data.get('image_uuid', None) image_uuid = node_data.get('image_uuid', None)
node_tree_uuid = node_data.get('node_tree_uuid', None)
if image_uuid and not target_node.image: if image_uuid and not target_node.image:
image = resolve_datablock_from_uuid(image_uuid, bpy.data.images) target_node.image = get_datablock_from_uuid(image_uuid, None)
if image is None:
logging.error(f"Fail to find material image from uuid {image_uuid}")
else:
target_node.image = image
if node_tree_uuid: for input in node_data["inputs"]:
target_node.node_tree = get_datablock_from_uuid(node_tree_uuid, None) if hasattr(target_node.inputs[input], "default_value"):
inputs_data = node_data.get('inputs')
if inputs_data:
inputs = [i for i in target_node.inputs if i.type not in IGNORED_SOCKETS]
for idx, inpt in enumerate(inputs):
if idx < len(inputs_data) and hasattr(inpt, "default_value"):
loaded_input = inputs_data[idx]
try: try:
if inpt.type in ['OBJECT', 'COLLECTION']: target_node.inputs[input].default_value = node_data["inputs"][input]["default_value"]
inpt.default_value = get_datablock_from_uuid(loaded_input, None) except:
else: logging.error(
inpt.default_value = loaded_input f"Material {input} parameter not supported, skipping")
except Exception as e:
logging.warning(f"Node {target_node.name} input {inpt.name} parameter not supported, skipping ({e})")
else:
logging.warning(f"Node {target_node.name} input length mismatch.")
outputs_data = node_data.get('outputs') for output in node_data["outputs"]:
if outputs_data: if hasattr(target_node.outputs[output], "default_value"):
outputs = [o for o in target_node.outputs if o.type not in IGNORED_SOCKETS]
for idx, output in enumerate(outputs):
if idx < len(outputs_data) and hasattr(output, "default_value"):
loaded_output = outputs_data[idx]
try: try:
if output.type in ['OBJECT', 'COLLECTION']: target_node.outputs[output].default_value = node_data["outputs"][output]["default_value"]
output.default_value = get_datablock_from_uuid(loaded_output, None) except:
else: logging.error(
output.default_value = loaded_output f"Material {output} parameter not supported, skipping")
except Exception as e:
logging.warning(
f"Node {target_node.name} output {output.name} parameter not supported, skipping ({e})")
else:
logging.warning(
f"Node {target_node.name} output length mismatch.")
-def dump_node(node: bpy.types.ShaderNode) -> dict:
-    """ Dump a single node to a dict
-
-    :arg node: target node
-    :type node: bpy.types.Node
-    :retrun: dict
-    """
-    node_dumper = Dumper()
-    node_dumper.depth = 1
-    node_dumper.exclude_filter = [
-        "dimensions",
-        "show_expanded",
-        "name_full",
-        "select",
-        "bl_label",
-        "bl_height_min",
-        "bl_height_max",
-        "bl_height_default",
-        "bl_width_min",
-        "bl_width_max",
-        "type",
-        "bl_icon",
-        "bl_width_default",
-        "bl_static_type",
-        "show_tetxure",
-        "is_active_output",
-        "hide",
-        "show_options",
-        "show_preview",
-        "show_texture",
-        "outputs",
-        "width_hidden",
-        "image"
-    ]
-
-    dumped_node = node_dumper.dump(node)
-
-    if node.parent:
-        dumped_node['parent'] = node.parent.name
-
-    dump_io_needed = (node.type not in ['REROUTE', 'OUTPUT_MATERIAL'])
-    if dump_io_needed:
-        io_dumper = Dumper()
-        io_dumper.depth = 2
-        io_dumper.include_filter = ["default_value"]
-
-        if hasattr(node, 'inputs'):
-            dumped_node['inputs'] = []
-            inputs = [i for i in node.inputs if i.type not in IGNORED_SOCKETS]
-            for idx, inpt in enumerate(inputs):
-                if hasattr(inpt, 'default_value'):
-                    if isinstance(inpt.default_value, bpy.types.ID):
-                        dumped_input = inpt.default_value.uuid
-                    else:
-                        dumped_input = io_dumper.dump(inpt.default_value)
-                    dumped_node['inputs'].append(dumped_input)
-
-        if hasattr(node, 'outputs'):
-            dumped_node['outputs'] = []
-            for idx, output in enumerate(node.outputs):
-                if output.type not in IGNORED_SOCKETS:
-                    if hasattr(output, 'default_value'):
-                        dumped_node['outputs'].append(
-                            io_dumper.dump(output.default_value))
-
-    if hasattr(node, 'color_ramp'):
-        ramp_dumper = Dumper()
-        ramp_dumper.depth = 4
-        ramp_dumper.include_filter = [
-            'elements',
-            'alpha',
-            'color',
-            'position',
-            'interpolation',
-            'hue_interpolation',
-            'color_mode'
-        ]
-        dumped_node['color_ramp'] = ramp_dumper.dump(node.color_ramp)
-
-    if hasattr(node, 'mapping'):
-        curve_dumper = Dumper()
-        curve_dumper.depth = 5
-        curve_dumper.include_filter = [
-            'curves',
-            'points',
-            'location'
-        ]
-        dumped_node['mapping'] = curve_dumper.dump(node.mapping)
-
-    if hasattr(node, 'image') and getattr(node, 'image'):
-        dumped_node['image_uuid'] = node.image.uuid
-
-    if hasattr(node, 'node_tree') and getattr(node, 'node_tree'):
-        dumped_node['node_tree_uuid'] = node.node_tree.uuid
-
-    return dumped_node
def load_links(links_data, node_tree): def load_links(links_data, node_tree):
@ -232,207 +103,135 @@ def dump_links(links):
     return links_data


-def dump_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict:
-    """ Dump a shader node_tree to a dict including links and nodes
-
-    :arg node_tree: dumped shader node tree
-    :type node_tree: bpy.types.ShaderNodeTree
-    :return: dict
-    """
-    node_tree_data = {
-        'nodes': {node.name: dump_node(node) for node in node_tree.nodes},
-        'links': dump_links(node_tree.links),
-        'name': node_tree.name,
-        'type': type(node_tree).__name__
-    }
-
-    for socket_id in ['inputs', 'outputs']:
-        socket_collection = getattr(node_tree, socket_id)
-        node_tree_data[socket_id] = dump_node_tree_sockets(socket_collection)
-
-    return node_tree_data
-
-
-def dump_node_tree_sockets(sockets: bpy.types.Collection) -> dict:
-    """ dump sockets of a shader_node_tree
-
-    :arg target_node_tree: target node_tree
-    :type target_node_tree: bpy.types.NodeTree
-    :arg socket_id: socket identifer
-    :type socket_id: str
-    :return: dict
-    """
-    sockets_data = []
-    for socket in sockets:
-        try:
-            socket_uuid = socket['uuid']
-        except Exception:
-            socket_uuid = str(uuid4())
-            socket['uuid'] = socket_uuid
-
-        sockets_data.append((socket.name, socket.bl_socket_idname, socket_uuid))
-
-    return sockets_data
-
-
-def load_node_tree_sockets(sockets: bpy.types.Collection,
-                           sockets_data: dict):
-    """ load sockets of a shader_node_tree
-
-    :arg target_node_tree: target node_tree
-    :type target_node_tree: bpy.types.NodeTree
-    :arg socket_id: socket identifer
-    :type socket_id: str
-    :arg socket_data: dumped socket data
-    :type socket_data: dict
-    """
-    # Check for removed sockets
-    for socket in sockets:
-        if not [s for s in sockets_data if 'uuid' in socket and socket['uuid'] == s[2]]:
-            sockets.remove(socket)
-
-    # Check for new sockets
-    for idx, socket_data in enumerate(sockets_data):
-        try:
-            checked_socket = sockets[idx]
-            if checked_socket.name != socket_data[0]:
-                checked_socket.name = socket_data[0]
-        except Exception:
-            s = sockets.new(socket_data[1], socket_data[0])
-            s['uuid'] = socket_data[2]
-
-
-def load_node_tree(node_tree_data: dict, target_node_tree: bpy.types.ShaderNodeTree) -> dict:
-    """Load a shader node_tree from dumped data
-
-    :arg node_tree_data: dumped node data
-    :type node_tree_data: dict
-    :arg target_node_tree: target node_tree
-    :type target_node_tree: bpy.types.NodeTree
-    """
-    # TODO: load only required nodes
-    target_node_tree.nodes.clear()
-
-    if not target_node_tree.is_property_readonly('name'):
-        target_node_tree.name = node_tree_data['name']
-
-    if 'inputs' in node_tree_data:
-        socket_collection = getattr(target_node_tree, 'inputs')
-        load_node_tree_sockets(socket_collection, node_tree_data['inputs'])
-
-    if 'outputs' in node_tree_data:
-        socket_collection = getattr(target_node_tree, 'outputs')
-        load_node_tree_sockets(socket_collection, node_tree_data['outputs'])
-
-    # Load nodes
-    for node in node_tree_data["nodes"]:
-        load_node(node_tree_data["nodes"][node], target_node_tree)
-
-    for node_id, node_data in node_tree_data["nodes"].items():
-        target_node = target_node_tree.nodes.get(node_id, None)
-        if target_node is None:
-            continue
-        elif 'parent' in node_data:
-            target_node.parent = target_node_tree.nodes[node_data['parent']]
-        else:
-            target_node.parent = None
-
-    # TODO: load only required nodes links
-    # Load nodes links
-    target_node_tree.links.clear()
-    load_links(node_tree_data["links"], target_node_tree)
+def dump_node(node):
+    """ Dump a single node to a dict
+
+    :arg node: target node
+    :type node: bpy.types.Node
+    :retrun: dict
+    """
+
+    node_dumper = Dumper()
+    node_dumper.depth = 1
+    node_dumper.exclude_filter = [
+        "dimensions",
+        "show_expanded",
+        "name_full",
+        "select",
+        "bl_label",
+        "bl_height_min",
+        "bl_height_max",
+        "bl_height_default",
+        "bl_width_min",
+        "bl_width_max",
+        "type",
+        "bl_icon",
+        "bl_width_default",
+        "bl_static_type",
+        "show_tetxure",
+        "is_active_output",
+        "hide",
+        "show_options",
+        "show_preview",
+        "show_texture",
+        "outputs",
+        "width_hidden",
+        "image"
+    ]
+
+    dumped_node = node_dumper.dump(node)
+
+    if hasattr(node, 'inputs'):
+        dumped_node['inputs'] = {}
+
+        for i in node.inputs:
+            input_dumper = Dumper()
+            input_dumper.depth = 2
+            input_dumper.include_filter = ["default_value"]
+
+            if hasattr(i, 'default_value'):
+                dumped_node['inputs'][i.name] = input_dumper.dump(i)
+
+    dumped_node['outputs'] = {}
+    for i in node.outputs:
+        output_dumper = Dumper()
+        output_dumper.depth = 2
+        output_dumper.include_filter = ["default_value"]
+
+        if hasattr(i, 'default_value'):
+            dumped_node['outputs'][i.name] = output_dumper.dump(i)
+
+    if hasattr(node, 'color_ramp'):
+        ramp_dumper = Dumper()
+        ramp_dumper.depth = 4
+        ramp_dumper.include_filter = [
+            'elements',
+            'alpha',
+            'color',
+            'position'
+        ]
+        dumped_node['color_ramp'] = ramp_dumper.dump(node.color_ramp)
+    if hasattr(node, 'mapping'):
+        curve_dumper = Dumper()
+        curve_dumper.depth = 5
+        curve_dumper.include_filter = [
+            'curves',
+            'points',
+            'location'
+        ]
+        dumped_node['mapping'] = curve_dumper.dump(node.mapping)
+    if hasattr(node, 'image') and getattr(node, 'image'):
+        dumped_node['image_uuid'] = node.image.uuid
+    return dumped_node
 def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
-    def has_image(node): return (
-        node.type in ['TEX_IMAGE', 'TEX_ENVIRONMENT'] and node.image)
-
-    def has_node_group(node): return (
-        hasattr(node, 'node_tree') and node.node_tree)
-
-    def has_texture(node): return (
-        node.type in ['ATTRIBUTE_SAMPLE_TEXTURE','TEXTURE'] and node.texture)
-
-    deps = []
-    for node in node_tree.nodes:
-        if has_image(node):
-            deps.append(node.image)
-        elif has_node_group(node):
-            deps.append(node.node_tree)
-        elif has_texture(node):
-            deps.append(node.texture)
-
-    return deps
+    has_image = lambda node : (node.type in ['TEX_IMAGE', 'TEX_ENVIRONMENT'] and node.image)
+
+    return [node.image for node in node_tree.nodes if has_image(node)]
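Usage sketch for the dependency walk above (inside Blender; the material and image names are hypothetical):

    import bpy

    mat = bpy.data.materials['Wood']
    deps = get_node_tree_dependencies(mat.node_tree)
    # e.g. [bpy.data.images['wood_albedo.png']] — the restored version only
    # collects images; the removed one also collected node groups and textures.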
-def dump_materials_slots(materials: bpy.types.bpy_prop_collection) -> list:
-    """ Dump material slots collection
-
-    :arg materials: material slots collection to dump
-    :type materials: bpy.types.bpy_prop_collection
-    :return: list of tuples (mat_uuid, mat_name)
-    """
-    return [(m.uuid, m.name) for m in materials if m]
-
-
-def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_collection):
-    """ Load material slots
-
-    :arg src_materials: dumped material collection (ex: object.materials)
-    :type src_materials: list of tuples (uuid, name)
-    :arg dst_materials: target material collection pointer
-    :type dst_materials: bpy.types.bpy_prop_collection
-    """
-    # MATERIAL SLOTS
-    dst_materials.clear()
-    for mat_uuid, mat_name in src_materials:
-        mat_ref = None
-        if mat_uuid is not None:
-            mat_ref = get_datablock_from_uuid(mat_uuid, None)
-        else:
-            mat_ref = bpy.data.materials[mat_name]
-
-        dst_materials.append(mat_ref)
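Round-trip sketch for the two removed helpers above (inside Blender; the object names are hypothetical):

    import bpy

    src = bpy.data.objects['Plane'].data.materials
    slots = dump_materials_slots(src)            # [(uuid_or_None, name), ...]

    dst = bpy.data.objects['Plane.001'].data.materials
    load_materials_slots(slots, dst)             # slots rebuilt, uuid lookup first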
-class BlMaterial(ReplicatedDatablock):
-    use_delta = True
+class BlMaterial(BlDatablock):
     bl_id = "materials"
     bl_class = bpy.types.Material
+    bl_delay_refresh = 1
+    bl_delay_apply = 1
+    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'MATERIAL_DATA'
-    bl_reload_parent = False
-    bl_reload_child = True

-    @staticmethod
-    def construct(data: dict) -> object:
+    def _construct(self, data):
         return bpy.data.materials.new(data["name"])
-    @staticmethod
-    def load(data: dict, datablock: object):
+    def _load_implementation(self, data, target):
         loader = Loader()

         is_grease_pencil = data.get('is_grease_pencil')
         use_nodes = data.get('use_nodes')

-        loader.load(datablock, data)
+        loader.load(target, data)

         if is_grease_pencil:
-            if not datablock.is_grease_pencil:
-                bpy.data.materials.create_gpencil_data(datablock)
-            loader.load(datablock.grease_pencil, data['grease_pencil'])
+            if not target.is_grease_pencil:
+                bpy.data.materials.create_gpencil_data(target)
+            loader.load(target.grease_pencil, data['grease_pencil'])
         elif use_nodes:
-            if datablock.node_tree is None:
-                datablock.use_nodes = True
-
-            load_node_tree(data['node_tree'], datablock.node_tree)
-
-        load_animation_data(data.get('nodes_animation_data'), datablock.node_tree)
-        load_animation_data(data.get('animation_data'), datablock)
+            if target.node_tree is None:
+                target.use_nodes = True
+
+            target.node_tree.nodes.clear()
+
+            # Load nodes
+            for node in data["node_tree"]["nodes"]:
+                load_node(data["node_tree"]["nodes"][node], target.node_tree)
+
+            # Load nodes links
+            target.node_tree.links.clear()
+
+            load_links(data["node_tree"]["links"], target.node_tree)

-    @staticmethod
-    def dump(datablock: object) -> dict:
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
+
         mat_dumper = Dumper()
         mat_dumper.depth = 2
         mat_dumper.include_filter = [
@ -458,9 +257,17 @@ class BlMaterial(ReplicatedDatablock):
'line_priority', 'line_priority',
'is_grease_pencil' 'is_grease_pencil'
] ]
-        data = mat_dumper.dump(datablock)
+        data = mat_dumper.dump(instance)

-        if datablock.is_grease_pencil:
+        if instance.use_nodes:
+            nodes = {}
+            data["node_tree"] = {}
+
+            for node in instance.node_tree.nodes:
+                nodes[node.name] = dump_node(node)
+            data["node_tree"]['nodes'] = nodes
+
+            data["node_tree"]["links"] = dump_links(instance.node_tree.links)
+
+        elif instance.is_grease_pencil:
             gp_mat_dumper = Dumper()
             gp_mat_dumper.depth = 3
@ -490,34 +297,17 @@ class BlMaterial(ReplicatedDatablock):
'fill_style', 'fill_style',
'gradient_type', 'gradient_type',
# 'fill_image', # 'fill_image',
-                'use_stroke_holdout',
-                'use_overlap_strokes',
-                'use_fill_holdout',
             ]

-            data['grease_pencil'] = gp_mat_dumper.dump(datablock.grease_pencil)
-        elif datablock.use_nodes:
-            data['node_tree'] = dump_node_tree(datablock.node_tree)
-            data['nodes_animation_data'] = dump_animation_data(datablock.node_tree)
-
-        data['animation_data'] = dump_animation_data(datablock)
+            data['grease_pencil'] = gp_mat_dumper.dump(instance.grease_pencil)
         return data

-    @staticmethod
-    def resolve(data: dict) -> object:
-        uuid = data.get('uuid')
-        return resolve_datablock_from_uuid(uuid, bpy.data.materials)
-
-    @staticmethod
-    def resolve_deps(datablock: object) -> [object]:
+    def _resolve_deps_implementation(self):
+        # TODO: resolve node group deps
         deps = []

-        if datablock.use_nodes:
-            deps.extend(get_node_tree_dependencies(datablock.node_tree))
-            deps.extend(resolve_animation_dependencies(datablock.node_tree))
-        deps.extend(resolve_animation_dependencies(datablock))
+        if self.instance.use_nodes:
+            deps.extend(get_node_tree_dependencies(self.instance.node_tree))
+        if self.is_library:
+            deps.append(self.instance.library)

         return deps
-
-_type = bpy.types.Material
-_class = BlMaterial
View File
@ -25,13 +25,7 @@ import numpy as np
from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
from replication.constants import DIFF_BINARY from replication.constants import DIFF_BINARY
from replication.exception import ContextError from replication.exception import ContextError
-from replication.protocol import ReplicatedDatablock
-from .bl_datablock import get_datablock_from_uuid
-from .bl_material import dump_materials_slots, load_materials_slots
-from ..utils import get_preferences
-from .bl_datablock import resolve_datablock_from_uuid
-from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
+from .bl_datablock import BlDatablock
VERTICE = ['co'] VERTICE = ['co']
@ -39,8 +33,6 @@ EDGE = [
'vertices', 'vertices',
'crease', 'crease',
'bevel_weight', 'bevel_weight',
'use_seam',
'use_edge_sharp',
] ]
LOOP = [ LOOP = [
'vertex_index', 'vertex_index',
@ -54,79 +46,79 @@ POLYGON = [
'material_index', 'material_index',
] ]
-class BlMesh(ReplicatedDatablock):
-    use_delta = True
+class BlMesh(BlDatablock):
     bl_id = "meshes"
     bl_class = bpy.types.Mesh
+    bl_delay_refresh = 2
+    bl_delay_apply = 1
+    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'MESH_DATA'
-    bl_reload_parent = True

-    @staticmethod
-    def construct(data: dict) -> object:
-        return bpy.data.meshes.new(data.get("name"))
+    def _construct(self, data):
+        instance = bpy.data.meshes.new(data["name"])
+        instance.uuid = self.uuid
+        return instance

-    @staticmethod
-    def load(data: dict, datablock: object):
-        if not datablock or datablock.is_editmode:
+    def _load_implementation(self, data, target):
+        if not target or target.is_editmode:
             raise ContextError
         else:
-            load_animation_data(data.get('animation_data'), datablock)
             loader = Loader()
-            loader.load(datablock, data)
+            loader.load(target, data)

             # MATERIAL SLOTS
-            src_materials = data.get('materials', None)
-            if src_materials:
-                load_materials_slots(src_materials, datablock.materials)
+            target.materials.clear()
+
+            for m in data["material_list"]:
+                target.materials.append(bpy.data.materials[m])

             # CLEAR GEOMETRY
-            if datablock.vertices:
-                datablock.clear_geometry()
-
-            datablock.vertices.add(data["vertex_count"])
-            datablock.edges.add(data["egdes_count"])
-            datablock.loops.add(data["loop_count"])
-            datablock.polygons.add(data["poly_count"])
+            if target.vertices:
+                target.clear_geometry()
+
+            target.vertices.add(data["vertex_count"])
+            target.edges.add(data["egdes_count"])
+            target.loops.add(data["loop_count"])
+            target.polygons.add(data["poly_count"])

             # LOADING
-            np_load_collection(data['vertices'], datablock.vertices, VERTICE)
-            np_load_collection(data['edges'], datablock.edges, EDGE)
-            np_load_collection(data['loops'], datablock.loops, LOOP)
-            np_load_collection(data["polygons"], datablock.polygons, POLYGON)
+            np_load_collection(data['vertices'], target.vertices, VERTICE)
+            np_load_collection(data['edges'], target.edges, EDGE)
+            np_load_collection(data['loops'], target.loops, LOOP)
+            np_load_collection(data["polygons"], target.polygons, POLYGON)

             # UV Layers
             if 'uv_layers' in data.keys():
                 for layer in data['uv_layers']:
-                    if layer not in datablock.uv_layers:
-                        datablock.uv_layers.new(name=layer)
+                    if layer not in target.uv_layers:
+                        target.uv_layers.new(name=layer)

                     np_load_collection_primitives(
-                        datablock.uv_layers[layer].data,
+                        target.uv_layers[layer].data,
                         'uv',
                         data["uv_layers"][layer]['data'])

             # Vertex color
             if 'vertex_colors' in data.keys():
                 for color_layer in data['vertex_colors']:
-                    if color_layer not in datablock.vertex_colors:
-                        datablock.vertex_colors.new(name=color_layer)
+                    if color_layer not in target.vertex_colors:
+                        target.vertex_colors.new(name=color_layer)

                     np_load_collection_primitives(
-                        datablock.vertex_colors[color_layer].data,
+                        target.vertex_colors[color_layer].data,
                         'color',
                         data["vertex_colors"][color_layer]['data'])

-            datablock.validate()
-            datablock.update()
+            target.validate()
+            target.update()

-    @staticmethod
-    def dump(datablock: object) -> dict:
-        if (datablock.is_editmode or bpy.context.mode == "SCULPT") and not get_preferences().sync_flags.sync_during_editmode:
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
+
+        if instance.is_editmode and not self.preferences.sync_flags.sync_during_editmode:
             raise ContextError("Mesh is in edit mode")
-        mesh = datablock
+        mesh = instance

         dumper = Dumper()
         dumper.depth = 1
@ -140,8 +132,6 @@ class BlMesh(ReplicatedDatablock):
data = dumper.dump(mesh) data = dumper.dump(mesh)
data['animation_data'] = dump_animation_data(datablock)
# VERTICES # VERTICES
data["vertex_count"] = len(mesh.vertices) data["vertex_count"] = len(mesh.vertices)
data["vertices"] = np_dump_collection(mesh.vertices, VERTICE) data["vertices"] = np_dump_collection(mesh.vertices, VERTICE)
@ -172,31 +162,21 @@ class BlMesh(ReplicatedDatablock):
data['vertex_colors'][color_map.name] = {} data['vertex_colors'][color_map.name] = {}
data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color') data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')
# Materials # Fix material index
data['materials'] = dump_materials_slots(datablock.materials) m_list = []
for material in instance.materials:
if material:
m_list.append(material.name)
data['material_list'] = m_list
return data return data
@staticmethod def _resolve_deps_implementation(self):
def resolve_deps(datablock: object) -> [object]:
deps = [] deps = []
for material in datablock.materials: for material in self.instance.materials:
if material: if material:
deps.append(material) deps.append(material)
deps.extend(resolve_animation_dependencies(datablock))
return deps return deps
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.meshes)
@staticmethod
def needs_update(datablock: object, data: dict) -> bool:
return ('EDIT' not in bpy.context.mode and bpy.context.mode != 'SCULPT') \
or get_preferences().sync_flags.sync_during_editmode
_type = bpy.types.Mesh
_class = BlMesh
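
Note: the np_* helpers above serialize whole RNA collections through numpy buffers. A minimal round-trip sketch (illustrative only; it assumes an open Blender session, and the import path multi_user.bl_types.dump_anything is an assumption):

import bpy
from multi_user.bl_types.dump_anything import np_dump_collection, np_load_collection

src = bpy.data.meshes['Cube']            # any existing mesh
dst = bpy.data.meshes.new('Cube_copy')

# Dump vertex coordinates ('co') into a flat buffer, then rebuild
# the same vertex count on the copy and load the buffer back.
dumped = np_dump_collection(src.vertices, ['co'])
dst.vertices.add(len(src.vertices))
np_load_collection(dumped, dst.vertices, ['co'])
dst.validate()
dst.update()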

View File

@@ -23,9 +23,7 @@ from .dump_anything import (
     Dumper, Loader, np_dump_collection_primitive, np_load_collection_primitives,
     np_dump_collection, np_load_collection)
-from replication.protocol import ReplicatedDatablock
-from .bl_datablock import resolve_datablock_from_uuid
-from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
+from .bl_datablock import BlDatablock

 ELEMENT = [

@@ -64,35 +62,31 @@ def load_metaball_elements(elements_data, elements):
     np_load_collection(elements_data, elements, ELEMENT)

-class BlMetaball(ReplicatedDatablock):
+class BlMetaball(BlDatablock):
-    use_delta = True
     bl_id = "metaballs"
     bl_class = bpy.types.MetaBall
+    bl_delay_refresh = 1
+    bl_delay_apply = 1
+    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'META_BALL'
-    bl_reload_parent = False

-    @staticmethod
-    def construct(data: dict) -> object:
+    def _construct(self, data):
         return bpy.data.metaballs.new(data["name"])

-    @staticmethod
-    def load(data: dict, datablock: object):
-        load_animation_data(data.get('animation_data'), datablock)
+    def _load_implementation(self, data, target):
         loader = Loader()
-        loader.load(datablock, data)
+        loader.load(target, data)

-        datablock.elements.clear()
+        target.elements.clear()

         for mtype in data["elements"]['type']:
-            new_element = datablock.elements.new()
+            new_element = target.elements.new()

-        load_metaball_elements(data['elements'], datablock.elements)
+        load_metaball_elements(data['elements'], target.elements)

-    @staticmethod
-    def dump(datablock: object) -> dict:
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
         dumper = Dumper()
         dumper.depth = 1
         dumper.include_filter = [

@@ -106,24 +100,7 @@ class BlMetaball(ReplicatedDatablock):
             'texspace_size'
         ]

-        data = dumper.dump(datablock)
-        data['animation_data'] = dump_animation_data(datablock)
-        data['elements'] = dump_metaball_elements(datablock.elements)
+        data = dumper.dump(instance)
+        data['elements'] = dump_metaball_elements(instance.elements)

         return data

-    @staticmethod
-    def resolve(data: dict) -> object:
-        uuid = data.get('uuid')
-        return resolve_datablock_from_uuid(uuid, bpy.data.metaballs)

-    @staticmethod
-    def resolve_deps(datablock: object) -> [object]:
-        deps = []
-        deps.extend(resolve_animation_dependencies(datablock))
-        return deps

-_type = bpy.types.MetaBall
-_class = BlMetaball
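
Note: the static protocol on the removed (left-hand) side can be driven roughly as below; a sketch assuming the addon modules are importable and a metaball datablock named 'MBall' exists:

import bpy
from multi_user.bl_types.bl_metaball import BlMetaball

src = bpy.data.metaballs['MBall']
data = BlMetaball.dump(src)          # serialize to a plain dict
copy = BlMetaball.construct(data)    # create an empty datablock
BlMetaball.load(data, copy)          # fill it from the dict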

View File

@@ -1,64 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
import mathutils
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
from replication.protocol import ReplicatedDatablock
from .bl_material import (dump_node_tree,
load_node_tree,
get_node_tree_dependencies)
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlNodeGroup(ReplicatedDatablock):
use_delta = True
bl_id = "node_groups"
bl_class = bpy.types.NodeTree
bl_check_common = False
bl_icon = 'NODETREE'
bl_reload_parent = False
@staticmethod
def construct(data: dict) -> object:
return bpy.data.node_groups.new(data["name"], data["type"])
@staticmethod
def load(data: dict, datablock: object):
load_node_tree(data, datablock)
@staticmethod
def dump(datablock: object) -> dict:
return dump_node_tree(datablock)
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.node_groups)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
deps.extend(get_node_tree_dependencies(datablock))
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = [bpy.types.ShaderNodeTree, bpy.types.GeometryNodeTree]
_class = BlNodeGroup

View File

@@ -17,153 +17,14 @@
 import logging
-import re

 import bpy
 import mathutils
 from replication.exception import ContextError
-from replication.protocol import ReplicatedDatablock
-from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
-from .bl_material import IGNORED_SOCKETS
-from ..utils import get_preferences
-from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
-from .dump_anything import (
-    Dumper,
-    Loader,
-    np_load_collection,
-    np_dump_collection)
+from .bl_datablock import BlDatablock, get_datablock_from_uuid
+from .dump_anything import Dumper, Loader
+from replication.exception import ReparentException
SKIN_DATA = [
'radius',
'use_loose',
'use_root'
]
SHAPEKEY_BLOCK_ATTR = [
'mute',
'value',
'slider_min',
'slider_max',
]
if bpy.app.version[1] >= 93:
SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
else:
SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str)
logging.warning("Geometry node Float parameter not supported in \
blender 2.92.")
def get_node_group_inputs(node_group):
inputs = []
for inpt in node_group.inputs:
if inpt.type in IGNORED_SOCKETS:
continue
else:
inputs.append(inpt)
return inputs
# return [inpt.identifer for inpt in node_group.inputs if inpt.type not in IGNORED_SOCKETS]
def dump_physics(target: bpy.types.Object)->dict:
"""
Dump all physics settings from a given object excluding modifier
related physics settings (such as softbody, cloth, dynapaint and fluid)
"""
dumper = Dumper()
dumper.depth = 1
physics_data = {}
# Collisions (collision)
if target.collision and target.collision.use:
physics_data['collision'] = dumper.dump(target.collision)
# Field (field)
if target.field and target.field.type != "NONE":
physics_data['field'] = dumper.dump(target.field)
# Rigid Body (rigid_body)
if target.rigid_body:
physics_data['rigid_body'] = dumper.dump(target.rigid_body)
# Rigid Body constraint (rigid_body_constraint)
if target.rigid_body_constraint:
physics_data['rigid_body_constraint'] = dumper.dump(target.rigid_body_constraint)
return physics_data
def load_physics(dumped_settings: dict, target: bpy.types.Object):
""" Load all physics settings from a given object excluding modifier
related physics settings (such as softbody, cloth, dynapaint and fluid)
"""
loader = Loader()
if 'collision' in dumped_settings:
loader.load(target.collision, dumped_settings['collision'])
if 'field' in dumped_settings:
loader.load(target.field, dumped_settings['field'])
if 'rigid_body' in dumped_settings:
if not target.rigid_body:
bpy.ops.rigidbody.object_add({"object": target})
loader.load(target.rigid_body, dumped_settings['rigid_body'])
elif target.rigid_body:
bpy.ops.rigidbody.object_remove({"object": target})
if 'rigid_body_constraint' in dumped_settings:
if not target.rigid_body_constraint:
bpy.ops.rigidbody.constraint_add({"object": target})
loader.load(target.rigid_body_constraint, dumped_settings['rigid_body_constraint'])
elif target.rigid_body_constraint:
bpy.ops.rigidbody.constraint_remove({"object": target})
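
Note: a minimal usage sketch for the two physics helpers above (illustrative; assumes an active object in an open Blender session):

import bpy

obj = bpy.context.active_object

# Snapshot collision/field/rigid-body settings into a plain dict...
snapshot = dump_physics(obj)

# ...and restore them later; rigid body data is added or removed as needed.
load_physics(snapshot, obj)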
def dump_modifier_geometry_node_inputs(modifier: bpy.types.Modifier) -> list:
""" Dump geometry node modifier input properties
:arg modifier: geometry node modifier to dump
:type modifier: bpy.type.Modifier
"""
dumped_inputs = []
for inpt in get_node_group_inputs(modifier.node_group):
input_value = modifier[inpt.identifier]
dumped_input = None
if isinstance(input_value, bpy.types.ID):
dumped_input = input_value.uuid
elif isinstance(input_value, SUPPORTED_GEOMETRY_NODE_PARAMETERS):
dumped_input = input_value
elif hasattr(input_value, 'to_list'):
dumped_input = input_value.to_list()
dumped_inputs.append(dumped_input)
return dumped_inputs
def load_modifier_geometry_node_inputs(dumped_modifier: dict, target_modifier: bpy.types.Modifier):
""" Load geometry node modifier inputs
:arg dumped_modifier: source dumped modifier to load
:type dumped_modifier: dict
:arg target_modifier: target geometry node modifier
:type target_modifier: bpy.type.Modifier
"""
for input_index, inpt in enumerate(get_node_group_inputs(target_modifier.node_group)):
dumped_value = dumped_modifier['inputs'][input_index]
input_value = target_modifier[inpt.identifier]
if isinstance(input_value, SUPPORTED_GEOMETRY_NODE_PARAMETERS):
target_modifier[inpt.identifier] = dumped_value
elif hasattr(input_value, 'to_list'):
for index in range(len(input_value)):
input_value[index] = dumped_value[index]
elif inpt.type in ['COLLECTION', 'OBJECT']:
target_modifier[inpt.identifier] = get_datablock_from_uuid(
dumped_value, None)
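
Note: these two functions round-trip like so (a sketch; the modifier name 'GeometryNodes' is illustrative):

import bpy

obj = bpy.context.active_object
modifier = obj.modifiers.get('GeometryNodes')
if modifier and modifier.type == 'NODES':
    dumped = {'inputs': dump_modifier_geometry_node_inputs(modifier)}
    # Re-apply the captured input values onto the same modifier.
    load_modifier_geometry_node_inputs(dumped, modifier)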
 def load_pose(target_bone, data):

@@ -203,9 +64,6 @@ def find_data_from_name(name=None):
         else:
             logging.warning(
                 "Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
-    elif bpy.app.version[1] >= 91 and name in bpy.data.volumes.keys():
-        # Only supported since 2.91
-        instance = bpy.data.volumes[name]
     return instance

@@ -221,364 +79,107 @@ def _is_editmode(object: bpy.types.Object) -> bool:
             child_data.is_editmode)

-def find_textures_dependencies(modifiers: bpy.types.bpy_prop_collection) -> [bpy.types.Texture]:
""" Find textures lying in a modifier stack
:arg modifiers: modifiers collection
:type modifiers: bpy.types.bpy_prop_collection
:return: list of bpy.types.Texture pointers
"""
textures = []
for mod in modifiers:
modifier_attributes = [getattr(mod, attr_name)
for attr_name in mod.bl_rna.properties.keys()]
for attr in modifier_attributes:
if issubclass(type(attr), bpy.types.Texture) and attr is not None:
textures.append(attr)
return textures
def find_geometry_nodes_dependencies(modifiers: bpy.types.bpy_prop_collection) -> [bpy.types.NodeTree]:
""" Find geometry nodes dependencies from a modifier stack
:arg modifiers: modifiers collection
:type modifiers: bpy.types.bpy_prop_collection
:return: list of bpy.types.NodeTree pointers
"""
dependencies = []
for mod in modifiers:
if mod.type == 'NODES' and mod.node_group:
dependencies.append(mod.node_group)
# for inpt in get_node_group_inputs(mod.node_group):
# parameter = mod.get(inpt.identifier)
# if parameter and isinstance(parameter, bpy.types.ID):
# dependencies.append(parameter)
return dependencies
def dump_vertex_groups(src_object: bpy.types.Object) -> dict:
""" Dump object's vertex groups
:param target_object: dump vertex groups of this object
:type target_object: bpy.types.Object
"""
if isinstance(src_object.data, bpy.types.GreasePencil):
logging.warning(
"Grease pencil vertex groups are not supported yet. More info: https://gitlab.com/slumber/multi-user/-/issues/161")
else:
points_attr = 'vertices' if isinstance(
src_object.data, bpy.types.Mesh) else 'points'
dumped_vertex_groups = {}
# Vertex group metadata
for vg in src_object.vertex_groups:
dumped_vertex_groups[vg.index] = {
'name': vg.name,
'vertices': []
}
# Vertex group assignation
for vert in getattr(src_object.data, points_attr):
for vg in vert.groups:
vertices = dumped_vertex_groups.get(vg.group)['vertices']
vertices.append((vert.index, vg.weight))
return dumped_vertex_groups
def load_vertex_groups(dumped_vertex_groups: dict, target_object: bpy.types.Object):
""" Load object vertex groups
:param dumped_vertex_groups: vertex_groups to load
:type dumped_vertex_groups: dict
:param target_object: object to load the vertex groups into
:type target_object: bpy.types.Object
"""
target_object.vertex_groups.clear()
for vg in dumped_vertex_groups.values():
vertex_group = target_object.vertex_groups.new(name=vg['name'])
for index, weight in vg['vertices']:
vertex_group.add([index], weight, 'REPLACE')
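
Note: dump_vertex_groups/load_vertex_groups round-trip like this (a sketch; the object names are illustrative and both objects are assumed to share topology):

import bpy

src = bpy.data.objects['Body']
dst = bpy.data.objects['Body_copy']

# {index: {'name': ..., 'vertices': [(vert_index, weight), ...]}}
weights = dump_vertex_groups(src)
load_vertex_groups(weights, dst)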
def dump_shape_keys(target_key: bpy.types.Key)->dict:
""" Dump the target shape_keys datablock to a dict using numpy
:param dumped_key: target key datablock
:type dumped_key: bpy.types.Key
:return: dict
"""
dumped_key_blocks = []
dumper = Dumper()
dumper.include_filter = [
'name',
'mute',
'value',
'slider_min',
'slider_max',
]
for key in target_key.key_blocks:
dumped_key_block = dumper.dump(key)
dumped_key_block['data'] = np_dump_collection(key.data, ['co'])
dumped_key_block['relative_key'] = key.relative_key.name
dumped_key_blocks.append(dumped_key_block)
return {
'reference_key': target_key.reference_key.name,
'use_relative': target_key.use_relative,
'key_blocks': dumped_key_blocks,
'animation_data': dump_animation_data(target_key)
}
def load_shape_keys(dumped_shape_keys: dict, target_object: bpy.types.Object):
""" Load the target shape_keys datablock to a dict using numpy
:param dumped_key: src key data
:type dumped_key: bpy.types.Key
:param target_object: object used to load the shapekeys data onto
:type target_object: bpy.types.Object
"""
loader = Loader()
# Remove existing ones
target_object.shape_key_clear()
# Create keys and load vertices coords
dumped_key_blocks = dumped_shape_keys.get('key_blocks')
for dumped_key_block in dumped_key_blocks:
key_block = target_object.shape_key_add(name=dumped_key_block['name'])
loader.load(key_block, dumped_key_block)
np_load_collection(dumped_key_block['data'], key_block.data, ['co'])
# Load relative key after all
for dumped_key_block in dumped_key_blocks:
relative_key_name = dumped_key_block.get('relative_key')
key_name = dumped_key_block.get('name')
target_keyblock = target_object.data.shape_keys.key_blocks[key_name]
relative_key = target_object.data.shape_keys.key_blocks[relative_key_name]
target_keyblock.relative_key = relative_key
# Shape keys animation data
anim_data = dumped_shape_keys.get('animation_data')
if anim_data:
load_animation_data(anim_data, target_object.data.shape_keys)
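
Note: the shape-key pair is used the same way (a sketch; assumes both objects carry meshes with the same vertex count):

import bpy

src = bpy.data.objects['Face']
dst = bpy.data.objects['Face_copy']

dumped = dump_shape_keys(src.data.shape_keys)
load_shape_keys(dumped, dst)   # recreates key blocks, coordinates and relative keys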
def dump_modifiers(modifiers: bpy.types.bpy_prop_collection)->dict:
""" Dump all modifiers of a modifier collection into a dict
:param modifiers: modifiers
:type modifiers: bpy.types.bpy_prop_collection
:return: dict
"""
dumped_modifiers = []
dumper = Dumper()
dumper.depth = 1
dumper.exclude_filter = ['is_active']
for modifier in modifiers:
dumped_modifier = dumper.dump(modifier)
# hack to dump geometry nodes inputs
if modifier.type == 'NODES':
dumped_inputs = dump_modifier_geometry_node_inputs(
modifier)
dumped_modifier['inputs'] = dumped_inputs
elif modifier.type == 'PARTICLE_SYSTEM':
dumper.exclude_filter = [
"is_edited",
"is_editable",
"is_global_hair"
]
dumped_modifier['particle_system'] = dumper.dump(modifier.particle_system)
dumped_modifier['particle_system']['settings_uuid'] = modifier.particle_system.settings.uuid
elif modifier.type in ['SOFT_BODY', 'CLOTH']:
dumped_modifier['settings'] = dumper.dump(modifier.settings)
elif modifier.type == 'UV_PROJECT':
dumped_modifier['projectors'] =[p.object.name for p in modifier.projectors if p and p.object]
dumped_modifiers.append(dumped_modifier)
return dumped_modifiers
def dump_constraints(constraints: bpy.types.bpy_prop_collection)->list:
"""Dump all constraints to a list
:param constraints: constraints
:type constraints: bpy.types.bpy_prop_collection
:return: dict
"""
dumper = Dumper()
dumper.depth = 2
dumper.include_filter = None
dumped_constraints = []
for constraint in constraints:
dumped_constraints.append(dumper.dump(constraint))
return dumped_constraints
def load_constraints(dumped_constraints: list, constraints: bpy.types.bpy_prop_collection):
""" Load dumped constraints
:param dumped_constraints: list of constraints to load
:type dumped_constraints: list
:param constraints: constraints
:type constraints: bpy.types.bpy_prop_collection
"""
loader = Loader()
constraints.clear()
for dumped_constraint in dumped_constraints:
constraint_type = dumped_constraint.get('type')
new_constraint = constraints.new(constraint_type)
loader.load(new_constraint, dumped_constraint)
def load_modifiers(dumped_modifiers: list, modifiers: bpy.types.bpy_prop_collection):
""" Dump all modifiers of a modifier collection into a dict
:param dumped_modifiers: list of modifiers to load
:type dumped_modifiers: list
:param modifiers: modifiers
:type modifiers: bpy.types.bpy_prop_collection
"""
loader = Loader()
modifiers.clear()
for dumped_modifier in dumped_modifiers:
name = dumped_modifier.get('name')
mtype = dumped_modifier.get('type')
loaded_modifier = modifiers.new(name, mtype)
loader.load(loaded_modifier, dumped_modifier)
if loaded_modifier.type == 'NODES':
load_modifier_geometry_node_inputs(dumped_modifier, loaded_modifier)
elif loaded_modifier.type == 'PARTICLE_SYSTEM':
default = loaded_modifier.particle_system.settings
dumped_particles = dumped_modifier['particle_system']
loader.load(loaded_modifier.particle_system, dumped_particles)
settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
if settings:
loaded_modifier.particle_system.settings = settings
# Hack to remove the default generated particle settings
if not default.uuid:
bpy.data.particles.remove(default)
elif loaded_modifier.type in ['SOFT_BODY', 'CLOTH']:
loader.load(loaded_modifier.settings, dumped_modifier['settings'])
elif loaded_modifier.type == 'UV_PROJECT':
for projector_index, projector_object in enumerate(dumped_modifier['projectors']):
target_object = bpy.data.objects.get(projector_object)
if target_object:
loaded_modifier.projectors[projector_index].object = target_object
else:
logging.error(f"Couldn't load projector target object {projector_object}")
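
Note: a usage sketch for the modifier helpers above (illustrative object names; assumes an open Blender session):

import bpy

src = bpy.data.objects['Tree']
dst = bpy.data.objects['Tree_copy']

dumped = dump_modifiers(src.modifiers)
load_modifiers(dumped, dst.modifiers)   # clears the target stack, then rebuilds it in order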
def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_prop_collection):
""" Load modifiers custom data not managed by the dump_anything loader
:param dumped_modifiers: modifiers to load
:type dumped_modifiers: dict
:param modifiers: target modifiers collection
:type modifiers: bpy.types.bpy_prop_collection
"""
loader = Loader()
for modifier in modifiers:
dumped_modifier = dumped_modifiers.get(modifier.name)
-class BlObject(ReplicatedDatablock):
+class BlObject(BlDatablock):
-    use_delta = True
     bl_id = "objects"
     bl_class = bpy.types.Object
+    bl_delay_refresh = 1
+    bl_delay_apply = 1
+    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'OBJECT_DATA'
-    bl_reload_parent = False

-    @staticmethod
-    def construct(data: dict) -> object:
+    def _construct(self, data):
         instance = None
+
+        if self.is_library:
+            with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
+                targetData.objects = [
+                    name for name in sourceData.objects if name == self.data['name']]
+
+            instance = bpy.data.objects[self.data['name']]
+            instance.uuid = self.uuid
+
+            return instance

         # TODO: refactoring
         object_name = data.get("name")
         data_uuid = data.get("data_uuid")
         data_id = data.get("data")
-        data_type = data.get("type")

         object_data = get_datablock_from_uuid(
             data_uuid,
             find_data_from_name(data_id),
             ignore=['images'])  # TODO: use resolve_from_id
+
+        instance = bpy.data.objects.new(object_name, object_data)
+        instance.uuid = self.uuid

-        if data_type != 'EMPTY' and object_data is None:
-            raise Exception(f"Fail to load object {data['name']})")
+        return instance

-        return bpy.data.objects.new(object_name, object_data)

-    @staticmethod
-    def load(data: dict, datablock: object):
+    def _load_implementation(self, data, target):
         loader = Loader()
-        load_animation_data(data.get('animation_data'), datablock)

         data_uuid = data.get("data_uuid")
         data_id = data.get("data")

-        if datablock.data and (datablock.data.name != data_id):
-            datablock.data = get_datablock_from_uuid(
-                data_uuid, find_data_from_name(data_id), ignore=['images'])
+        if target.type != data['type']:
+            raise ReparentException()
+        elif target.data and (target.data.name != data_id):
+            target.data = get_datablock_from_uuid(data_uuid, find_data_from_name(data_id), ignore=['images'])

         # vertex groups
-        vertex_groups = data.get('vertex_groups', None)
-        if vertex_groups:
-            load_vertex_groups(vertex_groups, datablock)
+        if 'vertex_groups' in data:
+            target.vertex_groups.clear()
+            for vg in data['vertex_groups']:
+                vertex_group = target.vertex_groups.new(name=vg['name'])
+                point_attr = 'vertices' if 'vertices' in vg else 'points'
+                for vert in vg[point_attr]:
+                    vertex_group.add(
+                        [vert['index']], vert['weight'], 'REPLACE')

-        object_data = datablock.data

         # SHAPE KEYS
-        shape_keys = data.get('shape_keys')
-        if shape_keys:
-            load_shape_keys(shape_keys, datablock)
+        if 'shape_keys' in data:
+            target.shape_key_clear()
+
+            object_data = target.data
+
+            # Create keys and load vertices coords
+            for key_block in data['shape_keys']['key_blocks']:
+                key_data = data['shape_keys']['key_blocks'][key_block]
+                target.shape_key_add(name=key_block)
+
+                loader.load(
+                    target.data.shape_keys.key_blocks[key_block], key_data)
+                for vert in key_data['data']:
+                    target.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
+
+            # Load relative key after all
+            for key_block in data['shape_keys']['key_blocks']:
+                reference = data['shape_keys']['key_blocks'][key_block]['relative_key']
+
+                target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference]

         # Load transformation data
-        loader.load(datablock, data)
+        loader.load(target, data)

-        # Object display fields
-        if 'display' in data:
-            loader.load(datablock.display, data['display'])
+        loader.load(target.display, data['display'])

-        # Parenting
-        parent_id = data.get('parent_uid')
-        if parent_id:
-            parent = get_datablock_from_uuid(parent_id[0], bpy.data.objects[parent_id[1]])
-            # Avoid reloading
-            if datablock.parent != parent and parent is not None:
-                datablock.parent = parent
-        elif datablock.parent:
-            datablock.parent = None

         # Pose
         if 'pose' in data:
-            if not datablock.pose:
+            if not target.pose:
                 raise Exception('No pose data yet (Fixed in a near futur)')

             # Bone groups
             for bg_name in data['pose']['bone_groups']:
                 bg_data = data['pose']['bone_groups'].get(bg_name)
-                bg_target = datablock.pose.bone_groups.get(bg_name)
+                bg_target = target.pose.bone_groups.get(bg_name)

                 if not bg_target:
-                    bg_target = datablock.pose.bone_groups.new(name=bg_name)
+                    bg_target = target.pose.bone_groups.new(name=bg_name)

                 loader.load(bg_target, bg_data)
-            # datablock.pose.bone_groups.get
+            # target.pose.bone_groups.get

             # Bones
             for bone in data['pose']['bones']:
-                target_bone = datablock.pose.bones.get(bone)
+                target_bone = target.pose.bones.get(bone)
                 bone_data = data['pose']['bones'].get(bone)

                 if 'constraints' in bone_data.keys():

@@ -587,50 +188,20 @@ class BlObject(ReplicatedDatablock):
                 load_pose(target_bone, bone_data)

                 if 'bone_index' in bone_data.keys():
-                    target_bone.bone_group = datablock.pose.bone_group[bone_data['bone_group_index']]
+                    target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']]

         # TODO: find another way...
-        if datablock.empty_display_type == "IMAGE":
+        if target.type == 'EMPTY':
             img_uuid = data.get('data_uuid')
-            if datablock.data is None and img_uuid:
-                datablock.data = get_datablock_from_uuid(img_uuid, None)
+            if target.data is None and img_uuid:
+                target.data = get_datablock_from_uuid(img_uuid, None)  # bpy.data.images.get(img_key, None)

-        if hasattr(object_data, 'skin_vertices') \
-                and object_data.skin_vertices \
-                and 'skin_vertices' in data:
-            for index, skin_data in enumerate(object_data.skin_vertices):
-                np_load_collection(
-                    data['skin_vertices'][index],
-                    skin_data.data,
-                    SKIN_DATA)

-        if hasattr(datablock, 'cycles_visibility') \
-                and 'cycles_visibility' in data:
-            loader.load(datablock.cycles_visibility, data['cycles_visibility'])

-        if hasattr(datablock, 'modifiers'):
-            load_modifiers(data['modifiers'], datablock.modifiers)

-        constraints = data.get('constraints')
-        if constraints:
-            load_constraints(constraints, datablock.constraints)

-        # PHYSICS
-        load_physics(data, datablock)

-        transform = data.get('transforms', None)
-        if transform:
-            datablock.matrix_parent_inverse = mathutils.Matrix(
-                transform['matrix_parent_inverse'])
-            datablock.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
-            datablock.matrix_local = mathutils.Matrix(transform['matrix_local'])

-    @staticmethod
-    def dump(datablock: object) -> dict:
-        if _is_editmode(datablock):
-            if get_preferences().sync_flags.sync_during_editmode:
-                datablock.update_from_editmode()
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
+
+        if _is_editmode(instance):
+            if self.preferences.sync_flags.sync_during_editmode:
+                instance.update_from_editmode()
             else:
                 raise ContextError("Object is in edit-mode.")

@@ -639,7 +210,9 @@ class BlObject(ReplicatedDatablock):
         dumper.include_filter = [
             "name",
             "rotation_mode",
+            "parent",
             "data",
+            "children",
             "library",
             "empty_display_type",
             "empty_display_size",

@@ -653,6 +226,8 @@ class BlObject(ReplicatedDatablock):
             "color",
             "instance_collection",
             "instance_type",
+            "location",
+            "scale",
             'lock_location',
             'lock_rotation',
             'lock_scale',

@@ -667,66 +242,38 @@ class BlObject(ReplicatedDatablock):
             'show_texture_space',
             'show_in_front',
             'type',
-            'parent_type',
-            'parent_bone',
-            'track_axis',
-            'up_axis',
+            'rotation_quaternion' if instance.rotation_mode == 'QUATERNION' else 'rotation_euler',
         ]

-        data = dumper.dump(datablock)
-        data['animation_data'] = dump_animation_data(datablock)
+        data = dumper.dump(instance)

-        dumper.include_filter = [
-            'matrix_parent_inverse',
-            'matrix_local',
-            'matrix_basis']
-        data['transforms'] = dumper.dump(datablock)

         dumper.include_filter = [
             'show_shadows',
         ]
-        data['display'] = dumper.dump(datablock.display)
+        data['display'] = dumper.dump(instance.display)

-        data['data_uuid'] = getattr(datablock.data, 'uuid', None)
+        data['data_uuid'] = getattr(instance.data, 'uuid', None)
+        if self.is_library:
+            return data

-        # PARENTING
-        if datablock.parent:
-            data['parent_uid'] = (datablock.parent.uuid, datablock.parent.name)

         # MODIFIERS
-        modifiers = getattr(datablock, 'modifiers', None)
-        if hasattr(datablock, 'modifiers'):
-            data['modifiers'] = dump_modifiers(modifiers)
+        if hasattr(instance, 'modifiers'):
             dumper.include_filter = None
             dumper.depth = 1
+            data["modifiers"] = {}
+            for index, modifier in enumerate(instance.modifiers):
+                data["modifiers"][modifier.name] = dumper.dump(modifier)

-        gp_modifiers = getattr(datablock, 'grease_pencil_modifiers', None)
-        if gp_modifiers:
-            gp_modifiers_data = data["grease_pencil_modifiers"] = {}
-            for index, modifier in enumerate(gp_modifiers):
-                gp_mod_data = gp_modifiers_data[modifier.name] = dict()
-                gp_mod_data.update(dumper.dump(modifier))
-
-                if hasattr(modifier, 'use_custom_curve') \
-                        and modifier.use_custom_curve:
-                    curve_dumper = Dumper()
-                    curve_dumper.depth = 5
-                    curve_dumper.include_filter = [
-                        'curves',
-                        'points',
-                        'location']
-                    gp_mod_data['curve'] = curve_dumper.dump(modifier.curve)

         # CONSTRAINTS
-        if hasattr(datablock, 'constraints'):
-            data["constraints"] = dump_constraints(datablock.constraints)
+        if hasattr(instance, 'constraints'):
+            dumper.depth = 3
+            data["constraints"] = dumper.dump(instance.constraints)

         # POSE
-        if hasattr(datablock, 'pose') and datablock.pose:
+        if hasattr(instance, 'pose') and instance.pose:
             # BONES
             bones = {}
-            for bone in datablock.pose.bones:
+            for bone in instance.pose.bones:
                 bones[bone.name] = {}
                 dumper.depth = 1
                 rotation = 'rotation_quaternion' if bone.rotation_mode == 'QUATERNION' else 'rotation_euler'

@@ -751,7 +298,7 @@ class BlObject(ReplicatedDatablock):
             # GROUPS
             bone_groups = {}
-            for group in datablock.pose.bone_groups:
+            for group in instance.pose.bone_groups:
                 dumper.depth = 3
                 dumper.include_filter = [
                     'name',

@@ -760,75 +307,84 @@ class BlObject(ReplicatedDatablock):
                 bone_groups[group.name] = dumper.dump(group)
             data['pose']['bone_groups'] = bone_groups

+        # CHILDS
+        if len(instance.children) > 0:
+            childs = []
+            for child in instance.children:
+                childs.append(child.name)
+
+            data["children"] = childs

         # VERTEx GROUP
-        if len(datablock.vertex_groups) > 0:
-            data['vertex_groups'] = dump_vertex_groups(datablock)
+        if len(instance.vertex_groups) > 0:
+            points_attr = 'vertices' if isinstance(
+                instance.data, bpy.types.Mesh) else 'points'
+            vg_data = []
+            for vg in instance.vertex_groups:
+                vg_idx = vg.index
+                dumped_vg = {}
+                dumped_vg['name'] = vg.name
+
+                vertices = []
+
+                for i, v in enumerate(getattr(instance.data, points_attr)):
+                    for vg in v.groups:
+                        if vg.group == vg_idx:
+                            vertices.append({
+                                'index': i,
+                                'weight': vg.weight
+                            })
+
+                dumped_vg['vertices'] = vertices
+
+                vg_data.append(dumped_vg)
+
+            data['vertex_groups'] = vg_data

         # SHAPE KEYS
-        object_data = datablock.data
+        object_data = instance.data
         if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
-            data['shape_keys'] = dump_shape_keys(object_data.shape_keys)
+            dumper = Dumper()
+            dumper.depth = 2
+            dumper.include_filter = [
+                'reference_key',
+                'use_relative'
+            ]
+            data['shape_keys'] = dumper.dump(object_data.shape_keys)
+            data['shape_keys']['reference_key'] = object_data.shape_keys.reference_key.name
+            key_blocks = {}
+            for key in object_data.shape_keys.key_blocks:
+                dumper.depth = 3
+                dumper.include_filter = [
+                    'name',
+                    'data',
+                    'mute',
+                    'value',
+                    'slider_min',
+                    'slider_max',
+                    'data',
+                    'co'
+                ]
+                key_blocks[key.name] = dumper.dump(key)
+                key_blocks[key.name]['relative_key'] = key.relative_key.name
+            data['shape_keys']['key_blocks'] = key_blocks

-        # SKIN VERTICES
-        if hasattr(object_data, 'skin_vertices') and object_data.skin_vertices:
-            skin_vertices = list()
-            for skin_data in object_data.skin_vertices:
-                skin_vertices.append(
-                    np_dump_collection(skin_data.data, SKIN_DATA))
-            data['skin_vertices'] = skin_vertices

-        # CYCLE SETTINGS
-        if hasattr(datablock, 'cycles_visibility'):
-            dumper.include_filter = [
-                'camera',
-                'diffuse',
-                'glossy',
-                'transmission',
-                'scatter',
-                'shadow',
-            ]
-            data['cycles_visibility'] = dumper.dump(datablock.cycles_visibility)

-        # PHYSICS
-        data.update(dump_physics(datablock))

         return data

-    @staticmethod
-    def resolve_deps(datablock: object) -> [object]:
+    def _resolve_deps_implementation(self):
         deps = []

         # Avoid Empty case
-        if datablock.data:
-            deps.append(datablock.data)
+        if self.instance.data:
+            deps.append(self.instance.data)
+
+        if len(self.instance.children) > 0:
+            deps.extend(list(self.instance.children))

-        # Particle systems
-        for particle_slot in datablock.particle_systems:
-            deps.append(particle_slot.settings)
+        if self.is_library:
+            deps.append(self.instance.library)

-        if datablock.parent:
-            deps.append(datablock.parent)

-        if datablock.instance_type == 'COLLECTION':
+        if self.instance.instance_type == 'COLLECTION':
             # TODO: uuid based
-            deps.append(datablock.instance_collection)
+            deps.append(self.instance.instance_collection)

-        if datablock.modifiers:
-            deps.extend(find_textures_dependencies(datablock.modifiers))
-            deps.extend(find_geometry_nodes_dependencies(datablock.modifiers))

-        if hasattr(datablock.data, 'shape_keys') and datablock.data.shape_keys:
-            deps.extend(resolve_animation_dependencies(datablock.data.shape_keys))

-        deps.extend(resolve_animation_dependencies(datablock))

         return deps

-    @staticmethod
-    def resolve(data: dict) -> object:
-        uuid = data.get('uuid')
-        return resolve_datablock_from_uuid(uuid, bpy.data.objects)

-_type = bpy.types.Object
-_class = BlObject

View File

@@ -1,106 +0,0 @@
import bpy
import mathutils
from . import dump_anything
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
def dump_textures_slots(texture_slots: bpy.types.bpy_prop_collection) -> list:
""" Dump every texture slot collection as the form:
[(index, slot_texture_uuid, slot_texture_name), (), ...]
"""
dumped_slots = []
for index, slot in enumerate(texture_slots):
if slot and slot.texture:
dumped_slots.append((index, slot.texture.uuid, slot.texture.name))
return dumped_slots
def load_texture_slots(dumped_slots: list, target_slots: bpy.types.bpy_prop_collection):
"""
"""
for index, slot in enumerate(target_slots):
if slot:
target_slots.clear(index)
for index, slot_uuid, slot_name in dumped_slots:
target_slots.create(index).texture = get_datablock_from_uuid(
slot_uuid, slot_name
)
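
Note: the slot helpers above round-trip as below (a sketch; the settings name is illustrative, and it assumes texture datablocks carry the addon's uuid property):

import bpy

settings = bpy.data.particles['ParticleSettings']
dumped = dump_textures_slots(settings.texture_slots)   # [(index, uuid, name), ...]
load_texture_slots(dumped, settings.texture_slots)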
IGNORED_ATTR = [
"is_embedded_data",
"is_evaluated",
"is_fluid",
"is_library_indirect",
"users"
]
class BlParticle(ReplicatedDatablock):
use_delta = True
bl_id = "particles"
bl_class = bpy.types.ParticleSettings
bl_icon = "PARTICLES"
bl_check_common = False
bl_reload_parent = False
@staticmethod
def construct(data: dict) -> object:
return bpy.data.particles.new(data["name"])
@staticmethod
def load(data: dict, datablock: object):
load_animation_data(data.get('animation_data'), datablock)
dump_anything.load(datablock, data)
dump_anything.load(datablock.effector_weights, data["effector_weights"])
# Force field
force_field_1 = data.get("force_field_1", None)
if force_field_1:
dump_anything.load(datablock.force_field_1, force_field_1)
force_field_2 = data.get("force_field_2", None)
if force_field_2:
dump_anything.load(datablock.force_field_2, force_field_2)
# Texture slots
load_texture_slots(data["texture_slots"], datablock.texture_slots)
@staticmethod
def dump(datablock: object) -> dict:
dumper = dump_anything.Dumper()
dumper.depth = 1
dumper.exclude_filter = IGNORED_ATTR
data = dumper.dump(datablock)
# Particle effectors
data["effector_weights"] = dumper.dump(datablock.effector_weights)
if datablock.force_field_1:
data["force_field_1"] = dumper.dump(datablock.force_field_1)
if datablock.force_field_2:
data["force_field_2"] = dumper.dump(datablock.force_field_2)
# Texture slots
data["texture_slots"] = dump_textures_slots(datablock.texture_slots)
data['animation_data'] = dump_animation_data(datablock)
return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.particles)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = [t.texture for t in datablock.texture_slots if t and t.texture]
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = bpy.types.ParticleSettings
_class = BlParticle

View File

@@ -16,25 +16,15 @@
 # ##### END GPL LICENSE BLOCK #####

-import logging
-from pathlib import Path
-from uuid import uuid4

 import bpy
 import mathutils
-from deepdiff import DeepDiff, Delta
-from replication.constants import DIFF_JSON, MODIFIED
-from replication.protocol import ReplicatedDatablock

-from ..utils import flush_history, get_preferences
-from .bl_action import (dump_animation_data, load_animation_data,
-                        resolve_animation_dependencies)
-from .bl_collection import (dump_collection_children, dump_collection_objects,
-                            load_collection_childrens, load_collection_objects,
-                            resolve_collection_dependencies)
-from .bl_datablock import resolve_datablock_from_uuid
-from .bl_file import get_filepath
-from .dump_anything import Dumper, Loader
+from .dump_anything import Loader, Dumper
+from .bl_datablock import BlDatablock
+from .bl_collection import dump_collection_children, dump_collection_objects, load_collection_childrens, load_collection_objects
+from replication.constants import (DIFF_JSON, MODIFIED)
+from deepdiff import DeepDiff
+import logging

 RENDER_SETTINGS = [
     'dither_intensity',

@@ -271,190 +261,67 @@ VIEW_SETTINGS = [
     'black_level'
 ]
def dump_sequence(sequence: bpy.types.Sequence) -> dict:
""" Dump a sequence to a dict
:arg sequence: sequence to dump
:type sequence: bpy.types.Sequence
:return dict:
"""
dumper = Dumper()
dumper.exclude_filter = [
'lock',
'select',
'select_left_handle',
'select_right_handle',
'strobe'
]
dumper.depth = 1
data = dumper.dump(sequence)
# TODO: Support multiple images
if sequence.type == 'IMAGE':
data['filenames'] = [e.filename for e in sequence.elements]
# Effect strip inputs
input_count = getattr(sequence, 'input_count', None)
if input_count:
for n in range(input_count):
input_name = f"input_{n+1}"
data[input_name] = getattr(sequence, input_name).name
return data
def load_sequence(sequence_data: dict,
sequence_editor: bpy.types.SequenceEditor):
""" Load sequence from dumped data
:arg sequence_data: sequence to dump
:type sequence_data:dict
:arg sequence_editor: root sequence editor
:type sequence_editor: bpy.types.SequenceEditor
"""
strip_type = sequence_data.get('type')
strip_name = sequence_data.get('name')
strip_channel = sequence_data.get('channel')
strip_frame_start = sequence_data.get('frame_start')
sequence = sequence_editor.sequences_all.get(strip_name, None)
if sequence is None:
if strip_type == 'SCENE':
strip_scene = bpy.data.scenes.get(sequence_data.get('scene'))
sequence = sequence_editor.sequences.new_scene(strip_name,
strip_scene,
strip_channel,
strip_frame_start)
elif strip_type == 'MOVIE':
filepath = get_filepath(Path(sequence_data['filepath']).name)
sequence = sequence_editor.sequences.new_movie(strip_name,
filepath,
strip_channel,
strip_frame_start)
elif strip_type == 'SOUND':
filepath = bpy.data.sounds[sequence_data['sound']].filepath
sequence = sequence_editor.sequences.new_sound(strip_name,
filepath,
strip_channel,
strip_frame_start)
elif strip_type == 'IMAGE':
images_name = sequence_data.get('filenames')
filepath = get_filepath(images_name[0])
sequence = sequence_editor.sequences.new_image(strip_name,
filepath,
strip_channel,
strip_frame_start)
# load other images
if len(images_name) > 1:
for img_idx in range(1, len(images_name)):
sequence.elements.append((images_name[img_idx]))
else:
seq = {}
for i in range(sequence_data['input_count']):
seq[f"seq{i+1}"] = sequence_editor.sequences_all.get(
sequence_data.get(f"input_{i+1}", None))
sequence = sequence_editor.sequences.new_effect(name=strip_name,
type=strip_type,
channel=strip_channel,
frame_start=strip_frame_start,
frame_end=sequence_data['frame_final_end'],
**seq)
loader = Loader()
loader.exclure_filter = ['filepath', 'sound', 'filenames', 'fps']
loader.load(sequence, sequence_data)
sequence.select = False
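
Note: dump_sequence/load_sequence are meant to round-trip the whole strip list, roughly as below (a sketch; assumes media files are already resolvable through get_filepath):

import bpy

scene = bpy.context.scene
scene.sequence_editor_create()
vse = scene.sequence_editor

# Serialize every strip, then rebuild any missing ones from the dumps.
dumped = {seq.name: dump_sequence(seq) for seq in vse.sequences_all}
for seq_data in dumped.values():
    load_sequence(seq_data, vse)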
-class BlScene(ReplicatedDatablock):
+class BlScene(BlDatablock):
-    is_root = True
-    use_delta = True

     bl_id = "scenes"
     bl_class = bpy.types.Scene
+    bl_delay_refresh = 1
+    bl_delay_apply = 1
+    bl_automatic_push = True
     bl_check_common = True
     bl_icon = 'SCENE_DATA'
-    bl_reload_parent = False

+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+        self.diff_method = DIFF_JSON

-    @staticmethod
-    def construct(data: dict) -> object:
-        return bpy.data.scenes.new(data["name"])
+    def _construct(self, data):
+        instance = bpy.data.scenes.new(data["name"])
+        return instance

-    @staticmethod
-    def load(data: dict, datablock: object):
-        load_animation_data(data.get('animation_data'), datablock)
+    def _load_implementation(self, data, target):
         # Load other meshes metadata
         loader = Loader()
-        loader.load(datablock, data)
+        loader.load(target, data)

         # Load master collection
         load_collection_objects(
-            data['collection']['objects'], datablock.collection)
+            data['collection']['objects'], target.collection)
         load_collection_childrens(
-            data['collection']['children'], datablock.collection)
+            data['collection']['children'], target.collection)

         if 'world' in data.keys():
-            datablock.world = bpy.data.worlds[data['world']]
+            target.world = bpy.data.worlds[data['world']]

         # Annotation
         if 'grease_pencil' in data.keys():
-            datablock.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
+            target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]

-        if get_preferences().sync_flags.sync_render_settings:
+        if self.preferences.sync_flags.sync_render_settings:
             if 'eevee' in data.keys():
-                loader.load(datablock.eevee, data['eevee'])
+                loader.load(target.eevee, data['eevee'])

             if 'cycles' in data.keys():
-                loader.load(datablock.cycles, data['cycles'])
+                loader.load(target.cycles, data['cycles'])

             if 'render' in data.keys():
-                loader.load(datablock.render, data['render'])
+                loader.load(target.render, data['render'])

-            view_settings = data.get('view_settings')
-            if view_settings:
-                loader.load(datablock.view_settings, view_settings)
-                if datablock.view_settings.use_curve_mapping and \
-                        'curve_mapping' in view_settings:
+            if 'view_settings' in data.keys():
+                loader.load(target.view_settings, data['view_settings'])
+                if target.view_settings.use_curve_mapping and \
+                        'curve_mapping' in data['view_settings']:
                     # TODO: change this ugly fix
-                    datablock.view_settings.curve_mapping.white_level = view_settings['curve_mapping']['white_level']
-                    datablock.view_settings.curve_mapping.black_level = view_settings['curve_mapping']['black_level']
-                    datablock.view_settings.curve_mapping.update()
+                    target.view_settings.curve_mapping.white_level = data[
+                        'view_settings']['curve_mapping']['white_level']
+                    target.view_settings.curve_mapping.black_level = data[
+                        'view_settings']['curve_mapping']['black_level']
+                    target.view_settings.curve_mapping.update()

-        # Sequencer
-        sequences = data.get('sequences')
-        if sequences:
-            # Create sequencer data
-            datablock.sequence_editor_create()
-            vse = datablock.sequence_editor
-
-            # Clear removed sequences
-            for seq in vse.sequences_all:
-                if seq.name not in sequences:
-                    vse.sequences.remove(seq)
-            # Load existing sequences
-            for seq_data in sequences.value():
-                load_sequence(seq_data, vse)
-        # If the sequence is no longer used, clear it
-        elif datablock.sequence_editor and not sequences:
-            datablock.sequence_editor_clear()
-
-        # FIXME: Find a better way after the replication big refacotoring
-        # Keep other user from deleting collection object by flushing their history
-        flush_history()

-    @staticmethod
-    def dump(datablock: object) -> dict:
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
+
         data = {}
-        data['animation_data'] = dump_animation_data(datablock)

-        # Metadata
         scene_dumper = Dumper()
         scene_dumper.depth = 1
         scene_dumper.include_filter = [

@@ -466,40 +333,41 @@ class BlScene(ReplicatedDatablock):
             'frame_end',
             'frame_step',
         ]
-        if get_preferences().sync_flags.sync_active_camera:
+        if self.preferences.sync_flags.sync_active_camera:
             scene_dumper.include_filter.append('camera')

-        data.update(scene_dumper.dump(datablock))
+        data = scene_dumper.dump(instance)

-        # Master collection
+        scene_dumper.depth = 3
+        scene_dumper.include_filter = ['children', 'objects', 'name']

         data['collection'] = {}
         data['collection']['children'] = dump_collection_children(
-            datablock.collection)
+            instance.collection)
         data['collection']['objects'] = dump_collection_objects(
-            datablock.collection)
+            instance.collection)

         scene_dumper.depth = 1
         scene_dumper.include_filter = None

-        # Render settings
-        if get_preferences().sync_flags.sync_render_settings:
+        if self.preferences.sync_flags.sync_render_settings:
             scene_dumper.include_filter = RENDER_SETTINGS

-            data['render'] = scene_dumper.dump(datablock.render)
+            data['render'] = scene_dumper.dump(instance.render)

-            if datablock.render.engine == 'BLENDER_EEVEE':
+            if instance.render.engine == 'BLENDER_EEVEE':
                 scene_dumper.include_filter = EVEE_SETTINGS
-                data['eevee'] = scene_dumper.dump(datablock.eevee)
-            elif datablock.render.engine == 'CYCLES':
+                data['eevee'] = scene_dumper.dump(instance.eevee)
+            elif instance.render.engine == 'CYCLES':
                 scene_dumper.include_filter = CYCLES_SETTINGS
-                data['cycles'] = scene_dumper.dump(datablock.cycles)
+                data['cycles'] = scene_dumper.dump(instance.cycles)

             scene_dumper.include_filter = VIEW_SETTINGS
-            data['view_settings'] = scene_dumper.dump(datablock.view_settings)
+            data['view_settings'] = scene_dumper.dump(instance.view_settings)

-            if datablock.view_settings.use_curve_mapping:
+            if instance.view_settings.use_curve_mapping:
                 data['view_settings']['curve_mapping'] = scene_dumper.dump(
-                    datablock.view_settings.curve_mapping)
+                    instance.view_settings.curve_mapping)
                 scene_dumper.depth = 5
                 scene_dumper.include_filter = [
                     'curves',

@@ -507,90 +375,41 @@ class BlScene(ReplicatedDatablock):
                     'location',
                 ]
                 data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(
-                    datablock.view_settings.curve_mapping.curves)
+                    instance.view_settings.curve_mapping.curves)

-        # Sequence
-        vse = datablock.sequence_editor
-        if vse:
-            dumped_sequences = {}
-            for seq in vse.sequences_all:
-                dumped_sequences[seq.name] = dump_sequence(seq)
-            data['sequences'] = dumped_sequences

         return data

-    @staticmethod
-    def resolve_deps(datablock: object) -> [object]:
+    def _resolve_deps_implementation(self):
         deps = []

-        # Master Collection
-        deps.extend(resolve_collection_dependencies(datablock.collection))
+        # child collections
+        for child in self.instance.collection.children:
+            deps.append(child)
+
+        # childs objects
+        for object in self.instance.collection.objects:
+            deps.append(object)

         # world
-        if datablock.world:
-            deps.append(datablock.world)
+        if self.instance.world:
+            deps.append(self.instance.world)

         # annotations
-        if datablock.grease_pencil:
-            deps.append(datablock.grease_pencil)
+        if self.instance.grease_pencil:
+            deps.append(self.instance.grease_pencil)

-        deps.extend(resolve_animation_dependencies(datablock))

-        # Sequences
-        vse = datablock.sequence_editor
-        if vse:
-            for sequence in vse.sequences_all:
-                if sequence.type == 'MOVIE' and sequence.filepath:
-                    deps.append(Path(bpy.path.abspath(sequence.filepath)))
-                elif sequence.type == 'SOUND' and sequence.sound:
-                    deps.append(sequence.sound)
-                elif sequence.type == 'IMAGE':
-                    for elem in sequence.elements:
-                        sequence.append(
-                            Path(bpy.path.abspath(sequence.directory),
-                                 elem.filename))

         return deps

-    @staticmethod
-    def resolve(data: dict) -> object:
-        uuid = data.get('uuid')
-        name = data.get('name')
-        datablock = resolve_datablock_from_uuid(uuid, bpy.data.scenes)
-        if datablock is None:
-            datablock = bpy.data.scenes.get(name)
-
-        return datablock

-    @staticmethod
-    def compute_delta(last_data: dict, current_data: dict) -> Delta:
+    def diff(self):
         exclude_path = []

-        if not get_preferences().sync_flags.sync_render_settings:
+        if not self.preferences.sync_flags.sync_render_settings:
             exclude_path.append("root['eevee']")
             exclude_path.append("root['cycles']")
             exclude_path.append("root['view_settings']")
             exclude_path.append("root['render']")

-        if not get_preferences().sync_flags.sync_active_camera:
+        if not self.preferences.sync_flags.sync_active_camera:
             exclude_path.append("root['camera']")

-        diff_params = {
-            'exclude_paths': exclude_path,
-            'ignore_order': True,
-            'report_repetition': True
-        }
-        delta_params = {
-            # 'mutate': True
-        }
-
-        return Delta(
-            DeepDiff(last_data,
-                     current_data,
-                     cache_size=5000,
-                     **diff_params),
-            **delta_params)
+        return DeepDiff(self.data, self._dump(instance=self.instance), exclude_paths=exclude_path)

-_type = bpy.types.Scene
-_class = BlScene
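
Note: the removed compute_delta path relies on deepdiff's Delta objects; a minimal standalone sketch of that mechanism (plain dicts stand in for dumped scene data):

from deepdiff import DeepDiff, Delta

last_data = {'name': 'Scene', 'frame_end': 250}
current_data = {'name': 'Scene', 'frame_end': 300}

diff = DeepDiff(last_data, current_data, ignore_order=True, report_repetition=True)
delta = Delta(diff)

# Applying the delta to the old state reproduces the new one.
assert last_data + delta == current_data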

View File

@@ -23,59 +23,47 @@ from pathlib import Path

 import bpy

 from .bl_file import get_filepath, ensure_unpacked
-from replication.protocol import ReplicatedDatablock
+from .bl_datablock import BlDatablock
 from .dump_anything import Dumper, Loader
-from .bl_datablock import resolve_datablock_from_uuid

-class BlSound(ReplicatedDatablock):
+class BlSound(BlDatablock):
     bl_id = "sounds"
     bl_class = bpy.types.Sound
+    bl_delay_refresh = 1
+    bl_delay_apply = 1
+    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'SOUND'
-    bl_reload_parent = False

-    @staticmethod
-    def construct(data: dict) -> object:
+    def _construct(self, data):
         filename = data.get('filename')

         return bpy.data.sounds.load(get_filepath(filename))

-    @staticmethod
-    def load(data: dict, datablock: object):
+    def _load(self, data, target):
         loader = Loader()
-        loader.load(datablock, data)
+        loader.load(target, data)

+    def diff(self):
+        return False

-    @staticmethod
-    def dump(datablock: object) -> dict:
-        filename = Path(datablock.filepath).name
+    def _dump(self, instance=None):
+        filename = Path(instance.filepath).name

         if not filename:
-            raise FileExistsError(datablock.filepath)
+            raise FileExistsError(instance.filepath)

         return {
             'filename': filename,
-            'name': datablock.name
+            'name': instance.name
         }

-    @staticmethod
-    def resolve_deps(datablock: object) -> [object]:
+    def _resolve_deps_implementation(self):
         deps = []

-        if datablock.filepath and datablock.filepath != '<builtin>':
-            ensure_unpacked(datablock)
-            deps.append(Path(bpy.path.abspath(datablock.filepath)))
+        if self.instance.filepath and self.instance.filepath != '<builtin>':
+            ensure_unpacked(self.instance)
+
+            deps.append(Path(bpy.path.abspath(self.instance.filepath)))

         return deps

-    @staticmethod
-    def resolve(data: dict) -> object:
-        uuid = data.get('uuid')
-        return resolve_datablock_from_uuid(uuid, bpy.data.sounds)

-    @staticmethod
-    def needs_update(datablock: object, data: dict) -> bool:
-        return False

-_type = bpy.types.Sound
-_class = BlSound

View File

@@ -20,31 +20,28 @@ import bpy
 import mathutils

 from .dump_anything import Loader, Dumper
-from replication.protocol import ReplicatedDatablock
-from .bl_datablock import resolve_datablock_from_uuid
-from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
+from .bl_datablock import BlDatablock

-class BlSpeaker(ReplicatedDatablock):
+class BlSpeaker(BlDatablock):
-    use_delta = True

     bl_id = "speakers"
     bl_class = bpy.types.Speaker
+    bl_delay_refresh = 1
+    bl_delay_apply = 1
+    bl_automatic_push = True
     bl_check_common = False
     bl_icon = 'SPEAKER'
-    bl_reload_parent = False

-    @staticmethod
-    def load(data: dict, datablock: object):
+    def _load_implementation(self, data, target):
         loader = Loader()
-        loader.load(datablock, data)
-        load_animation_data(data.get('animation_data'), datablock)
+        loader.load(target, data)

-    @staticmethod
-    def construct(data: dict) -> object:
+    def _construct(self, data):
         return bpy.data.speakers.new(data["name"])

-    @staticmethod
-    def dump(datablock: object) -> dict:
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
         dumper = Dumper()
         dumper.depth = 1
         dumper.include_filter = [

@@ -63,27 +60,17 @@ class BlSpeaker(ReplicatedDatablock):
             'cone_volume_outer'
         ]

-        data = dumper.dump(datablock)
-        data['animation_data'] = dump_animation_data(datablock)
-        return data
+        return dumper.dump(instance)

-    @staticmethod
-    def resolve(data: dict) -> object:
-        uuid = data.get('uuid')
-        return resolve_datablock_from_uuid(uuid, bpy.data.speakers)

-    @staticmethod
-    def resolve_deps(datablock: object) -> [object]:
+    def _resolve_deps_implementation(self):
         # TODO: resolve material
         deps = []

-        sound = datablock.sound
+        sound = self.instance.sound

         if sound:
             deps.append(sound)

-        deps.extend(resolve_animation_dependencies(datablock))

         return deps

-_type = bpy.types.Speaker
-_class = BlSpeaker

View File

@@ -1,97 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
import mathutils
from .dump_anything import Loader, Dumper
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
import bpy.types as T
class BlTexture(ReplicatedDatablock):
use_delta = True
bl_id = "textures"
bl_class = bpy.types.Texture
bl_check_common = False
bl_icon = 'TEXTURE'
bl_reload_parent = False
@staticmethod
def load(data: dict, datablock: object):
loader = Loader()
loader.load(datablock, data)
load_animation_data(data.get('animation_data'), datablock)
@staticmethod
def construct(data: dict) -> object:
return bpy.data.textures.new(data["name"], data["type"])
@staticmethod
def dump(datablock: object) -> dict:
dumper = Dumper()
dumper.depth = 1
dumper.exclude_filter = [
'tag',
'original',
'users',
'uuid',
'is_embedded_data',
'is_evaluated',
'name_full'
]
data = dumper.dump(datablock)
color_ramp = getattr(datablock, 'color_ramp', None)
if color_ramp:
dumper.depth = 4
data['color_ramp'] = dumper.dump(color_ramp)
data['animation_data'] = dump_animation_data(datablock)
return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.textures)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
image = getattr(datablock,"image", None)
if image:
deps.append(image)
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = [T.WoodTexture, T.VoronoiTexture,
T.StucciTexture, T.NoiseTexture,
T.MusgraveTexture, T.MarbleTexture,
T.MagicTexture, T.ImageTexture,
T.DistortedNoiseTexture, T.CloudsTexture,
T.BlendTexture]
_class = BlTexture
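One detail worth noting in the removed dump above: the dumper depth is raised to 4 just for color_ramp, because ramp elements sit in a collection nested several levels below the texture. The payload it produced looked roughly like this (keys taken from the code above, values invented for illustration):

texture_data = {
    'name': 'Texture',
    'type': 'IMAGE',
    'color_ramp': {
        'elements': [
            {'position': 0.0, 'color': [0.0, 0.0, 0.0, 1.0]},
            {'position': 1.0, 'color': [1.0, 1.0, 1.0, 1.0]},
        ],
    },
    'animation_data': None,
}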

View File

@@ -1,101 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####


import bpy
import mathutils
from pathlib import Path

from .dump_anything import Loader, Dumper
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_material import dump_materials_slots, load_materials_slots
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


class BlVolume(ReplicatedDatablock):
    use_delta = True

    bl_id = "volumes"
    bl_class = bpy.types.Volume
    bl_check_common = False
    bl_icon = 'VOLUME_DATA'
    bl_reload_parent = False

    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.volumes.new(data["name"])

    @staticmethod
    def dump(datablock: object) -> dict:
        dumper = Dumper()
        dumper.depth = 1
        dumper.exclude_filter = [
            'tag',
            'original',
            'users',
            'uuid',
            'is_embedded_data',
            'is_evaluated',
            'name_full',
            'use_fake_user'
        ]

        data = dumper.dump(datablock)
        data['display'] = dumper.dump(datablock.display)

        # Fix material index
        data['materials'] = dump_materials_slots(datablock.materials)
        data['animation_data'] = dump_animation_data(datablock)
        return data

    @staticmethod
    def load(data: dict, datablock: object):
        load_animation_data(data.get('animation_data'), datablock)
        loader = Loader()
        loader.load(datablock, data)
        loader.load(datablock.display, data['display'])

        # MATERIAL SLOTS
        src_materials = data.get('materials', None)
        if src_materials:
            load_materials_slots(src_materials, datablock.materials)

    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.volumes)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        # TODO: resolve material
        deps = []

        external_vdb = Path(bpy.path.abspath(datablock.filepath))
        if external_vdb.exists() and not external_vdb.is_dir():
            deps.append(external_vdb)

        for material in datablock.materials:
            if material:
                deps.append(material)

        deps.extend(resolve_animation_dependencies(datablock))

        return deps


_type = bpy.types.Volume
_class = BlVolume
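The removed BlVolume delegates slot serialization to dump_materials_slots and load_materials_slots from bl_material. A hedged sketch of that round-trip, assuming slots are recorded as (uuid, name) pairs and re-resolved on load; the assumption is mine, not verified against bl_material:

def dump_slots(materials) -> list:
    # Assumed behaviour: remember identity, not the material content.
    return [(getattr(m, 'uuid', None), m.name) for m in materials if m]


def load_slots(src_slots, materials, lookup) -> None:
    # Assumed behaviour: rebuild the slot list from a uuid/name lookup.
    materials.clear()
    for uuid, name in src_slots:
        materials.append(lookup.get(uuid) or lookup.get(name))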

View File

@@ -20,42 +20,47 @@ import bpy
 import mathutils
 
 from .dump_anything import Loader, Dumper
-from replication.protocol import ReplicatedDatablock
-from .bl_material import (load_node_tree,
-                          dump_node_tree,
+from .bl_datablock import BlDatablock
+from .bl_material import (load_links,
+                          load_node,
+                          dump_node,
+                          dump_links,
                           get_node_tree_dependencies)
-from .bl_datablock import resolve_datablock_from_uuid
-from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
 
 
-class BlWorld(ReplicatedDatablock):
-    use_delta = True
-
+class BlWorld(BlDatablock):
     bl_id = "worlds"
     bl_class = bpy.types.World
+    bl_delay_refresh = 1
+    bl_delay_apply = 1
+    bl_automatic_push = True
     bl_check_common = True
     bl_icon = 'WORLD_DATA'
-    bl_reload_parent = False
 
-    @staticmethod
-    def construct(data: dict) -> object:
+    def _construct(self, data):
         return bpy.data.worlds.new(data["name"])
 
-    @staticmethod
-    def load(data: dict, datablock: object):
-        load_animation_data(data.get('animation_data'), datablock)
+    def _load_implementation(self, data, target):
         loader = Loader()
-        loader.load(datablock, data)
+        loader.load(target, data)
 
         if data["use_nodes"]:
-            if datablock.node_tree is None:
-                datablock.use_nodes = True
+            if target.node_tree is None:
+                target.use_nodes = True
 
-            load_node_tree(data['node_tree'], datablock.node_tree)
+            target.node_tree.nodes.clear()
+
+            for node in data["node_tree"]["nodes"]:
+                load_node(data["node_tree"]["nodes"][node], target.node_tree)
+
+            # Load nodes links
+            target.node_tree.links.clear()
+
+            load_links(data["node_tree"]["links"], target.node_tree)
 
-    @staticmethod
-    def dump(datablock: object) -> dict:
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
         world_dumper = Dumper()
         world_dumper.depth = 1
         world_dumper.include_filter = [
@@ -63,27 +68,25 @@ class BlWorld(ReplicatedDatablock):
             "name",
             "color"
         ]
-        data = world_dumper.dump(datablock)
-        if datablock.use_nodes:
-            data['node_tree'] = dump_node_tree(datablock.node_tree)
+        data = world_dumper.dump(instance)
+        if instance.use_nodes:
+            data['node_tree'] = {}
+            nodes = {}
 
-        data['animation_data'] = dump_animation_data(datablock)
+            for node in instance.node_tree.nodes:
+                nodes[node.name] = dump_node(node)
+
+            data["node_tree"]['nodes'] = nodes
+            data["node_tree"]['links'] = dump_links(instance.node_tree.links)
+
         return data
 
-    @staticmethod
-    def resolve(data: dict) -> object:
-        uuid = data.get('uuid')
-        return resolve_datablock_from_uuid(uuid, bpy.data.worlds)
-
-    @staticmethod
-    def resolve_deps(datablock: object) -> [object]:
+    def _resolve_deps_implementation(self):
         deps = []
 
-        if datablock.use_nodes:
-            deps.extend(get_node_tree_dependencies(datablock.node_tree))
-
-        deps.extend(resolve_animation_dependencies(datablock))
+        if self.instance.use_nodes:
+            deps.extend(get_node_tree_dependencies(self.instance.node_tree))
+        if self.is_library:
+            deps.append(self.instance.library)
 
         return deps
-
-_type = bpy.types.World
-_class = BlWorld
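The restored _dump_implementation flattens the world's node tree into two mappings, a dict of dumped nodes keyed by node name and a list of dumped links, and _load_implementation clears the tree and replays both. An illustrative payload (the link keys are plausible guesses, dump_links' exact format is not shown in this diff):

world_data = {
    'name': 'World',
    'color': [0.05, 0.05, 0.05],
    'node_tree': {
        'nodes': {
            'Background': {'type': 'ShaderNodeBackground'},
            'World Output': {'type': 'ShaderNodeOutputWorld'},
        },
        'links': [
            {'from_node': 'Background', 'from_socket': 'Background',
             'to_node': 'World Output', 'to_socket': 'Surface'},
        ],
    },
}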

View File

@@ -465,7 +465,6 @@ class Loader:
         self.type_subset = self.match_subset_all
         self.occlude_read_only = False
         self.order = ['*']
-        self.exclure_filter = []
 
     def load(self, dst_data, src_dumped_data):
         self._load_any(
@@ -476,8 +475,7 @@
     def _load_any(self, any, dump):
         for filter_function, load_function in self.type_subset:
-            if filter_function(any) and \
-                    any.sub_element_name not in self.exclure_filter:
+            if filter_function(any):
                 load_function(any, dump)
                 return
@@ -507,12 +505,14 @@
         _constructors = {
             T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]),
             T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []),
-            T.GpencilModifier: (CONSTRUCTOR_NEW, ["name", "type"]),
+            T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"]),
+            T.Constraint: (CONSTRUCTOR_NEW, ["type"]),
         }
 
         destructors = {
             T.ColorRampElement: DESTRUCTOR_REMOVE,
-            T.GpencilModifier: DESTRUCTOR_CLEAR,
+            T.Modifier: DESTRUCTOR_CLEAR,
+            T.Constraint: CONSTRUCTOR_NEW,
         }
         element_type = element.bl_rna_property.fixed_type
@@ -527,13 +527,7 @@
         if destructor:
             if destructor == DESTRUCTOR_REMOVE:
                 collection = element.read()
-                elems_to_remove = len(collection)
-
-                # Color ramp doesn't allow to remove all elements
-                if type(element_type) == T.ColorRampElement:
-                    elems_to_remove -= 1
-
-                for i in range(elems_to_remove):
+                for i in range(len(collection)-1):
                     collection.remove(collection[0])
             else:
                 getattr(element.read(), DESTRUCTOR_CLEAR)()
@@ -580,7 +574,6 @@
                 dst_curve.points[int(point_idx)].location = pos
             else:
                 dst_curve.points.new(pos[0], pos[1])
-        curves.update()
 
     def _load_pointer(self, instance, dump):
         rna_property_type = instance.bl_rna_property.fixed_type
@@ -592,8 +585,6 @@
             instance.write(bpy.data.textures.get(dump))
         elif isinstance(rna_property_type, T.ColorRamp):
             self._load_default(instance, dump)
-        elif isinstance(rna_property_type, T.NodeTree):
-            instance.write(bpy.data.node_groups.get(dump))
         elif isinstance(rna_property_type, T.Object):
             instance.write(bpy.data.objects.get(dump))
         elif isinstance(rna_property_type, T.Mesh):
@@ -606,8 +597,6 @@
             instance.write(bpy.data.fonts.get(dump))
         elif isinstance(rna_property_type, T.Sound):
             instance.write(bpy.data.sounds.get(dump))
-        # elif isinstance(rna_property_type, T.ParticleSettings):
-        #     instance.write(bpy.data.particles.get(dump))
 
     def _load_matrix(self, matrix, dump):
         matrix.write(mathutils.Matrix(dump))
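The constructor/destructor tables above drive how the Loader rebuilds RNA collections: each element type maps to a creation method plus the dumped keys to pass as arguments. A standalone sketch of that dispatch, with string keys standing in for the bpy.types classes the real table uses:

CONSTRUCTOR_NEW = "new"
CONSTRUCTOR_ADD = "add"

_constructors = {
    'ColorRampElement': (CONSTRUCTOR_NEW, ["position"]),
    'Modifier': (CONSTRUCTOR_NEW, ["name", "type"]),
    'Constraint': (CONSTRUCTOR_NEW, ["type"]),
}


def construct_element(collection, type_name: str, dumped: dict):
    """Create a collection element with the strategy registered for its type."""
    strategy = _constructors.get(type_name)
    if strategy is None:
        return None
    method, arg_names = strategy
    return getattr(collection, method)(*[dumped[a] for a in arg_names])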

View File

@@ -16,68 +16,76 @@
 # ##### END GPL LICENSE BLOCK #####
 
 import logging
-import sys
-import traceback
 
 import bpy
-from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE,
-                                   STATE_INITIAL, STATE_LOBBY, STATE_QUITTING,
-                                   STATE_SRV_SYNC, STATE_SYNCING, UP)
-from replication.exception import NonAuthorizedOperationError, ContextError
+
+from . import utils
+from .presence import (renderer,
+                       UserFrustumWidget,
+                       UserNameWidget,
+                       UserSelectionWidget,
+                       refresh_3d_view,
+                       generate_user_camera,
+                       get_view_matrix,
+                       refresh_sidebar_view)
+from replication.constants import (FETCHED,
+                                   UP,
+                                   RP_COMMON,
+                                   STATE_INITIAL,
+                                   STATE_QUITTING,
+                                   STATE_ACTIVE,
+                                   STATE_SYNCING,
+                                   STATE_LOBBY,
+                                   STATE_SRV_SYNC,
+                                   REPARENT)
 from replication.interface import session
-from replication import porcelain
-from . import operators, utils
-from .presence import (UserFrustumWidget, UserNameWidget, UserSelectionWidget,
-                       generate_user_camera, get_view_matrix, refresh_3d_view,
-                       refresh_sidebar_view, renderer)
-from . import shared_data
-
-this = sys.modules[__name__]
-
-# Registered timers
-this.registry = dict()
-
-def is_annotating(context: bpy.types.Context):
-    """ Check if the annotate mode is enabled
-    """
-    return bpy.context.workspace.tools.from_space_view3d_mode('OBJECT', create=False).idname == 'builtin.annotate'
+from replication.exception import NonAuthorizedOperationError
+
+
+class Delayable():
+    """Delayable task interface
+    """
+
+    def __init__(self):
+        self.is_registered = False
+
+    def register(self):
+        raise NotImplementedError
+
+    def execute(self):
+        raise NotImplementedError
+
+    def unregister(self):
+        raise NotImplementedError
 
 
-class Timer(object):
+class Timer(Delayable):
     """Timer binder interface for blender
 
     Run a bpy.app.Timer in the background looping at the given rate
     """
 
-    def __init__(self, timeout=10, id=None):
-        self._timeout = timeout
-        self.is_running = False
-        self.id = id if id else self.__class__.__name__
+    def __init__(self, duration=1):
+        super().__init__()
+        self._timeout = duration
+        self._running = True
 
     def register(self):
         """Register the timer into the blender timer system
         """
-        if not self.is_running:
-            this.registry[self.id] = self
+        if not self.is_registered:
             bpy.app.timers.register(self.main)
-            self.is_running = True
+            self.is_registered = True
             logging.debug(f"Register {self.__class__.__name__}")
         else:
             logging.debug(
                 f"Timer {self.__class__.__name__} already registered")
 
     def main(self):
-        try:
-            self.execute()
-        except Exception as e:
-            logging.error(e)
-            self.unregister()
-            traceback.print_exc()
-            session.disconnect(reason=f"Error during timer {self.id} execution")
-        else:
-            if self.is_running:
-                return self._timeout
+        self.execute()
+
+        if self._running:
+            return self._timeout
 
     def execute(self):
@@ -89,100 +97,58 @@ class Timer(object):
         """Unnegister the timer of the blender timer system
         """
         if bpy.app.timers.is_registered(self.main):
-            logging.info(f"Unregistering {self.id}")
             bpy.app.timers.unregister(self.main)
-            del this.registry[self.id]
-        self.is_running = False
-
-
-class SessionBackupTimer(Timer):
-    def __init__(self, timeout=10, filepath=None):
-        self._filepath = filepath
-        super().__init__(timeout)
-
-    def execute(self):
-        session.repository.dumps(self._filepath)
-
-
-class SessionListenTimer(Timer):
-    def execute(self):
-        session.listen()
+            self._running = False
 
 
 class ApplyTimer(Timer):
+    def __init__(self, timout=1, target_type=None):
+        self._type = target_type
+        super().__init__(timout)
+
     def execute(self):
-        if session and session.state == STATE_ACTIVE:
-            for node in session.repository.graph.keys():
-                node_ref = session.repository.graph.get(node)
+        if session and session.state['STATE'] == STATE_ACTIVE:
+            if self._type:
+                nodes = session.list(filter=self._type)
+            else:
+                nodes = session.list()
+
+            for node in nodes:
+                node_ref = session.get(uuid=node)
 
                 if node_ref.state == FETCHED:
                     try:
-                        shared_data.session.applied_updates.append(node)
-                        porcelain.apply(session.repository, node)
+                        session.apply(node)
                     except Exception as e:
-                        logging.error(f"Fail to apply {node_ref.uuid}")
-                        traceback.print_exc()
-                else:
-                    impl = session.repository.rdp.get_implementation(node_ref.instance)
-                    if impl.bl_reload_parent:
-                        for parent in session.repository.graph.get_parents(node):
-                            logging.debug("Refresh parent {node}")
-                            porcelain.apply(session.repository,
-                                            parent.uuid,
-                                            force=True)
-                    if hasattr(impl, 'bl_reload_child') and impl.bl_reload_child:
-                        for dep in node_ref.dependencies:
-                            porcelain.apply(session.repository,
-                                            dep,
-                                            force=True)
+                        logging.error(f"Fail to apply {node_ref.uuid}: {e}")
+                elif node_ref.state == REPARENT:
+                    # Reload the node
+                    node_ref.remove_instance()
+                    node_ref.resolve()
+                    session.apply(node)
+                    for parent in session._graph.find_parents(node):
+                        logging.info(f"Applying parent {parent}")
+                        session.apply(parent, force=True)
+                    node_ref.state = UP
 
 
 class DynamicRightSelectTimer(Timer):
-    def __init__(self, timeout=.1):
-        super().__init__(timeout)
+    def __init__(self, timout=.1):
+        super().__init__(timout)
         self._last_selection = []
         self._user = None
-        self._annotating = False
+        self._right_strategy = RP_COMMON
 
     def execute(self):
         settings = utils.get_preferences()
 
-        if session and session.state == STATE_ACTIVE:
+        if session and session.state['STATE'] == STATE_ACTIVE:
             # Find user
             if self._user is None:
                 self._user = session.online_users.get(settings.username)
 
             if self._user:
-                ctx = bpy.context
-                annotation_gp = ctx.scene.grease_pencil
-
-                if annotation_gp and not annotation_gp.uuid:
-                    ctx.scene.update_tag()
-
-                # if an annotation exist and is tracked
-                if annotation_gp and annotation_gp.uuid:
-                    registered_gp = session.repository.graph.get(annotation_gp.uuid)
-                    if is_annotating(bpy.context):
-                        # try to get the right on it
-                        if registered_gp.owner == RP_COMMON:
-                            self._annotating = True
-                            logging.debug(
-                                "Getting the right on the annotation GP")
-                            porcelain.lock(session.repository,
-                                           registered_gp.uuid,
-                                           ignore_warnings=True,
-                                           affect_dependencies=False)
-
-                        if registered_gp.owner == settings.username:
-                            gp_node = session.repository.graph.get(annotation_gp.uuid)
-                            porcelain.commit(session.repository, gp_node.uuid)
-                            porcelain.push(session.repository, 'origin', gp_node.uuid)
-
-                    elif self._annotating:
-                        porcelain.unlock(session.repository,
-                                         registered_gp.uuid,
-                                         ignore_warnings=True,
-                                         affect_dependencies=False)
-
                 current_selection = utils.get_selected_objects(
                     bpy.context.scene,
                     bpy.data.window_managers['WinMan'].windows[0].view_layer
@@ -195,24 +161,23 @@ class DynamicRightSelectTimer(Timer):
                     # change old selection right to common
                     for obj in obj_common:
-                        node = session.repository.graph.get(obj)
+                        node = session.get(uuid=obj)
 
                         if node and (node.owner == settings.username or node.owner == RP_COMMON):
                             recursive = True
                             if node.data and 'instance_type' in node.data.keys():
                                 recursive = node.data['instance_type'] != 'COLLECTION'
                             try:
-                                porcelain.unlock(session.repository,
-                                                 node.uuid,
-                                                 ignore_warnings=True,
-                                                 affect_dependencies=recursive)
+                                session.change_owner(
+                                    node.uuid,
+                                    RP_COMMON,
+                                    recursive=recursive)
                             except NonAuthorizedOperationError:
-                                logging.warning(
-                                    f"Not authorized to change {node} owner")
+                                logging.warning(f"Not authorized to change {node} owner")
 
                     # change new selection to our
                     for obj in obj_ours:
-                        node = session.repository.graph.get(obj)
+                        node = session.get(uuid=obj)
 
                         if node and node.owner == RP_COMMON:
                             recursive = True
@@ -220,13 +185,12 @@ class DynamicRightSelectTimer(Timer):
                                 recursive = node.data['instance_type'] != 'COLLECTION'
 
                             try:
-                                porcelain.lock(session.repository,
-                                               node.uuid,
-                                               ignore_warnings=True,
-                                               affect_dependencies=recursive)
+                                session.change_owner(
+                                    node.uuid,
+                                    settings.username,
+                                    recursive=recursive)
                             except NonAuthorizedOperationError:
-                                logging.warning(
-                                    f"Not authorized to change {node} owner")
+                                logging.warning(f"Not authorized to change {node} owner")
                         else:
                             return
@@ -236,35 +200,33 @@
                         'selected_objects': current_selection
                     }
-                    porcelain.update_user_metadata(session.repository, user_metadata)
+                    session.update_user_metadata(user_metadata)
                     logging.debug("Update selection")
 
                 # Fix deselection until right managment refactoring (with Roles concepts)
-                if len(current_selection) == 0 :
-                    owned_keys = [k for k, v in session.repository.graph.items() if v.owner==settings.username]
+                if len(current_selection) == 0 and self._right_strategy == RP_COMMON:
+                    owned_keys = session.list(
+                        filter_owner=settings.username)
                     for key in owned_keys:
-                        node = session.repository.graph.get(key)
+                        node = session.get(uuid=key)
                         try:
-                            porcelain.unlock(session.repository,
-                                             key,
-                                             ignore_warnings=True,
-                                             affect_dependencies=True)
+                            session.change_owner(
+                                key,
+                                RP_COMMON,
+                                recursive=recursive)
                         except NonAuthorizedOperationError:
-                            logging.warning(
-                                f"Not authorized to change {key} owner")
+                            logging.warning(f"Not authorized to change {key} owner")
 
             for obj in bpy.data.objects:
                 object_uuid = getattr(obj, 'uuid', None)
                 if object_uuid:
-                    is_selectable = not session.repository.is_node_readonly(object_uuid)
+                    is_selectable = not session.is_readonly(object_uuid)
                     if obj.hide_select != is_selectable:
                         obj.hide_select = is_selectable
-                        shared_data.session.applied_updates.append(object_uuid)
 
 
 class ClientUpdate(Timer):
-    def __init__(self, timeout=.1):
-        super().__init__(timeout)
+    def __init__(self, timout=.1):
+        super().__init__(timout)
         self.handle_quit = False
         self.users_metadata = {}
@@ -272,7 +234,7 @@ class ClientUpdate(Timer):
         settings = utils.get_preferences()
 
         if session and renderer:
-            if session.state in [STATE_ACTIVE, STATE_LOBBY]:
+            if session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]:
                 local_user = session.online_users.get(
                     settings.username)
@@ -311,31 +273,31 @@
                         'frame_current': bpy.context.scene.frame_current,
                         'scene_current': scene_current
                     }
-                    porcelain.update_user_metadata(session.repository, metadata)
+                    session.update_user_metadata(metadata)
 
                 # Update client representation
                 # Update client current scene
                 elif scene_current != local_user_metadata['scene_current']:
                     local_user_metadata['scene_current'] = scene_current
-                    porcelain.update_user_metadata(session.repository, local_user_metadata)
+                    session.update_user_metadata(local_user_metadata)
                 elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']:
                     local_user_metadata['view_corners'] = current_view_corners
                     local_user_metadata['view_matrix'] = get_view_matrix(
                     )
-                    porcelain.update_user_metadata(session.repository, local_user_metadata)
+                    session.update_user_metadata(local_user_metadata)
 
 
 class SessionStatusUpdate(Timer):
-    def __init__(self, timeout=1):
-        super().__init__(timeout)
+    def __init__(self, timout=1):
+        super().__init__(timout)
 
     def execute(self):
         refresh_sidebar_view()
 
 
 class SessionUserSync(Timer):
-    def __init__(self, timeout=1):
-        super().__init__(timeout)
+    def __init__(self, timout=1):
+        super().__init__(timout)
         self.settings = utils.get_preferences()
 
     def execute(self):
@@ -368,12 +330,12 @@ class SessionUserSync(Timer):
 
 
 class MainThreadExecutor(Timer):
-    def __init__(self, timeout=1, execution_queue=None):
-        super().__init__(timeout)
+    def __init__(self, timout=1, execution_queue=None):
+        super().__init__(timout)
         self.execution_queue = execution_queue
 
     def execute(self):
         while not self.execution_queue.empty():
-            function, kwargs = self.execution_queue.get()
+            function = self.execution_queue.get()
             logging.debug(f"Executing {function.__name__}")
-            function(**kwargs)
+            function()
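The restored Timer.main follows the bpy.app.timers contract: a registered callback that returns a float is rescheduled after that many seconds, and one that returns None is dropped. A minimal self-contained example of that contract (to be run inside Blender; the names are illustrative):

import bpy

state = {'ticks': 0}


def tick():
    state['ticks'] += 1
    print(f"tick {state['ticks']}")
    if state['ticks'] >= 3:
        return None  # returning None unregisters the timer
    return 1.0       # run again in one second


bpy.app.timers.register(tick)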

View File

@@ -24,25 +24,20 @@ import sys
 from pathlib import Path
 import socket
 import re
-import bpy
 
-VERSION_EXPR = re.compile('\d+.\d+.\d+')
+VERSION_EXPR = re.compile('\d+\.\d+\.\d+\w\d+')
 
-THIRD_PARTY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
 DEFAULT_CACHE_DIR = os.path.join(
     os.path.dirname(os.path.abspath(__file__)), "cache")
-REPLICATION_DEPENDENCIES = {
-    "zmq",
-    "deepdiff"
-}
-LIBS = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
-REPLICATION = os.path.join(LIBS,"replication")
 
 PYTHON_PATH = None
 SUBPROCESS_DIR = None
 
 rtypes = []
 
-def module_can_be_imported(name: str) -> bool:
+def module_can_be_imported(name):
     try:
         __import__(name)
         return True
@@ -55,7 +50,7 @@ def install_pip():
     subprocess.run([str(PYTHON_PATH), "-m", "ensurepip"])
 
 
-def install_package(name: str, install_dir: str):
+def install_package(name, version):
     logging.info(f"installing {name} version...")
     env = os.environ
     if "PIP_REQUIRE_VIRTUALENV" in env:
@@ -65,13 +60,9 @@ def install_package(name: str, install_dir: str):
         # env var for the subprocess.
         env = os.environ.copy()
         del env["PIP_REQUIRE_VIRTUALENV"]
-    subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}", "-t", install_dir], env=env)
+    subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}=={version}"], env=env)
 
-    if name in sys.modules:
-        del sys.modules[name]
 
-
-def check_package_version(name: str, required_version: str):
+def check_package_version(name, required_version):
     logging.info(f"Checking {name} version...")
     out = subprocess.run([str(PYTHON_PATH), "-m", "pip", "show", name], capture_output=True)
@@ -83,7 +74,6 @@ def check_package_version(name: str, required_version: str):
     logging.info(f"{name} need an update")
     return False
-
 
 def get_ip():
     """
     Retrieve the main network interface IP.
@@ -101,25 +91,7 @@ def check_dir(dir):
         os.makedirs(dir)
 
 
-def setup_paths(paths: list):
-    """ Add missing path to sys.path
-    """
-    for path in paths:
-        if path not in sys.path:
-            logging.debug(f"Adding {path} dir to the path.")
-            sys.path.insert(0, path)
-
-
-def remove_paths(paths: list):
-    """ Remove list of path from sys.path
-    """
-    for path in paths:
-        if path in sys.path:
-            logging.debug(f"Removing {path} dir from the path.")
-            sys.path.remove(path)
-
-
-def install_modules(dependencies: list, python_path: str, install_dir: str):
+def setup(dependencies, python_path):
     global PYTHON_PATH, SUBPROCESS_DIR
 
     PYTHON_PATH = Path(python_path)
@@ -128,23 +100,9 @@ def install_modules(dependencies: list, python_path: str, install_dir: str):
     if not module_can_be_imported("pip"):
         install_pip()
 
-    for package_name in dependencies:
+    for package_name, package_version in dependencies:
         if not module_can_be_imported(package_name):
-            install_package(package_name, install_dir=install_dir)
+            install_package(package_name, package_version)
             module_can_be_imported(package_name)
-
-
-def register():
-    if bpy.app.version[1] >= 91:
-        python_binary_path = sys.executable
-    else:
-        python_binary_path = bpy.app.binary_path_python
-
-    for module_name in list(sys.modules.keys()):
-        if 'replication' in module_name:
-            del sys.modules[module_name]
-
-    setup_paths([LIBS, REPLICATION])
-    install_modules(REPLICATION_DEPENDENCIES, python_binary_path, install_dir=LIBS)
-
-
-def unregister():
-    remove_paths([REPLICATION, LIBS])
+        elif not check_package_version(package_name, package_version):
+            install_package(package_name, package_version)
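The restored install path pins exact versions ({name}=={version}) into Blender's bundled interpreter, where the removed variant installed unpinned packages into a local libs folder targeted with pip's -t flag. A rough standalone equivalent of the pinned call, assuming sys.executable resolves to Blender's Python (true for Blender 2.91 and later):

import subprocess
import sys


def install_pinned(name: str, version: str) -> None:
    # Same shape as the restored install_package(name, version).
    subprocess.run(
        [sys.executable, "-m", "pip", "install", f"{name}=={version}"],
        check=True)

# Example pin, not taken from this diff:
# install_pinned("deepdiff", "5.0.2")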

View File

@@ -1,150 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####

import logging

import bpy
from bpy.app.handlers import persistent
from replication import porcelain
from replication.constants import RP_COMMON, STATE_ACTIVE, STATE_SYNCING, UP
from replication.exception import ContextError, NonAuthorizedOperationError
from replication.interface import session

from . import shared_data, utils


def sanitize_deps_graph(remove_nodes: bool = False):
    """ Cleanup the replication graph
    """
    if session and session.state == STATE_ACTIVE:
        start = utils.current_milli_time()
        rm_cpt = 0
        for node in session.repository.graph.values():
            node.instance = session.repository.rdp.resolve(node.data)
            if node is None \
                    or (node.state == UP and not node.instance):
                if remove_nodes:
                    try:
                        porcelain.rm(session.repository,
                                     node.uuid,
                                     remove_dependencies=False)
                        logging.info(f"Removing {node.uuid}")
                        rm_cpt += 1
                    except NonAuthorizedOperationError:
                        continue
        logging.info(f"Sanitize took { utils.current_milli_time()-start} ms, removed {rm_cpt} nodes")


def update_external_dependencies():
    """Force external dependencies(files such as images) evaluation
    """
    nodes_ids = [n.uuid for n in session.repository.graph.values() if n.data['type_id'] in ['WindowsPath', 'PosixPath']]
    for node_id in nodes_ids:
        node = session.repository.graph.get(node_id)
        if node and node.owner in [session.repository.username, RP_COMMON]:
            porcelain.commit(session.repository, node_id)
            porcelain.push(session.repository, 'origin', node_id)


@persistent
def on_scene_update(scene):
    """Forward blender depsgraph update to replication
    """
    if session and session.state == STATE_ACTIVE:
        context = bpy.context
        blender_depsgraph = bpy.context.view_layer.depsgraph
        dependency_updates = [u for u in blender_depsgraph.updates]
        settings = utils.get_preferences()
        incoming_updates = shared_data.session.applied_updates

        distant_update = [getattr(u.id, 'uuid', None) for u in dependency_updates if getattr(u.id, 'uuid', None) in incoming_updates]
        if distant_update:
            for u in distant_update:
                shared_data.session.applied_updates.remove(u)
            logging.debug(f"Ignoring distant update of {dependency_updates[0].id.name}")
            return

        update_external_dependencies()

        # NOTE: maybe we don't need to check each update but only the first
        for update in reversed(dependency_updates):
            update_uuid = getattr(update.id, 'uuid', None)
            if update_uuid:
                node = session.repository.graph.get(update.id.uuid)
                check_common = session.repository.rdp.get_implementation(update.id).bl_check_common

                if node and (node.owner == session.repository.username or check_common):
                    logging.debug(f"Evaluate {update.id.name}")
                    if node.state == UP:
                        try:
                            porcelain.commit(session.repository, node.uuid)
                            porcelain.push(session.repository,
                                           'origin', node.uuid)
                        except ReferenceError:
                            logging.debug(f"Reference error {node.uuid}")
                        except ContextError as e:
                            logging.debug(e)
                        except Exception as e:
                            logging.error(e)
                else:
                    continue
            elif isinstance(update.id, bpy.types.Scene):
                scn_uuid = porcelain.add(session.repository, update.id)
                porcelain.commit(session.repository, scn_uuid)
                porcelain.push(session.repository, 'origin', scn_uuid)


@persistent
def resolve_deps_graph(dummy):
    """Resolve deps graph

    Temporary solution to resolve each node pointers after a Undo.
    A future solution should be to avoid storing dataclock reference...
    """
    if session and session.state == STATE_ACTIVE:
        sanitize_deps_graph(remove_nodes=True)


@persistent
def load_pre_handler(dummy):
    if session and session.state in [STATE_ACTIVE, STATE_SYNCING]:
        bpy.ops.session.stop()


@persistent
def update_client_frame(scene):
    if session and session.state == STATE_ACTIVE:
        porcelain.update_user_metadata(session.repository, {
            'frame_current': scene.frame_current
        })


def register():
    bpy.app.handlers.undo_post.append(resolve_deps_graph)
    bpy.app.handlers.redo_post.append(resolve_deps_graph)

    bpy.app.handlers.load_pre.append(load_pre_handler)
    bpy.app.handlers.frame_change_pre.append(update_client_frame)


def unregister():
    bpy.app.handlers.undo_post.remove(resolve_deps_graph)
    bpy.app.handlers.redo_post.remove(resolve_deps_graph)

    bpy.app.handlers.load_pre.remove(load_pre_handler)
    bpy.app.handlers.frame_change_pre.remove(update_client_frame)
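Everything in the removed module hangs off bpy.app.handlers with the @persistent decorator, which keeps a callback registered across .blend loads. The bare pattern, reduced to a self-contained example:

import bpy
from bpy.app.handlers import persistent


@persistent
def on_frame_change(scene):
    print(f"frame is now {scene.frame_current}")


def register():
    bpy.app.handlers.frame_change_pre.append(on_frame_change)


def unregister():
    if on_frame_change in bpy.app.handlers.frame_change_pre:
        bpy.app.handlers.frame_change_pre.remove(on_frame_change)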

View File

@ -17,52 +17,34 @@
import asyncio import asyncio
import copy
import gzip
import logging import logging
import os import os
import queue import queue
import random import random
import shutil import shutil
import string import string
import sys
import time import time
import traceback
from datetime import datetime
from operator import itemgetter from operator import itemgetter
from pathlib import Path from pathlib import Path
from queue import Queue from queue import Queue
from time import gmtime, strftime
from bpy.props import FloatProperty
try:
import _pickle as pickle
except ImportError:
import pickle
import bpy import bpy
import mathutils import mathutils
from bpy.app.handlers import persistent from bpy.app.handlers import persistent
from bpy_extras.io_utils import ExportHelper, ImportHelper from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE,
from replication import porcelain
from replication.constants import (COMMITED, FETCHED, RP_COMMON, STATE_ACTIVE,
STATE_INITIAL, STATE_SYNCING, UP) STATE_INITIAL, STATE_SYNCING, UP)
from replication.exception import ContextError, NonAuthorizedOperationError from replication.data import ReplicatedDataFactory
from replication.exception import NonAuthorizedOperationError
from replication.interface import session from replication.interface import session
from replication.objects import Node
from replication.protocol import DataTranslationProtocol
from replication.repository import Repository
from . import bl_types, environment, shared_data, timers, ui, utils from . import bl_types, delayable, environment, ui, utils
from .handlers import on_scene_update, sanitize_deps_graph from .presence import (SessionStatusWidget, renderer, view3d_find)
from .presence import SessionStatusWidget, renderer, view3d_find
from .timers import registry
background_execution_queue = Queue() background_execution_queue = Queue()
deleyables = [] delayables = []
stop_modal_executor = False stop_modal_executor = False
def session_callback(name): def session_callback(name):
""" Session callback wrapper """ Session callback wrapper
@ -71,8 +53,8 @@ def session_callback(name):
""" """
def func_wrapper(func): def func_wrapper(func):
@session.register(name) @session.register(name)
def add_background_task(**kwargs): def add_background_task():
background_execution_queue.put((func, kwargs)) background_execution_queue.put(func)
return add_background_task return add_background_task
return func_wrapper return func_wrapper
@ -84,65 +66,53 @@ def initialize_session():
settings = utils.get_preferences() settings = utils.get_preferences()
runtime_settings = bpy.context.window_manager.session runtime_settings = bpy.context.window_manager.session
if not runtime_settings.is_host:
logging.info("Intializing the scene")
# Step 1: Constrect nodes # Step 1: Constrect nodes
logging.info("Instantiating nodes") for node in session._graph.list_ordered():
for node in session.repository.index_sorted: node_ref = session.get(node)
node_ref = session.repository.graph.get(node) if node_ref.state == FETCHED:
if node_ref is None: node_ref.resolve()
logging.error(f"Can't construct node {node}")
elif node_ref.state == FETCHED:
node_ref.instance = session.repository.rdp.resolve(node_ref.data)
if node_ref.instance is None:
node_ref.instance = session.repository.rdp.construct(node_ref.data)
node_ref.instance.uuid = node_ref.uuid
# Step 2: Load nodes # Step 2: Load nodes
logging.info("Applying nodes") for node in session._graph.list_ordered():
for node in session.repository.heads: node_ref = session.get(node)
porcelain.apply(session.repository, node) if node_ref.state == FETCHED:
node_ref.apply()
logging.info("Registering timers")
# Step 4: Register blender timers # Step 4: Register blender timers
for d in deleyables: for d in delayables:
d.register() d.register()
if settings.update_method == 'DEPSGRAPH':
bpy.app.handlers.depsgraph_update_post.append(depsgraph_evaluation)
# Step 5: Clearing history bpy.ops.session.apply_armature_operator('INVOKE_DEFAULT')
utils.flush_history()
# Step 6: Launch deps graph update handling
bpy.app.handlers.depsgraph_update_post.append(on_scene_update)
@session_callback('on_exit') @session_callback('on_exit')
def on_connection_end(reason="none"): def on_connection_end():
"""Session connection finished handler """Session connection finished handler
""" """
global deleyables, stop_modal_executor global delayables, stop_modal_executor
settings = utils.get_preferences() settings = utils.get_preferences()
# Step 1: Unregister blender timers # Step 1: Unregister blender timers
for d in deleyables: for d in delayables:
try: try:
d.unregister() d.unregister()
except: except:
continue continue
deleyables.clear()
stop_modal_executor = True stop_modal_executor = True
if on_scene_update in bpy.app.handlers.depsgraph_update_post: if settings.update_method == 'DEPSGRAPH':
bpy.app.handlers.depsgraph_update_post.remove(on_scene_update) bpy.app.handlers.depsgraph_update_post.remove(
depsgraph_evaluation)
# Step 3: remove file handled # Step 3: remove file handled
logger = logging.getLogger() logger = logging.getLogger()
for handler in logger.handlers: for handler in logger.handlers:
if isinstance(handler, logging.FileHandler): if isinstance(handler, logging.FileHandler):
logger.removeHandler(handler) logger.removeHandler(handler)
if reason != "user":
bpy.ops.session.notify('INVOKE_DEFAULT', message=f"Disconnected from session. Reason: {reason}. ")
# OPERATORS # OPERATORS
@ -158,15 +128,15 @@ class SessionStartOperator(bpy.types.Operator):
return True return True
def execute(self, context): def execute(self, context):
global deleyables global delayables
settings = utils.get_preferences() settings = utils.get_preferences()
runtime_settings = context.window_manager.session runtime_settings = context.window_manager.session
users = bpy.data.window_managers['WinMan'].online_users users = bpy.data.window_managers['WinMan'].online_users
admin_pass = settings.password admin_pass = runtime_settings.password
use_extern_update = settings.update_method == 'DEPSGRAPH'
users.clear() users.clear()
deleyables.clear() delayables.clear()
logger = logging.getLogger() logger = logging.getLogger()
if len(logger.handlers) == 1: if len(logger.handlers) == 1:
@ -175,10 +145,9 @@ class SessionStartOperator(bpy.types.Operator):
datefmt='%H:%M:%S' datefmt='%H:%M:%S'
) )
start_time = datetime.now().strftime('%Y_%m_%d_%H-%M-%S')
log_directory = os.path.join( log_directory = os.path.join(
settings.cache_directory, settings.cache_directory,
f"multiuser_{start_time}.log") "multiuser_client.log")
os.makedirs(settings.cache_directory, exist_ok=True) os.makedirs(settings.cache_directory, exist_ok=True)
@ -191,25 +160,46 @@ class SessionStartOperator(bpy.types.Operator):
handler.setFormatter(formatter) handler.setFormatter(formatter)
bpy_protocol = bl_types.get_data_translation_protocol() bpy_factory = ReplicatedDataFactory()
supported_bl_types = []
# Check if supported_datablocks are up to date before starting the # init the factory with supported types
# the session for type in bl_types.types_to_register():
for dcc_type_id in bpy_protocol.implementations.keys(): type_module = getattr(bl_types, type)
if dcc_type_id not in settings.supported_datablocks: type_impl_name = f"Bl{type.split('_')[1].capitalize()}"
logging.info(f"{dcc_type_id} not found, \ type_module_class = getattr(type_module, type_impl_name)
supported_bl_types.append(type_module_class.bl_id)
if type_impl_name not in settings.supported_datablocks:
logging.info(f"{type_impl_name} not found, \
regenerate type settings...") regenerate type settings...")
settings.generate_supported_types() settings.generate_supported_types()
type_local_config = settings.supported_datablocks[type_impl_name]
if bpy.app.version[1] >= 91: bpy_factory.register_type(
python_binary_path = sys.executable type_module_class.bl_class,
else: type_module_class,
python_binary_path = bpy.app.binary_path_python timer=type_local_config.bl_delay_refresh*1000,
automatic=type_local_config.auto_push,
check_common=type_module_class.bl_check_common)
repo = Repository( if settings.update_method == 'DEFAULT':
rdp=bpy_protocol, if type_local_config.bl_delay_apply > 0:
username=settings.username) delayables.append(
delayable.ApplyTimer(
timout=type_local_config.bl_delay_apply,
target_type=type_module_class))
session.configure(
factory=bpy_factory,
python_path=bpy.app.binary_path_python,
external_update_handling=use_extern_update)
if settings.update_method == 'DEPSGRAPH':
delayables.append(delayable.ApplyTimer(
settings.depsgraph_update_rate/1000))
# Host a session # Host a session
if self.host: if self.host:
@ -220,19 +210,13 @@ class SessionStartOperator(bpy.types.Operator):
runtime_settings.internet_ip = environment.get_ip() runtime_settings.internet_ip = environment.get_ip()
try: try:
# Init repository
for scene in bpy.data.scenes: for scene in bpy.data.scenes:
porcelain.add(repo, scene) session.add(scene)
porcelain.remote_add(
repo,
'origin',
'127.0.0.1',
settings.port,
admin_password=admin_pass)
session.host( session.host(
repository= repo, id=settings.username,
remote='origin', port=settings.port,
ipc_port=settings.ipc_port,
timeout=settings.connection_timeout, timeout=settings.connection_timeout,
password=admin_pass, password=admin_pass,
cache_directory=settings.cache_directory, cache_directory=settings.cache_directory,
@ -242,7 +226,7 @@ class SessionStartOperator(bpy.types.Operator):
except Exception as e: except Exception as e:
self.report({'ERROR'}, repr(e)) self.report({'ERROR'}, repr(e))
logging.error(f"Error: {e}") logging.error(f"Error: {e}")
traceback.print_exc()
# Join a session # Join a session
else: else:
if not runtime_settings.admin: if not runtime_settings.admin:
@ -251,14 +235,11 @@ class SessionStartOperator(bpy.types.Operator):
admin_pass = None admin_pass = None
try: try:
porcelain.remote_add(
repo,
'origin',
settings.ip,
settings.port,
admin_password=admin_pass)
session.connect( session.connect(
repository= repo, id=settings.username,
address=settings.ip,
port=settings.port,
ipc_port=settings.ipc_port,
timeout=settings.connection_timeout, timeout=settings.connection_timeout,
password=admin_pass password=admin_pass
) )
@ -267,26 +248,27 @@ class SessionStartOperator(bpy.types.Operator):
logging.error(str(e)) logging.error(str(e))
# Background client updates service # Background client updates service
deleyables.append(timers.ClientUpdate()) delayables.append(delayable.ClientUpdate())
deleyables.append(timers.DynamicRightSelectTimer()) delayables.append(delayable.DynamicRightSelectTimer())
deleyables.append(timers.ApplyTimer(timeout=settings.depsgraph_update_rate))
session_update = timers.SessionStatusUpdate() session_update = delayable.SessionStatusUpdate()
session_user_sync = timers.SessionUserSync() session_user_sync = delayable.SessionUserSync()
session_background_executor = timers.MainThreadExecutor( session_background_executor = delayable.MainThreadExecutor(
execution_queue=background_execution_queue) execution_queue=background_execution_queue)
session_listen = timers.SessionListenTimer(timeout=0.001)
session_listen.register()
session_update.register() session_update.register()
session_user_sync.register() session_user_sync.register()
session_background_executor.register() session_background_executor.register()
deleyables.append(session_background_executor) delayables.append(session_background_executor)
deleyables.append(session_update) delayables.append(session_update)
deleyables.append(session_user_sync) delayables.append(session_user_sync)
deleyables.append(session_listen)
self.report(
{'INFO'},
f"connecting to tcp://{settings.ip}:{settings.port}")
return {"FINISHED"} return {"FINISHED"}
@ -322,10 +304,9 @@ class SessionInitOperator(bpy.types.Operator):
utils.clean_scene() utils.clean_scene()
for scene in bpy.data.scenes: for scene in bpy.data.scenes:
porcelain.add(session.repository, scene) session.add(scene)
session.init() session.init()
context.window_manager.session.is_host = True
return {"FINISHED"} return {"FINISHED"}
@ -341,11 +322,11 @@ class SessionStopOperator(bpy.types.Operator):
return True return True
def execute(self, context): def execute(self, context):
global deleyables, stop_modal_executor global delayables, stop_modal_executor
if session: if session:
try: try:
session.disconnect(reason='user') session.disconnect()
except Exception as e: except Exception as e:
self.report({'ERROR'}, repr(e)) self.report({'ERROR'}, repr(e))
@ -368,11 +349,11 @@ class SessionKickOperator(bpy.types.Operator):
return True return True
def execute(self, context): def execute(self, context):
global deleyables, stop_modal_executor global delayables, stop_modal_executor
assert(session) assert(session)
try: try:
porcelain.kick(session.repository, self.user) session.kick(self.user)
except Exception as e: except Exception as e:
self.report({'ERROR'}, repr(e)) self.report({'ERROR'}, repr(e))
@ -401,7 +382,7 @@ class SessionPropertyRemoveOperator(bpy.types.Operator):
def execute(self, context): def execute(self, context):
try: try:
porcelain.rm(session.repository, self.property_path) session.remove(self.property_path)
return {"FINISHED"} return {"FINISHED"}
except: # NonAuthorizedOperationError: except: # NonAuthorizedOperationError:
@ -443,17 +424,9 @@ class SessionPropertyRightOperator(bpy.types.Operator):
runtime_settings = context.window_manager.session runtime_settings = context.window_manager.session
if session: if session:
if runtime_settings.clients == RP_COMMON: session.change_owner(self.key,
porcelain.unlock(session.repository,
self.key,
ignore_warnings=True,
affect_dependencies=self.recursive)
else:
porcelain.lock(session.repository,
self.key,
runtime_settings.clients, runtime_settings.clients,
ignore_warnings=True, recursive=self.recursive)
affect_dependencies=self.recursive)
return {"FINISHED"} return {"FINISHED"}
@ -568,7 +541,7 @@ class SessionSnapTimeOperator(bpy.types.Operator):
def modal(self, context, event): def modal(self, context, event):
is_running = context.window_manager.session.user_snap_running is_running = context.window_manager.session.user_snap_running
if not is_running: if event.type in {'RIGHTMOUSE', 'ESC'} or not is_running:
self.cancel(context) self.cancel(context)
return {'CANCELLED'} return {'CANCELLED'}
@ -600,29 +573,9 @@ class SessionApply(bpy.types.Operator):
def execute(self, context): def execute(self, context):
logging.debug(f"Running apply on {self.target}") logging.debug(f"Running apply on {self.target}")
try: session.apply(self.target,
node_ref = session.repository.graph.get(self.target) force=True,
porcelain.apply(session.repository, force_dependencies=self.reset_dependencies)
self.target,
force=True)
impl = session.repository.rdp.get_implementation(node_ref.instance)
# NOTE: find another way to handle child and parent automatic reloading
if impl.bl_reload_parent:
for parent in session.repository.graph.get_parents(self.target):
logging.debug(f"Refresh parent {parent}")
porcelain.apply(session.repository,
parent.uuid,
force=True)
if hasattr(impl, 'bl_reload_child') and impl.bl_reload_child:
for dep in node_ref.dependencies:
porcelain.apply(session.repository,
dep,
force=True)
except Exception as e:
self.report({'ERROR'}, repr(e))
traceback.print_exc()
return {"CANCELLED"}
return {"FINISHED"} return {"FINISHED"}
@ -640,16 +593,56 @@ class SessionCommit(bpy.types.Operator):
return True return True
def execute(self, context): def execute(self, context):
try: # session.get(uuid=target).diff()
porcelain.commit(session.repository, self.target) session.commit(uuid=self.target)
porcelain.push(session.repository, 'origin', self.target, force=True) session.push(self.target)
return {"FINISHED"} return {"FINISHED"}
class ApplyArmatureOperator(bpy.types.Operator):
"""Operator which runs its self from a timer"""
bl_idname = "session.apply_armature_operator"
bl_label = "Modal Executor Operator"
_timer = None
def modal(self, context, event):
global stop_modal_executor, modal_executor_queue
if stop_modal_executor:
self.cancel(context)
return {'CANCELLED'}
if event.type == 'TIMER':
if session and session.state['STATE'] == STATE_ACTIVE:
nodes = session.list(filter=bl_types.bl_armature.BlArmature)
for node in nodes:
node_ref = session.get(uuid=node)
if node_ref.state == FETCHED:
try:
session.apply(node)
except Exception as e: except Exception as e:
self.report({'ERROR'}, repr(e)) logging.error("Fail to apply armature: {e}")
return {"CANCELLED"}
return {'PASS_THROUGH'}
def execute(self, context):
wm = context.window_manager
self._timer = wm.event_timer_add(2, window=context.window)
wm.modal_handler_add(self)
return {'RUNNING_MODAL'}
def cancel(self, context):
global stop_modal_executor
wm = context.window_manager
wm.event_timer_remove(self._timer)
stop_modal_executor = False
class SessionClearCache(bpy.types.Operator): class ClearCache(bpy.types.Operator):
"Clear local session cache" "Clear local session cache"
bl_idname = "session.clear_cache" bl_idname = "session.clear_cache"
bl_label = "Modal Executor Operator" bl_label = "Modal Executor Operator"
@ -678,231 +671,6 @@ class SessionClearCache(bpy.types.Operator):
row.label(text=f" Do you really want to remove local cache ? ") row.label(text=f" Do you really want to remove local cache ? ")
class SessionPurgeOperator(bpy.types.Operator):
"Remove node with lost references"
bl_idname = "session.purge"
bl_label = "Purge session data"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
try:
sanitize_deps_graph(remove_nodes=True)
porcelain.purge_orphan_nodes(session.repository)
except Exception as e:
self.report({'ERROR'}, repr(e))
return {"FINISHED"}
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
def draw(self, context):
row = self.layout
row.label(text=f" Do you really want to remove local cache ? ")
class SessionNotifyOperator(bpy.types.Operator):
"""Dialog only operator"""
bl_idname = "session.notify"
bl_label = "Multi-user"
bl_description = "multiuser notification"
message: bpy.props.StringProperty()
@classmethod
def poll(cls, context):
return True
def execute(self, context):
return {'FINISHED'}
def draw(self, context):
layout = self.layout
layout.row().label(text=self.message)
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
class SessionSaveBackupOperator(bpy.types.Operator, ExportHelper):
bl_idname = "session.save"
bl_label = "Save session data"
bl_description = "Save a snapshot of the collaborative session"
# ExportHelper mixin class uses this
filename_ext = ".db"
filter_glob: bpy.props.StringProperty(
default="*.db",
options={'HIDDEN'},
maxlen=255, # Max internal buffer length, longer would be clamped.
)
enable_autosave: bpy.props.BoolProperty(
name="Auto-save",
description="Enable session auto-save",
default=True,
)
save_interval: bpy.props.FloatProperty(
name="Auto save interval",
description="auto-save interval (seconds)",
default=10,
)
def execute(self, context):
if self.enable_autosave:
recorder = timers.SessionBackupTimer(
filepath=self.filepath,
timeout=self.save_interval)
recorder.register()
deleyables.append(recorder)
else:
session.repository.dumps(self.filepath)
return {'FINISHED'}
@classmethod
def poll(cls, context):
return session.state == STATE_ACTIVE
class SessionStopAutoSaveOperator(bpy.types.Operator):
bl_idname = "session.cancel_autosave"
bl_label = "Cancel auto-save"
bl_description = "Cancel session auto-save"
@classmethod
def poll(cls, context):
return (session.state == STATE_ACTIVE and 'SessionBackupTimer' in registry)
def execute(self, context):
autosave_timer = registry.get('SessionBackupTimer')
autosave_timer.unregister()
return {'FINISHED'}
class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
bl_idname = "session.load"
bl_label = "Load session save"
bl_description = "Load a Multi-user session save"
bl_options = {'REGISTER', 'UNDO'}
# ExportHelper mixin class uses this
filename_ext = ".db"
filter_glob: bpy.props.StringProperty(
default="*.db",
options={'HIDDEN'},
maxlen=255, # Max internal buffer length, longer would be clamped.
)
def execute(self, context):
from replication.repository import Repository
# init the factory with supported types
bpy_protocol = bl_types.get_data_translation_protocol()
repo = Repository(bpy_protocol)
repo.loads(self.filepath)
utils.clean_scene()
nodes = [repo.graph.get(n) for n in repo.index_sorted]
# Step 1: Construct nodes
for node in nodes:
node.instance = bpy_protocol.resolve(node.data)
if node.instance is None:
node.instance = bpy_protocol.construct(node.data)
node.instance.uuid = node.uuid
# Step 2: Load nodes
for node in nodes:
porcelain.apply(repo, node.uuid)
return {'FINISHED'}
@classmethod
def poll(cls, context):
return True
class SessionPresetServerAdd(bpy.types.Operator):
"""Add a server to the server list preset"""
bl_idname = "session.preset_server_add"
bl_label = "add server preset"
bl_description = "add the current server to the server preset list"
bl_options = {"REGISTER"}
name : bpy.props.StringProperty(default="server_preset")
@classmethod
def poll(cls, context):
return True
def invoke(self, context, event):
assert(context)
return context.window_manager.invoke_props_dialog(self)
def draw(self, context):
layout = self.layout
col = layout.column()
settings = utils.get_preferences()
col.prop(settings, "server_name", text="server name")
def execute(self, context):
assert(context)
settings = utils.get_preferences()
existing_preset = settings.server_preset.get(settings.server_name)
new_server = existing_preset if existing_preset else settings.server_preset.add()
new_server.name = settings.server_name
new_server.server_ip = settings.ip
new_server.server_port = settings.port
new_server.server_password = settings.password
settings.server_preset_interface = settings.server_name
if new_server == existing_preset:
self.report({'INFO'}, "Server '" + settings.server_name + "' overridden")
else:
self.report({'INFO'}, "New server preset '" + settings.server_name + "'")
return {'FINISHED'}
class SessionPresetServerRemove(bpy.types.Operator):
"""Remove a server to the server list preset"""
bl_idname = "session.preset_server_remove"
bl_label = "remove server preset"
bl_description = "remove the current server from the server preset list"
bl_options = {"REGISTER"}
@classmethod
def poll(cls, context):
return True
def execute(self, context):
assert(context)
settings = utils.get_preferences()
settings.server_preset.remove(settings.server_preset.find(settings.server_preset_interface))
return {'FINISHED'}
def menu_func_import(self, context):
self.layout.operator(SessionLoadSaveOperator.bl_idname, text='Multi-user session snapshot (.db)')
classes = ( classes = (
SessionStartOperator, SessionStartOperator,
SessionStopOperator, SessionStopOperator,
@ -912,30 +680,98 @@ classes = (
SessionPropertyRightOperator, SessionPropertyRightOperator,
SessionApply, SessionApply,
SessionCommit, SessionCommit,
ApplyArmatureOperator,
SessionKickOperator, SessionKickOperator,
SessionInitOperator, SessionInitOperator,
SessionClearCache, ClearCache,
SessionNotifyOperator,
SessionSaveBackupOperator,
SessionLoadSaveOperator,
SessionStopAutoSaveOperator,
SessionPurgeOperator,
SessionPresetServerAdd,
SessionPresetServerRemove,
) )
@persistent
def sanitize_deps_graph(dummy):
"""sanitize deps graph
Temporary solution to resolve each node pointers after a Undo.
A future solution should be to avoid storing dataclock reference...
"""
if session and session.state['STATE'] == STATE_ACTIVE:
for node_key in session.list():
session.get(node_key).resolve()
@persistent
def load_pre_handler(dummy):
if session and session.state['STATE'] in [STATE_ACTIVE, STATE_SYNCING]:
bpy.ops.session.stop()
@persistent
def update_client_frame(scene):
if session and session.state['STATE'] == STATE_ACTIVE:
session.update_user_metadata({
'frame_current': scene.frame_current
})
@persistent
def depsgraph_evaluation(scene):
if session and session.state['STATE'] == STATE_ACTIVE:
context = bpy.context
blender_depsgraph = bpy.context.view_layer.depsgraph
dependency_updates = [u for u in blender_depsgraph.updates]
settings = utils.get_preferences()
# NOTE: maybe we don't need to check each update but only the first
for update in reversed(dependency_updates):
# Is the object tracked?
if update.id.uuid:
# Retrieve local version
node = session.get(update.id.uuid)
# Check our rights on this update:
# - if it's ours or (under common ownership and modified), launch the
# update process
# - if it belongs to someone else, ignore the update (go deeper?)
if node and node.owner in [session.id, RP_COMMON] and node.state == UP:
# Avoid slow geometry update
if 'EDIT' in context.mode and \
not settings.sync_during_editmode:
break
session.stash(node.uuid)
else:
# Distant update
continue
# else:
# # New items !
# logger.error("UPDATE: ADD")
def register(): def register():
from bpy.utils import register_class from bpy.utils import register_class
for cls in classes: for cls in classes:
register_class(cls) register_class(cls)
bpy.app.handlers.undo_post.append(sanitize_deps_graph)
bpy.app.handlers.redo_post.append(sanitize_deps_graph)
bpy.app.handlers.load_pre.append(load_pre_handler)
bpy.app.handlers.frame_change_pre.append(update_client_frame)
def unregister(): def unregister():
if session and session.state == STATE_ACTIVE: if session and session.state['STATE'] == STATE_ACTIVE:
session.disconnect() session.disconnect()
from bpy.utils import unregister_class from bpy.utils import unregister_class
for cls in reversed(classes): for cls in reversed(classes):
unregister_class(cls) unregister_class(cls)
bpy.app.handlers.undo_post.remove(sanitize_deps_graph)
bpy.app.handlers.redo_post.remove(sanitize_deps_graph)
bpy.app.handlers.load_pre.remove(load_pre_handler)
bpy.app.handlers.frame_change_pre.remove(update_client_frame)
@ -29,22 +29,8 @@ from .utils import get_preferences, get_expanded_icon
from replication.constants import RP_COMMON from replication.constants import RP_COMMON
from replication.interface import session from replication.interface import session
# From https://stackoverflow.com/a/106223 IP_EXPR = re.compile('\d+\.\d+\.\d+\.\d+')
IP_REGEX = re.compile("^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$")
HOSTNAME_REGEX = re.compile("^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$")
DEFAULT_PRESETS = {
"localhost" : {
"server_ip": "localhost",
"server_port": 5555,
"server_password": "admin"
},
"public session" : {
"server_ip": "51.75.71.183",
"server_port": 5555,
"server_password": ""
},
}
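For reference, a few inputs the two patterns above accept and reject (illustrative values only):

# Illustrative checks against the patterns defined above.
assert IP_REGEX.search("192.168.0.12")            # dotted quad, octets 0-255
assert not IP_REGEX.search("999.1.1.1")           # out-of-range octet rejected
assert HOSTNAME_REGEX.search("my-server.local")   # RFC 1123 style hostname
assert not HOSTNAME_REGEX.search("-bad.host")     # labels cannot start with '-'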
def randomColor(): def randomColor():
"""Generate a random color """ """Generate a random color """
@ -67,22 +53,24 @@ def update_panel_category(self, context):
def update_ip(self, context): def update_ip(self, context):
ip = IP_REGEX.search(self.ip) ip = IP_EXPR.search(self.ip)
dns = HOSTNAME_REGEX.search(self.ip)
if ip: if ip:
self['ip'] = ip.group() self['ip'] = ip.group()
elif dns:
self['ip'] = dns.group()
else: else:
logging.error("Wrong IP format") logging.error("Wrong IP format")
self['ip'] = "127.0.0.1" self['ip'] = "127.0.0.1"
def update_server_preset_interface(self, context):
self.server_name = self.server_preset.get(self.server_preset_interface).name def update_port(self, context):
self.ip = self.server_preset.get(self.server_preset_interface).server_ip max_port = self.port + 3
self.port = self.server_preset.get(self.server_preset_interface).server_port
self.password = self.server_preset.get(self.server_preset_interface).server_password if self.ipc_port < max_port and \
self['ipc_port'] >= self.port:
logging.error(
"IPC Port in conflict with the port, assigning a random value")
self['ipc_port'] = random.randrange(self.port+4, 10000)
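A worked example of the conflict window checked above: with port = 5555, max_port = 5558, so an ipc_port anywhere in [5555, 5557] triggers the error branch and is reassigned to a random value in [5559, 9999] via random.randrange(self.port+4, 10000).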
def update_directory(self, context): def update_directory(self, context):
new_dir = Path(self.cache_directory) new_dir = Path(self.cache_directory)
@ -105,14 +93,12 @@ def get_log_level(self):
class ReplicatedDatablock(bpy.types.PropertyGroup): class ReplicatedDatablock(bpy.types.PropertyGroup):
type_name: bpy.props.StringProperty() type_name: bpy.props.StringProperty()
bl_name: bpy.props.StringProperty() bl_name: bpy.props.StringProperty()
bl_delay_refresh: bpy.props.FloatProperty()
bl_delay_apply: bpy.props.FloatProperty()
use_as_filter: bpy.props.BoolProperty(default=True) use_as_filter: bpy.props.BoolProperty(default=True)
auto_push: bpy.props.BoolProperty(default=True) auto_push: bpy.props.BoolProperty(default=True)
icon: bpy.props.StringProperty() icon: bpy.props.StringProperty()
class ServerPreset(bpy.types.PropertyGroup):
server_ip: bpy.props.StringProperty()
server_port: bpy.props.IntProperty(default=5555)
server_password: bpy.props.StringProperty(default="admin", subtype = "PASSWORD")
def set_sync_render_settings(self, value): def set_sync_render_settings(self, value):
self['sync_render_settings'] = value self['sync_render_settings'] = value
@ -165,7 +151,7 @@ class SessionPrefs(bpy.types.AddonPreferences):
ip: bpy.props.StringProperty( ip: bpy.props.StringProperty(
name="ip", name="ip",
description='Distant host ip', description='Distant host ip',
default="localhost", default="127.0.0.1",
update=update_ip) update=update_ip)
username: bpy.props.StringProperty( username: bpy.props.StringProperty(
name="Username", name="Username",
@ -180,23 +166,18 @@ class SessionPrefs(bpy.types.AddonPreferences):
description='Distant host port', description='Distant host port',
default=5555 default=5555
) )
server_name: bpy.props.StringProperty(
name="server_name",
description="Custom name of the server",
default='localhost',
)
password: bpy.props.StringProperty(
name="password",
default=random_string_digits(),
description='Session password',
subtype='PASSWORD'
)
sync_flags: bpy.props.PointerProperty( sync_flags: bpy.props.PointerProperty(
type=ReplicationFlags type=ReplicationFlags
) )
supported_datablocks: bpy.props.CollectionProperty( supported_datablocks: bpy.props.CollectionProperty(
type=ReplicatedDatablock, type=ReplicatedDatablock,
) )
ipc_port: bpy.props.IntProperty(
name="ipc_port",
description='internal ttl port (only useful for multiple local instances)',
default=random.randrange(5570, 70000),
update=update_port,
)
init_method: bpy.props.EnumProperty( init_method: bpy.props.EnumProperty(
name='init_method', name='init_method',
description='Init repo', description='Init repo',
@ -212,13 +193,22 @@ class SessionPrefs(bpy.types.AddonPreferences):
connection_timeout: bpy.props.IntProperty( connection_timeout: bpy.props.IntProperty(
name='connection timeout', name='connection timeout',
description='connection timeout before disconnection', description='connection timeout before disconnection',
default=5000 default=1000
)
update_method: bpy.props.EnumProperty(
name='update method',
description='replication update method',
items=[
('DEFAULT', "Default", "Default: Use threads to monitor databloc changes"),
('DEPSGRAPH', "Depsgraph",
"Experimental: Use the blender dependency graph to trigger updates"),
],
) )
# Replication update settings # Replication update settings
depsgraph_update_rate: bpy.props.FloatProperty( depsgraph_update_rate: bpy.props.IntProperty(
name='depsgraph update rate (s)', name='depsgraph update rate',
description='Dependency graph update rate (s)', description='Dependency graph update rate (milliseconds)',
default=1 default=100
) )
clear_memory_filecache: bpy.props.BoolProperty( clear_memory_filecache: bpy.props.BoolProperty(
name="Clear memory filecache", name="Clear memory filecache",
@ -248,31 +238,6 @@ class SessionPrefs(bpy.types.AddonPreferences):
set=set_log_level, set=set_log_level,
get=get_log_level get=get_log_level
) )
presence_hud_scale: bpy.props.FloatProperty(
name="Text scale",
description="Adjust the session widget text scale",
min=7,
max=90,
default=25,
)
presence_hud_hpos: bpy.props.FloatProperty(
name="Horizontal position",
description="Adjust the session widget horizontal position",
min=1,
max=90,
default=1,
step=1,
subtype='PERCENTAGE',
)
presence_hud_vpos: bpy.props.FloatProperty(
name="Vertical position",
description="Adjust the session widget vertical position",
min=1,
max=94,
default=1,
step=1,
subtype='PERCENTAGE',
)
conf_session_identity_expanded: bpy.props.BoolProperty( conf_session_identity_expanded: bpy.props.BoolProperty(
name="Identity", name="Identity",
description="Identity", description="Identity",
@ -288,6 +253,11 @@ class SessionPrefs(bpy.types.AddonPreferences):
description="Rights", description="Rights",
default=False default=False
) )
conf_session_timing_expanded: bpy.props.BoolProperty(
name="timings",
description="timings",
default=False
)
conf_session_cache_expanded: bpy.props.BoolProperty( conf_session_cache_expanded: bpy.props.BoolProperty(
name="Cache", name="Cache",
description="cache", description="cache",
@ -352,25 +322,6 @@ class SessionPrefs(bpy.types.AddonPreferences):
max=59 max=59
) )
# Server preset
def server_list_callback(scene, context):
settings = get_preferences()
enum = []
for i in settings.server_preset:
enum.append((i.name, i.name, ""))
return enum
server_preset: bpy.props.CollectionProperty(
name="server preset",
type=ServerPreset,
)
server_preset_interface: bpy.props.EnumProperty(
name="servers",
description="servers enum",
items=server_list_callback,
update=update_server_preset_interface,
)
# Custom panel # Custom panel
panel_category: bpy.props.StringProperty( panel_category: bpy.props.StringProperty(
description="Choose a name for the category of the panel", description="Choose a name for the category of the panel",
@ -410,7 +361,28 @@ class SessionPrefs(bpy.types.AddonPreferences):
row = box.row() row = box.row()
row.label(text="Init the session from:") row.label(text="Init the session from:")
row.prop(self, "init_method", text="") row.prop(self, "init_method", text="")
row = box.row()
row.label(text="Update method:")
row.prop(self, "update_method", text="")
table = box.box()
table.row().prop(
self, "conf_session_timing_expanded", text="Refresh rates",
icon=get_expanded_icon(self.conf_session_timing_expanded),
emboss=False)
if self.conf_session_timing_expanded:
line = table.row()
line.label(text=" ")
line.separator()
line.label(text="refresh (sec)")
line.label(text="apply (sec)")
for item in self.supported_datablocks:
line = table.row(align=True)
line.label(text="", icon=item.icon)
line.prop(item, "bl_delay_refresh", text="")
line.prop(item, "bl_delay_apply", text="")
# HOST SETTINGS # HOST SETTINGS
box = grid.box() box = grid.box()
box.prop( box.prop(
@ -440,15 +412,6 @@ class SessionPrefs(bpy.types.AddonPreferences):
emboss=False) emboss=False)
if self.conf_session_ui_expanded: if self.conf_session_ui_expanded:
box.row().prop(self, "panel_category", text="Panel category", expand=True) box.row().prop(self, "panel_category", text="Panel category", expand=True)
row = box.row()
row.label(text="Session widget:")
col = box.column(align=True)
col.prop(self, "presence_hud_scale", expand=True)
col.prop(self, "presence_hud_hpos", expand=True)
col.prop(self, "presence_hud_vpos", expand=True)
if self.category == 'UPDATE': if self.category == 'UPDATE':
from . import addon_updater_ops from . import addon_updater_ops
@ -457,30 +420,21 @@ class SessionPrefs(bpy.types.AddonPreferences):
def generate_supported_types(self): def generate_supported_types(self):
self.supported_datablocks.clear() self.supported_datablocks.clear()
bpy_protocol = bl_types.get_data_translation_protocol() for type in bl_types.types_to_register():
# init the factory with supported types
for dcc_type_id, impl in bpy_protocol.implementations.items():
new_db = self.supported_datablocks.add() new_db = self.supported_datablocks.add()
new_db.name = dcc_type_id type_module = getattr(bl_types, type)
new_db.type_name = dcc_type_id type_impl_name = f"Bl{type.split('_')[1].capitalize()}"
type_module_class = getattr(type_module, type_impl_name)
new_db.name = type_impl_name
new_db.type_name = type_impl_name
new_db.bl_delay_refresh = type_module_class.bl_delay_refresh
new_db.bl_delay_apply = type_module_class.bl_delay_apply
new_db.use_as_filter = True new_db.use_as_filter = True
new_db.icon = impl.bl_icon new_db.icon = type_module_class.bl_icon
new_db.bl_name = impl.bl_id new_db.auto_push = type_module_class.bl_automatic_push
new_db.bl_name = type_module_class.bl_id
# custom at launch server preset
def generate_default_presets(self):
for preset_name, preset_data in DEFAULT_PRESETS.items():
existing_preset = self.server_preset.get(preset_name)
if existing_preset:
continue
new_server = self.server_preset.add()
new_server.name = preset_name
new_server.server_ip = preset_data.get('server_ip')
new_server.server_port = preset_data.get('server_port')
new_server.server_password = preset_data.get('server_password',None)
def client_list_callback(scene, context): def client_list_callback(scene, context):
@ -553,16 +507,17 @@ class SessionProps(bpy.types.PropertyGroup):
description='Show only owned datablocks', description='Show only owned datablocks',
default=True default=True
) )
filter_name: bpy.props.StringProperty(
name="filter_name",
default="",
description='Node name filter',
)
admin: bpy.props.BoolProperty( admin: bpy.props.BoolProperty(
name="admin", name="admin",
description='Connect as admin', description='Connect as admin',
default=False default=False
) )
password: bpy.props.StringProperty(
name="password",
default=random_string_digits(),
description='Session password',
subtype='PASSWORD'
)
internet_ip: bpy.props.StringProperty( internet_ip: bpy.props.StringProperty(
name="internet ip", name="internet ip",
default="no found", default="no found",
@ -584,7 +539,6 @@ classes = (
SessionProps, SessionProps,
ReplicationFlags, ReplicationFlags,
ReplicatedDatablock, ReplicatedDatablock,
ServerPreset,
SessionPrefs, SessionPrefs,
) )
@ -600,10 +554,6 @@ def register():
logging.debug('Generating bl_types preferences') logging.debug('Generating bl_types preferences')
prefs.generate_supported_types() prefs.generate_supported_types()
# at launch server presets
prefs.generate_default_presets()
def unregister(): def unregister():
from bpy.utils import unregister_class from bpy.utils import unregister_class
@ -30,12 +30,12 @@ import mathutils
from bpy_extras import view3d_utils from bpy_extras import view3d_utils
from gpu_extras.batch import batch_for_shader from gpu_extras.batch import batch_for_shader
from replication.constants import (STATE_ACTIVE, STATE_AUTH, STATE_CONFIG, from replication.constants import (STATE_ACTIVE, STATE_AUTH, STATE_CONFIG,
STATE_INITIAL, CONNECTING, STATE_INITIAL, STATE_LAUNCHING_SERVICES,
STATE_LOBBY, STATE_QUITTING, STATE_SRV_SYNC, STATE_LOBBY, STATE_QUITTING, STATE_SRV_SYNC,
STATE_SYNCING, STATE_WAITING) STATE_SYNCING, STATE_WAITING)
from replication.interface import session from replication.interface import session
from .utils import find_from_attr, get_state_str, get_preferences from .utils import find_from_attr, get_state_str
# Helper functions # Helper functions
@ -301,38 +301,40 @@ class UserSelectionWidget(Widget):
if not ob: if not ob:
return return
vertex_pos = bbox_from_obj(ob, 1.0) position = None
vertex_indices = (
if ob.type == 'EMPTY':
# TODO: Child case
# Collection instance case
indices = (
(0, 1), (1, 2), (2, 3), (0, 3), (0, 1), (1, 2), (2, 3), (0, 3),
(4, 5), (5, 6), (6, 7), (4, 7), (4, 5), (5, 6), (6, 7), (4, 7),
(0, 4), (1, 5), (2, 6), (3, 7)) (0, 4), (1, 5), (2, 6), (3, 7))
if ob.instance_collection: if ob.instance_collection:
for obj in ob.instance_collection.objects: for obj in ob.instance_collection.objects:
if obj.type == 'MESH' and hasattr(obj, 'bound_box'): if obj.type == 'MESH' and hasattr(obj, 'bound_box'):
vertex_pos = get_bb_coords_from_obj(obj, instance=ob) positions = get_bb_coords_from_obj(obj, instance=ob)
break break
elif ob.type == 'EMPTY':
vertex_pos = bbox_from_obj(ob, ob.empty_display_size)
elif ob.type == 'LIGHT':
vertex_pos = bbox_from_obj(ob, ob.data.shadow_soft_size)
elif ob.type == 'LIGHT_PROBE':
vertex_pos = bbox_from_obj(ob, ob.data.influence_distance)
elif ob.type == 'CAMERA':
vertex_pos = bbox_from_obj(ob, ob.data.display_size)
elif hasattr(ob, 'bound_box'): elif hasattr(ob, 'bound_box'):
vertex_indices = ( indices = (
(0, 1), (1, 2), (2, 3), (0, 3), (0, 1), (1, 2), (2, 3), (0, 3),
(4, 5), (5, 6), (6, 7), (4, 7), (4, 5), (5, 6), (6, 7), (4, 7),
(0, 4), (1, 5), (2, 6), (3, 7)) (0, 4), (1, 5), (2, 6), (3, 7))
vertex_pos = get_bb_coords_from_obj(ob) positions = get_bb_coords_from_obj(ob)
if positions is None:
indices = (
(0, 1), (0, 2), (1, 3), (2, 3),
(4, 5), (4, 6), (5, 7), (6, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
positions = bbox_from_obj(ob, ob.scale.x)
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR') shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
batch = batch_for_shader( batch = batch_for_shader(
shader, shader,
'LINES', 'LINES',
{"pos": vertex_pos}, {"pos": positions},
indices=vertex_indices) indices=indices)
shader.bind() shader.bind()
shader.uniform_float("color", self.data.get('color')) shader.uniform_float("color", self.data.get('color'))
@ -385,9 +387,6 @@ class UserNameWidget(Widget):
class SessionStatusWidget(Widget): class SessionStatusWidget(Widget):
draw_type = 'POST_PIXEL' draw_type = 'POST_PIXEL'
def __init__(self):
self.preferences = get_preferences()
@property @property
def settings(self): def settings(self):
return getattr(bpy.context.window_manager, 'session', None) return getattr(bpy.context.window_manager, 'session', None)
@ -397,21 +396,17 @@ class SessionStatusWidget(Widget):
self.settings.enable_presence self.settings.enable_presence
def draw(self): def draw(self):
text_scale = self.preferences.presence_hud_scale
ui_scale = bpy.context.preferences.view.ui_scale
color = [1, 1, 0, 1] color = [1, 1, 0, 1]
state = session.state state = session.state.get('STATE')
state_str = f"{get_state_str(state)}" state_str = f"{get_state_str(state)}"
if state == STATE_ACTIVE: if state == STATE_ACTIVE:
color = [0, 1, 0, 1] color = [0, 1, 0, 1]
elif state == STATE_INITIAL: elif state == STATE_INITIAL:
color = [1, 0, 0, 1] color = [1, 0, 0, 1]
hpos = (self.preferences.presence_hud_hpos*bpy.context.area.width)/100
vpos = (self.preferences.presence_hud_vpos*bpy.context.area.height)/100
blf.position(0, hpos, vpos, 0) blf.position(0, 10, 20, 0)
blf.size(0, int(text_scale*ui_scale), 72) blf.size(0, 16, 45)
blf.color(0, color[0], color[1], color[2], color[3]) blf.color(0, color[0], color[1], color[2], color[3])
blf.draw(0, state_str) blf.draw(0, state_str)
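The removed HUD math scales both position and text size; a worked example with illustrative numbers: presence_hud_hpos = 1 % of a 1920 px area gives hpos = 1*1920/100 = 19.2 px, and presence_hud_scale = 25 with ui_scale = 1.0 yields blf.size(0, 25, 72).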
@ -1,48 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
from replication.constants import STATE_INITIAL
class SessionData():
""" A structure to share easily the current session data across the addon
modules.
This object will completely replace the Singleton lying in replication
interface module.
"""
def __init__(self):
self.repository = None # The current repository
self.remote = None # The active remote
self.server = None
self.applied_updates = []
@property
def state(self):
if self.remote is None:
return STATE_INITIAL
else:
return self.remote.connection_status
def clear(self):
self.remote = None
self.repository = None
self.server = None
self.applied_updates = []
session = SessionData()
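A minimal usage sketch of the structure above (names taken from the class as shown):

# Illustrative only -- mirrors the removed SessionData behaviour.
data = SessionData()
assert data.state == STATE_INITIAL        # no remote attached yet
# once a remote is set, state proxies remote.connection_status
data.clear()                              # drops repository/remote/server refs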
@ -26,10 +26,9 @@ from replication.constants import (ADDED, ERROR, FETCHED,
STATE_INITIAL, STATE_SRV_SYNC, STATE_INITIAL, STATE_SRV_SYNC,
STATE_WAITING, STATE_QUITTING, STATE_WAITING, STATE_QUITTING,
STATE_LOBBY, STATE_LOBBY,
CONNECTING) STATE_LAUNCHING_SERVICES)
from replication import __version__ from replication import __version__
from replication.interface import session from replication.interface import session
from .timers import registry
ICONS_PROP_STATES = ['TRIA_DOWN', # ADDED ICONS_PROP_STATES = ['TRIA_DOWN', # ADDED
'TRIA_UP', # COMMITED 'TRIA_UP', # COMMITED
@ -71,9 +70,9 @@ class SESSION_PT_settings(bpy.types.Panel):
def draw_header(self, context): def draw_header(self, context):
layout = self.layout layout = self.layout
if session and session.state != STATE_INITIAL: if session and session.state['STATE'] != STATE_INITIAL:
cli_state = session.state cli_state = session.state
state = session.state state = session.state.get('STATE')
connection_icon = "KEYTYPE_MOVING_HOLD_VEC" connection_icon = "KEYTYPE_MOVING_HOLD_VEC"
if state == STATE_ACTIVE: if state == STATE_ACTIVE:
@ -81,12 +80,13 @@ class SESSION_PT_settings(bpy.types.Panel):
else: else:
connection_icon = 'PROP_CON' connection_icon = 'PROP_CON'
layout.label(text=f"Session - {get_state_str(cli_state)}", icon=connection_icon) layout.label(text=f"Session - {get_state_str(cli_state['STATE'])}", icon=connection_icon)
else: else:
layout.label(text=f"Session - v{__version__}",icon="PROP_OFF") layout.label(text=f"Session - v{__version__}",icon="PROP_OFF")
def draw(self, context): def draw(self, context):
layout = self.layout layout = self.layout
layout.use_property_split = True
row = layout.row() row = layout.row()
runtime_settings = context.window_manager.session runtime_settings = context.window_manager.session
settings = get_preferences() settings = get_preferences()
@ -94,17 +94,17 @@ class SESSION_PT_settings(bpy.types.Panel):
if hasattr(context.window_manager, 'session'): if hasattr(context.window_manager, 'session'):
# STATE INITIAL # STATE INITIAL
if not session \ if not session \
or (session and session.state == STATE_INITIAL): or (session and session.state['STATE'] == STATE_INITIAL):
pass pass
else: else:
progress = session.state_progress cli_state = session.state
row = layout.row() row = layout.row()
current_state = session.state current_state = cli_state['STATE']
info_msg = None info_msg = None
if current_state in [STATE_ACTIVE]: if current_state in [STATE_ACTIVE]:
row = row.grid_flow(row_major=True, columns=0, even_columns=True, even_rows=False, align=True) row = row.split(factor=0.3)
row.prop(settings.sync_flags, "sync_render_settings",text="",icon_only=True, icon='SCENE') row.prop(settings.sync_flags, "sync_render_settings",text="",icon_only=True, icon='SCENE')
row.prop(settings.sync_flags, "sync_during_editmode", text="",icon_only=True, icon='EDITMODE_HLT') row.prop(settings.sync_flags, "sync_during_editmode", text="",icon_only=True, icon='EDITMODE_HLT')
row.prop(settings.sync_flags, "sync_active_camera", text="",icon_only=True, icon='OBJECT_DATAMODE') row.prop(settings.sync_flags, "sync_active_camera", text="",icon_only=True, icon='OBJECT_DATAMODE')
@ -124,8 +124,8 @@ class SESSION_PT_settings(bpy.types.Panel):
if current_state in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]: if current_state in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]:
info_box = row.box() info_box = row.box()
info_box.row().label(text=printProgressBar( info_box.row().label(text=printProgressBar(
progress['current'], cli_state['CURRENT'],
progress['total'], cli_state['TOTAL'],
length=16 length=16
)) ))
@ -141,7 +141,7 @@ class SESSION_PT_settings_network(bpy.types.Panel):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return not session \ return not session \
or (session and session.state == 0) or (session and session.state['STATE'] == 0)
def draw_header(self, context): def draw_header(self, context):
self.layout.label(text="", icon='URL') self.layout.label(text="", icon='URL')
@ -157,12 +157,6 @@ class SESSION_PT_settings_network(bpy.types.Panel):
row.prop(runtime_settings, "session_mode", expand=True) row.prop(runtime_settings, "session_mode", expand=True)
row = layout.row() row = layout.row()
col = row.row(align=True)
col.prop(settings, "server_preset_interface", text="")
col.operator("session.preset_server_add", icon='ADD', text="")
col.operator("session.preset_server_remove", icon='REMOVE', text="")
row = layout.row()
box = row.box() box = row.box()
if runtime_settings.session_mode == 'HOST': if runtime_settings.session_mode == 'HOST':
@ -174,7 +168,7 @@ class SESSION_PT_settings_network(bpy.types.Panel):
row.prop(settings, "init_method", text="") row.prop(settings, "init_method", text="")
row = box.row() row = box.row()
row.label(text="Admin password:") row.label(text="Admin password:")
row.prop(settings, "password", text="") row.prop(runtime_settings, "password", text="")
row = box.row() row = box.row()
row.operator("session.start", text="HOST").host = True row.operator("session.start", text="HOST").host = True
else: else:
@ -190,10 +184,11 @@ class SESSION_PT_settings_network(bpy.types.Panel):
if runtime_settings.admin: if runtime_settings.admin:
row = box.row() row = box.row()
row.label(text="Password:") row.label(text="Password:")
row.prop(settings, "password", text="") row.prop(runtime_settings, "password", text="")
row = box.row() row = box.row()
row.operator("session.start", text="CONNECT").host = False row.operator("session.start", text="CONNECT").host = False
class SESSION_PT_settings_user(bpy.types.Panel): class SESSION_PT_settings_user(bpy.types.Panel):
bl_idname = "MULTIUSER_SETTINGS_USER_PT_panel" bl_idname = "MULTIUSER_SETTINGS_USER_PT_panel"
bl_label = "User info" bl_label = "User info"
@ -204,7 +199,7 @@ class SESSION_PT_settings_user(bpy.types.Panel):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return not session \ return not session \
or (session and session.state == 0) or (session and session.state['STATE'] == 0)
def draw_header(self, context): def draw_header(self, context):
self.layout.label(text="", icon='USER') self.layout.label(text="", icon='USER')
@ -235,7 +230,7 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return not session \ return not session \
or (session and session.state == 0) or (session and session.state['STATE'] == 0)
def draw_header(self, context): def draw_header(self, context):
self.layout.label(text="", icon='PREFERENCES') self.layout.label(text="", icon='PREFERENCES')
@ -256,6 +251,9 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
emboss=False) emboss=False)
if settings.sidebar_advanced_net_expanded: if settings.sidebar_advanced_net_expanded:
net_section_row = net_section.row()
net_section_row.label(text="IPC Port:")
net_section_row.prop(settings, "ipc_port", text="")
net_section_row = net_section.row() net_section_row = net_section.row()
net_section_row.label(text="Timeout (ms):") net_section_row.label(text="Timeout (ms):")
net_section_row.prop(settings, "connection_timeout", text="") net_section_row.prop(settings, "connection_timeout", text="")
@ -271,6 +269,7 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
if settings.sidebar_advanced_rep_expanded: if settings.sidebar_advanced_rep_expanded:
replication_section_row = replication_section.row() replication_section_row = replication_section.row()
replication_section_row.label(text="Sync flags", icon='COLLECTION_NEW')
replication_section_row = replication_section.row() replication_section_row = replication_section.row()
replication_section_row.prop(settings.sync_flags, "sync_render_settings") replication_section_row.prop(settings.sync_flags, "sync_render_settings")
replication_section_row = replication_section.row() replication_section_row = replication_section.row()
@ -283,8 +282,34 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
warning = replication_section_row.box() warning = replication_section_row.box()
warning.label(text="Don't use this with heavy meshes !", icon='ERROR') warning.label(text="Don't use this with heavy meshes !", icon='ERROR')
replication_section_row = replication_section.row() replication_section_row = replication_section.row()
replication_section_row.prop(settings, "depsgraph_update_rate", text="Apply delay")
replication_section_row.label(text="Update method", icon='RECOVER_LAST')
replication_section_row = replication_section.row()
replication_section_row.prop(settings, "update_method", expand=True)
replication_section_row = replication_section.row()
replication_timers = replication_section_row.box()
replication_timers.label(text="Replication timers", icon='TIME')
if settings.update_method == "DEFAULT":
replication_timers = replication_timers.row()
# Replication frequencies
flow = replication_timers.grid_flow(
row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
line = flow.row(align=True)
line.label(text=" ")
line.separator()
line.label(text="refresh (sec)")
line.label(text="apply (sec)")
for item in settings.supported_datablocks:
line = flow.row(align=True)
line.prop(item, "auto_push", text="", icon=item.icon)
line.separator()
line.prop(item, "bl_delay_refresh", text="")
line.prop(item, "bl_delay_apply", text="")
else:
replication_timers = replication_timers.row()
replication_timers.label(text="Update rate (ms):")
replication_timers.prop(settings, "depsgraph_update_rate", text="")
cache_section = layout.row().box() cache_section = layout.row().box()
cache_section.prop( cache_section.prop(
@ -324,7 +349,7 @@ class SESSION_PT_user(bpy.types.Panel):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return session and session.state in [STATE_ACTIVE, STATE_LOBBY] return session and session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]
def draw_header(self, context): def draw_header(self, context):
self.layout.label(text="", icon='USER') self.layout.label(text="", icon='USER')
@ -355,7 +380,7 @@ class SESSION_PT_user(bpy.types.Panel):
if active_user != 0 and active_user.username != settings.username: if active_user != 0 and active_user.username != settings.username:
row = layout.row() row = layout.row()
user_operations = row.split() user_operations = row.split()
if session.state == STATE_ACTIVE: if session.state['STATE'] == STATE_ACTIVE:
user_operations.alert = context.window_manager.session.time_snap_running user_operations.alert = context.window_manager.session.time_snap_running
user_operations.operator( user_operations.operator(
@ -413,7 +438,7 @@ class SESSION_PT_presence(bpy.types.Panel):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return not session \ return not session \
or (session and session.state in [STATE_INITIAL, STATE_ACTIVE]) or (session and session.state['STATE'] in [STATE_INITIAL, STATE_ACTIVE])
def draw_header(self, context): def draw_header(self, context):
self.layout.prop(context.window_manager.session, self.layout.prop(context.window_manager.session,
@ -423,17 +448,9 @@ class SESSION_PT_presence(bpy.types.Panel):
layout = self.layout layout = self.layout
settings = context.window_manager.session settings = context.window_manager.session
pref = get_preferences()
layout.active = settings.enable_presence layout.active = settings.enable_presence
col = layout.column() col = layout.column()
col.prop(settings, "presence_show_session_status") col.prop(settings, "presence_show_session_status")
row = col.column()
row.active = settings.presence_show_session_status
row.prop(pref, "presence_hud_scale", expand=True)
row = col.column(align=True)
row.active = settings.presence_show_session_status
row.prop(pref, "presence_hud_hpos", expand=True)
row.prop(pref, "presence_hud_vpos", expand=True)
col.prop(settings, "presence_show_selected") col.prop(settings, "presence_show_selected")
col.prop(settings, "presence_show_user") col.prop(settings, "presence_show_user")
row = layout.column() row = layout.column()
@ -443,8 +460,8 @@ class SESSION_PT_presence(bpy.types.Panel):
def draw_property(context, parent, property_uuid, level=0): def draw_property(context, parent, property_uuid, level=0):
settings = get_preferences() settings = get_preferences()
runtime_settings = context.window_manager.session runtime_settings = context.window_manager.session
item = session.repository.graph.get(property_uuid) item = session.get(uuid=property_uuid)
type_id = item.data.get('type_id')
area_msg = parent.row(align=True) area_msg = parent.row(align=True)
if item.state == ERROR: if item.state == ERROR:
@ -455,10 +472,11 @@ def draw_property(context, parent, property_uuid, level=0):
line = area_msg.box() line = area_msg.box()
name = item.data['name'] if item.data else item.uuid name = item.data['name'] if item.data else item.uuid
icon = settings.supported_datablocks[type_id].icon if type_id else 'ERROR'
detail_item_box = line.row(align=True) detail_item_box = line.row(align=True)
detail_item_box.label(text="", icon=icon) detail_item_box.label(text="",
icon=settings.supported_datablocks[item.str_type].icon)
detail_item_box.label(text=f"{name}") detail_item_box.label(text=f"{name}")
# Operations # Operations
@ -520,8 +538,8 @@ class SESSION_PT_repository(bpy.types.Panel):
admin = usr['admin'] admin = usr['admin']
return hasattr(context.window_manager, 'session') and \ return hasattr(context.window_manager, 'session') and \
session and \ session and \
(session.state == STATE_ACTIVE or \ (session.state['STATE'] == STATE_ACTIVE or \
session.state == STATE_LOBBY and admin) session.state['STATE'] == STATE_LOBBY and admin)
def draw_header(self, context): def draw_header(self, context):
self.layout.label(text="", icon='OUTLINER_OB_GROUP_INSTANCE') self.layout.label(text="", icon='OUTLINER_OB_GROUP_INSTANCE')
@ -537,42 +555,43 @@ class SESSION_PT_repository(bpy.types.Panel):
row = layout.row() row = layout.row()
if session.state == STATE_ACTIVE: if session.state['STATE'] == STATE_ACTIVE:
if 'SessionBackupTimer' in registry: flow = layout.grid_flow(
row.alert = True row_major=True,
row.operator('session.cancel_autosave', icon="CANCEL") columns=0,
row.alert = False even_columns=True,
else: even_rows=False,
row.operator('session.save', icon="FILE_TICK") align=True)
box = layout.box() for item in settings.supported_datablocks:
row = box.row() col = flow.column(align=True)
row.prop(runtime_settings, "filter_owned", text="Show only owned Nodes", icon_only=True, icon="DECORATE_UNLOCKED") col.prop(item, "use_as_filter", text="", icon=item.icon)
row = box.row()
row.prop(runtime_settings, "filter_name", text="Filter") row = layout.row(align=True)
row = box.row() row.prop(runtime_settings, "filter_owned", text="Show only owned")
row = layout.row(align=True)
# Properties # Properties
owned_nodes = [k for k, v in session.repository.graph.items() if v.owner==settings.username] types_filter = [t.type_name for t in settings.supported_datablocks
if t.use_as_filter]
filtered_node = owned_nodes if runtime_settings.filter_owned else session.repository.graph.keys() key_to_filter = session.list(
filter_owner=settings.username) if runtime_settings.filter_owned else session.list()
if runtime_settings.filter_name: client_keys = [key for key in key_to_filter
for node_id in filtered_node: if session.get(uuid=key).str_type
node_instance = session.repository.graph.get(node_id) in types_filter]
name = node_instance.data.get('name')
if runtime_settings.filter_name not in name:
filtered_node.remove(node_id)
if filtered_node: if client_keys:
col = layout.column(align=True) col = layout.column(align=True)
for key in filtered_node: for key in client_keys:
draw_property(context, col, key) draw_property(context, col, key)
else: else:
layout.row().label(text="Empty") row.label(text="Empty")
elif session.state == STATE_LOBBY and usr and usr['admin']: elif session.state['STATE'] == STATE_LOBBY and usr and usr['admin']:
row.operator("session.init", icon='TOOL_SETTINGS', text="Init") row.operator("session.init", icon='TOOL_SETTINGS', text="Init")
else: else:
row.label(text="Waiting to start") row.label(text="Waiting to start")
@ -36,16 +36,8 @@ from replication.constants import (STATE_ACTIVE, STATE_AUTH,
STATE_INITIAL, STATE_SRV_SYNC, STATE_INITIAL, STATE_SRV_SYNC,
STATE_WAITING, STATE_QUITTING, STATE_WAITING, STATE_QUITTING,
STATE_LOBBY, STATE_LOBBY,
CONNECTING) STATE_LAUNCHING_SERVICES)
CLEARED_DATABLOCKS = ['actions', 'armatures', 'cache_files', 'cameras',
'collections', 'curves', 'filepath', 'fonts',
'grease_pencils', 'images', 'lattices', 'libraries',
'lightprobes', 'lights', 'linestyles', 'masks',
'materials', 'meshes', 'metaballs', 'movieclips',
'node_groups', 'objects', 'paint_curves', 'particles',
'scenes', 'shape_keys', 'sounds', 'speakers', 'texts',
'textures', 'volumes', 'worlds']
def find_from_attr(attr_name, attr_value, list): def find_from_attr(attr_name, attr_value, list):
for item in list: for item in list:
@ -73,15 +65,6 @@ def get_datablock_users(datablock):
return users return users
def flush_history():
try:
logging.debug("Flushing history")
for i in range(bpy.context.preferences.edit.undo_steps+1):
bpy.ops.ed.undo_push(message="Multiuser history flush")
except RuntimeError:
logging.error("Fail to overwrite history")
def get_state_str(state): def get_state_str(state):
state_str = 'UNKNOWN' state_str = 'UNKNOWN'
if state == STATE_WAITING: if state == STATE_WAITING:
@ -100,7 +83,7 @@ def get_state_str(state):
state_str = 'OFFLINE' state_str = 'OFFLINE'
elif state == STATE_QUITTING: elif state == STATE_QUITTING:
state_str = 'QUITTING' state_str = 'QUITTING'
elif state == CONNECTING: elif state == STATE_LAUNCHING_SERVICES:
state_str = 'LAUNCHING SERVICES' state_str = 'LAUNCHING SERVICES'
elif state == STATE_LOBBY: elif state == STATE_LOBBY:
state_str = 'LOBBY' state_str = 'LOBBY'
@ -109,24 +92,14 @@ def get_state_str(state):
def clean_scene(): def clean_scene():
for type_name in CLEARED_DATABLOCKS: for type_name in dir(bpy.data):
sub_collection_to_avoid = [
bpy.data.linestyles.get('LineStyle'),
bpy.data.materials.get('Dots Stroke')
]
type_collection = getattr(bpy.data, type_name)
items_to_remove = [i for i in type_collection if i not in sub_collection_to_avoid]
for item in items_to_remove:
try: try:
type_collection = getattr(bpy.data, type_name)
for item in type_collection:
type_collection.remove(item) type_collection.remove(item)
logging.info(item.name)
except: except:
continue continue
# Clear sequencer
bpy.context.scene.sequence_editor_clear()
def get_selected_objects(scene, active_view_layer): def get_selected_objects(scene, active_view_layer):
return [obj.uuid for obj in scene.objects if obj.select_get(view_layer=active_view_layer)] return [obj.uuid for obj in scene.objects if obj.select_get(view_layer=active_view_layer)]
@ -1,7 +1,7 @@
# Download base image debian jessie # Download base image debian jessie
FROM python:slim FROM python:slim
ARG replication_version=0.1.13 ARG replication_version=0.0.21
ARG version=0.1.1 ARG version=0.1.1
# Infos # Infos
@ -13,13 +13,12 @@ LABEL description="Blender multi-user addon \
# Argument # Argument
ENV password='admin' ENV password='admin'
ENV port=5555 ENV port=5555
ENV timeout=5000 ENV timeout=3000
ENV log_level=DEBUG ENV log_level=INFO
ENV log_file="multiuser_server.log" ENV log_file="multiuser_server.log"
#Install replication #Install replication
RUN pip install replication==$replication_version RUN pip install replication==$replication_version
# Run the server with parameters # Run the server with parameters
ENTRYPOINT ["/bin/sh", "-c"] CMD replication.serve -pwd ${password} -p ${port} -t ${timeout} -l ${log_level} -lf ${log_file}
CMD ["python3 -m replication.server -pwd ${password} -p ${port} -t ${timeout} -l ${log_level} -lf ${log_file}"]
@ -1,4 +1,4 @@
import re import re
init_py = open("multi_user/libs/replication/replication/__init__.py").read() init_py = open("multi_user/__init__.py").read()
print(re.search("\d+\.\d+\.\d+\w\d+|\d+\.\d+\.\d+", init_py).group(0)) print(re.search("\d+\.\d+\.\d+\w\d+|\d+\.\d+\.\d+", init_py).group(0))
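Examples of strings the pattern extracts, including the pre-release form matched by the first alternative (illustrative inputs):

# Illustrative matches for the version pattern above.
import re
VERSION_EXPR = re.compile("\d+\.\d+\.\d+\w\d+|\d+\.\d+\.\d+")
assert VERSION_EXPR.search("version = '0.1.1'").group(0) == "0.1.1"
assert VERSION_EXPR.search("0.2.0b3").group(0) == "0.2.0b3"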
@ -13,7 +13,7 @@ def main():
if len(sys.argv) > 2: if len(sys.argv) > 2:
blender_rev = sys.argv[2] blender_rev = sys.argv[2]
else: else:
blender_rev = "2.93.0" blender_rev = "2.90.0"
try: try:
exit_val = BAT.test_blender_addon(addon_path=addon, blender_revision=blender_rev) exit_val = BAT.test_blender_addon(addon_path=addon, blender_revision=blender_rev)