Compare commits


2 Commits

Author SHA1 Message Date
48866b74d3 refacctor: remove wrong charaters 2020-07-07 22:35:56 +02:00
d9f1031107 feat: initial version 2020-07-07 22:34:40 +02:00
105 changed files with 2700 additions and 5857 deletions

View File

@ -1,10 +1,8 @@
stages:
- test
- build
- deploy
include:
- local: .gitlab/ci/test.gitlab-ci.yml
- local: .gitlab/ci/build.gitlab-ci.yml
- local: .gitlab/ci/deploy.gitlab-ci.yml

View File

@ -1,13 +1,14 @@
build:
stage: build
image: debian:stable-slim
image: python:latest
script:
- git submodule init
- git submodule update
- cd multi_user/libs/replication
- rm -rf tests .git .gitignore script
artifacts:
name: multi_user
paths:
- multi_user
only:
refs:
- master
- develop
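The zip produced by this job is the build artifact that the README links to; it can also be fetched directly with curl (a sketch, using the master artifacts URL quoted in the README further down):
curl -L -o multi_user.zip "https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build"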

View File

@ -1,24 +0,0 @@
deploy:
stage: deploy
image: slumber/docker-python
variables:
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: "/certs"
services:
- docker:19.03.12-dind
script:
- RP_VERSION="$(python scripts/get_replication_version.py)"
- VERSION="$(python scripts/get_addon_version.py)"
- echo "Building docker image with replication ${RP_VERSION}"
- docker build --build-arg replication_version=${RP_VERSION} --build-arg version=${VERSION} -t registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} ./scripts/docker_server
- echo "Pushing to gitlab registry ${VERSION}"
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker tag registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} registry.gitlab.com/slumber/multi-user/multi-user-server:latest
- docker push registry.gitlab.com/slumber/multi-user/multi-user-server
only:
refs:
- master
- develop
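Once published, the server image referenced above can be pulled on any machine with docker installed (a sketch; the :latest tag is the one recommended later in the hosting guide):
docker pull registry.gitlab.com/slumber/multi-user/multi-user-server:latest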

View File

@ -1,5 +1,14 @@
test:
stage: test
image: slumber/blender-addon-testing:latest
image: python:latest
script:
- git submodule init
- git submodule update
- apt update
# install blender to get all required dependencies
# TODO: install only dependencies
- apt install -f -y gcc python-dev python3.7-dev
- apt install -f -y blender
- python3 -m pip install blender-addon-tester
- python3 scripts/test_addon.py
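Outside CI, the same test steps can be reproduced locally; a minimal sketch, assuming Blender and the Python development headers are already installed:
python3 -m pip install blender-addon-tester
python3 scripts/test_addon.py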

.gitmodules vendored
View File

@ -0,0 +1,3 @@
[submodule "multi_user/libs/replication"]
path = multi_user/libs/replication
url = https://gitlab.com/slumber/replication.git
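Because the replication library is tracked as a git submodule, a fresh checkout needs the submodule initialised as well; a minimal sketch mirroring the commands used in the CI jobs above:
git clone https://gitlab.com/slumber/multi-user.git
cd multi-user
git submodule init
git submodule update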

View File

@ -37,7 +37,7 @@ All notable changes to this project will be documented in this file.
- Serialization is now based on marshal (2x performance improvements).
- Let pip choose the python dependencies install path.
## [0.0.3] - 2020-07-29
## [0.0.3] - Upcoming
### Added
@ -60,101 +60,8 @@ All notable changes to this project will be documented in this file.
- user localization
- repository init
### Removed
- Unused strict right management strategy
- Legacy config management system
## [0.1.0] - 2020-10-05
### Added
- Dependency graph driven updates [experimental]
- Edit Mode updates
- Late join mechanism
- Sync Axis lock replication
- Sync collection offset
- Sync camera orthographic scale
- Sync custom fonts
- Sync sound files
- Logging configuration (file output and level)
- Object visibility type replication
- Optional sync for active camera
- Curve->Mesh conversion
- Mesh->gpencil conversion
### Changed
- Auto updater now handles installation from branches
- Use uuid for collection loading
- Moved session instance to replication package
### Fixed
- Prevent unsupported data types from crashing the session
- Modifier vertex group assignation
- World sync
- Snapshot UUID error
- The world is not synchronized
## [0.1.1] - 2020-10-16
### Added
- Session status widget
- Affect dependencies during change owner
- Dedicated server management scripts (@brybalicious)
### Changed
- Refactored presence.py
- Reset button UI icon
- Documentation `How to contribute` improvements (@brybalicious)
- Documentation `Hosting guide` improvements (@brybalicious)
- Show flags are now available from the viewport overlay
### Fixed
- Render sync race condition (causing scene errors)
- Binary differentials
- Hybrid session crashes between Linux/Windows
- Materials node default output value
- Right selection
- Client node rights changed to COMMON after disconnecting from the server
- Collection instances selection draw
- Packed image save error
- Material replication
- UI spelling errors (@brybalicious)
## [0.2.0] - 2020-12-17
### Added
- Documentation `Troubleshooting` section (@brybalicious)
- Documentation `Update` section (@brybalicious)
- Documentation `Cloud Hosting Walkthrough` (@brybalicious)
- Support DNS name
- Sync annotations
- Sync volume objects
- Sync material node_groups
- Sync VSE
- Sync grease pencil modifiers
- Sync textures (modifier only)
- Session status widget
- Disconnection popup
- Popup with disconnection reason
### Changed
- Improved GPencil performance
### Fixed
- Texture paint update
- Various documentation fixes section (@brybalicious)
- Empty and Light object selection highlights
- Material renaming
- Default material nodes input parameters
- blender 2.91 python api compatibility
- Legacy config management system

View File

@ -11,7 +11,7 @@ This tool aims to allow multiple users to work on the same scene over the networ
## Quick installation
1. Download latest release [multi_user.zip](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build).
1. Download latest release [multi_user.zip](/uploads/8aef79c7cf5b1d9606dc58307fd9ad8b/multi_user.zip).
2. Run blender as administrator (dependencies installation).
3. Install last_version.zip from your addon preferences.
@ -21,42 +21,31 @@ This tool aims to allow multiple users to work on the same scene over the networ
See the [documentation](https://multi-user.readthedocs.io/en/latest/) for details.
## Troubleshooting
See the [troubleshooting guide](https://multi-user.readthedocs.io/en/latest/getting_started/troubleshooting.html) for tips on the most common issues.
## Current development status
Currently, not all data-blocks are supported for replication over the wire. The following list summarizes the status of each one.
| Name | Status | Comment |
| ----------- | :----: | :--------------------------------------------------------------------------: |
| action | ✔️ | |
| armature | ❗ | Not stable |
| camera | ✔️ | |
| collection | ✔️ | |
| curve | | Nurbs not supported |
| gpencil | ✔️ | [Airbrush not supported](https://gitlab.com/slumber/multi-user/-/issues/123) |
| image | ✔️ | |
| mesh | ✔️ | |
| material | ✔️ | |
| node_groups | ❗ | Material only |
| metaball | ✔️ | |
| object | ✔️ | |
| textures | | Supported for modifiers only |
| texts | ✔️ | |
| scene | ✔️ | |
| world | ✔️ | |
| lightprobes | ✔️ | |
| compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
| nla | ❌ | |
| volumes | ✔️ | |
| particles | ❌ | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24) |
| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
| vse | ❗ | Mask and Clip not supported yet |
| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| libraries | ❗ | Partial |
| Name | Status | Comment |
| ----------- | :----: | :-----------------------------------------------------------: |
| action | | Not stable |
| armature | ❗ | Not stable |
| camera | ✔️ | |
| collection | ✔️ | |
| curve | ✔️ | Nurbs surface don't load correctly |
| gpencil | ✔️ | |
| image | | Not stable yet |
| mesh | ✔️ | |
| material | ✔️ | |
| metaball | ✔️ | |
| object | ✔️ | |
| scene | ✔️ | |
| world | ✔️ | |
| lightprobes | ✔️ | |
| particles | | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24) |
| speakers | | [Planned](https://gitlab.com/slumber/multi-user/-/issues/65) |
| vse | | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| libraries | ❗ | Partial |
### Performance issues
@ -68,16 +57,14 @@ I'm working on it.
| Dependencies | Version | Needed |
| ------------ | :-----: | -----: |
| Replication | latest | yes |
| ZeroMQ | latest | yes |
| JsonDiff | latest | yes |
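For development outside Blender, the dependencies listed above can be installed manually with pip; a hedged sketch, assuming the usual PyPI package names (replication, pyzmq for ZeroMQ, jsondiff):
python -m pip install replication pyzmq jsondiff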
## Contributing
See [contributing section](https://multi-user.readthedocs.io/en/latest/ways_to_contribute.html) of the documentation.
Feel free to [join the discord server](https://discord.gg/aBPvGws) to chat, seek help and contribute.
## Licensing
See [license](LICENSE)

View File

@ -19,10 +19,10 @@ import sys
project = 'multi-user'
copyright = '2020, Swann Martinez'
author = 'Swann Martinez, with contributions from Poochy'
author = 'Swann Martinez'
# The full version, including alpha/beta/rc tags
release = '0.2.0'
release = '0.0.2'
# -- General configuration ---------------------------------------------------

View File

@ -9,14 +9,14 @@ Glossary
administrator
*A session administrator can manage users (kick) and hold write access on
each datablock. They can also init a dedicated server repository.*
.. _session-status:
session status
*Located in the title of the multi-user panel, the session status shows
you the connection state.*
.. figure:: img/quickstart_session_status.png
@ -24,7 +24,7 @@ Glossary
Session status in panel title bar
All possible connection states are listed here with their meaning:*
+--------------------+---------------------------------------------------------------------------------------------+
| State | Description |
@ -33,7 +33,7 @@ Glossary
+--------------------+---------------------------------------------------------------------------------------------+
| FETCHING | Downloading snapshot from the server |
+--------------------+---------------------------------------------------------------------------------------------+
| AUTHENTICATION | Initial server authentication |
+--------------------+---------------------------------------------------------------------------------------------+
| ONLINE | Connected to the session |
+--------------------+---------------------------------------------------------------------------------------------+
@ -55,5 +55,5 @@ Glossary
common right
When a data block is under common right, it is available to everyone for modification.
The rights will be given to the user that selects it first.

Several binary image files (documentation screenshots) were changed; previews are not shown.

View File

@ -8,5 +8,5 @@ Getting started
install
quickstart
troubleshooting
known_problems
glossary

View File

@ -5,54 +5,9 @@ Installation
.. hint::
The process is the same for linux, mac and windows.
1. Download `LATEST build <https://gitlab.com/slumber/multi-user/-/jobs/artifacts/develop/download?job=build>`_ or `STABLE build <https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build>`_.
1. Download latest `release <https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build>`_ or `develop (unstable !) <https://gitlab.com/slumber/multi-user/-/jobs/artifacts/develop/download?job=build>`_ build.
2. Run blender as administrator (to allow python dependencies auto-installation).
3. Install **multi-user.zip** from your addon preferences.
Once the addon is successfully installed, I strongly recommend that you follow the :ref:`quickstart`
tutorial.
.. _update-version:
Updates
=======
Multi-User has a built-in auto-update function.
1. Navigate to Edit >> Preferences pane in Blender, and go to the 'Add-ons' section.
2. Search 'multi-user', select the 'Update' tab, click 'Auto-check for Update' and choose the frequency you'd like.
3. Make sure to click the three bars in the bottom-left, and save this to your preferences (userpref.blend).
Sometimes you'd like to perform a manual update, or even side-grade or roll back your multi-user version. Perhaps you are trying out new features from the 'develop' branch in a test session.
1. Click on 'Check now for multiuser update'. Multi-user will now find new versions.
.. figure:: img/update_1.jpg
:align: center
:width: 300px
Check for updates
2. Select 'Install latest master / old version'
.. figure:: img/update_2.jpg
:align: center
:width: 300px
Install
3. In most cases, select 'master' branch for the latest stable release. The unstable 'develop' branch and older releases are available
.. figure:: img/update_3.jpg
:align: center
:width: 300px
Select version
4. Finally, restart blender to use the updated version
.. figure:: img/update_4.jpg
:align: center
:width: 300px
Restart blender

View File

@ -0,0 +1,46 @@
.. _known-problems:
==============
Known problems
==============
.. rubric:: What do you need to do in order to use Multi-User over the internet?
1. Use Hamachi or ZeroTier (I prefer Hamachi) and create a network.
2. All participants need to join this network.
3. Go to Blender and install Multi-User in the preferences.
4. Set up and start the session:
* **Host**: After activating Multi-User as an add-on, press N and go to the Multi-User panel.
Then, put the IP of your network where the IP is asked for.
Leave Port and IPC Port on their defaults (5555 and 5561). Increase the Timeout (ms) if the connection is not stable.
Then press "HOST".
* **Guest**: After activating Multi-User as an add-on, press N and go to the Multi-User panel.
Then, put the IP of your network where the IP is asked for.
Leave Port and IPC Port on their defaults (5555 and 5561) (simpler: put the same information that the host is using,
but note that 4 ports are needed for communication, so you may need to use 5555 + the count of guests [up to 4]).
Increase the Timeout (ms) if the connection is not stable. Then press "CONNECT".
.. rubric:: What do you need to check if you can't host?
Check that the IP and all ports are correct. If it's not loading because you loaded a project before hosting, it's not your fault;
the version is simply not stable yet (the project contains data that is not made stable yet).
.. rubric:: What do you need to check if you can't connect?
Check that you are connected to the network (VPN) of the host. Also, check that you have entered the same information as the host.
Maybe you have different versions (which shouldn't be the case once the auto-updater is introduced).
.. rubric:: You are connected, but you don't see anything?
After pressing N, go to the presence overlay and check the box.
Also, go down and uncheck the box "Show only owned" (unless you need privacy ( ͡° ͜ʖ ͡°) ).
If it's still not working, hit the support channel on the "multi-user" discord server. This little helper text is based on my own experience
(Ultr-X).
In order to bring attention to other problems, please @ me on the support channel. Every problem brought to me will be documented to optimize and update this text.
Thank you and have fun with Multi-User, brought to you by "swann".
Here is the discord server: https://discord.gg/v5eKgm

View File

@ -5,10 +5,10 @@ Quick start
===========
.. hint::
*All session-related settings are located under: `View3D -> Sidebar -> Multiuser panel`*
The multi-user addon provides a session management system.
In this guide, you will quickly learn how to use the collaborative session management system in three parts:
- :ref:`how-to-host`
- :ref:`how-to-join`
@ -19,22 +19,22 @@ In this guide, you will quickly learn how to use the collaborative session manag
How to host a session
=====================
The multi-user add-on relies on a Client-Server architecture.
The server is the heart of the collaborative session.
It is what allows users' blender instances to communicate with each other.
In simple terms, *Hosting a session* means *running a local server and connecting the local client to it*.
When I say **local server** I mean a server which is accessible from the LAN (Local Area Network) without requiring an internet connection.
However, there are times when you will need to host a session over the internet.
In this case, I strongly recommend that you read the :ref:`internet-guide` tutorial.
.. _user-info:
--------------------------------
1. Fill in your user information
--------------------------------
The **User Info** panel (See image below) allows you to customise your online identity.
.. figure:: img/quickstart_user_info.png
:align: center
@ -42,38 +42,38 @@ The **User Info** panel (See image below) allows you to customise your online id
User info panel
Let's fill in those two fields:
- **name**: your online name.
- **color**: a color used to represent you in other users' workspaces (see image below).
During online sessions, other users will see your selected object and camera highlighted in your profile color.
.. _user-representation:
.. figure:: img/quickstart_user_representation.png
:align: center
User viewport representation aka 'User Presence'
---------------------
2. Set up the network
---------------------
When the hosting process starts, the multi-user addon will launch a local server instance.
In the network panel, select **HOST**.
The **Host sub-panel** (see image below) allows you to configure the server according to:
* **Port**: Port on which the server is listening.
* **Start from**: The session initialisation method.
* **current scenes**: Start with the data loaded in the current blend file.
* **an empty scene**: Clear the blend file's data and start over.
.. danger::
By starting from an empty scene, all of the blend data will be removed!
Be sure to save your existing work before launching the session.
* **Admin password**: The session administration password.
@ -84,16 +84,16 @@ The **Host sub-panel** (see image below) allows you to configure the server acco
Host network panel
.. note:: Additional configuration settings can be found in the :ref:`advanced` section.
Once everything is set up, you can hit the **HOST** button to launch the session!
This will do two things:
* Start a local server
* Connect you to it as an :ref:`admin`
During an online session, various actions are available to you; go to the :ref:`how-to-manage` section to
learn more about them.
.. _how-to-join:
@ -101,12 +101,12 @@ learn more about them.
How to join a session
=====================
This section describes how to join a launched session.
Before starting make sure that you have access to the session IP address and port number.
--------------------------------
1. Fill in your user information
--------------------------------
Follow the user-info_ section for this step.
@ -114,9 +114,9 @@ Follow the user-info_ section for this step.
2. Network setup
----------------
In the network panel, select **JOIN**.
The **join sub-panel** (see image below) allows you to configure your client to join a
collaborative session which is already hosted.
.. figure:: img/quickstart_join.png
:align: center
@ -124,32 +124,32 @@ collaborative session which is already hosted.
Connection panel
Fill in the fields with your information:
- **IP**: the host's IP address.
- **Port**: the host's port number.
- **Connect as admin**: connect yourself with **admin rights** (see :ref:`admin`) to the session.
.. Maybe something more explicit here
.. note::
Additional configuration settings can be found in the :ref:`advanced` section.
Once you've configured every field, hit the button **CONNECT** to join the session!
When the :ref:`session-status` is **ONLINE** you are online and ready to start co-creating.
.. note::
When starting a **dedicated server**, the session status screen will take you to the **LOBBY**, awaiting an admin to start the session.
If the session status is set to **LOBBY** and you are a regular user, you need to wait for the admin to launch the scene.
If you are the admin, you just need to initialise the repository to start the session (see image below).
.. figure:: img/quickstart_session_init.png
:align: center
Session initialisation for dedicated server
During an online session, various actions are available to you. Go to :ref:`how-to-manage` to
learn more about them.
.. _how-to-manage:
@ -157,52 +157,40 @@ learn more about them.
How to manage a session
=======================
The quality of a collaborative session directly depends on the quality of the network connection, and the communication between the users. This section describes
various tools which have been made in an effort to ease the communication between your fellow creators.
Feel free to suggest any ideas for communication tools `here <https://gitlab.com/slumber/multi-user/-/issues/75>`_ .
---------------------------
Change replication behavior
---------------------------
During a session, multi-user will replicate all of your local modifications to the scene, to all other users' blender instances.
In order to avoid annoying other users when you are experimenting, you can flag some of your local modifications to be ignored via
various flags present at the top of the panel (see red area in the image below). Those flags are explained in the :ref:`replication` section.
.. figure:: img/quickstart_replication.png
:align: center
Session replication flags
--------------------
Monitor online users
--------------------
One of the most vital tools is the **Online user panel**. It lists all connected
users' information including your own:
* **Role** : if a user is an admin or a regular user.
* **Location**: Where the user is currently working.
* **Frame**: When (on which frame) the user is working.
* **Ping**: user's connection delay in milliseconds
.. figure:: img/quickstart_users.png
:align: center
Online user panel
By selecting a user in the list you'll have access to different user-related **actions**.
Those operators allow you to experience the selected user's state in two different dimensions: **SPACE** and **TIME**.
Snapping in space
-----------------
The **CAMERA button** (Also called **snap view** operator) allows you to snap to
the user's viewpoint. To disable the snap, click on the button once again. This action
serves different purposes such as easing the review process, and working together on a large or populated world.
.. hint::
If the target user is located in another scene, the **snap view** operator will send you to their scene.
.. figure:: img/quickstart_snap_view.gif
:align: center
@ -210,11 +198,11 @@ serves different purposes such as easing the review process, and working togethe
Snap view in action
Snapping in time
----------------
The **CLOCK button** (Also called **snap time** operator) allows you to snap to
the user's time (current frame). To disable the snap, click on the button once again.
This action helps multiple creators to work in the same time-frame
(for instance multiple animators).
.. figure:: img/quickstart_snap_time.gif
@ -229,14 +217,14 @@ Kick a user
.. warning:: Only available for :ref:`admin` !
The **CROSS button** (Also called **kick** operator) allows the administrator to kick the selected user. This can be helpful if a user is acting unruly, but more importantly, if they are experiencing a high ping which is slowing down the scene. Meanwhile, in the target user's world, the session will properly disconnect.
Change users display
--------------------
Presence is the multi-user module responsible for displaying user presence. During the session,
it draws user-related information in your viewport such as:
* Username
* User point of view
@ -247,61 +235,51 @@ it draw users' related information in your viewport such as:
Presence show flags
The presence overlay panel (see image above) allows you to enable/disable
various drawn parts via the following flags:
- **Show session status**: display the session status in the viewport
.. figure:: img/quickstart_status.png
:align: center
- **Text scale**: session status text size
- **Vertical/Horizontal position**: session position in the viewport
- **Show selected objects**: display other users' current selections
- **Show users**: display users' current viewpoint
- **Show different scenes**: display users working on other scenes
-----------
Manage data
-----------
In order to understand replication data management, a quick introduction to the multi-user data workflow is in order.
The first thing to know: until now, the addon relies on data-based replication. In simple words, it means that it replicates
the resultant output of a user's actions.
To replicate datablocks between clients, multi-user relies on a standard distributed architecture:
- The server stores the "master" version of the work.
- Each client has a local version of the work.
When an artist modifies something in the scene, here is what is happening in the background:
1. Modified data are **COMMITTED** to the local repository.
2. Once committed locally, they are **PUSHED** to the server
3. As soon as the server receives updates, they are stored locally and pushed to every other client
At the top of this data management system, a rights management system prevents
multiple users from modifying the same data at the same time. A datablock may belong to
a connected user or be under :ref:`common-right<**COMMON**>` rights.
.. note::
In a near future, the rights management system will support roles to allow multiple users to
work on different aspects of the same datablock.
The Repository panel (see image below) allows you to monitor and manually change datablock states and rights.
.. figure:: img/quickstart_properties.png
:align: center
Repository panel
The **show only owned** flag allows you to see which datablocks you are currently modifying.
.. warning::
If you are editing a datablock not listed with this flag enabled, it means that you have not been granted the rights to modify it.
So, it won't be updated to other clients!
Here is a quick list of available actions:
@ -321,105 +299,37 @@ Here is a quick list of available actions:
.. _advanced:
Advanced settings
=================
This section contains optional settings to configure the session behavior.
.. figure:: img/quickstart_advanced.png
:align: center
Advanced configuration panel
-------
Network
-------
.. figure:: img/quickstart_advanced_network.png
:align: center
Advanced network settings
**IPC Port** is the port used for Inter Process Communication. This port is used
by the multi-user subprocesses to communicate with each other. If different instances
of multi-user are using the same IPC port, this will create conflicts!
.. note::
You only need to modify this setting if you need to launch multiple clients from the same
computer (or if you try to host and join from the same computer). To resolve this, you simply need to enter a different
**IPC port** for each blender instance.
**Timeout (in milliseconds)** is the maximum ping authorized before auto-disconnecting.
You should only increase it if you have a bad connection.
.. _replication:
-----------
Replication
-----------
.. figure:: img/quickstart_advanced_replication.png
:align: center
Advanced replication settings
**Synchronize render settings** (only host) enables replication of EEVEE and CYCLES render settings to match renders between clients.
**Synchronize active camera** syncs the scene's active camera.
**Edit Mode Updates** enables objects to update while you are in Edit_Mode.
.. warning:: Edit Mode Updates kills the session's performance with complex objects (heavy meshes, gpencil, etc...).
**Update method** allows you to change how replication updates are triggered. Until now, two update methods are implemented:
- **Default**: Use external threads to monitor datablock changes. Slower and less accurate.
- **Depsgraph ⚠️**: Use the blender dependency graph to trigger updates. Faster but experimental and unstable!
**Properties frequency grid** sets a custom replication frequency for each type of data-block:
- **Refresh**: pushed data update rate (in seconds)
- **Apply**: pulled data update rate (in seconds)
-----
Cache
-----
.. note:: Per-data type settings will soon be revamped for simplification purposes
Multi-user allows you to replicate external dependencies such as images (textures, hdris, etc...), movies, and sounds.
On each client, the files will be stored in the multi-user cache folder.
.. figure:: img/quickstart_advanced_cache.png
:align: center
Advanced cache settings
**cache_directory** chooses where cached files (images, sounds, movies) will be saved.
**Clear memory filecache** will save memory space at runtime by removing the file content from memory as soon as it has been written to the disk.
**Clear cache** will remove all files from the cache folder.
.. warning:: Clearing the cache could break your scene images/movies/sounds if they are used in a blend file! Try saving the blend file and choosing 'Pack all into blend' before clearing the cache.
---
Log
---
.. figure:: img/quickstart_advanced_logging.png
:align: center
Advanced log settings
**log level** allows you to set the level of detail captured in multi-user's logging output. Here is a brief description of the level of detail for each value of the logging parameter:
+-----------+-----------------------------------------------+
| Log level | Description |
+===========+===============================================+
| ERROR | Shows only critical errors |
+-----------+-----------------------------------------------+
| WARNING | Shows only errors (of all kinds) |
+-----------+-----------------------------------------------+
| INFO | Shows only status-related messages and errors |
+-----------+-----------------------------------------------+
| DEBUG | Shows all possible information |
+-----------+-----------------------------------------------+
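The same level names are accepted by the dedicated server's logging flags documented in the hosting guide; for example (the file name here is illustrative):
.. code-block:: bash
replication.server -l DEBUG -lf multiuser.log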

View File

@ -1,19 +0,0 @@
.. _troubleshooting:
===============
Troubleshooting
===============
The majority of issues new users experience when first using Multi-User can be solved with a few quick checks.
- Run Blender in Administrator mode
- Update the multi-user addon to the latest version
- Make sure to allow Blender through your firewall
.. hint:: Your firewall may have additional settings like Ransomware protection, or you may need to enable both Blender and Python on private and/or public Networks
- Solve problems with your connection quality
- Minimise the use of large textures or file sizes
- Avoid using 'Undo'. Use 'delete' instead
Use the #support channel on the multi-user `discord server <https://discord.gg/aBPvGws>`_ to chat, seek help and contribute.

View File

@ -48,8 +48,8 @@ Documentation is organized into the following sections:
getting_started/install
getting_started/quickstart
getting_started/known_problems
getting_started/glossary
getting_started/troubleshooting
.. toctree::
:maxdepth: 1

View File

@ -1,51 +1,36 @@
.. _internet-guide:
=======================
Hosting on the internet
=======================
.. warning::
Until now, those communications are not encrypted but are planned to be in a mid-term future (`status <https://gitlab.com/slumber/multi-user/issues/62>`_).
This tutorial aims to guide you toward hosting a collaborative multi-user session on the internet.
Hosting a session can be achieved in several ways:
- :ref:`host-blender`: hosting a session directly from the blender add-on panel.
- :ref:`host-dedicated`: hosting a session directly from the command line interface on a computer without blender.
- :ref:`host-cloud`: hosting a session on a dedicated cloud server such as Google Cloud's free tier.
.. _host-blender:
-------------
From blender
-------------
By default your router doesn't allow anyone to share your connection.
In order to grant the server access to people from the internet you have two main options:
* The :ref:`connection-sharing`: the easiest way.
* The :ref:`port-forwarding`: this way is the least secure. If you have no networking knowledge, you should definitely follow :ref:`connection-sharing`.
.. _connection-sharing:
Using a connection sharing solution
-----------------------------------
You can either follow `Pierre Schiller's <https://www.youtube.com/c/activemotionpictures/featured>`_ excellent video tutorial or jump to the `text tutorial <zt-installation_>`_.
.. raw:: html
<p>
<iframe width="560" height="315" src="https://www.youtube.com/embed/xV4R5AukkVw" frameborder="0" allow="accelerometer; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
</p>
Many third-party tools like `ZEROTIER <https://www.zerotier.com/download/>`_ (Free) or `HAMACHI <https://vpn.net/>`_ (Free until 5 users) allow you to share your private network with other people.
For the example I'm gonna use ZeroTier because it's free and open source.
.. _zt-installation:
1. Installation
^^^^^^^^^^^^^^^
@ -62,7 +47,7 @@ To create a ZeroTier private network you need to register a ZeroTier account `on
(click on **login** then register on the bottom)
Once your account is activated, you can connect to `my.zerotier.com <https://my.zerotier.com/login>`_.
Head up to the **Network** section (highlighted in red in the image below).
.. figure:: img/hosting_guide_head_network.png
:align: center
@ -101,7 +86,7 @@ Now let's connect everyone.
3. Network authorization
^^^^^^^^^^^^^^^^^^^^^^^^
Since your ZeroTier network is Private, you will need to authorize each new user
to connect to it.
For each user you want to add, do the following step:
@ -119,7 +104,7 @@ For each user you want to add, do the following step:
:align: center
:width: 450px
Add the client to network-authorized users
4. Network connection
^^^^^^^^^^^^^^^^^^^^^
@ -159,7 +144,7 @@ Let's check the connection status. Right click on the tray icon and click on **S
Network status.
The network status must be **OK** for each user (like in the picture above) otherwise it means that you are not connected to the network.
If you see something like **ACCESS_DENIED**, it means that you were not authorized to join the network. Please check the :ref:`network-authorization` section.
This is it for the ZeroTier network setup. Now everything should be set up to use the multi-user add-on over the internet! You can now follow the :ref:`quickstart` guide to start using the multi-user add-on!
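On a headless Linux machine (a dedicated server, for example), the same join can be done from the command line; a sketch assuming the ZeroTier CLI is installed, with a placeholder network ID:
.. code-block:: bash
sudo zerotier-cli join <your-network-id>
sudo zerotier-cli status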
@ -168,12 +153,12 @@ This is it for the ZeroTier network setup. Now everything should be setup to use
Using port-forwarding
---------------------
The port forwarding method consists of configuring your network router to deny most traffic with a firewall, but to then allow particular internet traffic (like a multiuser connection) through the firewall on specified ports.
In order to know which ports are used by the add-on, please check the :ref:`port-setup` section.
To set up port forwarding for each port you can follow this `guide <https://www.wikihow.com/Set-Up-Port-Forwarding-on-a-Router>`_ for example.
Once you have set up the network you can follow the :ref:`quickstart` guide to begin using the multi-user add-on!
.. _host-dedicated:
@ -182,52 +167,50 @@ From the dedicated server
--------------------------
.. warning::
The dedicated server is developed to run directly on an internet server (like a VPS (Virtual Private Server)). You can also run it at home on a LAN but for internet hosting you need to follow the :ref:`port-forwarding` setup first. Please see :ref:`host-cloud` for a detailed walkthrough of cloud hosting using Google Cloud.
The dedicated server allows you to host a session with simplicity from any location.
It was developed to improve internet hosting performance (for example poor latency).
The dedicated server can be run in two ways:
- :ref:`cmd-line`
- :ref:`docker`
.. Note:: There are shell scripts available in the gitlab repository to conveniently start a dedicated server via either of these approaches. See section: :ref:`serverstartscripts`
.. _cmd-line:
Using a regular command line
----------------------------
You can run the dedicated server on any platform by following these steps:
1. Firstly, download and install python 3 (3.6 or above).
2. Install the latest version of the replication library:
2. Download and extract the dedicated server from `here <https://gitlab.com/slumber/replication/-/archive/develop/replication-develop.zip>`_
3. Open a terminal in the extracted folder and install python dependencies by running:
.. code-block:: bash
python -m pip install replication==0.1.13
python -m pip install -r requirements.txt
4. Launch the server with:
4. Launch the server from the same terminal with:
.. code-block:: bash
replication.server
python scripts/server.py
.. hint::
You can also specify a custom **port** (-p), **timeout** (-t), **admin password** (-pwd), **log level (ERROR, WARNING, INFO or DEBUG)** (-l) and **log file** (-lf) with the following optional arguments
You can also specify a custom **port** (-p), **timeout** (-t) and **admin password** (-pwd) with the following optional arguments
.. code-block:: bash
replication.server -p 5555 -pwd admin -t 5000 -l INFO -lf server.log
python scripts/server.py -p 5555 -pwd toto -t 1000
Here, for example, a server is instantiated on port 5555, with password 'admin', a 5 second timeout, and logging enabled.
As soon as the dedicated server is running, you can connect to it from blender by following :ref:`how-to-join`.
.. hint::
Some server commands are available to enable administrators to manage a multi-user session. Check :ref:`dedicated-management` to learn more.
.. _docker:
@ -235,90 +218,22 @@ As soon as the dedicated server is running, you can connect to it from blender b
Using a pre-configured image on docker engine
---------------------------------------------
Launching the dedicated server from a docker server is as simple as running:
.. code-block:: bash
docker run -d \
-p 5555-5560:5555-5560 \
-e port=5555 \
-e log_level=DEBUG \
-e password=admin \
-e timeout=5000 \
registry.gitlab.com/slumber/multi-user/multi-user-server:latest
Please use the :latest tag, or otherwise use the URL of the most recent container available in the `multi-user container registry <https://gitlab.com/slumber/multi-user/container_registry/1174180>`_. As soon as the dedicated server is running, you can connect to it from blender by following :ref:`how-to-join`.
You can check that your container is running, and find its ID and name with:
.. code-block:: bash
docker ps
.. _docker-logs:
Viewing logs in a docker container
-----------------------------------
Logs for the server running in a docker container can be accessed by outputting the following to a log file:
.. code-block:: bash
docker logs your-container-id >& dockerserver.log
.. Note:: If using WSL2 on Windows 10 (Windows Subsystem for Linux), it is preferable to run a dedicated server via regular command line approach (or the associated startup script) from within Windows - docker desktop for windows 10 usually uses the WSL2 backend where it is available.
.. This may not be true. Need to write up how to locally start a docker container from WSL2
First, you'll need to know your container ID, which you can find by running:
.. code-block:: bash
docker ps
If you're cloud-hosting with e.g. Google Cloud, your container will be the one associated with the `registry address <https://gitlab.com/slumber/multi-user/container_registry/1174180>`_ where your Docker image was located. e.g. registry.gitlab.com/slumber/multi-user/multi-user-server:0.2.0
You can either ssh in to your server and then run
.. code-block:: bash
cat your-log-name.log
or view the docker container logs with
.. code-block:: bash
docker logs your-container-name
OR
.. code-block:: bash
docker logs your-container-id
Note, see these `notes <https://cloud.google.com/compute/docs/containers/deploying-containers?_ga=2.113663175.-1396941296.1606125558#viewing_container_logs>`_ for how to check server logs on Google Cloud.
.. _serverstartscripts:
Server startup scripts
----------------------
Convenient scripts are available in the Gitlab repository: https://gitlab.com/slumber/multi-user/scripts/startup_scripts/
Simply run the relevant script in a shell on the host machine to start a server with one line of code via replication directly or via a docker container. Choose between the two methods:
.. code-block:: bash
./start-server.sh
or
.. code-block:: bash
./run-dockerfile.sh
As soon as the dedicated server is running, you can connect to it from blender.
You can check the :ref:`how-to-join` section.
.. hint::
Once your server is up and running, some commands are available to manage the session. Check :ref:`dedicated-management` to learn more.
.. _dedicated-management:
@ -327,30 +242,11 @@ Dedicated server management
Here is the list of available commands from the dedicated server:
- ``help`` or ``?``: Show all commands. Or, use ``help <command>`` to learn about another command
- ``exit`` or ``Ctrl+C`` : Stop the server.
- ``kick username``: kick the provided user.
- ``users``: list all online users.
Also, see :ref:`how-to-manage` for more details on managing a server.
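For illustration, a short exchange on the server console could look like the following (the username is hypothetical):
.. code-block:: bash
users
kick John
exit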
.. _cloud-dockermanage:
Managing a docker server from the command line
----------------------------------------------
If you want to be able to manage a server running within a docker container, open the terminal on the host machine (or SSH in, if you are using cloud hosting), and then run
.. code-block:: bash
docker ps
to find your container id, and then
.. code-block:: bash
docker attach your-container-id
to attach to the STDOUT from the container. There, you can issue the server management commands detailed in :ref:`dedicated-management`. Type ``?`` and hit return/enter to see the available commands. Also, see :ref:`how-to-manage` for more details on managing a server.
.. _port-setup:
@ -358,14 +254,14 @@ to attach to the STDOUT from the container. There, you can issue the server mana
Port setup
----------
The multi-user network architecture is based on a client-server model. The communication protocol uses four ports to communicate with clients:
* Commands: command transmission (such as **snapshots**, **change_rights**, etc.) [user-nominated port]
* Subscriber : pull data [Commands port + 1]
* Publisher : push data [Commands port + 2]
* TTL (time to leave) : used to ping each client [Commands port + 3]
To know which ports will be used, you just have to read the port in your preferences.
.. figure:: img/hosting_guide_port.png
:align: center
@ -373,315 +269,11 @@ To know which ports will be used, you just have to read the port in your prefere
:width: 200px
Port in host settings
In the picture below we have set up our port to **5555** so the four ports will be:
* Commands: 5555 (**5555**)
* Subscriber: 5556 (**5555** +1)
* Publisher: 5557 (**5555** +2)
* TTL: 5558 (**5555** +3)
Those four ports need to be accessible from the client otherwise multi-user won't work at all!
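For example, on a Linux host that uses ufw as its firewall (an assumption; adapt the rule to your own firewall or cloud provider), the four ports for the default base port 5555 could be opened with:
.. code-block:: bash
sudo ufw allow 5555:5558/tcp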
.. _host-cloud:
-------------------------
Cloud Hosting Walkthrough
-------------------------
The following is a walkthrough for how to set up a multi-user dedicated server instance on a cloud hosting provider - in this case, `Google Cloud <https://www.cloud.google.com>`_. Google Cloud is a powerful hosting service with a worldwide network of servers. It offers a free trial which provides free cloud hosting for 90 days, and then a free tier which runs indefinitely thereafter, so long as you stay within the `usage limits <https://cloud.google.com/free/docs/gcp-free-tier#free-tier-usage-limits>`_. Thanks to community member @NotFood for the tip!
Cloud hosting is a little more complicated to set up, but it can be valuable if you are trying to host a session with multiple friends scattered about planet earth. This can resolve issues with data replication or slowdowns due to poor latency of some users (high ping). This guide may seem technical, but if you follow the steps, you should be able to succeed in hosting an internet server to co-create with other multi-user creators around the world.
Setup Process
-------------
1. Sign Up for Google Cloud
^^^^^^^^^^^^^^^^^^^^^^^^^^^
Let's start by activating an account with Google Cloud. Go to https://www.cloud.google.com and click 'Get Started For Free'
.. figure:: img/hosting_guide_gcloud_1.jpg
:align: center
:width: 450px
Google will ask you to login/signup, and to set up a billing account (Don't worry. It will not be charged unless you explicitly enable billing and then run over your `free credit allowance <https://cloud.google.com/free/docs/gcp-free-tier>`_). You will need to choose a billing country (relevant for `tax purposes <https://cloud.google.com/billing/docs/resources/vat-overview>`_). You will choose your server location at a later step.
2. Enable Billing and Compute Engine API
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
From here on, we will mostly stick to the instructions provided `here <https://cloud.google.com/compute/docs/quickstart-linux>`_. Nevertheless, the instructions for multi-user specifically are as follows.
In order to set up a Virtual Machine (VM) to host your server, you will need to enable the billing account which was created during your signup process. From your `console <https://console.cloud.google.com/getting-started>`_, click on 'Go to Checklist' and then 'Create a Billing Account', following the prompts to choose the billing account that was created for you upon signup.
.. figure:: img/hosting_guide_gcloud_2.jpg
:align: center
:width: 300px
.. figure:: img/hosting_guide_gcloud_3.jpg
:align: center
:width: 300px
.. figure:: img/hosting_guide_gcloud_4.jpg
:align: center
:width: 300px
.. figure:: img/hosting_guide_gcloud_5.jpg
:align: center
:width: 300px
.. figure:: img/hosting_guide_gcloud_6.jpg
:align: center
:width: 300px
Now hit 'Set Account', and go back to your `console <https://console.cloud.google.com/getting-started>`_.
Now enable the Compute Engine API. Click `here <https://console.cloud.google.com/apis/api/compute.googleapis.com/overview>`_ to enable.
.. figure:: img/hosting_guide_gcloud_7.jpg
:align: center
:width: 300px
3. Create a Linux Virtual Machine Instance
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Continue following the `instructions <https://cloud.google.com/compute/docs/quickstart-linux#create_a_virtual_machine_instance>`_ to create a VM instance. However, once you've finished step 2 of 'Create a virtual machine instance', use the settings and steps for multi-user as follows.
.. _server-location:
3.1 Choose a Server Location
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The most important settings which you will need to choose for your specific case are the server Region and Zone. You must choose a location which will provide the best ping for all of your fellow creators.
All you need to know is that you'll probably want to choose a location near where most of your collaborators are located. If your friends are spread out, somewhere in the middle that distributes the ping evenly to all users is best.
You can use `this map <https://cloud.google.com/about/locations/>`_ to make a rough guess of the best server location, if you know your friends' locations.
.. figure:: img/hosting_guide_gcloud_9.jpg
:align: center
:width: 450px
A much better approach is to have your users run a ping test for Google Cloud's servers at https://www.gcping.com/
Have your collaborators open this webpage from their fastest browser, and press the play button. The play button turns to a stop icon while the ping test is running. When it is complete, the play button returns. You may need to refresh your browser to get this to work. You can replay the test to add more server locations to the scan, and stop when you are satisfied that the results are consistent.
Now, gather your friends' data, and work down each user's list from the top, until you find the first location which gives roughly the same ping for all users.
In general, global (using load balancing) will provide the best results, but beyond that, the US Central servers (e.g. Iowa) generally turn out best for a globally distributed group of creators. When in doubt, choose between the servers offered under the `free tier <https://cloud.google.com/free/docs/gcp-free-tier>`_:
- Oregon: *us-west1*
- Iowa: *us-central1*
- South Carolina: *us-east1*
For the following example, the server which gave the most balanced and lowest average ping between two friends based in Europe and Australia was in Iowa. Salt Lake City would also be an excellent choice.
.. figure:: img/hosting_guide_gcloud_10.jpg
:align: center
:width: 450px
Left - European User | Right - Australian User
Now, input this server location in the 'Region' field for your instance, and leave the default zone which is then populated.
.. Note:: You can read `here <https://cloud.google.com/solutions/best-practices-compute-engine-region-selection>`_ for a deeper understanding about how to choose a good server location.
3.2 Configure the VM
^^^^^^^^^^^^^^^^^^^^
You can deploy the replication server to your VM in either of the ways mentioned at :ref:`host-dedicated`. That is, you can set it up :ref:`cmd-line` or :ref:`docker`. We will go through both options in this walkthrough. See :ref:`container_v_direct` for more details on how to choose. Deploying a container is the recommended approach.
.. _cloud-container:
Option 1 - Deploy a container
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If you are familiar with Docker, you'll appreciate that it makes life a little simpler for us. While configuring your instance, you can check **Deploy a container to this VM instance** and copy in the URL of the latest docker image available from the `multi-user container registry <https://gitlab.com/slumber/multi-user/container_registry/1174180>`_ to the *Container image* field, or use the tag ``:latest``.
.. figure:: img/hosting_guide_gcloud_8b.jpg
:align: center
:width: 450px
Your configuration with Docker should look like this
Make sure to choose the amount of memory you'd like your server to be able to handle (how much memory does your Blender scene require?). In this example, I've chosen 4GB of RAM.
Click on **Advanced container options** and turn on *Allocate a buffer for STDIN* and *Allocate a pseudo-TTY* just in case you want to run an interactive shell in your container.
.. _cloud-optional-parameters:
Optional server parameters
^^^^^^^^^^^^^^^^^^^^^^^^^^
The default Docker image essentially runs the equivalent of:
.. code-block:: bash
replication.server -pwd admin -p 5555 -t 5000 -l DEBUG -lf multiuser_server.log
This means the server will be launched with 'admin' as the administrator password, run on ports 5555:5558, use a timeout of 5 seconds, verbose 'DEBUG' log level, and with log files written to 'multiuser_server.log'. See :ref:`cmd-line` for a description of optional parameters.
.. Note:: If you'd like to configure different server options from the default docker configuration, you can insert your options here by expanding 'Advanced container options'
For example, I would like to launch my server with a different administrator password than the default, my own log filename, and a shorter 3-second (3000ms) timeout. I'll click *Add argument* under **Command arguments** and paste the following command with options into the "command arguments" field:
.. code-block:: bash
python3 -m replication.server -pwd supersecretpassword -p 5555 -t 3000 -l DEBUG -lf logname.log
Now, my configuration should look like this:
.. figure:: img/hosting_guide_gcloud_8c.jpg
:align: center
:width: 450px
The rest of the settings are now complete. Hit **Create** and your instance will go live. If you've taken this approach, you're already almost there! Skip to :ref:`cloud-firewall`.
.. hint:: You can find further information on configuration options `here <https://cloud.google.com/compute/docs/containers/configuring-options-to-run-containers>`_. Also, see these `notes <https://cloud.google.com/compute/docs/containers/deploying-containers?_ga=2.113663175.-1396941296.1606125558#viewing_container_logs>`_ for other options when deploying your server inside a container, including how to access the server's logs.
.. _cloud-direct:
Option 2 - Over SSH
^^^^^^^^^^^^^^^^^^^
Otherwise, we can run the dedicated server ourselves from the command-line over SSH.
While creating your instance, keep the default settings mentioned in the `guide <https://cloud.google.com/compute/docs/quickstart-linux#create_a_virtual_machine_instance>`_; however, at step 4, choose Debian version 10. Also, there is no need to enable HTTP, so skip step 6.
.. figure:: img/hosting_guide_gcloud_8a.jpg
:align: center
:width: 450px
Your configuration should look like this
Make sure to choose the amount of memory you'd like your server to be able to handle (how much memory does your Blender scene require?). In this example, I've chosen 4GB of RAM.
Now, finally, click 'Create' to generate your Virtual Machine Instance.
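For reference, a roughly equivalent VM can be created from the ``gcloud`` CLI instead of the web console. This is only a sketch under assumptions: the instance name, zone, machine type and image below are placeholders which you should adapt to your chosen region and memory requirements:

.. code-block:: bash

   # Create a Debian 10 VM roughly matching the configuration above (all names are placeholders)
   gcloud compute instances create multi-user-server \
       --zone=us-central1-a \
       --machine-type=e2-medium \
       --image-family=debian-10 \
       --image-project=debian-cloud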
.. _cloud-firewall:
4. Setting up Firewall and opening Ports
----------------------------------------
Now that your VM is up, you'll need to set up firewall rules and open the ports required by multi-user. The documentation for VM firewalls on Google Cloud is `here <https://cloud.google.com/vpc/docs/using-firewalls#listing-rules-vm>`_.
First, go to the dashboard showing your `VM instances <https://console.cloud.google.com/compute/instances>`_ and note the 'External IP' address for later. This is the address of your server. Then, click 'Set up Firewall Rules'.
.. figure:: img/hosting_guide_gcloud_11.jpg
:align: center
:width: 450px
Note down your External IP
Now you will need to create two rules: one to enable communication inbound to your server (ingress), and another to enable outbound communication from your server (egress). Click 'Create Firewall'.
.. figure:: img/hosting_guide_gcloud_12.jpg
:align: center
:width: 450px
Now create a rule exactly as in the image below for the outbound communication (egress).
.. figure:: img/hosting_guide_gcloud_13.jpg
:align: center
:width: 450px
Egress
.. Note:: If you set a different port number in :ref:`cloud-optional-parameters`, then use the ports indicated in :ref:`port-setup`
And another rule exactly as in the image below for the inbound communication (ingress).
.. figure:: img/hosting_guide_gcloud_14.jpg
:align: center
:width: 450px
Ingress
Finally, your firewall configuration should look like this.
.. figure:: img/hosting_guide_gcloud_15.jpg
:align: center
:width: 450px
Final Firewall Configuration
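If you prefer to script this step, roughly equivalent rules can be created with the ``gcloud`` CLI. This is only a sketch, assuming the default VPC network and the default port range 5555-5558; adjust the range if you changed the base port in :ref:`cloud-optional-parameters`:

.. code-block:: bash

   # Inbound rule: let clients reach the four multi-user ports
   gcloud compute firewall-rules create multi-user-ingress \
       --direction=INGRESS --allow=tcp:5555-5558 --source-ranges=0.0.0.0/0

   # Outbound rule: let the server answer on the same ports
   gcloud compute firewall-rules create multi-user-egress \
       --direction=EGRESS --allow=tcp:5555-5558 --destination-ranges=0.0.0.0/0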
5. Install Replication Server into Virtual Machine
--------------------------------------------------
.. Note:: Skip to :ref:`initialise-server` if you've opted to launch the server by deploying a container. Your server is already live!
Now that we have set up our Virtual Machine instance, we can SSH into it, and install the Replication Server. Open the `VM Instances console <https://console.cloud.google.com/compute/instances>`_ once more, and SSH into your instance. It's easiest to use the browser terminal provided by Google Cloud (I had the best luck using the Google Chrome browser), but you can also see `here <https://cloud.google.com/compute/docs/instances/connecting-advanced#thirdpartytools>`_ for how to set up your instance for SSH access from your terminal.
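If you would rather connect from your own terminal than from the browser, the Google Cloud SDK provides an SSH wrapper. The instance name and zone below are placeholders matching the earlier sketch:

.. code-block:: bash

   # Open an SSH session to the instance from a local terminal (requires the Google Cloud SDK)
   gcloud compute ssh multi-user-server --zone=us-central1-a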
.. figure:: img/hosting_guide_gcloud_16.jpg
:align: center
:width: 450px
Now, a terminal window should pop up in a new browser window looking something like this:
.. figure:: img/hosting_guide_gcloud_17.jpg
:align: center
:width: 450px
Remember, you set up the VM with Debian 10, which comes with Python 3.7.3 already installed. The only missing dependency is pip3, so run:
.. code-block:: bash
sudo apt install python3-pip
.. figure:: img/hosting_guide_gcloud_18.jpg
:align: center
:width: 450px
And now let's install the latest version of replication:
.. code-block:: bash
sudo pip3 install replication==0.1.13
6. Launch Replication Server on VM Instance
-------------------------------------------
We're finally ready to launch the server. Simply run:
.. code-block:: bash
python3 -m replication.server -p 5555 -pwd admin -t 5000 -l INFO -lf server.log
See :ref:`cmd-line` for a description of optional parameters.
And your replication server is live! It should stay running in the terminal window until you close it. Copy the external IP that you noted down earlier, available `here <https://console.cloud.google.com/networking/addresses/list>`_ and now you can open Blender and connect to your server!
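Keep in mind that, launched this way, the server runs in the foreground of your SSH session and may stop if that connection drops (see :ref:`container_v_direct` below). One simple, non-definitive workaround is to detach it from the terminal, for example with ``nohup``:

.. code-block:: bash

   # Launch the server detached from the SSH session so it survives a disconnect
   nohup python3 -m replication.server -p 5555 -pwd admin -t 5000 -l INFO -lf server.log &

   # Follow the log file to confirm it is running
   tail -f server.log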
.. _initialise-server:
7. Initialise your Server in Blender
------------------------------------
Once in Blender, make sure your multi-user addon is updated to the latest version (see :ref:`update-version`). Then, follow the instructions from :ref:`how-to-join` and connect as an admin user, using the password you launched the server with. Input your external IP, and make sure you're set to JOIN the server. Then, click CONNECT.
.. figure:: img/hosting_guide_gcloud_19.jpg
:align: center
:width: 200px
Now, as the admin user, you can choose whether to initialise the server with a preloaded scene or an empty scene.
.. figure:: img/hosting_guide_gcloud_20.jpg
:align: center
:width: 200px
Press OK, and now your session is live!
If you made it this far, congratulations! You can now go ahead and share the external IP address with your friends and co-creators and have fun with real-time collaboration in Blender!
Hopefully, your cloud server setup has improved your group's overall ping readings, and you're in for a smooth and trouble-free co-creation session.
.. Note:: If you should so desire, pay attention to your credit and follow the steps `here <https://cloud.google.com/compute/docs/quickstart-linux#clean-up>`_ to close your instance at your discretion.
.. _container_v_direct:
Should I deploy a Docker Container or launch a server from Linux VM command-line?
-------------------------------------------------------------------------------------
- Directly from Linux VM - This approach gives you more direct control over your session. However, your server may time out once your SSH link to the server is interrupted (for example, if the admin's computer goes to sleep).
- Deploy a Docker Container - This is the recommended approach. This approach is better for leaving a session running without supervision. It can however be more complicated to manage. Use this approach if you'd like a consistent experience with others in the multi-user community, pulling from the most up-to-date docker image maintained by @swann in the multi-user container registry.
View File

@ -21,11 +21,11 @@ In order to help with the testing, you have several possibilities:
- Test `development branch <https://gitlab.com/slumber/multi-user/-/branches>`_
--------------------------
Filing an issue on Gitlab
Filling an issue on Gitlab
--------------------------
The `gitlab issue tracker <https://gitlab.com/slumber/multi-user/issues>`_ is used for bug report and enhancement suggestion.
You will need a Gitlab account to be able to open a new issue there and click on "New issue" button in the main multi-user project.
You will need a Gitlab account to be able to open a new issue there and click on "New issue" button.
Here is some useful information you should provide in a bug report:
@ -35,77 +35,8 @@ Here are some useful information you should provide in a bug report:
Contributing code
=================
In general, this project follows the `Gitflow Workflow <https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow>`_. It may help to understand that there are three different repositories - the upstream (main multi-user project repository, designated in git by 'upstream'), remote (forked repository, designated in git by 'origin'), and the local repository on your machine.
The following example suggests how to contribute a feature.
1. Fork the project into a new repository:
https://gitlab.com/yourname/multi-user
2. Clone the new repository locally:
.. code-block:: bash
git clone https://gitlab.com/yourname/multi-user.git
3. Keep your fork in sync with the main repository by setting up the upstream pointer once. cd into your git repo and then run:
.. code-block:: bash
git remote add upstream https://gitlab.com/slumber/multi-user.git
4. Now, locally check out the develop branch, upon which to base your new feature branch:
.. code-block:: bash
git checkout develop
5. Fetch any changes from the main upstream repository into your fork (especially if some time has passed since forking):
.. code-block:: bash
git fetch upstream
'Fetch' downloads objects and refs from the repository, but doesn't apply them to the branch we are working on. We want to apply the updates to the branch we will work from, which we checked out in step 4.
6. Let's merge any recent changes from the remote upstream (original repository's) 'develop' branch into our local 'develop' branch:
.. code-block:: bash
git merge upstream/develop
7. Update your forked repository's remote 'develop' branch with the fetched changes, just to keep things tidy. Make sure you haven't committed any local changes in the interim:
.. code-block:: bash
git push origin develop
8. Locally create your own new feature branch from the develop branch, using the syntax:
.. code-block:: bash
git checkout -b feature/yourfeaturename
...where 'feature/' designates a feature branch, and 'yourfeaturename' is a name of your choosing
9. Add and commit your changes, including a commit message:
.. code-block:: bash
git commit -am 'Add fooBar'
10. Push committed changes to the remote copy of your new feature branch which will be created in this step:
.. code-block:: bash
git push -u origin feature/yourfeaturename
If it's been some time since performing steps 4 through 7, make sure to checkout 'develop' again and pull the latest changes from upstream before checking out and creating feature/yourfeaturename and pushing changes. Alternatively, checkout 'feature/yourfeaturename' and simply run:
.. code-block:: bash
git rebase upstream/develop
and your staged commits will be merged along with the changes. More information on `rebasing here <https://git-scm.com/book/en/v2/Git-Branching-Rebasing>`_
.. Hint:: The -u option sets up your locally created new branch to follow a remote branch which is now created with the same name on your remote repository.
11. Finally, create a new Pull/Merge Request on Gitlab to merge the remote version of this new branch with commited updates, back into the upstream 'develop' branch, finalising the integration of the new feature.
Make sure to set the target branch to 'develop' for features and 'master' for hotfixes. Also, include any milestones or labels, and assignees that may be relevant. By default, the Merge option to 'delete source branch when merge request is activated' will be checked.
12. Thanks for contributing!
.. Note:: For hotfixes, replace 'feature/' with 'hotfix/' and base the new branch off the parent 'master' branch instead of 'develop' branch. Make sure to checkout 'master' before running step 8
.. Note:: Let's follow the Atlassian `Gitflow Workflow <https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow>`_, except for one main difference - submitting a pull request rather than merging by ourselves.
.. Note:: See `here <https://philna.sh/blog/2018/08/21/git-commands-to-keep-a-fork-up-to-date/>`_ or `here <https://stefanbauer.me/articles/how-to-keep-your-git-fork-up-to-date>`_ for instructions on how to keep a fork up to date.
1. Fork it (https://gitlab.com/yourname/yourproject/fork)
2. Create your feature branch (git checkout -b feature/fooBar)
3. Commit your changes (git commit -am 'Add some fooBar')
4. Push to the branch (git push origin feature/fooBar)
5. Create a new Pull Request

View File

@ -19,9 +19,9 @@
bl_info = {
"name": "Multi-User",
"author": "Swann Martinez",
"version": (0, 2, 0),
"version": (0, 0, 3),
"description": "Enable real-time collaborative workflow inside blender",
"blender": (2, 82, 0),
"blender": (2, 80, 0),
"location": "3D View > Sidebar > Multi-User tab",
"warning": "Unstable addon, use it at your own risks",
"category": "Collaboration",
@ -40,46 +40,45 @@ import sys
import bpy
from bpy.app.handlers import persistent
from . import environment
from . import environment, utils
# TODO: remove dependency as soon as replication will be installed as a module
DEPENDENCIES = {
("replication", '0.1.17'),
("zmq","zmq"),
("jsondiff","jsondiff"),
("deepdiff", "deepdiff"),
("psutil","psutil")
}
module_error_msg = "Insufficient rights to install the multi-user \
dependencies, launch blender with administrator rights."
libs = os.path.dirname(os.path.abspath(__file__))+"\\libs\\replication\\replication"
def register():
# Setup logging policy
logging.basicConfig(
format='%(asctime)s CLIENT %(levelname)-8s %(message)s',
datefmt='%H:%M:%S',
level=logging.INFO)
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
if libs not in sys.path:
sys.path.append(libs)
try:
if bpy.app.version[1] >= 91:
python_binary_path = sys.executable
else:
python_binary_path = bpy.app.binary_path_python
environment.setup(DEPENDENCIES, bpy.app.binary_path_python)
except ModuleNotFoundError:
logging.fatal("Fail to install multi-user dependencies, try to execute blender with admin rights.")
return
from . import presence
from . import operators
from . import ui
from . import preferences
from . import addon_updater_ops
environment.setup(DEPENDENCIES, python_binary_path)
preferences.register()
addon_updater_ops.register(bl_info)
presence.register()
operators.register()
ui.register()
from . import presence
from . import operators
from . import ui
from . import preferences
from . import addon_updater_ops
preferences.register()
addon_updater_ops.register(bl_info)
presence.register()
operators.register()
ui.register()
except ModuleNotFoundError as e:
raise Exception(module_error_msg)
logging.error(module_error_msg)
bpy.types.WindowManager.session = bpy.props.PointerProperty(
type=preferences.SessionProps)
bpy.types.ID.uuid = bpy.props.StringProperty(

View File

@ -23,11 +23,7 @@ https://github.com/CGCookie/blender-addon-updater
"""
__version__ = "1.0.8"
import errno
import traceback
import platform
import ssl
import urllib.request
import urllib
@ -102,7 +98,6 @@ class Singleton_updater(object):
# runtime variables, initial conditions
self._verbose = False
self._use_print_traces = True
self._fake_install = False
self._async_checking = False # only true when async daemon started
self._update_ready = None
@ -138,13 +133,6 @@ class Singleton_updater(object):
self._select_link = select_link_function
# called from except blocks, to print the exception details,
# according to the use_print_traces option
def print_trace():
if self._use_print_traces:
traceback.print_exc()
# -------------------------------------------------------------------------
# Getters and setters
# -------------------------------------------------------------------------
@ -178,7 +166,7 @@ class Singleton_updater(object):
try:
self._auto_reload_post_update = bool(value)
except:
raise ValueError("auto_reload_post_update must be a boolean value")
raise ValueError("Must be a boolean value")
@property
def backup_current(self):
@ -363,7 +351,7 @@ class Singleton_updater(object):
try:
self._repo = str(value)
except:
raise ValueError("repo must be a string value")
raise ValueError("User must be a string")
@property
def select_link(self):
@ -389,7 +377,6 @@ class Singleton_updater(object):
os.makedirs(value)
except:
if self._verbose: print("Error trying to staging path")
self.print_trace()
return
self._updater_path = value
@ -459,16 +446,6 @@ class Singleton_updater(object):
except:
raise ValueError("Verbose must be a boolean value")
@property
def use_print_traces(self):
return self._use_print_traces
@use_print_traces.setter
def use_print_traces(self, value):
try:
self._use_print_traces = bool(value)
except:
raise ValueError("use_print_traces must be a boolean value")
@property
def version_max_update(self):
return self._version_max_update
@ -660,9 +637,6 @@ class Singleton_updater(object):
else:
if self._verbose: print("Tokens not setup for engine yet")
# Always set user agent
request.add_header('User-Agent', "Python/"+str(platform.python_version()))
# run the request
try:
if context:
@ -678,7 +652,6 @@ class Singleton_updater(object):
self._error = "HTTP error"
self._error_msg = str(e.code)
print(self._error, self._error_msg)
self.print_trace()
self._update_ready = None
except urllib.error.URLError as e:
reason = str(e.reason)
@ -690,7 +663,6 @@ class Singleton_updater(object):
self._error = "URL error, check internet connection"
self._error_msg = reason
print(self._error, self._error_msg)
self.print_trace()
self._update_ready = None
return None
else:
@ -712,7 +684,6 @@ class Singleton_updater(object):
self._error_msg = str(e.reason)
self._update_ready = None
print(self._error, self._error_msg)
self.print_trace()
return None
else:
return None
@ -729,17 +700,15 @@ class Singleton_updater(object):
if self._verbose: print("Preparing staging folder for download:\n",local)
if os.path.isdir(local) == True:
try:
shutil.rmtree(local, ignore_errors=True)
shutil.rmtree(local)
os.makedirs(local)
except:
error = "failed to remove existing staging directory"
self.print_trace()
else:
try:
os.makedirs(local)
except:
error = "failed to create staging directory"
self.print_trace()
if error != None:
if self._verbose: print("Error: Aborting update, "+error)
@ -764,10 +733,6 @@ class Singleton_updater(object):
request.add_header('PRIVATE-TOKEN',self._engine.token)
else:
if self._verbose: print("Tokens not setup for selected engine yet")
# Always set user agent
request.add_header('User-Agent', "Python/"+str(platform.python_version()))
self.urlretrieve(urllib.request.urlopen(request,context=context), self._source_zip)
# add additional checks on file size being non-zero
if self._verbose: print("Successfully downloaded update zip")
@ -778,7 +743,6 @@ class Singleton_updater(object):
if self._verbose:
print("Error retrieving download, bad link?")
print("Error: {}".format(e))
self.print_trace()
return False
@ -793,18 +757,16 @@ class Singleton_updater(object):
if os.path.isdir(local):
try:
shutil.rmtree(local, ignore_errors=True)
shutil.rmtree(local)
except:
if self._verbose:print("Failed to removed previous backup folder, contininuing")
self.print_trace()
# remove the temp folder; shouldn't exist but could if previously interrupted
if os.path.isdir(tempdest):
try:
shutil.rmtree(tempdest, ignore_errors=True)
shutil.rmtree(tempdest)
except:
if self._verbose:print("Failed to remove existing temp folder, contininuing")
self.print_trace()
# make the full addon copy, which temporarily places outside the addon folder
if self._backup_ignore_patterns != None:
shutil.copytree(
@ -832,7 +794,7 @@ class Singleton_updater(object):
# make the copy
shutil.move(backuploc,tempdest)
shutil.rmtree(self._addon_root, ignore_errors=True)
shutil.rmtree(self._addon_root)
os.rename(tempdest,self._addon_root)
self._json["backup_date"] = ""
@ -853,7 +815,7 @@ class Singleton_updater(object):
# clear the existing source folder in case previous files remain
outdir = os.path.join(self._updater_path, "source")
try:
shutil.rmtree(outdir, ignore_errors=True)
shutil.rmtree(outdir)
if self._verbose:
print("Source folder cleared")
except:
@ -866,7 +828,6 @@ class Singleton_updater(object):
except Exception as err:
print("Error occurred while making extract dir:")
print(str(err))
self.print_trace()
self._error = "Install failed"
self._error_msg = "Failed to make extract directory"
return -1
@ -908,7 +869,6 @@ class Singleton_updater(object):
if exc.errno != errno.EEXIST:
self._error = "Install failed"
self._error_msg = "Could not create folder from zip"
self.print_trace()
return -1
else:
with open(os.path.join(outdir, subpath), "wb") as outfile:
@ -1002,13 +962,12 @@ class Singleton_updater(object):
print("Clean removing file {}".format(os.path.join(base,f)))
for f in folders:
if os.path.join(base,f)==self._updater_path: continue
shutil.rmtree(os.path.join(base,f), ignore_errors=True)
shutil.rmtree(os.path.join(base,f))
print("Clean removing folder and contents {}".format(os.path.join(base,f)))
except Exception as err:
error = "failed to create clean existing addon folder"
print(error, str(err))
self.print_trace()
# Walk through the base addon folder for rules on pre-removing
# but avoid removing/altering backup and updater file
@ -1024,7 +983,6 @@ class Singleton_updater(object):
if self._verbose: print("Pre-removed file "+file)
except OSError:
print("Failed to pre-remove "+file)
self.print_trace()
# Walk through the temp addon sub folder for replacements
# this implements the overwrite rules, which apply after
@ -1048,7 +1006,7 @@ class Singleton_updater(object):
# otherwise, check each file to see if matches an overwrite pattern
replaced=False
for ptrn in self._overwrite_patterns:
if fnmatch.filter([file],ptrn):
if fnmatch.filter([destFile],ptrn):
replaced=True
break
if replaced:
@ -1064,11 +1022,10 @@ class Singleton_updater(object):
# now remove the temp staging folder and downloaded zip
try:
shutil.rmtree(staging_path, ignore_errors=True)
shutil.rmtree(staging_path)
except:
error = "Error: Failed to remove existing staging directory, consider manually removing "+staging_path
if self._verbose: print(error)
self.print_trace()
def reload_addon(self):
@ -1084,16 +1041,9 @@ class Singleton_updater(object):
# not allowed in restricted context, such as register module
# toggle to refresh
if "addon_disable" in dir(bpy.ops.wm): # 2.7
bpy.ops.wm.addon_disable(module=self._addon_package)
bpy.ops.wm.addon_refresh()
bpy.ops.wm.addon_enable(module=self._addon_package)
print("2.7 reload complete")
else: # 2.8
bpy.ops.preferences.addon_disable(module=self._addon_package)
bpy.ops.preferences.addon_refresh()
bpy.ops.preferences.addon_enable(module=self._addon_package)
print("2.8 reload complete")
bpy.ops.wm.addon_disable(module=self._addon_package)
bpy.ops.wm.addon_refresh()
bpy.ops.wm.addon_enable(module=self._addon_package)
# -------------------------------------------------------------------------
@ -1425,26 +1375,26 @@ class Singleton_updater(object):
if "last_check" not in self._json or self._json["last_check"] == "":
return True
else:
now = datetime.now()
last_check = datetime.strptime(self._json["last_check"],
"%Y-%m-%d %H:%M:%S.%f")
next_check = last_check
offset = timedelta(
days=self._check_interval_days + 30*self._check_interval_months,
hours=self._check_interval_hours,
minutes=self._check_interval_minutes
)
now = datetime.now()
last_check = datetime.strptime(self._json["last_check"],
"%Y-%m-%d %H:%M:%S.%f")
next_check = last_check
offset = timedelta(
days=self._check_interval_days + 30*self._check_interval_months,
hours=self._check_interval_hours,
minutes=self._check_interval_minutes
)
delta = (now - offset) - last_check
if delta.total_seconds() > 0:
if self._verbose:
print("{} Updater: Time to check for updates!".format(self._addon))
return True
if self._verbose:
print("{} Updater: Determined it's not yet time to check for updates".format(self._addon))
return False
delta = (now - offset) - last_check
if delta.total_seconds() > 0:
if self._verbose:
print("{} Updater: Time to check for updates!".format(self._addon))
return True
else:
if self._verbose:
print("{} Updater: Determined it's not yet time to check for updates".format(self._addon))
return False
def get_json_path(self):
"""Returns the full path to the JSON state file used by this updater.
@ -1463,7 +1413,6 @@ class Singleton_updater(object):
except Exception as err:
print("Other OS error occurred while trying to rename old JSON")
print(err)
self.print_trace()
return json_path
def set_updater_json(self):
@ -1564,7 +1513,6 @@ class Singleton_updater(object):
except Exception as exception:
print("Checking for update error:")
print(exception)
self.print_trace()
if not self._error:
self._update_ready = False
self._update_version = None
@ -1676,7 +1624,10 @@ class GitlabEngine(object):
return "{}{}{}".format(self.api_url,"/api/v4/projects/",updater.repo)
def form_tags_url(self, updater):
return "{}{}".format(self.form_repo_url(updater),"/repository/tags")
if updater.use_releases:
return "{}{}".format(self.form_repo_url(updater),"/releases")
else:
return "{}{}".format(self.form_repo_url(updater),"/repository/tags")
def form_branch_list_url(self, updater):
# does not validate branch name.
@ -1704,7 +1655,12 @@ class GitlabEngine(object):
def parse_tags(self, response, updater):
if response == None:
return []
return [{"name": tag["name"], "zipball_url": self.get_zip_url(tag["commit"]["id"], updater)} for tag in response]
# Return asset links from release
if updater.use_releases:
return [{"name": release["name"], "zipball_url": release["assets"]["links"][0]["url"]} for release in response]
else:
return [{"name": tag["name"], "zipball_url": self.get_zip_url(tag["commit"]["id"], updater)} for tag in response]
# -----------------------------------------------------------------------------

File diff suppressed because it is too large Load Diff

View File

@ -15,7 +15,6 @@
#
# ##### END GPL LICENSE BLOCK #####
import bpy
__all__ = [
'bl_object',
@ -35,20 +34,11 @@ __all__ = [
'bl_metaball',
'bl_lattice',
'bl_lightprobe',
'bl_speaker',
'bl_font',
'bl_sound',
'bl_file',
'bl_sequencer',
'bl_node_group',
'bl_texture',
'bl_speaker'
] # Order here defines execution order
if bpy.app.version[1] >= 91:
__all__.append('bl_volume')
from . import *
from replication.data import ReplicatedDataFactory
from ..libs.replication.replication.data import ReplicatedDataFactory
def types_to_register():
return __all__

View File

@ -42,7 +42,7 @@ KEYFRAME = [
]
def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy:bool =True) -> dict:
""" Dump a sigle curve to a dict
:arg fcurve: fcurve to dump
@ -59,7 +59,7 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
if use_numpy:
points = fcurve.keyframe_points
fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)
fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)
fcurve_data['keyframe_points'] = np_dump_collection(points, KEYFRAME)
else: # Legacy method
@ -92,8 +92,7 @@ def load_fcurve(fcurve_data, fcurve):
if use_numpy:
keyframe_points.add(fcurve_data['keyframes_count'])
np_load_collection(
fcurve_data["keyframe_points"], keyframe_points, KEYFRAME)
np_load_collection(fcurve_data["keyframe_points"], keyframe_points, KEYFRAME)
else:
# paste dumped keyframes
@ -135,9 +134,7 @@ class BlAction(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'ACTION_TWEAK'
bl_reload_parent = False
def _construct(self, data):
return bpy.data.actions.new(data["name"])
@ -155,11 +152,7 @@ class BlAction(BlDatablock):
dumped_data_path, index=dumped_array_index)
load_fcurve(dumped_fcurve, fcurve)
id_root = data.get('id_root')
if id_root:
target.id_root = id_root
target.id_root = data['id_root']
def _dump_implementation(self, data, instance=None):
dumper = Dumper()

View File

@ -31,9 +31,7 @@ class BlArmature(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 0
bl_automatic_push = True
bl_check_common = False
bl_icon = 'ARMATURE_DATA'
bl_reload_parent = False
def _construct(self, data):
return bpy.data.armatures.new(data["name"])
@ -94,7 +92,6 @@ class BlArmature(BlDatablock):
new_bone.head = bone_data['head_local']
new_bone.tail_radius = bone_data['tail_radius']
new_bone.head_radius = bone_data['head_radius']
# new_bone.roll = bone_data['roll']
if 'parent' in bone_data:
new_bone.parent = target.edit_bones[data['bones']
@ -126,8 +123,7 @@ class BlArmature(BlDatablock):
'use_connect',
'parent',
'name',
'layers',
# 'roll',
'layers'
]
data = dumper.dump(instance)

View File

@ -29,16 +29,14 @@ class BlCamera(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'CAMERA_DATA'
bl_reload_parent = False
def _construct(self, data):
return bpy.data.cameras.new(data["name"])
def _load_implementation(self, data, target):
loader = Loader()
loader = Loader()
loader.load(target, data)
dof_settings = data.get('dof')
@ -47,25 +45,13 @@ class BlCamera(BlDatablock):
if dof_settings:
loader.load(target.dof, dof_settings)
background_images = data.get('background_images')
target.background_images.clear()
if background_images:
for img_name, img_data in background_images.items():
img_id = img_data.get('image')
if img_id:
target_img = target.background_images.new()
target_img.image = bpy.data.images[img_id]
loader.load(target_img, img_data)
def _dump_implementation(self, data, instance=None):
assert(instance)
# TODO: background image support
dumper = Dumper()
dumper.depth = 3
dumper.depth = 2
dumper.include_filter = [
"name",
'type',
@ -84,7 +70,6 @@ class BlCamera(BlDatablock):
'aperture_fstop',
'aperture_blades',
'aperture_rotation',
'ortho_scale',
'aperture_ratio',
'display_size',
'show_limits',
@ -94,24 +79,7 @@ class BlCamera(BlDatablock):
'sensor_fit',
'sensor_height',
'sensor_width',
'show_background_images',
'background_images',
'alpha',
'display_depth',
'frame_method',
'offset',
'rotation',
'scale',
'use_flip_x',
'use_flip_y',
'image'
]
return dumper.dump(instance)
def _resolve_deps_implementation(self):
deps = []
for background in self.instance.background_images:
if background.image:
deps.append(background.image)
return deps

View File

@ -21,66 +21,8 @@ import mathutils
from .. import utils
from .bl_datablock import BlDatablock
from .dump_anything import Loader, Dumper
def dump_collection_children(collection):
collection_children = []
for child in collection.children:
if child not in collection_children:
collection_children.append(child.uuid)
return collection_children
def dump_collection_objects(collection):
collection_objects = []
for object in collection.objects:
if object not in collection_objects:
collection_objects.append(object.uuid)
return collection_objects
def load_collection_objects(dumped_objects, collection):
for object in dumped_objects:
object_ref = utils.find_from_attr('uuid', object, bpy.data.objects)
if object_ref is None:
continue
elif object_ref.name not in collection.objects.keys():
collection.objects.link(object_ref)
for object in collection.objects:
if object.uuid not in dumped_objects:
collection.objects.unlink(object)
def load_collection_childrens(dumped_childrens, collection):
for child_collection in dumped_childrens:
collection_ref = utils.find_from_attr(
'uuid',
child_collection,
bpy.data.collections)
if collection_ref is None:
continue
if collection_ref.name not in collection.children.keys():
collection.children.link(collection_ref)
for child_collection in collection.children:
if child_collection.uuid not in dumped_childrens:
collection.children.unlink(child_collection)
def resolve_collection_dependencies(collection):
deps = []
for child in collection.children:
deps.append(child)
for object in collection.objects:
deps.append(object)
return deps
class BlCollection(BlDatablock):
bl_id = "collections"
bl_icon = 'FILE_FOLDER'
@ -88,50 +30,80 @@ class BlCollection(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = True
bl_reload_parent = False
def _construct(self, data):
if self.is_library:
with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
targetData.collections = [
name for name in sourceData.collections if name == self.data['name']]
instance = bpy.data.collections[self.data['name']]
return instance
instance = bpy.data.collections.new(data["name"])
return instance
def _load_implementation(self, data, target):
loader = Loader()
loader.load(target, data)
# Load other meshes metadata
target.name = data["name"]
# Objects
load_collection_objects(data['objects'], target)
for object in data["objects"]:
object_ref = bpy.data.objects.get(object)
if object_ref is None:
continue
if object not in target.objects.keys():
target.objects.link(object_ref)
for object in target.objects:
if object.name not in data["objects"]:
target.objects.unlink(object)
# Link childrens
load_collection_childrens(data['children'], target)
for collection in data["children"]:
collection_ref = bpy.data.collections.get(collection)
if collection_ref is None:
continue
if collection_ref.name not in target.children.keys():
target.children.link(collection_ref)
for collection in target.children:
if collection.name not in data["children"]:
target.children.unlink(collection)
def _dump_implementation(self, data, instance=None):
assert(instance)
dumper = Dumper()
dumper.depth = 1
dumper.include_filter = [
"name",
"instance_offset"
]
data = dumper.dump(instance)
data = {}
data['name'] = instance.name
# dump objects
data['objects'] = dump_collection_objects(instance)
collection_objects = []
for object in instance.objects:
if object not in collection_objects:
collection_objects.append(object.name)
data['objects'] = collection_objects
# dump children collections
data['children'] = dump_collection_children(instance)
collection_children = []
for child in instance.children:
if child not in collection_children:
collection_children.append(child.name)
data['children'] = collection_children
return data
def _resolve_deps_implementation(self):
return resolve_collection_dependencies(self.instance)
deps = []
for child in self.instance.children:
deps.append(child)
for object in self.instance.objects:
deps.append(object)
return deps

View File

@ -46,107 +46,13 @@ SPLINE_POINT = [
"radius",
]
CURVE_METADATA = [
'align_x',
'align_y',
'bevel_depth',
'bevel_factor_end',
'bevel_factor_mapping_end',
'bevel_factor_mapping_start',
'bevel_factor_start',
'bevel_object',
'bevel_resolution',
'body',
'body_format',
'dimensions',
'eval_time',
'extrude',
'family',
'fill_mode',
'follow_curve',
'font',
'font_bold',
'font_bold_italic',
'font_italic',
'make_local',
'materials',
'name',
'offset',
'offset_x',
'offset_y',
'overflow',
'original',
'override_create',
'override_library',
'path_duration',
'preview',
'render_resolution_u',
'render_resolution_v',
'resolution_u',
'resolution_v',
'shape_keys',
'shear',
'size',
'small_caps_scale',
'space_character',
'space_line',
'space_word',
'type',
'taper_object',
'texspace_location',
'texspace_size',
'transform',
'twist_mode',
'twist_smooth',
'underline_height',
'underline_position',
'use_auto_texspace',
'use_deform_bounds',
'use_fake_user',
'use_fill_caps',
'use_fill_deform',
'use_map_taper',
'use_path',
'use_path_follow',
'use_radius',
'use_stretch',
]
SPLINE_METADATA = [
'hide',
'material_index',
# 'order_u',
# 'order_v',
# 'point_count_u',
# 'point_count_v',
'points',
'radius_interpolation',
'resolution_u',
'resolution_v',
'tilt_interpolation',
'type',
'use_bezier_u',
'use_bezier_v',
'use_cyclic_u',
'use_cyclic_v',
'use_endpoint_u',
'use_endpoint_v',
'use_smooth',
]
class BlCurve(BlDatablock):
bl_id = "curves"
bl_class = bpy.types.Curve
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'CURVE_DATA'
bl_reload_parent = False
def _construct(self, data):
return bpy.data.curves.new(data["name"], data["type"])
@ -156,7 +62,6 @@ class BlCurve(BlDatablock):
loader.load(target, data)
target.splines.clear()
# load splines
for spline in data['splines'].values():
new_spline = target.splines.new(spline['type'])
@ -167,12 +72,8 @@ class BlCurve(BlDatablock):
bezier_points = new_spline.bezier_points
bezier_points.add(spline['bezier_points_count'])
np_load_collection(spline['bezier_points'], bezier_points, SPLINE_BEZIER_POINT)
if new_spline.type == 'POLY':
points = new_spline.points
points.add(spline['points_count'])
np_load_collection(spline['points'], points, SPLINE_POINT)
# Not working for now...
# Not really working for now...
# See https://blender.stackexchange.com/questions/7020/create-nurbs-surface-with-python
if new_spline.type == 'NURBS':
logging.error("NURBS not supported.")
@ -182,14 +83,11 @@ class BlCurve(BlDatablock):
# new_spline.points[point_index], data['splines'][spline]["points"][point_index])
loader.load(new_spline, spline)
def _dump_implementation(self, data, instance=None):
assert(instance)
dumper = Dumper()
# Conflicting attributes
# TODO: remove them with the NURBS support
dumper.include_filter = CURVE_METADATA
dumper.exclude_filter = [
'users',
'order_u',
@ -207,13 +105,8 @@ class BlCurve(BlDatablock):
for index, spline in enumerate(instance.splines):
dumper.depth = 2
dumper.include_filter = SPLINE_METADATA
spline_data = dumper.dump(spline)
if spline.type == 'POLY':
spline_data['points_count'] = len(spline.points)-1
spline_data['points'] = np_dump_collection(spline.points, SPLINE_POINT)
# spline_data['points'] = np_dump_collection(spline.points, SPLINE_POINT)
spline_data['bezier_points_count'] = len(spline.bezier_points)-1
spline_data['bezier_points'] = np_dump_collection(spline.bezier_points, SPLINE_BEZIER_POINT)
data['splines'][index] = spline_data
@ -225,17 +118,3 @@ class BlCurve(BlDatablock):
elif isinstance(instance, T.Curve):
data['type'] = 'CURVE'
return data
def _resolve_deps_implementation(self):
# TODO: resolve material
deps = []
curve = self.instance
if isinstance(curve, T.TextCurve):
deps.extend([
curve.font,
curve.font_bold,
curve.font_bold_italic,
curve.font_italic])
return deps

View File

@ -16,16 +16,13 @@
# ##### END GPL LICENSE BLOCK #####
import logging
from collections.abc import Iterable
import bpy
import mathutils
from replication.constants import DIFF_BINARY, DIFF_JSON, UP
from replication.data import ReplicatedDatablock
from .. import utils
from .dump_anything import Dumper, Loader
from .dump_anything import Loader, Dumper
from ..libs.replication.replication.data import ReplicatedDatablock
from ..libs.replication.replication.constants import (UP, DIFF_BINARY)
def has_action(target):
@ -89,18 +86,6 @@ def load_driver(target_datablock, src_driver):
loader.load(new_point, src_driver['keyframe_points'][src_point])
def get_datablock_from_uuid(uuid, default, ignore=[]):
if not uuid:
return default
for category in dir(bpy.data):
root = getattr(bpy.data, category)
if isinstance(root, Iterable) and category not in ignore:
for item in root:
if getattr(item, 'uuid', None) == uuid:
return item
return default
class BlDatablock(ReplicatedDatablock):
"""BlDatablock
@ -110,29 +95,21 @@ class BlDatablock(ReplicatedDatablock):
bl_delay_apply : refresh rate in sec for apply
bl_automatic_push : boolean
bl_icon : type icon (blender icon name)
bl_check_common: enable check even in common rights
bl_reload_parent: reload parent
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None)
self.preferences = utils.get_preferences()
# TODO: use is_library_indirect
self.is_library = (instance and hasattr(instance, 'library') and
instance.library) or \
(hasattr(self,'data') and self.data and 'library' in self.data)
(self.data and 'library' in self.data)
if instance and hasattr(instance, 'uuid'):
instance.uuid = self.uuid
if logging.getLogger().level == logging.DEBUG:
self.diff_method = DIFF_JSON
else:
self.diff_method = DIFF_BINARY
self.diff_method = DIFF_BINARY
def resolve(self):
datablock_ref = None
@ -140,27 +117,15 @@ class BlDatablock(ReplicatedDatablock):
datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root)
if not datablock_ref:
try:
datablock_ref = datablock_root[self.data['name']]
except Exception:
name = self.data.get('name')
logging.debug(f"Constructing {name}")
datablock_ref = self._construct(data=self.data)
datablock_ref = datablock_root.get(
self.data['name'], # Resolve by name
self._construct(data=self.data)) # If it doesn't exist create it
if datablock_ref:
setattr(datablock_ref, 'uuid', self.uuid)
self.instance = datablock_ref
def remove_instance(self):
"""
Remove instance from blender data
"""
assert(self.instance)
datablock_root = getattr(bpy.data, self.bl_id)
datablock_root.remove(self.instance)
def _dump(self, instance=None):
dumper = Dumper()
data = {}
@ -221,7 +186,6 @@ class BlDatablock(ReplicatedDatablock):
if not self.is_library:
dependencies.extend(self._resolve_deps_implementation())
logging.debug(f"{self.instance} dependencies: {dependencies}")
return dependencies
def _resolve_deps_implementation(self):

View File

@ -1,140 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import logging
import os
import sys
from pathlib import Path
import bpy
import mathutils
from replication.constants import DIFF_BINARY, UP
from replication.data import ReplicatedDatablock
from .. import utils
from .dump_anything import Dumper, Loader
def get_filepath(filename):
"""
Construct the local filepath
"""
return str(Path(
utils.get_preferences().cache_directory,
filename
))
def ensure_unpacked(datablock):
if datablock.packed_file:
logging.info(f"Unpacking {datablock.name}")
filename = Path(bpy.path.abspath(datablock.filepath)).name
datablock.filepath = get_filepath(filename)
datablock.unpack(method="WRITE_ORIGINAL")
class BlFile(ReplicatedDatablock):
bl_id = 'file'
bl_name = "file"
bl_class = Path
bl_delay_refresh = 2
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'FILE'
bl_reload_parent = True
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.instance = kwargs.get('instance', None)
if self.instance and not self.instance.exists():
raise FileNotFoundError(str(self.instance))
self.preferences = utils.get_preferences()
self.diff_method = DIFF_BINARY
def resolve(self):
if self.data:
self.instance = Path(get_filepath(self.data['name']))
if not self.instance.exists():
logging.debug("File don't exist, loading it.")
self._load(self.data, self.instance)
def push(self, socket, identity=None):
super().push(socket, identity=None)
if self.preferences.clear_memory_filecache:
del self.data['file']
def _dump(self, instance=None):
"""
Read the file and return a dict as:
{
name : filename
extension :
file: file content
}
"""
logging.info(f"Extracting file metadata")
data = {
'name': self.instance.name,
}
logging.info(
f"Reading {self.instance.name} content: {self.instance.stat().st_size} bytes")
try:
file = open(self.instance, "rb")
data['file'] = file.read()
file.close()
except IOError:
logging.warning(f"{self.instance} doesn't exist, skipping")
else:
file.close()
return data
def _load(self, data, target):
"""
Writing the file
"""
try:
file = open(target, "wb")
file.write(data['file'])
if self.preferences.clear_memory_filecache:
del self.data['file']
except IOError:
logging.warning(f"{target} doesn't exist, skipping")
else:
file.close()
def diff(self):
if self.preferences.clear_memory_filecache:
return False
else:
memory_size = sys.getsizeof(self.data['file'])-33
disk_size = self.instance.stat().st_size
return memory_size != disk_size

View File

@ -1,75 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import logging
import os
from pathlib import Path
import bpy
from .bl_datablock import BlDatablock
from .bl_file import get_filepath, ensure_unpacked
from .dump_anything import Dumper, Loader
class BlFont(BlDatablock):
bl_id = "fonts"
bl_class = bpy.types.VectorFont
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'FILE_FONT'
bl_reload_parent = False
def _construct(self, data):
filename = data.get('filename')
if filename == '<builtin>':
return bpy.data.fonts.load(filename)
else:
return bpy.data.fonts.load(get_filepath(filename))
def _load(self, data, target):
pass
def _dump(self, instance=None):
if instance.filepath == '<builtin>':
filename = '<builtin>'
else:
filename = Path(instance.filepath).name
if not filename:
raise FileExistsError(instance.filepath)
return {
'filename': filename,
'name': instance.name
}
def diff(self):
return False
def _resolve_deps_implementation(self):
deps = []
if self.instance.filepath and self.instance.filepath != '<builtin>':
ensure_unpacked(self.instance)
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
return deps

View File

@ -38,24 +38,6 @@ STROKE_POINT = [
]
STROKE = [
"aspect",
"display_mode",
"end_cap_mode",
"hardness",
"line_width",
"material_index",
"start_cap_mode",
"uv_rotation",
"uv_scale",
"uv_translation",
"vertex_color_fill",
]
if bpy.app.version[1] >= 91:
STROKE.append('use_cyclic')
else:
STROKE.append('draw_cyclic')
if bpy.app.version[1] >= 83:
STROKE_POINT.append('vertex_color')
@ -106,9 +88,12 @@ def load_stroke(stroke_data, stroke):
"""
assert(stroke and stroke_data)
stroke.points.add(stroke_data["p_count"])
np_load_collection(stroke_data['points'], stroke.points, STROKE_POINT)
loader = Loader()
loader.load(stroke, stroke_data)
stroke.points.add(stroke_data["p_count"])
np_load_collection(stroke_data['points'], stroke.points, STROKE_POINT)
def dump_frame(frame):
@ -123,11 +108,11 @@ def dump_frame(frame):
dumped_frame = dict()
dumped_frame['frame_number'] = frame.frame_number
dumped_frame['strokes'] = np_dump_collection(frame.strokes, STROKE)
dumped_frame['strokes_points'] = []
dumped_frame['strokes'] = []
# TODO: took existing strokes in account
for stroke in frame.strokes:
dumped_frame['strokes_points'].append(dump_stroke(stroke))
dumped_frame['strokes'].append(dump_stroke(stroke))
return dumped_frame
@ -143,11 +128,14 @@ def load_frame(frame_data, frame):
assert(frame and frame_data)
for stroke_data in frame_data['strokes_points']:
# frame.frame_number = frame_data['frame_number']
# TODO: took existing stroke in account
for stroke_data in frame_data['strokes']:
target_stroke = frame.strokes.new()
load_stroke(stroke_data, target_stroke)
np_load_collection(frame_data['strokes'], frame.strokes, STROKE)
def dump_layer(layer):
""" Dump a grease pencil layer
@ -195,9 +183,6 @@ def dump_layer(layer):
# 'parent_bone',
# 'matrix_inverse',
]
if layer.id_data.is_annotation:
dumper.include_filter.append('thickness')
dumped_layer = dumper.dump(layer)
dumped_layer['frames'] = []
@ -233,9 +218,7 @@ class BlGpencil(BlDatablock):
bl_delay_refresh = 2
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'GREASEPENCIL'
bl_reload_parent = False
def _construct(self, data):
return bpy.data.grease_pencils.new(data["name"])
@ -248,7 +231,7 @@ class BlGpencil(BlDatablock):
loader = Loader()
loader.load(target, data)
# TODO: reuse existing layer
for layer in target.layers:
target.layers.remove(layer)
@ -264,7 +247,8 @@ class BlGpencil(BlDatablock):
# target_layer.clear()
load_layer(layer_data, target_layer)
@ -283,7 +267,7 @@ class BlGpencil(BlDatablock):
data = dumper.dump(instance)
data['layers'] = {}
for layer in instance.layers:
data['layers'][layer.info] = dump_layer(layer)

View File

@ -16,111 +16,90 @@
# ##### END GPL LICENSE BLOCK #####
import logging
import os
from pathlib import Path
import bpy
import mathutils
import os
import logging
from .. import utils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from .dump_anything import Dumper, Loader
from .bl_file import get_filepath, ensure_unpacked
format_to_ext = {
'BMP': 'bmp',
'IRIS': 'sgi',
'PNG': 'png',
'JPEG': 'jpg',
'JPEG2000': 'jp2',
'TARGA': 'tga',
'TARGA_RAW': 'tga',
'CINEON': 'cin',
'DPX': 'dpx',
'OPEN_EXR_MULTILAYER': 'exr',
'OPEN_EXR': 'exr',
'HDR': 'hdr',
'TIFF': 'tiff',
'AVI_JPEG': 'avi',
'AVI_RAW': 'avi',
'FFMPEG': 'mpeg',
}
def dump_image(image):
pixels = None
if image.source == "GENERATED" or image.packed_file is not None:
prefs = utils.get_preferences()
img_name = f"{image.name}.png"
# Cache the image on the disk
image.filepath_raw = os.path.join(prefs.cache_directory, img_name)
os.makedirs(prefs.cache_directory, exist_ok=True)
image.file_format = "PNG"
image.save()
if image.source == "FILE":
image_path = bpy.path.abspath(image.filepath_raw)
image_directory = os.path.dirname(image_path)
os.makedirs(image_directory, exist_ok=True)
image.save()
file = open(image_path, "rb")
pixels = file.read()
file.close()
else:
raise ValueError()
return pixels
class BlImage(BlDatablock):
bl_id = "images"
bl_class = bpy.types.Image
bl_delay_refresh = 2
bl_delay_refresh = 0
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_automatic_push = False
bl_icon = 'IMAGE_DATA'
bl_reload_parent = False
def _construct(self, data):
return bpy.data.images.new(
name=data['name'],
width=data['size'][0],
height=data['size'][1]
)
name=data['name'],
width=data['size'][0],
height=data['size'][1]
)
def _load(self, data, target):
loader = Loader()
loader.load(data, target)
image = target
prefs = utils.get_preferences()
img_name = f"{image.name}.png"
img_path = os.path.join(prefs.cache_directory,img_name)
os.makedirs(prefs.cache_directory, exist_ok=True)
file = open(img_path, 'wb')
file.write(data["pixels"])
file.close()
image.source = 'FILE'
image.filepath = img_path
image.colorspace_settings.name = data["colorspace_settings"]["name"]
target.source = 'FILE'
target.filepath_raw = get_filepath(data['filename'])
target.colorspace_settings.name = data["colorspace_settings"]["name"]
def _dump(self, instance=None):
assert(instance)
filename = Path(instance.filepath).name
data = {
"filename": filename
}
data = {}
data['pixels'] = dump_image(instance)
dumper = Dumper()
dumper.depth = 2
dumper.include_filter = [
"name",
'size',
'height',
'alpha',
'float_buffer',
'alpha_mode',
'colorspace_settings']
dumper.include_filter = [
"name",
'size',
'height',
'alpha',
'float_buffer',
'filepath',
'source',
'colorspace_settings']
data.update(dumper.dump(instance))
return data
def diff(self):
if self.instance.is_dirty:
self.instance.save()
return False
if self.instance and (self.instance.name != self.data['name']):
return True
else:
return False
def _resolve_deps_implementation(self):
deps = []
if self.instance.packed_file:
filename = Path(bpy.path.abspath(self.instance.filepath)).name
self.instance.filepath_raw = get_filepath(filename)
self.instance.save()
# An image can't be unpacked to the modified path
# TODO: make a bug report
self.instance.unpack(method="REMOVE")
elif self.instance.source == "GENERATED":
filename = f"{self.instance.name}.png"
self.instance.filepath = get_filepath(filename)
self.instance.save()
if self.instance.filepath:
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
return deps

View File

@ -21,7 +21,7 @@ import mathutils
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
from .bl_datablock import BlDatablock
from replication.exception import ContextError
from ..libs.replication.replication.exception import ContextError
POINT = ['co', 'weight_softbody', 'co_deform']
@ -32,9 +32,7 @@ class BlLattice(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'LATTICE_DATA'
bl_reload_parent = False
def _construct(self, data):
return bpy.data.lattices.new(data["name"])

View File

@ -29,9 +29,7 @@ class BlLibrary(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'LIBRARY_DATA_DIRECT'
bl_reload_parent = False
def _construct(self, data):
with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData):

View File

@ -29,9 +29,7 @@ class BlLight(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'LIGHT_DATA'
bl_reload_parent = False
def _construct(self, data):
return bpy.data.lights.new(data["name"], data["type"])

View File

@ -30,9 +30,7 @@ class BlLightprobe(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'LIGHTPROBE_GRID'
bl_reload_parent = False
def _construct(self, data):
type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type']

View File

@ -19,14 +19,10 @@
import bpy
import mathutils
import logging
import re
from uuid import uuid4
from .. import utils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock, get_datablock_from_uuid
NODE_SOCKET_INDEX = re.compile(r'\[(\d*)\]')
from .bl_datablock import BlDatablock
def load_node(node_data, node_tree):
@ -39,45 +35,22 @@ def load_node(node_data, node_tree):
"""
loader = Loader()
target_node = node_tree.nodes.new(type=node_data["bl_idname"])
target_node.select = False
loader.load(target_node, node_data)
image_uuid = node_data.get('image_uuid', None)
node_tree_uuid = node_data.get('node_tree_uuid', None)
if image_uuid and not target_node.image:
target_node.image = get_datablock_from_uuid(image_uuid, None)
loader.load(target_node, node_data)
if node_tree_uuid:
target_node.node_tree = get_datablock_from_uuid(node_tree_uuid, None)
inputs_data = node_data.get('inputs')
if inputs_data:
inputs = target_node.inputs
for idx, inpt in enumerate(inputs_data):
if idx < len(inputs) and hasattr(inputs[idx], "default_value"):
try:
inputs[idx].default_value = inpt
except Exception as e:
logging.warning(f"Node {target_node.name} input {inputs[idx].name} parameter not supported, skipping ({e})")
else:
logging.warning(f"Node {target_node.name} input length mismatch.")
outputs_data = node_data.get('outputs')
if outputs_data:
outputs = target_node.outputs
for idx, output in enumerate(outputs_data):
if idx < len(outputs) and hasattr(outputs[idx], "default_value"):
try:
outputs[idx].default_value = output
except Exception as e:
logging.warning(f"Node {target_node.name} output {outputs[idx].name} parameter not supported, skipping ({e})")
else:
logging.warning(f"Node {target_node.name} output length mismatch.")
for input in node_data["inputs"]:
if hasattr(target_node.inputs[input], "default_value"):
try:
target_node.inputs[input].default_value = node_data["inputs"][input]["default_value"]
except:
logging.error(f"Material {input} parameter not supported, skipping")
def load_links(links_data, node_tree):
""" Load node_tree links from a list
:arg links_data: dumped node links
:type links_data: list
:arg node_tree: node links collection
@ -85,10 +58,9 @@ def load_links(links_data, node_tree):
"""
for link in links_data:
input_socket = node_tree.nodes[link['to_node']
].inputs[int(link['to_socket'])]
output_socket = node_tree.nodes[link['from_node']].outputs[int(
link['from_socket'])]
input_socket = node_tree.nodes[link['to_node']].inputs[int(link['to_socket'])]
output_socket = node_tree.nodes[link['from_node']].outputs[int(link['from_socket'])]
node_tree.links.new(input_socket, output_socket)
@ -103,15 +75,11 @@ def dump_links(links):
links_data = []
for link in links:
to_socket = NODE_SOCKET_INDEX.search(
link.to_socket.path_from_id()).group(1)
from_socket = NODE_SOCKET_INDEX.search(
link.from_socket.path_from_id()).group(1)
links_data.append({
'to_node': link.to_node.name,
'to_socket': to_socket,
'from_node': link.from_node.name,
'from_socket': from_socket,
'to_node':link.to_node.name,
'to_socket':link.to_socket.path_from_id()[-2:-1],
'from_node':link.from_node.name,
'from_socket':link.from_socket.path_from_id()[-2:-1],
})
return links_data
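The two variants above differ in how they recover the socket index from the RNA path: the regex NODE_SOCKET_INDEX captures the whole bracketed number, while the older slice path_from_id()[-2:-1] keeps only a single character. A minimal sketch of the difference (the path string is an illustrative assumption):

import re

NODE_SOCKET_INDEX = re.compile(r'\[(\d*)\]')

path = 'nodes["Principled BSDF"].inputs[12]'    # hypothetical RNA path
print(NODE_SOCKET_INDEX.search(path).group(1))  # '12'
print(path[-2:-1])                              # '2' -> breaks past index 9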
@ -132,7 +100,6 @@ def dump_node(node):
"show_expanded",
"name_full",
"select",
"bl_label",
"bl_height_min",
"bl_height_max",
"bl_height_default",
@ -149,31 +116,22 @@ def dump_node(node):
"show_preview",
"show_texture",
"outputs",
"width_hidden",
"image"
"width_hidden"
]
dumped_node = node_dumper.dump(node)
dump_io_needed = (node.type not in ['REROUTE','OUTPUT_MATERIAL'])
if hasattr(node, 'inputs'):
dumped_node['inputs'] = {}
if dump_io_needed:
io_dumper = Dumper()
io_dumper.depth = 2
io_dumper.include_filter = ["default_value"]
if hasattr(node, 'inputs'):
dumped_node['inputs'] = []
for idx, inpt in enumerate(node.inputs):
if hasattr(inpt, 'default_value'):
dumped_node['inputs'].append(io_dumper.dump(inpt.default_value))
if hasattr(node, 'outputs'):
dumped_node['outputs'] = []
for idx, output in enumerate(node.outputs):
if hasattr(output, 'default_value'):
dumped_node['outputs'].append(io_dumper.dump(output.default_value))
for i in node.inputs:
input_dumper = Dumper()
input_dumper.depth = 2
input_dumper.include_filter = ["default_value"]
if hasattr(i, 'default_value'):
dumped_node['inputs'][i.name] = input_dumper.dump(
i)
if hasattr(node, 'color_ramp'):
ramp_dumper = Dumper()
ramp_dumper.depth = 4
@ -181,9 +139,7 @@ def dump_node(node):
'elements',
'alpha',
'color',
'position',
'interpolation',
'color_mode'
'position'
]
dumped_node['color_ramp'] = ramp_dumper.dump(node.color_ramp)
if hasattr(node, 'mapping'):
@ -195,225 +151,106 @@ def dump_node(node):
'location'
]
dumped_node['mapping'] = curve_dumper.dump(node.mapping)
if hasattr(node, 'image') and getattr(node, 'image'):
dumped_node['image_uuid'] = node.image.uuid
if hasattr(node, 'node_tree') and getattr(node, 'node_tree'):
dumped_node['node_tree_uuid'] = node.node_tree.uuid
return dumped_node
def dump_shader_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict:
""" Dump a shader node_tree to a dict including links and nodes
:arg node_tree: dumped shader node tree
:type node_tree: bpy.types.ShaderNodeTree
:return: dict
"""
node_tree_data = {
'nodes': {node.name: dump_node(node) for node in node_tree.nodes},
'links': dump_links(node_tree.links),
'name': node_tree.name,
'type': type(node_tree).__name__
}
for socket_id in ['inputs', 'outputs']:
socket_collection = getattr(node_tree, socket_id)
node_tree_data[socket_id] = dump_node_tree_sockets(socket_collection)
return node_tree_data
def dump_node_tree_sockets(sockets: bpy.types.Collection) -> list:
""" Dump the sockets of a shader node_tree
:arg sockets: socket collection to dump
:type sockets: bpy.types.Collection
:return: list of (name, bl_socket_idname, uuid) tuples
"""
sockets_data = []
for socket in sockets:
try:
socket_uuid = socket['uuid']
except Exception:
socket_uuid = str(uuid4())
socket['uuid'] = socket_uuid
sockets_data.append((socket.name, socket.bl_socket_idname, socket_uuid))
return sockets_data
def load_node_tree_sockets(sockets: bpy.types.Collection,
sockets_data: dict):
""" load sockets of a shader_node_tree
:arg target_node_tree: target node_tree
:type target_node_tree: bpy.types.NodeTree
:arg socket_id: socket identifer
:type socket_id: str
:arg socket_data: dumped socket data
:type socket_data: dict
"""
# Check for removed sockets
for socket in sockets:
if not [s for s in sockets_data if socket['uuid'] == s[2]]:
sockets.remove(socket)
# Check for new sockets
for idx, socket_data in enumerate(sockets_data):
try:
checked_socket = sockets[idx]
if checked_socket.name != socket_data[0]:
checked_socket.name = socket_data[0]
except Exception:
s = sockets.new(socket_data[1], socket_data[0])
s['uuid'] = socket_data[2]
def load_shader_node_tree(node_tree_data:dict, target_node_tree:bpy.types.ShaderNodeTree)->dict:
"""Load a shader node_tree from dumped data
:arg node_tree_data: dumped node data
:type node_tree_data: dict
:arg target_node_tree: target node_tree
:type target_node_tree: bpy.types.NodeTree
"""
# TODO: load only required nodes
target_node_tree.nodes.clear()
if not target_node_tree.is_property_readonly('name'):
target_node_tree.name = node_tree_data['name']
if 'inputs' in node_tree_data:
socket_collection = getattr(target_node_tree, 'inputs')
load_node_tree_sockets(socket_collection, node_tree_data['inputs'])
if 'outputs' in node_tree_data:
socket_collection = getattr(target_node_tree, 'outputs')
load_node_tree_sockets(socket_collection,node_tree_data['outputs'])
# Load nodes
for node in node_tree_data["nodes"]:
load_node(node_tree_data["nodes"][node], target_node_tree)
# TODO: load only required nodes links
# Load nodes links
target_node_tree.links.clear()
load_links(node_tree_data["links"], target_node_tree)
def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
has_image = lambda node : (node.type in ['TEX_IMAGE', 'TEX_ENVIRONMENT'] and node.image)
has_node_group = lambda node : (hasattr(node,'node_tree') and node.node_tree)
deps = []
for node in node_tree.nodes:
if has_image(node):
deps.append(node.image)
elif has_node_group(node):
deps.append(node.node_tree)
return deps
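A short usage sketch for the helper above, assuming a material named "Material" exists and uses nodes; it simply lists the images and nested node groups that would be replicated before the material itself:

import bpy

mat = bpy.data.materials.get("Material")   # hypothetical material name
if mat and mat.use_nodes:
    for dep in get_node_tree_dependencies(mat.node_tree):
        print(dep.name)                    # images and nested node groups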
class BlMaterial(BlDatablock):
bl_id = "materials"
bl_class = bpy.types.Material
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'MATERIAL_DATA'
bl_reload_parent = False
def _construct(self, data):
return bpy.data.materials.new(data["name"])
def _load_implementation(self, data, target):
loader = Loader()
is_grease_pencil = data.get('is_grease_pencil')
use_nodes = data.get('use_nodes')
loader.load(target, data)
if is_grease_pencil:
target.name = data['name']
if data['is_grease_pencil']:
if not target.is_grease_pencil:
bpy.data.materials.create_gpencil_data(target)
loader.load(target.grease_pencil, data['grease_pencil'])
elif use_nodes:
loader.load(
target.grease_pencil, data['grease_pencil'])
if data["use_nodes"]:
if target.node_tree is None:
target.use_nodes = True
load_shader_node_tree(data['node_tree'], target.node_tree)
target.node_tree.nodes.clear()
loader.load(target,data)
# Load nodes
for node in data["node_tree"]["nodes"]:
load_node(data["node_tree"]["nodes"][node], target.node_tree)
# Load nodes links
target.node_tree.links.clear()
load_links(data["node_tree"]["links"], target.node_tree)
def _dump_implementation(self, data, instance=None):
assert(instance)
mat_dumper = Dumper()
mat_dumper.depth = 2
mat_dumper.include_filter = [
'name',
'blend_method',
'shadow_method',
'alpha_threshold',
'show_transparent_back',
'use_backface_culling',
'use_screen_refraction',
'use_sss_translucency',
'refraction_depth',
'preview_render_type',
'use_preview_world',
'pass_index',
'use_nodes',
'diffuse_color',
'specular_color',
'roughness',
'specular_intensity',
'metallic',
'line_color',
'line_priority',
'is_grease_pencil'
mat_dumper.exclude_filter = [
"is_embed_data",
"is_evaluated",
"name_full",
"bl_description",
"bl_icon",
"bl_idname",
"bl_label",
"preview",
"original",
"uuid",
"users",
"alpha_threshold",
"line_color",
"view_center",
]
data = mat_dumper.dump(instance)
if instance.use_nodes:
nodes = {}
for node in instance.node_tree.nodes:
nodes[node.name] = dump_node(node)
data["node_tree"]['nodes'] = nodes
data["node_tree"]["links"] = dump_links(instance.node_tree.links)
if instance.is_grease_pencil:
gp_mat_dumper = Dumper()
gp_mat_dumper.depth = 3
gp_mat_dumper.include_filter = [
'color',
'fill_color',
'mix_color',
'mix_factor',
'mix_stroke_factor',
# 'texture_angle',
# 'texture_scale',
# 'texture_offset',
'pixel_size',
'hide',
'lock',
'ghost',
# 'texture_clamp',
'flip',
'use_overlap_strokes',
'show_stroke',
'show_fill',
'alignment_mode',
'pass_index',
'mode',
'stroke_style',
# 'stroke_image',
'color',
'use_overlap_strokes',
'show_fill',
'fill_style',
'gradient_type',
'fill_color',
'pass_index',
'alignment_mode',
# 'fill_image',
'texture_opacity',
'mix_factor',
'texture_offset',
'texture_angle',
'texture_scale',
'texture_clamp',
'gradient_type',
'mix_color',
'flip'
]
data['grease_pencil'] = gp_mat_dumper.dump(instance.grease_pencil)
elif instance.use_nodes:
data['node_tree'] = dump_shader_node_tree(instance.node_tree)
return data
def _resolve_deps_implementation(self):
@ -421,8 +258,11 @@ class BlMaterial(BlDatablock):
deps = []
if self.instance.use_nodes:
deps.extend(get_node_tree_dependencies(self.instance.node_tree))
for node in self.instance.node_tree.nodes:
if node.type == 'TEX_IMAGE':
deps.append(node.image)
if self.is_library:
deps.append(self.instance.library)
return deps

View File

@ -23,9 +23,10 @@ import logging
import numpy as np
from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
from replication.constants import DIFF_BINARY
from replication.exception import ContextError
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from ..libs.replication.replication.constants import DIFF_BINARY
from ..libs.replication.replication.exception import ContextError
from .bl_datablock import BlDatablock
VERTICE = ['co']
@ -52,9 +53,7 @@ class BlMesh(BlDatablock):
bl_delay_refresh = 2
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'MESH_DATA'
bl_reload_parent = False
def _construct(self, data):
instance = bpy.data.meshes.new(data["name"])
@ -71,17 +70,8 @@ class BlMesh(BlDatablock):
# MATERIAL SLOTS
target.materials.clear()
for mat_uuid, mat_name in data["material_list"]:
mat_ref = None
if mat_uuid is not None:
mat_ref = get_datablock_from_uuid(mat_uuid, None)
else:
mat_ref = bpy.data.materials.get(mat_name, None)
if mat_ref is None:
raise Exception("Material doesn't exist")
target.materials.append(mat_ref)
for m in data["material_list"]:
target.materials.append(bpy.data.materials[m])
# CLEAR GEOMETRY
if target.vertices:
@ -99,34 +89,32 @@ class BlMesh(BlDatablock):
np_load_collection(data["polygons"],target.polygons, POLYGON)
# UV Layers
if 'uv_layers' in data.keys():
for layer in data['uv_layers']:
if layer not in target.uv_layers:
target.uv_layers.new(name=layer)
for layer in data['uv_layers']:
if layer not in target.uv_layers:
target.uv_layers.new(name=layer)
np_load_collection_primitives(
target.uv_layers[layer].data,
'uv',
data["uv_layers"][layer]['data'])
np_load_collection_primitives(
target.uv_layers[layer].data,
'uv',
data["uv_layers"][layer]['data'])
# Vertex color
if 'vertex_colors' in data.keys():
for color_layer in data['vertex_colors']:
if color_layer not in target.vertex_colors:
target.vertex_colors.new(name=color_layer)
for color_layer in data['vertex_colors']:
if color_layer not in target.vertex_colors:
target.vertex_colors.new(name=color_layer)
np_load_collection_primitives(
target.vertex_colors[color_layer].data,
'color',
data["vertex_colors"][color_layer]['data'])
np_load_collection_primitives(
target.vertex_colors[color_layer].data,
'color',
data["vertex_colors"][color_layer]['data'])
target.validate()
target.update()
def _dump_implementation(self, data, instance=None):
assert(instance)
if instance.is_editmode and not self.preferences.sync_flags.sync_during_editmode:
if instance.is_editmode:
raise ContextError("Mesh is in edit mode")
mesh = instance
@ -159,21 +147,24 @@ class BlMesh(BlDatablock):
data["loops"] = np_dump_collection(mesh.loops, LOOP)
# UV Layers
if mesh.uv_layers:
data['uv_layers'] = {}
for layer in mesh.uv_layers:
data['uv_layers'][layer.name] = {}
data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive(layer.data, 'uv')
data['uv_layers'] = {}
for layer in mesh.uv_layers:
data['uv_layers'][layer.name] = {}
data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive(layer.data, 'uv')
# Vertex color
if mesh.vertex_colors:
data['vertex_colors'] = {}
for color_map in mesh.vertex_colors:
data['vertex_colors'][color_map.name] = {}
data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')
data['vertex_colors'] = {}
for color_map in mesh.vertex_colors:
data['vertex_colors'][color_map.name] = {}
data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')
# Fix material index
data['material_list'] = [(m.uuid, m.name) for m in instance.materials if m]
m_list = []
for material in instance.materials:
if material:
m_list.append(material.name)
data['material_list'] = m_list
return data

View File

@ -68,9 +68,7 @@ class BlMetaball(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'META_BALL'
bl_reload_parent = False
def _construct(self, data):
return bpy.data.metaballs.new(data["name"])

View File

@ -1,48 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
import mathutils
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
from .bl_datablock import BlDatablock
from .bl_material import (dump_shader_node_tree,
load_shader_node_tree,
get_node_tree_dependencies)
class BlNodeGroup(BlDatablock):
bl_id = "node_groups"
bl_class = bpy.types.ShaderNodeTree
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'NODETREE'
bl_reload_parent = False
def _construct(self, data):
return bpy.data.node_groups.new(data["name"], data["type"])
def _load_implementation(self, data, target):
load_shader_node_tree(data, target)
def _dump_implementation(self, data, instance=None):
return dump_shader_node_tree(instance)
def _resolve_deps_implementation(self):
return get_node_tree_dependencies(self.instance)
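Since BlNodeGroup delegates to the shader node-tree helpers from bl_material, a node group is dumped with the same nodes/links/sockets structure as a material tree. A hedged sketch, assuming a group named "MyGroup" exists:

import bpy

group = bpy.data.node_groups.get("MyGroup")   # hypothetical group name
if group:
    dumped = dump_shader_node_tree(group)
    # expected keys (per the dumper above): 'nodes', 'links', 'name', 'type',
    # plus the 'inputs'/'outputs' socket descriptions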

View File

@ -16,14 +16,13 @@
# ##### END GPL LICENSE BLOCK #####
import logging
import bpy
import mathutils
from replication.exception import ContextError
import logging
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from .dump_anything import Dumper, Loader
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from ..libs.replication.replication.exception import ContextError
def load_pose(target_bone, data):
@ -32,77 +31,13 @@ def load_pose(target_bone, data):
loader.load(target_bone, data)
def find_data_from_name(name=None):
instance = None
if not name:
pass
elif name in bpy.data.meshes.keys():
instance = bpy.data.meshes[name]
elif name in bpy.data.lights.keys():
instance = bpy.data.lights[name]
elif name in bpy.data.cameras.keys():
instance = bpy.data.cameras[name]
elif name in bpy.data.curves.keys():
instance = bpy.data.curves[name]
elif name in bpy.data.metaballs.keys():
instance = bpy.data.metaballs[name]
elif name in bpy.data.armatures.keys():
instance = bpy.data.armatures[name]
elif name in bpy.data.grease_pencils.keys():
instance = bpy.data.grease_pencils[name]
elif name in bpy.data.curves.keys():
instance = bpy.data.curves[name]
elif name in bpy.data.lattices.keys():
instance = bpy.data.lattices[name]
elif name in bpy.data.speakers.keys():
instance = bpy.data.speakers[name]
elif name in bpy.data.lightprobes.keys():
# Only supported since 2.83
if bpy.app.version[1] >= 83:
instance = bpy.data.lightprobes[name]
else:
logging.warning(
"Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
elif bpy.app.version[1] >= 91 and name in bpy.data.volumes.keys():
# Only supported since 2.91
instance = bpy.data.volumes[name]
return instance
def load_data(object, name):
logging.info("loading data")
pass
def _is_editmode(object: bpy.types.Object) -> bool:
child_data = getattr(object, 'data', None)
return (child_data and
hasattr(child_data, 'is_editmode') and
child_data.is_editmode)
def find_textures_dependencies(collection):
""" Check collection
"""
textures = []
for item in collection:
for attr in dir(item):
inst = getattr(item, attr)
if issubclass(type(inst), bpy.types.Texture) and inst is not None:
textures.append(inst)
return textures
class BlObject(BlDatablock):
bl_id = "objects"
bl_class = bpy.types.Object
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'OBJECT_DATA'
bl_reload_parent = False
def _construct(self, data):
instance = None
@ -117,35 +52,80 @@ class BlObject(BlDatablock):
return instance
# TODO: refactoring
object_name = data.get("name")
data_uuid = data.get("data_uuid")
data_id = data.get("data")
object_data = get_datablock_from_uuid(
data_uuid,
find_data_from_name(data_id),
ignore=['images']) # TODO: use resolve_from_id
instance = bpy.data.objects.new(object_name, object_data)
if "data" not in data:
pass
elif data["data"] in bpy.data.meshes.keys():
instance = bpy.data.meshes[data["data"]]
elif data["data"] in bpy.data.lights.keys():
instance = bpy.data.lights[data["data"]]
elif data["data"] in bpy.data.cameras.keys():
instance = bpy.data.cameras[data["data"]]
elif data["data"] in bpy.data.curves.keys():
instance = bpy.data.curves[data["data"]]
elif data["data"] in bpy.data.metaballs.keys():
instance = bpy.data.metaballs[data["data"]]
elif data["data"] in bpy.data.armatures.keys():
instance = bpy.data.armatures[data["data"]]
elif data["data"] in bpy.data.grease_pencils.keys():
instance = bpy.data.grease_pencils[data["data"]]
elif data["data"] in bpy.data.curves.keys():
instance = bpy.data.curves[data["data"]]
elif data["data"] in bpy.data.lattices.keys():
instance = bpy.data.lattices[data["data"]]
elif data["data"] in bpy.data.speakers.keys():
instance = bpy.data.speakers[data["data"]]
elif data["data"] in bpy.data.lightprobes.keys():
# Only supported since 2.83
if bpy.app.version[1] >= 83:
instance = bpy.data.lightprobes[data["data"]]
else:
logging.warning(
"Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
instance = bpy.data.objects.new(data["name"], instance)
instance.uuid = self.uuid
return instance
def _load_implementation(self, data, target):
# Load transformation data
loader = Loader()
loader.load(target, data)
data_uuid = data.get("data_uuid")
data_id = data.get("data")
# Pose
if 'pose' in data:
if not target.pose:
raise Exception('No pose data yet (Fixed in a near future)')
# Bone groups
for bg_name in data['pose']['bone_groups']:
bg_data = data['pose']['bone_groups'].get(bg_name)
bg_target = target.pose.bone_groups.get(bg_name)
if target.data and (target.data.name != data_id):
target.data = get_datablock_from_uuid(
data_uuid, find_data_from_name(data_id), ignore=['images'])
if not bg_target:
bg_target = target.pose.bone_groups.new(name=bg_name)
loader.load(bg_target, bg_data)
# target.pose.bone_groups.get
# Bones
for bone in data['pose']['bones']:
target_bone = target.pose.bones.get(bone)
bone_data = data['pose']['bones'].get(bone)
if 'constraints' in bone_data.keys():
loader.load(target_bone, bone_data['constraints'])
load_pose(target_bone, bone_data)
if 'bone_index' in bone_data.keys():
target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']]
# vertex groups
if 'vertex_groups' in data:
target.vertex_groups.clear()
for vg in data['vertex_groups']:
vertex_group = target.vertex_groups.new(name=vg['name'])
point_attr = 'vertices' if 'vertices' in vg else 'points'
point_attr = 'vertices' if 'vertices' in vg else 'points'
for vert in vg[point_attr]:
vertex_group.add(
[vert['index']], vert['weight'], 'REPLACE')
@ -172,53 +152,13 @@ class BlObject(BlDatablock):
target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference]
# Load transformation data
loader.load(target, data)
loader.load(target.display, data['display'])
# Pose
if 'pose' in data:
if not target.pose:
raise Exception('No pose data yet (Fixed in a near future)')
# Bone groups
for bg_name in data['pose']['bone_groups']:
bg_data = data['pose']['bone_groups'].get(bg_name)
bg_target = target.pose.bone_groups.get(bg_name)
if not bg_target:
bg_target = target.pose.bone_groups.new(name=bg_name)
loader.load(bg_target, bg_data)
# target.pose.bone_groups.get
# Bones
for bone in data['pose']['bones']:
target_bone = target.pose.bones.get(bone)
bone_data = data['pose']['bones'].get(bone)
if 'constraints' in bone_data.keys():
loader.load(target_bone, bone_data['constraints'])
load_pose(target_bone, bone_data)
if 'bone_index' in bone_data.keys():
target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']]
# TODO: find another way...
if target.empty_display_type == "IMAGE":
img_uuid = data.get('data_uuid')
if target.data is None and img_uuid:
target.data = get_datablock_from_uuid(img_uuid, None)
def _dump_implementation(self, data, instance=None):
assert(instance)
if _is_editmode(instance):
if self.preferences.sync_flags.sync_during_editmode:
instance.update_from_editmode()
else:
raise ContextError("Object is in edit-mode.")
child_data = getattr(instance, 'data', None)
if child_data and hasattr(child_data, 'is_editmode') and child_data.is_editmode:
raise ContextError("Object is in edit-mode.")
dumper = Dumper()
dumper.depth = 1
@ -231,77 +171,28 @@ class BlObject(BlDatablock):
"library",
"empty_display_type",
"empty_display_size",
"empty_image_offset",
"empty_image_depth",
"empty_image_side",
"show_empty_image_orthographic",
"show_empty_image_perspective",
"show_empty_image_only_axis_aligned",
"use_empty_image_alpha",
"color",
"instance_collection",
"instance_type",
"location",
"scale",
'lock_location',
'lock_rotation',
'lock_scale',
'hide_render',
'display_type',
'display_bounds_type',
'show_bounds',
'show_name',
'show_axis',
'show_wire',
'show_all_edges',
'show_texture_space',
'show_in_front',
'type',
'rotation_quaternion' if instance.rotation_mode == 'QUATERNION' else 'rotation_euler',
]
data = dumper.dump(instance)
dumper.include_filter = [
'show_shadows',
]
data['display'] = dumper.dump(instance.display)
data['data_uuid'] = getattr(instance.data, 'uuid', None)
if self.is_library:
return data
# MODIFIERS
modifiers = getattr(instance,'modifiers', None )
if modifiers:
if hasattr(instance, 'modifiers'):
dumper.include_filter = None
dumper.depth = 1
dumper.depth = 2
data["modifiers"] = {}
for index, modifier in enumerate(modifiers):
for index, modifier in enumerate(instance.modifiers):
data["modifiers"][modifier.name] = dumper.dump(modifier)
gp_modifiers = getattr(instance, 'grease_pencil_modifiers', None)
if gp_modifiers:
dumper.include_filter = None
dumper.depth = 1
gp_modifiers_data = data["grease_pencil_modifiers"] = {}
for index, modifier in enumerate(gp_modifiers):
gp_mod_data = gp_modifiers_data[modifier.name] = dict()
gp_mod_data.update(dumper.dump(modifier))
if hasattr(modifier, 'use_custom_curve') \
and modifier.use_custom_curve:
curve_dumper = Dumper()
curve_dumper.depth = 5
curve_dumper.include_filter = [
'curves',
'points',
'location']
gp_mod_data['curve'] = curve_dumper.dump(modifier.curve)
# CONSTRAINTS
# OBJECT
if hasattr(instance, 'constraints'):
dumper.depth = 3
data["constraints"] = dumper.dump(instance.constraints)
@ -354,8 +245,7 @@ class BlObject(BlDatablock):
# VERTEX GROUP
if len(instance.vertex_groups) > 0:
points_attr = 'vertices' if isinstance(
instance.data, bpy.types.Mesh) else 'points'
points_attr = 'vertices' if isinstance(instance.data, bpy.types.Mesh) else 'points'
vg_data = []
for vg in instance.vertex_groups:
vg_idx = vg.index
@ -410,7 +300,7 @@ class BlObject(BlDatablock):
def _resolve_deps_implementation(self):
deps = []
# Avoid Empty case
if self.instance.data:
deps.append(self.instance.data)
@ -424,7 +314,5 @@ class BlObject(BlDatablock):
# TODO: uuid based
deps.append(self.instance.instance_collection)
if self.instance.modifiers:
deps.extend(find_textures_dependencies(self.instance.modifiers))
return deps

View File

@ -16,259 +16,13 @@
# ##### END GPL LICENSE BLOCK #####
import logging
import bpy
import mathutils
from deepdiff import DeepDiff
from replication.constants import DIFF_JSON, MODIFIED
from .bl_collection import (dump_collection_children, dump_collection_objects,
load_collection_childrens, load_collection_objects,
resolve_collection_dependencies)
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from .dump_anything import Dumper, Loader
RENDER_SETTINGS = [
'dither_intensity',
'engine',
'film_transparent',
'filter_size',
'fps',
'fps_base',
'frame_map_new',
'frame_map_old',
'hair_subdiv',
'hair_type',
'line_thickness',
'line_thickness_mode',
'metadata_input',
'motion_blur_shutter',
'pixel_aspect_x',
'pixel_aspect_y',
'preview_pixel_size',
'preview_start_resolution',
'resolution_percentage',
'resolution_x',
'resolution_y',
'sequencer_gl_preview',
'use_bake_clear',
'use_bake_lores_mesh',
'use_bake_multires',
'use_bake_selected_to_active',
'use_bake_user_scale',
'use_border',
'use_compositing',
'use_crop_to_border',
'use_file_extension',
'use_freestyle',
'use_full_sample',
'use_high_quality_normals',
'use_lock_interface',
'use_motion_blur',
'use_multiview',
'use_sequencer',
'use_sequencer_override_scene_strip',
'use_single_layer',
'views_format',
]
EVEE_SETTINGS = [
'gi_diffuse_bounces',
'gi_cubemap_resolution',
'gi_visibility_resolution',
'gi_irradiance_smoothing',
'gi_glossy_clamp',
'gi_filter_quality',
'gi_show_irradiance',
'gi_show_cubemaps',
'gi_irradiance_display_size',
'gi_cubemap_display_size',
'gi_auto_bake',
'taa_samples',
'taa_render_samples',
'use_taa_reprojection',
'sss_samples',
'sss_jitter_threshold',
'use_ssr',
'use_ssr_refraction',
'use_ssr_halfres',
'ssr_quality',
'ssr_max_roughness',
'ssr_thickness',
'ssr_border_fade',
'ssr_firefly_fac',
'volumetric_start',
'volumetric_end',
'volumetric_tile_size',
'volumetric_samples',
'volumetric_sample_distribution',
'use_volumetric_lights',
'volumetric_light_clamp',
'use_volumetric_shadows',
'volumetric_shadow_samples',
'use_gtao',
'use_gtao_bent_normals',
'use_gtao_bounce',
'gtao_factor',
'gtao_quality',
'gtao_distance',
'bokeh_max_size',
'bokeh_threshold',
'use_bloom',
'bloom_threshold',
'bloom_color',
'bloom_knee',
'bloom_radius',
'bloom_clamp',
'bloom_intensity',
'use_motion_blur',
'motion_blur_shutter',
'motion_blur_depth_scale',
'motion_blur_max',
'motion_blur_steps',
'shadow_cube_size',
'shadow_cascade_size',
'use_shadow_high_bitdepth',
]
CYCLES_SETTINGS = [
'shading_system',
'progressive',
'use_denoising',
'denoiser',
'use_square_samples',
'samples',
'aa_samples',
'diffuse_samples',
'glossy_samples',
'transmission_samples',
'ao_samples',
'mesh_light_samples',
'subsurface_samples',
'volume_samples',
'sampling_pattern',
'use_layer_samples',
'sample_all_lights_direct',
'sample_all_lights_indirect',
'light_sampling_threshold',
'use_adaptive_sampling',
'adaptive_threshold',
'adaptive_min_samples',
'min_light_bounces',
'min_transparent_bounces',
'caustics_reflective',
'caustics_refractive',
'blur_glossy',
'max_bounces',
'diffuse_bounces',
'glossy_bounces',
'transmission_bounces',
'volume_bounces',
'transparent_max_bounces',
'volume_step_rate',
'volume_max_steps',
'dicing_rate',
'max_subdivisions',
'dicing_camera',
'offscreen_dicing_scale',
'film_exposure',
'film_transparent_glass',
'film_transparent_roughness',
'filter_type',
'pixel_filter_type',
'filter_width',
'seed',
'use_animated_seed',
'sample_clamp_direct',
'sample_clamp_indirect',
'tile_order',
'use_progressive_refine',
'bake_type',
'use_camera_cull',
'camera_cull_margin',
'use_distance_cull',
'distance_cull_margin',
'motion_blur_position',
'rolling_shutter_type',
'rolling_shutter_duration',
'texture_limit',
'texture_limit_render',
'ao_bounces',
'ao_bounces_render',
]
VIEW_SETTINGS = [
'look',
'view_transform',
'exposure',
'gamma',
'use_curve_mapping',
'white_level',
'black_level'
]
from ..utils import get_preferences
class BlScene(BlDatablock):
bl_id = "scenes"
@ -276,14 +30,7 @@ class BlScene(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = True
bl_icon = 'SCENE_DATA'
bl_reload_parent = False
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.diff_method = DIFF_JSON
def _construct(self, data):
instance = bpy.data.scenes.new(data["name"])
@ -295,135 +42,121 @@ class BlScene(BlDatablock):
loader.load(target, data)
# Load master collection
load_collection_objects(
data['collection']['objects'], target.collection)
load_collection_childrens(
data['collection']['children'], target.collection)
for object in data["collection"]["objects"]:
if object not in target.collection.objects.keys():
target.collection.objects.link(bpy.data.objects[object])
for object in target.collection.objects.keys():
if object not in data["collection"]["objects"]:
target.collection.objects.unlink(bpy.data.objects[object])
# load collections
for collection in data["collection"]["children"]:
if collection not in target.collection.children.keys():
target.collection.children.link(
bpy.data.collections[collection])
for collection in target.collection.children.keys():
if collection not in data["collection"]["children"]:
target.collection.children.unlink(
bpy.data.collections[collection])
if 'world' in data.keys():
target.world = bpy.data.worlds[data['world']]
# Annotation
if 'grease_pencil' in data.keys():
target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
if self.preferences.sync_flags.sync_render_settings:
if 'eevee' in data.keys():
loader.load(target.eevee, data['eevee'])
if 'eevee' in data.keys():
loader.load(target.eevee, data['eevee'])
if 'cycles' in data.keys():
loader.load(target.eevee, data['cycles'])
if 'cycles' in data.keys():
loader.load(target.cycles, data['cycles'])
if 'render' in data.keys():
loader.load(target.render, data['render'])
if 'view_settings' in data.keys():
loader.load(target.view_settings, data['view_settings'])
if target.view_settings.use_curve_mapping and \
'curve_mapping' in data['view_settings']:
# TODO: change this ugly fix
target.view_settings.curve_mapping.white_level = data[
'view_settings']['curve_mapping']['white_level']
target.view_settings.curve_mapping.black_level = data[
'view_settings']['curve_mapping']['black_level']
target.view_settings.curve_mapping.update()
if 'view_settings' in data.keys():
loader.load(target.view_settings, data['view_settings'])
if target.view_settings.use_curve_mapping:
# TODO: change this ugly fix
target.view_settings.curve_mapping.white_level = data['view_settings']['curve_mapping']['white_level']
target.view_settings.curve_mapping.black_level = data['view_settings']['curve_mapping']['black_level']
target.view_settings.curve_mapping.update()
def _dump_implementation(self, data, instance=None):
assert(instance)
data = {}
# Metadata
scene_dumper = Dumper()
scene_dumper.depth = 1
scene_dumper.include_filter = [
'name',
'world',
'id',
'camera',
'grease_pencil',
'frame_start',
'frame_end',
'frame_step',
]
if self.preferences.sync_flags.sync_active_camera:
scene_dumper.include_filter.append('camera')
data = scene_dumper.dump(instance)
data.update(scene_dumper.dump(instance))
# Master collection
data['collection'] = {}
data['collection']['children'] = dump_collection_children(
instance.collection)
data['collection']['objects'] = dump_collection_objects(
instance.collection)
scene_dumper.depth = 3
scene_dumper.include_filter = ['children','objects','name']
data['collection'] = scene_dumper.dump(instance.collection)
scene_dumper.depth = 1
scene_dumper.include_filter = None
pref = get_preferences()
# Render settings
if self.preferences.sync_flags.sync_render_settings:
scene_dumper.include_filter = RENDER_SETTINGS
data['render'] = scene_dumper.dump(instance.render)
if instance.render.engine == 'BLENDER_EEVEE':
scene_dumper.include_filter = EVEE_SETTINGS
data['eevee'] = scene_dumper.dump(instance.eevee)
elif instance.render.engine == 'CYCLES':
scene_dumper.include_filter = CYCLES_SETTINGS
data['cycles'] = scene_dumper.dump(instance.cycles)
scene_dumper.include_filter = VIEW_SETTINGS
if pref.sync_flags.sync_render_settings:
scene_dumper.exclude_filter = [
'gi_cache_info',
'feature_set',
'debug_use_hair_bvh',
'aa_samples',
'blur_glossy',
'glossy_bounces',
'device',
'max_bounces',
'preview_aa_samples',
'preview_samples',
'sample_clamp_indirect',
'samples',
'volume_bounces'
]
data['eevee'] = scene_dumper.dump(instance.eevee)
data['cycles'] = scene_dumper.dump(instance.cycles)
data['view_settings'] = scene_dumper.dump(instance.view_settings)
if instance.view_settings.use_curve_mapping:
data['view_settings']['curve_mapping'] = scene_dumper.dump(
instance.view_settings.curve_mapping)
data['view_settings']['curve_mapping'] = scene_dumper.dump(instance.view_settings.curve_mapping)
scene_dumper.depth = 5
scene_dumper.include_filter = [
'curves',
'points',
'location',
'location'
]
data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(
instance.view_settings.curve_mapping.curves)
if instance.sequence_editor:
data['has_sequence'] = True
else:
data['has_sequence'] = False
data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(instance.view_settings.curve_mapping.curves)
return data
def _resolve_deps_implementation(self):
deps = []
# Master Collection
deps.extend(resolve_collection_dependencies(self.instance.collection))
# child collections
for child in self.instance.collection.children:
deps.append(child)
# childs objects
for object in self.instance.objects:
deps.append(object)
# world
if self.instance.world:
deps.append(self.instance.world)
# annotations
if self.instance.grease_pencil:
deps.append(self.instance.grease_pencil)
# Sequences
# deps.extend(list(self.instance.sequence_editor.sequences_all))
if self.instance.sequence_editor:
deps.append(self.instance.sequence_editor)
return deps
def diff(self):
exclude_path = []
if not self.preferences.sync_flags.sync_render_settings:
exclude_path.append("root['eevee']")
exclude_path.append("root['cycles']")
exclude_path.append("root['view_settings']")
exclude_path.append("root['render']")
if not self.preferences.sync_flags.sync_active_camera:
exclude_path.append("root['camera']")
return DeepDiff(self.data, self._dump(instance=self.instance), exclude_paths=exclude_path)
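The exclude_paths argument is what keeps the optional sync flags cheap: anything under an excluded root is ignored by the comparison. A minimal, self-contained sketch of that behaviour using two hand-written scene dumps (the dictionaries are assumptions, not actual dump output):

from deepdiff import DeepDiff

old = {'name': 'Scene', 'eevee': {'taa_samples': 16}}
new = {'name': 'Scene', 'eevee': {'taa_samples': 64}}

# With the render settings root excluded, the change above is not reported.
assert not DeepDiff(old, new, exclude_paths=["root['eevee']"])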

View File

@ -1,198 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
import mathutils
from pathlib import Path
import logging
from .bl_file import get_filepath
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock, get_datablock_from_uuid
def dump_sequence(sequence: bpy.types.Sequence) -> dict:
""" Dump a sequence to a dict
:arg sequence: sequence to dump
:type sequence: bpy.types.Sequence
:return dict:
"""
dumper = Dumper()
dumper.exclude_filter = [
'lock',
'select',
'select_left_handle',
'select_right_handle',
'strobe'
]
dumper.depth = 1
data = dumper.dump(sequence)
# TODO: Support multiple images
if sequence.type == 'IMAGE':
data['filenames'] = [e.filename for e in sequence.elements]
# Effect strip inputs
input_count = getattr(sequence, 'input_count', None)
if input_count:
for n in range(input_count):
input_name = f"input_{n+1}"
data[input_name] = getattr(sequence, input_name).name
return data
def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor):
""" Load sequence from dumped data
:arg sequence_data: sequence to dump
:type sequence_data:dict
:arg sequence_editor: root sequence editor
:type sequence_editor: bpy.types.SequenceEditor
"""
strip_type = sequence_data.get('type')
strip_name = sequence_data.get('name')
strip_channel = sequence_data.get('channel')
strip_frame_start = sequence_data.get('frame_start')
sequence = sequence_editor.sequences_all.get(strip_name, None)
if sequence is None:
if strip_type == 'SCENE':
strip_scene = bpy.data.scenes.get(sequence_data.get('scene'))
sequence = sequence_editor.sequences.new_scene(strip_name,
strip_scene,
strip_channel,
strip_frame_start)
elif strip_type == 'MOVIE':
filepath = get_filepath(Path(sequence_data['filepath']).name)
sequence = sequence_editor.sequences.new_movie(strip_name,
filepath,
strip_channel,
strip_frame_start)
elif strip_type == 'SOUND':
filepath = bpy.data.sounds[sequence_data['sound']].filepath
sequence = sequence_editor.sequences.new_sound(strip_name,
filepath,
strip_channel,
strip_frame_start)
elif strip_type == 'IMAGE':
images_name = sequence_data.get('filenames')
filepath = get_filepath(images_name[0])
sequence = sequence_editor.sequences.new_image(strip_name,
filepath,
strip_channel,
strip_frame_start)
# load other images
if len(images_name)>1:
for img_idx in range(1,len(images_name)):
sequence.elements.append((images_name[img_idx]))
else:
seq = {}
for i in range(sequence_data['input_count']):
seq[f"seq{i+1}"] = sequence_editor.sequences_all.get(sequence_data.get(f"input_{i+1}", None))
sequence = sequence_editor.sequences.new_effect(name=strip_name,
type=strip_type,
channel=strip_channel,
frame_start=strip_frame_start,
frame_end=sequence_data['frame_final_end'],
**seq)
loader = Loader()
loader.load(sequence, sequence_data)
sequence.select = False
class BlSequencer(BlDatablock):
bl_id = "scenes"
bl_class = bpy.types.SequenceEditor
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = True
bl_icon = 'SEQUENCE'
bl_reload_parent = False
def _construct(self, data):
# Get the scene
scene_id = data.get('name')
scene = bpy.data.scenes.get(scene_id, None)
# Create sequencer data
scene.sequence_editor_clear()
scene.sequence_editor_create()
return scene.sequence_editor
def resolve(self):
scene = bpy.data.scenes.get(self.data['name'], None)
if scene:
if scene.sequence_editor is None:
self.instance = self._construct(self.data)
else:
self.instance = scene.sequence_editor
else:
logging.warning("Sequencer editor scene not found")
def _load_implementation(self, data, target):
loader = Loader()
# Sequencer
sequences = data.get('sequences')
if sequences:
for seq in target.sequences_all:
if seq.name not in sequences:
target.sequences.remove(seq)
for seq_name, seq_data in sequences.items():
load_sequence(seq_data, target)
def _dump_implementation(self, data, instance=None):
assert(instance)
sequence_dumper = Dumper()
sequence_dumper.depth = 1
sequence_dumper.include_filter = [
'proxy_storage',
]
data = {}  # sequence_dumper.dump(instance)
# Sequencer
sequences = {}
for seq in instance.sequences_all:
sequences[seq.name] = dump_sequence(seq)
data['sequences'] = sequences
data['name'] = instance.id_data.name
return data
def _resolve_deps_implementation(self):
deps = []
for seq in self.instance.sequences_all:
if seq.type == 'MOVIE' and seq.filepath:
deps.append(Path(bpy.path.abspath(seq.filepath)))
elif seq.type == 'SOUND' and seq.sound:
deps.append(seq.sound)
elif seq.type == 'IMAGE':
for e in seq.elements:
deps.append(Path(bpy.path.abspath(seq.directory), e.filename))
return deps

View File

@ -1,70 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import logging
import os
from pathlib import Path
import bpy
from .bl_file import get_filepath, ensure_unpacked
from .bl_datablock import BlDatablock
from .dump_anything import Dumper, Loader
class BlSound(BlDatablock):
bl_id = "sounds"
bl_class = bpy.types.Sound
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'SOUND'
bl_reload_parent = False
def _construct(self, data):
filename = data.get('filename')
return bpy.data.sounds.load(get_filepath(filename))
def _load(self, data, target):
loader = Loader()
loader.load(target, data)
def diff(self):
return False
def _dump(self, instance=None):
filename = Path(instance.filepath).name
if not filename:
raise FileExistsError(instance.filepath)
return {
'filename': filename,
'name': instance.name
}
def _resolve_deps_implementation(self):
deps = []
if self.instance.filepath and self.instance.filepath != '<builtin>':
ensure_unpacked(self.instance)
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
return deps

View File

@ -29,9 +29,7 @@ class BlSpeaker(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'SPEAKER'
bl_reload_parent = False
def _load_implementation(self, data, target):
loader = Loader()
@ -50,7 +48,6 @@ class BlSpeaker(BlDatablock):
'volume',
'name',
'pitch',
'sound',
'volume_min',
'volume_max',
'attenuation',
@ -63,15 +60,6 @@ class BlSpeaker(BlDatablock):
return dumper.dump(instance)
def _resolve_deps_implementation(self):
# TODO: resolve material
deps = []
sound = self.instance.sound
if sound:
deps.append(sound)
return deps

View File

@ -1,78 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
import mathutils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
class BlTexture(BlDatablock):
bl_id = "textures"
bl_class = bpy.types.Texture
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'TEXTURE'
bl_reload_parent = False
def _load_implementation(self, data, target):
loader = Loader()
loader.load(target, data)
def _construct(self, data):
return bpy.data.textures.new(data["name"], data["type"])
def _dump_implementation(self, data, instance=None):
assert(instance)
dumper = Dumper()
dumper.depth = 1
dumper.exclude_filter = [
'tag',
'original',
'users',
'uuid',
'is_embedded_data',
'is_evaluated',
'name_full'
]
data = dumper.dump(instance)
color_ramp = getattr(instance, 'color_ramp', None)
if color_ramp:
dumper.depth = 4
data['color_ramp'] = dumper.dump(color_ramp)
return data
def _resolve_deps_implementation(self):
# TODO: resolve material
deps = []
image = getattr(self.instance,"image", None)
if image:
deps.append(image)
return deps

View File

@ -1,99 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
import mathutils
from pathlib import Path
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock, get_datablock_from_uuid
class BlVolume(BlDatablock):
bl_id = "volumes"
bl_class = bpy.types.Volume
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'VOLUME_DATA'
bl_reload_parent = False
def _load_implementation(self, data, target):
loader = Loader()
loader.load(target, data)
loader.load(target.display, data['display'])
# MATERIAL SLOTS
target.materials.clear()
for mat_uuid, mat_name in data["material_list"]:
mat_ref = None
if mat_uuid is not None:
mat_ref = get_datablock_from_uuid(mat_uuid, None)
else:
mat_ref = bpy.data.materials.get(mat_name, None)
if mat_ref is None:
raise Exception("Material doesn't exist")
target.materials.append(mat_ref)
def _construct(self, data):
return bpy.data.volumes.new(data["name"])
def _dump_implementation(self, data, instance=None):
assert(instance)
dumper = Dumper()
dumper.depth = 1
dumper.exclude_filter = [
'tag',
'original',
'users',
'uuid',
'is_embedded_data',
'is_evaluated',
'name_full',
'use_fake_user'
]
data = dumper.dump(instance)
data['display'] = dumper.dump(instance.display)
# Fix material index
data['material_list'] = [(m.uuid, m.name) for m in instance.materials if m]
return data
def _resolve_deps_implementation(self):
# TODO: resolve material
deps = []
external_vdb = Path(bpy.path.abspath(self.instance.filepath))
if external_vdb.exists() and not external_vdb.is_dir():
deps.append(external_vdb)
for material in self.instance.materials:
if material:
deps.append(material)
return deps

View File

@ -21,9 +21,7 @@ import mathutils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from .bl_material import (load_shader_node_tree,
dump_shader_node_tree,
get_node_tree_dependencies)
from .bl_material import load_links, load_node, dump_node, dump_links
class BlWorld(BlDatablock):
@ -32,36 +30,52 @@ class BlWorld(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = True
bl_icon = 'WORLD_DATA'
bl_reload_parent = False
def _construct(self, data):
return bpy.data.worlds.new(data["name"])
def _load_implementation(self, data, target):
loader = Loader()
loader.load(target, data)
if data["use_nodes"]:
if target.node_tree is None:
target.use_nodes = True
load_shader_node_tree(data['node_tree'], target.node_tree)
target.node_tree.nodes.clear()
for node in data["node_tree"]["nodes"]:
load_node(data["node_tree"]["nodes"][node], target.node_tree)
# Load nodes links
target.node_tree.links.clear()
load_links(data["node_tree"]["links"], target.node_tree)
def _dump_implementation(self, data, instance=None):
assert(instance)
world_dumper = Dumper()
world_dumper.depth = 1
world_dumper.include_filter = [
"use_nodes",
"name",
"color"
world_dumper.depth = 2
world_dumper.exclude_filter = [
"preview",
"original",
"uuid",
"color",
"cycles",
"light_settings",
"users",
"view_center"
]
data = world_dumper.dump(instance)
if instance.use_nodes:
data['node_tree'] = dump_shader_node_tree(instance.node_tree)
nodes = {}
for node in instance.node_tree.nodes:
nodes[node.name] = dump_node(node)
data["node_tree"]['nodes'] = nodes
data["node_tree"]['links'] = dump_links(instance.node_tree.links)
return data
@ -69,7 +83,10 @@ class BlWorld(BlDatablock):
deps = []
if self.instance.use_nodes:
deps.extend(get_node_tree_dependencies(self.instance.node_tree))
for node in self.instance.node_tree.nodes:
if node.type == 'TEX_IMAGE':
deps.append(node.image)
if self.is_library:
deps.append(self.instance.library)
return deps

View File

@ -24,8 +24,8 @@ import numpy as np
BPY_TO_NUMPY_TYPES = {
'FLOAT': np.float32,
'INT': np.int32,
'FLOAT': np.float,
'INT': np.int,
'BOOL': np.bool}
PRIMITIVE_TYPES = ['FLOAT', 'INT', 'BOOLEAN']
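BPY_TO_NUMPY_TYPES maps a bl_rna property type name to the numpy dtype used when flattening a collection into a byte buffer; the explicit np.float32/np.int32 spellings also avoid the deprecated np.float/np.int aliases. A small sketch of the intended lookup (the 8-element buffer is just an illustration):

import numpy as np

dtype = BPY_TO_NUMPY_TYPES.get('FLOAT', np.float32)  # assumption: float fallback
buffer = np.zeros(8, dtype=dtype)                     # flat buffer for 8 values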
@ -47,7 +47,7 @@ def np_load_collection(dikt: dict, collection: bpy.types.CollectionProperty, att
:type attributes: list
"""
if not dikt or len(collection) == 0:
logging.debug(f'Skipping collection {collection}')
logging.warning(f'Skipping collection')
return
if attributes is None:
@ -115,7 +115,7 @@ def np_dump_collection_primitive(collection: bpy.types.CollectionProperty, attri
:return: numpy byte buffer
"""
if len(collection) == 0:
logging.debug(f'Skipping empty {attribute} attribute')
logging.warning(f'Skipping empty {attribute} attribute')
return {}
attr_infos = collection[0].bl_rna.properties.get(attribute)
@ -192,7 +192,7 @@ def np_load_collection_primitives(collection: bpy.types.CollectionProperty, attr
:type sequence: str
"""
if len(collection) == 0 or not sequence:
logging.debug(f"Skipping loading {attribute}")
logging.warning(f"Skipping loadin {attribute}")
return
attr_infos = collection[0].bl_rna.properties.get(attribute)
@ -301,7 +301,7 @@ class Dumper:
self._dump_ID = (lambda x, depth: x.name, self._dump_default_as_branch)
self._dump_collection = (
self._dump_default_as_leaf, self._dump_collection_as_branch)
self._dump_array = (self._dump_array_as_branch,
self._dump_array = (self._dump_default_as_leaf,
self._dump_array_as_branch)
self._dump_matrix = (self._dump_matrix_as_leaf,
self._dump_matrix_as_leaf)
@ -506,14 +506,12 @@ class Loader:
T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]),
T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []),
T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"]),
T.GpencilModifier: (CONSTRUCTOR_NEW, ["name", "type"]),
T.Constraint: (CONSTRUCTOR_NEW, ["type"]),
}
destructors = {
T.ColorRampElement: DESTRUCTOR_REMOVE,
T.Modifier: DESTRUCTOR_CLEAR,
T.GpencilModifier: DESTRUCTOR_CLEAR,
T.Constraint: CONSTRUCTOR_NEW,
}
element_type = element.bl_rna_property.fixed_type
@ -576,7 +574,6 @@ class Loader:
dst_curve.points[int(point_idx)].location = pos
else:
dst_curve.points.new(pos[0], pos[1])
curves.update()
def _load_pointer(self, instance, dump):
rna_property_type = instance.bl_rna_property.fixed_type
@ -596,10 +593,6 @@ class Loader:
instance.write(bpy.data.materials.get(dump))
elif isinstance(rna_property_type, T.Collection):
instance.write(bpy.data.collections.get(dump))
elif isinstance(rna_property_type, T.VectorFont):
instance.write(bpy.data.fonts.get(dump))
elif isinstance(rna_property_type, T.Sound):
instance.write(bpy.data.sounds.get(dump))
def _load_matrix(self, matrix, dump):
matrix.write(mathutils.Matrix(dump))
@ -629,11 +622,11 @@ class Loader:
for k in self._ordered_keys(dump.keys()):
v = dump[k]
if not hasattr(default.read(), k):
continue
logging.debug(f"Load default, skipping {default} : {k}")
try:
self._load_any(default.extend(k), v)
except Exception as err:
logging.debug(f"Skipping {k}")
logging.debug(f"Cannot load {k}: {err}")
@property
def match_subset_all(self):

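As a usage illustration of the primitive dump/load helpers above, here is a sketch of a round trip on mesh vertex coordinates (the relative import path follows the `from .dump_anything import ...` lines seen elsewhere in this diff, and the mesh name is assumed):

import bpy
from .dump_anything import (np_dump_collection_primitive,
                            np_load_collection_primitives)

mesh = bpy.data.meshes['Cube']  # assumed to exist in the current file
# Serialize the 'co' attribute of every vertex into a single buffer...
coords = np_dump_collection_primitive(mesh.vertices, 'co')
# ...and write it back onto a collection with the same vertex count.
np_load_collection_primitives(mesh.vertices, 'co', coords)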
View File

@ -19,34 +19,16 @@ import logging
import bpy
from . import utils
from .presence import (renderer,
UserFrustumWidget,
UserNameWidget,
UserSelectionWidget,
refresh_3d_view,
generate_user_camera,
get_view_matrix,
refresh_sidebar_view)
from . import operators
from replication.constants import (FETCHED,
UP,
RP_COMMON,
STATE_INITIAL,
STATE_QUITTING,
STATE_ACTIVE,
STATE_SYNCING,
STATE_LOBBY,
STATE_SRV_SYNC)
from . import operators, presence, utils
from .libs.replication.replication.constants import (FETCHED,
RP_COMMON,
STATE_INITIAL,
STATE_QUITTING,
STATE_ACTIVE,
STATE_SYNCING,
STATE_LOBBY,
STATE_SRV_SYNC)
from replication.interface import session
from replication.exception import NonAuthorizedOperationError
def is_annotating(context: bpy.types.Context):
""" Check if the annotate mode is enabled
"""
return bpy.context.workspace.tools.from_space_view3d_mode('OBJECT', create=False).idname == 'builtin.annotate'
class Delayable():
"""Delayable task interface
@ -69,32 +51,19 @@ class Timer(Delayable):
"""
def __init__(self, duration=1):
super().__init__()
self._timeout = duration
self.is_running = False
self._running = True
def register(self):
"""Register the timer into the blender timer system
"""
if not self.is_running:
bpy.app.timers.register(self.main)
self.is_running = True
logging.debug(f"Register {self.__class__.__name__}")
else:
logging.debug(
f"Timer {self.__class__.__name__} already registered")
bpy.app.timers.register(self.main)
def main(self):
try:
self.execute()
except Exception as e:
logging.error(e)
self.unregister()
session.disconnect()
else:
if self.is_running:
return self._timeout
self.execute()
if self._running:
return self._timeout
def execute(self):
"""Main timer loop
@ -107,7 +76,7 @@ class Timer(Delayable):
if bpy.app.timers.is_registered(self.main):
bpy.app.timers.unregister(self.main)
self.is_running = False
self._running = False
class ApplyTimer(Timer):
@ -116,37 +85,29 @@ class ApplyTimer(Timer):
super().__init__(timout)
def execute(self):
if session and session.state['STATE'] == STATE_ACTIVE:
if self._type:
nodes = session.list(filter=self._type)
else:
nodes = session.list()
client = operators.client
if client and client.state['STATE'] == STATE_ACTIVE:
nodes = client.list(filter=self._type)
for node in nodes:
node_ref = session.get(uuid=node)
node_ref = client.get(uuid=node)
if node_ref.state == FETCHED:
try:
session.apply(node)
client.apply(node)
except Exception as e:
logging.error(f"Fail to apply {node_ref.uuid}: {e}")
else:
if self._type.bl_reload_parent:
parents = []
for n in session.list():
deps = session.get(uuid=n).dependencies
if deps and node in deps:
session.apply(n, force=True)
class DynamicRightSelectTimer(Timer):
def __init__(self, timout=.1):
super().__init__(timout)
self._last_selection = []
self._user = None
self._annotating = False
self._right_strategy = RP_COMMON
def execute(self):
session = operators.client
settings = utils.get_preferences()
if session and session.state['STATE'] == STATE_ACTIVE:
@ -155,30 +116,6 @@ class DynamicRightSelectTimer(Timer):
self._user = session.online_users.get(settings.username)
if self._user:
ctx = bpy.context
annotation_gp = ctx.scene.grease_pencil
# if an annotation exist and is tracked
if annotation_gp and annotation_gp.uuid:
registered_gp = session.get(uuid=annotation_gp.uuid)
if is_annotating(bpy.context):
# try to get the right on it
if registered_gp.owner == RP_COMMON:
self._annotating = True
logging.debug(
"Getting the right on the annotation GP")
session.change_owner(
registered_gp.uuid,
settings.username,
ignore_warnings=True,
affect_dependencies=False)
elif self._annotating:
session.change_owner(
registered_gp.uuid,
RP_COMMON,
ignore_warnings=True,
affect_dependencies=False)
current_selection = utils.get_selected_objects(
bpy.context.scene,
bpy.data.window_managers['WinMan'].windows[0].view_layer
@ -197,15 +134,10 @@ class DynamicRightSelectTimer(Timer):
recursive = True
if node.data and 'instance_type' in node.data.keys():
recursive = node.data['instance_type'] != 'COLLECTION'
try:
session.change_owner(
node.uuid,
RP_COMMON,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(
f"Not authorized to change {node} owner")
session.change_owner(
node.uuid,
RP_COMMON,
recursive=recursive)
# change new selection to our
for obj in obj_ours:
@ -216,15 +148,10 @@ class DynamicRightSelectTimer(Timer):
if node.data and 'instance_type' in node.data.keys():
recursive = node.data['instance_type'] != 'COLLECTION'
try:
session.change_owner(
node.uuid,
settings.username,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(
f"Not authorized to change {node} owner")
session.change_owner(
node.uuid,
settings.username,
recursive=recursive)
else:
return
@ -238,56 +165,106 @@ class DynamicRightSelectTimer(Timer):
logging.debug("Update selection")
# Fix deselection until right management refactoring (with Roles concepts)
if len(current_selection) == 0 :
if len(current_selection) == 0 and self._right_strategy == RP_COMMON:
owned_keys = session.list(
filter_owner=settings.username)
for key in owned_keys:
node = session.get(uuid=key)
try:
session.change_owner(
key,
RP_COMMON,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(
f"Not authorized to change {key} owner")
for obj in bpy.data.objects:
object_uuid = getattr(obj, 'uuid', None)
if object_uuid:
is_selectable = not session.is_readonly(object_uuid)
if obj.hide_select != is_selectable:
obj.hide_select = is_selectable
session.change_owner(
key,
RP_COMMON,
recursive=recursive)
for user, user_info in session.online_users.items():
if user != settings.username:
metadata = user_info.get('metadata')
if 'selected_objects' in metadata:
# Update selectable objects
for obj in bpy.data.objects:
if obj.hide_select and obj.uuid not in metadata['selected_objects']:
obj.hide_select = False
elif not obj.hide_select and obj.uuid in metadata['selected_objects']:
obj.hide_select = True
class Draw(Delayable):
def __init__(self):
self._handler = None
def register(self):
self._handler = bpy.types.SpaceView3D.draw_handler_add(
self.execute, (), 'WINDOW', 'POST_VIEW')
def execute(self):
raise NotImplementedError()
def unregister(self):
try:
bpy.types.SpaceView3D.draw_handler_remove(
self._handler, "WINDOW")
except:
pass
class DrawClient(Draw):
def execute(self):
session = getattr(operators, 'client', None)
renderer = getattr(presence, 'renderer', None)
prefs = utils.get_preferences()
if session and renderer and session.state['STATE'] == STATE_ACTIVE:
settings = bpy.context.window_manager.session
users = session.online_users
# Update users
for user in users.values():
metadata = user.get('metadata')
color = metadata.get('color')
scene_current = metadata.get('scene_current')
user_showable = scene_current == bpy.context.scene.name or settings.presence_show_far_user
if color and scene_current and user_showable:
if settings.presence_show_selected and 'selected_objects' in metadata.keys():
renderer.draw_client_selection(
user['id'], color, metadata['selected_objects'])
if settings.presence_show_user and 'view_corners' in metadata:
renderer.draw_client_camera(
user['id'], metadata['view_corners'], color)
if not user_showable:
# TODO: remove this when user event driven updates are
# ready
renderer.flush_selection()
renderer.flush_users()
class ClientUpdate(Timer):
def __init__(self, timout=.1):
def __init__(self, timout=.016):
super().__init__(timout)
self.handle_quit = False
self.users_metadata = {}
def execute(self):
settings = utils.get_preferences()
session = getattr(operators, 'client', None)
renderer = getattr(presence, 'renderer', None)
if session and renderer:
if session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]:
local_user = session.online_users.get(
settings.username)
local_user = operators.client.online_users.get(settings.username)
if not local_user:
return
else:
for username, user_data in session.online_users.items():
for username, user_data in operators.client.online_users.items():
if username != settings.username:
cached_user_data = self.users_metadata.get(
username)
new_user_data = session.online_users[username]['metadata']
cached_user_data = self.users_metadata.get(username)
new_user_data = operators.client.online_users[username]['metadata']
if cached_user_data is None:
self.users_metadata[username] = user_data['metadata']
elif 'view_matrix' in cached_user_data and 'view_matrix' in new_user_data and cached_user_data['view_matrix'] != new_user_data['view_matrix']:
refresh_3d_view()
presence.refresh_3d_view()
self.users_metadata[username] = user_data['metadata']
break
else:
@ -295,18 +272,18 @@ class ClientUpdate(Timer):
local_user_metadata = local_user.get('metadata')
scene_current = bpy.context.scene.name
local_user = session.online_users.get(settings.username)
current_view_corners = generate_user_camera()
local_user = session.online_users.get(settings.username)
current_view_corners = presence.get_view_corners()
# Init client metadata
if not local_user_metadata or 'color' not in local_user_metadata.keys():
metadata = {
'view_corners': get_view_matrix(),
'view_matrix': get_view_matrix(),
'view_corners': presence.get_view_matrix(),
'view_matrix': presence.get_view_matrix(),
'color': (settings.client_color.r,
settings.client_color.g,
settings.client_color.b,
1),
settings.client_color.g,
settings.client_color.b,
1),
'frame_current': bpy.context.scene.frame_current,
'scene_current': scene_current
}
@ -319,60 +296,33 @@ class ClientUpdate(Timer):
session.update_user_metadata(local_user_metadata)
elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']:
local_user_metadata['view_corners'] = current_view_corners
local_user_metadata['view_matrix'] = get_view_matrix(
)
local_user_metadata['view_matrix'] = presence.get_view_matrix()
session.update_user_metadata(local_user_metadata)
# sync online users
session_users = operators.client.online_users
ui_users = bpy.context.window_manager.online_users
for index, user in enumerate(ui_users):
if user.username not in session_users.keys():
ui_users.remove(index)
renderer.flush_selection()
renderer.flush_users()
break
class SessionStatusUpdate(Timer):
def __init__(self, timout=1):
super().__init__(timout)
for user in session_users:
if user not in ui_users:
new_key = ui_users.add()
new_key.name = user
new_key.username = user
elif session.state['STATE'] == STATE_QUITTING:
presence.refresh_sidebar_view()
self.handle_quit = True
elif session.state['STATE'] == STATE_INITIAL and self.handle_quit:
self.handle_quit = False
presence.refresh_sidebar_view()
def execute(self):
refresh_sidebar_view()
operators.unregister_delayables()
presence.renderer.stop()
class SessionUserSync(Timer):
def __init__(self, timout=1):
super().__init__(timout)
self.settings = utils.get_preferences()
def execute(self):
if session and renderer:
# sync online users
session_users = session.online_users
ui_users = bpy.context.window_manager.online_users
for index, user in enumerate(ui_users):
if user.username not in session_users.keys() and \
user.username != self.settings.username:
renderer.remove_widget(f"{user.username}_cam")
renderer.remove_widget(f"{user.username}_select")
renderer.remove_widget(f"{user.username}_name")
ui_users.remove(index)
break
for user in session_users:
if user not in ui_users:
new_key = ui_users.add()
new_key.name = user
new_key.username = user
if user != self.settings.username:
renderer.add_widget(
f"{user}_cam", UserFrustumWidget(user))
renderer.add_widget(
f"{user}_select", UserSelectionWidget(user))
renderer.add_widget(
f"{user}_name", UserNameWidget(user))
class MainThreadExecutor(Timer):
def __init__(self, timout=1, execution_queue=None):
super().__init__(timout)
self.execution_queue = execution_queue
def execute(self):
while not self.execution_queue.empty():
function, kwargs = self.execution_queue.get()
logging.debug(f"Executing {function.__name__}")
function(**kwargs)
presence.refresh_sidebar_view()

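The Timer shown above registers main() with bpy.app.timers and reschedules itself by returning its timeout from main(); a sketch of a minimal subclass (assuming the duration keyword from the signature in this diff):

from .delayable import Timer  # assumed relative import, as used in operators.py

class HeartbeatTimer(Timer):
    """Logs a message every two seconds until unregister() is called."""

    def __init__(self):
        super().__init__(duration=2)

    def execute(self):
        print("session heartbeat")

timer = HeartbeatTimer()
timer.register()  # schedules self.main through bpy.app.timers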
View File

@ -23,9 +23,6 @@ import subprocess
import sys
from pathlib import Path
import socket
import re
VERSION_EXPR = re.compile('\d+.\d+.\d+')
THIRD_PARTY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
DEFAULT_CACHE_DIR = os.path.join(
@ -50,32 +47,10 @@ def install_pip():
subprocess.run([str(PYTHON_PATH), "-m", "ensurepip"])
def install_package(name, version):
logging.info(f"installing {name} version...")
env = os.environ
if "PIP_REQUIRE_VIRTUALENV" in env:
# PIP_REQUIRE_VIRTUALENV is an env var to ensure pip cannot install packages outside a virtual env
# https://docs.python-guide.org/dev/pip-virtualenv/
# But since Blender's pip is outside of a virtual env, it can block our package installation, so we unset the
# env var for the subprocess.
env = os.environ.copy()
del env["PIP_REQUIRE_VIRTUALENV"]
subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}=={version}"], env=env)
def install_package(name):
logging.debug(f"Using {PYTHON_PATH} for installation")
subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", name])
if name in sys.modules:
del sys.modules[name]
def check_package_version(name, required_version):
logging.info(f"Checking {name} version...")
out = subprocess.run([str(PYTHON_PATH), "-m", "pip", "show", name], capture_output=True)
version = VERSION_EXPR.search(out.stdout.decode())
if version and version.group() == required_version:
logging.info(f"{name} is up to date")
return True
else:
logging.info(f"{name} need an update")
return False
def get_ip():
"""
@ -103,9 +78,7 @@ def setup(dependencies, python_path):
if not module_can_be_imported("pip"):
install_pip()
for package_name, package_version in dependencies:
if not module_can_be_imported(package_name):
install_package(package_name, package_version)
for module_name, package_name in dependencies:
if not module_can_be_imported(module_name):
install_package(package_name)
module_can_be_imported(package_name)
elif not check_package_version(package_name, package_version):
install_package(package_name, package_version)

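The setup() helper above iterates dependency pairs and installs them through Blender's bundled pip; a usage sketch of the (package, version) variant (the version pin is purely illustrative):

import bpy
from . import environment

# Hypothetical pin: replace with the replication version actually required by the add-on.
DEPENDENCIES = [("replication", "0.1.0")]

environment.setup(DEPENDENCIES, bpy.app.binary_path_python)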
View File

View File

@ -21,105 +21,46 @@ import logging
import os
import queue
import random
import shutil
import string
import sys
import time
from operator import itemgetter
from pathlib import Path
from queue import Queue
from subprocess import PIPE, Popen, TimeoutExpired
import zmq
import bpy
import mathutils
from bpy.app.handlers import persistent
from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE,
STATE_INITIAL, STATE_SYNCING, UP)
from replication.data import ReplicatedDataFactory
from replication.exception import NonAuthorizedOperationError
from replication.interface import session
from . import bl_types, delayable, environment, ui, utils
from .presence import SessionStatusWidget, renderer, view3d_find
from . import bl_types, delayable, environment, presence, ui, utils
from .libs.replication.replication.constants import (FETCHED, STATE_ACTIVE,
STATE_INITIAL,
STATE_SYNCING)
from .libs.replication.replication.data import ReplicatedDataFactory
from .libs.replication.replication.exception import NonAuthorizedOperationError
from .libs.replication.replication.interface import Session
background_execution_queue = Queue()
deleyables = []
client = None
delayables = []
stop_modal_executor = False
modal_executor_queue = None
def session_callback(name):
""" Session callback wrapper
def unregister_delayables():
global delayables, stop_modal_executor
This allows session callbacks to be queued on background_execution_queue.
This way, callbacks are executed from the main thread.
"""
def func_wrapper(func):
@session.register(name)
def add_background_task(**kwargs):
background_execution_queue.put((func, kwargs))
return add_background_task
return func_wrapper
@session_callback('on_connection')
def initialize_session():
"""Session connection init hander
"""
settings = utils.get_preferences()
runtime_settings = bpy.context.window_manager.session
# Step 1: Construct nodes
for node in session._graph.list_ordered():
node_ref = session.get(node)
if node_ref.state == FETCHED:
node_ref.resolve()
# Step 2: Load nodes
for node in session._graph.list_ordered():
node_ref = session.get(node)
if node_ref.state == FETCHED:
node_ref.apply()
# Step 4: Register blender timers
for d in deleyables:
d.register()
if settings.update_method == 'DEPSGRAPH':
bpy.app.handlers.depsgraph_update_post.append(depsgraph_evaluation)
bpy.ops.session.apply_armature_operator('INVOKE_DEFAULT')
@session_callback('on_exit')
def on_connection_end(reason="none"):
"""Session connection finished handler
"""
global deleyables, stop_modal_executor
settings = utils.get_preferences()
# Step 1: Unregister blender timers
for d in deleyables:
for d in delayables:
try:
d.unregister()
except:
continue
deleyables.clear()
stop_modal_executor = True
if settings.update_method == 'DEPSGRAPH':
bpy.app.handlers.depsgraph_update_post.remove(
depsgraph_evaluation)
# Step 3: remove file handlers
logger = logging.getLogger()
for handler in logger.handlers:
if isinstance(handler, logging.FileHandler):
logger.removeHandler(handler)
if reason != "user":
bpy.ops.session.notify('INVOKE_DEFAULT', message=f"Disconnected from session. Reason: {reason}. ")
# OPERATORS
class SessionStartOperator(bpy.types.Operator):
bl_idname = "session.start"
bl_label = "start"
@ -132,37 +73,16 @@ class SessionStartOperator(bpy.types.Operator):
return True
def execute(self, context):
global deleyables
global client, delayables
settings = utils.get_preferences()
runtime_settings = context.window_manager.session
users = bpy.data.window_managers['WinMan'].online_users
admin_pass = runtime_settings.password
use_extern_update = settings.update_method == 'DEPSGRAPH'
unregister_delayables()
users.clear()
deleyables.clear()
logger = logging.getLogger()
if len(logger.handlers) == 1:
formatter = logging.Formatter(
fmt='%(asctime)s CLIENT %(levelname)-8s %(message)s',
datefmt='%H:%M:%S'
)
log_directory = os.path.join(
settings.cache_directory,
"multiuser_client.log")
os.makedirs(settings.cache_directory, exist_ok=True)
handler = logging.FileHandler(log_directory, mode='w')
logger.addHandler(handler)
for handler in logger.handlers:
if isinstance(handler, logging.NullHandler):
continue
handler.setFormatter(formatter)
delayables.clear()
bpy_factory = ReplicatedDataFactory()
supported_bl_types = []
@ -170,46 +90,29 @@ class SessionStartOperator(bpy.types.Operator):
# init the factory with supported types
for type in bl_types.types_to_register():
type_module = getattr(bl_types, type)
name = [e.capitalize() for e in type.split('_')[1:]]
type_impl_name = 'Bl'+''.join(name)
type_impl_name = f"Bl{type.split('_')[1].capitalize()}"
type_module_class = getattr(type_module, type_impl_name)
supported_bl_types.append(type_module_class.bl_id)
if type_impl_name not in settings.supported_datablocks:
logging.info(f"{type_impl_name} not found, \
regenerate type settings...")
settings.generate_supported_types()
# Retrieve local replicated types settings
type_local_config = settings.supported_datablocks[type_impl_name]
bpy_factory.register_type(
type_module_class.bl_class,
type_module_class,
timer=type_local_config.bl_delay_refresh*1000,
automatic=type_local_config.auto_push,
check_common=type_module_class.bl_check_common)
timer=type_local_config.bl_delay_refresh,
automatic=type_local_config.auto_push)
if settings.update_method == 'DEFAULT':
if type_local_config.bl_delay_apply > 0:
deleyables.append(
delayable.ApplyTimer(
timout=type_local_config.bl_delay_apply,
target_type=type_module_class))
if type_local_config.bl_delay_apply > 0:
delayables.append(
delayable.ApplyTimer(
timout=type_local_config.bl_delay_apply,
target_type=type_module_class))
if bpy.app.version[1] >= 91:
python_binary_path = sys.executable
else:
python_binary_path = bpy.app.binary_path_python
session.configure(
client = Session(
factory=bpy_factory,
python_path=python_binary_path,
external_update_handling=use_extern_update)
if settings.update_method == 'DEPSGRAPH':
deleyables.append(delayable.ApplyTimer(
settings.depsgraph_update_rate/1000))
python_path=bpy.app.binary_path_python)
# Host a session
if self.host:
@ -219,34 +122,30 @@ class SessionStartOperator(bpy.types.Operator):
runtime_settings.is_host = True
runtime_settings.internet_ip = environment.get_ip()
try:
for scene in bpy.data.scenes:
session.add(scene)
for scene in bpy.data.scenes:
client.add(scene)
session.host(
try:
client.host(
id=settings.username,
port=settings.port,
ipc_port=settings.ipc_port,
timeout=settings.connection_timeout,
password=admin_pass,
cache_directory=settings.cache_directory,
server_log_level=logging.getLevelName(
logging.getLogger().level),
password=admin_pass
)
except Exception as e:
self.report({'ERROR'}, repr(e))
logging.error(f"Error: {e}")
import traceback
traceback.print_exc()
# Join a session
else:
if not runtime_settings.admin:
utils.clean_scene()
# regular session, no password needed
# regular client, no password needed
admin_pass = None
try:
session.connect(
client.connect(
id=settings.username,
address=settings.ip,
port=settings.port,
@ -259,23 +158,22 @@ class SessionStartOperator(bpy.types.Operator):
logging.error(str(e))
# Background client updates service
deleyables.append(delayable.ClientUpdate())
deleyables.append(delayable.DynamicRightSelectTimer())
#TODO: Refactoring
delayables.append(delayable.ClientUpdate())
delayables.append(delayable.DrawClient())
delayables.append(delayable.DynamicRightSelectTimer())
session_update = delayable.SessionStatusUpdate()
session_user_sync = delayable.SessionUserSync()
session_background_executor = delayable.MainThreadExecutor(
execution_queue=background_execution_queue)
# Launch drawing module
if runtime_settings.enable_presence:
presence.renderer.run()
session_update.register()
session_user_sync.register()
session_background_executor.register()
# Register blender main thread tools
for d in delayables:
d.register()
deleyables.append(session_background_executor)
deleyables.append(session_update)
deleyables.append(session_user_sync)
global modal_executor_queue
modal_executor_queue = queue.Queue()
bpy.ops.session.apply_armature_operator()
self.report(
{'INFO'},
@ -311,13 +209,15 @@ class SessionInitOperator(bpy.types.Operator):
return wm.invoke_props_dialog(self)
def execute(self, context):
global client
if self.init_method == 'EMPTY':
utils.clean_scene()
for scene in bpy.data.scenes:
session.add(scene)
client.add(scene)
session.init()
client.init()
return {"FINISHED"}
@ -333,12 +233,11 @@ class SessionStopOperator(bpy.types.Operator):
return True
def execute(self, context):
global deleyables, stop_modal_executor
global client, delayables, stop_modal_executor
if session:
if client:
try:
session.disconnect()
client.disconnect()
except Exception as e:
self.report({'ERROR'}, repr(e))
else:
@ -350,7 +249,7 @@ class SessionStopOperator(bpy.types.Operator):
class SessionKickOperator(bpy.types.Operator):
bl_idname = "session.kick"
bl_label = "Kick"
bl_description = "Kick the target user"
bl_description = "Kick the user"
bl_options = {"REGISTER"}
user: bpy.props.StringProperty()
@ -360,11 +259,11 @@ class SessionKickOperator(bpy.types.Operator):
return True
def execute(self, context):
global deleyables, stop_modal_executor
assert(session)
global client, delayables, stop_modal_executor
assert(client)
try:
session.kick(self.user)
client.kick(self.user)
except Exception as e:
self.report({'ERROR'}, repr(e))
@ -380,9 +279,8 @@ class SessionKickOperator(bpy.types.Operator):
class SessionPropertyRemoveOperator(bpy.types.Operator):
bl_idname = "session.remove_prop"
bl_label = "Delete cache"
bl_description = "Stop tracking modification on the target datablock." + \
"The datablock will no longer be updated for others client. "
bl_label = "remove"
bl_description = "broadcast a property to connected client_instances"
bl_options = {"REGISTER"}
property_path: bpy.props.StringProperty(default="None")
@ -392,8 +290,9 @@ class SessionPropertyRemoveOperator(bpy.types.Operator):
return True
def execute(self, context):
global client
try:
session.remove(self.property_path)
client.remove(self.property_path)
return {"FINISHED"}
except: # NonAuthorizedOperationError:
@ -405,12 +304,11 @@ class SessionPropertyRemoveOperator(bpy.types.Operator):
class SessionPropertyRightOperator(bpy.types.Operator):
bl_idname = "session.right"
bl_label = "Change modification rights"
bl_description = "Modify the owner of the target datablock"
bl_label = "Change owner to"
bl_description = "Change owner of specified datablock"
bl_options = {"REGISTER"}
key: bpy.props.StringProperty(default="None")
recursive: bpy.props.BoolProperty(default=True)
@classmethod
def poll(cls, context):
@ -424,21 +322,15 @@ class SessionPropertyRightOperator(bpy.types.Operator):
layout = self.layout
runtime_settings = context.window_manager.session
row = layout.row()
row.label(text="Give the owning rights to:")
row.prop(runtime_settings, "clients", text="")
row = layout.row()
row.label(text="Affect dependencies")
row.prop(self, "recursive", text="")
col = layout.column()
col.prop(runtime_settings, "clients")
def execute(self, context):
runtime_settings = context.window_manager.session
global client
if session:
session.change_owner(self.key,
runtime_settings.clients,
ignore_warnings=True,
affect_dependencies=self.recursive)
if client:
client.change_owner(self.key, runtime_settings.clients)
return {"FINISHED"}
@ -484,10 +376,11 @@ class SessionSnapUserOperator(bpy.types.Operator):
return {'CANCELLED'}
if event.type == 'TIMER':
area, region, rv3d = view3d_find()
area, region, rv3d = presence.view3d_find()
global client
if session:
target_ref = session.online_users.get(self.target_client)
if client:
target_ref = client.online_users.get(self.target_client)
if target_ref:
target_scene = target_ref['metadata']['scene_current']
@ -496,16 +389,14 @@ class SessionSnapUserOperator(bpy.types.Operator):
if target_scene != context.scene.name:
blender_scene = bpy.data.scenes.get(target_scene, None)
if blender_scene is None:
self.report(
{'ERROR'}, f"Scene {target_scene} doesn't exist on the local client.")
self.report({'ERROR'}, f"Scene {target_scene} doesn't exist on the local client.")
session_sessings.time_snap_running = False
return {"CANCELLED"}
bpy.context.window.scene = blender_scene
# Update client viewmatrix
client_vmatrix = target_ref['metadata'].get(
'view_matrix', None)
client_vmatrix = target_ref['metadata'].get('view_matrix', None)
if client_vmatrix:
rv3d.view_matrix = mathutils.Matrix(client_vmatrix)
@ -558,8 +449,10 @@ class SessionSnapTimeOperator(bpy.types.Operator):
return {'CANCELLED'}
if event.type == 'TIMER':
if session:
target_ref = session.online_users.get(self.target_client)
global client
if client:
target_ref = client.online_users.get(self.target_client)
if target_ref:
context.scene.frame_current = target_ref['metadata']['frame_current']
@ -571,35 +464,28 @@ class SessionSnapTimeOperator(bpy.types.Operator):
class SessionApply(bpy.types.Operator):
bl_idname = "session.apply"
bl_label = "Revert"
bl_description = "Revert the selected datablock from his cached" + \
" version."
bl_label = "apply selected block into blender"
bl_description = "Apply selected block into blender"
bl_options = {"REGISTER"}
target: bpy.props.StringProperty()
reset_dependencies: bpy.props.BoolProperty(default=False)
@classmethod
def poll(cls, context):
return True
def execute(self, context):
logging.debug(f"Running apply on {self.target}")
try:
session.apply(self.target,
force=True,
force_dependencies=self.reset_dependencies)
except Exception as e:
self.report({'ERROR'}, repr(e))
return {"CANCELED"}
global client
client.apply(self.target)
return {"FINISHED"}
class SessionCommit(bpy.types.Operator):
bl_idname = "session.commit"
bl_label = "Force server update"
bl_description = "Commit and push the target datablock to server"
bl_label = "commit and push selected datablock to server"
bl_description = "commit and push selected datablock to server"
bl_options = {"REGISTER"}
target: bpy.props.StringProperty()
@ -609,13 +495,12 @@ class SessionCommit(bpy.types.Operator):
return True
def execute(self, context):
try:
session.commit(uuid=self.target)
session.push(self.target)
return {"FINISHED"}
except Exception as e:
self.report({'ERROR'}, repr(e))
return {"CANCELED"}
global client
# client.get(uuid=target).diff()
client.commit(uuid=self.target)
client.push(self.target)
return {"FINISHED"}
class ApplyArmatureOperator(bpy.types.Operator):
"""Operator which runs its self from a timer"""
@ -631,17 +516,18 @@ class ApplyArmatureOperator(bpy.types.Operator):
return {'CANCELLED'}
if event.type == 'TIMER':
if session and session.state['STATE'] == STATE_ACTIVE:
nodes = session.list(filter=bl_types.bl_armature.BlArmature)
global client
if client and client.state['STATE'] == STATE_ACTIVE:
nodes = client.list(filter=bl_types.bl_armature.BlArmature)
for node in nodes:
node_ref = session.get(uuid=node)
node_ref = client.get(uuid=node)
if node_ref.state == FETCHED:
try:
session.apply(node)
client.apply(node)
except Exception as e:
logging.error("Fail to apply armature: {e}")
logging.error("Dail to apply armature: {e}")
return {'PASS_THROUGH'}
@ -660,58 +546,6 @@ class ApplyArmatureOperator(bpy.types.Operator):
stop_modal_executor = False
class SessionClearCache(bpy.types.Operator):
"Clear local session cache"
bl_idname = "session.clear_cache"
bl_label = "Modal Executor Operator"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
cache_dir = utils.get_preferences().cache_directory
try:
for root, dirs, files in os.walk(cache_dir):
for name in files:
Path(root, name).unlink()
except Exception as e:
self.report({'ERROR'}, repr(e))
return {"FINISHED"}
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
def draw(self, context):
row = self.layout
row.label(text=f" Do you really want to remove local cache ? ")
class SessionNotifyOperator(bpy.types.Operator):
"""Dialog only operator"""
bl_idname = "session.notify"
bl_label = "Multi-user"
bl_description = "multiuser notification"
message: bpy.props.StringProperty()
@classmethod
def poll(cls, context):
return True
def execute(self, context):
return {'FINISHED'}
def draw(self, context):
layout = self.layout
layout.row().label(text=self.message)
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
classes = (
SessionStartOperator,
SessionStopOperator,
@ -724,8 +558,7 @@ classes = (
ApplyArmatureOperator,
SessionKickOperator,
SessionInitOperator,
SessionClearCache,
SessionNotifyOperator,
)
@ -737,63 +570,31 @@ def sanitize_deps_graph(dummy):
A future solution should be to avoid storing datablock references...
"""
if session and session.state['STATE'] == STATE_ACTIVE:
for node_key in session.list():
session.get(node_key).resolve()
global client
if client and client.state['STATE'] == STATE_ACTIVE:
for node_key in client.list():
client.get(node_key).resolve()
@persistent
def load_pre_handler(dummy):
if session and session.state['STATE'] in [STATE_ACTIVE, STATE_SYNCING]:
global client
if client and client.state['STATE'] in [STATE_ACTIVE, STATE_SYNCING]:
bpy.ops.session.stop()
@persistent
def update_client_frame(scene):
if session and session.state['STATE'] == STATE_ACTIVE:
session.update_user_metadata({
if client and client.state['STATE'] == STATE_ACTIVE:
client.update_user_metadata({
'frame_current': scene.frame_current
})
@persistent
def depsgraph_evaluation(scene):
if session and session.state['STATE'] == STATE_ACTIVE:
context = bpy.context
blender_depsgraph = bpy.context.view_layer.depsgraph
dependency_updates = [u for u in blender_depsgraph.updates]
settings = utils.get_preferences()
# NOTE: maybe we don't need to check each update but only the first
for update in reversed(dependency_updates):
# Is the object tracked ?
if update.id.uuid:
# Retrieve local version
node = session.get(update.id.uuid)
# Check our right on this update:
# - if its ours or ( under common and diff), launch the
# update process
# - if its to someone else, ignore the update (go deeper ?)
if node and node.owner in [session.id, RP_COMMON] and node.state == UP:
# Avoid slow geometry update
if 'EDIT' in context.mode and \
not settings.sync_flags.sync_during_editmode:
break
session.stash(node.uuid)
else:
# Distant update
continue
# else:
# # New items !
# logger.error("UPDATE: ADD")
def register():
from bpy.utils import register_class
for cls in classes:
register_class(cls)
@ -805,8 +606,11 @@ def register():
def unregister():
if session and session.state['STATE'] == STATE_ACTIVE:
session.disconnect()
global client
if client and client.state['STATE'] == 2:
client.disconnect()
client = None
from bpy.utils import unregister_class
for cls in reversed(classes):
@ -817,3 +621,7 @@ def unregister():
bpy.app.handlers.load_pre.remove(load_pre_handler)
bpy.app.handlers.frame_change_pre.remove(update_client_frame)
if __name__ == "__main__":
register()

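MainThreadExecutor above drains (function, kwargs) pairs from its execution queue and runs them on Blender's main thread; a sketch of queuing work onto it from a background (network) thread, using the module-level queue defined in this file:

import bpy
from .operators import background_execution_queue

def rename_first_scene(new_name="synced"):
    # Runs later on the main thread, once MainThreadExecutor picks it up.
    bpy.data.scenes[0].name = new_name

# Safe from a worker thread: only the queue is touched here.
background_execution_queue.put((rename_first_scene, {"new_name": "synced"}))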
View File

@ -20,18 +20,12 @@ import logging
import bpy
import string
import re
import os
from pathlib import Path
from . import utils, bl_types, environment, addon_updater_ops, presence, ui
from .libs.replication.replication.constants import RP_COMMON
from . import bl_types, environment, addon_updater_ops, presence, ui
from .utils import get_preferences, get_expanded_icon
from replication.constants import RP_COMMON
from replication.interface import session
IP_EXPR = re.compile('\d+\.\d+\.\d+\.\d+')
# From https://stackoverflow.com/a/106223
IP_REGEX = re.compile("^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$")
HOSTNAME_REGEX = re.compile("^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$")
def randomColor():
"""Generate a random color """
@ -42,7 +36,7 @@ def randomColor():
def random_string_digits(stringLength=6):
"""Generate a random string of letters and digits"""
"""Generate a random string of letters and digits """
lettersAndDigits = string.ascii_letters + string.digits
return ''.join(random.choices(lettersAndDigits, k=stringLength))
@ -52,48 +46,23 @@ def update_panel_category(self, context):
ui.SESSION_PT_settings.bl_category = self.panel_category
ui.register()
def update_ip(self, context):
ip = IP_REGEX.search(self.ip)
dns = HOSTNAME_REGEX.search(self.ip)
ip = IP_EXPR.search(self.ip)
if ip:
self['ip'] = ip.group()
elif dns:
self['ip'] = dns.group()
else:
logging.error("Wrong IP format")
self['ip'] = "127.0.0.1"
def update_port(self, context):
max_port = self.port + 3
if self.ipc_port < max_port and \
self['ipc_port'] >= self.port:
logging.error(
"IPC Port in conflict with the port, assigning a random value")
self['ipc_port'] >= self.port:
logging.error("IPC Port in conflic with the port, assigning a random value")
self['ipc_port'] = random.randrange(self.port+4, 10000)
def update_directory(self, context):
new_dir = Path(self.cache_directory)
if new_dir.exists() and any(Path(self.cache_directory).iterdir()):
logging.error("The folder is not empty, choose another one.")
self['cache_directory'] = environment.DEFAULT_CACHE_DIR
elif not new_dir.exists():
logging.info("Target cache folder doesn't exist, creating it.")
os.makedirs(self.cache_directory, exist_ok=True)
def set_log_level(self, value):
logging.getLogger().setLevel(value)
def get_log_level(self):
return logging.getLogger().level
class ReplicatedDatablock(bpy.types.PropertyGroup):
type_name: bpy.props.StringProperty()
bl_name: bpy.props.StringProperty()
@ -104,49 +73,11 @@ class ReplicatedDatablock(bpy.types.PropertyGroup):
icon: bpy.props.StringProperty()
def set_sync_render_settings(self, value):
self['sync_render_settings'] = value
if session and bpy.context.scene.uuid and value:
bpy.ops.session.apply('INVOKE_DEFAULT',
target=bpy.context.scene.uuid,
reset_dependencies=False)
def set_sync_active_camera(self, value):
self['sync_active_camera'] = value
if session and bpy.context.scene.uuid and value:
bpy.ops.session.apply('INVOKE_DEFAULT',
target=bpy.context.scene.uuid,
reset_dependencies=False)
class ReplicationFlags(bpy.types.PropertyGroup):
def get_sync_render_settings(self):
return self.get('sync_render_settings', True)
def get_sync_active_camera(self):
return self.get('sync_active_camera', True)
sync_render_settings: bpy.props.BoolProperty(
name="Synchronize render settings",
description="Synchronize render settings (eevee and cycles only)",
default=False,
set=set_sync_render_settings,
get=get_sync_render_settings
)
sync_during_editmode: bpy.props.BoolProperty(
name="Edit mode updates",
description="Enable objects update in edit mode (! Impact performances !)",
default=False
)
sync_active_camera: bpy.props.BoolProperty(
name="Synchronize active camera",
description="Synchronize the active camera",
default=True,
get=get_sync_active_camera,
set=set_sync_active_camera
)
default=True)
class SessionPrefs(bpy.types.AddonPreferences):
@ -178,9 +109,9 @@ class SessionPrefs(bpy.types.AddonPreferences):
)
ipc_port: bpy.props.IntProperty(
name="ipc_port",
description='internal ttl port(only useful for multiple local instances)',
default=random.randrange(5570, 70000),
update=update_port,
description='internal ttl port (only useful for multiple local instances)',
default=5561,
update=update_port
)
init_method: bpy.props.EnumProperty(
name='init_method',
@ -192,80 +123,33 @@ class SessionPrefs(bpy.types.AddonPreferences):
cache_directory: bpy.props.StringProperty(
name="cache directory",
subtype="DIR_PATH",
default=environment.DEFAULT_CACHE_DIR,
update=update_directory)
default=environment.DEFAULT_CACHE_DIR)
connection_timeout: bpy.props.IntProperty(
name='connection timeout',
description='connection timeout before disconnection',
default=1000
)
update_method: bpy.props.EnumProperty(
name='update method',
description='replication update method',
items=[
('DEFAULT', "Default", "Default: Use threads to monitor databloc changes"),
('DEPSGRAPH', "Depsgraph",
"Experimental: Use the blender dependency graph to trigger updates"),
],
)
# Replication update settings
depsgraph_update_rate: bpy.props.IntProperty(
name='depsgraph update rate',
description='Dependency graph update rate (milliseconds)',
default=1000
)
clear_memory_filecache: bpy.props.BoolProperty(
name="Clear memory filecache",
description="Remove filecache from memory",
default=False
)
# for UI
category: bpy.props.EnumProperty(
name="Category",
description="Preferences Category",
items=[
('CONFIG', "Configuration", "Configuration of this add-on"),
('CONFIG', "Configuration", "Configuration about this add-on"),
('UPDATE', "Update", "Update this add-on"),
],
default='CONFIG'
)
# WIP
logging_level: bpy.props.EnumProperty(
name="Log level",
description="Log verbosity level",
items=[
('ERROR', "error", "show only errors", logging.ERROR),
('WARNING', "warning", "only show warnings and errors", logging.WARNING),
('INFO', "info", "default level", logging.INFO),
('DEBUG', "debug", "show all logs", logging.DEBUG),
('ERROR', "error", "show only errors"),
('WARNING', "warning", "only show warnings and errors"),
('INFO', "info", "default level"),
('DEBUG', "debug", "show all logs"),
],
default='INFO',
set=set_log_level,
get=get_log_level
)
presence_hud_scale: bpy.props.FloatProperty(
name="Text scale",
description="Adjust the session widget text scale",
min=7,
max=90,
default=25,
)
presence_hud_hpos: bpy.props.FloatProperty(
name="Horizontal position",
description="Adjust the session widget horizontal position",
min=1,
max=90,
default=1,
step=1,
subtype='PERCENTAGE',
)
presence_hud_vpos: bpy.props.FloatProperty(
name="Vertical position",
description="Adjust the session widget vertical position",
min=1,
max=94,
default=1,
step=1,
subtype='PERCENTAGE',
default='INFO'
)
conf_session_identity_expanded: bpy.props.BoolProperty(
name="Identity",
@ -297,26 +181,6 @@ class SessionPrefs(bpy.types.AddonPreferences):
description="Interface",
default=False
)
sidebar_advanced_rep_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_rep_expanded",
description="sidebar_advanced_rep_expanded",
default=False
)
sidebar_advanced_log_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_log_expanded",
description="sidebar_advanced_log_expanded",
default=False
)
sidebar_advanced_net_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_net_expanded",
description="sidebar_advanced_net_expanded",
default=False
)
sidebar_advanced_cache_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_cache_expanded",
description="sidebar_advanced_cache_expanded",
default=False
)
auto_check_update: bpy.props.BoolProperty(
name="Auto-check for Update",
@ -368,9 +232,9 @@ class SessionPrefs(bpy.types.AddonPreferences):
# USER INFORMATIONS
box = grid.box()
box.prop(
self, "conf_session_identity_expanded", text="User information",
icon=get_expanded_icon(self.conf_session_identity_expanded),
emboss=False)
self, "conf_session_identity_expanded", text="User informations",
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_identity_expanded
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
if self.conf_session_identity_expanded:
box.row().prop(self, "username", text="name")
box.row().prop(self, "client_color", text="color")
@ -378,27 +242,24 @@ class SessionPrefs(bpy.types.AddonPreferences):
# NETWORK SETTINGS
box = grid.box()
box.prop(
self, "conf_session_net_expanded", text="Networking",
icon=get_expanded_icon(self.conf_session_net_expanded),
emboss=False)
self, "conf_session_net_expanded", text="Netorking",
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_net_expanded
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
if self.conf_session_net_expanded:
box.row().prop(self, "ip", text="Address")
row = box.row()
row.label(text="Port:")
row.prop(self, "port", text="")
row.prop(self, "port", text="Address")
row = box.row()
row.label(text="Init the session from:")
row.prop(self, "init_method", text="")
row = box.row()
row.label(text="Update method:")
row.prop(self, "update_method", text="")
table = box.box()
table.row().prop(
self, "conf_session_timing_expanded", text="Refresh rates",
icon=get_expanded_icon(self.conf_session_timing_expanded),
emboss=False)
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_timing_expanded
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
if self.conf_session_timing_expanded:
line = table.row()
@ -416,8 +277,8 @@ class SessionPrefs(bpy.types.AddonPreferences):
box = grid.box()
box.prop(
self, "conf_session_hosting_expanded", text="Hosting",
icon=get_expanded_icon(self.conf_session_hosting_expanded),
emboss=False)
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_hosting_expanded
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
if self.conf_session_hosting_expanded:
row = box.row()
row.label(text="Init the session from:")
@ -427,33 +288,23 @@ class SessionPrefs(bpy.types.AddonPreferences):
box = grid.box()
box.prop(
self, "conf_session_cache_expanded", text="Cache",
icon=get_expanded_icon(self.conf_session_cache_expanded),
emboss=False)
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_cache_expanded
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
if self.conf_session_cache_expanded:
box.row().prop(self, "cache_directory", text="Cache directory")
box.row().prop(self, "clear_memory_filecache", text="Clear memory filecache")
# INTERFACE SETTINGS
box = grid.box()
box.prop(
self, "conf_session_ui_expanded", text="Interface",
icon=get_expanded_icon(self.conf_session_ui_expanded),
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_ui_expanded else 'DISCLOSURE_TRI_RIGHT',
emboss=False)
if self.conf_session_ui_expanded:
box.row().prop(self, "panel_category", text="Panel category", expand=True)
row = box.row()
row.label(text="Session widget:")
col = box.column(align=True)
col.prop(self, "presence_hud_scale", expand=True)
col.prop(self, "presence_hud_hpos", expand=True)
col.prop(self, "presence_hud_vpos", expand=True)
if self.category == 'UPDATE':
from . import addon_updater_ops
addon_updater_ops.update_settings_ui(self, context)
addon_updater_ops.update_settings_ui_condensed(self, context)
def generate_supported_types(self):
self.supported_datablocks.clear()
@ -462,9 +313,9 @@ class SessionPrefs(bpy.types.AddonPreferences):
new_db = self.supported_datablocks.add()
type_module = getattr(bl_types, type)
name = [e.capitalize() for e in type.split('_')[1:]]
type_impl_name = 'Bl'+''.join(name)
type_impl_name = f"Bl{type.split('_')[1].capitalize()}"
type_module_class = getattr(type_module, type_impl_name)
new_db.name = type_impl_name
new_db.type_name = type_impl_name
new_db.bl_delay_refresh = type_module_class.bl_delay_refresh
@ -480,10 +331,10 @@ def client_list_callback(scene, context):
items = [(RP_COMMON, RP_COMMON, "")]
username = get_preferences().username
if session:
client_ids = session.online_users.keys()
username = utils.get_preferences().username
cli = operators.client
if cli:
client_ids = cli.online_users.keys()
for id in client_ids:
name_desc = id
if id == username:
@ -519,26 +370,25 @@ class SessionProps(bpy.types.PropertyGroup):
name="Presence overlay",
description='Enable overlay drawing module',
default=True,
update=presence.update_presence
)
presence_show_selected: bpy.props.BoolProperty(
name="Show selected objects",
description='Enable selection overlay ',
default=True,
update=presence.update_overlay_settings
)
presence_show_user: bpy.props.BoolProperty(
name="Show users",
description='Enable user overlay ',
default=True,
update=presence.update_overlay_settings
)
presence_show_far_user: bpy.props.BoolProperty(
name="Show users on different scenes",
description="Show user on different scenes",
default=False,
)
presence_show_session_status: bpy.props.BoolProperty(
name="Show session status ",
description="Show session status on the viewport",
default=True,
update=presence.update_overlay_settings
)
filter_owned: bpy.props.BoolProperty(
name="filter_owned",

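The address validation shown above accepts either a dotted IPv4 address or a hostname and falls back to localhost otherwise; a standalone sketch of the same check using the patterns from this diff:

import re

IP_REGEX = re.compile(r"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$")
HOSTNAME_REGEX = re.compile(r"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$")

def sanitize_address(value: str) -> str:
    # Mirrors update_ip(): keep a valid IPv4 or hostname, otherwise use localhost.
    if IP_REGEX.search(value) or HOSTNAME_REGEX.search(value):
        return value
    return "127.0.0.1"

assert sanitize_address("192.168.1.42") == "192.168.1.42"
assert sanitize_address("not an address!") == "127.0.0.1"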
View File

@ -19,8 +19,6 @@
import copy
import logging
import math
import sys
import traceback
import bgl
import blf
@ -29,17 +27,13 @@ import gpu
import mathutils
from bpy_extras import view3d_utils
from gpu_extras.batch import batch_for_shader
from replication.constants import (STATE_ACTIVE, STATE_AUTH, STATE_CONFIG,
STATE_INITIAL, STATE_LAUNCHING_SERVICES,
STATE_LOBBY, STATE_QUITTING, STATE_SRV_SYNC,
STATE_SYNCING, STATE_WAITING)
from replication.interface import session
from .utils import find_from_attr, get_state_str, get_preferences
from . import utils
# Helper functions
renderer = None
def view3d_find() -> tuple:
def view3d_find():
""" Find the first 'VIEW_3D' windows found in areas
:return: tuple(Area, Region, RegionView3D)
@ -61,48 +55,35 @@ def refresh_3d_view():
if area and region and rv3d:
area.tag_redraw()
def refresh_sidebar_view():
""" Refresh the blender viewport sidebar
""" Refresh the blender sidebar
"""
area, region, rv3d = view3d_find()
if area:
area.regions[3].tag_redraw()
area.regions[3].tag_redraw()
def project_to_viewport(region: bpy.types.Region, rv3d: bpy.types.RegionView3D, coords: list, distance: float = 1.0) -> list:
""" Compute a projection from 2D to 3D viewport coordinate
:param region: target windows region
:type region: bpy.types.Region
:param rv3d: view 3D
:type rv3d: bpy.types.RegionView3D
:param coords: coordinate to project
:type coords: list
:param distance: distance offset into viewport
:type distance: float
:return: list of coordinates [x,y,z]
"""
def get_target(region, rv3d, coord):
target = [0, 0, 0]
if coords and region and rv3d:
view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coords)
ray_origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coords)
target = ray_origin + view_vector * distance
if coord and region and rv3d:
view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coord)
ray_origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coord)
target = ray_origin + view_vector
return [target.x, target.y, target.z]
def bbox_from_obj(obj: bpy.types.Object, radius: float) -> list:
""" Generate a bounding box for a given object by using its world matrix
def get_target_far(region, rv3d, coord, distance):
target = [0, 0, 0]
:param obj: target object
:type obj: bpy.types.Object
:param radius: bounding box radius
:type radius: float
:return: list of 8 points [(x,y,z),...]
"""
if coord and region and rv3d:
view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coord)
ray_origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coord)
target = ray_origin + view_vector * distance
return [target.x, target.y, target.z]
def get_default_bbox(obj, radius):
coords = [
(-radius, -radius, -radius), (+radius, -radius, -radius),
(-radius, +radius, -radius), (+radius, +radius, -radius),
@ -110,384 +91,264 @@ def bbox_from_obj(obj: bpy.types.Object, radius: float) -> list:
(-radius, +radius, +radius), (+radius, +radius, +radius)]
base = obj.matrix_world
bbox_corners = [base @ mathutils.Vector(corner) for corner in coords]
bbox_corners = [base @ mathutils.Vector(corner) for corner in coords]
return [(point.x, point.y, point.z)
for point in bbox_corners]
for point in bbox_corners]
def generate_user_camera() -> list:
""" Generate a basic camera represention of the user point of view
:return: list of 7 points
"""
def get_view_corners():
area, region, rv3d = view3d_find()
v1 = v2 = v3 = v4 = v5 = v6 = v7 = [0, 0, 0]
v1 = [0, 0, 0]
v2 = [0, 0, 0]
v3 = [0, 0, 0]
v4 = [0, 0, 0]
v5 = [0, 0, 0]
v6 = [0, 0, 0]
v7 = [0, 0, 0]
if area and region and rv3d:
width = region.width
height = region.height
v1 = project_to_viewport(region, rv3d, (0, 0))
v3 = project_to_viewport(region, rv3d, (0, height))
v2 = project_to_viewport(region, rv3d, (width, height))
v4 = project_to_viewport(region, rv3d, (width, 0))
v1 = get_target(region, rv3d, (0, 0))
v3 = get_target(region, rv3d, (0, height))
v2 = get_target(region, rv3d, (width, height))
v4 = get_target(region, rv3d, (width, 0))
v5 = project_to_viewport(region, rv3d, (width/2, height/2))
v5 = get_target(region, rv3d, (width/2, height/2))
v6 = list(rv3d.view_location)
v7 = project_to_viewport(
region, rv3d, (width/2, height/2), distance=-.8)
v7 = get_target_far(region, rv3d, (width/2, height/2), -.8)
coords = [v1, v2, v3, v4, v5, v6, v7]
return coords
def project_to_screen(coords: list) -> list:
""" Project 3D coordinate to 2D screen coordinates
:param coords: 3D coordinates (x,y,z)
:type coords: list
:return: list of 2D coordinates [x,y]
"""
def get_client_2d(coords):
area, region, rv3d = view3d_find()
if area and region and rv3d:
return view3d_utils.location_3d_to_region_2d(region, rv3d, coords)
else:
return (0, 0)
def get_bb_coords_from_obj(object: bpy.types.Object, instance: bpy.types.Object = None) -> list:
""" Generate bounding box in world coordinate from object bound box
:param object: target object
:type object: bpy.types.Object
:param instance: optional instance
:type instance: bpy.types.Object
:return: list of 8 points [(x,y,z),...]
"""
base = object.matrix_world
if instance:
scale = mathutils.Matrix.Diagonal(object.matrix_world.to_scale())
base = instance.matrix_world @ scale.to_4x4()
def get_bb_coords_from_obj(object, parent=None):
base = object.matrix_world if parent is None else parent.matrix_world
bbox_corners = [base @ mathutils.Vector(
corner) for corner in object.bound_box]
corner) for corner in object.bound_box]
return [(point.x, point.y, point.z) for point in bbox_corners]
return [(point.x, point.y, point.z)
for point in bbox_corners]
def get_view_matrix() -> list:
""" Return the 3d viewport view matrix
:return: view matrix as a 4x4 list
"""
def get_view_matrix():
area, region, rv3d = view3d_find()
if area and region and rv3d:
if area and region and rv3d:
return [list(v) for v in rv3d.view_matrix]
def update_presence(self, context):
global renderer
class Widget(object):
""" Base class to define an interface element
"""
draw_type: str = 'POST_VIEW' # Draw event type
def poll(self) -> bool:
"""Test if the widget can be drawn or not
:return: bool
"""
return True
def draw(self):
"""How to draw the widget
"""
raise NotImplementedError()
class UserFrustumWidget(Widget):
# Camera widget indices
indices = ((1, 3), (2, 1), (3, 0),
(2, 0), (4, 5), (1, 6),
(2, 6), (3, 6), (0, 6))
def __init__(
self,
username):
self.username = username
self.settings = bpy.context.window_manager.session
@property
def data(self):
user = session.online_users.get(self.username)
if user:
return user.get('metadata')
if 'renderer' in globals() and hasattr(renderer, 'run'):
if self.enable_presence:
renderer.run()
else:
return None
def poll(self):
if self.data is None:
return False
scene_current = self.data.get('scene_current')
view_corners = self.data.get('view_corners')
return (scene_current == bpy.context.scene.name or
self.settings.presence_show_far_user) and \
view_corners and \
self.settings.presence_show_user and \
self.settings.enable_presence
def draw(self):
location = self.data.get('view_corners')
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
positions = [tuple(coord) for coord in location]
if len(positions) != 7:
return
batch = batch_for_shader(
shader,
'LINES',
{"pos": positions},
indices=self.indices)
bgl.glLineWidth(2.)
bgl.glEnable(bgl.GL_DEPTH_TEST)
bgl.glEnable(bgl.GL_BLEND)
bgl.glEnable(bgl.GL_LINE_SMOOTH)
shader.bind()
shader.uniform_float("color", self.data.get('color'))
batch.draw(shader)
renderer.stop()
class UserSelectionWidget(Widget):
def __init__(
self,
username):
self.username = username
self.settings = bpy.context.window_manager.session
def update_overlay_settings(self, context):
global renderer
@property
def data(self):
user = session.online_users.get(self.username)
if user:
return user.get('metadata')
else:
return None
def poll(self):
if self.data is None:
return False
user_selection = self.data.get('selected_objects')
scene_current = self.data.get('scene_current')
return (scene_current == bpy.context.scene.name or
self.settings.presence_show_far_user) and \
user_selection and \
self.settings.presence_show_selected and \
self.settings.enable_presence
def draw(self):
user_selection = self.data.get('selected_objects')
for select_ob in user_selection:
ob = find_from_attr("uuid", select_ob, bpy.data.objects)
if not ob:
return
vertex_pos = bbox_from_obj(ob, 1.0)
vertex_indices = ((0, 1), (0, 2), (1, 3), (2, 3),
(4, 5), (4, 6), (5, 7), (6, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
if ob.instance_collection:
for obj in ob.instance_collection.objects:
if obj.type == 'MESH' and hasattr(obj, 'bound_box'):
vertex_pos = get_bb_coords_from_obj(obj, instance=ob)
break
elif ob.type == 'EMPTY':
vertex_pos = bbox_from_obj(ob, ob.empty_display_size)
elif ob.type == 'LIGHT':
vertex_pos = bbox_from_obj(ob, ob.data.shadow_soft_size)
elif ob.type == 'LIGHT_PROBE':
vertex_pos = bbox_from_obj(ob, ob.data.influence_distance)
elif ob.type == 'CAMERA':
vertex_pos = bbox_from_obj(ob, ob.data.display_size)
elif hasattr(ob, 'bound_box'):
vertex_indices = (
(0, 1), (1, 2), (2, 3), (0, 3),
(4, 5), (5, 6), (6, 7), (4, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
vertex_pos = get_bb_coords_from_obj(ob)
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
batch = batch_for_shader(
shader,
'LINES',
{"pos": vertex_pos},
indices=vertex_indices)
shader.bind()
shader.uniform_float("color", self.data.get('color'))
batch.draw(shader)
class UserNameWidget(Widget):
draw_type = 'POST_PIXEL'
def __init__(
self,
username):
self.username = username
self.settings = bpy.context.window_manager.session
@property
def data(self):
user = session.online_users.get(self.username)
if user:
return user.get('metadata')
else:
return None
def poll(self):
if self.data is None:
return False
scene_current = self.data.get('scene_current')
view_corners = self.data.get('view_corners')
return (scene_current == bpy.context.scene.name or
self.settings.presence_show_far_user) and \
view_corners and \
self.settings.presence_show_user and \
self.settings.enable_presence
def draw(self):
view_corners = self.data.get('view_corners')
color = self.data.get('color')
position = [tuple(coord) for coord in view_corners]
coords = project_to_screen(position[1])
if coords:
blf.position(0, coords[0], coords[1]+10, 0)
blf.size(0, 16, 72)
blf.color(0, color[0], color[1], color[2], color[3])
blf.draw(0, self.username)
class SessionStatusWidget(Widget):
draw_type = 'POST_PIXEL'
def __init__(self):
self.preferences = get_preferences()
@property
def settings(self):
return getattr(bpy.context.window_manager, 'session', None)
def poll(self):
return self.settings and self.settings.presence_show_session_status and \
self.settings.enable_presence
def draw(self):
text_scale = self.preferences.presence_hud_scale
ui_scale = bpy.context.preferences.view.ui_scale
color = [1, 1, 0, 1]
state = session.state.get('STATE')
state_str = f"{get_state_str(state)}"
if state == STATE_ACTIVE:
color = [0, 1, 0, 1]
elif state == STATE_INITIAL:
color = [1, 0, 0, 1]
hpos = (self.preferences.presence_hud_hpos*bpy.context.area.width)/100
vpos = (self.preferences.presence_hud_vpos*bpy.context.area.height)/100
blf.position(0, hpos, vpos, 0)
blf.size(0, int(text_scale*ui_scale), 72)
blf.color(0, color[0], color[1], color[2], color[3])
blf.draw(0, state_str)
class DrawFactory(object):
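# Draw dispatcher: owns the SpaceView3D draw handler registrations and, on each
# POST_VIEW / POST_PIXEL redraw, calls into the registered Widget instances;
# the d3d_items / d2d_items dicts hold the per-client line batches used by the
# legacy draw callbacks further down.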
def __init__(self):
self.post_view_handle = None
self.post_pixel_handle = None
self.widgets = {}
self.d3d_items = {}
self.d2d_items = {}
self.draw3d_handle = None
self.draw2d_handle = None
self.draw_event = None
self.coords = None
self.active_object = None
def add_widget(self, name: str, widget: Widget):
self.widgets[name] = widget
def run(self):
self.register_handlers()
def remove_widget(self, name: str):
if name in self.widgets:
del self.widgets[name]
else:
logging.error(f"Widget {name} not existing")
def stop(self):
self.flush_users()
self.flush_selection()
self.unregister_handlers()
def clear_widgets(self):
self.widgets.clear()
refresh_3d_view()
def register_handlers(self):
self.post_view_handle = bpy.types.SpaceView3D.draw_handler_add(
self.post_view_callback,
(),
'WINDOW',
'POST_VIEW')
self.post_pixel_handle = bpy.types.SpaceView3D.draw_handler_add(
self.post_pixel_callback,
(),
'WINDOW',
'POST_PIXEL')
self.draw3d_handle = bpy.types.SpaceView3D.draw_handler_add(
self.draw3d_callback, (), 'WINDOW', 'POST_VIEW')
self.draw2d_handle = bpy.types.SpaceView3D.draw_handler_add(
self.draw2d_callback, (), 'WINDOW', 'POST_PIXEL')
def unregister_handlers(self):
if self.post_pixel_handle:
bpy.types.SpaceView3D.draw_handler_remove(
self.post_pixel_handle, "WINDOW")
self.post_pixel_handle = None
if self.post_view_handle:
bpy.types.SpaceView3D.draw_handler_remove(
self.post_view_handle, "WINDOW")
self.post_view_handle = None
if self.draw2d_handle:
bpy.types.SpaceView3D.draw_handler_remove(
self.draw2d_handle, "WINDOW")
self.draw2d_handle = None
if self.draw3d_handle:
bpy.types.SpaceView3D.draw_handler_remove(
self.draw3d_handle, "WINDOW")
self.draw3d_handle = None
self.d3d_items.clear()
self.d2d_items.clear()
def flush_selection(self, user=None):
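# Drop cached selection batches; when a username is given only that user's
# "<user>_select_*" entries are removed, otherwise every selection entry goes.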
key_to_remove = []
select_key = f"{user}_select" if user else "select"
for k in self.d3d_items.keys():
if select_key in k:
key_to_remove.append(k)
for k in key_to_remove:
del self.d3d_items[k]
def flush_users(self):
key_to_remove = []
for k in self.d3d_items.keys():
if "select" not in k:
key_to_remove.append(k)
for k in key_to_remove:
del self.d3d_items[k]
self.d2d_items.clear()
def draw_client_selection(self, client_id, client_color, client_selection):
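# Rebuild the bounding-box line batches for every object selected by a remote
# user, one d3d_items entry per object keyed "<client_id>_select_<uuid>".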
local_user = utils.get_preferences().username
if local_user != client_id:
self.flush_selection(client_id)
for select_ob in client_selection:
drawable_key = f"{client_id}_select_{select_ob}"
ob = utils.find_from_attr("uuid", select_ob, bpy.data.objects)
if not ob:
return
if ob.type == 'EMPTY':
# TODO: Child case
# Collection instance case
indices = (
(0, 1), (1, 2), (2, 3), (0, 3),
(4, 5), (5, 6), (6, 7), (4, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
if ob.instance_collection:
for obj in ob.instance_collection.objects:
if obj.type == 'MESH':
self.append_3d_item(
drawable_key,
client_color,
get_bb_coords_from_obj(obj, parent=ob),
indices)
if ob.type in ['MESH','META']:
indices = (
(0, 1), (1, 2), (2, 3), (0, 3),
(4, 5), (5, 6), (6, 7), (4, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
self.append_3d_item(
drawable_key,
client_color,
get_bb_coords_from_obj(ob),
indices)
else:
indices = (
(0, 1), (0, 2), (1, 3), (2, 3),
(4, 5), (4, 6), (5, 7), (6, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
self.append_3d_item(
drawable_key,
client_color,
get_default_bbox(ob, ob.scale.x),
indices)
def append_3d_item(self,key,color, coords, indices):
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
color = color
batch = batch_for_shader(
shader, 'LINES', {"pos": coords}, indices=indices)
self.d3d_items[key] = (shader, batch, color)
def draw_client_camera(self, client_id, client_location, client_color):
if client_location:
local_user = utils.get_preferences().username
if local_user != client_id:
try:
indices = (
(1, 3), (2, 1), (3, 0),
(2, 0), (4, 5), (1, 6),
(2, 6), (3, 6), (0, 6)
)
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
position = [tuple(coord) for coord in client_location]
color = client_color
batch = batch_for_shader(
shader, 'LINES', {"pos": position}, indices=indices)
self.d3d_items[client_id] = (shader, batch, color)
self.d2d_items[client_id] = (position[1], client_id, color)
except Exception as e:
logging.error(f"Draw client exception: {e}")
def draw3d_callback(self):
bgl.glLineWidth(1.5)
bgl.glEnable(bgl.GL_DEPTH_TEST)
bgl.glEnable(bgl.GL_BLEND)
bgl.glEnable(bgl.GL_LINE_SMOOTH)
try:
for shader, batch, color in self.d3d_items.values():
shader.bind()
shader.uniform_float("color", color)
batch.draw(shader)
except Exception:
logging.error("3D Exception")
def post_view_callback(self):
try:
for widget in self.widgets.values():
if widget.draw_type == 'POST_VIEW' and widget.poll():
widget.draw()
except Exception as e:
logging.error(
f"Post view widget exception: {e} \n {traceback.print_exc()}")
def post_pixel_callback(self):
try:
for widget in self.widgets.values():
if widget.draw_type == 'POST_PIXEL' and widget.poll():
widget.draw()
except Exception as e:
logging.error(
f"Post pixel widget Exception: {e} \n {traceback.print_exc()}")
def draw2d_callback(self):
for position, font, color in self.d2d_items.values():
try:
coords = get_client_2d(position)
if coords:
blf.position(0, coords[0], coords[1]+10, 0)
blf.size(0, 16, 72)
blf.color(0, color[0], color[1], color[2], color[3])
blf.draw(0, font)
except Exception:
logging.error("2D EXCEPTION")
this = sys.modules[__name__]
this.renderer = DrawFactory()
def register():
this.renderer.register_handlers()
this.renderer.add_widget("session_status", SessionStatusWidget())
def unregister():
this.renderer.unregister_handlers()
this.renderer.clear_widgets()

View File

@ -18,8 +18,8 @@
import bpy
from .utils import get_preferences, get_expanded_icon, get_folder_size, get_state_str
from replication.constants import (ADDED, ERROR, FETCHED,
from . import operators, utils
from .libs.replication.replication.constants import (ADDED, ERROR, FETCHED,
MODIFIED, RP_COMMON, UP,
STATE_ACTIVE, STATE_AUTH,
STATE_CONFIG, STATE_SYNCING,
@ -27,16 +27,13 @@ from replication.constants import (ADDED, ERROR, FETCHED,
STATE_WAITING, STATE_QUITTING,
STATE_LOBBY,
STATE_LAUNCHING_SERVICES)
from replication import __version__
from replication.interface import session
ICONS_PROP_STATES = ['TRIA_DOWN', # ADDED
'TRIA_UP', # COMMITED
'KEYTYPE_KEYFRAME_VEC', # PUSHED
'TRIA_DOWN', # FETCHED
'RECOVER_LAST', # RESET
'TRIA_UP', # CHANGED
'ERROR'] # ERROR
'FILE_REFRESH', # UP
'TRIA_UP'] # CHANGED
def printProgressBar(iteration, total, prefix='', suffix='', decimals=1, length=100, fill='', fill_empty=' '):
@ -53,26 +50,50 @@ def printProgressBar(iteration, total, prefix='', suffix='', decimals=1, length=
From here:
https://gist.github.com/greenstick/b23e475d2bfdc3a82e34eaa1f6781ee4
"""
if total == 0:
return ""
filledLength = int(length * iteration // total)
bar = fill * filledLength + fill_empty * (length - filledLength)
return f"{prefix} |{bar}| {iteration}/{total}{suffix}"
def get_state_str(state):
state_str = 'UNKNOWN'
if state == STATE_WAITING:
state_str = 'WARMING UP DATA'
elif state == STATE_SYNCING:
state_str = 'FETCHING'
elif state == STATE_AUTH:
state_str = 'AUTHENTICATION'
elif state == STATE_CONFIG:
state_str = 'CONFIGURATION'
elif state == STATE_ACTIVE:
state_str = 'ONLINE'
elif state == STATE_SRV_SYNC:
state_str = 'PUSHING'
elif state == STATE_INITIAL:
state_str = 'INIT'
elif state == STATE_QUITTING:
state_str = 'QUITTING'
elif state == STATE_LAUNCHING_SERVICES:
state_str = 'LAUNCHING SERVICES'
elif state == STATE_LOBBY:
state_str = 'LOBBY'
return state_str
class SESSION_PT_settings(bpy.types.Panel):
"""Settings panel"""
bl_idname = "MULTIUSER_SETTINGS_PT_panel"
bl_label = " "
bl_label = ""
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "Multiuser"
def draw_header(self, context):
layout = self.layout
if session and session.state['STATE'] != STATE_INITIAL:
cli_state = session.state
state = session.state.get('STATE')
if operators.client and operators.client.state['STATE'] != STATE_INITIAL:
cli_state = operators.client.state
state = operators.client.state.get('STATE')
connection_icon = "KEYTYPE_MOVING_HOLD_VEC"
if state == STATE_ACTIVE:
@ -82,53 +103,72 @@ class SESSION_PT_settings(bpy.types.Panel):
layout.label(text=f"Session - {get_state_str(cli_state['STATE'])}", icon=connection_icon)
else:
layout.label(text=f"Session - v{__version__}",icon="PROP_OFF")
layout.label(text="Session",icon="PROP_OFF")
def draw(self, context):
layout = self.layout
layout.use_property_split = True
row = layout.row()
runtime_settings = context.window_manager.session
settings = get_preferences()
settings = utils.get_preferences()
if hasattr(context.window_manager, 'session'):
# STATE INITIAL
if not session \
or (session and session.state['STATE'] == STATE_INITIAL):
if not operators.client \
or (operators.client and operators.client.state['STATE'] == STATE_INITIAL):
pass
else:
cli_state = session.state
cli_state = operators.client.state
row = layout.row()
current_state = cli_state['STATE']
info_msg = None
if current_state in [STATE_ACTIVE]:
row = row.grid_flow(row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
row.prop(settings.sync_flags, "sync_render_settings",text="",icon_only=True, icon='SCENE')
row.prop(settings.sync_flags, "sync_during_editmode", text="",icon_only=True, icon='EDITMODE_HLT')
row.prop(settings.sync_flags, "sync_active_camera", text="",icon_only=True, icon='OBJECT_DATAMODE')
# STATE ACTIVE
if current_state in [STATE_ACTIVE, STATE_LOBBY]:
row.operator("session.stop", icon='QUIT', text="Exit")
row = layout.row()
if runtime_settings.is_host:
row = row.box()
row.label(text=f"{runtime_settings.internet_ip}:{settings.port}", icon='INFO')
row = layout.row()
row= layout.row()
# CONNECTION STATE
elif current_state in [STATE_SRV_SYNC,
STATE_SYNCING,
STATE_AUTH,
STATE_CONFIG,
STATE_WAITING]:
if current_state in [STATE_ACTIVE] and runtime_settings.is_host:
info_msg = f"LAN: {runtime_settings.internet_ip}"
if current_state == STATE_LOBBY:
info_msg = "Waiting for the session to start."
if cli_state['STATE'] in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]:
box = row.box()
box.label(text=printProgressBar(
cli_state['CURRENT'],
cli_state['TOTAL'],
length=16
))
if info_msg:
info_box = row.box()
info_box.row().label(text=info_msg,icon='INFO')
row = layout.row()
row.operator("session.stop", icon='QUIT', text="CANCEL")
elif current_state == STATE_QUITTING:
row = layout.row()
box = row.box()
# Progress bar
if current_state in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]:
info_box = row.box()
info_box.row().label(text=printProgressBar(
cli_state['CURRENT'],
cli_state['TOTAL'],
length=16
))
num_online_services = 0
for name, state in operators.client.services_state.items():
if state == STATE_ACTIVE:
num_online_services += 1
total_online_services = len(
operators.client.services_state)
box.label(text=printProgressBar(
total_online_services-num_online_services,
total_online_services,
length=16
))
layout.row().operator("session.stop", icon='QUIT', text="Exit")
class SESSION_PT_settings_network(bpy.types.Panel):
bl_idname = "MULTIUSER_SETTINGS_NETWORK_PT_panel"
@ -139,8 +179,8 @@ class SESSION_PT_settings_network(bpy.types.Panel):
@classmethod
def poll(cls, context):
return not session \
or (session and session.state['STATE'] == 0)
return not operators.client \
or (operators.client and operators.client.state['STATE'] == 0)
def draw_header(self, context):
self.layout.label(text="", icon='URL')
@ -149,7 +189,7 @@ class SESSION_PT_settings_network(bpy.types.Panel):
layout = self.layout
runtime_settings = context.window_manager.session
settings = get_preferences()
settings = utils.get_preferences()
# USER SETTINGS
row = layout.row()
@ -197,8 +237,8 @@ class SESSION_PT_settings_user(bpy.types.Panel):
@classmethod
def poll(cls, context):
return not session \
or (session and session.state['STATE'] == 0)
return not operators.client \
or (operators.client and operators.client.state['STATE'] == 0)
def draw_header(self, context):
self.layout.label(text="", icon='USER')
@ -207,7 +247,7 @@ class SESSION_PT_settings_user(bpy.types.Panel):
layout = self.layout
runtime_settings = context.window_manager.session
settings = get_preferences()
settings = utils.get_preferences()
row = layout.row()
# USER SETTINGS
@ -228,8 +268,8 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
@classmethod
def poll(cls, context):
return not session \
or (session and session.state['STATE'] == 0)
return not operators.client \
or (operators.client and operators.client.state['STATE'] == 0)
def draw_header(self, context):
self.layout.label(text="", icon='PREFERENCES')
@ -238,107 +278,44 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
layout = self.layout
runtime_settings = context.window_manager.session
settings = get_preferences()
settings = utils.get_preferences()
net_section = layout.row().box()
net_section.prop(
settings,
"sidebar_advanced_net_expanded",
text="Network",
icon=get_expanded_icon(settings.sidebar_advanced_net_expanded),
emboss=False)
if settings.sidebar_advanced_net_expanded:
net_section_row = net_section.row()
net_section_row.label(text="IPC Port:")
net_section_row.prop(settings, "ipc_port", text="")
net_section_row = net_section.row()
net_section_row.label(text="Timeout (ms):")
net_section_row.prop(settings, "connection_timeout", text="")
net_section.label(text="Network ", icon='TRIA_DOWN')
net_section_row = net_section.row()
net_section_row.label(text="IPC Port:")
net_section_row.prop(settings, "ipc_port", text="")
net_section_row = net_section.row()
net_section_row.label(text="Timeout (ms):")
net_section_row.prop(settings, "connection_timeout", text="")
replication_section = layout.row().box()
replication_section.prop(
settings,
"sidebar_advanced_rep_expanded",
text="Replication",
icon=get_expanded_icon(settings.sidebar_advanced_rep_expanded),
emboss=False)
if settings.sidebar_advanced_rep_expanded:
replication_section_row = replication_section.row()
replication_section_row.label(text="Sync flags", icon='COLLECTION_NEW')
replication_section_row = replication_section.row()
replication_section.label(text="Replication ", icon='TRIA_DOWN')
replication_section_row = replication_section.row()
if runtime_settings.session_mode == 'HOST':
replication_section_row.prop(settings.sync_flags, "sync_render_settings")
replication_section_row = replication_section.row()
replication_section_row.prop(settings.sync_flags, "sync_active_camera")
replication_section_row = replication_section.row()
replication_section_row.prop(settings.sync_flags, "sync_during_editmode")
replication_section_row = replication_section.row()
if settings.sync_flags.sync_during_editmode:
warning = replication_section_row.box()
warning.label(text="Don't use this with heavy meshes !", icon='ERROR')
replication_section_row = replication_section.row()
replication_section_row = replication_section.row()
replication_section_row.label(text="Per data type timers:")
replication_section_row = replication_section.row()
# Replication frequencies
flow = replication_section_row .grid_flow(
row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
line = flow.row(align=True)
line.label(text=" ")
line.separator()
line.label(text="refresh (sec)")
line.label(text="apply (sec)")
replication_section_row.label(text="Update method", icon='RECOVER_LAST')
replication_section_row = replication_section.row()
replication_section_row.prop(settings, "update_method", expand=True)
replication_section_row = replication_section.row()
replication_timers = replication_section_row.box()
replication_timers.label(text="Replication timers", icon='TIME')
if settings.update_method == "DEFAULT":
replication_timers = replication_timers.row()
# Replication frequencies
flow = replication_timers.grid_flow(
row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
line = flow.row(align=True)
line.label(text=" ")
line.separator()
line.label(text="refresh (sec)")
line.label(text="apply (sec)")
for item in settings.supported_datablocks:
line = flow.row(align=True)
line.prop(item, "auto_push", text="", icon=item.icon)
line.separator()
line.prop(item, "bl_delay_refresh", text="")
line.prop(item, "bl_delay_apply", text="")
for item in settings.supported_datablocks:
line = flow.row(align=True)
line.prop(item, "auto_push", text="", icon=item.icon)
line.separator()
line.prop(item, "bl_delay_refresh", text="")
line.prop(item, "bl_delay_apply", text="")
else:
replication_timers = replication_timers.row()
replication_timers.label(text="Update rate (ms):")
replication_timers.prop(settings, "depsgraph_update_rate", text="")
cache_section = layout.row().box()
cache_section.prop(
settings,
"sidebar_advanced_cache_expanded",
text="Cache",
icon=get_expanded_icon(settings.sidebar_advanced_cache_expanded),
emboss=False)
if settings.sidebar_advanced_cache_expanded:
cache_section_row = cache_section.row()
cache_section_row.label(text="Cache directory:")
cache_section_row = cache_section.row()
cache_section_row.prop(settings, "cache_directory", text="")
cache_section_row = cache_section.row()
cache_section_row.label(text="Clear memory filecache:")
cache_section_row.prop(settings, "clear_memory_filecache", text="")
cache_section_row = cache_section.row()
cache_section_row.operator('session.clear_cache', text=f"Clear cache ({get_folder_size(settings.cache_directory)})")
log_section = layout.row().box()
log_section.prop(
settings,
"sidebar_advanced_log_expanded",
text="Logging",
icon=get_expanded_icon(settings.sidebar_advanced_log_expanded),
emboss=False)
if settings.sidebar_advanced_log_expanded:
log_section_row = log_section.row()
log_section_row.label(text="Log level:")
log_section_row.prop(settings, 'logging_level', text="")
class SESSION_PT_user(bpy.types.Panel):
bl_idname = "MULTIUSER_USER_PT_panel"
bl_label = "Online users"
@ -348,7 +325,7 @@ class SESSION_PT_user(bpy.types.Panel):
@classmethod
def poll(cls, context):
return session and session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]
return operators.client and operators.client.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]
def draw_header(self, context):
self.layout.label(text="", icon='USER')
@ -357,7 +334,7 @@ class SESSION_PT_user(bpy.types.Panel):
layout = self.layout
online_users = context.window_manager.online_users
selected_user = context.window_manager.user_index
settings = get_preferences()
settings = utils.get_preferences()
active_user = online_users[selected_user] if len(
online_users)-1 >= selected_user else 0
runtime_settings = context.window_manager.session
@ -379,21 +356,19 @@ class SESSION_PT_user(bpy.types.Panel):
if active_user != 0 and active_user.username != settings.username:
row = layout.row()
user_operations = row.split()
if session.state['STATE'] == STATE_ACTIVE:
user_operations.alert = context.window_manager.session.time_snap_running
user_operations.operator(
"session.snapview",
text="",
icon='VIEW_CAMERA').target_client = active_user.username
user_operations.alert = context.window_manager.session.time_snap_running
user_operations.operator(
"session.snapview",
text="",
icon='VIEW_CAMERA').target_client = active_user.username
user_operations.alert = context.window_manager.session.user_snap_running
user_operations.operator(
"session.snaptime",
text="",
icon='TIME').target_client = active_user.username
user_operations.alert = context.window_manager.session.user_snap_running
user_operations.operator(
"session.snaptime",
text="",
icon='TIME').target_client = active_user.username
if session.online_users[settings.username]['admin']:
if operators.client.online_users[settings.username]['admin']:
user_operations.operator(
"session.kick",
text="",
@ -402,7 +377,8 @@ class SESSION_PT_user(bpy.types.Panel):
class SESSION_UL_users(bpy.types.UIList):
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index, flt_flag):
settings = get_preferences()
session = operators.client
settings = utils.get_preferences()
is_local_user = item.username == settings.username
ping = '-'
frame_current = '-'
@ -414,8 +390,8 @@ class SESSION_UL_users(bpy.types.UIList):
ping = str(user['latency'])
metadata = user.get('metadata')
if metadata and 'frame_current' in metadata:
frame_current = str(metadata.get('frame_current','-'))
scene_current = metadata.get('scene_current','-')
frame_current = str(metadata['frame_current'])
scene_current = metadata['scene_current']
if user['admin']:
status_icon = 'FAKE_USER_ON'
split = layout.split(factor=0.35)
@ -436,8 +412,8 @@ class SESSION_PT_presence(bpy.types.Panel):
@classmethod
def poll(cls, context):
return not session \
or (session and session.state['STATE'] in [STATE_INITIAL, STATE_ACTIVE])
return not operators.client \
or (operators.client and operators.client.state['STATE'] in [STATE_INITIAL, STATE_ACTIVE])
def draw_header(self, context):
self.layout.prop(context.window_manager.session,
@ -447,35 +423,56 @@ class SESSION_PT_presence(bpy.types.Panel):
layout = self.layout
settings = context.window_manager.session
pref = get_preferences()
layout.active = settings.enable_presence
col = layout.column()
col.prop(settings, "presence_show_session_status")
row = col.column()
row.active = settings.presence_show_session_status
row.prop(pref, "presence_hud_scale", expand=True)
row = col.column(align=True)
row.active = settings.presence_show_session_status
row.prop(pref, "presence_hud_hpos", expand=True)
row.prop(pref, "presence_hud_vpos", expand=True)
col.prop(settings, "presence_show_selected")
col.prop(settings, "presence_show_user")
row = layout.column()
row.active = settings.presence_show_user
row.prop(settings, "presence_show_far_user")
def draw_property(context, parent, property_uuid, level=0):
settings = get_preferences()
runtime_settings = context.window_manager.session
item = session.get(uuid=property_uuid)
area_msg = parent.row(align=True)
class SESSION_PT_services(bpy.types.Panel):
bl_idname = "MULTIUSER_SERVICE_PT_panel"
bl_label = "Services"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
return operators.client and operators.client.state['STATE'] == 2
def draw_header(self, context):
self.layout.label(text="", icon='FILE_CACHE')
def draw(self, context):
layout = self.layout
online_users = context.window_manager.online_users
selected_user = context.window_manager.user_index
settings = context.window_manager.session
active_user = online_users[selected_user] if len(online_users)-1 >= selected_user else 0
# Create a simple row.
for name, state in operators.client.services_state.items():
row = layout.row()
row.label(text=name)
row.label(text=get_state_str(state))
def draw_property(context, parent, property_uuid, level=0):
settings = utils.get_preferences()
runtime_settings = context.window_manager.session
item = operators.client.get(uuid=property_uuid)
if item.state == ERROR:
area_msg.alert=True
else:
area_msg.alert=False
return
area_msg = parent.row(align=True)
if level > 0:
for i in range(level):
area_msg.label(text="")
line = area_msg.box()
name = item.data['name'] if item.data else item.uuid
@ -488,8 +485,8 @@ def draw_property(context, parent, property_uuid, level=0):
# Operations
have_right_to_modify = (item.owner == settings.username or \
item.owner == RP_COMMON) and item.state != ERROR
have_right_to_modify = item.owner == settings.username or \
item.owner == RP_COMMON
if have_right_to_modify:
detail_item_box.operator(
@ -499,12 +496,10 @@ def draw_property(context, parent, property_uuid, level=0):
detail_item_box.separator()
if item.state in [FETCHED, UP]:
apply = detail_item_box.operator(
detail_item_box.operator(
"session.apply",
text="",
icon=ICONS_PROP_STATES[item.state])
apply.target = item.uuid
apply.reset_dependencies = True
icon=ICONS_PROP_STATES[item.state]).target = item.uuid
elif item.state in [MODIFIED, ADDED]:
detail_item_box.operator(
"session.commit",
@ -527,6 +522,7 @@ def draw_property(context, parent, property_uuid, level=0):
else:
detail_item_box.label(text="", icon="DECORATE_LOCKED")
class SESSION_PT_repository(bpy.types.Panel):
bl_idname = "MULTIUSER_PROPERTIES_PT_panel"
bl_label = "Repository"
@ -536,17 +532,9 @@ class SESSION_PT_repository(bpy.types.Panel):
@classmethod
def poll(cls, context):
settings = get_preferences()
admin = False
if session and hasattr(session,'online_users'):
usr = session.online_users.get(settings.username)
if usr:
admin = usr['admin']
return hasattr(context.window_manager, 'session') and \
session and \
(session.state['STATE'] == STATE_ACTIVE or \
session.state['STATE'] == STATE_LOBBY and admin)
operators.client and \
operators.client.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]
def draw_header(self, context):
self.layout.label(text="", icon='OUTLINER_OB_GROUP_INSTANCE')
@ -555,9 +543,10 @@ class SESSION_PT_repository(bpy.types.Panel):
layout = self.layout
# Filters
settings = get_preferences()
settings = utils.get_preferences()
runtime_settings = context.window_manager.session
session = operators.client
usr = session.online_users.get(settings.username)
row = layout.row()
@ -583,11 +572,11 @@ class SESSION_PT_repository(bpy.types.Panel):
types_filter = [t.type_name for t in settings.supported_datablocks
if t.use_as_filter]
key_to_filter = session.list(
filter_owner=settings.username) if runtime_settings.filter_owned else session.list()
key_to_filter = operators.client.list(
filter_owner=settings.username) if runtime_settings.filter_owned else operators.client.list()
client_keys = [key for key in key_to_filter
if session.get(uuid=key).str_type
if operators.client.get(uuid=key).str_type
in types_filter]
if client_keys:
@ -603,36 +592,6 @@ class SESSION_PT_repository(bpy.types.Panel):
else:
row.label(text="Waiting to start")
class VIEW3D_PT_overlay_session(bpy.types.Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay'
bl_label = "Multi-user"
@classmethod
def poll(cls, context):
return True
def draw(self, context):
layout = self.layout
view = context.space_data
overlay = view.overlay
display_all = overlay.show_overlays
col = layout.column()
row = col.row(align=True)
settings = context.window_manager.session
layout.active = settings.enable_presence
col = layout.column()
col.prop(settings, "presence_show_session_status")
col.prop(settings, "presence_show_selected")
col.prop(settings, "presence_show_user")
row = layout.column()
row.active = settings.presence_show_user
row.prop(settings, "presence_show_far_user")
classes = (
SESSION_UL_users,
@ -642,8 +601,9 @@ classes = (
SESSION_PT_presence,
SESSION_PT_advanced_settings,
SESSION_PT_user,
SESSION_PT_services,
SESSION_PT_repository,
VIEW3D_PT_overlay_session,
)

View File

@ -21,22 +21,13 @@ import logging
import os
import sys
import time
from collections.abc import Iterable
from pathlib import Path
from uuid import uuid4
import math
from collections.abc import Iterable
import bpy
import mathutils
from . import environment
from replication.constants import (STATE_ACTIVE, STATE_AUTH,
STATE_CONFIG, STATE_SYNCING,
STATE_INITIAL, STATE_SRV_SYNC,
STATE_WAITING, STATE_QUITTING,
STATE_LOBBY,
STATE_LAUNCHING_SERVICES)
from . import environment, presence
def find_from_attr(attr_name, attr_value, list):
@ -48,7 +39,7 @@ def find_from_attr(attr_name, attr_value, list):
def get_datablock_users(datablock):
users = []
supported_types = get_preferences().supported_datablocks
supported_types = get_preferences().supported_datablocks
if hasattr(datablock, 'users_collection') and datablock.users_collection:
users.extend(list(datablock.users_collection))
if hasattr(datablock, 'users_scene') and datablock.users_scene:
@ -56,7 +47,7 @@ def get_datablock_users(datablock):
if hasattr(datablock, 'users_group') and datablock.users_group:
users.extend(list(datablock.users_group))
for datatype in supported_types:
if datatype.bl_name != 'users' and hasattr(bpy.data, datatype.bl_name):
if datatype.bl_name != 'users':
root = getattr(bpy.data, datatype.bl_name)
for item in root:
if hasattr(item, 'data') and datablock == item.data or \
@ -65,32 +56,6 @@ def get_datablock_users(datablock):
return users
def get_state_str(state):
state_str = 'UNKNOWN'
if state == STATE_WAITING:
state_str = 'WARMING UP DATA'
elif state == STATE_SYNCING:
state_str = 'FETCHING'
elif state == STATE_AUTH:
state_str = 'AUTHENTICATION'
elif state == STATE_CONFIG:
state_str = 'CONFIGURATION'
elif state == STATE_ACTIVE:
state_str = 'ONLINE'
elif state == STATE_SRV_SYNC:
state_str = 'PUSHING'
elif state == STATE_INITIAL:
state_str = 'OFFLINE'
elif state == STATE_QUITTING:
state_str = 'QUITTING'
elif state == STATE_LAUNCHING_SERVICES:
state_str = 'LAUNCHING SERVICES'
elif state == STATE_LOBBY:
state_str = 'LOBBY'
return state_str
def clean_scene():
for type_name in dir(bpy.data):
try:
@ -99,9 +64,7 @@ def clean_scene():
type_collection.remove(item)
except:
continue
# Clear sequencer
bpy.context.scene.sequence_editor_clear()
def get_selected_objects(scene, active_view_layer):
return [obj.uuid for obj in scene.objects if obj.select_get(view_layer=active_view_layer)]
@ -114,76 +77,10 @@ def resolve_from_id(id, optionnal_type=None):
if id in root and ((optionnal_type is None) or (optionnal_type.lower() in root[id].__class__.__name__.lower())):
return root[id]
return None
def get_preferences():
return bpy.context.preferences.addons[__package__].preferences
def current_milli_time():
return int(round(time.time() * 1000))
def get_expanded_icon(prop: bpy.types.BoolProperty) -> str:
if prop:
return 'DISCLOSURE_TRI_DOWN'
else:
return 'DISCLOSURE_TRI_RIGHT'
# Taken from here: https://stackoverflow.com/a/55659577
def get_folder_size(folder):
return ByteSize(sum(file.stat().st_size for file in Path(folder).rglob('*')))
class ByteSize(int):
_kB = 1024
_suffixes = 'B', 'kB', 'MB', 'GB', 'PB'
def __new__(cls, *args, **kwargs):
return super().__new__(cls, *args, **kwargs)
def __init__(self, *args, **kwargs):
self.bytes = self.B = int(self)
self.kilobytes = self.kB = self / self._kB**1
self.megabytes = self.MB = self / self._kB**2
self.gigabytes = self.GB = self / self._kB**3
self.petabytes = self.PB = self / self._kB**4
*suffixes, last = self._suffixes
suffix = next((
suffix
for suffix in suffixes
if 1 < getattr(self, suffix) < self._kB
), last)
self.readable = suffix, getattr(self, suffix)
super().__init__()
def __str__(self):
return self.__format__('.2f')
def __repr__(self):
return '{}({})'.format(self.__class__.__name__, super().__repr__())
def __format__(self, format_spec):
suffix, val = self.readable
return '{val:{fmt}} {suf}'.format(val=math.ceil(val), fmt=format_spec, suf=suffix)
def __sub__(self, other):
return self.__class__(super().__sub__(other))
def __add__(self, other):
return self.__class__(super().__add__(other))
def __mul__(self, other):
return self.__class__(super().__mul__(other))
def __rsub__(self, other):
return self.__class__(super().__sub__(other))
def __radd__(self, other):
return self.__class__(super().__add__(other))
def __rmul__(self, other):
return self.__class__(super().__rmul__(other))
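# Usage sketch (illustrative only): ByteSize keeps the raw integer byte count
# but formats itself with a human readable suffix, which is what the cache
# panel's "Clear cache (...)" label relies on via get_folder_size().
# >>> str(ByteSize(3 * 1024 ** 2))
# '3.00 MB'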

View File

@ -1,25 +0,0 @@
# Download base image debian jessie
FROM python:slim
ARG replication_version=0.1.13
ARG version=0.1.1
# Infos
LABEL maintainer="Swann Martinez"
LABEL version=$version
LABEL description="Blender multi-user addon \
dedicated server image."
# Argument
ENV password='admin'
ENV port=5555
ENV timeout=5000
ENV log_level=DEBUG
ENV log_file="multiuser_server.log"
#Install replication
RUN pip install replication==$replication_version
# Run the server with parameters
ENTRYPOINT ["/bin/sh", "-c"]
CMD ["python3 -m replication.server -pwd ${password} -p ${port} -t ${timeout} -l ${log_level} -lf ${log_file}"]

View File

@ -1,6 +0,0 @@
import re
init_py = open("multi_user/__init__.py").read()
version = re.search("\d+, \d+, \d+", init_py).group(0)
digits = version.split(',')
print('.'.join(digits).replace(" ",""))

View File

@ -1,4 +0,0 @@
import re
init_py = open("multi_user/__init__.py").read()
print(re.search("\d+\.\d+\.\d+\w\d+|\d+\.\d+\.\d+", init_py).group(0))

View File

@ -1,10 +0,0 @@
#! /bin/bash
# Start server in docker container, from image hosted on the multi-user gitlab's container registry
docker run -d \
-p 5555-5560:5555-5560 \
-e port=5555 \
-e log_level=DEBUG \
-e password=admin \
-e timeout=1000 \
registry.gitlab.com/slumber/multi-user/multi-user-server:0.1.0

View File

@ -1,5 +0,0 @@
#! /bin/bash
# Start replication server locally, and include logging (requires replication_version=0.0.21a15)
clear
replication.serve -p 5555 -pwd admin -t 1000 -l DEBUG -lf server.log

View File

@ -13,7 +13,7 @@ def main():
if len(sys.argv) > 2:
blender_rev = sys.argv[2]
else:
blender_rev = "2.91.0"
blender_rev = "2.90.0"
try:
exit_val = BAT.test_blender_addon(addon_path=addon, blender_revision=blender_rev)

View File

@ -2,7 +2,7 @@ import os
import pytest
from deepdiff import DeepDiff
from uuid import uuid4
import bpy
import random
from multi_user.bl_types.bl_collection import BlCollection
@ -10,13 +10,8 @@ from multi_user.bl_types.bl_collection import BlCollection
def test_collection(clear_blend):
# Generate a collection with childrens and a cube
datablock = bpy.data.collections.new("root")
datablock.uuid = str(uuid4())
s1 = bpy.data.collections.new("child")
s1.uuid = str(uuid4())
s2 = bpy.data.collections.new("child2")
s2.uuid = str(uuid4())
datablock.children.link(s1)
datablock.children.link(s2)
datablock.children.link(bpy.data.collections.new("child"))
datablock.children.link(bpy.data.collections.new("child2"))
bpy.ops.mesh.primitive_cube_add()
datablock.objects.link(bpy.data.objects[0])

Some files were not shown because too many files have changed in this diff Show More