Compare commits
121 Commits
173-differ
...
199-filter
Author | SHA1 | Date | |
---|---|---|---|
fcf5a12dd0 | |||
b0529e4444 | |||
bdfd89c085 | |||
ff1630f9cc | |||
5830fe1abb | |||
c609f72080 | |||
a28a6f91bd | |||
a996f39d3b | |||
7790a16034 | |||
836fdd02b8 | |||
7cb3482353 | |||
041022056c | |||
05f3eb1445 | |||
17193bde3a | |||
a14b4313f5 | |||
b203d9dffd | |||
f64db2155e | |||
e07ebdeff5 | |||
3d6453f7a2 | |||
7421511079 | |||
bc24525cec | |||
699cf578e2 | |||
e9b4afb440 | |||
0c6491590e | |||
b87e733ddc | |||
cb0962b484 | |||
1fc25412ac | |||
b5405553dc | |||
a1b6fb0533 | |||
b6a8a2ec01 | |||
3e41b18af1 | |||
f7c4f5d1fe | |||
c616054878 | |||
5c08493774 | |||
af8a138b4f | |||
6d9216f14a | |||
fc4fb088bb | |||
98553ba00c | |||
1e15a12b10 | |||
569543650f | |||
07358802f7 | |||
a059fafe12 | |||
297f68ccfe | |||
c9c70d1e08 | |||
a34f58ef3f | |||
e7b7f38991 | |||
392e0aaaa3 | |||
4c774d5d53 | |||
4c4cf8a970 | |||
211d0848c2 | |||
c9665c4719 | |||
431fe0d840 | |||
df7ca66ad8 | |||
c2d2db78e6 | |||
ad89a4e389 | |||
6ca6d4443d | |||
81c9b5fc06 | |||
9fddfe084c | |||
ca40523393 | |||
76e28ced21 | |||
55c6002b28 | |||
8d5c8aded3 | |||
8ebba80b97 | |||
50d6c6b3c8 | |||
f0b03c50f2 | |||
28e83a38e6 | |||
2e261cd66b | |||
3f6e4f7333 | |||
49fadf084a | |||
e2e0dc31c1 | |||
389bbd97d5 | |||
19602691d3 | |||
2e2ff5d4bf | |||
fef6559ce0 | |||
5f669fd49a | |||
330ff08fd3 | |||
f3be8f9623 | |||
ffb70ab74c | |||
26140eefb2 | |||
cdf0433e8a | |||
acd70f73bf | |||
36c3a9ab0b | |||
cfb1afdd72 | |||
4eeb80350e | |||
fb1c985f31 | |||
689c2473d6 | |||
41620fce90 | |||
249bcf827b | |||
d47eab4f26 | |||
f011089d82 | |||
acc58a1c9f | |||
24d850de9f | |||
b045911a59 | |||
a67be76422 | |||
32033c743c | |||
5da8650611 | |||
aec5096f87 | |||
fba39b9980 | |||
6af3e4b777 | |||
58d639e9d8 | |||
0efe5d5a10 | |||
2ad93cf304 | |||
771d76a98b | |||
1e83241494 | |||
1bcbff3ed4 | |||
9a45fe7125 | |||
207901afdd | |||
c6eb1ba22f | |||
ba4168d0fd | |||
00e7adf022 | |||
d9d8ca7ca0 | |||
e8cd271bd8 | |||
e71af6402c | |||
dd1c6a4fc7 | |||
7fe1ae83b1 | |||
a7ad9d30c3 | |||
14779be1ed | |||
a36c3740cc | |||
d2108facab | |||
e5651151d9 | |||
fb61b380b6 |
3
.gitignore
vendored
3
.gitignore
vendored
@ -13,4 +13,5 @@ multi_user_updater/
|
||||
_build
|
||||
|
||||
# ignore generated zip generated from blender_addon_tester
|
||||
*.zip
|
||||
*.zip
|
||||
libs
|
13
.gitlab-ci.yml
Normal file
13
.gitlab-ci.yml
Normal file
@ -0,0 +1,13 @@
|
||||
stages:
|
||||
- test
|
||||
- build
|
||||
- deploy
|
||||
- doc
|
||||
|
||||
|
||||
|
||||
include:
|
||||
- local: .gitlab/ci/test.gitlab-ci.yml
|
||||
- local: .gitlab/ci/build.gitlab-ci.yml
|
||||
- local: .gitlab/ci/deploy.gitlab-ci.yml
|
||||
- local: .gitlab/ci/doc.gitlab-ci.yml
|
@ -8,3 +8,5 @@ build:
|
||||
name: multi_user
|
||||
paths:
|
||||
- multi_user
|
||||
variables:
|
||||
GIT_SUBMODULE_STRATEGY: recursive
|
||||
|
@ -5,6 +5,7 @@ deploy:
|
||||
variables:
|
||||
DOCKER_DRIVER: overlay2
|
||||
DOCKER_TLS_CERTDIR: "/certs"
|
||||
GIT_SUBMODULE_STRATEGY: recursive
|
||||
|
||||
services:
|
||||
- docker:19.03.12-dind
|
||||
|
@ -3,3 +3,5 @@ test:
|
||||
image: slumber/blender-addon-testing:latest
|
||||
script:
|
||||
- python3 scripts/test_addon.py
|
||||
variables:
|
||||
GIT_SUBMODULE_STRATEGY: recursive
|
2
.gitmodules
vendored
2
.gitmodules
vendored
@ -1,3 +1,3 @@
|
||||
[submodule "multi_user/libs/replication"]
|
||||
path = multi_user/libs/replication
|
||||
url = https://gitlab.com/slumber/replication
|
||||
url = https://gitlab.com/slumber/replication.git
|
||||
|
18
README.md
18
README.md
@ -32,32 +32,32 @@ Currently, not all data-block are supported for replication over the wire. The f
|
||||
| Name | Status | Comment |
|
||||
| -------------- | :----: | :----------------------------------------------------------: |
|
||||
| action | ✔️ | |
|
||||
| armature | ❗ | Not stable |
|
||||
| camera | ✔️ | |
|
||||
| collection | ✔️ | |
|
||||
| curve | ❗ | Nurbs surfaces not supported |
|
||||
| gpencil | ✔️ | |
|
||||
| image | ✔️ | |
|
||||
| mesh | ✔️ | |
|
||||
| material | ✔️ | |
|
||||
| node_groups | ❗ | Material & Geometry only |
|
||||
| node_groups | ✔️ | Material & Geometry only |
|
||||
| geometry nodes | ✔️ | |
|
||||
| metaball | ✔️ | |
|
||||
| object | ✔️ | |
|
||||
| textures | ❗ | Supported for modifiers/materials/geo nodes only |
|
||||
| texts | ✔️ | |
|
||||
| scene | ✔️ | |
|
||||
| world | ✔️ | |
|
||||
| lightprobes | ✔️ | |
|
||||
| compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
|
||||
| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
|
||||
| nla | ❌ | |
|
||||
| volumes | ✔️ | |
|
||||
| lightprobes | ✔️ | |
|
||||
| physics | ✔️ | |
|
||||
| curve | ❗ | Nurbs surfaces not supported |
|
||||
| textures | ❗ | Supported for modifiers/materials/geo nodes only |
|
||||
| armature | ❗ | Not stable |
|
||||
| particles | ❗ | The cache isn't syncing. |
|
||||
| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
|
||||
| vse | ❗ | Mask and Clip not supported yet |
|
||||
| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
|
||||
| libraries | ❗ | Partial |
|
||||
| nla | ❌ | |
|
||||
| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
|
||||
| compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
|
||||
|
||||
|
||||
|
||||
|
BIN
docs/getting_started/img/server_preset_exemple.gif
Normal file
BIN
docs/getting_started/img/server_preset_exemple.gif
Normal file
Binary file not shown.
After Width: | Height: | Size: 320 KiB |
BIN
docs/getting_started/img/server_preset_image_add.png
Normal file
BIN
docs/getting_started/img/server_preset_image_add.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 7.3 KiB |
BIN
docs/getting_started/img/server_preset_image_admin.png
Normal file
BIN
docs/getting_started/img/server_preset_image_admin.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 4.2 KiB |
BIN
docs/getting_started/img/server_preset_image_normal_server.png
Normal file
BIN
docs/getting_started/img/server_preset_image_normal_server.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 9.0 KiB |
BIN
docs/getting_started/img/server_preset_image_report.png
Normal file
BIN
docs/getting_started/img/server_preset_image_report.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 3.2 KiB |
@ -108,36 +108,69 @@ Before starting make sure that you have access to the session IP address and por
|
||||
1. Fill in your user information
|
||||
--------------------------------
|
||||
|
||||
Follow the user-info_ section for this step.
|
||||
Joining a server
|
||||
=======================
|
||||
|
||||
----------------
|
||||
2. Network setup
|
||||
----------------
|
||||
--------------
|
||||
Network setup
|
||||
--------------
|
||||
|
||||
In the network panel, select **JOIN**.
|
||||
The **join sub-panel** (see image below) allows you to configure your client to join a
|
||||
collaborative session which is already hosted.
|
||||
|
||||
.. figure:: img/quickstart_join.png
|
||||
:align: center
|
||||
:alt: Connect menu
|
||||
.. figure:: img/server_preset_image_normal_server.png
|
||||
:align: center
|
||||
:width: 200px
|
||||
|
||||
Connection panel
|
||||
Connection pannel
|
||||
|
||||
Fill in the fields with your information:
|
||||
|
||||
- **IP**: the host's IP address.
|
||||
- **Port**: the host's port number.
|
||||
- **Connect as admin**: connect yourself with **admin rights** (see :ref:`admin` ) to the session.
|
||||
|
||||
.. Maybe something more explicit here
|
||||
|
||||
.. note::
|
||||
Additional configuration settings can be found in the :ref:`advanced` section.
|
||||
|
||||
Once you've configured every field, hit the button **CONNECT** to join the session !
|
||||
When the :ref:`session-status` is **ONLINE** you are online and ready to start co-creating.
|
||||
|
||||
.. note::
|
||||
|
||||
If you want to have **administrator rights** (see :ref:`admin` ) on the server, just enter the password created by the host in the **Connect as admin** section
|
||||
|
||||
.. figure:: img/server_preset_image_admin.png
|
||||
:align: center
|
||||
:width: 200px
|
||||
|
||||
Admin password
|
||||
|
||||
---------------
|
||||
Server presets
|
||||
---------------
|
||||
|
||||
You can save your server presets in a preset list below the 'JOIN' and 'HOST' buttons. This allows you to quickly access and manage your servers.
|
||||
|
||||
To add a server, first enter the ip address and the port (plus the password if needed), then click on the + icon to add a name to your preset. To remove a server from the list, select it and click on the - icon.
|
||||
|
||||
.. figure:: img/server_preset_exemple.gif
|
||||
:align: center
|
||||
:width: 200px
|
||||
|
||||
.. warning:: Be careful, if you don't rename your new preset, or if it has the same name as an existing preset, the old preset will be overwritten.
|
||||
|
||||
.. figure:: img/server_preset_image_report.png
|
||||
:align: center
|
||||
:width: 200px
|
||||
|
||||
.. note::
|
||||
|
||||
Two presets are already present when the addon is launched:
|
||||
|
||||
- The 'localhost' preset, to host and join a local session quickly
|
||||
- The 'public session' preset, to join the public sessions of the multi-user server (official discord to participate : https://discord.gg/aBPvGws)
|
||||
|
||||
.. note::
|
||||
Additional configuration settings can be found in the :ref:`advanced` section.
|
||||
|
||||
.. note::
|
||||
When starting a **dedicated server**, the session status screen will take you to the **LOBBY**, awaiting an admin to start the session.
|
||||
|
||||
|
@ -76,7 +76,7 @@ Hit 'Create a network'(see image below) and go to the network settings.
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Network page
|
||||
Admin password
|
||||
|
||||
Now that the network is created, let's configure it.
|
||||
|
||||
|
@ -19,7 +19,7 @@
|
||||
bl_info = {
|
||||
"name": "Multi-User",
|
||||
"author": "Swann Martinez",
|
||||
"version": (0, 3, 0),
|
||||
"version": (0, 5, 0),
|
||||
"description": "Enable real-time collaborative workflow inside blender",
|
||||
"blender": (2, 82, 0),
|
||||
"location": "3D View > Sidebar > Multi-User tab",
|
||||
@ -41,11 +41,12 @@ import bpy
|
||||
from bpy.app.handlers import persistent
|
||||
|
||||
from . import environment
|
||||
from uuid import uuid4
|
||||
LIBS = os.path.dirname(os.path.abspath(__file__))+"/libs/replication"
|
||||
|
||||
|
||||
module_error_msg = "Insufficient rights to install the multi-user \
|
||||
dependencies, aunch blender with administrator rights."
|
||||
|
||||
|
||||
def register():
|
||||
# Setup logging policy
|
||||
logging.basicConfig(
|
||||
@ -53,17 +54,12 @@ def register():
|
||||
datefmt='%H:%M:%S',
|
||||
level=logging.INFO)
|
||||
|
||||
for module_name in list(sys.modules.keys()):
|
||||
if 'replication' in module_name:
|
||||
del sys.modules[module_name]
|
||||
|
||||
if LIBS not in sys.path:
|
||||
logging.info('Adding local modules dir to the path')
|
||||
sys.path.insert(0, LIBS)
|
||||
|
||||
try:
|
||||
environment.register()
|
||||
|
||||
from . import presence
|
||||
from . import operators
|
||||
from . import handlers
|
||||
from . import ui
|
||||
from . import preferences
|
||||
from . import addon_updater_ops
|
||||
@ -72,10 +68,11 @@ def register():
|
||||
addon_updater_ops.register(bl_info)
|
||||
presence.register()
|
||||
operators.register()
|
||||
handlers.register()
|
||||
ui.register()
|
||||
except ModuleNotFoundError as e:
|
||||
raise Exception(module_error_msg)
|
||||
logging.error(e)
|
||||
logging.error(module_error_msg)
|
||||
|
||||
bpy.types.WindowManager.session = bpy.props.PointerProperty(
|
||||
type=preferences.SessionProps)
|
||||
@ -92,6 +89,7 @@ def register():
|
||||
def unregister():
|
||||
from . import presence
|
||||
from . import operators
|
||||
from . import handlers
|
||||
from . import ui
|
||||
from . import preferences
|
||||
from . import addon_updater_ops
|
||||
@ -101,6 +99,7 @@ def unregister():
|
||||
presence.unregister()
|
||||
addon_updater_ops.unregister()
|
||||
ui.unregister()
|
||||
handlers.unregister()
|
||||
operators.unregister()
|
||||
preferences.unregister()
|
||||
|
||||
@ -108,3 +107,5 @@ def unregister():
|
||||
del bpy.types.ID.uuid
|
||||
del bpy.types.WindowManager.online_users
|
||||
del bpy.types.WindowManager.user_index
|
||||
|
||||
environment.unregister()
|
||||
|
@ -1688,10 +1688,7 @@ class GitlabEngine(object):
|
||||
# Could clash with tag names and if it does, it will
|
||||
# download TAG zip instead of branch zip to get
|
||||
# direct path, would need.
|
||||
return "{}{}{}".format(
|
||||
self.form_repo_url(updater),
|
||||
"/repository/archive.zip?sha=",
|
||||
branch)
|
||||
return f"https://gitlab.com/slumber/multi-user/-/jobs/artifacts/{branch}/download?job=build"
|
||||
|
||||
def get_zip_url(self, sha, updater):
|
||||
return "{base}/repository/archive.zip?sha={sha}".format(
|
||||
|
@ -16,38 +16,40 @@
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
import bpy
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
|
||||
__all__ = [
|
||||
'bl_object',
|
||||
'bl_mesh',
|
||||
# 'bl_camera',
|
||||
'bl_camera',
|
||||
'bl_collection',
|
||||
# 'bl_curve',
|
||||
# 'bl_gpencil',
|
||||
# 'bl_image',
|
||||
# 'bl_light',
|
||||
'bl_curve',
|
||||
'bl_gpencil',
|
||||
'bl_image',
|
||||
'bl_light',
|
||||
'bl_scene',
|
||||
'bl_material',
|
||||
# 'bl_library',
|
||||
# 'bl_armature',
|
||||
# 'bl_action',
|
||||
# 'bl_world',
|
||||
# 'bl_metaball',
|
||||
# 'bl_lattice',
|
||||
# 'bl_lightprobe',
|
||||
# 'bl_speaker',
|
||||
# 'bl_font',
|
||||
# 'bl_sound',
|
||||
# 'bl_file',
|
||||
# 'bl_sequencer',
|
||||
# 'bl_node_group',
|
||||
# 'bl_texture',
|
||||
# "bl_particle",
|
||||
'bl_armature',
|
||||
'bl_action',
|
||||
'bl_world',
|
||||
'bl_metaball',
|
||||
'bl_lattice',
|
||||
'bl_lightprobe',
|
||||
'bl_speaker',
|
||||
'bl_font',
|
||||
'bl_sound',
|
||||
'bl_file',
|
||||
'bl_node_group',
|
||||
'bl_texture',
|
||||
"bl_particle",
|
||||
] # Order here defines execution order
|
||||
|
||||
# if bpy.app.version[1] >= 91:
|
||||
# __all__.append('bl_volume')
|
||||
if bpy.app.version[1] >= 91:
|
||||
__all__.append('bl_volume')
|
||||
|
||||
from . import *
|
||||
|
||||
def types_to_register():
|
||||
return __all__
|
||||
|
||||
from replication.protocol import DataTranslationProtocol
|
||||
|
@ -24,14 +24,9 @@ from enum import Enum
|
||||
|
||||
from .. import utils
|
||||
from .dump_anything import (
|
||||
Dumper,
|
||||
Loader,
|
||||
np_dump_collection,
|
||||
np_load_collection,
|
||||
remove_items_from_dict)
|
||||
from .bl_datablock import stamp_uuid
|
||||
Dumper, Loader, np_dump_collection, np_load_collection, remove_items_from_dict)
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from replication.objects import Node
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
|
||||
KEYFRAME = [
|
||||
'amplitude',
|
||||
@ -46,7 +41,6 @@ KEYFRAME = [
|
||||
'interpolation',
|
||||
]
|
||||
|
||||
|
||||
def has_action(datablock):
|
||||
""" Check if the datablock datablock has actions
|
||||
"""
|
||||
@ -75,8 +69,7 @@ def load_driver(target_datablock, src_driver):
|
||||
loader = Loader()
|
||||
drivers = target_datablock.animation_data.drivers
|
||||
src_driver_data = src_driver['driver']
|
||||
new_driver = drivers.new(
|
||||
src_driver['data_path'], index=src_driver['array_index'])
|
||||
new_driver = drivers.new(src_driver['data_path'], index=src_driver['array_index'])
|
||||
|
||||
# Settings
|
||||
new_driver.driver.type = src_driver_data['type']
|
||||
@ -92,10 +85,10 @@ def load_driver(target_datablock, src_driver):
|
||||
|
||||
for src_target in src_var_data['targets']:
|
||||
src_target_data = src_var_data['targets'][src_target]
|
||||
new_var.targets[src_target].id = utils.resolve_from_id(
|
||||
src_target_data['id'], src_target_data['id_type'])
|
||||
loader.load(
|
||||
new_var.targets[src_target], src_target_data)
|
||||
src_id = src_target_data.get('id')
|
||||
if src_id:
|
||||
new_var.targets[src_target].id = utils.resolve_from_id(src_target_data['id'], src_target_data['id_type'])
|
||||
loader.load(new_var.targets[src_target], src_target_data)
|
||||
|
||||
# Fcurve
|
||||
new_fcurve = new_driver.keyframe_points
|
||||
@ -128,7 +121,6 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
|
||||
points = fcurve.keyframe_points
|
||||
fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)
|
||||
fcurve_data['keyframe_points'] = np_dump_collection(points, KEYFRAME)
|
||||
|
||||
else: # Legacy method
|
||||
dumper = Dumper()
|
||||
fcurve_data["keyframe_points"] = []
|
||||
@ -138,6 +130,18 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
|
||||
dumper.dump(k)
|
||||
)
|
||||
|
||||
if fcurve.modifiers:
|
||||
dumper = Dumper()
|
||||
dumper.exclude_filter = [
|
||||
'is_valid',
|
||||
'active'
|
||||
]
|
||||
dumped_modifiers = []
|
||||
for modfifier in fcurve.modifiers:
|
||||
dumped_modifiers.append(dumper.dump(modfifier))
|
||||
|
||||
fcurve_data['modifiers'] = dumped_modifiers
|
||||
|
||||
return fcurve_data
|
||||
|
||||
|
||||
@ -150,7 +154,7 @@ def load_fcurve(fcurve_data, fcurve):
|
||||
:type fcurve: bpy.types.FCurve
|
||||
"""
|
||||
use_numpy = fcurve_data.get('use_numpy')
|
||||
|
||||
loader = Loader()
|
||||
keyframe_points = fcurve.keyframe_points
|
||||
|
||||
# Remove all keyframe points
|
||||
@ -195,37 +199,53 @@ def load_fcurve(fcurve_data, fcurve):
|
||||
|
||||
fcurve.update()
|
||||
|
||||
dumped_fcurve_modifiers = fcurve_data.get('modifiers', None)
|
||||
|
||||
def dump_animation_data(datablock, data):
|
||||
if dumped_fcurve_modifiers:
|
||||
# clear modifiers
|
||||
for fmod in fcurve.modifiers:
|
||||
fcurve.modifiers.remove(fmod)
|
||||
|
||||
# Load each modifiers in order
|
||||
for modifier_data in dumped_fcurve_modifiers:
|
||||
modifier = fcurve.modifiers.new(modifier_data['type'])
|
||||
|
||||
loader.load(modifier, modifier_data)
|
||||
elif fcurve.modifiers:
|
||||
for fmod in fcurve.modifiers:
|
||||
fcurve.modifiers.remove(fmod)
|
||||
|
||||
|
||||
def dump_animation_data(datablock):
|
||||
animation_data = {}
|
||||
if has_action(datablock):
|
||||
dumper = Dumper()
|
||||
dumper.include_filter = ['action']
|
||||
data['animation_data'] = dumper.dump(datablock.animation_data)
|
||||
|
||||
animation_data['action'] = datablock.animation_data.action.name
|
||||
if has_driver(datablock):
|
||||
dumped_drivers = {'animation_data': {'drivers': []}}
|
||||
animation_data['drivers'] = []
|
||||
for driver in datablock.animation_data.drivers:
|
||||
dumped_drivers['animation_data']['drivers'].append(
|
||||
dump_driver(driver))
|
||||
animation_data['drivers'].append(dump_driver(driver))
|
||||
|
||||
data.update(dumped_drivers)
|
||||
return animation_data
|
||||
|
||||
|
||||
def load_animation_data(data, datablock):
|
||||
def load_animation_data(animation_data, datablock):
|
||||
# Load animation data
|
||||
if 'animation_data' in data.keys():
|
||||
if animation_data:
|
||||
if datablock.animation_data is None:
|
||||
datablock.animation_data_create()
|
||||
|
||||
for d in datablock.animation_data.drivers:
|
||||
datablock.animation_data.drivers.remove(d)
|
||||
|
||||
if 'drivers' in data['animation_data']:
|
||||
for driver in data['animation_data']['drivers']:
|
||||
if 'drivers' in animation_data:
|
||||
for driver in animation_data['drivers']:
|
||||
load_driver(datablock, driver)
|
||||
|
||||
if 'action' in data['animation_data']:
|
||||
datablock.animation_data.action = bpy.data.actions[data['animation_data']['action']]
|
||||
if 'action' in animation_data:
|
||||
datablock.animation_data.action = bpy.data.actions[animation_data['action']]
|
||||
elif datablock.animation_data.action:
|
||||
datablock.animation_data.action = None
|
||||
|
||||
# Remove existing animation data if there is not more to load
|
||||
elif hasattr(datablock, 'animation_data') and datablock.animation_data:
|
||||
datablock.animation_data_clear()
|
||||
@ -271,8 +291,6 @@ class BlAction(ReplicatedDatablock):
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
stamp_uuid(datablock)
|
||||
|
||||
dumper = Dumper()
|
||||
dumper.exclude_filter = [
|
||||
'name_full',
|
||||
@ -295,3 +313,15 @@ class BlAction(ReplicatedDatablock):
|
||||
data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True))
|
||||
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.actions)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
return []
|
||||
|
||||
_type = bpy.types.Action
|
||||
_class = BlAction
|
@ -22,8 +22,9 @@ import mathutils
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .. import presence, operators, utils
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
def get_roll(bone: bpy.types.Bone) -> float:
|
||||
""" Compute the actuall roll of a pose bone
|
||||
@ -35,16 +36,18 @@ def get_roll(bone: bpy.types.Bone) -> float:
|
||||
return bone.AxisRollFromMatrix(bone.matrix_local.to_3x3())[1]
|
||||
|
||||
|
||||
class BlArmature(BlDatablock):
|
||||
class BlArmature(ReplicatedDatablock):
|
||||
bl_id = "armatures"
|
||||
bl_class = bpy.types.Armature
|
||||
bl_check_common = False
|
||||
bl_icon = 'ARMATURE_DATA'
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.armatures.new(data["name"])
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
# Load parent object
|
||||
parent_object = utils.find_from_attr(
|
||||
@ -55,7 +58,7 @@ class BlArmature(BlDatablock):
|
||||
|
||||
if parent_object is None:
|
||||
parent_object = bpy.data.objects.new(
|
||||
data['user_name'], target)
|
||||
data['user_name'], datablock)
|
||||
parent_object.uuid = data['user']
|
||||
|
||||
is_object_in_master = (
|
||||
@ -90,10 +93,10 @@ class BlArmature(BlDatablock):
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
for bone in data['bones']:
|
||||
if bone not in target.edit_bones:
|
||||
new_bone = target.edit_bones.new(bone)
|
||||
if bone not in datablock.edit_bones:
|
||||
new_bone = datablock.edit_bones.new(bone)
|
||||
else:
|
||||
new_bone = target.edit_bones[bone]
|
||||
new_bone = datablock.edit_bones[bone]
|
||||
|
||||
bone_data = data['bones'].get(bone)
|
||||
|
||||
@ -104,7 +107,7 @@ class BlArmature(BlDatablock):
|
||||
new_bone.roll = bone_data['roll']
|
||||
|
||||
if 'parent' in bone_data:
|
||||
new_bone.parent = target.edit_bones[data['bones']
|
||||
new_bone.parent = datablock.edit_bones[data['bones']
|
||||
[bone]['parent']]
|
||||
new_bone.use_connect = bone_data['use_connect']
|
||||
|
||||
@ -119,9 +122,10 @@ class BlArmature(BlDatablock):
|
||||
if 'EDIT' in current_mode:
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
def dump(datablock: object) -> dict:
|
||||
assert(instance)
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
dumper = Dumper()
|
||||
dumper.depth = 4
|
||||
dumper.include_filter = [
|
||||
@ -135,14 +139,14 @@ class BlArmature(BlDatablock):
|
||||
'name',
|
||||
'layers',
|
||||
]
|
||||
data = dumper.dump(instance)
|
||||
data = dumper.dump(datablock)
|
||||
|
||||
for bone in instance.bones:
|
||||
for bone in datablock.bones:
|
||||
if bone.parent:
|
||||
data['bones'][bone.name]['parent'] = bone.parent.name
|
||||
# get the parent Object
|
||||
# TODO: Use id_data instead
|
||||
object_users = utils.get_datablock_users(instance)[0]
|
||||
object_users = utils.get_datablock_users(datablock)[0]
|
||||
data['user'] = object_users.uuid
|
||||
data['user_name'] = object_users.name
|
||||
|
||||
@ -153,7 +157,25 @@ class BlArmature(BlDatablock):
|
||||
data['user_scene'] = [
|
||||
item.name for item in container_users if isinstance(item, bpy.types.Scene)]
|
||||
|
||||
for bone in instance.bones:
|
||||
for bone in datablock.bones:
|
||||
data['bones'][bone.name]['roll'] = get_roll(bone)
|
||||
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
name = data.get('name')
|
||||
datablock = resolve_datablock_from_uuid(uuid, bpy.data.armatures)
|
||||
if datablock is None:
|
||||
datablock = bpy.data.armatures.get(name)
|
||||
|
||||
return datablock
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
return resolve_animation_dependencies(datablock)
|
||||
|
||||
_type = bpy.types.Armature
|
||||
_class = BlArmature
|
@ -20,47 +20,56 @@ import bpy
|
||||
import mathutils
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
|
||||
class BlCamera(BlDatablock):
|
||||
class BlCamera(ReplicatedDatablock):
|
||||
bl_id = "cameras"
|
||||
bl_class = bpy.types.Camera
|
||||
bl_check_common = False
|
||||
bl_icon = 'CAMERA_DATA'
|
||||
bl_reload_parent = False
|
||||
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.cameras.new(data["name"])
|
||||
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
|
||||
dof_settings = data.get('dof')
|
||||
|
||||
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
# DOF settings
|
||||
if dof_settings:
|
||||
loader.load(target.dof, dof_settings)
|
||||
loader.load(datablock.dof, dof_settings)
|
||||
|
||||
background_images = data.get('background_images')
|
||||
|
||||
target.background_images.clear()
|
||||
|
||||
datablock.background_images.clear()
|
||||
|
||||
if background_images:
|
||||
for img_name, img_data in background_images.items():
|
||||
img_id = img_data.get('image')
|
||||
if img_id:
|
||||
target_img = target.background_images.new()
|
||||
target_img = datablock.background_images.new()
|
||||
target_img.image = bpy.data.images[img_id]
|
||||
loader.load(target_img, img_data)
|
||||
|
||||
def dump(datablock: object) -> dict:
|
||||
assert(instance)
|
||||
img_user = img_data.get('image_user')
|
||||
if img_user:
|
||||
loader.load(target_img.image_user, img_user)
|
||||
|
||||
# TODO: background image support
|
||||
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
dumper = Dumper()
|
||||
dumper.depth = 3
|
||||
dumper.include_filter = [
|
||||
@ -101,15 +110,37 @@ class BlCamera(BlDatablock):
|
||||
'scale',
|
||||
'use_flip_x',
|
||||
'use_flip_y',
|
||||
'image'
|
||||
'image_user',
|
||||
'image',
|
||||
'frame_duration',
|
||||
'frame_start',
|
||||
'frame_offset',
|
||||
'use_cyclic',
|
||||
'use_auto_refresh'
|
||||
]
|
||||
return dumper.dump(instance)
|
||||
|
||||
data = dumper.dump(datablock)
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
|
||||
for index, image in enumerate(datablock.background_images):
|
||||
if image.image_user:
|
||||
data['background_images'][index]['image_user'] = dumper.dump(image.image_user)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.cameras)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
for background in datablock.background_images:
|
||||
if background.image:
|
||||
deps.append(background.image)
|
||||
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
_type = bpy.types.Camera
|
||||
_class = BlCamera
|
@ -19,10 +19,12 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
from deepdiff import DeepDiff, Delta
|
||||
|
||||
from .. import utils
|
||||
from .dump_anything import Loader, Dumper
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from replication.objects import Node
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
|
||||
def dump_collection_children(collection):
|
||||
collection_children = []
|
||||
@ -87,15 +89,17 @@ class BlCollection(ReplicatedDatablock):
|
||||
bl_class = bpy.types.Collection
|
||||
bl_check_common = True
|
||||
bl_reload_parent = False
|
||||
|
||||
|
||||
use_delta = True
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
datablock = bpy.data.collections.new(node.data["name"])
|
||||
return datablock
|
||||
instance = bpy.data.collections.new(data["name"])
|
||||
return instance
|
||||
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
data = node.data
|
||||
loader = Loader()
|
||||
loader.load(datablock, data)
|
||||
|
||||
@ -109,10 +113,9 @@ class BlCollection(ReplicatedDatablock):
|
||||
# Keep other user from deleting collection object by flushing their history
|
||||
utils.flush_history()
|
||||
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
assert(datablock)
|
||||
|
||||
dumper = Dumper()
|
||||
dumper.depth = 1
|
||||
dumper.include_filter = [
|
||||
@ -129,9 +132,33 @@ class BlCollection(ReplicatedDatablock):
|
||||
|
||||
return data
|
||||
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.collections)
|
||||
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
return resolve_collection_dependencies(datablock)
|
||||
|
||||
@staticmethod
|
||||
def compute_delta(last_data: dict, current_data: dict) -> Delta:
|
||||
diff_params = {
|
||||
'ignore_order': True,
|
||||
'report_repetition': True
|
||||
}
|
||||
delta_params = {
|
||||
# 'mutate': True
|
||||
}
|
||||
|
||||
return Delta(
|
||||
DeepDiff(last_data,
|
||||
current_data,
|
||||
cache_size=5000,
|
||||
**diff_params),
|
||||
**delta_params)
|
||||
|
||||
_type = bpy.types.Collection
|
||||
_class = BlCollection
|
@ -21,13 +21,15 @@ import bpy.types as T
|
||||
import mathutils
|
||||
import logging
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
from ..utils import get_preferences
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .dump_anything import (Dumper, Loader,
|
||||
np_load_collection,
|
||||
np_dump_collection)
|
||||
from .bl_datablock import get_datablock_from_uuid
|
||||
from .bl_material import dump_materials_slots, load_materials_slots
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
|
||||
SPLINE_BEZIER_POINT = [
|
||||
# "handle_left_type",
|
||||
@ -134,25 +136,29 @@ SPLINE_METADATA = [
|
||||
]
|
||||
|
||||
|
||||
class BlCurve(BlDatablock):
|
||||
class BlCurve(ReplicatedDatablock):
|
||||
bl_id = "curves"
|
||||
bl_class = bpy.types.Curve
|
||||
bl_check_common = False
|
||||
bl_icon = 'CURVE_DATA'
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.curves.new(data["name"], data["type"])
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
target.splines.clear()
|
||||
loader = Loader()
|
||||
loader.load(datablock, data)
|
||||
|
||||
datablock.splines.clear()
|
||||
|
||||
# load splines
|
||||
for spline in data['splines'].values():
|
||||
new_spline = target.splines.new(spline['type'])
|
||||
new_spline = datablock.splines.new(spline['type'])
|
||||
|
||||
# Load curve geometry data
|
||||
if new_spline.type == 'BEZIER':
|
||||
@ -173,15 +179,14 @@ class BlCurve(BlDatablock):
|
||||
# MATERIAL SLOTS
|
||||
src_materials = data.get('materials', None)
|
||||
if src_materials:
|
||||
load_materials_slots(src_materials, target.materials)
|
||||
load_materials_slots(src_materials, datablock.materials)
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
assert(instance)
|
||||
dumper = Dumper()
|
||||
# Conflicting attributes
|
||||
# TODO: remove them with the NURBS support
|
||||
dumper.include_filter = CURVE_METADATA
|
||||
|
||||
dumper.exclude_filter = [
|
||||
'users',
|
||||
'order_u',
|
||||
@ -190,14 +195,16 @@ class BlCurve(BlDatablock):
|
||||
'point_count_u',
|
||||
'active_textbox'
|
||||
]
|
||||
if instance.use_auto_texspace:
|
||||
if datablock.use_auto_texspace:
|
||||
dumper.exclude_filter.extend([
|
||||
'texspace_location',
|
||||
'texspace_size'])
|
||||
data = dumper.dump(instance)
|
||||
data = dumper.dump(datablock)
|
||||
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
data['splines'] = {}
|
||||
|
||||
for index, spline in enumerate(instance.splines):
|
||||
for index, spline in enumerate(datablock.splines):
|
||||
dumper.depth = 2
|
||||
dumper.include_filter = SPLINE_METADATA
|
||||
spline_data = dumper.dump(spline)
|
||||
@ -211,19 +218,25 @@ class BlCurve(BlDatablock):
|
||||
spline.bezier_points, SPLINE_BEZIER_POINT)
|
||||
data['splines'][index] = spline_data
|
||||
|
||||
if isinstance(instance, T.SurfaceCurve):
|
||||
if isinstance(datablock, T.SurfaceCurve):
|
||||
data['type'] = 'SURFACE'
|
||||
elif isinstance(instance, T.TextCurve):
|
||||
elif isinstance(datablock, T.TextCurve):
|
||||
data['type'] = 'FONT'
|
||||
elif isinstance(instance, T.Curve):
|
||||
elif isinstance(datablock, T.Curve):
|
||||
data['type'] = 'CURVE'
|
||||
|
||||
data['materials'] = dump_materials_slots(instance.materials)
|
||||
data['materials'] = dump_materials_slots(datablock.materials)
|
||||
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.curves)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
# TODO: resolve material
|
||||
deps = []
|
||||
curve = datablock
|
||||
|
||||
@ -234,15 +247,19 @@ class BlCurve(BlDatablock):
|
||||
curve.font_bold_italic,
|
||||
curve.font_italic])
|
||||
|
||||
for material in curve.materials:
|
||||
for material in datablock.materials:
|
||||
if material:
|
||||
deps.append(material)
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
def diff(self):
|
||||
if 'EDIT' in bpy.context.mode \
|
||||
and not self.preferences.sync_flags.sync_during_editmode:
|
||||
return False
|
||||
else:
|
||||
return super().diff()
|
||||
@staticmethod
|
||||
def needs_update(datablock: object, data: dict) -> bool:
|
||||
return 'EDIT' not in bpy.context.mode \
|
||||
or get_preferences().sync_flags.sync_during_editmode
|
||||
|
||||
|
||||
_type = [bpy.types.Curve, bpy.types.TextCurve]
|
||||
_class = BlCurve
|
@ -23,14 +23,10 @@ import bpy
|
||||
import mathutils
|
||||
from replication.constants import DIFF_BINARY, DIFF_JSON, UP
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from replication.objects import Node
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
from .. import utils
|
||||
from .dump_anything import Dumper, Loader
|
||||
|
||||
|
||||
def get_datablock_from_uuid(uuid, default, ignore=[]):
|
||||
if not uuid:
|
||||
return default
|
||||
@ -42,18 +38,8 @@ def get_datablock_from_uuid(uuid, default, ignore=[]):
|
||||
return item
|
||||
return default
|
||||
|
||||
|
||||
def resolve_datablock_from_root(node:Node, root)->object:
|
||||
datablock_ref = utils.find_from_attr('uuid', node.uuid, root)
|
||||
|
||||
if not datablock_ref:
|
||||
try:
|
||||
datablock_ref = root[node.data['name']]
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return datablock_ref
|
||||
|
||||
def stamp_uuid(datablock):
|
||||
if not datablock.uuid:
|
||||
datablock.uuid = str(uuid4())
|
||||
def resolve_datablock_from_uuid(uuid, bpy_collection):
|
||||
for item in bpy_collection:
|
||||
if getattr(item, 'uuid', None) == uuid:
|
||||
return item
|
||||
return None
|
@ -19,7 +19,7 @@
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from pathlib import Path, WindowsPath, PosixPath
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
@ -27,6 +27,7 @@ from replication.constants import DIFF_BINARY, UP
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
|
||||
from .. import utils
|
||||
from ..utils import get_preferences
|
||||
from .dump_anything import Dumper, Loader
|
||||
|
||||
|
||||
@ -58,33 +59,16 @@ class BlFile(ReplicatedDatablock):
|
||||
bl_icon = 'FILE'
|
||||
bl_reload_parent = True
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.instance = kwargs.get('instance', None)
|
||||
|
||||
if self.instance and not self.instance.exists():
|
||||
raise FileNotFoundError(str(self.instance))
|
||||
|
||||
self.preferences = utils.get_preferences()
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return Path(get_filepath(data['name']))
|
||||
|
||||
def resolve(self, construct = True):
|
||||
self.instance = Path(get_filepath(self.data['name']))
|
||||
|
||||
file_exists = self.instance.exists()
|
||||
if not file_exists:
|
||||
logging.debug("File don't exist, loading it.")
|
||||
self._load(self.data, self.instance)
|
||||
|
||||
return file_exists
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
return Path(get_filepath(data['name']))
|
||||
|
||||
|
||||
def push(self, socket, identity=None, check_data=False):
|
||||
super().push(socket, identity=None, check_data=False)
|
||||
|
||||
if self.preferences.clear_memory_filecache:
|
||||
del self.data['file']
|
||||
|
||||
def dump(self, instance=None):
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
"""
|
||||
Read the file and return a dict as:
|
||||
{
|
||||
@ -96,44 +80,62 @@ class BlFile(ReplicatedDatablock):
|
||||
logging.info(f"Extracting file metadata")
|
||||
|
||||
data = {
|
||||
'name': self.instance.name,
|
||||
'name': datablock.name,
|
||||
}
|
||||
|
||||
logging.info(
|
||||
f"Reading {self.instance.name} content: {self.instance.stat().st_size} bytes")
|
||||
logging.info(f"Reading {datablock.name} content: {datablock.stat().st_size} bytes")
|
||||
|
||||
try:
|
||||
file = open(self.instance, "rb")
|
||||
file = open(datablock, "rb")
|
||||
data['file'] = file.read()
|
||||
|
||||
file.close()
|
||||
except IOError:
|
||||
logging.warning(f"{self.instance} doesn't exist, skipping")
|
||||
logging.warning(f"{datablock} doesn't exist, skipping")
|
||||
else:
|
||||
file.close()
|
||||
|
||||
return data
|
||||
|
||||
def load(self, data, target):
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
"""
|
||||
Writing the file
|
||||
"""
|
||||
|
||||
try:
|
||||
file = open(target, "wb")
|
||||
file = open(datablock, "wb")
|
||||
file.write(data['file'])
|
||||
|
||||
if self.preferences.clear_memory_filecache:
|
||||
del self.data['file']
|
||||
if get_preferences().clear_memory_filecache:
|
||||
del data['file']
|
||||
except IOError:
|
||||
logging.warning(f"{target} doesn't exist, skipping")
|
||||
logging.warning(f"{datablock} doesn't exist, skipping")
|
||||
else:
|
||||
file.close()
|
||||
|
||||
def diff(self):
|
||||
if self.preferences.clear_memory_filecache:
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
def needs_update(datablock: object, data:dict)-> bool:
|
||||
if get_preferences().clear_memory_filecache:
|
||||
return False
|
||||
else:
|
||||
memory_size = sys.getsizeof(self.data['file'])-33
|
||||
disk_size = self.instance.stat().st_size
|
||||
return memory_size != disk_size
|
||||
if not datablock:
|
||||
return None
|
||||
|
||||
if not data:
|
||||
return True
|
||||
|
||||
memory_size = sys.getsizeof(data['file'])-33
|
||||
disk_size = datablock.stat().st_size
|
||||
|
||||
if memory_size != disk_size:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
_type = [WindowsPath, PosixPath]
|
||||
_class = BlFile
|
@ -22,18 +22,19 @@ from pathlib import Path
|
||||
|
||||
import bpy
|
||||
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_file import get_filepath, ensure_unpacked
|
||||
from .dump_anything import Dumper, Loader
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
|
||||
|
||||
class BlFont(BlDatablock):
|
||||
class BlFont(ReplicatedDatablock):
|
||||
bl_id = "fonts"
|
||||
bl_class = bpy.types.VectorFont
|
||||
bl_check_common = False
|
||||
bl_icon = 'FILE_FONT'
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
filename = data.get('filename')
|
||||
|
||||
@ -42,25 +43,29 @@ class BlFont(BlDatablock):
|
||||
else:
|
||||
return bpy.data.fonts.load(get_filepath(filename))
|
||||
|
||||
def load(self, data, target):
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
pass
|
||||
|
||||
def dump(self, instance=None):
|
||||
if instance.filepath == '<builtin>':
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
if datablock.filepath == '<builtin>':
|
||||
filename = '<builtin>'
|
||||
else:
|
||||
filename = Path(instance.filepath).name
|
||||
filename = Path(datablock.filepath).name
|
||||
|
||||
if not filename:
|
||||
raise FileExistsError(instance.filepath)
|
||||
raise FileExistsError(datablock.filepath)
|
||||
|
||||
return {
|
||||
'filename': filename,
|
||||
'name': instance.name
|
||||
'name': datablock.name
|
||||
}
|
||||
|
||||
def diff(self):
|
||||
return False
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.fonts)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
@ -71,3 +76,10 @@ class BlFont(BlDatablock):
|
||||
deps.append(Path(bpy.path.abspath(datablock.filepath)))
|
||||
|
||||
return deps
|
||||
|
||||
@staticmethod
|
||||
def needs_update(datablock: object, data:dict)-> bool:
|
||||
return False
|
||||
|
||||
_type = bpy.types.VectorFont
|
||||
_class = BlFont
|
@ -24,10 +24,11 @@ from .dump_anything import (Dumper,
|
||||
Loader,
|
||||
np_dump_collection,
|
||||
np_load_collection)
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
from ..utils import get_preferences
|
||||
|
||||
# GPencil data api is structured as it follow:
|
||||
# GP-Object --> GP-Layers --> GP-Frames --> GP-Strokes --> GP-Stroke-Points
|
||||
|
||||
STROKE_POINT = [
|
||||
'co',
|
||||
@ -113,6 +114,7 @@ def load_stroke(stroke_data, stroke):
|
||||
# fix fill issues
|
||||
stroke.uv_scale = stroke_data["uv_scale"]
|
||||
|
||||
|
||||
def dump_frame(frame):
|
||||
""" Dump a grease pencil frame to a dict
|
||||
|
||||
@ -151,6 +153,7 @@ def load_frame(frame_data, frame):
|
||||
|
||||
np_load_collection(frame_data['strokes'], frame.strokes, STROKE)
|
||||
|
||||
|
||||
def dump_layer(layer):
|
||||
""" Dump a grease pencil layer
|
||||
|
||||
@ -228,47 +231,58 @@ def load_layer(layer_data, layer):
|
||||
load_frame(frame_data, target_frame)
|
||||
|
||||
|
||||
class BlGpencil(BlDatablock):
|
||||
def layer_changed(datablock: object, data: dict) -> bool:
|
||||
if datablock.layers.active and \
|
||||
datablock.layers.active.info != data["active_layers"]:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def frame_changed(data: dict) -> bool:
|
||||
return bpy.context.scene.frame_current != data["eval_frame"]
|
||||
|
||||
class BlGpencil(ReplicatedDatablock):
|
||||
bl_id = "grease_pencils"
|
||||
bl_class = bpy.types.GreasePencil
|
||||
bl_check_common = False
|
||||
bl_icon = 'GREASEPENCIL'
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.grease_pencils.new(data["name"])
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
target.materials.clear()
|
||||
datablock.materials.clear()
|
||||
if "materials" in data.keys():
|
||||
for mat in data['materials']:
|
||||
target.materials.append(bpy.data.materials[mat])
|
||||
datablock.materials.append(bpy.data.materials[mat])
|
||||
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
|
||||
# TODO: reuse existing layer
|
||||
for layer in target.layers:
|
||||
target.layers.remove(layer)
|
||||
for layer in datablock.layers:
|
||||
datablock.layers.remove(layer)
|
||||
|
||||
if "layers" in data.keys():
|
||||
for layer in data["layers"]:
|
||||
layer_data = data["layers"].get(layer)
|
||||
|
||||
# if layer not in target.layers.keys():
|
||||
target_layer = target.layers.new(data["layers"][layer]["info"])
|
||||
# if layer not in datablock.layers.keys():
|
||||
target_layer = datablock.layers.new(data["layers"][layer]["info"])
|
||||
# else:
|
||||
# target_layer = target.layers[layer]
|
||||
# target_layer.clear()
|
||||
|
||||
load_layer(layer_data, target_layer)
|
||||
|
||||
target.layers.update()
|
||||
|
||||
|
||||
datablock.layers.update()
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
assert(instance)
|
||||
dumper = Dumper()
|
||||
dumper.depth = 2
|
||||
dumper.include_filter = [
|
||||
@ -279,17 +293,22 @@ class BlGpencil(BlDatablock):
|
||||
'pixel_factor',
|
||||
'stroke_depth_order'
|
||||
]
|
||||
data = dumper.dump(instance)
|
||||
data = dumper.dump(datablock)
|
||||
|
||||
data['layers'] = {}
|
||||
|
||||
for layer in instance.layers:
|
||||
for layer in datablock.layers:
|
||||
data['layers'][layer.info] = dump_layer(layer)
|
||||
|
||||
data["active_layers"] = instance.layers.active.info
|
||||
data["active_layers"] = datablock.layers.active.info if datablock.layers.active else "None"
|
||||
data["eval_frame"] = bpy.context.scene.frame_current
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.grease_pencils)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
@ -299,17 +318,12 @@ class BlGpencil(BlDatablock):
|
||||
|
||||
return deps
|
||||
|
||||
def layer_changed(self):
|
||||
return self.instance.layers.active.info != self.data["active_layers"]
|
||||
@staticmethod
|
||||
def needs_update(datablock: object, data: dict) -> bool:
|
||||
return bpy.context.mode == 'OBJECT' \
|
||||
or layer_changed(datablock, data) \
|
||||
or frame_changed(data) \
|
||||
or get_preferences().sync_flags.sync_during_editmode
|
||||
|
||||
def frame_changed(self):
|
||||
return bpy.context.scene.frame_current != self.data["eval_frame"]
|
||||
|
||||
def diff(self):
|
||||
if self.layer_changed() \
|
||||
or self.frame_changed() \
|
||||
or bpy.context.mode == 'OBJECT' \
|
||||
or self.preferences.sync_flags.sync_during_editmode:
|
||||
return super().diff()
|
||||
else:
|
||||
return False
|
||||
_type = bpy.types.GreasePencil
|
||||
_class = BlGpencil
|
@ -24,9 +24,12 @@ import bpy
|
||||
import mathutils
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .dump_anything import Dumper, Loader
|
||||
from .bl_file import get_filepath, ensure_unpacked
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
|
||||
format_to_ext = {
|
||||
'BMP': 'bmp',
|
||||
@ -48,13 +51,14 @@ format_to_ext = {
|
||||
}
|
||||
|
||||
|
||||
class BlImage(BlDatablock):
|
||||
class BlImage(ReplicatedDatablock):
|
||||
bl_id = "images"
|
||||
bl_class = bpy.types.Image
|
||||
bl_check_common = False
|
||||
bl_icon = 'IMAGE_DATA'
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.images.new(
|
||||
name=data['name'],
|
||||
@ -62,18 +66,21 @@ class BlImage(BlDatablock):
|
||||
height=data['size'][1]
|
||||
)
|
||||
|
||||
def load(self, data, target):
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(data, target)
|
||||
loader.load(data, datablock)
|
||||
|
||||
target.source = 'FILE'
|
||||
target.filepath_raw = get_filepath(data['filename'])
|
||||
target.colorspace_settings.name = data["colorspace_settings"]["name"]
|
||||
datablock.source = 'FILE'
|
||||
datablock.filepath_raw = get_filepath(data['filename'])
|
||||
color_space_name = data["colorspace_settings"]["name"]
|
||||
|
||||
def dump(self, instance=None):
|
||||
assert(instance)
|
||||
if color_space_name:
|
||||
datablock.colorspace_settings.name = color_space_name
|
||||
|
||||
filename = Path(instance.filepath).name
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
filename = Path(datablock.filepath).name
|
||||
|
||||
data = {
|
||||
"filename": filename
|
||||
@ -83,23 +90,20 @@ class BlImage(BlDatablock):
|
||||
dumper.depth = 2
|
||||
dumper.include_filter = [
|
||||
"name",
|
||||
# 'source',
|
||||
'size',
|
||||
'height',
|
||||
'alpha',
|
||||
'float_buffer',
|
||||
'alpha_mode',
|
||||
'colorspace_settings']
|
||||
data.update(dumper.dump(instance))
|
||||
data.update(dumper.dump(datablock))
|
||||
return data
|
||||
|
||||
def diff(self):
|
||||
if self.instance.is_dirty:
|
||||
self.instance.save()
|
||||
|
||||
if self.instance and (self.instance.name != self.data['name']):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.images)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
@ -122,3 +126,16 @@ class BlImage(BlDatablock):
|
||||
deps.append(Path(bpy.path.abspath(datablock.filepath)))
|
||||
|
||||
return deps
|
||||
|
||||
@staticmethod
|
||||
def needs_update(datablock: object, data:dict)-> bool:
|
||||
if datablock.is_dirty:
|
||||
datablock.save()
|
||||
|
||||
if not data or (datablock and (datablock.name != data.get('name'))):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
_type = bpy.types.Image
|
||||
_class = BlImage
|
@ -20,33 +20,39 @@ import bpy
|
||||
import mathutils
|
||||
|
||||
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from replication.exception import ContextError
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
POINT = ['co', 'weight_softbody', 'co_deform']
|
||||
|
||||
|
||||
class BlLattice(BlDatablock):
|
||||
class BlLattice(ReplicatedDatablock):
|
||||
bl_id = "lattices"
|
||||
bl_class = bpy.types.Lattice
|
||||
bl_check_common = False
|
||||
bl_icon = 'LATTICE_DATA'
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.lattices.new(data["name"])
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
if target.is_editmode:
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
if datablock.is_editmode:
|
||||
raise ContextError("lattice is in edit mode")
|
||||
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
|
||||
np_load_collection(data['points'], target.points, POINT)
|
||||
np_load_collection(data['points'], datablock.points, POINT)
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
if instance.is_editmode:
|
||||
if datablock.is_editmode:
|
||||
raise ContextError("lattice is in edit mode")
|
||||
|
||||
dumper = Dumper()
|
||||
@ -62,9 +68,20 @@ class BlLattice(BlDatablock):
|
||||
'interpolation_type_w',
|
||||
'use_outside'
|
||||
]
|
||||
data = dumper.dump(instance)
|
||||
|
||||
data['points'] = np_dump_collection(instance.points, POINT)
|
||||
data = dumper.dump(datablock)
|
||||
|
||||
data['points'] = np_dump_collection(datablock.points, POINT)
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.lattices)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
return resolve_animation_dependencies(datablock)
|
||||
|
||||
_type = bpy.types.Lattice
|
||||
_class = BlLattice
|
@ -20,25 +20,32 @@ import bpy
|
||||
import mathutils
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
|
||||
class BlLight(BlDatablock):
|
||||
class BlLight(ReplicatedDatablock):
|
||||
bl_id = "lights"
|
||||
bl_class = bpy.types.Light
|
||||
bl_check_common = False
|
||||
bl_icon = 'LIGHT_DATA'
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.lights.new(data["name"], data["type"])
|
||||
instance = bpy.data.lights.new(data["name"], data["type"])
|
||||
instance.uuid = data.get("uuid")
|
||||
return instance
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
assert(instance)
|
||||
dumper = Dumper()
|
||||
dumper.depth = 3
|
||||
dumper.include_filter = [
|
||||
@ -67,9 +74,23 @@ class BlLight(BlDatablock):
|
||||
'spot_size',
|
||||
'spot_blend'
|
||||
]
|
||||
data = dumper.dump(instance)
|
||||
data = dumper.dump(datablock)
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.lights)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
|
||||
|
||||
_type = [bpy.types.SpotLight, bpy.types.PointLight, bpy.types.AreaLight, bpy.types.SunLight]
|
||||
_class = BlLight
|
@ -21,16 +21,17 @@ import mathutils
|
||||
import logging
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
|
||||
|
||||
class BlLightprobe(BlDatablock):
|
||||
class BlLightprobe(ReplicatedDatablock):
|
||||
bl_id = "lightprobes"
|
||||
bl_class = bpy.types.LightProbe
|
||||
bl_check_common = False
|
||||
bl_icon = 'LIGHTPROBE_GRID'
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type']
|
||||
# See https://developer.blender.org/D6396
|
||||
@ -39,12 +40,13 @@ class BlLightprobe(BlDatablock):
|
||||
else:
|
||||
logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
assert(instance)
|
||||
if bpy.app.version[1] < 83:
|
||||
logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
|
||||
|
||||
@ -71,7 +73,16 @@ class BlLightprobe(BlDatablock):
|
||||
'visibility_blur'
|
||||
]
|
||||
|
||||
return dumper.dump(instance)
|
||||
return dumper.dump(datablock)
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.lightprobes)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
return []
|
||||
|
||||
_type = bpy.types.LightProbe
|
||||
_class = BlLightprobe
|
@ -24,9 +24,10 @@ import re
|
||||
from uuid import uuid4
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import get_datablock_from_uuid, stamp_uuid
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from replication.objects import Node
|
||||
|
||||
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
|
||||
IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM']
|
||||
@ -36,7 +37,7 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
|
||||
|
||||
:arg node_data: dumped node data
|
||||
:type node_data: dict
|
||||
:arg node_tree: datablock node_tree
|
||||
:arg node_tree: target node_tree
|
||||
:type node_tree: bpy.types.NodeTree
|
||||
"""
|
||||
loader = Loader()
|
||||
@ -47,7 +48,11 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
|
||||
node_tree_uuid = node_data.get('node_tree_uuid', None)
|
||||
|
||||
if image_uuid and not target_node.image:
|
||||
target_node.image = get_datablock_from_uuid(image_uuid, None)
|
||||
image = resolve_datablock_from_uuid(image_uuid, bpy.data.images)
|
||||
if image is None:
|
||||
logging.error(f"Fail to find material image from uuid {image_uuid}")
|
||||
else:
|
||||
target_node.image = image
|
||||
|
||||
if node_tree_uuid:
|
||||
target_node.node_tree = get_datablock_from_uuid(node_tree_uuid, None)
|
||||
@ -90,7 +95,7 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
|
||||
def dump_node(node: bpy.types.ShaderNode) -> dict:
|
||||
""" Dump a single node to a dict
|
||||
|
||||
:arg node: datablock node
|
||||
:arg node: target node
|
||||
:type node: bpy.types.Node
|
||||
:retrun: dict
|
||||
"""
|
||||
@ -251,7 +256,7 @@ def dump_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict:
|
||||
def dump_node_tree_sockets(sockets: bpy.types.Collection) -> dict:
|
||||
""" dump sockets of a shader_node_tree
|
||||
|
||||
:arg target_node_tree: datablock node_tree
|
||||
:arg target_node_tree: target node_tree
|
||||
:type target_node_tree: bpy.types.NodeTree
|
||||
:arg socket_id: socket identifer
|
||||
:type socket_id: str
|
||||
@ -274,7 +279,7 @@ def load_node_tree_sockets(sockets: bpy.types.Collection,
|
||||
sockets_data: dict):
|
||||
""" load sockets of a shader_node_tree
|
||||
|
||||
:arg target_node_tree: datablock node_tree
|
||||
:arg target_node_tree: target node_tree
|
||||
:type target_node_tree: bpy.types.NodeTree
|
||||
:arg socket_id: socket identifer
|
||||
:type socket_id: str
|
||||
@ -302,7 +307,7 @@ def load_node_tree(node_tree_data: dict, target_node_tree: bpy.types.ShaderNodeT
|
||||
|
||||
:arg node_tree_data: dumped node data
|
||||
:type node_tree_data: dict
|
||||
:arg target_node_tree: datablock node_tree
|
||||
:arg target_node_tree: target node_tree
|
||||
:type target_node_tree: bpy.types.NodeTree
|
||||
"""
|
||||
# TODO: load only required nodes
|
||||
@ -375,7 +380,7 @@ def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_
|
||||
|
||||
:arg src_materials: dumped material collection (ex: object.materials)
|
||||
:type src_materials: list of tuples (uuid, name)
|
||||
:arg dst_materials: datablock material collection pointer
|
||||
:arg dst_materials: target material collection pointer
|
||||
:type dst_materials: bpy.types.bpy_prop_collection
|
||||
"""
|
||||
# MATERIAL SLOTS
|
||||
@ -404,7 +409,8 @@ class BlMaterial(ReplicatedDatablock):
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
data = data
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
loader = Loader()
|
||||
|
||||
is_grease_pencil = data.get('is_grease_pencil')
|
||||
@ -424,8 +430,6 @@ class BlMaterial(ReplicatedDatablock):
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
stamp_uuid(datablock)
|
||||
|
||||
mat_dumper = Dumper()
|
||||
mat_dumper.depth = 2
|
||||
mat_dumper.include_filter = [
|
||||
@ -491,17 +495,24 @@ class BlMaterial(ReplicatedDatablock):
|
||||
elif datablock.use_nodes:
|
||||
data['node_tree'] = dump_node_tree(datablock.node_tree)
|
||||
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.materials)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
# TODO: resolve node group deps
|
||||
deps = []
|
||||
|
||||
if datablock.use_nodes:
|
||||
deps.extend(get_node_tree_dependencies(datablock.node_tree))
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
_type = bpy.types.Material
|
||||
_class = BlMaterial
|
||||
_class = BlMaterial
|
@ -22,20 +22,16 @@ import mathutils
|
||||
import logging
|
||||
import numpy as np
|
||||
|
||||
from .dump_anything import (Dumper,
|
||||
Loader,
|
||||
np_load_collection_primitives,
|
||||
np_dump_collection_primitive,
|
||||
np_load_collection, np_dump_collection)
|
||||
from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
|
||||
from replication.constants import DIFF_BINARY
|
||||
from replication.exception import ContextError
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from replication.objects import Node
|
||||
|
||||
from .bl_datablock import get_datablock_from_uuid, stamp_uuid
|
||||
from .bl_datablock import get_datablock_from_uuid
|
||||
from .bl_material import dump_materials_slots, load_materials_slots
|
||||
|
||||
from ..preferences import get_preferences
|
||||
from ..utils import get_preferences
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
VERTICE = ['co']
|
||||
|
||||
@ -58,7 +54,6 @@ POLYGON = [
|
||||
'material_index',
|
||||
]
|
||||
|
||||
|
||||
class BlMesh(ReplicatedDatablock):
|
||||
bl_id = "meshes"
|
||||
bl_class = bpy.types.Mesh
|
||||
@ -68,17 +63,15 @@ class BlMesh(ReplicatedDatablock):
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
datablock = bpy.data.meshes.new(data["name"])
|
||||
datablock.uuid = data['uuid']
|
||||
return datablock
|
||||
return bpy.data.meshes.new(data.get("name"))
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
data = data
|
||||
|
||||
if not datablock or datablock.is_editmode:
|
||||
raise ContextError
|
||||
else:
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
loader = Loader()
|
||||
loader.load(datablock, data)
|
||||
|
||||
@ -100,7 +93,7 @@ class BlMesh(ReplicatedDatablock):
|
||||
np_load_collection(data['vertices'], datablock.vertices, VERTICE)
|
||||
np_load_collection(data['edges'], datablock.edges, EDGE)
|
||||
np_load_collection(data['loops'], datablock.loops, LOOP)
|
||||
np_load_collection(data["polygons"], datablock.polygons, POLYGON)
|
||||
np_load_collection(data["polygons"],datablock.polygons, POLYGON)
|
||||
|
||||
# UV Layers
|
||||
if 'uv_layers' in data.keys():
|
||||
@ -109,10 +102,10 @@ class BlMesh(ReplicatedDatablock):
|
||||
datablock.uv_layers.new(name=layer)
|
||||
|
||||
np_load_collection_primitives(
|
||||
datablock.uv_layers[layer].data,
|
||||
'uv',
|
||||
datablock.uv_layers[layer].data,
|
||||
'uv',
|
||||
data["uv_layers"][layer]['data'])
|
||||
|
||||
|
||||
# Vertex color
|
||||
if 'vertex_colors' in data.keys():
|
||||
for color_layer in data['vertex_colors']:
|
||||
@ -120,8 +113,8 @@ class BlMesh(ReplicatedDatablock):
|
||||
datablock.vertex_colors.new(name=color_layer)
|
||||
|
||||
np_load_collection_primitives(
|
||||
datablock.vertex_colors[color_layer].data,
|
||||
'color',
|
||||
datablock.vertex_colors[color_layer].data,
|
||||
'color',
|
||||
data["vertex_colors"][color_layer]['data'])
|
||||
|
||||
datablock.validate()
|
||||
@ -129,8 +122,6 @@ class BlMesh(ReplicatedDatablock):
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
stamp_uuid(datablock)
|
||||
|
||||
if (datablock.is_editmode or bpy.context.mode == "SCULPT") and not get_preferences().sync_flags.sync_during_editmode:
|
||||
raise ContextError("Mesh is in edit mode")
|
||||
mesh = datablock
|
||||
@ -138,7 +129,6 @@ class BlMesh(ReplicatedDatablock):
|
||||
dumper = Dumper()
|
||||
dumper.depth = 1
|
||||
dumper.include_filter = [
|
||||
'uuid'
|
||||
'name',
|
||||
'use_auto_smooth',
|
||||
'auto_smooth_angle',
|
||||
@ -148,6 +138,8 @@ class BlMesh(ReplicatedDatablock):
|
||||
|
||||
data = dumper.dump(mesh)
|
||||
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
|
||||
# VERTICES
|
||||
data["vertex_count"] = len(mesh.vertices)
|
||||
data["vertices"] = np_dump_collection(mesh.vertices, VERTICE)
|
||||
@ -169,21 +161,19 @@ class BlMesh(ReplicatedDatablock):
|
||||
data['uv_layers'] = {}
|
||||
for layer in mesh.uv_layers:
|
||||
data['uv_layers'][layer.name] = {}
|
||||
data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive(
|
||||
layer.data, 'uv')
|
||||
data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive(layer.data, 'uv')
|
||||
|
||||
# Vertex color
|
||||
if mesh.vertex_colors:
|
||||
data['vertex_colors'] = {}
|
||||
for color_map in mesh.vertex_colors:
|
||||
data['vertex_colors'][color_map.name] = {}
|
||||
data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(
|
||||
color_map.data, 'color')
|
||||
data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')
|
||||
|
||||
# Materials
|
||||
data['materials'] = dump_materials_slots(datablock.materials)
|
||||
return data
|
||||
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
@ -192,14 +182,19 @@ class BlMesh(ReplicatedDatablock):
|
||||
if material:
|
||||
deps.append(material)
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
def diff(self):
|
||||
if 'EDIT' in bpy.context.mode \
|
||||
and not get_preferences().sync_flags.sync_during_editmode:
|
||||
return False
|
||||
else:
|
||||
return super().diff()
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.meshes)
|
||||
|
||||
@staticmethod
|
||||
def needs_update(datablock: object, data: dict) -> bool:
|
||||
return ('EDIT' not in bpy.context.mode and bpy.context.mode != 'SCULPT') \
|
||||
or get_preferences().sync_flags.sync_during_editmode
|
||||
|
||||
_type = bpy.types.Mesh
|
||||
_class = BlMesh
|
||||
_class = BlMesh
|
@ -23,7 +23,9 @@ from .dump_anything import (
|
||||
Dumper, Loader, np_dump_collection_primitive, np_load_collection_primitives,
|
||||
np_dump_collection, np_load_collection)
|
||||
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
|
||||
ELEMENT = [
|
||||
@ -62,29 +64,33 @@ def load_metaball_elements(elements_data, elements):
|
||||
np_load_collection(elements_data, elements, ELEMENT)
|
||||
|
||||
|
||||
class BlMetaball(BlDatablock):
|
||||
class BlMetaball(ReplicatedDatablock):
|
||||
bl_id = "metaballs"
|
||||
bl_class = bpy.types.MetaBall
|
||||
bl_check_common = False
|
||||
bl_icon = 'META_BALL'
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.metaballs.new(data["name"])
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
target.elements.clear()
|
||||
loader = Loader()
|
||||
loader.load(datablock, data)
|
||||
|
||||
datablock.elements.clear()
|
||||
|
||||
for mtype in data["elements"]['type']:
|
||||
new_element = target.elements.new()
|
||||
new_element = datablock.elements.new()
|
||||
|
||||
load_metaball_elements(data['elements'], target.elements)
|
||||
load_metaball_elements(data['elements'], datablock.elements)
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
assert(instance)
|
||||
dumper = Dumper()
|
||||
dumper.depth = 1
|
||||
dumper.include_filter = [
|
||||
@ -98,7 +104,24 @@ class BlMetaball(BlDatablock):
|
||||
'texspace_size'
|
||||
]
|
||||
|
||||
data = dumper.dump(instance)
|
||||
data['elements'] = dump_metaball_elements(instance.elements)
|
||||
data = dumper.dump(datablock)
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
data['elements'] = dump_metaball_elements(datablock.elements)
|
||||
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.metaballs)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
_type = bpy.types.MetaBall
|
||||
_class = BlMetaball
|
@ -20,27 +20,43 @@ import bpy
|
||||
import mathutils
|
||||
|
||||
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_material import (dump_node_tree,
|
||||
load_node_tree,
|
||||
get_node_tree_dependencies)
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
class BlNodeGroup(BlDatablock):
|
||||
class BlNodeGroup(ReplicatedDatablock):
|
||||
bl_id = "node_groups"
|
||||
bl_class = bpy.types.NodeTree
|
||||
bl_check_common = False
|
||||
bl_icon = 'NODETREE'
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.node_groups.new(data["name"], data["type"])
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
load_node_tree(data, target)
|
||||
load_node_tree(data, datablock)
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
return dump_node_tree(instance)
|
||||
return dump_node_tree(datablock)
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.node_groups)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
return get_node_tree_dependencies(datablock)
|
||||
deps = []
|
||||
deps.extend(get_node_tree_dependencies(datablock))
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
return deps
|
||||
|
||||
_type = [bpy.types.ShaderNodeTree, bpy.types.GeometryNodeTree]
|
||||
_class = BlNodeGroup
|
@ -21,17 +21,12 @@ import re
|
||||
import bpy
|
||||
import mathutils
|
||||
from replication.exception import ContextError
|
||||
from replication.objects import Node
|
||||
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
|
||||
from .bl_datablock import get_datablock_from_uuid, stamp_uuid
|
||||
from .bl_action import (load_animation_data,
|
||||
dump_animation_data,
|
||||
resolve_animation_dependencies)
|
||||
|
||||
from ..preferences import get_preferences
|
||||
from .bl_datablock import get_datablock_from_uuid
|
||||
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
|
||||
from .bl_material import IGNORED_SOCKETS
|
||||
from ..utils import get_preferences
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
from .dump_anything import (
|
||||
Dumper,
|
||||
Loader,
|
||||
@ -45,6 +40,14 @@ SKIN_DATA = [
|
||||
'use_root'
|
||||
]
|
||||
|
||||
SHAPEKEY_BLOCK_ATTR = [
|
||||
'mute',
|
||||
'value',
|
||||
'slider_min',
|
||||
'slider_max',
|
||||
]
|
||||
|
||||
|
||||
if bpy.app.version[1] >= 93:
|
||||
SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
|
||||
else:
|
||||
@ -52,6 +55,7 @@ else:
|
||||
logging.warning("Geometry node Float parameter not supported in \
|
||||
blender 2.92.")
|
||||
|
||||
|
||||
def get_node_group_inputs(node_group):
|
||||
inputs = []
|
||||
for inpt in node_group.inputs:
|
||||
@ -90,6 +94,7 @@ def dump_physics(target: bpy.types.Object)->dict:
|
||||
|
||||
return physics_data
|
||||
|
||||
|
||||
def load_physics(dumped_settings: dict, target: bpy.types.Object):
|
||||
""" Load all physics settings from a given object excluding modifier
|
||||
related physics settings (such as softbody, cloth, dynapaint and fluid)
|
||||
@ -115,7 +120,8 @@ def load_physics(dumped_settings: dict, target: bpy.types.Object):
|
||||
loader.load(target.rigid_body_constraint, dumped_settings['rigid_body_constraint'])
|
||||
elif target.rigid_body_constraint:
|
||||
bpy.ops.rigidbody.constraint_remove({"object": target})
|
||||
|
||||
|
||||
|
||||
def dump_modifier_geometry_node_inputs(modifier: bpy.types.Modifier) -> list:
|
||||
""" Dump geometry node modifier input properties
|
||||
|
||||
@ -167,40 +173,45 @@ def load_pose(target_bone, data):
|
||||
|
||||
|
||||
def find_data_from_name(name=None):
|
||||
datablock = None
|
||||
instance = None
|
||||
if not name:
|
||||
pass
|
||||
elif name in bpy.data.meshes.keys():
|
||||
datablock = bpy.data.meshes[name]
|
||||
instance = bpy.data.meshes[name]
|
||||
elif name in bpy.data.lights.keys():
|
||||
datablock = bpy.data.lights[name]
|
||||
instance = bpy.data.lights[name]
|
||||
elif name in bpy.data.cameras.keys():
|
||||
datablock = bpy.data.cameras[name]
|
||||
instance = bpy.data.cameras[name]
|
||||
elif name in bpy.data.curves.keys():
|
||||
datablock = bpy.data.curves[name]
|
||||
instance = bpy.data.curves[name]
|
||||
elif name in bpy.data.metaballs.keys():
|
||||
datablock = bpy.data.metaballs[name]
|
||||
instance = bpy.data.metaballs[name]
|
||||
elif name in bpy.data.armatures.keys():
|
||||
datablock = bpy.data.armatures[name]
|
||||
instance = bpy.data.armatures[name]
|
||||
elif name in bpy.data.grease_pencils.keys():
|
||||
datablock = bpy.data.grease_pencils[name]
|
||||
instance = bpy.data.grease_pencils[name]
|
||||
elif name in bpy.data.curves.keys():
|
||||
datablock = bpy.data.curves[name]
|
||||
instance = bpy.data.curves[name]
|
||||
elif name in bpy.data.lattices.keys():
|
||||
datablock = bpy.data.lattices[name]
|
||||
instance = bpy.data.lattices[name]
|
||||
elif name in bpy.data.speakers.keys():
|
||||
datablock = bpy.data.speakers[name]
|
||||
instance = bpy.data.speakers[name]
|
||||
elif name in bpy.data.lightprobes.keys():
|
||||
# Only supported since 2.83
|
||||
if bpy.app.version[1] >= 83:
|
||||
datablock = bpy.data.lightprobes[name]
|
||||
instance = bpy.data.lightprobes[name]
|
||||
else:
|
||||
logging.warning(
|
||||
"Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
|
||||
elif bpy.app.version[1] >= 91 and name in bpy.data.volumes.keys():
|
||||
# Only supported since 2.91
|
||||
datablock = bpy.data.volumes[name]
|
||||
return datablock
|
||||
instance = bpy.data.volumes[name]
|
||||
return instance
|
||||
|
||||
|
||||
def load_data(object, name):
|
||||
logging.info("loading data")
|
||||
pass
|
||||
|
||||
|
||||
def _is_editmode(object: bpy.types.Object) -> bool:
|
||||
@ -247,7 +258,6 @@ def find_geometry_nodes_dependencies(modifiers: bpy.types.bpy_prop_collection) -
|
||||
return dependencies
|
||||
|
||||
|
||||
|
||||
def dump_vertex_groups(src_object: bpy.types.Object) -> dict:
|
||||
""" Dump object's vertex groups
|
||||
|
||||
@ -293,43 +303,226 @@ def load_vertex_groups(dumped_vertex_groups: dict, target_object: bpy.types.Obje
|
||||
vertex_group.add([index], weight, 'REPLACE')
|
||||
|
||||
|
||||
def dump_shape_keys(target_key: bpy.types.Key)->dict:
|
||||
""" Dump the target shape_keys datablock to a dict using numpy
|
||||
|
||||
:param dumped_key: target key datablock
|
||||
:type dumped_key: bpy.types.Key
|
||||
:return: dict
|
||||
"""
|
||||
|
||||
dumped_key_blocks = []
|
||||
dumper = Dumper()
|
||||
dumper.include_filter = [
|
||||
'name',
|
||||
'mute',
|
||||
'value',
|
||||
'slider_min',
|
||||
'slider_max',
|
||||
]
|
||||
for key in target_key.key_blocks:
|
||||
dumped_key_block = dumper.dump(key)
|
||||
dumped_key_block['data'] = np_dump_collection(key.data, ['co'])
|
||||
dumped_key_block['relative_key'] = key.relative_key.name
|
||||
dumped_key_blocks.append(dumped_key_block)
|
||||
|
||||
return {
|
||||
'reference_key': target_key.reference_key.name,
|
||||
'use_relative': target_key.use_relative,
|
||||
'key_blocks': dumped_key_blocks,
|
||||
'animation_data': dump_animation_data(target_key)
|
||||
}
|
||||
|
||||
|
||||
def load_shape_keys(dumped_shape_keys: dict, target_object: bpy.types.Object):
|
||||
""" Load the target shape_keys datablock to a dict using numpy
|
||||
|
||||
:param dumped_key: src key data
|
||||
:type dumped_key: bpy.types.Key
|
||||
:param target_object: object used to load the shapekeys data onto
|
||||
:type target_object: bpy.types.Object
|
||||
"""
|
||||
loader = Loader()
|
||||
# Remove existing ones
|
||||
target_object.shape_key_clear()
|
||||
|
||||
# Create keys and load vertices coords
|
||||
dumped_key_blocks = dumped_shape_keys.get('key_blocks')
|
||||
for dumped_key_block in dumped_key_blocks:
|
||||
key_block = target_object.shape_key_add(name=dumped_key_block['name'])
|
||||
|
||||
loader.load(key_block, dumped_key_block)
|
||||
np_load_collection(dumped_key_block['data'], key_block.data, ['co'])
|
||||
|
||||
# Load relative key after all
|
||||
for dumped_key_block in dumped_key_blocks:
|
||||
relative_key_name = dumped_key_block.get('relative_key')
|
||||
key_name = dumped_key_block.get('name')
|
||||
|
||||
target_keyblock = target_object.data.shape_keys.key_blocks[key_name]
|
||||
relative_key = target_object.data.shape_keys.key_blocks[relative_key_name]
|
||||
|
||||
target_keyblock.relative_key = relative_key
|
||||
|
||||
# Shape keys animation data
|
||||
anim_data = dumped_shape_keys.get('animation_data')
|
||||
|
||||
if anim_data:
|
||||
load_animation_data(anim_data, target_object.data.shape_keys)
|
||||
|
||||
|
||||
def dump_modifiers(modifiers: bpy.types.bpy_prop_collection)->dict:
|
||||
""" Dump all modifiers of a modifier collection into a dict
|
||||
|
||||
:param modifiers: modifiers
|
||||
:type modifiers: bpy.types.bpy_prop_collection
|
||||
:return: dict
|
||||
"""
|
||||
dumped_modifiers = []
|
||||
dumper = Dumper()
|
||||
dumper.depth = 1
|
||||
dumper.exclude_filter = ['is_active']
|
||||
|
||||
for modifier in modifiers:
|
||||
dumped_modifier = dumper.dump(modifier)
|
||||
# hack to dump geometry nodes inputs
|
||||
if modifier.type == 'NODES':
|
||||
dumped_inputs = dump_modifier_geometry_node_inputs(
|
||||
modifier)
|
||||
dumped_modifier['inputs'] = dumped_inputs
|
||||
|
||||
elif modifier.type == 'PARTICLE_SYSTEM':
|
||||
dumper.exclude_filter = [
|
||||
"is_edited",
|
||||
"is_editable",
|
||||
"is_global_hair"
|
||||
]
|
||||
dumped_modifier['particle_system'] = dumper.dump(modifier.particle_system)
|
||||
dumped_modifier['particle_system']['settings_uuid'] = modifier.particle_system.settings.uuid
|
||||
|
||||
elif modifier.type in ['SOFT_BODY', 'CLOTH']:
|
||||
dumped_modifier['settings'] = dumper.dump(modifier.settings)
|
||||
elif modifier.type == 'UV_PROJECT':
|
||||
dumped_modifier['projectors'] =[p.object.name for p in modifier.projectors if p and p.object]
|
||||
|
||||
dumped_modifiers.append(dumped_modifier)
|
||||
return dumped_modifiers
|
||||
|
||||
def dump_constraints(constraints: bpy.types.bpy_prop_collection)->list:
|
||||
"""Dump all constraints to a list
|
||||
|
||||
:param constraints: constraints
|
||||
:type constraints: bpy.types.bpy_prop_collection
|
||||
:return: dict
|
||||
"""
|
||||
dumper = Dumper()
|
||||
dumper.depth = 2
|
||||
dumper.include_filter = None
|
||||
dumped_constraints = []
|
||||
for constraint in constraints:
|
||||
dumped_constraints.append(dumper.dump(constraint))
|
||||
return dumped_constraints
|
||||
|
||||
def load_constraints(dumped_constraints: list, constraints: bpy.types.bpy_prop_collection):
|
||||
""" Load dumped constraints
|
||||
|
||||
:param dumped_constraints: list of constraints to load
|
||||
:type dumped_constraints: list
|
||||
:param constraints: constraints
|
||||
:type constraints: bpy.types.bpy_prop_collection
|
||||
"""
|
||||
loader = Loader()
|
||||
constraints.clear()
|
||||
for dumped_constraint in dumped_constraints:
|
||||
constraint_type = dumped_constraint.get('type')
|
||||
new_constraint = constraints.new(constraint_type)
|
||||
loader.load(new_constraint, dumped_constraint)
|
||||
|
||||
def load_modifiers(dumped_modifiers: list, modifiers: bpy.types.bpy_prop_collection):
|
||||
""" Dump all modifiers of a modifier collection into a dict
|
||||
|
||||
:param dumped_modifiers: list of modifiers to load
|
||||
:type dumped_modifiers: list
|
||||
:param modifiers: modifiers
|
||||
:type modifiers: bpy.types.bpy_prop_collection
|
||||
"""
|
||||
loader = Loader()
|
||||
modifiers.clear()
|
||||
for dumped_modifier in dumped_modifiers:
|
||||
name = dumped_modifier.get('name')
|
||||
mtype = dumped_modifier.get('type')
|
||||
loaded_modifier = modifiers.new(name, mtype)
|
||||
loader.load(loaded_modifier, dumped_modifier)
|
||||
|
||||
if loaded_modifier.type == 'NODES':
|
||||
load_modifier_geometry_node_inputs(dumped_modifier, loaded_modifier)
|
||||
elif loaded_modifier.type == 'PARTICLE_SYSTEM':
|
||||
default = loaded_modifier.particle_system.settings
|
||||
dumped_particles = dumped_modifier['particle_system']
|
||||
loader.load(loaded_modifier.particle_system, dumped_particles)
|
||||
|
||||
settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
|
||||
if settings:
|
||||
loaded_modifier.particle_system.settings = settings
|
||||
# Hack to remove the default generated particle settings
|
||||
if not default.uuid:
|
||||
bpy.data.particles.remove(default)
|
||||
elif loaded_modifier.type in ['SOFT_BODY', 'CLOTH']:
|
||||
loader.load(loaded_modifier.settings, dumped_modifier['settings'])
|
||||
elif loaded_modifier.type == 'UV_PROJECT':
|
||||
for projector_index, projector_object in enumerate(dumped_modifier['projectors']):
|
||||
target_object = bpy.data.objects.get(projector_object)
|
||||
if target_object:
|
||||
loaded_modifier.projectors[projector_index].object = target_object
|
||||
else:
|
||||
logging.error("Could't load projector target object {projector_object}")
|
||||
|
||||
|
||||
def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_prop_collection):
|
||||
""" Load modifiers custom data not managed by the dump_anything loader
|
||||
|
||||
:param dumped_modifiers: modifiers to load
|
||||
:type dumped_modifiers: dict
|
||||
:param modifiers: target modifiers collection
|
||||
:type modifiers: bpy.types.bpy_prop_collection
|
||||
"""
|
||||
loader = Loader()
|
||||
|
||||
for modifier in modifiers:
|
||||
dumped_modifier = dumped_modifiers.get(modifier.name)
|
||||
|
||||
|
||||
class BlObject(ReplicatedDatablock):
|
||||
bl_id = "objects"
|
||||
bl_class = bpy.types.Object
|
||||
bl_check_common = False
|
||||
bl_icon = 'OBJECT_DATA'
|
||||
bl_reload_parent = False
|
||||
|
||||
is_root = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> bpy.types.Object:
|
||||
datablock = None
|
||||
def construct(data: dict) -> object:
|
||||
instance = None
|
||||
|
||||
# TODO: refactoring
|
||||
object_name = data.get("name")
|
||||
data_uuid = data.get("data_uuid")
|
||||
data_id = data.get("data")
|
||||
object_uuid = data.get('uuid')
|
||||
data_type = data.get("type")
|
||||
|
||||
object_data = get_datablock_from_uuid(
|
||||
data_uuid,
|
||||
find_data_from_name(data_id),
|
||||
ignore=['images']) # TODO: use resolve_from_id
|
||||
|
||||
if object_data is None and data_uuid:
|
||||
raise Exception(f"Fail to load object {data['name']}({object_uuid})")
|
||||
if data_type != 'EMPTY' and object_data is None:
|
||||
raise Exception(f"Fail to load object {data['name']})")
|
||||
|
||||
datablock = bpy.data.objects.new(object_name, object_data)
|
||||
datablock.uuid = object_uuid
|
||||
|
||||
return datablock
|
||||
return bpy.data.objects.new(object_name, object_data)
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: bpy.types.Object):
|
||||
data = datablock.data
|
||||
load_animation_data(data, datablock)
|
||||
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
data_uuid = data.get("data_uuid")
|
||||
data_id = data.get("data")
|
||||
|
||||
@ -345,24 +538,9 @@ class BlObject(ReplicatedDatablock):
|
||||
object_data = datablock.data
|
||||
|
||||
# SHAPE KEYS
|
||||
if 'shape_keys' in data:
|
||||
datablock.shape_key_clear()
|
||||
|
||||
# Create keys and load vertices coords
|
||||
for key_block in data['shape_keys']['key_blocks']:
|
||||
key_data = data['shape_keys']['key_blocks'][key_block]
|
||||
datablock.shape_key_add(name=key_block)
|
||||
|
||||
loader.load(
|
||||
datablock.data.shape_keys.key_blocks[key_block], key_data)
|
||||
for vert in key_data['data']:
|
||||
datablock.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
|
||||
|
||||
# Load relative key after all
|
||||
for key_block in data['shape_keys']['key_blocks']:
|
||||
reference = data['shape_keys']['key_blocks'][key_block]['relative_key']
|
||||
|
||||
datablock.data.shape_keys.key_blocks[key_block].relative_key = datablock.data.shape_keys.key_blocks[reference]
|
||||
shape_keys = data.get('shape_keys')
|
||||
if shape_keys:
|
||||
load_shape_keys(shape_keys, datablock)
|
||||
|
||||
# Load transformation data
|
||||
loader.load(datablock, data)
|
||||
@ -388,26 +566,26 @@ class BlObject(ReplicatedDatablock):
|
||||
# Bone groups
|
||||
for bg_name in data['pose']['bone_groups']:
|
||||
bg_data = data['pose']['bone_groups'].get(bg_name)
|
||||
bg_datablock = datablock.pose.bone_groups.get(bg_name)
|
||||
bg_target = datablock.pose.bone_groups.get(bg_name)
|
||||
|
||||
if not bg_datablock:
|
||||
bg_datablock = datablock.pose.bone_groups.new(name=bg_name)
|
||||
if not bg_target:
|
||||
bg_target = datablock.pose.bone_groups.new(name=bg_name)
|
||||
|
||||
loader.load(bg_datablock, bg_data)
|
||||
loader.load(bg_target, bg_data)
|
||||
# datablock.pose.bone_groups.get
|
||||
|
||||
# Bones
|
||||
for bone in data['pose']['bones']:
|
||||
datablock_bone = datablock.pose.bones.get(bone)
|
||||
target_bone = datablock.pose.bones.get(bone)
|
||||
bone_data = data['pose']['bones'].get(bone)
|
||||
|
||||
if 'constraints' in bone_data.keys():
|
||||
loader.load(datablock_bone, bone_data['constraints'])
|
||||
loader.load(target_bone, bone_data['constraints'])
|
||||
|
||||
load_pose(datablock_bone, bone_data)
|
||||
load_pose(target_bone, bone_data)
|
||||
|
||||
if 'bone_index' in bone_data.keys():
|
||||
datablock_bone.bone_group = datablock.pose.bone_group[bone_data['bone_group_index']]
|
||||
target_bone.bone_group = datablock.pose.bone_group[bone_data['bone_group_index']]
|
||||
|
||||
# TODO: find another way...
|
||||
if datablock.empty_display_type == "IMAGE":
|
||||
@ -428,34 +606,12 @@ class BlObject(ReplicatedDatablock):
|
||||
and 'cycles_visibility' in data:
|
||||
loader.load(datablock.cycles_visibility, data['cycles_visibility'])
|
||||
|
||||
# TODO: handle geometry nodes input from dump_anything
|
||||
if hasattr(datablock, 'modifiers'):
|
||||
nodes_modifiers = [
|
||||
mod for mod in datablock.modifiers if mod.type == 'NODES']
|
||||
for modifier in nodes_modifiers:
|
||||
load_modifier_geometry_node_inputs(
|
||||
data['modifiers'][modifier.name], modifier)
|
||||
load_modifiers(data['modifiers'], datablock.modifiers)
|
||||
|
||||
particles_modifiers = [
|
||||
mod for mod in datablock.modifiers if mod.type == 'PARTICLE_SYSTEM']
|
||||
|
||||
for mod in particles_modifiers:
|
||||
default = mod.particle_system.settings
|
||||
dumped_particles = data['modifiers'][mod.name]['particle_system']
|
||||
loader.load(mod.particle_system, dumped_particles)
|
||||
|
||||
settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
|
||||
if settings:
|
||||
mod.particle_system.settings = settings
|
||||
# Hack to remove the default generated particle settings
|
||||
if not default.uuid:
|
||||
bpy.data.particles.remove(default)
|
||||
|
||||
phys_modifiers = [
|
||||
mod for mod in datablock.modifiers if mod.type in ['SOFT_BODY', 'CLOTH']]
|
||||
|
||||
for mod in phys_modifiers:
|
||||
loader.load(mod.settings, data['modifiers'][mod.name]['settings'])
|
||||
constraints = data.get('constraints')
|
||||
if constraints:
|
||||
load_constraints(constraints, datablock.constraints)
|
||||
|
||||
# PHYSICS
|
||||
load_physics(data, datablock)
|
||||
@ -470,10 +626,8 @@ class BlObject(ReplicatedDatablock):
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
assert(datablock)
|
||||
|
||||
if _is_editmode(datablock):
|
||||
if self.preferences.sync_flags.sync_during_editmode:
|
||||
if get_preferences().sync_flags.sync_during_editmode:
|
||||
datablock.update_from_editmode()
|
||||
else:
|
||||
raise ContextError("Object is in edit-mode.")
|
||||
@ -481,7 +635,6 @@ class BlObject(ReplicatedDatablock):
|
||||
dumper = Dumper()
|
||||
dumper.depth = 1
|
||||
dumper.include_filter = [
|
||||
"uuid",
|
||||
"name",
|
||||
"rotation_mode",
|
||||
"data",
|
||||
@ -511,11 +664,15 @@ class BlObject(ReplicatedDatablock):
|
||||
'show_all_edges',
|
||||
'show_texture_space',
|
||||
'show_in_front',
|
||||
'type'
|
||||
'type',
|
||||
'parent_type',
|
||||
'parent_bone',
|
||||
'track_axis',
|
||||
'up_axis',
|
||||
]
|
||||
|
||||
data = dumper.dump(datablock)
|
||||
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
dumper.include_filter = [
|
||||
'matrix_parent_inverse',
|
||||
'matrix_local',
|
||||
@ -533,34 +690,9 @@ class BlObject(ReplicatedDatablock):
|
||||
data['parent_uid'] = (datablock.parent.uuid, datablock.parent.name)
|
||||
|
||||
# MODIFIERS
|
||||
modifiers = getattr(datablock, 'modifiers', None)
|
||||
if hasattr(datablock, 'modifiers'):
|
||||
data["modifiers"] = {}
|
||||
modifiers = getattr(datablock, 'modifiers', None)
|
||||
if modifiers:
|
||||
dumper.include_filter = None
|
||||
dumper.depth = 1
|
||||
dumper.exclude_filter = ['is_active']
|
||||
for index, modifier in enumerate(modifiers):
|
||||
dumped_modifier = dumper.dump(modifier)
|
||||
# hack to dump geometry nodes inputs
|
||||
if modifier.type == 'NODES':
|
||||
dumped_inputs = dump_modifier_geometry_node_inputs(
|
||||
modifier)
|
||||
dumped_modifier['inputs'] = dumped_inputs
|
||||
|
||||
elif modifier.type == 'PARTICLE_SYSTEM':
|
||||
dumper.exclude_filter = [
|
||||
"is_edited",
|
||||
"is_editable",
|
||||
"is_global_hair"
|
||||
]
|
||||
dumped_modifier['particle_system'] = dumper.dump(modifier.particle_system)
|
||||
dumped_modifier['particle_system']['settings_uuid'] = modifier.particle_system.settings.uuid
|
||||
|
||||
elif modifier.type in ['SOFT_BODY', 'CLOTH']:
|
||||
dumped_modifier['settings'] = dumper.dump(modifier.settings)
|
||||
|
||||
data["modifiers"][modifier.name] = dumped_modifier
|
||||
data['modifiers'] = dump_modifiers(modifiers)
|
||||
|
||||
gp_modifiers = getattr(datablock, 'grease_pencil_modifiers', None)
|
||||
|
||||
@ -586,9 +718,7 @@ class BlObject(ReplicatedDatablock):
|
||||
|
||||
# CONSTRAINTS
|
||||
if hasattr(datablock, 'constraints'):
|
||||
dumper.include_filter = None
|
||||
dumper.depth = 3
|
||||
data["constraints"] = dumper.dump(datablock.constraints)
|
||||
data["constraints"] = dump_constraints(datablock.constraints)
|
||||
|
||||
# POSE
|
||||
if hasattr(datablock, 'pose') and datablock.pose:
|
||||
@ -635,30 +765,7 @@ class BlObject(ReplicatedDatablock):
|
||||
# SHAPE KEYS
|
||||
object_data = datablock.data
|
||||
if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
|
||||
dumper = Dumper()
|
||||
dumper.depth = 2
|
||||
dumper.include_filter = [
|
||||
'reference_key',
|
||||
'use_relative'
|
||||
]
|
||||
data['shape_keys'] = dumper.dump(object_data.shape_keys)
|
||||
data['shape_keys']['reference_key'] = object_data.shape_keys.reference_key.name
|
||||
key_blocks = {}
|
||||
for key in object_data.shape_keys.key_blocks:
|
||||
dumper.depth = 3
|
||||
dumper.include_filter = [
|
||||
'name',
|
||||
'data',
|
||||
'mute',
|
||||
'value',
|
||||
'slider_min',
|
||||
'slider_max',
|
||||
'data',
|
||||
'co'
|
||||
]
|
||||
key_blocks[key.name] = dumper.dump(key)
|
||||
key_blocks[key.name]['relative_key'] = key.relative_key.name
|
||||
data['shape_keys']['key_blocks'] = key_blocks
|
||||
data['shape_keys'] = dump_shape_keys(object_data.shape_keys)
|
||||
|
||||
# SKIN VERTICES
|
||||
if hasattr(object_data, 'skin_vertices') and object_data.skin_vertices:
|
||||
@ -678,16 +785,15 @@ class BlObject(ReplicatedDatablock):
|
||||
'scatter',
|
||||
'shadow',
|
||||
]
|
||||
data['cycles_visibility'] = dumper.dump(
|
||||
datablock.cycles_visibility)
|
||||
data['cycles_visibility'] = dumper.dump(datablock.cycles_visibility)
|
||||
|
||||
# PHYSICS
|
||||
data.update(dump_physics(instance))
|
||||
data.update(dump_physics(datablock))
|
||||
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: bpy.types.Object) -> list:
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
|
||||
# Avoid Empty case
|
||||
@ -705,13 +811,22 @@ class BlObject(ReplicatedDatablock):
|
||||
# TODO: uuid based
|
||||
deps.append(datablock.instance_collection)
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
if datablock.modifiers:
|
||||
deps.extend(find_textures_dependencies(datablock.modifiers))
|
||||
deps.extend(find_geometry_nodes_dependencies(datablock.modifiers))
|
||||
|
||||
if hasattr(datablock.data, 'shape_keys') and datablock.data.shape_keys:
|
||||
deps.extend(resolve_animation_dependencies(datablock.data.shape_keys))
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.objects)
|
||||
|
||||
_type = bpy.types.Object
|
||||
_class = BlObject
|
||||
_class = BlObject
|
104
multi_user/bl_types/bl_particle.py
Normal file
104
multi_user/bl_types/bl_particle.py
Normal file
@ -0,0 +1,104 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
from . import dump_anything
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import get_datablock_from_uuid
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
|
||||
def dump_textures_slots(texture_slots: bpy.types.bpy_prop_collection) -> list:
|
||||
""" Dump every texture slot collection as the form:
|
||||
[(index, slot_texture_uuid, slot_texture_name), (), ...]
|
||||
"""
|
||||
dumped_slots = []
|
||||
for index, slot in enumerate(texture_slots):
|
||||
if slot and slot.texture:
|
||||
dumped_slots.append((index, slot.texture.uuid, slot.texture.name))
|
||||
|
||||
return dumped_slots
|
||||
|
||||
|
||||
def load_texture_slots(dumped_slots: list, target_slots: bpy.types.bpy_prop_collection):
|
||||
"""
|
||||
"""
|
||||
for index, slot in enumerate(target_slots):
|
||||
if slot:
|
||||
target_slots.clear(index)
|
||||
|
||||
for index, slot_uuid, slot_name in dumped_slots:
|
||||
target_slots.create(index).texture = get_datablock_from_uuid(
|
||||
slot_uuid, slot_name
|
||||
)
|
||||
|
||||
IGNORED_ATTR = [
|
||||
"is_embedded_data",
|
||||
"is_evaluated",
|
||||
"is_fluid",
|
||||
"is_library_indirect",
|
||||
"users"
|
||||
]
|
||||
|
||||
class BlParticle(ReplicatedDatablock):
|
||||
bl_id = "particles"
|
||||
bl_class = bpy.types.ParticleSettings
|
||||
bl_icon = "PARTICLES"
|
||||
bl_check_common = False
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.particles.new(data["name"])
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
dump_anything.load(datablock, data)
|
||||
|
||||
dump_anything.load(datablock.effector_weights, data["effector_weights"])
|
||||
|
||||
# Force field
|
||||
force_field_1 = data.get("force_field_1", None)
|
||||
if force_field_1:
|
||||
dump_anything.load(datablock.force_field_1, force_field_1)
|
||||
|
||||
force_field_2 = data.get("force_field_2", None)
|
||||
if force_field_2:
|
||||
dump_anything.load(datablock.force_field_2, force_field_2)
|
||||
|
||||
# Texture slots
|
||||
load_texture_slots(data["texture_slots"], datablock.texture_slots)
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
dumper = dump_anything.Dumper()
|
||||
dumper.depth = 1
|
||||
dumper.exclude_filter = IGNORED_ATTR
|
||||
data = dumper.dump(datablock)
|
||||
|
||||
# Particle effectors
|
||||
data["effector_weights"] = dumper.dump(datablock.effector_weights)
|
||||
if datablock.force_field_1:
|
||||
data["force_field_1"] = dumper.dump(datablock.force_field_1)
|
||||
if datablock.force_field_2:
|
||||
data["force_field_2"] = dumper.dump(datablock.force_field_2)
|
||||
|
||||
# Texture slots
|
||||
data["texture_slots"] = dump_textures_slots(datablock.texture_slots)
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.particles)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = [t.texture for t in datablock.texture_slots if t and t.texture]
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
return deps
|
||||
|
||||
_type = bpy.types.ParticleSettings
|
||||
_class = BlParticle
|
@ -18,26 +18,23 @@
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from uuid import uuid4
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
from deepdiff import DeepDiff
|
||||
from deepdiff import DeepDiff, Delta
|
||||
from replication.constants import DIFF_JSON, MODIFIED
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from replication.objects import Node
|
||||
|
||||
from ..utils import flush_history
|
||||
from ..utils import flush_history, get_preferences
|
||||
from .bl_action import (dump_animation_data, load_animation_data,
|
||||
resolve_animation_dependencies)
|
||||
from .bl_collection import (dump_collection_children, dump_collection_objects,
|
||||
load_collection_childrens, load_collection_objects,
|
||||
resolve_collection_dependencies)
|
||||
from .bl_action import (load_animation_data,
|
||||
dump_animation_data,
|
||||
resolve_animation_dependencies)
|
||||
from .bl_datablock import stamp_uuid
|
||||
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_file import get_filepath
|
||||
from .dump_anything import Dumper, Loader
|
||||
from ..preferences import get_preferences
|
||||
|
||||
RENDER_SETTINGS = [
|
||||
'dither_intensity',
|
||||
@ -307,7 +304,8 @@ def dump_sequence(sequence: bpy.types.Sequence) -> dict:
|
||||
return data
|
||||
|
||||
|
||||
def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor):
|
||||
def load_sequence(sequence_data: dict,
|
||||
sequence_editor: bpy.types.SequenceEditor):
|
||||
""" Load sequence from dumped data
|
||||
|
||||
:arg sequence_data: sequence to dump
|
||||
@ -367,7 +365,7 @@ def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor
|
||||
**seq)
|
||||
|
||||
loader = Loader()
|
||||
# TODO: Support filepath updates
|
||||
|
||||
loader.exclure_filter = ['filepath', 'sound', 'filenames', 'fps']
|
||||
loader.load(sequence, sequence_data)
|
||||
sequence.select = False
|
||||
@ -375,6 +373,7 @@ def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor
|
||||
|
||||
class BlScene(ReplicatedDatablock):
|
||||
is_root = True
|
||||
use_delta = True
|
||||
|
||||
bl_id = "scenes"
|
||||
bl_class = bpy.types.Scene
|
||||
@ -384,13 +383,12 @@ class BlScene(ReplicatedDatablock):
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
datablock = bpy.data.scenes.new(data["name"])
|
||||
datablock.uuid = data.get("uuid")
|
||||
|
||||
return datablock
|
||||
return bpy.data.scenes.new(data["name"])
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
# Load other meshes metadata
|
||||
loader = Loader()
|
||||
loader.load(datablock, data)
|
||||
@ -418,20 +416,19 @@ class BlScene(ReplicatedDatablock):
|
||||
if 'render' in data.keys():
|
||||
loader.load(datablock.render, data['render'])
|
||||
|
||||
if 'view_settings' in data.keys():
|
||||
loader.load(datablock.view_settings, data['view_settings'])
|
||||
view_settings = data.get('view_settings')
|
||||
if view_settings:
|
||||
loader.load(datablock.view_settings, view_settings)
|
||||
if datablock.view_settings.use_curve_mapping and \
|
||||
'curve_mapping' in data['view_settings']:
|
||||
'curve_mapping' in view_settings:
|
||||
# TODO: change this ugly fix
|
||||
datablock.view_settings.curve_mapping.white_level = data[
|
||||
'view_settings']['curve_mapping']['white_level']
|
||||
datablock.view_settings.curve_mapping.black_level = data[
|
||||
'view_settings']['curve_mapping']['black_level']
|
||||
datablock.view_settings.curve_mapping.white_level = view_settings['curve_mapping']['white_level']
|
||||
datablock.view_settings.curve_mapping.black_level = view_settings['curve_mapping']['black_level']
|
||||
datablock.view_settings.curve_mapping.update()
|
||||
|
||||
# Sequencer
|
||||
sequences = data.get('sequences')
|
||||
|
||||
|
||||
if sequences:
|
||||
# Create sequencer data
|
||||
datablock.sequence_editor_create()
|
||||
@ -442,7 +439,7 @@ class BlScene(ReplicatedDatablock):
|
||||
if seq.name not in sequences:
|
||||
vse.sequences.remove(seq)
|
||||
# Load existing sequences
|
||||
for seq_name, seq_data in sequences.items():
|
||||
for seq_data in sequences.value():
|
||||
load_sequence(seq_data, vse)
|
||||
# If the sequence is no longer used, clear it
|
||||
elif datablock.sequence_editor and not sequences:
|
||||
@ -454,7 +451,8 @@ class BlScene(ReplicatedDatablock):
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
stamp_uuid(datablock)
|
||||
data = {}
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
|
||||
# Metadata
|
||||
scene_dumper = Dumper()
|
||||
@ -467,13 +465,11 @@ class BlScene(ReplicatedDatablock):
|
||||
'frame_start',
|
||||
'frame_end',
|
||||
'frame_step',
|
||||
'uuid'
|
||||
]
|
||||
if get_preferences().sync_flags.sync_active_camera:
|
||||
scene_dumper.include_filter.append('camera')
|
||||
|
||||
data = scene_dumper.dump(datablock)
|
||||
dump_animation_data(datablock, data)
|
||||
data.update(scene_dumper.dump(datablock))
|
||||
|
||||
# Master collection
|
||||
data['collection'] = {}
|
||||
@ -538,6 +534,8 @@ class BlScene(ReplicatedDatablock):
|
||||
if datablock.grease_pencil:
|
||||
deps.append(datablock.grease_pencil)
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
# Sequences
|
||||
vse = datablock.sequence_editor
|
||||
if vse:
|
||||
@ -550,11 +548,22 @@ class BlScene(ReplicatedDatablock):
|
||||
for elem in sequence.elements:
|
||||
sequence.append(
|
||||
Path(bpy.path.abspath(sequence.directory),
|
||||
elem.filename))
|
||||
elem.filename))
|
||||
|
||||
return deps
|
||||
|
||||
def diff(self):
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
name = data.get('name')
|
||||
datablock = resolve_datablock_from_uuid(uuid, bpy.data.scenes)
|
||||
if datablock is None:
|
||||
datablock = bpy.data.scenes.get(name)
|
||||
|
||||
return datablock
|
||||
|
||||
@staticmethod
|
||||
def compute_delta(last_data: dict, current_data: dict) -> Delta:
|
||||
exclude_path = []
|
||||
|
||||
if not get_preferences().sync_flags.sync_render_settings:
|
||||
@ -566,7 +575,22 @@ class BlScene(ReplicatedDatablock):
|
||||
if not get_preferences().sync_flags.sync_active_camera:
|
||||
exclude_path.append("root['camera']")
|
||||
|
||||
return DeepDiff(self.data, self._dump(datablock=self.datablock), exclude_paths=exclude_path)
|
||||
diff_params = {
|
||||
'exclude_paths': exclude_path,
|
||||
'ignore_order': True,
|
||||
'report_repetition': True
|
||||
}
|
||||
delta_params = {
|
||||
# 'mutate': True
|
||||
}
|
||||
|
||||
return Delta(
|
||||
DeepDiff(last_data,
|
||||
current_data,
|
||||
cache_size=5000,
|
||||
**diff_params),
|
||||
**delta_params)
|
||||
|
||||
|
||||
_type = bpy.types.Scene
|
||||
_class = BlScene
|
||||
_class = BlScene
|
@ -23,38 +23,39 @@ from pathlib import Path
|
||||
import bpy
|
||||
|
||||
from .bl_file import get_filepath, ensure_unpacked
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .dump_anything import Dumper, Loader
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
|
||||
|
||||
class BlSound(BlDatablock):
|
||||
class BlSound(ReplicatedDatablock):
|
||||
bl_id = "sounds"
|
||||
bl_class = bpy.types.Sound
|
||||
bl_check_common = False
|
||||
bl_icon = 'SOUND'
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
filename = data.get('filename')
|
||||
|
||||
return bpy.data.sounds.load(get_filepath(filename))
|
||||
|
||||
def load(self, data, target):
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
|
||||
def diff(self):
|
||||
return False
|
||||
|
||||
def dump(self, instance=None):
|
||||
filename = Path(instance.filepath).name
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
filename = Path(datablock.filepath).name
|
||||
|
||||
if not filename:
|
||||
raise FileExistsError(instance.filepath)
|
||||
|
||||
raise FileExistsError(datablock.filepath)
|
||||
|
||||
return {
|
||||
'filename': filename,
|
||||
'name': instance.name
|
||||
'name': datablock.name
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
@ -62,7 +63,19 @@ class BlSound(BlDatablock):
|
||||
deps = []
|
||||
if datablock.filepath and datablock.filepath != '<builtin>':
|
||||
ensure_unpacked(datablock)
|
||||
|
||||
|
||||
deps.append(Path(bpy.path.abspath(datablock.filepath)))
|
||||
|
||||
return deps
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.sounds)
|
||||
|
||||
@staticmethod
|
||||
def needs_update(datablock: object, data:dict)-> bool:
|
||||
return False
|
||||
|
||||
_type = bpy.types.Sound
|
||||
_class = BlSound
|
@ -20,26 +20,29 @@ import bpy
|
||||
import mathutils
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
|
||||
class BlSpeaker(BlDatablock):
|
||||
class BlSpeaker(ReplicatedDatablock):
|
||||
bl_id = "speakers"
|
||||
bl_class = bpy.types.Speaker
|
||||
bl_check_common = False
|
||||
bl_icon = 'SPEAKER'
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.speakers.new(data["name"])
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
assert(instance)
|
||||
|
||||
dumper = Dumper()
|
||||
dumper.depth = 1
|
||||
dumper.include_filter = [
|
||||
@ -58,10 +61,18 @@ class BlSpeaker(BlDatablock):
|
||||
'cone_volume_outer'
|
||||
]
|
||||
|
||||
return dumper.dump(instance)
|
||||
data = dumper.dump(datablock)
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.speakers)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
# TODO: resolve material
|
||||
deps = []
|
||||
|
||||
sound = datablock.sound
|
||||
@ -69,6 +80,8 @@ class BlSpeaker(BlDatablock):
|
||||
if sound:
|
||||
deps.append(sound)
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
return deps
|
||||
|
||||
|
||||
_type = bpy.types.Speaker
|
||||
_class = BlSpeaker
|
@ -20,25 +20,30 @@ import bpy
|
||||
import mathutils
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
import bpy.types as T
|
||||
|
||||
|
||||
class BlTexture(BlDatablock):
|
||||
class BlTexture(ReplicatedDatablock):
|
||||
bl_id = "textures"
|
||||
bl_class = bpy.types.Texture
|
||||
bl_check_common = False
|
||||
bl_icon = 'TEXTURE'
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.textures.new(data["name"], data["type"])
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
assert(instance)
|
||||
|
||||
dumper = Dumper()
|
||||
dumper.depth = 1
|
||||
@ -52,15 +57,22 @@ class BlTexture(BlDatablock):
|
||||
'name_full'
|
||||
]
|
||||
|
||||
data = dumper.dump(instance)
|
||||
color_ramp = getattr(instance, 'color_ramp', None)
|
||||
data = dumper.dump(datablock)
|
||||
|
||||
color_ramp = getattr(datablock, 'color_ramp', None)
|
||||
|
||||
if color_ramp:
|
||||
dumper.depth = 4
|
||||
data['color_ramp'] = dumper.dump(color_ramp)
|
||||
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.textures)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
@ -70,6 +82,14 @@ class BlTexture(BlDatablock):
|
||||
if image:
|
||||
deps.append(image)
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
|
||||
_type = [T.WoodTexture, T.VoronoiTexture,
|
||||
T.StucciTexture, T.NoiseTexture,
|
||||
T.MusgraveTexture, T.MarbleTexture,
|
||||
T.MagicTexture, T.ImageTexture,
|
||||
T.DistortedNoiseTexture, T.CloudsTexture,
|
||||
T.BlendTexture]
|
||||
_class = BlTexture
|
@ -21,32 +21,24 @@ import mathutils
|
||||
from pathlib import Path
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import BlDatablock, get_datablock_from_uuid
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
|
||||
from .bl_material import dump_materials_slots, load_materials_slots
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
class BlVolume(BlDatablock):
|
||||
class BlVolume(ReplicatedDatablock):
|
||||
bl_id = "volumes"
|
||||
bl_class = bpy.types.Volume
|
||||
bl_check_common = False
|
||||
bl_icon = 'VOLUME_DATA'
|
||||
bl_reload_parent = False
|
||||
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(target.display, data['display'])
|
||||
|
||||
# MATERIAL SLOTS
|
||||
src_materials = data.get('materials', None)
|
||||
if src_materials:
|
||||
load_materials_slots(src_materials, target.materials)
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.volumes.new(data["name"])
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
assert(instance)
|
||||
|
||||
dumper = Dumper()
|
||||
dumper.depth = 1
|
||||
dumper.exclude_filter = [
|
||||
@ -60,17 +52,35 @@ class BlVolume(BlDatablock):
|
||||
'use_fake_user'
|
||||
]
|
||||
|
||||
data = dumper.dump(instance)
|
||||
data = dumper.dump(datablock)
|
||||
|
||||
data['display'] = dumper.dump(instance.display)
|
||||
data['display'] = dumper.dump(datablock.display)
|
||||
|
||||
# Fix material index
|
||||
data['materials'] = dump_materials_slots(instance.materials)
|
||||
|
||||
data['materials'] = dump_materials_slots(datablock.materials)
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
loader = Loader()
|
||||
loader.load(datablock, data)
|
||||
loader.load(datablock.display, data['display'])
|
||||
|
||||
# MATERIAL SLOTS
|
||||
src_materials = data.get('materials', None)
|
||||
if src_materials:
|
||||
load_materials_slots(src_materials, datablock.materials)
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.volumes)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
# TODO: resolve material
|
||||
deps = []
|
||||
|
||||
external_vdb = Path(bpy.path.abspath(datablock.filepath))
|
||||
@ -81,6 +91,9 @@ class BlVolume(BlDatablock):
|
||||
if material:
|
||||
deps.append(material)
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
|
||||
_type = bpy.types.Volume
|
||||
_class = BlVolume
|
@ -20,35 +20,40 @@ import bpy
|
||||
import mathutils
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_material import (load_node_tree,
|
||||
dump_node_tree,
|
||||
get_node_tree_dependencies)
|
||||
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
class BlWorld(BlDatablock):
|
||||
|
||||
class BlWorld(ReplicatedDatablock):
|
||||
bl_id = "worlds"
|
||||
bl_class = bpy.types.World
|
||||
bl_check_common = True
|
||||
bl_icon = 'WORLD_DATA'
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.worlds.new(data["name"])
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
|
||||
if data["use_nodes"]:
|
||||
if target.node_tree is None:
|
||||
target.use_nodes = True
|
||||
if datablock.node_tree is None:
|
||||
datablock.use_nodes = True
|
||||
|
||||
load_node_tree(data['node_tree'], target.node_tree)
|
||||
load_node_tree(data['node_tree'], datablock.node_tree)
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
assert(instance)
|
||||
|
||||
world_dumper = Dumper()
|
||||
world_dumper.depth = 1
|
||||
world_dumper.include_filter = [
|
||||
@ -56,11 +61,17 @@ class BlWorld(BlDatablock):
|
||||
"name",
|
||||
"color"
|
||||
]
|
||||
data = world_dumper.dump(instance)
|
||||
if instance.use_nodes:
|
||||
data['node_tree'] = dump_node_tree(instance.node_tree)
|
||||
data = world_dumper.dump(datablock)
|
||||
if datablock.use_nodes:
|
||||
data['node_tree'] = dump_node_tree(datablock.node_tree)
|
||||
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.worlds)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
@ -69,4 +80,8 @@ class BlWorld(BlDatablock):
|
||||
if datablock.use_nodes:
|
||||
deps.extend(get_node_tree_dependencies(datablock.node_tree))
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
return deps
|
||||
|
||||
_type = bpy.types.World
|
||||
_class = BlWorld
|
@ -507,16 +507,12 @@ class Loader:
|
||||
_constructors = {
|
||||
T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]),
|
||||
T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []),
|
||||
T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"]),
|
||||
T.GpencilModifier: (CONSTRUCTOR_NEW, ["name", "type"]),
|
||||
T.Constraint: (CONSTRUCTOR_NEW, ["type"]),
|
||||
}
|
||||
|
||||
destructors = {
|
||||
T.ColorRampElement: DESTRUCTOR_REMOVE,
|
||||
T.Modifier: DESTRUCTOR_CLEAR,
|
||||
T.GpencilModifier: DESTRUCTOR_CLEAR,
|
||||
T.Constraint: DESTRUCTOR_REMOVE,
|
||||
}
|
||||
element_type = element.bl_rna_property.fixed_type
|
||||
|
@ -24,20 +24,25 @@ import sys
|
||||
from pathlib import Path
|
||||
import socket
|
||||
import re
|
||||
import bpy
|
||||
|
||||
VERSION_EXPR = re.compile('\d+.\d+.\d+')
|
||||
|
||||
THIRD_PARTY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
|
||||
DEFAULT_CACHE_DIR = os.path.join(
|
||||
os.path.dirname(os.path.abspath(__file__)), "cache")
|
||||
REPLICATION_DEPENDENCIES = {
|
||||
"zmq",
|
||||
"deepdiff"
|
||||
}
|
||||
LIBS = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
|
||||
REPLICATION = os.path.join(LIBS,"replication")
|
||||
|
||||
PYTHON_PATH = None
|
||||
SUBPROCESS_DIR = None
|
||||
|
||||
|
||||
rtypes = []
|
||||
|
||||
|
||||
def module_can_be_imported(name):
|
||||
def module_can_be_imported(name: str) -> bool:
|
||||
try:
|
||||
__import__(name)
|
||||
return True
|
||||
@ -50,7 +55,7 @@ def install_pip():
|
||||
subprocess.run([str(PYTHON_PATH), "-m", "ensurepip"])
|
||||
|
||||
|
||||
def install_package(name, version):
|
||||
def install_package(name: str, install_dir: str):
|
||||
logging.info(f"installing {name} version...")
|
||||
env = os.environ
|
||||
if "PIP_REQUIRE_VIRTUALENV" in env:
|
||||
@ -60,12 +65,13 @@ def install_package(name, version):
|
||||
# env var for the subprocess.
|
||||
env = os.environ.copy()
|
||||
del env["PIP_REQUIRE_VIRTUALENV"]
|
||||
subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}=={version}"], env=env)
|
||||
subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}", "-t", install_dir], env=env)
|
||||
|
||||
if name in sys.modules:
|
||||
del sys.modules[name]
|
||||
|
||||
def check_package_version(name, required_version):
|
||||
|
||||
def check_package_version(name: str, required_version: str):
|
||||
logging.info(f"Checking {name} version...")
|
||||
out = subprocess.run([str(PYTHON_PATH), "-m", "pip", "show", name], capture_output=True)
|
||||
|
||||
@ -77,6 +83,7 @@ def check_package_version(name, required_version):
|
||||
logging.info(f"{name} need an update")
|
||||
return False
|
||||
|
||||
|
||||
def get_ip():
|
||||
"""
|
||||
Retrieve the main network interface IP.
|
||||
@ -94,7 +101,25 @@ def check_dir(dir):
|
||||
os.makedirs(dir)
|
||||
|
||||
|
||||
def setup(dependencies, python_path):
|
||||
def setup_paths(paths: list):
|
||||
""" Add missing path to sys.path
|
||||
"""
|
||||
for path in paths:
|
||||
if path not in sys.path:
|
||||
logging.debug(f"Adding {path} dir to the path.")
|
||||
sys.path.insert(0, path)
|
||||
|
||||
|
||||
def remove_paths(paths: list):
|
||||
""" Remove list of path from sys.path
|
||||
"""
|
||||
for path in paths:
|
||||
if path in sys.path:
|
||||
logging.debug(f"Removing {path} dir from the path.")
|
||||
sys.path.remove(path)
|
||||
|
||||
|
||||
def install_modules(dependencies: list, python_path: str, install_dir: str):
|
||||
global PYTHON_PATH, SUBPROCESS_DIR
|
||||
|
||||
PYTHON_PATH = Path(python_path)
|
||||
@ -103,9 +128,23 @@ def setup(dependencies, python_path):
|
||||
if not module_can_be_imported("pip"):
|
||||
install_pip()
|
||||
|
||||
for package_name, package_version in dependencies:
|
||||
for package_name in dependencies:
|
||||
if not module_can_be_imported(package_name):
|
||||
install_package(package_name, package_version)
|
||||
install_package(package_name, install_dir=install_dir)
|
||||
module_can_be_imported(package_name)
|
||||
elif not check_package_version(package_name, package_version):
|
||||
install_package(package_name, package_version)
|
||||
|
||||
def register():
|
||||
if bpy.app.version[1] >= 91:
|
||||
python_binary_path = sys.executable
|
||||
else:
|
||||
python_binary_path = bpy.app.binary_path_python
|
||||
|
||||
for module_name in list(sys.modules.keys()):
|
||||
if 'replication' in module_name:
|
||||
del sys.modules[module_name]
|
||||
|
||||
setup_paths([LIBS, REPLICATION])
|
||||
install_modules(REPLICATION_DEPENDENCIES, python_binary_path, install_dir=LIBS)
|
||||
|
||||
def unregister():
|
||||
remove_paths([REPLICATION, LIBS])
|
150
multi_user/handlers.py
Normal file
150
multi_user/handlers.py
Normal file
@ -0,0 +1,150 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
import logging
|
||||
|
||||
import bpy
|
||||
from bpy.app.handlers import persistent
|
||||
from replication import porcelain
|
||||
from replication.constants import RP_COMMON, STATE_ACTIVE, STATE_SYNCING, UP
|
||||
from replication.exception import ContextError, NonAuthorizedOperationError
|
||||
from replication.interface import session
|
||||
|
||||
from . import shared_data, utils
|
||||
|
||||
|
||||
def sanitize_deps_graph(remove_nodes: bool = False):
|
||||
""" Cleanup the replication graph
|
||||
"""
|
||||
if session and session.state == STATE_ACTIVE:
|
||||
start = utils.current_milli_time()
|
||||
rm_cpt = 0
|
||||
for node in session.repository.graph.values():
|
||||
node.instance = session.repository.rdp.resolve(node.data)
|
||||
if node is None \
|
||||
or (node.state == UP and not node.instance):
|
||||
if remove_nodes:
|
||||
try:
|
||||
porcelain.rm(session.repository,
|
||||
node.uuid,
|
||||
remove_dependencies=False)
|
||||
logging.info(f"Removing {node.uuid}")
|
||||
rm_cpt += 1
|
||||
except NonAuthorizedOperationError:
|
||||
continue
|
||||
logging.info(f"Sanitize took { utils.current_milli_time()-start} ms, removed {rm_cpt} nodes")
|
||||
|
||||
|
||||
def update_external_dependencies():
|
||||
"""Force external dependencies(files such as images) evaluation
|
||||
"""
|
||||
nodes_ids = [n.uuid for n in session.repository.graph.values() if n.data['type_id'] in ['WindowsPath', 'PosixPath']]
|
||||
for node_id in nodes_ids:
|
||||
node = session.repository.graph.get(node_id)
|
||||
if node and node.owner in [session.repository.username, RP_COMMON]:
|
||||
porcelain.commit(session.repository, node_id)
|
||||
porcelain.push(session.repository, 'origin', node_id)
|
||||
|
||||
|
||||
@persistent
def on_scene_update(scene):
    """Forward blender depsgraph updates to replication.

    Handler for ``depsgraph_update_post``: commits and pushes every
    updated datablock owned by the local user (or flagged common), and
    registers newly created scenes in the repository.
    """
    if session and session.state == STATE_ACTIVE:
        blender_depsgraph = bpy.context.view_layer.depsgraph
        dependency_updates = list(blender_depsgraph.updates)
        incoming_updates = shared_data.session.applied_updates

        # Updates triggered by applying remote data must not be pushed
        # back, otherwise each change would echo between peers forever.
        distant_update = [getattr(u.id, 'uuid', None) for u in dependency_updates if getattr(u.id, 'uuid', None) in incoming_updates]
        if distant_update:
            for u in distant_update:
                shared_data.session.applied_updates.remove(u)
            logging.info(f"Ignoring distant update of {dependency_updates[0].id.name}")
            return

        update_external_dependencies()

        # NOTE: maybe we don't need to check each update but only the first
        for update in reversed(dependency_updates):
            update_uuid = getattr(update.id, 'uuid', None)
            if update_uuid:
                node = session.repository.graph.get(update.id.uuid)
                check_common = session.repository.rdp.get_implementation(update.id).bl_check_common

                # Only push what we are allowed to write: our own nodes
                # or datablocks whose implementation is flagged common.
                if node and (node.owner == session.repository.username or check_common):
                    logging.debug(f"Evaluate {update.id.name}")
                    if node.state == UP:
                        try:
                            porcelain.commit(session.repository, node.uuid)
                            porcelain.push(session.repository,
                                           'origin', node.uuid)
                        except ReferenceError:
                            # The blender datablock vanished under us.
                            logging.debug(f"Reference error {node.uuid}")
                        except ContextError as e:
                            logging.debug(e)
                        except Exception as e:
                            logging.error(e)
            elif isinstance(update.id, bpy.types.Scene):
                # An untracked scene appeared: register, commit, share.
                scn_uuid = porcelain.add(session.repository, update.id)
                porcelain.commit(session.repository, scn_uuid)
                porcelain.push(session.repository, 'origin', scn_uuid)
|
||||
|
||||
|
||||
@persistent
def resolve_deps_graph(dummy):
    """Rebuild graph node pointers after an undo/redo.

    Temporary workaround: datablock references stored on the nodes go
    stale after an Undo, so the whole graph is sanitized again. A
    future solution should avoid storing datablock references at all.
    """
    if not (session and session.state == STATE_ACTIVE):
        return
    sanitize_deps_graph(remove_nodes=True)
|
||||
|
||||
|
||||
@persistent
def load_pre_handler(dummy):
    """Stop any running/syncing session before a new blend file loads."""
    if not session:
        return
    if session.state in (STATE_ACTIVE, STATE_SYNCING):
        bpy.ops.session.stop()
|
||||
|
||||
|
||||
@persistent
def update_client_frame(scene):
    """Broadcast the local current frame to the other session users."""
    if session and session.state == STATE_ACTIVE:
        metadata = {'frame_current': scene.frame_current}
        porcelain.update_user_metadata(session.repository, metadata)
|
||||
|
||||
|
||||
def register():
    """Install the persistent application handlers."""
    handlers = bpy.app.handlers
    handlers.undo_post.append(resolve_deps_graph)
    handlers.redo_post.append(resolve_deps_graph)

    handlers.load_pre.append(load_pre_handler)
    handlers.frame_change_pre.append(update_client_frame)
|
||||
|
||||
|
||||
def unregister():
    """Remove the handlers installed by :func:`register`."""
    handlers = bpy.app.handlers
    handlers.undo_post.remove(resolve_deps_graph)
    handlers.redo_post.remove(resolve_deps_graph)

    handlers.load_pre.remove(load_pre_handler)
    handlers.frame_change_pre.remove(update_client_frame)
|
@ -1,45 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
|
||||
class BlLibrary(BlDatablock):
    """Replication wrapper for linked library datablocks."""
    bl_id = "libraries"
    bl_class = bpy.types.Library
    bl_check_common = False
    bl_icon = 'LIBRARY_DATA_DIRECT'
    bl_reload_parent = False

    # NOTE(review): unlike the sibling methods below, this one has no
    # ``self`` parameter — confirm how the protocol dispatches construct
    # before calling it as a bound method.
    def construct(data: dict) -> object:
        """Link the library file referenced by ``data['filepath']``.

        :param data: dumped library state; only ``filepath`` is read here.
        :return: the source data handle produced by the library load.
        """
        with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData):
            targetData = sourceData
        return sourceData
    def load(self, data, target):
        # Linked libraries carry no mutable state to restore.
        pass

    def dump(self, instance=None):
        """Serialize *instance* with the generic attribute dumper."""
        assert(instance)
        dumper = Dumper()
        return dumper.dump(instance)
|
||||
|
||||
|
@ -1,90 +0,0 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
from . import dump_anything
|
||||
from .bl_datablock import BlDatablock, get_datablock_from_uuid
|
||||
|
||||
|
||||
def dump_textures_slots(texture_slots: bpy.types.bpy_prop_collection) -> list:
    """Dump a texture slot collection as a list of tuples.

    Each occupied slot becomes ``(index, slot_texture_uuid,
    slot_texture_name)``; empty slots are skipped.
    """
    return [
        (index, slot.texture.uuid, slot.texture.name)
        for index, slot in enumerate(texture_slots)
        if slot and slot.texture
    ]
|
||||
|
||||
|
||||
def load_texture_slots(dumped_slots: list, target_slots: bpy.types.bpy_prop_collection):
    """Restore texture slots from their dumped tuple form.

    Clears every occupied slot first, then recreates each dumped slot
    and rebinds its texture from the replicated uuid.
    """
    for index, slot in enumerate(target_slots):
        if slot:
            target_slots.clear(index)

    for index, slot_uuid, slot_name in dumped_slots:
        new_slot = target_slots.create(index)
        new_slot.texture = get_datablock_from_uuid(slot_uuid, slot_name)
|
||||
|
||||
# Attributes excluded when dumping a ParticleSettings datablock:
# read-only / runtime state that must not be replicated.
IGNORED_ATTR = [
    "is_embedded_data",
    "is_evaluated",
    "is_fluid",
    "is_library_indirect",
    "users"
]
|
||||
|
||||
class BlParticle(BlDatablock):
    """Replication wrapper for particle settings datablocks."""
    bl_id = "particles"
    bl_class = bpy.types.ParticleSettings
    bl_icon = "PARTICLES"
    bl_check_common = False
    bl_reload_parent = False

    def _construct(self, data):
        """Create a fresh ParticleSettings named after the dumped data."""
        settings = bpy.data.particles.new(data["name"])
        settings.uuid = self.uuid
        return settings

    def _load_implementation(self, data, target):
        """Apply the dumped state in *data* onto *target*."""
        dump_anything.load(target, data)

        dump_anything.load(target.effector_weights, data["effector_weights"])

        # Force fields are optional: present only when they were dumped.
        for field_name in ("force_field_1", "force_field_2"):
            field_data = data.get(field_name, None)
            if field_data:
                dump_anything.load(getattr(target, field_name), field_data)

        # Texture slots
        load_texture_slots(data["texture_slots"], target.texture_slots)

    def _dump_implementation(self, data, instance=None):
        """Dump *instance* into a dict, skipping runtime-only attributes."""
        assert instance

        dumper = dump_anything.Dumper()
        dumper.depth = 1
        dumper.exclude_filter = IGNORED_ATTR
        dumped = dumper.dump(instance)

        # Particle effectors
        dumped["effector_weights"] = dumper.dump(instance.effector_weights)
        if instance.force_field_1:
            dumped["force_field_1"] = dumper.dump(instance.force_field_1)
        if instance.force_field_2:
            dumped["force_field_2"] = dumper.dump(instance.force_field_2)

        # Texture slots
        dumped["texture_slots"] = dump_textures_slots(instance.texture_slots)

        return dumped

    def _resolve_deps_implementation(self):
        """Particles depend on every texture bound to one of their slots."""
        return [t.texture for t in self.instance.texture_slots if t and t.texture]
|
Submodule multi_user/libs/replication updated: 8447872940...cb4cdd0444
@ -27,12 +27,14 @@ import shutil
|
||||
import string
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
from datetime import datetime
|
||||
from operator import itemgetter
|
||||
from pathlib import Path
|
||||
from queue import Queue
|
||||
from time import gmtime, strftime
|
||||
import traceback
|
||||
|
||||
from bpy.props import FloatProperty
|
||||
|
||||
try:
|
||||
import _pickle as pickle
|
||||
@ -43,15 +45,17 @@ import bpy
|
||||
import mathutils
|
||||
from bpy.app.handlers import persistent
|
||||
from bpy_extras.io_utils import ExportHelper, ImportHelper
|
||||
from replication import porcelain
|
||||
from replication.constants import (COMMITED, FETCHED, RP_COMMON, STATE_ACTIVE,
|
||||
STATE_INITIAL, STATE_SYNCING, UP)
|
||||
from replication.protocol import DataTranslationProtocol
|
||||
from replication.exception import ContextError, NonAuthorizedOperationError
|
||||
from replication.interface import session
|
||||
from replication import porcelain
|
||||
from replication.objects import Node
|
||||
from replication.protocol import DataTranslationProtocol
|
||||
from replication.repository import Repository
|
||||
|
||||
from . import io_bpy, environment, timers, ui, utils
|
||||
from . import bl_types, environment, shared_data, timers, ui, utils
|
||||
from .handlers import on_scene_update, sanitize_deps_graph
|
||||
from .presence import SessionStatusWidget, renderer, view3d_find
|
||||
from .timers import registry
|
||||
|
||||
@ -59,7 +63,6 @@ background_execution_queue = Queue()
|
||||
deleyables = []
|
||||
stop_modal_executor = False
|
||||
|
||||
|
||||
def session_callback(name):
|
||||
""" Session callback wrapper
|
||||
|
||||
@ -78,41 +81,39 @@ def session_callback(name):
|
||||
def initialize_session():
|
||||
"""Session connection init hander
|
||||
"""
|
||||
logging.info("Intializing the scene")
|
||||
settings = utils.get_preferences()
|
||||
runtime_settings = bpy.context.window_manager.session
|
||||
|
||||
# Step 1: Construct nodes
|
||||
logging.info("Constructing nodes")
|
||||
for node in session.repository.list_ordered():
|
||||
node_ref = session.repository.get_node(node)
|
||||
if node_ref is None:
|
||||
logging.error(f"Can't construct node {node}")
|
||||
elif node_ref.state == FETCHED:
|
||||
node_ref.resolve()
|
||||
if not runtime_settings.is_host:
|
||||
logging.info("Intializing the scene")
|
||||
# Step 1: Construct nodes
|
||||
logging.info("Instantiating nodes")
|
||||
for node in session.repository.index_sorted:
|
||||
node_ref = session.repository.graph.get(node)
|
||||
if node_ref is None:
|
||||
logging.error(f"Can't construct node {node}")
|
||||
elif node_ref.state == FETCHED:
|
||||
node_ref.instance = session.repository.rdp.resolve(node_ref.data)
|
||||
if node_ref.instance is None:
|
||||
node_ref.instance = session.repository.rdp.construct(node_ref.data)
|
||||
node_ref.instance.uuid = node_ref.uuid
|
||||
|
||||
# Step 2: Load nodes
|
||||
logging.info("Loading nodes")
|
||||
for node in session.repository.list_ordered():
|
||||
node_ref = session.repository.get_node(node)
|
||||
|
||||
if node_ref is None:
|
||||
logging.error(f"Can't load node {node}")
|
||||
elif node_ref.state == FETCHED:
|
||||
node_ref.apply()
|
||||
# Step 2: Load nodes
|
||||
logging.info("Applying nodes")
|
||||
for node in session.repository.index_sorted:
|
||||
porcelain.apply(session.repository, node)
|
||||
|
||||
logging.info("Registering timers")
|
||||
# Step 4: Register blender timers
|
||||
for d in deleyables:
|
||||
d.register()
|
||||
|
||||
bpy.ops.session.apply_armature_operator('INVOKE_DEFAULT')
|
||||
|
||||
# Step 5: Clearing history
|
||||
utils.flush_history()
|
||||
|
||||
# Step 6: Launch deps graph update handling
|
||||
bpy.app.handlers.depsgraph_update_post.append(depsgraph_evaluation)
|
||||
bpy.app.handlers.depsgraph_update_post.append(on_scene_update)
|
||||
|
||||
|
||||
@session_callback('on_exit')
|
||||
@ -132,8 +133,8 @@ def on_connection_end(reason="none"):
|
||||
|
||||
stop_modal_executor = True
|
||||
|
||||
if depsgraph_evaluation in bpy.app.handlers.depsgraph_update_post:
|
||||
bpy.app.handlers.depsgraph_update_post.remove(depsgraph_evaluation)
|
||||
if on_scene_update in bpy.app.handlers.depsgraph_update_post:
|
||||
bpy.app.handlers.depsgraph_update_post.remove(on_scene_update)
|
||||
|
||||
# Step 3: remove file handled
|
||||
logger = logging.getLogger()
|
||||
@ -141,8 +142,7 @@ def on_connection_end(reason="none"):
|
||||
if isinstance(handler, logging.FileHandler):
|
||||
logger.removeHandler(handler)
|
||||
if reason != "user":
|
||||
bpy.ops.session.notify(
|
||||
'INVOKE_DEFAULT', message=f"Disconnected from session. Reason: {reason}. ")
|
||||
bpy.ops.session.notify('INVOKE_DEFAULT', message=f"Disconnected from session. Reason: {reason}. ")
|
||||
|
||||
|
||||
# OPERATORS
|
||||
@ -163,7 +163,7 @@ class SessionStartOperator(bpy.types.Operator):
|
||||
settings = utils.get_preferences()
|
||||
runtime_settings = context.window_manager.session
|
||||
users = bpy.data.window_managers['WinMan'].online_users
|
||||
admin_pass = runtime_settings.password
|
||||
admin_pass = settings.password
|
||||
|
||||
users.clear()
|
||||
deleyables.clear()
|
||||
@ -191,81 +191,102 @@ class SessionStartOperator(bpy.types.Operator):
|
||||
|
||||
handler.setFormatter(formatter)
|
||||
|
||||
bpy_protocol = io_bpy.get_data_translation_protocol()
|
||||
bpy_protocol = bl_types.get_data_translation_protocol()
|
||||
|
||||
# Check if supported_datablocks are up to date before starting the
|
||||
# the session
|
||||
for impl in bpy_protocol.implementations.values():
|
||||
if impl.__name__ not in settings.supported_datablocks:
|
||||
logging.info(f"{impl.__name__} not found, \
|
||||
for dcc_type_id in bpy_protocol.implementations.keys():
|
||||
if dcc_type_id not in settings.supported_datablocks:
|
||||
logging.info(f"{dcc_type_id} not found, \
|
||||
regenerate type settings...")
|
||||
settings.generate_supported_types()
|
||||
|
||||
# Ensure blender 2.8 compatibility
|
||||
|
||||
if bpy.app.version[1] >= 91:
|
||||
python_binary_path = sys.executable
|
||||
else:
|
||||
python_binary_path = bpy.app.binary_path_python
|
||||
|
||||
# HOST
|
||||
repo = Repository(
|
||||
rdp=bpy_protocol,
|
||||
username=settings.username)
|
||||
|
||||
# Host a session
|
||||
if self.host:
|
||||
if settings.init_method == 'EMPTY':
|
||||
utils.clean_scene()
|
||||
|
||||
# Start the server locally
|
||||
server = porcelain.serve(port=settings.port,
|
||||
timeout=settings.connection_timeout,
|
||||
admin_password=admin_pass,
|
||||
log_directory=settings.cache_directory)
|
||||
runtime_settings.is_host = True
|
||||
runtime_settings.internet_ip = environment.get_ip()
|
||||
|
||||
# Init repository
|
||||
repo = porcelain.init(bare=False,
|
||||
data_protocol=bpy_protocol)
|
||||
try:
|
||||
# Init repository
|
||||
for scene in bpy.data.scenes:
|
||||
porcelain.add(repo, scene)
|
||||
|
||||
# Add the existing scenes
|
||||
for scene in bpy.data.scenes:
|
||||
porcelain.add(repo, scene)
|
||||
|
||||
porcelain.remote_add(repo,
|
||||
'server',
|
||||
'127.0.0.1',
|
||||
settings.port)
|
||||
porcelain.sync(repo, 'server')
|
||||
porcelain.push(repo, 'server')
|
||||
# JOIN
|
||||
porcelain.remote_add(
|
||||
repo,
|
||||
'origin',
|
||||
'127.0.0.1',
|
||||
settings.port,
|
||||
admin_password=admin_pass)
|
||||
session.host(
|
||||
repository= repo,
|
||||
remote='origin',
|
||||
timeout=settings.connection_timeout,
|
||||
password=admin_pass,
|
||||
cache_directory=settings.cache_directory,
|
||||
server_log_level=logging.getLevelName(
|
||||
logging.getLogger().level),
|
||||
)
|
||||
except Exception as e:
|
||||
self.report({'ERROR'}, repr(e))
|
||||
logging.error(f"Error: {e}")
|
||||
traceback.print_exc()
|
||||
# Join a session
|
||||
else:
|
||||
utils.clean_scene()
|
||||
if not runtime_settings.admin:
|
||||
utils.clean_scene()
|
||||
# regular session, no password needed
|
||||
admin_pass = None
|
||||
|
||||
repo = porcelain.clone(settings.ip, settings.ip)
|
||||
try:
|
||||
porcelain.remote_add(
|
||||
repo,
|
||||
'origin',
|
||||
settings.ip,
|
||||
settings.port,
|
||||
admin_password=admin_pass)
|
||||
session.connect(
|
||||
repository= repo,
|
||||
timeout=settings.connection_timeout,
|
||||
password=admin_pass
|
||||
)
|
||||
except Exception as e:
|
||||
self.report({'ERROR'}, str(e))
|
||||
logging.error(str(e))
|
||||
|
||||
# Background client updates service
|
||||
# deleyables.append(timers.ClientUpdate())
|
||||
# deleyables.append(timers.DynamicRightSelectTimer())
|
||||
# deleyables.append(timers.ApplyTimer(
|
||||
# timeout=settings.depsgraph_update_rate))
|
||||
# deleyables.append(timers.PushTimer(
|
||||
# queue=stagging,
|
||||
# timeout=settings.depsgraph_update_rate
|
||||
# ))
|
||||
# session_update = timers.SessionStatusUpdate()
|
||||
# session_user_sync = timers.SessionUserSync()
|
||||
# session_background_executor = timers.MainThreadExecutor(
|
||||
# execution_queue=background_execution_queue)
|
||||
# session_listen = timers.SessionListenTimer(timeout=0.001)
|
||||
deleyables.append(timers.ClientUpdate())
|
||||
deleyables.append(timers.DynamicRightSelectTimer())
|
||||
deleyables.append(timers.ApplyTimer(timeout=settings.depsgraph_update_rate))
|
||||
|
||||
# session_listen.register()
|
||||
# session_update.register()
|
||||
# session_user_sync.register()
|
||||
# session_background_executor.register()
|
||||
session_update = timers.SessionStatusUpdate()
|
||||
session_user_sync = timers.SessionUserSync()
|
||||
session_background_executor = timers.MainThreadExecutor(
|
||||
execution_queue=background_execution_queue)
|
||||
session_listen = timers.SessionListenTimer(timeout=0.001)
|
||||
|
||||
# deleyables.append(session_background_executor)
|
||||
# deleyables.append(session_update)
|
||||
# deleyables.append(session_user_sync)
|
||||
# deleyables.append(session_listen)
|
||||
session_listen.register()
|
||||
session_update.register()
|
||||
session_user_sync.register()
|
||||
session_background_executor.register()
|
||||
|
||||
deleyables.append(session_background_executor)
|
||||
deleyables.append(session_update)
|
||||
deleyables.append(session_user_sync)
|
||||
deleyables.append(session_listen)
|
||||
|
||||
self.report(
|
||||
{'INFO'},
|
||||
f"connecting to tcp://{settings.ip}:{settings.port}")
|
||||
return {"FINISHED"}
|
||||
|
||||
|
||||
@ -304,6 +325,7 @@ class SessionInitOperator(bpy.types.Operator):
|
||||
porcelain.add(session.repository, scene)
|
||||
|
||||
session.init()
|
||||
context.window_manager.session.is_host = True
|
||||
|
||||
return {"FINISHED"}
|
||||
|
||||
@ -350,7 +372,7 @@ class SessionKickOperator(bpy.types.Operator):
|
||||
assert(session)
|
||||
|
||||
try:
|
||||
session.kick(self.user)
|
||||
porcelain.kick(session.repository, self.user)
|
||||
except Exception as e:
|
||||
self.report({'ERROR'}, repr(e))
|
||||
|
||||
@ -379,7 +401,7 @@ class SessionPropertyRemoveOperator(bpy.types.Operator):
|
||||
|
||||
def execute(self, context):
|
||||
try:
|
||||
session.remove(self.property_path)
|
||||
porcelain.rm(session.repository, self.property_path)
|
||||
|
||||
return {"FINISHED"}
|
||||
except: # NonAuthorizedOperationError:
|
||||
@ -421,10 +443,17 @@ class SessionPropertyRightOperator(bpy.types.Operator):
|
||||
runtime_settings = context.window_manager.session
|
||||
|
||||
if session:
|
||||
session.change_owner(self.key,
|
||||
runtime_settings.clients,
|
||||
if runtime_settings.clients == RP_COMMON:
|
||||
porcelain.unlock(session.repository,
|
||||
self.key,
|
||||
ignore_warnings=True,
|
||||
affect_dependencies=self.recursive)
|
||||
else:
|
||||
porcelain.lock(session.repository,
|
||||
self.key,
|
||||
runtime_settings.clients,
|
||||
ignore_warnings=True,
|
||||
affect_dependencies=self.recursive)
|
||||
|
||||
return {"FINISHED"}
|
||||
|
||||
@ -539,7 +568,7 @@ class SessionSnapTimeOperator(bpy.types.Operator):
|
||||
|
||||
def modal(self, context, event):
|
||||
is_running = context.window_manager.session.user_snap_running
|
||||
if event.type in {'RIGHTMOUSE', 'ESC'} or not is_running:
|
||||
if not is_running:
|
||||
self.cancel(context)
|
||||
return {'CANCELLED'}
|
||||
|
||||
@ -572,13 +601,14 @@ class SessionApply(bpy.types.Operator):
|
||||
def execute(self, context):
|
||||
logging.debug(f"Running apply on {self.target}")
|
||||
try:
|
||||
node_ref = session.repository.get_node(self.target)
|
||||
node_ref = session.repository.graph.get(self.target)
|
||||
porcelain.apply(session.repository,
|
||||
self.target,
|
||||
force=True,
|
||||
force_dependencies=self.reset_dependencies)
|
||||
if node_ref.bl_reload_parent:
|
||||
for parent in session.repository.get_parents(self.target):
|
||||
impl = session.repository.rdp.get_implementation(node_ref.instance)
|
||||
if impl.bl_reload_parent:
|
||||
for parent in session.repository.graph.get_parents(self.target):
|
||||
logging.debug(f"Refresh parent {parent}")
|
||||
|
||||
porcelain.apply(session.repository,
|
||||
@ -587,7 +617,7 @@ class SessionApply(bpy.types.Operator):
|
||||
except Exception as e:
|
||||
self.report({'ERROR'}, repr(e))
|
||||
traceback.print_exc()
|
||||
return {"CANCELLED"}
|
||||
return {"CANCELLED"}
|
||||
|
||||
return {"FINISHED"}
|
||||
|
||||
@ -606,55 +636,12 @@ class SessionCommit(bpy.types.Operator):
|
||||
|
||||
def execute(self, context):
|
||||
try:
|
||||
porcelain.commit(session.repository, uuid=self.target)
|
||||
session.push(self.target)
|
||||
porcelain.commit(session.repository, self.target)
|
||||
porcelain.push(session.repository, 'origin', self.target)
|
||||
return {"FINISHED"}
|
||||
except Exception as e:
|
||||
self.report({'ERROR'}, repr(e))
|
||||
return {"CANCELED"}
|
||||
|
||||
|
||||
class ApplyArmatureOperator(bpy.types.Operator):
|
||||
"""Operator which runs its self from a timer"""
|
||||
bl_idname = "session.apply_armature_operator"
|
||||
bl_label = "Modal Executor Operator"
|
||||
|
||||
_timer = None
|
||||
|
||||
def modal(self, context, event):
|
||||
global stop_modal_executor, modal_executor_queue
|
||||
if stop_modal_executor:
|
||||
self.cancel(context)
|
||||
return {'CANCELLED'}
|
||||
|
||||
if event.type == 'TIMER':
|
||||
if session and session.state == STATE_ACTIVE:
|
||||
nodes = session.list(filter=io_bpy.bl_armature.BlArmature)
|
||||
|
||||
for node in nodes:
|
||||
node_ref = session.repository.get_node(node)
|
||||
|
||||
if node_ref.state == FETCHED:
|
||||
try:
|
||||
porcelain.apply(session.repository, node)
|
||||
except Exception as e:
|
||||
logging.error("Fail to apply armature: {e}")
|
||||
|
||||
return {'PASS_THROUGH'}
|
||||
|
||||
def execute(self, context):
|
||||
wm = context.window_manager
|
||||
self._timer = wm.event_timer_add(2, window=context.window)
|
||||
wm.modal_handler_add(self)
|
||||
return {'RUNNING_MODAL'}
|
||||
|
||||
def cancel(self, context):
|
||||
global stop_modal_executor
|
||||
|
||||
wm = context.window_manager
|
||||
wm.event_timer_remove(self._timer)
|
||||
|
||||
stop_modal_executor = False
|
||||
return {"CANCELLED"}
|
||||
|
||||
|
||||
class SessionClearCache(bpy.types.Operator):
|
||||
@ -698,6 +685,7 @@ class SessionPurgeOperator(bpy.types.Operator):
|
||||
def execute(self, context):
|
||||
try:
|
||||
sanitize_deps_graph(remove_nodes=True)
|
||||
porcelain.purge_orphan_nodes(session.repository)
|
||||
except Exception as e:
|
||||
self.report({'ERROR'}, repr(e))
|
||||
|
||||
@ -767,7 +755,7 @@ class SessionSaveBackupOperator(bpy.types.Operator, ExportHelper):
|
||||
recorder.register()
|
||||
deleyables.append(recorder)
|
||||
else:
|
||||
session.save(self.filepath)
|
||||
session.repository.dumps(self.filepath)
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
@ -810,58 +798,25 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
|
||||
def execute(self, context):
|
||||
from replication.repository import Repository
|
||||
|
||||
try:
|
||||
f = gzip.open(self.filepath, "rb")
|
||||
db = pickle.load(f)
|
||||
except OSError as e:
|
||||
f = open(self.filepath, "rb")
|
||||
db = pickle.load(f)
|
||||
# init the factory with supported types
|
||||
bpy_protocol = bl_types.get_data_translation_protocol()
|
||||
repo = Repository(bpy_protocol)
|
||||
repo.loads(self.filepath)
|
||||
utils.clean_scene()
|
||||
|
||||
if db:
|
||||
logging.info(f"Reading {self.filepath}")
|
||||
nodes = db.get("nodes")
|
||||
nodes = [repo.graph.get(n) for n in repo.index_sorted]
|
||||
|
||||
logging.info(f"{len(nodes)} Nodes to load")
|
||||
# Step 1: Construct nodes
|
||||
for node in nodes:
|
||||
node.instance = bpy_protocol.resolve(node.data)
|
||||
if node.instance is None:
|
||||
node.instance = bpy_protocol.construct(node.data)
|
||||
node.instance.uuid = node.uuid
|
||||
|
||||
# init the factory with supported types
|
||||
bpy_protocol = DataTranslationProtocol()
|
||||
for type in io_bpy.types_to_register():
|
||||
type_module = getattr(io_bpy, type)
|
||||
name = [e.capitalize() for e in type.split('_')[1:]]
|
||||
type_impl_name = 'Bl'+''.join(name)
|
||||
type_module_class = getattr(type_module, type_impl_name)
|
||||
# Step 2: Load nodes
|
||||
for node in nodes:
|
||||
porcelain.apply(repo, node.uuid)
|
||||
|
||||
bpy_protocol.register_type(
|
||||
type_module_class.bl_class,
|
||||
type_module_class)
|
||||
|
||||
graph = Repository()
|
||||
|
||||
for node, node_data in nodes:
|
||||
node_type = node_data.get('str_type')
|
||||
|
||||
impl = bpy_protocol.get_implementation_from_net(node_type)
|
||||
|
||||
if impl:
|
||||
logging.info(f"Loading {node}")
|
||||
instance = impl(owner=node_data['owner'],
|
||||
uuid=node,
|
||||
dependencies=node_data['dependencies'],
|
||||
data=node_data['data'])
|
||||
graph.do_commit(instance)
|
||||
instance.state = FETCHED
|
||||
|
||||
logging.info("Graph succefully loaded")
|
||||
|
||||
utils.clean_scene()
|
||||
|
||||
# Step 1: Construct nodes
|
||||
for node in graph.list_ordered():
|
||||
graph[node].resolve()
|
||||
|
||||
# Step 2: Load nodes
|
||||
for node in graph.list_ordered():
|
||||
graph[node].apply()
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
@ -869,10 +824,78 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
|
||||
def poll(cls, context):
|
||||
return True
|
||||
|
||||
class SessionPresetServerAdd(bpy.types.Operator):
|
||||
"""Add a server to the server list preset"""
|
||||
bl_idname = "session.preset_server_add"
|
||||
bl_label = "add server preset"
|
||||
bl_description = "add the current server to the server preset list"
|
||||
bl_options = {"REGISTER"}
|
||||
|
||||
name : bpy.props.StringProperty(default="server_preset")
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
return True
|
||||
|
||||
def invoke(self, context, event):
|
||||
assert(context)
|
||||
return context.window_manager.invoke_props_dialog(self)
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
|
||||
col = layout.column()
|
||||
settings = utils.get_preferences()
|
||||
|
||||
col.prop(settings, "server_name", text="server name")
|
||||
|
||||
def execute(self, context):
|
||||
assert(context)
|
||||
|
||||
settings = utils.get_preferences()
|
||||
|
||||
existing_preset = settings.server_preset.get(settings.server_name)
|
||||
|
||||
new_server = existing_preset if existing_preset else settings.server_preset.add()
|
||||
new_server.name = settings.server_name
|
||||
new_server.server_ip = settings.ip
|
||||
new_server.server_port = settings.port
|
||||
new_server.server_password = settings.password
|
||||
|
||||
settings.server_preset_interface = settings.server_name
|
||||
|
||||
if new_server == existing_preset :
|
||||
self.report({'INFO'}, "Server '" + settings.server_name + "' override")
|
||||
else :
|
||||
self.report({'INFO'}, "New '" + settings.server_name + "' server preset")
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
class SessionPresetServerRemove(bpy.types.Operator):
|
||||
"""Remove a server to the server list preset"""
|
||||
bl_idname = "session.preset_server_remove"
|
||||
bl_label = "remove server preset"
|
||||
bl_description = "remove the current server from the server preset list"
|
||||
bl_options = {"REGISTER"}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
return True
|
||||
|
||||
def execute(self, context):
|
||||
assert(context)
|
||||
|
||||
settings = utils.get_preferences()
|
||||
|
||||
settings.server_preset.remove(settings.server_preset.find(settings.server_preset_interface))
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
|
||||
def menu_func_import(self, context):
|
||||
self.layout.operator(SessionLoadSaveOperator.bl_idname,
|
||||
text='Multi-user session snapshot (.db)')
|
||||
self.layout.operator(SessionLoadSaveOperator.bl_idname, text='Multi-user session snapshot (.db)')
|
||||
|
||||
|
||||
classes = (
|
||||
@ -884,132 +907,25 @@ classes = (
|
||||
SessionPropertyRightOperator,
|
||||
SessionApply,
|
||||
SessionCommit,
|
||||
ApplyArmatureOperator,
|
||||
SessionKickOperator,
|
||||
SessionInitOperator,
|
||||
SessionClearCache,
|
||||
SessionNotifyOperator,
|
||||
SessionNotifyOperator,
|
||||
SessionSaveBackupOperator,
|
||||
SessionLoadSaveOperator,
|
||||
SessionStopAutoSaveOperator,
|
||||
SessionPurgeOperator,
|
||||
SessionPresetServerAdd,
|
||||
SessionPresetServerRemove,
|
||||
)
|
||||
|
||||
|
||||
def update_external_dependencies():
|
||||
nodes_ids = session.list(filter=io_bpy.bl_file.BlFile)
|
||||
for node_id in nodes_ids:
|
||||
node = session.repository.get_node(node_id)
|
||||
if node and node.owner in [session.id, RP_COMMON] \
|
||||
and node.has_changed():
|
||||
porcelain.commit(session.repository, node_id)
|
||||
session.push(node_id, check_data=False)
|
||||
|
||||
|
||||
def sanitize_deps_graph(remove_nodes: bool = False):
|
||||
""" Cleanup the replication graph
|
||||
"""
|
||||
if session and session.state == STATE_ACTIVE:
|
||||
start = utils.current_milli_time()
|
||||
rm_cpt = 0
|
||||
for node_key in session.list():
|
||||
node = session.repository.get_node(node_key)
|
||||
if node is None \
|
||||
or (node.state == UP and not node.resolve(construct=False)):
|
||||
if remove_nodes:
|
||||
try:
|
||||
session.remove(node.uuid, remove_dependencies=False)
|
||||
logging.info(f"Removing {node.uuid}")
|
||||
rm_cpt += 1
|
||||
except NonAuthorizedOperationError:
|
||||
continue
|
||||
logging.info(f"Sanitize took { utils.current_milli_time()-start} ms")
|
||||
|
||||
|
||||
@persistent
|
||||
def resolve_deps_graph(dummy):
|
||||
"""Resolve deps graph
|
||||
|
||||
Temporary solution to resolve each node pointers after a Undo.
|
||||
A future solution should be to avoid storing dataclock reference...
|
||||
|
||||
"""
|
||||
if session and session.state == STATE_ACTIVE:
|
||||
sanitize_deps_graph(remove_nodes=True)
|
||||
|
||||
|
||||
@persistent
|
||||
def load_pre_handler(dummy):
|
||||
if session and session.state in [STATE_ACTIVE, STATE_SYNCING]:
|
||||
bpy.ops.session.stop()
|
||||
|
||||
|
||||
@persistent
|
||||
def update_client_frame(scene):
|
||||
if session and session.state == STATE_ACTIVE:
|
||||
session.update_user_metadata({
|
||||
'frame_current': scene.frame_current
|
||||
})
|
||||
|
||||
|
||||
@persistent
|
||||
def depsgraph_evaluation(scene):
|
||||
if session and session.state == STATE_ACTIVE:
|
||||
context = bpy.context
|
||||
blender_depsgraph = bpy.context.view_layer.depsgraph
|
||||
dependency_updates = [u for u in blender_depsgraph.updates]
|
||||
settings = utils.get_preferences()
|
||||
|
||||
update_external_dependencies()
|
||||
|
||||
# NOTE: maybe we don't need to check each update but only the first
|
||||
for update in reversed(dependency_updates):
|
||||
# Is the object tracked ?
|
||||
if update.id.uuid:
|
||||
# Retrieve local version
|
||||
node = session.repository.get_node(update.id.uuid)
|
||||
|
||||
# Check our right on this update:
|
||||
# - if its ours or ( under common and diff), launch the
|
||||
# update process
|
||||
# - if its to someone else, ignore the update
|
||||
if node and (node.owner == session.id or node.bl_check_common):
|
||||
if node.state == UP:
|
||||
try:
|
||||
if node.has_changed():
|
||||
porcelain.commit(session.repository, node.uuid)
|
||||
session.push(node.uuid, check_data=False)
|
||||
except ReferenceError:
|
||||
logging.debug(f"Reference error {node.uuid}")
|
||||
except ContextError as e:
|
||||
logging.debug(e)
|
||||
except Exception as e:
|
||||
logging.error(e)
|
||||
else:
|
||||
continue
|
||||
# A new scene is created
|
||||
elif isinstance(update.id, bpy.types.Scene):
|
||||
ref = session.repository.get_node_by_datablock(update.id)
|
||||
if ref:
|
||||
ref.resolve()
|
||||
else:
|
||||
scn_uuid = porcelain.add(session.repository, update.id)
|
||||
porcelain.commit(session.repository, scn_uuid)
|
||||
porcelain.push(session.repository)
|
||||
|
||||
|
||||
def register():
|
||||
from bpy.utils import register_class
|
||||
|
||||
for cls in classes:
|
||||
for cls in classes:
|
||||
register_class(cls)
|
||||
|
||||
bpy.app.handlers.undo_post.append(resolve_deps_graph)
|
||||
bpy.app.handlers.redo_post.append(resolve_deps_graph)
|
||||
|
||||
bpy.app.handlers.load_pre.append(load_pre_handler)
|
||||
bpy.app.handlers.frame_change_pre.append(update_client_frame)
|
||||
|
||||
|
||||
def unregister():
|
||||
if session and session.state == STATE_ACTIVE:
|
||||
@ -1018,9 +934,3 @@ def unregister():
|
||||
from bpy.utils import unregister_class
|
||||
for cls in reversed(classes):
|
||||
unregister_class(cls)
|
||||
|
||||
bpy.app.handlers.undo_post.remove(resolve_deps_graph)
|
||||
bpy.app.handlers.redo_post.remove(resolve_deps_graph)
|
||||
|
||||
bpy.app.handlers.load_pre.remove(load_pre_handler)
|
||||
bpy.app.handlers.frame_change_pre.remove(update_client_frame)
|
||||
|
@ -24,7 +24,7 @@ import os
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from . import io_bpy, environment, addon_updater_ops, presence, ui
|
||||
from . import bl_types, environment, addon_updater_ops, presence, ui
|
||||
from .utils import get_preferences, get_expanded_icon
|
||||
from replication.constants import RP_COMMON
|
||||
from replication.interface import session
|
||||
@ -33,6 +33,19 @@ from replication.interface import session
|
||||
IP_REGEX = re.compile("^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$")
|
||||
HOSTNAME_REGEX = re.compile("^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$")
|
||||
|
||||
DEFAULT_PRESETS = {
|
||||
"localhost" : {
|
||||
"server_ip": "localhost",
|
||||
"server_port": 5555,
|
||||
"server_password": "admin"
|
||||
},
|
||||
"public session" : {
|
||||
"server_ip": "51.75.71.183",
|
||||
"server_port": 5555,
|
||||
"server_password": ""
|
||||
},
|
||||
}
|
||||
|
||||
def randomColor():
|
||||
"""Generate a random color """
|
||||
r = random.random()
|
||||
@ -65,8 +78,11 @@ def update_ip(self, context):
|
||||
logging.error("Wrong IP format")
|
||||
self['ip'] = "127.0.0.1"
|
||||
|
||||
|
||||
|
||||
def update_server_preset_interface(self, context):
|
||||
self.server_name = self.server_preset.get(self.server_preset_interface).name
|
||||
self.ip = self.server_preset.get(self.server_preset_interface).server_ip
|
||||
self.port = self.server_preset.get(self.server_preset_interface).server_port
|
||||
self.password = self.server_preset.get(self.server_preset_interface).server_password
|
||||
|
||||
def update_directory(self, context):
|
||||
new_dir = Path(self.cache_directory)
|
||||
@ -93,6 +109,10 @@ class ReplicatedDatablock(bpy.types.PropertyGroup):
|
||||
auto_push: bpy.props.BoolProperty(default=True)
|
||||
icon: bpy.props.StringProperty()
|
||||
|
||||
class ServerPreset(bpy.types.PropertyGroup):
|
||||
server_ip: bpy.props.StringProperty()
|
||||
server_port: bpy.props.IntProperty(default=5555)
|
||||
server_password: bpy.props.StringProperty(default="admin", subtype = "PASSWORD")
|
||||
|
||||
def set_sync_render_settings(self, value):
|
||||
self['sync_render_settings'] = value
|
||||
@ -145,7 +165,7 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
||||
ip: bpy.props.StringProperty(
|
||||
name="ip",
|
||||
description='Distant host ip',
|
||||
default="127.0.0.1",
|
||||
default="localhost",
|
||||
update=update_ip)
|
||||
username: bpy.props.StringProperty(
|
||||
name="Username",
|
||||
@ -160,6 +180,17 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
||||
description='Distant host port',
|
||||
default=5555
|
||||
)
|
||||
server_name: bpy.props.StringProperty(
|
||||
name="server_name",
|
||||
description="Custom name of the server",
|
||||
default='localhost',
|
||||
)
|
||||
password: bpy.props.StringProperty(
|
||||
name="password",
|
||||
default=random_string_digits(),
|
||||
description='Session password',
|
||||
subtype='PASSWORD'
|
||||
)
|
||||
sync_flags: bpy.props.PointerProperty(
|
||||
type=ReplicationFlags
|
||||
)
|
||||
@ -321,6 +352,25 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
||||
max=59
|
||||
)
|
||||
|
||||
# Server preset
|
||||
def server_list_callback(scene, context):
|
||||
settings = get_preferences()
|
||||
enum = []
|
||||
for i in settings.server_preset:
|
||||
enum.append((i.name, i.name, ""))
|
||||
return enum
|
||||
|
||||
server_preset: bpy.props.CollectionProperty(
|
||||
name="server preset",
|
||||
type=ServerPreset,
|
||||
)
|
||||
server_preset_interface: bpy.props.EnumProperty(
|
||||
name="servers",
|
||||
description="servers enum",
|
||||
items=server_list_callback,
|
||||
update=update_server_preset_interface,
|
||||
)
|
||||
|
||||
# Custom panel
|
||||
panel_category: bpy.props.StringProperty(
|
||||
description="Choose a name for the category of the panel",
|
||||
@ -407,19 +457,32 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
||||
def generate_supported_types(self):
|
||||
self.supported_datablocks.clear()
|
||||
|
||||
bpy_protocol = io_bpy.get_data_translation_protocol()
|
||||
bpy_protocol = bl_types.get_data_translation_protocol()
|
||||
|
||||
# init the factory with supported types
|
||||
for impl in bpy_protocol.implementations.values():
|
||||
for dcc_type_id, impl in bpy_protocol.implementations.items():
|
||||
new_db = self.supported_datablocks.add()
|
||||
|
||||
new_db.name = impl.__name__
|
||||
new_db.type_name = impl.__name__
|
||||
new_db.name = dcc_type_id
|
||||
new_db.type_name = dcc_type_id
|
||||
new_db.use_as_filter = True
|
||||
new_db.icon = impl.bl_icon
|
||||
new_db.bl_name = impl.bl_id
|
||||
|
||||
|
||||
# custom at launch server preset
|
||||
def generate_default_presets(self):
|
||||
for preset_name, preset_data in DEFAULT_PRESETS.items():
|
||||
existing_preset = self.server_preset.get(preset_name)
|
||||
if existing_preset :
|
||||
continue
|
||||
new_server = self.server_preset.add()
|
||||
new_server.name = preset_name
|
||||
new_server.server_ip = preset_data.get('server_ip')
|
||||
new_server.server_port = preset_data.get('server_port')
|
||||
new_server.server_password = preset_data.get('server_password',None)
|
||||
|
||||
|
||||
def client_list_callback(scene, context):
|
||||
from . import operators
|
||||
|
||||
@ -490,16 +553,20 @@ class SessionProps(bpy.types.PropertyGroup):
|
||||
description='Show only owned datablocks',
|
||||
default=True
|
||||
)
|
||||
filter_name: bpy.props.StringProperty(
|
||||
name="filter_name",
|
||||
default="",
|
||||
description='Node name filter',
|
||||
)
|
||||
admin: bpy.props.BoolProperty(
|
||||
name="admin",
|
||||
description='Connect as admin',
|
||||
default=False
|
||||
)
|
||||
password: bpy.props.StringProperty(
|
||||
name="password",
|
||||
default=random_string_digits(),
|
||||
description='Session password',
|
||||
subtype='PASSWORD'
|
||||
internet_ip: bpy.props.StringProperty(
|
||||
name="internet ip",
|
||||
default="no found",
|
||||
description='Internet interface ip',
|
||||
)
|
||||
user_snap_running: bpy.props.BoolProperty(
|
||||
default=False
|
||||
@ -507,15 +574,17 @@ class SessionProps(bpy.types.PropertyGroup):
|
||||
time_snap_running: bpy.props.BoolProperty(
|
||||
default=False
|
||||
)
|
||||
is_host: bpy.props.BoolProperty(
|
||||
default=False
|
||||
)
|
||||
|
||||
def get_preferences():
|
||||
return bpy.context.preferences.addons[__package__].preferences
|
||||
|
||||
classes = (
|
||||
SessionUser,
|
||||
SessionProps,
|
||||
ReplicationFlags,
|
||||
ReplicatedDatablock,
|
||||
ServerPreset,
|
||||
SessionPrefs,
|
||||
)
|
||||
|
||||
@ -528,8 +597,12 @@ def register():
|
||||
|
||||
prefs = bpy.context.preferences.addons[__package__].preferences
|
||||
if len(prefs.supported_datablocks) == 0:
|
||||
logging.debug('Generating io_bpy preferences')
|
||||
logging.debug('Generating bl_types preferences')
|
||||
prefs.generate_supported_types()
|
||||
|
||||
# at launch server presets
|
||||
prefs.generate_default_presets()
|
||||
|
||||
|
||||
|
||||
def unregister():
|
||||
|
@ -302,9 +302,10 @@ class UserSelectionWidget(Widget):
|
||||
return
|
||||
|
||||
vertex_pos = bbox_from_obj(ob, 1.0)
|
||||
vertex_indices = ((0, 1), (0, 2), (1, 3), (2, 3),
|
||||
(4, 5), (4, 6), (5, 7), (6, 7),
|
||||
(0, 4), (1, 5), (2, 6), (3, 7))
|
||||
vertex_indices = (
|
||||
(0, 1), (1, 2), (2, 3), (0, 3),
|
||||
(4, 5), (5, 6), (6, 7), (4, 7),
|
||||
(0, 4), (1, 5), (2, 6), (3, 7))
|
||||
|
||||
if ob.instance_collection:
|
||||
for obj in ob.instance_collection.objects:
|
||||
|
48
multi_user/shared_data.py
Normal file
48
multi_user/shared_data.py
Normal file
@ -0,0 +1,48 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
from replication.constants import STATE_INITIAL
|
||||
|
||||
|
||||
class SessionData():
|
||||
""" A structure to share easily the current session data across the addon
|
||||
modules.
|
||||
This object will completely replace the Singleton lying in replication
|
||||
interface module.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.repository = None # The current repository
|
||||
self.remote = None # The active remote
|
||||
self.server = None
|
||||
self.applied_updates = []
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
if self.remote is None:
|
||||
return STATE_INITIAL
|
||||
else:
|
||||
return self.remote.connection_status
|
||||
|
||||
def clear(self):
|
||||
self.remote = None
|
||||
self.repository = None
|
||||
self.server = None
|
||||
self.applied_updates = []
|
||||
|
||||
|
||||
session = SessionData()
|
@ -31,6 +31,8 @@ from .presence import (UserFrustumWidget, UserNameWidget, UserSelectionWidget,
|
||||
generate_user_camera, get_view_matrix, refresh_3d_view,
|
||||
refresh_sidebar_view, renderer)
|
||||
|
||||
from . import shared_data
|
||||
|
||||
this = sys.modules[__name__]
|
||||
|
||||
# Registered timers
|
||||
@ -72,6 +74,7 @@ class Timer(object):
|
||||
except Exception as e:
|
||||
logging.error(e)
|
||||
self.unregister()
|
||||
traceback.print_exc()
|
||||
session.disconnect(reason=f"Error during timer {self.id} execution")
|
||||
else:
|
||||
if self.is_running:
|
||||
@ -88,7 +91,7 @@ class Timer(object):
|
||||
if bpy.app.timers.is_registered(self.main):
|
||||
logging.info(f"Unregistering {self.id}")
|
||||
bpy.app.timers.unregister(self.main)
|
||||
|
||||
|
||||
del this.registry[self.id]
|
||||
self.is_running = False
|
||||
|
||||
@ -99,7 +102,7 @@ class SessionBackupTimer(Timer):
|
||||
|
||||
|
||||
def execute(self):
|
||||
session.save(self._filepath)
|
||||
session.repository.dumps(self._filepath)
|
||||
|
||||
class SessionListenTimer(Timer):
|
||||
def execute(self):
|
||||
@ -108,20 +111,20 @@ class SessionListenTimer(Timer):
|
||||
class ApplyTimer(Timer):
|
||||
def execute(self):
|
||||
if session and session.state == STATE_ACTIVE:
|
||||
nodes = session.list()
|
||||
|
||||
for node in nodes:
|
||||
node_ref = session.repository.get_node(node)
|
||||
for node in session.repository.graph.keys():
|
||||
node_ref = session.repository.graph.get(node)
|
||||
|
||||
if node_ref.state == FETCHED:
|
||||
try:
|
||||
shared_data.session.applied_updates.append(node)
|
||||
porcelain.apply(session.repository, node)
|
||||
except Exception as e:
|
||||
logging.error(f"Fail to apply {node_ref.uuid}")
|
||||
traceback.print_exc()
|
||||
else:
|
||||
if node_ref.bl_reload_parent:
|
||||
for parent in session.repository.get_parents(node):
|
||||
impl = session.repository.rdp.get_implementation(node_ref.instance)
|
||||
if impl.bl_reload_parent:
|
||||
for parent in session.repository.graph.get_parents(node):
|
||||
logging.debug("Refresh parent {node}")
|
||||
porcelain.apply(session.repository,
|
||||
parent.uuid,
|
||||
@ -152,31 +155,28 @@ class DynamicRightSelectTimer(Timer):
|
||||
|
||||
# if an annotation exist and is tracked
|
||||
if annotation_gp and annotation_gp.uuid:
|
||||
registered_gp = session.repository.get_node(annotation_gp.uuid)
|
||||
registered_gp = session.repository.graph.get(annotation_gp.uuid)
|
||||
if is_annotating(bpy.context):
|
||||
# try to get the right on it
|
||||
if registered_gp.owner == RP_COMMON:
|
||||
self._annotating = True
|
||||
logging.debug(
|
||||
"Getting the right on the annotation GP")
|
||||
session.change_owner(
|
||||
registered_gp.uuid,
|
||||
settings.username,
|
||||
ignore_warnings=True,
|
||||
affect_dependencies=False)
|
||||
porcelain.lock(session.repository,
|
||||
registered_gp.uuid,
|
||||
ignore_warnings=True,
|
||||
affect_dependencies=False)
|
||||
|
||||
if registered_gp.owner == settings.username:
|
||||
gp_node = session.repository.get_node(annotation_gp.uuid)
|
||||
if gp_node.has_changed():
|
||||
porcelain.commit(session.repository, gp_node.uuid)
|
||||
session.push(gp_node.uuid, check_data=False)
|
||||
gp_node = session.repository.graph.get(annotation_gp.uuid)
|
||||
porcelain.commit(session.repository, gp_node.uuid)
|
||||
porcelain.push(session.repository, 'origin', gp_node.uuid)
|
||||
|
||||
elif self._annotating:
|
||||
session.change_owner(
|
||||
registered_gp.uuid,
|
||||
RP_COMMON,
|
||||
ignore_warnings=True,
|
||||
affect_dependencies=False)
|
||||
porcelain.unlock(session.repository,
|
||||
registered_gp.uuid,
|
||||
ignore_warnings=True,
|
||||
affect_dependencies=False)
|
||||
|
||||
current_selection = utils.get_selected_objects(
|
||||
bpy.context.scene,
|
||||
@ -190,25 +190,24 @@ class DynamicRightSelectTimer(Timer):
|
||||
|
||||
# change old selection right to common
|
||||
for obj in obj_common:
|
||||
node = session.repository.get_node(obj)
|
||||
node = session.repository.graph.get(obj)
|
||||
|
||||
if node and (node.owner == settings.username or node.owner == RP_COMMON):
|
||||
recursive = True
|
||||
if node.data and 'instance_type' in node.data.keys():
|
||||
recursive = node.data['instance_type'] != 'COLLECTION'
|
||||
try:
|
||||
session.change_owner(
|
||||
node.uuid,
|
||||
RP_COMMON,
|
||||
ignore_warnings=True,
|
||||
affect_dependencies=recursive)
|
||||
porcelain.unlock(session.repository,
|
||||
node.uuid,
|
||||
ignore_warnings=True,
|
||||
affect_dependencies=recursive)
|
||||
except NonAuthorizedOperationError:
|
||||
logging.warning(
|
||||
f"Not authorized to change {node} owner")
|
||||
|
||||
# change new selection to our
|
||||
for obj in obj_ours:
|
||||
node = session.repository.get_node(obj)
|
||||
node = session.repository.graph.get(obj)
|
||||
|
||||
if node and node.owner == RP_COMMON:
|
||||
recursive = True
|
||||
@ -216,11 +215,10 @@ class DynamicRightSelectTimer(Timer):
|
||||
recursive = node.data['instance_type'] != 'COLLECTION'
|
||||
|
||||
try:
|
||||
session.change_owner(
|
||||
node.uuid,
|
||||
settings.username,
|
||||
ignore_warnings=True,
|
||||
affect_dependencies=recursive)
|
||||
porcelain.lock(session.repository,
|
||||
node.uuid,
|
||||
ignore_warnings=True,
|
||||
affect_dependencies=recursive)
|
||||
except NonAuthorizedOperationError:
|
||||
logging.warning(
|
||||
f"Not authorized to change {node} owner")
|
||||
@ -233,21 +231,19 @@ class DynamicRightSelectTimer(Timer):
|
||||
'selected_objects': current_selection
|
||||
}
|
||||
|
||||
session.update_user_metadata(user_metadata)
|
||||
porcelain.update_user_metadata(session.repository, user_metadata)
|
||||
logging.debug("Update selection")
|
||||
|
||||
# Fix deselection until right managment refactoring (with Roles concepts)
|
||||
if len(current_selection) == 0 :
|
||||
owned_keys = session.list(
|
||||
filter_owner=settings.username)
|
||||
owned_keys = [k for k, v in session.repository.graph.items() if v.owner==settings.username]
|
||||
for key in owned_keys:
|
||||
node = session.repository.get_node(key)
|
||||
node = session.repository.graph.get(key)
|
||||
try:
|
||||
session.change_owner(
|
||||
key,
|
||||
RP_COMMON,
|
||||
ignore_warnings=True,
|
||||
affect_dependencies=recursive)
|
||||
porcelain.unlock(session.repository,
|
||||
key,
|
||||
ignore_warnings=True,
|
||||
affect_dependencies=True)
|
||||
except NonAuthorizedOperationError:
|
||||
logging.warning(
|
||||
f"Not authorized to change {key} owner")
|
||||
@ -255,10 +251,10 @@ class DynamicRightSelectTimer(Timer):
|
||||
for obj in bpy.data.objects:
|
||||
object_uuid = getattr(obj, 'uuid', None)
|
||||
if object_uuid:
|
||||
node = session.repository.get_node(object_uuid)
|
||||
is_selectable = not node.owner in [settings.username, RP_COMMON]
|
||||
is_selectable = not session.repository.is_node_readonly(object_uuid)
|
||||
if obj.hide_select != is_selectable:
|
||||
obj.hide_select = is_selectable
|
||||
shared_data.session.applied_updates.append(object_uuid)
|
||||
|
||||
|
||||
class ClientUpdate(Timer):
|
||||
@ -272,7 +268,8 @@ class ClientUpdate(Timer):
|
||||
|
||||
if session and renderer:
|
||||
if session.state in [STATE_ACTIVE, STATE_LOBBY]:
|
||||
local_user = session.online_users.get(settings.username)
|
||||
local_user = session.online_users.get(
|
||||
settings.username)
|
||||
|
||||
if not local_user:
|
||||
return
|
||||
@ -309,18 +306,18 @@ class ClientUpdate(Timer):
|
||||
'frame_current': bpy.context.scene.frame_current,
|
||||
'scene_current': scene_current
|
||||
}
|
||||
session.update_user_metadata(metadata)
|
||||
porcelain.update_user_metadata(session.repository, metadata)
|
||||
|
||||
# Update client representation
|
||||
# Update client current scene
|
||||
elif scene_current != local_user_metadata['scene_current']:
|
||||
local_user_metadata['scene_current'] = scene_current
|
||||
session.update_user_metadata(local_user_metadata)
|
||||
porcelain.update_user_metadata(session.repository, local_user_metadata)
|
||||
elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']:
|
||||
local_user_metadata['view_corners'] = current_view_corners
|
||||
local_user_metadata['view_matrix'] = get_view_matrix(
|
||||
)
|
||||
session.update_user_metadata(local_user_metadata)
|
||||
porcelain.update_user_metadata(session.repository, local_user_metadata)
|
||||
|
||||
|
||||
class SessionStatusUpdate(Timer):
|
||||
|
@ -111,6 +111,8 @@ class SESSION_PT_settings(bpy.types.Panel):
|
||||
|
||||
row= layout.row()
|
||||
|
||||
if current_state in [STATE_ACTIVE] and runtime_settings.is_host:
|
||||
info_msg = f"LAN: {runtime_settings.internet_ip}"
|
||||
if current_state == STATE_LOBBY:
|
||||
info_msg = "Waiting for the session to start."
|
||||
|
||||
@ -154,7 +156,13 @@ class SESSION_PT_settings_network(bpy.types.Panel):
|
||||
row = layout.row()
|
||||
row.prop(runtime_settings, "session_mode", expand=True)
|
||||
row = layout.row()
|
||||
|
||||
col = row.row(align=True)
|
||||
col.prop(settings, "server_preset_interface", text="")
|
||||
col.operator("session.preset_server_add", icon='ADD', text="")
|
||||
col.operator("session.preset_server_remove", icon='REMOVE', text="")
|
||||
|
||||
row = layout.row()
|
||||
box = row.box()
|
||||
|
||||
if runtime_settings.session_mode == 'HOST':
|
||||
@ -166,7 +174,7 @@ class SESSION_PT_settings_network(bpy.types.Panel):
|
||||
row.prop(settings, "init_method", text="")
|
||||
row = box.row()
|
||||
row.label(text="Admin password:")
|
||||
row.prop(runtime_settings, "password", text="")
|
||||
row.prop(settings, "password", text="")
|
||||
row = box.row()
|
||||
row.operator("session.start", text="HOST").host = True
|
||||
else:
|
||||
@ -182,11 +190,10 @@ class SESSION_PT_settings_network(bpy.types.Panel):
|
||||
if runtime_settings.admin:
|
||||
row = box.row()
|
||||
row.label(text="Password:")
|
||||
row.prop(runtime_settings, "password", text="")
|
||||
row.prop(settings, "password", text="")
|
||||
row = box.row()
|
||||
row.operator("session.start", text="CONNECT").host = False
|
||||
|
||||
|
||||
class SESSION_PT_settings_user(bpy.types.Panel):
|
||||
bl_idname = "MULTIUSER_SETTINGS_USER_PT_panel"
|
||||
bl_label = "User info"
|
||||
@ -436,8 +443,8 @@ class SESSION_PT_presence(bpy.types.Panel):
|
||||
def draw_property(context, parent, property_uuid, level=0):
|
||||
settings = get_preferences()
|
||||
runtime_settings = context.window_manager.session
|
||||
item = session.repository.get_node(property_uuid)
|
||||
|
||||
item = session.repository.graph.get(property_uuid)
|
||||
type_id = item.data.get('type_id')
|
||||
area_msg = parent.row(align=True)
|
||||
|
||||
if item.state == ERROR:
|
||||
@ -448,11 +455,10 @@ def draw_property(context, parent, property_uuid, level=0):
|
||||
line = area_msg.box()
|
||||
|
||||
name = item.data['name'] if item.data else item.uuid
|
||||
|
||||
icon = settings.supported_datablocks[type_id].icon if type_id else 'ERROR'
|
||||
detail_item_box = line.row(align=True)
|
||||
|
||||
detail_item_box.label(text="",
|
||||
icon=settings.supported_datablocks[item.str_type].icon)
|
||||
detail_item_box.label(text="", icon=icon)
|
||||
detail_item_box.label(text=f"{name}")
|
||||
|
||||
# Operations
|
||||
@ -539,40 +545,32 @@ class SESSION_PT_repository(bpy.types.Panel):
|
||||
else:
|
||||
row.operator('session.save', icon="FILE_TICK")
|
||||
|
||||
flow = layout.grid_flow(
|
||||
row_major=True,
|
||||
columns=0,
|
||||
even_columns=True,
|
||||
even_rows=False,
|
||||
align=True)
|
||||
|
||||
for item in settings.supported_datablocks:
|
||||
col = flow.column(align=True)
|
||||
col.prop(item, "use_as_filter", text="", icon=item.icon)
|
||||
|
||||
row = layout.row(align=True)
|
||||
row.prop(runtime_settings, "filter_owned", text="Show only owned")
|
||||
|
||||
row = layout.row(align=True)
|
||||
box = layout.box()
|
||||
row = box.row()
|
||||
row.prop(runtime_settings, "filter_owned", text="Show only owned Nodes", icon_only=True, icon="DECORATE_UNLOCKED")
|
||||
row = box.row()
|
||||
row.prop(runtime_settings, "filter_name", text="Filter")
|
||||
row = box.row()
|
||||
|
||||
# Properties
|
||||
types_filter = [t.type_name for t in settings.supported_datablocks
|
||||
if t.use_as_filter]
|
||||
owned_nodes = [k for k, v in session.repository.graph.items() if v.owner==settings.username]
|
||||
|
||||
key_to_filter = session.list(
|
||||
filter_owner=settings.username) if runtime_settings.filter_owned else session.list()
|
||||
filtered_node = owned_nodes if runtime_settings.filter_owned else session.repository.graph.keys()
|
||||
|
||||
client_keys = [key for key in key_to_filter
|
||||
if session.repository.get_node(key).str_type
|
||||
in types_filter]
|
||||
if runtime_settings.filter_name:
|
||||
for node_id in filtered_node:
|
||||
node_instance = session.repository.graph.get(node_id)
|
||||
name = node_instance.data.get('name')
|
||||
if runtime_settings.filter_name not in name:
|
||||
filtered_node.remove(node_id)
|
||||
|
||||
if client_keys:
|
||||
if filtered_node:
|
||||
col = layout.column(align=True)
|
||||
for key in client_keys:
|
||||
for key in filtered_node:
|
||||
draw_property(context, col, key)
|
||||
|
||||
else:
|
||||
row.label(text="Empty")
|
||||
layout.row().label(text="Empty")
|
||||
|
||||
elif session.state == STATE_LOBBY and usr and usr['admin']:
|
||||
row.operator("session.init", icon='TOOL_SETTINGS', text="Init")
|
||||
|
@ -101,11 +101,17 @@ def get_state_str(state):
|
||||
|
||||
|
||||
def clean_scene():
|
||||
for type_name in dir(bpy.data):
|
||||
to_delete = [f for f in dir(bpy.data) if f not in ['brushes', 'palettes']]
|
||||
for type_name in to_delete:
|
||||
try:
|
||||
sub_collection_to_avoid = [bpy.data.linestyles['LineStyle'], bpy.data.materials['Dots Stroke']]
|
||||
type_collection = getattr(bpy.data, type_name)
|
||||
for item in type_collection:
|
||||
type_collection.remove(item)
|
||||
items_to_remove = [i for i in type_collection if i not in sub_collection_to_avoid]
|
||||
for item in items_to_remove:
|
||||
try:
|
||||
type_collection.remove(item)
|
||||
except:
|
||||
continue
|
||||
except:
|
||||
continue
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
import re
|
||||
|
||||
init_py = open("multi_user/__init__.py").read()
|
||||
init_py = open("multi_user/libs/replication/replication/__init__.py").read()
|
||||
print(re.search("\d+\.\d+\.\d+\w\d+|\d+\.\d+\.\d+", init_py).group(0))
|
||||
|
@ -13,7 +13,7 @@ def main():
|
||||
if len(sys.argv) > 2:
|
||||
blender_rev = sys.argv[2]
|
||||
else:
|
||||
blender_rev = "2.92.0"
|
||||
blender_rev = "2.93.0"
|
||||
|
||||
try:
|
||||
exit_val = BAT.test_blender_addon(addon_path=addon, blender_revision=blender_rev)
|
||||
|
@ -5,9 +5,10 @@ from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
import random
|
||||
from multi_user.io_bpy.bl_action import BlAction
|
||||
from multi_user.bl_types.bl_action import BlAction
|
||||
|
||||
INTERPOLATION = ['CONSTANT', 'LINEAR', 'BEZIER', 'SINE', 'QUAD', 'CUBIC', 'QUART', 'QUINT', 'EXPO', 'CIRC', 'BACK', 'BOUNCE', 'ELASTIC']
|
||||
FMODIFIERS = ['GENERATOR', 'FNGENERATOR', 'ENVELOPE', 'CYCLES', 'NOISE', 'LIMITS', 'STEPPED']
|
||||
|
||||
# @pytest.mark.parametrize('blendname', ['test_action.blend'])
|
||||
def test_action(clear_blend):
|
||||
@ -22,17 +23,20 @@ def test_action(clear_blend):
|
||||
point.co[1] = random.randint(-10,10)
|
||||
point.interpolation = INTERPOLATION[random.randint(0, len(INTERPOLATION)-1)]
|
||||
|
||||
for mod_type in FMODIFIERS:
|
||||
fcurve_sample.modifiers.new(mod_type)
|
||||
|
||||
bpy.ops.mesh.primitive_plane_add()
|
||||
bpy.data.objects[0].animation_data_create()
|
||||
bpy.data.objects[0].animation_data.action = datablock
|
||||
|
||||
# Test
|
||||
implementation = BlAction()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.actions.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -5,18 +5,18 @@ from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
import random
|
||||
from multi_user.io_bpy.bl_armature import BlArmature
|
||||
from multi_user.bl_types.bl_armature import BlArmature
|
||||
|
||||
def test_armature(clear_blend):
|
||||
bpy.ops.object.armature_add()
|
||||
datablock = bpy.data.armatures[0]
|
||||
|
||||
implementation = BlArmature()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.armatures.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -4,7 +4,7 @@ import pytest
|
||||
from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
from multi_user.io_bpy.bl_camera import BlCamera
|
||||
from multi_user.bl_types.bl_camera import BlCamera
|
||||
|
||||
|
||||
@pytest.mark.parametrize('camera_type', ['PANO','PERSP','ORTHO'])
|
||||
@ -15,11 +15,11 @@ def test_camera(clear_blend, camera_type):
|
||||
datablock.type = camera_type
|
||||
|
||||
camera_dumper = BlCamera()
|
||||
expected = camera_dumper._dump(datablock)
|
||||
expected = camera_dumper.dump(datablock)
|
||||
bpy.data.cameras.remove(datablock)
|
||||
|
||||
test = camera_dumper._construct(expected)
|
||||
camera_dumper._load(expected, test)
|
||||
result = camera_dumper._dump(test)
|
||||
test = camera_dumper.construct(expected)
|
||||
camera_dumper.load(expected, test)
|
||||
result = camera_dumper.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -5,7 +5,7 @@ from deepdiff import DeepDiff
|
||||
from uuid import uuid4
|
||||
import bpy
|
||||
import random
|
||||
from multi_user.io_bpy.bl_collection import BlCollection
|
||||
from multi_user.bl_types.bl_collection import BlCollection
|
||||
|
||||
def test_collection(clear_blend):
|
||||
# Generate a collection with childrens and a cube
|
||||
@ -23,11 +23,11 @@ def test_collection(clear_blend):
|
||||
|
||||
# Test
|
||||
implementation = BlCollection()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.collections.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -5,7 +5,7 @@ from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
import random
|
||||
from multi_user.io_bpy.bl_curve import BlCurve
|
||||
from multi_user.bl_types.bl_curve import BlCurve
|
||||
|
||||
@pytest.mark.parametrize('curve_type', ['TEXT','BEZIER'])
|
||||
def test_curve(clear_blend, curve_type):
|
||||
@ -19,11 +19,11 @@ def test_curve(clear_blend, curve_type):
|
||||
datablock = bpy.data.curves[0]
|
||||
|
||||
implementation = BlCurve()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.curves.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -4,7 +4,7 @@ import pytest
|
||||
from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
from multi_user.io_bpy.bl_gpencil import BlGpencil
|
||||
from multi_user.bl_types.bl_gpencil import BlGpencil
|
||||
|
||||
|
||||
def test_gpencil(clear_blend):
|
||||
@ -13,11 +13,11 @@ def test_gpencil(clear_blend):
|
||||
datablock = bpy.data.grease_pencils[0]
|
||||
|
||||
implementation = BlGpencil()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.grease_pencils.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -4,7 +4,7 @@ import pytest
|
||||
from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
from multi_user.io_bpy.bl_lattice import BlLattice
|
||||
from multi_user.bl_types.bl_lattice import BlLattice
|
||||
|
||||
|
||||
def test_lattice(clear_blend):
|
||||
@ -13,11 +13,11 @@ def test_lattice(clear_blend):
|
||||
datablock = bpy.data.lattices[0]
|
||||
|
||||
implementation = BlLattice()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.lattices.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -4,7 +4,7 @@ import pytest
|
||||
from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
from multi_user.io_bpy.bl_lightprobe import BlLightprobe
|
||||
from multi_user.bl_types.bl_lightprobe import BlLightprobe
|
||||
|
||||
|
||||
@pytest.mark.skipif(bpy.app.version[1] < 83, reason="requires blender 2.83 or higher")
|
||||
@ -14,11 +14,11 @@ def test_lightprobes(clear_blend, lightprobe_type):
|
||||
|
||||
blender_light = bpy.data.lightprobes[0]
|
||||
lightprobe_dumper = BlLightprobe()
|
||||
expected = lightprobe_dumper._dump(blender_light)
|
||||
expected = lightprobe_dumper.dump(blender_light)
|
||||
bpy.data.lightprobes.remove(blender_light)
|
||||
|
||||
test = lightprobe_dumper._construct(expected)
|
||||
lightprobe_dumper._load(expected, test)
|
||||
result = lightprobe_dumper._dump(test)
|
||||
test = lightprobe_dumper.construct(expected)
|
||||
lightprobe_dumper.load(expected, test)
|
||||
result = lightprobe_dumper.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -4,7 +4,7 @@ import pytest
|
||||
from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
from multi_user.io_bpy.bl_light import BlLight
|
||||
from multi_user.bl_types.bl_light import BlLight
|
||||
|
||||
|
||||
@pytest.mark.parametrize('light_type', ['SPOT','SUN','POINT','AREA'])
|
||||
@ -13,11 +13,11 @@ def test_light(clear_blend, light_type):
|
||||
|
||||
blender_light = bpy.data.lights[0]
|
||||
light_dumper = BlLight()
|
||||
expected = light_dumper._dump(blender_light)
|
||||
expected = light_dumper.dump(blender_light)
|
||||
bpy.data.lights.remove(blender_light)
|
||||
|
||||
test = light_dumper._construct(expected)
|
||||
light_dumper._load(expected, test)
|
||||
result = light_dumper._dump(test)
|
||||
test = light_dumper.construct(expected)
|
||||
light_dumper.load(expected, test)
|
||||
result = light_dumper.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -4,7 +4,7 @@ import pytest
|
||||
from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
from multi_user.io_bpy.bl_material import BlMaterial
|
||||
from multi_user.bl_types.bl_material import BlMaterial
|
||||
|
||||
|
||||
def test_material_nodes(clear_blend):
|
||||
@ -17,12 +17,12 @@ def test_material_nodes(clear_blend):
|
||||
datablock.node_tree.nodes.new(ntype)
|
||||
|
||||
implementation = BlMaterial()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.materials.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
||||
@ -32,11 +32,11 @@ def test_material_gpencil(clear_blend):
|
||||
bpy.data.materials.create_gpencil_data(datablock)
|
||||
|
||||
implementation = BlMaterial()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.materials.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -5,7 +5,7 @@ from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
import random
|
||||
from multi_user.io_bpy.bl_mesh import BlMesh
|
||||
from multi_user.bl_types.bl_mesh import BlMesh
|
||||
|
||||
@pytest.mark.parametrize('mesh_type', ['EMPTY','FILLED'])
|
||||
def test_mesh(clear_blend, mesh_type):
|
||||
@ -18,11 +18,11 @@ def test_mesh(clear_blend, mesh_type):
|
||||
|
||||
# Test
|
||||
implementation = BlMesh()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.meshes.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -4,7 +4,7 @@ import pytest
|
||||
from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
from multi_user.io_bpy.bl_metaball import BlMetaball
|
||||
from multi_user.bl_types.bl_metaball import BlMetaball
|
||||
|
||||
|
||||
@pytest.mark.parametrize('metaballs_type', ['PLANE','CAPSULE','BALL','ELLIPSOID','CUBE'])
|
||||
@ -13,11 +13,11 @@ def test_metaball(clear_blend, metaballs_type):
|
||||
|
||||
datablock = bpy.data.metaballs[0]
|
||||
dumper = BlMetaball()
|
||||
expected = dumper._dump(datablock)
|
||||
expected = dumper.dump(datablock)
|
||||
bpy.data.metaballs.remove(datablock)
|
||||
|
||||
test = dumper._construct(expected)
|
||||
dumper._load(expected, test)
|
||||
result = dumper._dump(test)
|
||||
test = dumper.construct(expected)
|
||||
dumper.load(expected, test)
|
||||
result = dumper.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -5,7 +5,7 @@ from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
import random
|
||||
from multi_user.io_bpy.bl_object import BlObject
|
||||
from multi_user.bl_types.bl_object import BlObject
|
||||
|
||||
# Removed 'BUILD', 'SOFT_BODY' modifier because the seed doesn't seems to be
|
||||
# correctly initialized (#TODO: report the bug)
|
||||
@ -65,11 +65,11 @@ def test_object(clear_blend):
|
||||
datablock.shape_key_add(name='shape2')
|
||||
|
||||
implementation = BlObject()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.objects.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
print(DeepDiff(expected, result))
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -5,7 +5,7 @@ from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
import random
|
||||
from multi_user.io_bpy.bl_scene import BlScene
|
||||
from multi_user.bl_types.bl_scene import BlScene
|
||||
from multi_user.utils import get_preferences
|
||||
|
||||
def test_scene(clear_blend):
|
||||
@ -15,11 +15,11 @@ def test_scene(clear_blend):
|
||||
datablock.view_settings.use_curve_mapping = True
|
||||
# Test
|
||||
implementation = BlScene()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.scenes.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -5,18 +5,18 @@ from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
import random
|
||||
from multi_user.io_bpy.bl_speaker import BlSpeaker
|
||||
from multi_user.bl_types.bl_speaker import BlSpeaker
|
||||
|
||||
def test_speaker(clear_blend):
|
||||
bpy.ops.object.speaker_add()
|
||||
datablock = bpy.data.speakers[0]
|
||||
|
||||
implementation = BlSpeaker()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.speakers.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -5,7 +5,7 @@ from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
import random
|
||||
from multi_user.io_bpy.bl_texture import BlTexture
|
||||
from multi_user.bl_types.bl_texture import BlTexture
|
||||
|
||||
TEXTURE_TYPES = ['NONE', 'BLEND', 'CLOUDS', 'DISTORTED_NOISE', 'IMAGE', 'MAGIC', 'MARBLE', 'MUSGRAVE', 'NOISE', 'STUCCI', 'VORONOI', 'WOOD']
|
||||
|
||||
@ -14,11 +14,11 @@ def test_texture(clear_blend, texture_type):
|
||||
datablock = bpy.data.textures.new('test', texture_type)
|
||||
|
||||
implementation = BlTexture()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.textures.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -5,17 +5,17 @@ from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
import random
|
||||
from multi_user.io_bpy.bl_volume import BlVolume
|
||||
from multi_user.bl_types.bl_volume import BlVolume
|
||||
|
||||
def test_volume(clear_blend):
|
||||
datablock = bpy.data.volumes.new("Test")
|
||||
|
||||
implementation = BlVolume()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.volumes.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -5,18 +5,18 @@ from deepdiff import DeepDiff
|
||||
|
||||
import bpy
|
||||
import random
|
||||
from multi_user.io_bpy.bl_world import BlWorld
|
||||
from multi_user.bl_types.bl_world import BlWorld
|
||||
|
||||
def test_world(clear_blend):
|
||||
datablock = bpy.data.worlds.new('test')
|
||||
datablock.use_nodes = True
|
||||
|
||||
implementation = BlWorld()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.worlds.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
Reference in New Issue
Block a user