Compare commits

..

15 Commits

Author SHA1 Message Date
93d50ac56b refactor: porcelain api 2021-04-22 10:04:38 +02:00
0e73af4d49 refactor: update porcelain import 2021-04-21 21:30:43 +02:00
00d5e622af feat: update replication 2021-04-21 14:33:18 +02:00
b358d3b305 refactor: change push api 2021-04-21 14:32:56 +02:00
203c10ccd3 feat: missing class attribute for registration 2021-04-21 11:22:53 +02:00
da349dd4a9 refactor: io_bpy architecture revamp 2021-04-21 11:10:24 +02:00
845bb11b8e fix: bl_object 2021-04-20 09:53:59 +02:00
9312d6a8c5 Merge branch 'develop' into 173-differential-revision-milestone-2-replication-refactoring 2021-04-20 09:47:23 +02:00
ca78a42076 feat: update submodules 2021-04-06 14:23:10 +02:00
ee886e00c8 feat: generate subtree 2021-04-06 14:21:26 +02:00
9c7043e84c fix: animation data error 2021-03-26 16:14:27 +01:00
e659b7da94 refactor: move implementation to static def 2021-03-26 12:30:15 +01:00
e3af69a9c8 feat: add replication to the submodules 2021-03-25 14:55:53 +01:00
328c651cea remove data handling from ReplicatedDatablock 2021-03-24 16:08:12 +01:00
d4224c789a refactor: move commit to porcelain
refactor: remove is_readonly
2021-03-24 11:20:40 +01:00
56 changed files with 903 additions and 1106 deletions

View File

@ -1,13 +0,0 @@
stages:
- test
- build
- deploy
- doc
include:
- local: .gitlab/ci/test.gitlab-ci.yml
- local: .gitlab/ci/build.gitlab-ci.yml
- local: .gitlab/ci/deploy.gitlab-ci.yml
- local: .gitlab/ci/doc.gitlab-ci.yml

3
.gitmodules vendored
View File

@ -0,0 +1,3 @@
[submodule "multi_user/libs/replication"]
path = multi_user/libs/replication
url = https://gitlab.com/slumber/replication

View File

@ -19,7 +19,7 @@
bl_info = { bl_info = {
"name": "Multi-User", "name": "Multi-User",
"author": "Swann Martinez", "author": "Swann Martinez",
"version": (0, 4, 0), "version": (0, 3, 0),
"description": "Enable real-time collaborative workflow inside blender", "description": "Enable real-time collaborative workflow inside blender",
"blender": (2, 82, 0), "blender": (2, 82, 0),
"location": "3D View > Sidebar > Multi-User tab", "location": "3D View > Sidebar > Multi-User tab",
@ -41,12 +41,8 @@ import bpy
from bpy.app.handlers import persistent from bpy.app.handlers import persistent
from . import environment from . import environment
from uuid import uuid4
LIBS = os.path.dirname(os.path.abspath(__file__))+"/libs/replication"
DEPENDENCIES = {
("replication", '0.1.33'),
}
module_error_msg = "Insufficient rights to install the multi-user \ module_error_msg = "Insufficient rights to install the multi-user \
dependencies, aunch blender with administrator rights." dependencies, aunch blender with administrator rights."
@ -57,14 +53,15 @@ def register():
datefmt='%H:%M:%S', datefmt='%H:%M:%S',
level=logging.INFO) level=logging.INFO)
for module_name in list(sys.modules.keys()):
if 'replication' in module_name:
del sys.modules[module_name]
if LIBS not in sys.path:
logging.info('Adding local modules dir to the path')
sys.path.insert(0, LIBS)
try: try:
if bpy.app.version[1] >= 91:
python_binary_path = sys.executable
else:
python_binary_path = bpy.app.binary_path_python
environment.setup(DEPENDENCIES, python_binary_path)
from . import presence from . import presence
from . import operators from . import operators
from . import ui from . import ui
@ -78,7 +75,7 @@ def register():
ui.register() ui.register()
except ModuleNotFoundError as e: except ModuleNotFoundError as e:
raise Exception(module_error_msg) raise Exception(module_error_msg)
logging.error(module_error_msg) logging.error(e)
bpy.types.WindowManager.session = bpy.props.PointerProperty( bpy.types.WindowManager.session = bpy.props.PointerProperty(
type=preferences.SessionProps) type=preferences.SessionProps)

View File

@ -1,56 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
__all__ = [
'bl_object',
'bl_mesh',
'bl_camera',
'bl_collection',
'bl_curve',
'bl_gpencil',
'bl_image',
'bl_light',
'bl_scene',
'bl_material',
'bl_library',
'bl_armature',
'bl_action',
'bl_world',
'bl_metaball',
'bl_lattice',
'bl_lightprobe',
'bl_speaker',
'bl_font',
'bl_sound',
'bl_file',
# 'bl_sequencer',
'bl_node_group',
'bl_texture',
"bl_particle",
] # Order here defines execution order
if bpy.app.version[1] >= 91:
__all__.append('bl_volume')
from . import *
from replication.data import DataTranslationProtocol
def types_to_register():
return __all__

View File

@ -1,232 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import logging
from collections.abc import Iterable
import bpy
import mathutils
from replication.constants import DIFF_BINARY, DIFF_JSON, UP
from replication.data import ReplicatedDatablock
from .. import utils
from .dump_anything import Dumper, Loader
def has_action(target):
""" Check if the target datablock has actions
"""
return (hasattr(target, 'animation_data')
and target.animation_data
and target.animation_data.action)
def has_driver(target):
""" Check if the target datablock is driven
"""
return (hasattr(target, 'animation_data')
and target.animation_data
and target.animation_data.drivers)
def dump_driver(driver):
dumper = Dumper()
dumper.depth = 6
data = dumper.dump(driver)
return data
def load_driver(target_datablock, src_driver):
loader = Loader()
drivers = target_datablock.animation_data.drivers
src_driver_data = src_driver['driver']
new_driver = drivers.new(src_driver['data_path'], index=src_driver['array_index'])
# Settings
new_driver.driver.type = src_driver_data['type']
new_driver.driver.expression = src_driver_data['expression']
loader.load(new_driver, src_driver)
# Variables
for src_variable in src_driver_data['variables']:
src_var_data = src_driver_data['variables'][src_variable]
new_var = new_driver.driver.variables.new()
new_var.name = src_var_data['name']
new_var.type = src_var_data['type']
for src_target in src_var_data['targets']:
src_target_data = src_var_data['targets'][src_target]
src_id = src_target_data.get('id')
if src_id:
new_var.targets[src_target].id = utils.resolve_from_id(src_target_data['id'], src_target_data['id_type'])
loader.load(new_var.targets[src_target], src_target_data)
# Fcurve
new_fcurve = new_driver.keyframe_points
for p in reversed(new_fcurve):
new_fcurve.remove(p, fast=True)
new_fcurve.add(len(src_driver['keyframe_points']))
for index, src_point in enumerate(src_driver['keyframe_points']):
new_point = new_fcurve[index]
loader.load(new_point, src_driver['keyframe_points'][src_point])
def get_datablock_from_uuid(uuid, default, ignore=[]):
if not uuid:
return default
for category in dir(bpy.data):
root = getattr(bpy.data, category)
if isinstance(root, Iterable) and category not in ignore:
for item in root:
if getattr(item, 'uuid', None) == uuid:
return item
return default
class BlDatablock(ReplicatedDatablock):
"""BlDatablock
bl_id : blender internal storage identifier
bl_class : blender internal type
bl_icon : type icon (blender icon name)
bl_check_common: enable check even in common rights
bl_reload_parent: reload parent
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None)
self.preferences = utils.get_preferences()
# TODO: use is_library_indirect
self.is_library = (instance and hasattr(instance, 'library') and
instance.library) or \
(hasattr(self,'data') and self.data and 'library' in self.data)
if instance and hasattr(instance, 'uuid'):
instance.uuid = self.uuid
def resolve(self, construct = True):
datablock_root = getattr(bpy.data, self.bl_id)
datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root)
if not datablock_ref:
try:
datablock_ref = datablock_root[self.data['name']]
except Exception:
pass
if construct and not datablock_ref:
name = self.data.get('name')
logging.debug(f"Constructing {name}")
datablock_ref = self._construct(data=self.data)
if datablock_ref is not None:
setattr(datablock_ref, 'uuid', self.uuid)
self.instance = datablock_ref
return True
else:
return False
def remove_instance(self):
"""
Remove instance from blender data
"""
assert(self.instance)
datablock_root = getattr(bpy.data, self.bl_id)
datablock_root.remove(self.instance)
def _dump(self, instance=None):
dumper = Dumper()
data = {}
animation_data = {}
# Dump animation data
if has_action(instance):
animation_data['action'] = instance.animation_data.action.name
if has_driver(instance):
animation_data['drivers'] = []
for driver in instance.animation_data.drivers:
animation_data['drivers'].append(dump_driver(driver))
if animation_data:
data['animation_data'] = animation_data
if self.is_library:
data.update(dumper.dump(instance))
else:
data.update(self._dump_implementation(data, instance=instance))
return data
def _dump_implementation(self, data, target):
raise NotImplementedError
def _load(self, data, target):
# Load animation data
if 'animation_data' in data.keys():
if target.animation_data is None:
target.animation_data_create()
for d in target.animation_data.drivers:
target.animation_data.drivers.remove(d)
if 'drivers' in data['animation_data']:
for driver in data['animation_data']['drivers']:
load_driver(target, driver)
if 'action' in data['animation_data']:
target.animation_data.action = bpy.data.actions[data['animation_data']['action']]
elif target.animation_data.action:
target.animation_data.action = None
# Remove existing animation data if there is not more to load
elif hasattr(target, 'animation_data') and target.animation_data:
target.animation_data_clear()
if self.is_library:
return
else:
self._load_implementation(data, target)
def _load_implementation(self, data, target):
raise NotImplementedError
def resolve_deps(self):
dependencies = []
if has_action(self.instance):
dependencies.append(self.instance.animation_data.action)
if not self.is_library:
dependencies.extend(self._resolve_deps_implementation())
logging.debug(f"{self.instance} dependencies: {dependencies}")
return dependencies
def _resolve_deps_implementation(self):
return []
def is_valid(self):
return getattr(bpy.data, self.bl_id).get(self.data['name'])

View File

@ -0,0 +1,62 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
from replication.protocol import ReplicatedDatablock
__all__ = [
'bl_object',
'bl_mesh',
# 'bl_camera',
'bl_collection',
# 'bl_curve',
# 'bl_gpencil',
# 'bl_image',
# 'bl_light',
'bl_scene',
'bl_material',
# 'bl_library',
# 'bl_armature',
# 'bl_action',
# 'bl_world',
# 'bl_metaball',
# 'bl_lattice',
# 'bl_lightprobe',
# 'bl_speaker',
# 'bl_font',
# 'bl_sound',
# 'bl_file',
# 'bl_sequencer',
# 'bl_node_group',
# 'bl_texture',
# "bl_particle",
] # Order here defines execution order
# if bpy.app.version[1] >= 91:
# __all__.append('bl_volume')
from replication.protocol import DataTranslationProtocol
def get_data_translation_protocol()-> DataTranslationProtocol:
""" Return a data translation protocol from implemented bpy types
"""
bpy_protocol = DataTranslationProtocol()
for module_name in __all__:
impl = globals().get(module_name)
if impl and hasattr(impl, "_type") and hasattr(impl, "_type"):
bpy_protocol.register_implementation(impl._type, impl._class)
return bpy_protocol

View File

@ -24,9 +24,14 @@ from enum import Enum
from .. import utils from .. import utils
from .dump_anything import ( from .dump_anything import (
Dumper, Loader, np_dump_collection, np_load_collection, remove_items_from_dict) Dumper,
from .bl_datablock import BlDatablock, has_action, has_driver, dump_driver, load_driver Loader,
np_dump_collection,
np_load_collection,
remove_items_from_dict)
from .bl_datablock import stamp_uuid
from replication.protocol import ReplicatedDatablock
from replication.objects import Node
KEYFRAME = [ KEYFRAME = [
'amplitude', 'amplitude',
@ -42,6 +47,68 @@ KEYFRAME = [
] ]
def has_action(datablock):
""" Check if the datablock datablock has actions
"""
return (hasattr(datablock, 'animation_data')
and datablock.animation_data
and datablock.animation_data.action)
def has_driver(datablock):
""" Check if the datablock datablock is driven
"""
return (hasattr(datablock, 'animation_data')
and datablock.animation_data
and datablock.animation_data.drivers)
def dump_driver(driver):
dumper = Dumper()
dumper.depth = 6
data = dumper.dump(driver)
return data
def load_driver(target_datablock, src_driver):
loader = Loader()
drivers = target_datablock.animation_data.drivers
src_driver_data = src_driver['driver']
new_driver = drivers.new(
src_driver['data_path'], index=src_driver['array_index'])
# Settings
new_driver.driver.type = src_driver_data['type']
new_driver.driver.expression = src_driver_data['expression']
loader.load(new_driver, src_driver)
# Variables
for src_variable in src_driver_data['variables']:
src_var_data = src_driver_data['variables'][src_variable]
new_var = new_driver.driver.variables.new()
new_var.name = src_var_data['name']
new_var.type = src_var_data['type']
for src_target in src_var_data['targets']:
src_target_data = src_var_data['targets'][src_target]
new_var.targets[src_target].id = utils.resolve_from_id(
src_target_data['id'], src_target_data['id_type'])
loader.load(
new_var.targets[src_target], src_target_data)
# Fcurve
new_fcurve = new_driver.keyframe_points
for p in reversed(new_fcurve):
new_fcurve.remove(p, fast=True)
new_fcurve.add(len(src_driver['keyframe_points']))
for index, src_point in enumerate(src_driver['keyframe_points']):
new_point = new_fcurve[index]
loader.load(new_point, src_driver['keyframe_points'][src_point])
def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict: def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
""" Dump a sigle curve to a dict """ Dump a sigle curve to a dict
@ -61,6 +128,7 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
points = fcurve.keyframe_points points = fcurve.keyframe_points
fcurve_data['keyframes_count'] = len(fcurve.keyframe_points) fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)
fcurve_data['keyframe_points'] = np_dump_collection(points, KEYFRAME) fcurve_data['keyframe_points'] = np_dump_collection(points, KEYFRAME)
else: # Legacy method else: # Legacy method
dumper = Dumper() dumper = Dumper()
fcurve_data["keyframe_points"] = [] fcurve_data["keyframe_points"] = []
@ -70,18 +138,6 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
dumper.dump(k) dumper.dump(k)
) )
if fcurve.modifiers:
dumper = Dumper()
dumper.exclude_filter = [
'is_valid',
'active'
]
dumped_modifiers = []
for modfifier in fcurve.modifiers:
dumped_modifiers.append(dumper.dump(modfifier))
fcurve_data['modifiers'] = dumped_modifiers
return fcurve_data return fcurve_data
@ -94,7 +150,7 @@ def load_fcurve(fcurve_data, fcurve):
:type fcurve: bpy.types.FCurve :type fcurve: bpy.types.FCurve
""" """
use_numpy = fcurve_data.get('use_numpy') use_numpy = fcurve_data.get('use_numpy')
loader = Loader()
keyframe_points = fcurve.keyframe_points keyframe_points = fcurve.keyframe_points
# Remove all keyframe points # Remove all keyframe points
@ -139,53 +195,37 @@ def load_fcurve(fcurve_data, fcurve):
fcurve.update() fcurve.update()
dumped_fcurve_modifiers = fcurve_data.get('modifiers', None)
if dumped_fcurve_modifiers: def dump_animation_data(datablock, data):
# clear modifiers
for fmod in fcurve.modifiers:
fcurve.modifiers.remove(fmod)
# Load each modifiers in order
for modifier_data in dumped_fcurve_modifiers:
modifier = fcurve.modifiers.new(modifier_data['type'])
loader.load(modifier, modifier_data)
elif fcurve.modifiers:
for fmod in fcurve.modifiers:
fcurve.modifiers.remove(fmod)
def dump_animation_data(datablock):
animation_data = {}
if has_action(datablock): if has_action(datablock):
animation_data['action'] = datablock.animation_data.action.name dumper = Dumper()
dumper.include_filter = ['action']
data['animation_data'] = dumper.dump(datablock.animation_data)
if has_driver(datablock): if has_driver(datablock):
animation_data['drivers'] = [] dumped_drivers = {'animation_data': {'drivers': []}}
for driver in datablock.animation_data.drivers: for driver in datablock.animation_data.drivers:
animation_data['drivers'].append(dump_driver(driver)) dumped_drivers['animation_data']['drivers'].append(
dump_driver(driver))
return animation_data data.update(dumped_drivers)
def load_animation_data(animation_data, datablock): def load_animation_data(data, datablock):
# Load animation data # Load animation data
if animation_data: if 'animation_data' in data.keys():
if datablock.animation_data is None: if datablock.animation_data is None:
datablock.animation_data_create() datablock.animation_data_create()
for d in datablock.animation_data.drivers: for d in datablock.animation_data.drivers:
datablock.animation_data.drivers.remove(d) datablock.animation_data.drivers.remove(d)
if 'drivers' in animation_data: if 'drivers' in data['animation_data']:
for driver in animation_data['drivers']: for driver in data['animation_data']['drivers']:
load_driver(datablock, driver) load_driver(datablock, driver)
if 'action' in animation_data: if 'action' in data['animation_data']:
datablock.animation_data.action = bpy.data.actions[animation_data['action']] datablock.animation_data.action = bpy.data.actions[data['animation_data']['action']]
elif datablock.animation_data.action:
datablock.animation_data.action = None
# Remove existing animation data if there is not more to load # Remove existing animation data if there is not more to load
elif hasattr(datablock, 'animation_data') and datablock.animation_data: elif hasattr(datablock, 'animation_data') and datablock.animation_data:
datablock.animation_data_clear() datablock.animation_data_clear()
@ -198,26 +238,28 @@ def resolve_animation_dependencies(datablock):
return [] return []
class BlAction(BlDatablock): class BlAction(ReplicatedDatablock):
bl_id = "actions" bl_id = "actions"
bl_class = bpy.types.Action bl_class = bpy.types.Action
bl_check_common = False bl_check_common = False
bl_icon = 'ACTION_TWEAK' bl_icon = 'ACTION_TWEAK'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): @staticmethod
def construct(data: dict) -> object:
return bpy.data.actions.new(data["name"]) return bpy.data.actions.new(data["name"])
def _load_implementation(self, data, target): @staticmethod
def load(data: dict, datablock: object):
for dumped_fcurve in data["fcurves"]: for dumped_fcurve in data["fcurves"]:
dumped_data_path = dumped_fcurve["data_path"] dumped_data_path = dumped_fcurve["data_path"]
dumped_array_index = dumped_fcurve["dumped_array_index"] dumped_array_index = dumped_fcurve["dumped_array_index"]
# create fcurve if needed # create fcurve if needed
fcurve = target.fcurves.find( fcurve = datablock.fcurves.find(
dumped_data_path, index=dumped_array_index) dumped_data_path, index=dumped_array_index)
if fcurve is None: if fcurve is None:
fcurve = target.fcurves.new( fcurve = datablock.fcurves.new(
dumped_data_path, index=dumped_array_index) dumped_data_path, index=dumped_array_index)
load_fcurve(dumped_fcurve, fcurve) load_fcurve(dumped_fcurve, fcurve)
@ -225,9 +267,12 @@ class BlAction(BlDatablock):
id_root = data.get('id_root') id_root = data.get('id_root')
if id_root: if id_root:
target.id_root = id_root datablock.id_root = id_root
@staticmethod
def dump(datablock: object) -> dict:
stamp_uuid(datablock)
def _dump_implementation(self, data, instance=None):
dumper = Dumper() dumper = Dumper()
dumper.exclude_filter = [ dumper.exclude_filter = [
'name_full', 'name_full',
@ -242,11 +287,11 @@ class BlAction(BlDatablock):
'users' 'users'
] ]
dumper.depth = 1 dumper.depth = 1
data = dumper.dump(instance) data = dumper.dump(datablock)
data["fcurves"] = [] data["fcurves"] = []
for fcurve in instance.fcurves: for fcurve in datablock.fcurves:
data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True)) data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True))
return data return data

View File

@ -42,10 +42,10 @@ class BlArmature(BlDatablock):
bl_icon = 'ARMATURE_DATA' bl_icon = 'ARMATURE_DATA'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): def construct(data: dict) -> object:
return bpy.data.armatures.new(data["name"]) return bpy.data.armatures.new(data["name"])
def _load_implementation(self, data, target): def load(data: dict, datablock: object):
# Load parent object # Load parent object
parent_object = utils.find_from_attr( parent_object = utils.find_from_attr(
'uuid', 'uuid',
@ -119,7 +119,7 @@ class BlArmature(BlDatablock):
if 'EDIT' in current_mode: if 'EDIT' in current_mode:
bpy.ops.object.mode_set(mode='EDIT') bpy.ops.object.mode_set(mode='EDIT')
def _dump_implementation(self, data, instance=None): def dump(datablock: object) -> dict:
assert(instance) assert(instance)
dumper = Dumper() dumper = Dumper()

View File

@ -30,11 +30,11 @@ class BlCamera(BlDatablock):
bl_icon = 'CAMERA_DATA' bl_icon = 'CAMERA_DATA'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): def construct(data: dict) -> object:
return bpy.data.cameras.new(data["name"]) return bpy.data.cameras.new(data["name"])
def _load_implementation(self, data, target): def load(data: dict, datablock: object):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(target, data)
@ -56,12 +56,7 @@ class BlCamera(BlDatablock):
target_img.image = bpy.data.images[img_id] target_img.image = bpy.data.images[img_id]
loader.load(target_img, img_data) loader.load(target_img, img_data)
img_user = img_data.get('image_user') def dump(datablock: object) -> dict:
if img_user:
loader.load(target_img.image_user, img_user)
def _dump_implementation(self, data, instance=None):
assert(instance) assert(instance)
# TODO: background image support # TODO: background image support
@ -106,22 +101,14 @@ class BlCamera(BlDatablock):
'scale', 'scale',
'use_flip_x', 'use_flip_x',
'use_flip_y', 'use_flip_y',
'image_user', 'image'
'image',
'frame_duration',
'frame_start',
'frame_offset',
'use_cyclic',
'use_auto_refresh'
] ]
data = dumper.dump(instance) return dumper.dump(instance)
for index, image in enumerate(instance.background_images):
if image.image_user: @staticmethod
data['background_images'][index]['image_user'] = dumper.dump(image.image_user) def resolve_deps(datablock: object) -> [object]:
return data
def _resolve_deps_implementation(self):
deps = [] deps = []
for background in self.instance.background_images: for background in datablock.background_images:
if background.image: if background.image:
deps.append(background.image) deps.append(background.image)

View File

@ -20,9 +20,9 @@ import bpy
import mathutils import mathutils
from .. import utils from .. import utils
from .bl_datablock import BlDatablock
from .dump_anything import Loader, Dumper from .dump_anything import Loader, Dumper
from replication.protocol import ReplicatedDatablock
from replication.objects import Node
def dump_collection_children(collection): def dump_collection_children(collection):
collection_children = [] collection_children = []
@ -81,42 +81,37 @@ def resolve_collection_dependencies(collection):
return deps return deps
class BlCollection(BlDatablock): class BlCollection(ReplicatedDatablock):
bl_id = "collections" bl_id = "collections"
bl_icon = 'FILE_FOLDER' bl_icon = 'FILE_FOLDER'
bl_class = bpy.types.Collection bl_class = bpy.types.Collection
bl_check_common = True bl_check_common = True
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): @staticmethod
if self.is_library: def construct(data: dict) -> object:
with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData): datablock = bpy.data.collections.new(node.data["name"])
targetData.collections = [ return datablock
name for name in sourceData.collections if name == self.data['name']]
instance = bpy.data.collections[self.data['name']] @staticmethod
def load(data: dict, datablock: object):
return instance data = node.data
instance = bpy.data.collections.new(data["name"])
return instance
def _load_implementation(self, data, target):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(datablock, data)
# Objects # Objects
load_collection_objects(data['objects'], target) load_collection_objects(data['objects'], datablock)
# Link childrens # Link childrens
load_collection_childrens(data['children'], target) load_collection_childrens(data['children'], datablock)
# FIXME: Find a better way after the replication big refacotoring # FIXME: Find a better way after the replication big refacotoring
# Keep other user from deleting collection object by flushing their history # Keep other user from deleting collection object by flushing their history
utils.flush_history() utils.flush_history()
def _dump_implementation(self, data, instance=None): @staticmethod
assert(instance) def dump(datablock: object) -> dict:
assert(datablock)
dumper = Dumper() dumper = Dumper()
dumper.depth = 1 dumper.depth = 1
@ -124,15 +119,19 @@ class BlCollection(BlDatablock):
"name", "name",
"instance_offset" "instance_offset"
] ]
data = dumper.dump(instance) data = dumper.dump(datablock)
# dump objects # dump objects
data['objects'] = dump_collection_objects(instance) data['objects'] = dump_collection_objects(datablock)
# dump children collections # dump children collections
data['children'] = dump_collection_children(instance) data['children'] = dump_collection_children(datablock)
return data return data
def _resolve_deps_implementation(self): @staticmethod
return resolve_collection_dependencies(self.instance) def resolve_deps(datablock: object) -> [object]:
return resolve_collection_dependencies(datablock)
_type = bpy.types.Collection
_class = BlCollection

View File

@ -141,10 +141,10 @@ class BlCurve(BlDatablock):
bl_icon = 'CURVE_DATA' bl_icon = 'CURVE_DATA'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): def construct(data: dict) -> object:
return bpy.data.curves.new(data["name"], data["type"]) return bpy.data.curves.new(data["name"], data["type"])
def _load_implementation(self, data, target): def load(data: dict, datablock: object):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(target, data)
@ -175,7 +175,7 @@ class BlCurve(BlDatablock):
if src_materials: if src_materials:
load_materials_slots(src_materials, target.materials) load_materials_slots(src_materials, target.materials)
def _dump_implementation(self, data, instance=None): def dump(datablock: object) -> dict:
assert(instance) assert(instance)
dumper = Dumper() dumper = Dumper()
# Conflicting attributes # Conflicting attributes
@ -222,10 +222,10 @@ class BlCurve(BlDatablock):
return data return data
def _resolve_deps_implementation(self): @staticmethod
# TODO: resolve material def resolve_deps(datablock: object) -> [object]:
deps = [] deps = []
curve = self.instance curve = datablock
if isinstance(curve, T.TextCurve): if isinstance(curve, T.TextCurve):
deps.extend([ deps.extend([
@ -234,7 +234,7 @@ class BlCurve(BlDatablock):
curve.font_bold_italic, curve.font_bold_italic,
curve.font_italic]) curve.font_italic])
for material in self.instance.materials: for material in curve.materials:
if material: if material:
deps.append(material) deps.append(material)

View File

@ -0,0 +1,59 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import logging
from collections.abc import Iterable
import bpy
import mathutils
from replication.constants import DIFF_BINARY, DIFF_JSON, UP
from replication.protocol import ReplicatedDatablock
from replication.objects import Node
from uuid import uuid4
from .. import utils
from .dump_anything import Dumper, Loader
def get_datablock_from_uuid(uuid, default, ignore=[]):
if not uuid:
return default
for category in dir(bpy.data):
root = getattr(bpy.data, category)
if isinstance(root, Iterable) and category not in ignore:
for item in root:
if getattr(item, 'uuid', None) == uuid:
return item
return default
def resolve_datablock_from_root(node:Node, root)->object:
datablock_ref = utils.find_from_attr('uuid', node.uuid, root)
if not datablock_ref:
try:
datablock_ref = root[node.data['name']]
except Exception:
pass
return datablock_ref
def stamp_uuid(datablock):
if not datablock.uuid:
datablock.uuid = str(uuid4())

View File

@ -24,7 +24,7 @@ from pathlib import Path
import bpy import bpy
import mathutils import mathutils
from replication.constants import DIFF_BINARY, UP from replication.constants import DIFF_BINARY, UP
from replication.data import ReplicatedDatablock from replication.protocol import ReplicatedDatablock
from .. import utils from .. import utils
from .dump_anything import Dumper, Loader from .dump_anything import Dumper, Loader
@ -84,7 +84,7 @@ class BlFile(ReplicatedDatablock):
if self.preferences.clear_memory_filecache: if self.preferences.clear_memory_filecache:
del self.data['file'] del self.data['file']
def _dump(self, instance=None): def dump(self, instance=None):
""" """
Read the file and return a dict as: Read the file and return a dict as:
{ {
@ -114,7 +114,7 @@ class BlFile(ReplicatedDatablock):
return data return data
def _load(self, data, target): def load(self, data, target):
""" """
Writing the file Writing the file
""" """
@ -134,8 +134,6 @@ class BlFile(ReplicatedDatablock):
if self.preferences.clear_memory_filecache: if self.preferences.clear_memory_filecache:
return False return False
else: else:
if not self.instance:
return False
memory_size = sys.getsizeof(self.data['file'])-33 memory_size = sys.getsizeof(self.data['file'])-33
disk_size = self.instance.stat().st_size disk_size = self.instance.stat().st_size
return memory_size != disk_size return memory_size != disk_size

View File

@ -34,7 +34,7 @@ class BlFont(BlDatablock):
bl_icon = 'FILE_FONT' bl_icon = 'FILE_FONT'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): def construct(data: dict) -> object:
filename = data.get('filename') filename = data.get('filename')
if filename == '<builtin>': if filename == '<builtin>':
@ -42,10 +42,10 @@ class BlFont(BlDatablock):
else: else:
return bpy.data.fonts.load(get_filepath(filename)) return bpy.data.fonts.load(get_filepath(filename))
def _load(self, data, target): def load(self, data, target):
pass pass
def _dump(self, instance=None): def dump(self, instance=None):
if instance.filepath == '<builtin>': if instance.filepath == '<builtin>':
filename = '<builtin>' filename = '<builtin>'
else: else:
@ -62,11 +62,12 @@ class BlFont(BlDatablock):
def diff(self): def diff(self):
return False return False
def _resolve_deps_implementation(self): @staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = [] deps = []
if self.instance.filepath and self.instance.filepath != '<builtin>': if datablock.filepath and datablock.filepath != '<builtin>':
ensure_unpacked(self.instance) ensure_unpacked(datablock)
deps.append(Path(bpy.path.abspath(self.instance.filepath))) deps.append(Path(bpy.path.abspath(datablock.filepath)))
return deps return deps

View File

@ -235,10 +235,10 @@ class BlGpencil(BlDatablock):
bl_icon = 'GREASEPENCIL' bl_icon = 'GREASEPENCIL'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): def construct(data: dict) -> object:
return bpy.data.grease_pencils.new(data["name"]) return bpy.data.grease_pencils.new(data["name"])
def _load_implementation(self, data, target): def load(data: dict, datablock: object):
target.materials.clear() target.materials.clear()
if "materials" in data.keys(): if "materials" in data.keys():
for mat in data['materials']: for mat in data['materials']:
@ -267,7 +267,7 @@ class BlGpencil(BlDatablock):
def _dump_implementation(self, data, instance=None): def dump(datablock: object) -> dict:
assert(instance) assert(instance)
dumper = Dumper() dumper = Dumper()
dumper.depth = 2 dumper.depth = 2
@ -290,10 +290,11 @@ class BlGpencil(BlDatablock):
data["eval_frame"] = bpy.context.scene.frame_current data["eval_frame"] = bpy.context.scene.frame_current
return data return data
def _resolve_deps_implementation(self): @staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = [] deps = []
for material in self.instance.materials: for material in datablock.materials:
deps.append(material) deps.append(material)
return deps return deps

View File

@ -55,25 +55,22 @@ class BlImage(BlDatablock):
bl_icon = 'IMAGE_DATA' bl_icon = 'IMAGE_DATA'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): def construct(data: dict) -> object:
return bpy.data.images.new( return bpy.data.images.new(
name=data['name'], name=data['name'],
width=data['size'][0], width=data['size'][0],
height=data['size'][1] height=data['size'][1]
) )
def _load(self, data, target): def load(self, data, target):
loader = Loader() loader = Loader()
loader.load(data, target) loader.load(data, target)
target.source = 'FILE' target.source = 'FILE'
target.filepath_raw = get_filepath(data['filename']) target.filepath_raw = get_filepath(data['filename'])
color_space_name = data["colorspace_settings"]["name"] target.colorspace_settings.name = data["colorspace_settings"]["name"]
if color_space_name: def dump(self, instance=None):
target.colorspace_settings.name = color_space_name
def _dump(self, instance=None):
assert(instance) assert(instance)
filename = Path(instance.filepath).name filename = Path(instance.filepath).name
@ -86,7 +83,6 @@ class BlImage(BlDatablock):
dumper.depth = 2 dumper.depth = 2
dumper.include_filter = [ dumper.include_filter = [
"name", "name",
# 'source',
'size', 'size',
'height', 'height',
'alpha', 'alpha',
@ -105,23 +101,24 @@ class BlImage(BlDatablock):
else: else:
return False return False
def _resolve_deps_implementation(self): @staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = [] deps = []
if self.instance.packed_file: if datablock.packed_file:
filename = Path(bpy.path.abspath(self.instance.filepath)).name filename = Path(bpy.path.abspath(datablock.filepath)).name
self.instance.filepath_raw = get_filepath(filename) datablock.filepath_raw = get_filepath(filename)
self.instance.save() datablock.save()
# An image can't be unpacked to the modified path # An image can't be unpacked to the modified path
# TODO: make a bug report # TODO: make a bug report
self.instance.unpack(method="REMOVE") datablock.unpack(method="REMOVE")
elif self.instance.source == "GENERATED": elif datablock.source == "GENERATED":
filename = f"{self.instance.name}.png" filename = f"{datablock.name}.png"
self.instance.filepath = get_filepath(filename) datablock.filepath = get_filepath(filename)
self.instance.save() datablock.save()
if self.instance.filepath: if datablock.filepath:
deps.append(Path(bpy.path.abspath(self.instance.filepath))) deps.append(Path(bpy.path.abspath(datablock.filepath)))
return deps return deps

View File

@ -33,10 +33,10 @@ class BlLattice(BlDatablock):
bl_icon = 'LATTICE_DATA' bl_icon = 'LATTICE_DATA'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): def construct(data: dict) -> object:
return bpy.data.lattices.new(data["name"]) return bpy.data.lattices.new(data["name"])
def _load_implementation(self, data, target): def load(data: dict, datablock: object):
if target.is_editmode: if target.is_editmode:
raise ContextError("lattice is in edit mode") raise ContextError("lattice is in edit mode")
@ -45,7 +45,7 @@ class BlLattice(BlDatablock):
np_load_collection(data['points'], target.points, POINT) np_load_collection(data['points'], target.points, POINT)
def _dump_implementation(self, data, instance=None): def dump(datablock: object) -> dict:
if instance.is_editmode: if instance.is_editmode:
raise ContextError("lattice is in edit mode") raise ContextError("lattice is in edit mode")

View File

@ -30,14 +30,14 @@ class BlLibrary(BlDatablock):
bl_icon = 'LIBRARY_DATA_DIRECT' bl_icon = 'LIBRARY_DATA_DIRECT'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): def construct(data: dict) -> object:
with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData): with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData):
targetData = sourceData targetData = sourceData
return sourceData return sourceData
def _load(self, data, target): def load(self, data, target):
pass pass
def _dump(self, instance=None): def dump(self, instance=None):
assert(instance) assert(instance)
dumper = Dumper() dumper = Dumper()
return dumper.dump(instance) return dumper.dump(instance)

View File

@ -30,14 +30,14 @@ class BlLight(BlDatablock):
bl_icon = 'LIGHT_DATA' bl_icon = 'LIGHT_DATA'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): def construct(data: dict) -> object:
return bpy.data.lights.new(data["name"], data["type"]) return bpy.data.lights.new(data["name"], data["type"])
def _load_implementation(self, data, target): def load(data: dict, datablock: object):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(target, data)
def _dump_implementation(self, data, instance=None): def dump(datablock: object) -> dict:
assert(instance) assert(instance)
dumper = Dumper() dumper = Dumper()
dumper.depth = 3 dumper.depth = 3

View File

@ -31,7 +31,7 @@ class BlLightprobe(BlDatablock):
bl_icon = 'LIGHTPROBE_GRID' bl_icon = 'LIGHTPROBE_GRID'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): def construct(data: dict) -> object:
type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type'] type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type']
# See https://developer.blender.org/D6396 # See https://developer.blender.org/D6396
if bpy.app.version[1] >= 83: if bpy.app.version[1] >= 83:
@ -39,11 +39,11 @@ class BlLightprobe(BlDatablock):
else: else:
logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396") logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
def _load_implementation(self, data, target): def load(data: dict, datablock: object):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(target, data)
def _dump_implementation(self, data, instance=None): def dump(datablock: object) -> dict:
assert(instance) assert(instance)
if bpy.app.version[1] < 83: if bpy.app.version[1] < 83:
logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396") logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")

View File

@ -24,7 +24,9 @@ import re
from uuid import uuid4 from uuid import uuid4
from .dump_anything import Loader, Dumper from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock, get_datablock_from_uuid from .bl_datablock import get_datablock_from_uuid, stamp_uuid
from replication.protocol import ReplicatedDatablock
from replication.objects import Node
NODE_SOCKET_INDEX = re.compile('\[(\d*)\]') NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM'] IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM']
@ -34,7 +36,7 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
:arg node_data: dumped node data :arg node_data: dumped node data
:type node_data: dict :type node_data: dict
:arg node_tree: target node_tree :arg node_tree: datablock node_tree
:type node_tree: bpy.types.NodeTree :type node_tree: bpy.types.NodeTree
""" """
loader = Loader() loader = Loader()
@ -88,7 +90,7 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
def dump_node(node: bpy.types.ShaderNode) -> dict: def dump_node(node: bpy.types.ShaderNode) -> dict:
""" Dump a single node to a dict """ Dump a single node to a dict
:arg node: target node :arg node: datablock node
:type node: bpy.types.Node :type node: bpy.types.Node
:retrun: dict :retrun: dict
""" """
@ -249,7 +251,7 @@ def dump_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict:
def dump_node_tree_sockets(sockets: bpy.types.Collection) -> dict: def dump_node_tree_sockets(sockets: bpy.types.Collection) -> dict:
""" dump sockets of a shader_node_tree """ dump sockets of a shader_node_tree
:arg target_node_tree: target node_tree :arg target_node_tree: datablock node_tree
:type target_node_tree: bpy.types.NodeTree :type target_node_tree: bpy.types.NodeTree
:arg socket_id: socket identifer :arg socket_id: socket identifer
:type socket_id: str :type socket_id: str
@ -272,7 +274,7 @@ def load_node_tree_sockets(sockets: bpy.types.Collection,
sockets_data: dict): sockets_data: dict):
""" load sockets of a shader_node_tree """ load sockets of a shader_node_tree
:arg target_node_tree: target node_tree :arg target_node_tree: datablock node_tree
:type target_node_tree: bpy.types.NodeTree :type target_node_tree: bpy.types.NodeTree
:arg socket_id: socket identifer :arg socket_id: socket identifer
:type socket_id: str :type socket_id: str
@ -300,7 +302,7 @@ def load_node_tree(node_tree_data: dict, target_node_tree: bpy.types.ShaderNodeT
:arg node_tree_data: dumped node data :arg node_tree_data: dumped node data
:type node_tree_data: dict :type node_tree_data: dict
:arg target_node_tree: target node_tree :arg target_node_tree: datablock node_tree
:type target_node_tree: bpy.types.NodeTree :type target_node_tree: bpy.types.NodeTree
""" """
# TODO: load only required nodes # TODO: load only required nodes
@ -373,7 +375,7 @@ def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_
:arg src_materials: dumped material collection (ex: object.materials) :arg src_materials: dumped material collection (ex: object.materials)
:type src_materials: list of tuples (uuid, name) :type src_materials: list of tuples (uuid, name)
:arg dst_materials: target material collection pointer :arg dst_materials: datablock material collection pointer
:type dst_materials: bpy.types.bpy_prop_collection :type dst_materials: bpy.types.bpy_prop_collection
""" """
# MATERIAL SLOTS # MATERIAL SLOTS
@ -389,36 +391,41 @@ def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_
dst_materials.append(mat_ref) dst_materials.append(mat_ref)
class BlMaterial(BlDatablock): class BlMaterial(ReplicatedDatablock):
bl_id = "materials" bl_id = "materials"
bl_class = bpy.types.Material bl_class = bpy.types.Material
bl_check_common = False bl_check_common = False
bl_icon = 'MATERIAL_DATA' bl_icon = 'MATERIAL_DATA'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): @staticmethod
def construct(data: dict) -> object:
return bpy.data.materials.new(data["name"]) return bpy.data.materials.new(data["name"])
def _load_implementation(self, data, target): @staticmethod
def load(data: dict, datablock: object):
data = data
loader = Loader() loader = Loader()
is_grease_pencil = data.get('is_grease_pencil') is_grease_pencil = data.get('is_grease_pencil')
use_nodes = data.get('use_nodes') use_nodes = data.get('use_nodes')
loader.load(target, data) loader.load(datablock, data)
if is_grease_pencil: if is_grease_pencil:
if not target.is_grease_pencil: if not datablock.is_grease_pencil:
bpy.data.materials.create_gpencil_data(target) bpy.data.materials.create_gpencil_data(datablock)
loader.load(target.grease_pencil, data['grease_pencil']) loader.load(datablock.grease_pencil, data['grease_pencil'])
elif use_nodes: elif use_nodes:
if target.node_tree is None: if datablock.node_tree is None:
target.use_nodes = True datablock.use_nodes = True
load_node_tree(data['node_tree'], target.node_tree) load_node_tree(data['node_tree'], datablock.node_tree)
@staticmethod
def dump(datablock: object) -> dict:
stamp_uuid(datablock)
def _dump_implementation(self, data, instance=None):
assert(instance)
mat_dumper = Dumper() mat_dumper = Dumper()
mat_dumper.depth = 2 mat_dumper.depth = 2
mat_dumper.include_filter = [ mat_dumper.include_filter = [
@ -444,9 +451,9 @@ class BlMaterial(BlDatablock):
'line_priority', 'line_priority',
'is_grease_pencil' 'is_grease_pencil'
] ]
data = mat_dumper.dump(instance) data = mat_dumper.dump(datablock)
if instance.is_grease_pencil: if datablock.is_grease_pencil:
gp_mat_dumper = Dumper() gp_mat_dumper = Dumper()
gp_mat_dumper.depth = 3 gp_mat_dumper.depth = 3
@ -480,19 +487,21 @@ class BlMaterial(BlDatablock):
'use_overlap_strokes', 'use_overlap_strokes',
'use_fill_holdout', 'use_fill_holdout',
] ]
data['grease_pencil'] = gp_mat_dumper.dump(instance.grease_pencil) data['grease_pencil'] = gp_mat_dumper.dump(datablock.grease_pencil)
elif instance.use_nodes: elif datablock.use_nodes:
data['node_tree'] = dump_node_tree(instance.node_tree) data['node_tree'] = dump_node_tree(datablock.node_tree)
return data return data
def _resolve_deps_implementation(self): @staticmethod
def resolve_deps(datablock: object) -> [object]:
# TODO: resolve node group deps # TODO: resolve node group deps
deps = [] deps = []
if self.instance.use_nodes: if datablock.use_nodes:
deps.extend(get_node_tree_dependencies(self.instance.node_tree)) deps.extend(get_node_tree_dependencies(datablock.node_tree))
if self.is_library:
deps.append(self.instance.library)
return deps return deps
_type = bpy.types.Material
_class = BlMaterial

View File

@ -22,12 +22,21 @@ import mathutils
import logging import logging
import numpy as np import numpy as np
from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection from .dump_anything import (Dumper,
Loader,
np_load_collection_primitives,
np_dump_collection_primitive,
np_load_collection, np_dump_collection)
from replication.constants import DIFF_BINARY from replication.constants import DIFF_BINARY
from replication.exception import ContextError from replication.exception import ContextError
from .bl_datablock import BlDatablock, get_datablock_from_uuid from replication.protocol import ReplicatedDatablock
from replication.objects import Node
from .bl_datablock import get_datablock_from_uuid, stamp_uuid
from .bl_material import dump_materials_slots, load_materials_slots from .bl_material import dump_materials_slots, load_materials_slots
from ..preferences import get_preferences
VERTICE = ['co'] VERTICE = ['co']
EDGE = [ EDGE = [
@ -49,80 +58,87 @@ POLYGON = [
'material_index', 'material_index',
] ]
class BlMesh(BlDatablock):
class BlMesh(ReplicatedDatablock):
bl_id = "meshes" bl_id = "meshes"
bl_class = bpy.types.Mesh bl_class = bpy.types.Mesh
bl_check_common = False bl_check_common = False
bl_icon = 'MESH_DATA' bl_icon = 'MESH_DATA'
bl_reload_parent = True bl_reload_parent = True
def _construct(self, data): @staticmethod
instance = bpy.data.meshes.new(data["name"]) def construct(data: dict) -> object:
instance.uuid = self.uuid datablock = bpy.data.meshes.new(data["name"])
return instance datablock.uuid = data['uuid']
return datablock
def _load_implementation(self, data, target): @staticmethod
if not target or target.is_editmode: def load(data: dict, datablock: object):
data = data
if not datablock or datablock.is_editmode:
raise ContextError raise ContextError
else: else:
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(datablock, data)
# MATERIAL SLOTS # MATERIAL SLOTS
src_materials = data.get('materials', None) src_materials = data.get('materials', None)
if src_materials: if src_materials:
load_materials_slots(src_materials, target.materials) load_materials_slots(src_materials, datablock.materials)
# CLEAR GEOMETRY # CLEAR GEOMETRY
if target.vertices: if datablock.vertices:
target.clear_geometry() datablock.clear_geometry()
target.vertices.add(data["vertex_count"]) datablock.vertices.add(data["vertex_count"])
target.edges.add(data["egdes_count"]) datablock.edges.add(data["egdes_count"])
target.loops.add(data["loop_count"]) datablock.loops.add(data["loop_count"])
target.polygons.add(data["poly_count"]) datablock.polygons.add(data["poly_count"])
# LOADING # LOADING
np_load_collection(data['vertices'], target.vertices, VERTICE) np_load_collection(data['vertices'], datablock.vertices, VERTICE)
np_load_collection(data['edges'], target.edges, EDGE) np_load_collection(data['edges'], datablock.edges, EDGE)
np_load_collection(data['loops'], target.loops, LOOP) np_load_collection(data['loops'], datablock.loops, LOOP)
np_load_collection(data["polygons"],target.polygons, POLYGON) np_load_collection(data["polygons"], datablock.polygons, POLYGON)
# UV Layers # UV Layers
if 'uv_layers' in data.keys(): if 'uv_layers' in data.keys():
for layer in data['uv_layers']: for layer in data['uv_layers']:
if layer not in target.uv_layers: if layer not in datablock.uv_layers:
target.uv_layers.new(name=layer) datablock.uv_layers.new(name=layer)
np_load_collection_primitives( np_load_collection_primitives(
target.uv_layers[layer].data, datablock.uv_layers[layer].data,
'uv', 'uv',
data["uv_layers"][layer]['data']) data["uv_layers"][layer]['data'])
# Vertex color # Vertex color
if 'vertex_colors' in data.keys(): if 'vertex_colors' in data.keys():
for color_layer in data['vertex_colors']: for color_layer in data['vertex_colors']:
if color_layer not in target.vertex_colors: if color_layer not in datablock.vertex_colors:
target.vertex_colors.new(name=color_layer) datablock.vertex_colors.new(name=color_layer)
np_load_collection_primitives( np_load_collection_primitives(
target.vertex_colors[color_layer].data, datablock.vertex_colors[color_layer].data,
'color', 'color',
data["vertex_colors"][color_layer]['data']) data["vertex_colors"][color_layer]['data'])
target.validate() datablock.validate()
target.update() datablock.update()
def _dump_implementation(self, data, instance=None): @staticmethod
assert(instance) def dump(datablock: object) -> dict:
stamp_uuid(datablock)
if (instance.is_editmode or bpy.context.mode == "SCULPT") and not self.preferences.sync_flags.sync_during_editmode: if (datablock.is_editmode or bpy.context.mode == "SCULPT") and not get_preferences().sync_flags.sync_during_editmode:
raise ContextError("Mesh is in edit mode") raise ContextError("Mesh is in edit mode")
mesh = instance mesh = datablock
dumper = Dumper() dumper = Dumper()
dumper.depth = 1 dumper.depth = 1
dumper.include_filter = [ dumper.include_filter = [
'uuid'
'name', 'name',
'use_auto_smooth', 'use_auto_smooth',
'auto_smooth_angle', 'auto_smooth_angle',
@ -153,23 +169,26 @@ class BlMesh(BlDatablock):
data['uv_layers'] = {} data['uv_layers'] = {}
for layer in mesh.uv_layers: for layer in mesh.uv_layers:
data['uv_layers'][layer.name] = {} data['uv_layers'][layer.name] = {}
data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive(layer.data, 'uv') data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive(
layer.data, 'uv')
# Vertex color # Vertex color
if mesh.vertex_colors: if mesh.vertex_colors:
data['vertex_colors'] = {} data['vertex_colors'] = {}
for color_map in mesh.vertex_colors: for color_map in mesh.vertex_colors:
data['vertex_colors'][color_map.name] = {} data['vertex_colors'][color_map.name] = {}
data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color') data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(
color_map.data, 'color')
# Materials # Materials
data['materials'] = dump_materials_slots(instance.materials) data['materials'] = dump_materials_slots(datablock.materials)
return data return data
def _resolve_deps_implementation(self): @staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = [] deps = []
for material in self.instance.materials: for material in datablock.materials:
if material: if material:
deps.append(material) deps.append(material)
@ -177,7 +196,10 @@ class BlMesh(BlDatablock):
def diff(self): def diff(self):
if 'EDIT' in bpy.context.mode \ if 'EDIT' in bpy.context.mode \
and not self.preferences.sync_flags.sync_during_editmode: and not get_preferences().sync_flags.sync_during_editmode:
return False return False
else: else:
return super().diff() return super().diff()
_type = bpy.types.Mesh
_class = BlMesh

View File

@ -69,10 +69,10 @@ class BlMetaball(BlDatablock):
bl_icon = 'META_BALL' bl_icon = 'META_BALL'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): def construct(data: dict) -> object:
return bpy.data.metaballs.new(data["name"]) return bpy.data.metaballs.new(data["name"])
def _load_implementation(self, data, target): def load(data: dict, datablock: object):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(target, data)
@ -83,7 +83,7 @@ class BlMetaball(BlDatablock):
load_metaball_elements(data['elements'], target.elements) load_metaball_elements(data['elements'], target.elements)
def _dump_implementation(self, data, instance=None): def dump(datablock: object) -> dict:
assert(instance) assert(instance)
dumper = Dumper() dumper = Dumper()
dumper.depth = 1 dumper.depth = 1

View File

@ -32,14 +32,15 @@ class BlNodeGroup(BlDatablock):
bl_icon = 'NODETREE' bl_icon = 'NODETREE'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): def construct(data: dict) -> object:
return bpy.data.node_groups.new(data["name"], data["type"]) return bpy.data.node_groups.new(data["name"], data["type"])
def _load_implementation(self, data, target): def load(data: dict, datablock: object):
load_node_tree(data, target) load_node_tree(data, target)
def _dump_implementation(self, data, instance=None): def dump(datablock: object) -> dict:
return dump_node_tree(instance) return dump_node_tree(instance)
def _resolve_deps_implementation(self): @staticmethod
return get_node_tree_dependencies(self.instance) def resolve_deps(datablock: object) -> [object]:
return get_node_tree_dependencies(datablock)

View File

@ -21,10 +21,17 @@ import re
import bpy import bpy
import mathutils import mathutils
from replication.exception import ContextError from replication.exception import ContextError
from replication.objects import Node
from replication.protocol import ReplicatedDatablock
from .bl_datablock import BlDatablock, get_datablock_from_uuid from .bl_datablock import get_datablock_from_uuid, stamp_uuid
from .bl_action import (load_animation_data,
dump_animation_data,
resolve_animation_dependencies)
from ..preferences import get_preferences
from .bl_datablock import get_datablock_from_uuid
from .bl_material import IGNORED_SOCKETS from .bl_material import IGNORED_SOCKETS
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
from .dump_anything import ( from .dump_anything import (
Dumper, Dumper,
Loader, Loader,
@ -38,12 +45,6 @@ SKIN_DATA = [
'use_root' 'use_root'
] ]
SHAPEKEY_BLOCK_ATTR = [
'mute',
'value',
'slider_min',
'slider_max',
]
if bpy.app.version[1] >= 93: if bpy.app.version[1] >= 93:
SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float) SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
else: else:
@ -166,45 +167,40 @@ def load_pose(target_bone, data):
def find_data_from_name(name=None): def find_data_from_name(name=None):
instance = None datablock = None
if not name: if not name:
pass pass
elif name in bpy.data.meshes.keys(): elif name in bpy.data.meshes.keys():
instance = bpy.data.meshes[name] datablock = bpy.data.meshes[name]
elif name in bpy.data.lights.keys(): elif name in bpy.data.lights.keys():
instance = bpy.data.lights[name] datablock = bpy.data.lights[name]
elif name in bpy.data.cameras.keys(): elif name in bpy.data.cameras.keys():
instance = bpy.data.cameras[name] datablock = bpy.data.cameras[name]
elif name in bpy.data.curves.keys(): elif name in bpy.data.curves.keys():
instance = bpy.data.curves[name] datablock = bpy.data.curves[name]
elif name in bpy.data.metaballs.keys(): elif name in bpy.data.metaballs.keys():
instance = bpy.data.metaballs[name] datablock = bpy.data.metaballs[name]
elif name in bpy.data.armatures.keys(): elif name in bpy.data.armatures.keys():
instance = bpy.data.armatures[name] datablock = bpy.data.armatures[name]
elif name in bpy.data.grease_pencils.keys(): elif name in bpy.data.grease_pencils.keys():
instance = bpy.data.grease_pencils[name] datablock = bpy.data.grease_pencils[name]
elif name in bpy.data.curves.keys(): elif name in bpy.data.curves.keys():
instance = bpy.data.curves[name] datablock = bpy.data.curves[name]
elif name in bpy.data.lattices.keys(): elif name in bpy.data.lattices.keys():
instance = bpy.data.lattices[name] datablock = bpy.data.lattices[name]
elif name in bpy.data.speakers.keys(): elif name in bpy.data.speakers.keys():
instance = bpy.data.speakers[name] datablock = bpy.data.speakers[name]
elif name in bpy.data.lightprobes.keys(): elif name in bpy.data.lightprobes.keys():
# Only supported since 2.83 # Only supported since 2.83
if bpy.app.version[1] >= 83: if bpy.app.version[1] >= 83:
instance = bpy.data.lightprobes[name] datablock = bpy.data.lightprobes[name]
else: else:
logging.warning( logging.warning(
"Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396") "Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
elif bpy.app.version[1] >= 91 and name in bpy.data.volumes.keys(): elif bpy.app.version[1] >= 91 and name in bpy.data.volumes.keys():
# Only supported since 2.91 # Only supported since 2.91
instance = bpy.data.volumes[name] datablock = bpy.data.volumes[name]
return instance return datablock
def load_data(object, name):
logging.info("loading data")
pass
def _is_editmode(object: bpy.types.Object) -> bool: def _is_editmode(object: bpy.types.Object) -> bool:
@ -251,6 +247,7 @@ def find_geometry_nodes_dependencies(modifiers: bpy.types.bpy_prop_collection) -
return dependencies return dependencies
def dump_vertex_groups(src_object: bpy.types.Object) -> dict: def dump_vertex_groups(src_object: bpy.types.Object) -> dict:
""" Dump object's vertex groups """ Dump object's vertex groups
@ -295,257 +292,128 @@ def load_vertex_groups(dumped_vertex_groups: dict, target_object: bpy.types.Obje
for index, weight in vg['vertices']: for index, weight in vg['vertices']:
vertex_group.add([index], weight, 'REPLACE') vertex_group.add([index], weight, 'REPLACE')
def dump_shape_keys(target_key: bpy.types.Key)->dict:
""" Dump the target shape_keys datablock to a dict using numpy
:param dumped_key: target key datablock class BlObject(ReplicatedDatablock):
:type dumped_key: bpy.types.Key
:return: dict
"""
dumped_key_blocks = []
dumper = Dumper()
dumper.include_filter = [
'name',
'mute',
'value',
'slider_min',
'slider_max',
]
for key in target_key.key_blocks:
dumped_key_block = dumper.dump(key)
dumped_key_block['data'] = np_dump_collection(key.data, ['co'])
dumped_key_block['relative_key'] = key.relative_key.name
dumped_key_blocks.append(dumped_key_block)
return {
'reference_key': target_key.reference_key.name,
'use_relative': target_key.use_relative,
'key_blocks': dumped_key_blocks,
'animation_data': dump_animation_data(target_key)
}
def load_shape_keys(dumped_shape_keys: dict, target_object: bpy.types.Object):
""" Load the target shape_keys datablock to a dict using numpy
:param dumped_key: src key data
:type dumped_key: bpy.types.Key
:param target_object: object used to load the shapekeys data onto
:type target_object: bpy.types.Object
"""
loader = Loader()
# Remove existing ones
target_object.shape_key_clear()
# Create keys and load vertices coords
dumped_key_blocks = dumped_shape_keys.get('key_blocks')
for dumped_key_block in dumped_key_blocks:
key_block = target_object.shape_key_add(name=dumped_key_block['name'])
loader.load(key_block, dumped_key_block)
np_load_collection(dumped_key_block['data'], key_block.data, ['co'])
# Load relative key after all
for dumped_key_block in dumped_key_blocks:
relative_key_name = dumped_key_block.get('relative_key')
key_name = dumped_key_block.get('name')
target_keyblock = target_object.data.shape_keys.key_blocks[key_name]
relative_key = target_object.data.shape_keys.key_blocks[relative_key_name]
target_keyblock.relative_key = relative_key
# Shape keys animation data
anim_data = dumped_shape_keys.get('animation_data')
if anim_data:
load_animation_data(anim_data, target_object.data.shape_keys)
def dump_modifiers(modifiers: bpy.types.bpy_prop_collection)->dict:
""" Dump all modifiers of a modifier collection into a dict
:param modifiers: modifiers
:type modifiers: bpy.types.bpy_prop_collection
:return: dict
"""
dumped_modifiers = {}
dumper = Dumper()
dumper.depth = 1
dumper.exclude_filter = ['is_active']
for index, modifier in enumerate(modifiers):
dumped_modifier = dumper.dump(modifier)
# hack to dump geometry nodes inputs
if modifier.type == 'NODES':
dumped_inputs = dump_modifier_geometry_node_inputs(
modifier)
dumped_modifier['inputs'] = dumped_inputs
elif modifier.type == 'PARTICLE_SYSTEM':
dumper.exclude_filter = [
"is_edited",
"is_editable",
"is_global_hair"
]
dumped_modifier['particle_system'] = dumper.dump(modifier.particle_system)
dumped_modifier['particle_system']['settings_uuid'] = modifier.particle_system.settings.uuid
elif modifier.type in ['SOFT_BODY', 'CLOTH']:
dumped_modifier['settings'] = dumper.dump(modifier.settings)
elif modifier.type == 'UV_PROJECT':
dumped_modifier['projectors'] =[p.object.name for p in modifier.projectors if p and p.object]
dumped_modifiers[modifier.name] = dumped_modifier
return dumped_modifiers
def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_prop_collection):
    """ Load modifiers custom data not managed by the dump_anything loader

        :param dumped_modifiers: dumped modifiers data, keyed by modifier name
        :type dumped_modifiers: dict
        :param modifiers: target modifiers collection
        :type modifiers: bpy.types.bpy_prop_collection
    """
    loader = Loader()

    for modifier in modifiers:
        dumped_modifier = dumped_modifiers.get(modifier.name)
        # Modifiers absent from the dump (e.g. added locally) have no
        # custom data to restore.
        if dumped_modifier is None:
            continue
        if modifier.type == 'NODES':
            load_modifier_geometry_node_inputs(dumped_modifier, modifier)
        elif modifier.type == 'PARTICLE_SYSTEM':
            default = modifier.particle_system.settings
            dumped_particles = dumped_modifier['particle_system']
            loader.load(modifier.particle_system, dumped_particles)

            # Re-link the replicated particle settings datablock by uuid
            settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
            if settings:
                modifier.particle_system.settings = settings
                # Hack to remove the default generated particle settings
                if not default.uuid:
                    bpy.data.particles.remove(default)
        elif modifier.type in ['SOFT_BODY', 'CLOTH']:
            loader.load(modifier.settings, dumped_modifier['settings'])
        elif modifier.type == 'UV_PROJECT':
            for projector_index, projector_object in enumerate(dumped_modifier['projectors']):
                target_object = bpy.data.objects.get(projector_object)
                if target_object:
                    modifier.projectors[projector_index].object = target_object
                else:
                    # f-prefix was missing here, so the object name was
                    # never interpolated into the log message
                    logging.error(
                        f"Couldn't load projector target object {projector_object}")
class BlObject(BlDatablock):
bl_id = "objects" bl_id = "objects"
bl_class = bpy.types.Object
bl_check_common = False bl_check_common = False
bl_icon = 'OBJECT_DATA' bl_icon = 'OBJECT_DATA'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): is_root = False
instance = None
if self.is_library: @staticmethod
with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData): def construct(data: dict) -> bpy.types.Object:
targetData.objects = [ datablock = None
name for name in sourceData.objects if name == self.data['name']]
instance = bpy.data.objects[self.data['name']]
instance.uuid = self.uuid
return instance
# TODO: refactoring # TODO: refactoring
object_name = data.get("name") object_name = data.get("name")
data_uuid = data.get("data_uuid") data_uuid = data.get("data_uuid")
data_id = data.get("data") data_id = data.get("data")
data_type = data.get("type") object_uuid = data.get('uuid')
object_data = get_datablock_from_uuid( object_data = get_datablock_from_uuid(
data_uuid, data_uuid,
find_data_from_name(data_id), find_data_from_name(data_id),
ignore=['images']) # TODO: use resolve_from_id ignore=['images']) # TODO: use resolve_from_id
if data_type != 'EMPTY' and object_data is None: if object_data is None and data_uuid:
raise Exception(f"Fail to load object {data['name']}({self.uuid})") raise Exception(f"Fail to load object {data['name']}({object_uuid})")
instance = bpy.data.objects.new(object_name, object_data) datablock = bpy.data.objects.new(object_name, object_data)
instance.uuid = self.uuid datablock.uuid = object_uuid
return instance return datablock
@staticmethod
def load(data: dict, datablock: bpy.types.Object):
data = datablock.data
load_animation_data(data, datablock)
def _load_implementation(self, data, target):
loader = Loader() loader = Loader()
data_uuid = data.get("data_uuid") data_uuid = data.get("data_uuid")
data_id = data.get("data") data_id = data.get("data")
if target.data and (target.data.name != data_id): if datablock.data and (datablock.data.name != data_id):
target.data = get_datablock_from_uuid( datablock.data = get_datablock_from_uuid(
data_uuid, find_data_from_name(data_id), ignore=['images']) data_uuid, find_data_from_name(data_id), ignore=['images'])
# vertex groups # vertex groups
vertex_groups = data.get('vertex_groups', None) vertex_groups = data.get('vertex_groups', None)
if vertex_groups: if vertex_groups:
load_vertex_groups(vertex_groups, target) load_vertex_groups(vertex_groups, datablock)
object_data = target.data object_data = datablock.data
# SHAPE KEYS # SHAPE KEYS
shape_keys = data.get('shape_keys') if 'shape_keys' in data:
if shape_keys: datablock.shape_key_clear()
load_shape_keys(shape_keys, target)
# Create keys and load vertices coords
for key_block in data['shape_keys']['key_blocks']:
key_data = data['shape_keys']['key_blocks'][key_block]
datablock.shape_key_add(name=key_block)
loader.load(
datablock.data.shape_keys.key_blocks[key_block], key_data)
for vert in key_data['data']:
datablock.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
# Load relative key after all
for key_block in data['shape_keys']['key_blocks']:
reference = data['shape_keys']['key_blocks'][key_block]['relative_key']
datablock.data.shape_keys.key_blocks[key_block].relative_key = datablock.data.shape_keys.key_blocks[reference]
# Load transformation data # Load transformation data
loader.load(target, data) loader.load(datablock, data)
# Object display fields # Object display fields
if 'display' in data: if 'display' in data:
loader.load(target.display, data['display']) loader.load(datablock.display, data['display'])
# Parenting # Parenting
parent_id = data.get('parent_uid') parent_id = data.get('parent_uid')
if parent_id: if parent_id:
parent = get_datablock_from_uuid(parent_id[0], bpy.data.objects[parent_id[1]]) parent = get_datablock_from_uuid(parent_id[0], bpy.data.objects[parent_id[1]])
# Avoid reloading # Avoid reloading
if target.parent != parent and parent is not None: if datablock.parent != parent and parent is not None:
target.parent = parent datablock.parent = parent
elif target.parent: elif datablock.parent:
target.parent = None datablock.parent = None
# Pose # Pose
if 'pose' in data: if 'pose' in data:
if not target.pose: if not datablock.pose:
raise Exception('No pose data yet (Fixed in a near futur)') raise Exception('No pose data yet (Fixed in a near futur)')
# Bone groups # Bone groups
for bg_name in data['pose']['bone_groups']: for bg_name in data['pose']['bone_groups']:
bg_data = data['pose']['bone_groups'].get(bg_name) bg_data = data['pose']['bone_groups'].get(bg_name)
bg_target = target.pose.bone_groups.get(bg_name) bg_datablock = datablock.pose.bone_groups.get(bg_name)
if not bg_target: if not bg_datablock:
bg_target = target.pose.bone_groups.new(name=bg_name) bg_datablock = datablock.pose.bone_groups.new(name=bg_name)
loader.load(bg_target, bg_data) loader.load(bg_datablock, bg_data)
# target.pose.bone_groups.get # datablock.pose.bone_groups.get
# Bones # Bones
for bone in data['pose']['bones']: for bone in data['pose']['bones']:
target_bone = target.pose.bones.get(bone) datablock_bone = datablock.pose.bones.get(bone)
bone_data = data['pose']['bones'].get(bone) bone_data = data['pose']['bones'].get(bone)
if 'constraints' in bone_data.keys(): if 'constraints' in bone_data.keys():
loader.load(target_bone, bone_data['constraints']) loader.load(datablock_bone, bone_data['constraints'])
load_pose(target_bone, bone_data) load_pose(datablock_bone, bone_data)
if 'bone_index' in bone_data.keys(): if 'bone_index' in bone_data.keys():
target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']] datablock_bone.bone_group = datablock.pose.bone_group[bone_data['bone_group_index']]
# TODO: find another way... # TODO: find another way...
if target.empty_display_type == "IMAGE": if datablock.empty_display_type == "IMAGE":
img_uuid = data.get('data_uuid') img_uuid = data.get('data_uuid')
if target.data is None and img_uuid: if datablock.data is None and img_uuid:
target.data = get_datablock_from_uuid(img_uuid, None) datablock.data = get_datablock_from_uuid(img_uuid, None)
if hasattr(object_data, 'skin_vertices') \ if hasattr(object_data, 'skin_vertices') \
and object_data.skin_vertices\ and object_data.skin_vertices\
@ -556,36 +424,64 @@ class BlObject(BlDatablock):
skin_data.data, skin_data.data,
SKIN_DATA) SKIN_DATA)
if hasattr(target, 'cycles_visibility') \ if hasattr(datablock, 'cycles_visibility') \
and 'cycles_visibility' in data: and 'cycles_visibility' in data:
loader.load(target.cycles_visibility, data['cycles_visibility']) loader.load(datablock.cycles_visibility, data['cycles_visibility'])
if hasattr(target, 'modifiers'): # TODO: handle geometry nodes input from dump_anything
load_modifiers_custom_data(data['modifiers'], target.modifiers) if hasattr(datablock, 'modifiers'):
nodes_modifiers = [
mod for mod in datablock.modifiers if mod.type == 'NODES']
for modifier in nodes_modifiers:
load_modifier_geometry_node_inputs(
data['modifiers'][modifier.name], modifier)
particles_modifiers = [
mod for mod in datablock.modifiers if mod.type == 'PARTICLE_SYSTEM']
for mod in particles_modifiers:
default = mod.particle_system.settings
dumped_particles = data['modifiers'][mod.name]['particle_system']
loader.load(mod.particle_system, dumped_particles)
settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
if settings:
mod.particle_system.settings = settings
# Hack to remove the default generated particle settings
if not default.uuid:
bpy.data.particles.remove(default)
phys_modifiers = [
mod for mod in datablock.modifiers if mod.type in ['SOFT_BODY', 'CLOTH']]
for mod in phys_modifiers:
loader.load(mod.settings, data['modifiers'][mod.name]['settings'])
# PHYSICS # PHYSICS
load_physics(data, target) load_physics(data, datablock)
transform = data.get('transforms', None) transform = data.get('transforms', None)
if transform: if transform:
target.matrix_parent_inverse = mathutils.Matrix( datablock.matrix_parent_inverse = mathutils.Matrix(
transform['matrix_parent_inverse']) transform['matrix_parent_inverse'])
target.matrix_basis = mathutils.Matrix(transform['matrix_basis']) datablock.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
target.matrix_local = mathutils.Matrix(transform['matrix_local']) datablock.matrix_local = mathutils.Matrix(transform['matrix_local'])
def _dump_implementation(self, data, instance=None): @staticmethod
assert(instance) def dump(datablock: object) -> dict:
assert(datablock)
if _is_editmode(instance): if _is_editmode(datablock):
if self.preferences.sync_flags.sync_during_editmode: if self.preferences.sync_flags.sync_during_editmode:
instance.update_from_editmode() datablock.update_from_editmode()
else: else:
raise ContextError("Object is in edit-mode.") raise ContextError("Object is in edit-mode.")
dumper = Dumper() dumper = Dumper()
dumper.depth = 1 dumper.depth = 1
dumper.include_filter = [ dumper.include_filter = [
"uuid",
"name", "name",
"rotation_mode", "rotation_mode",
"data", "data",
@ -618,32 +514,55 @@ class BlObject(BlDatablock):
'type' 'type'
] ]
data = dumper.dump(instance) data = dumper.dump(datablock)
dumper.include_filter = [ dumper.include_filter = [
'matrix_parent_inverse', 'matrix_parent_inverse',
'matrix_local', 'matrix_local',
'matrix_basis'] 'matrix_basis']
data['transforms'] = dumper.dump(instance) data['transforms'] = dumper.dump(datablock)
dumper.include_filter = [ dumper.include_filter = [
'show_shadows', 'show_shadows',
] ]
data['display'] = dumper.dump(instance.display) data['display'] = dumper.dump(datablock.display)
data['data_uuid'] = getattr(instance.data, 'uuid', None) data['data_uuid'] = getattr(datablock.data, 'uuid', None)
if self.is_library:
return data
# PARENTING # PARENTING
if instance.parent: if datablock.parent:
data['parent_uid'] = (instance.parent.uuid, instance.parent.name) data['parent_uid'] = (datablock.parent.uuid, datablock.parent.name)
# MODIFIERS # MODIFIERS
modifiers = getattr(instance, 'modifiers', None) if hasattr(datablock, 'modifiers'):
if hasattr(instance, 'modifiers'): data["modifiers"] = {}
data['modifiers'] = dump_modifiers(modifiers) modifiers = getattr(datablock, 'modifiers', None)
if modifiers:
dumper.include_filter = None
dumper.depth = 1
dumper.exclude_filter = ['is_active']
for index, modifier in enumerate(modifiers):
dumped_modifier = dumper.dump(modifier)
# hack to dump geometry nodes inputs
if modifier.type == 'NODES':
dumped_inputs = dump_modifier_geometry_node_inputs(
modifier)
dumped_modifier['inputs'] = dumped_inputs
gp_modifiers = getattr(instance, 'grease_pencil_modifiers', None) elif modifier.type == 'PARTICLE_SYSTEM':
dumper.exclude_filter = [
"is_edited",
"is_editable",
"is_global_hair"
]
dumped_modifier['particle_system'] = dumper.dump(modifier.particle_system)
dumped_modifier['particle_system']['settings_uuid'] = modifier.particle_system.settings.uuid
elif modifier.type in ['SOFT_BODY', 'CLOTH']:
dumped_modifier['settings'] = dumper.dump(modifier.settings)
data["modifiers"][modifier.name] = dumped_modifier
gp_modifiers = getattr(datablock, 'grease_pencil_modifiers', None)
if gp_modifiers: if gp_modifiers:
dumper.include_filter = None dumper.include_filter = None
@ -666,16 +585,16 @@ class BlObject(BlDatablock):
# CONSTRAINTS # CONSTRAINTS
if hasattr(instance, 'constraints'): if hasattr(datablock, 'constraints'):
dumper.include_filter = None dumper.include_filter = None
dumper.depth = 3 dumper.depth = 3
data["constraints"] = dumper.dump(instance.constraints) data["constraints"] = dumper.dump(datablock.constraints)
# POSE # POSE
if hasattr(instance, 'pose') and instance.pose: if hasattr(datablock, 'pose') and datablock.pose:
# BONES # BONES
bones = {} bones = {}
for bone in instance.pose.bones: for bone in datablock.pose.bones:
bones[bone.name] = {} bones[bone.name] = {}
dumper.depth = 1 dumper.depth = 1
rotation = 'rotation_quaternion' if bone.rotation_mode == 'QUATERNION' else 'rotation_euler' rotation = 'rotation_quaternion' if bone.rotation_mode == 'QUATERNION' else 'rotation_euler'
@ -700,7 +619,7 @@ class BlObject(BlDatablock):
# GROUPS # GROUPS
bone_groups = {} bone_groups = {}
for group in instance.pose.bone_groups: for group in datablock.pose.bone_groups:
dumper.depth = 3 dumper.depth = 3
dumper.include_filter = [ dumper.include_filter = [
'name', 'name',
@ -710,13 +629,36 @@ class BlObject(BlDatablock):
data['pose']['bone_groups'] = bone_groups data['pose']['bone_groups'] = bone_groups
# VERTEx GROUP # VERTEx GROUP
if len(instance.vertex_groups) > 0: if len(datablock.vertex_groups) > 0:
data['vertex_groups'] = dump_vertex_groups(instance) data['vertex_groups'] = dump_vertex_groups(datablock)
# SHAPE KEYS # SHAPE KEYS
object_data = instance.data object_data = datablock.data
if hasattr(object_data, 'shape_keys') and object_data.shape_keys: if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
data['shape_keys'] = dump_shape_keys(object_data.shape_keys) dumper = Dumper()
dumper.depth = 2
dumper.include_filter = [
'reference_key',
'use_relative'
]
data['shape_keys'] = dumper.dump(object_data.shape_keys)
data['shape_keys']['reference_key'] = object_data.shape_keys.reference_key.name
key_blocks = {}
for key in object_data.shape_keys.key_blocks:
dumper.depth = 3
dumper.include_filter = [
'name',
'data',
'mute',
'value',
'slider_min',
'slider_max',
'data',
'co'
]
key_blocks[key.name] = dumper.dump(key)
key_blocks[key.name]['relative_key'] = key.relative_key.name
data['shape_keys']['key_blocks'] = key_blocks
# SKIN VERTICES # SKIN VERTICES
if hasattr(object_data, 'skin_vertices') and object_data.skin_vertices: if hasattr(object_data, 'skin_vertices') and object_data.skin_vertices:
@ -727,7 +669,7 @@ class BlObject(BlDatablock):
data['skin_vertices'] = skin_vertices data['skin_vertices'] = skin_vertices
# CYCLE SETTINGS # CYCLE SETTINGS
if hasattr(instance, 'cycles_visibility'): if hasattr(datablock, 'cycles_visibility'):
dumper.include_filter = [ dumper.include_filter = [
'camera', 'camera',
'diffuse', 'diffuse',
@ -736,38 +678,40 @@ class BlObject(BlDatablock):
'scatter', 'scatter',
'shadow', 'shadow',
] ]
data['cycles_visibility'] = dumper.dump(instance.cycles_visibility) data['cycles_visibility'] = dumper.dump(
datablock.cycles_visibility)
# PHYSICS # PHYSICS
data.update(dump_physics(instance)) data.update(dump_physics(instance))
return data return data
def _resolve_deps_implementation(self): @staticmethod
def resolve_deps(datablock: bpy.types.Object) -> list:
deps = [] deps = []
# Avoid Empty case # Avoid Empty case
if self.instance.data: if datablock.data:
deps.append(self.instance.data) deps.append(datablock.data)
# Particle systems # Particle systems
for particle_slot in self.instance.particle_systems: for particle_slot in datablock.particle_systems:
deps.append(particle_slot.settings) deps.append(particle_slot.settings)
if self.is_library: if datablock.parent:
deps.append(self.instance.library) deps.append(datablock.parent)
if self.instance.parent: if datablock.instance_type == 'COLLECTION':
deps.append(self.instance.parent)
if self.instance.instance_type == 'COLLECTION':
# TODO: uuid based # TODO: uuid based
deps.append(self.instance.instance_collection) deps.append(datablock.instance_collection)
if self.instance.modifiers: deps.extend(resolve_animation_dependencies(datablock))
deps.extend(find_textures_dependencies(self.instance.modifiers))
deps.extend(find_geometry_nodes_dependencies(self.instance.modifiers)) if datablock.modifiers:
deps.extend(find_textures_dependencies(datablock.modifiers))
deps.extend(find_geometry_nodes_dependencies(datablock.modifiers))
if hasattr(self.instance.data, 'shape_keys') and self.instance.data.shape_keys:
deps.extend(resolve_animation_dependencies(self.instance.data.shape_keys))
return deps return deps
_type = bpy.types.Object
_class = BlObject

View File

@ -23,14 +23,21 @@ import bpy
import mathutils import mathutils
from deepdiff import DeepDiff from deepdiff import DeepDiff
from replication.constants import DIFF_JSON, MODIFIED from replication.constants import DIFF_JSON, MODIFIED
from replication.protocol import ReplicatedDatablock
from replication.objects import Node
from ..utils import flush_history from ..utils import flush_history
from .bl_collection import (dump_collection_children, dump_collection_objects, from .bl_collection import (dump_collection_children, dump_collection_objects,
load_collection_childrens, load_collection_objects, load_collection_childrens, load_collection_objects,
resolve_collection_dependencies) resolve_collection_dependencies)
from .bl_datablock import BlDatablock from .bl_action import (load_animation_data,
dump_animation_data,
resolve_animation_dependencies)
from .bl_datablock import stamp_uuid
from .bl_file import get_filepath from .bl_file import get_filepath
from .dump_anything import Dumper, Loader from .dump_anything import Dumper, Loader
from ..preferences import get_preferences
RENDER_SETTINGS = [ RENDER_SETTINGS = [
'dither_intensity', 'dither_intensity',
@ -286,12 +293,10 @@ def dump_sequence(sequence: bpy.types.Sequence) -> dict:
dumper.depth = 1 dumper.depth = 1
data = dumper.dump(sequence) data = dumper.dump(sequence)
# TODO: Support multiple images # TODO: Support multiple images
if sequence.type == 'IMAGE': if sequence.type == 'IMAGE':
data['filenames'] = [e.filename for e in sequence.elements] data['filenames'] = [e.filename for e in sequence.elements]
# Effect strip inputs # Effect strip inputs
input_count = getattr(sequence, 'input_count', None) input_count = getattr(sequence, 'input_count', None)
if input_count: if input_count:
@ -321,53 +326,54 @@ def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor
if strip_type == 'SCENE': if strip_type == 'SCENE':
strip_scene = bpy.data.scenes.get(sequence_data.get('scene')) strip_scene = bpy.data.scenes.get(sequence_data.get('scene'))
sequence = sequence_editor.sequences.new_scene(strip_name, sequence = sequence_editor.sequences.new_scene(strip_name,
strip_scene, strip_scene,
strip_channel, strip_channel,
strip_frame_start) strip_frame_start)
elif strip_type == 'MOVIE': elif strip_type == 'MOVIE':
filepath = get_filepath(Path(sequence_data['filepath']).name) filepath = get_filepath(Path(sequence_data['filepath']).name)
sequence = sequence_editor.sequences.new_movie(strip_name, sequence = sequence_editor.sequences.new_movie(strip_name,
filepath, filepath,
strip_channel, strip_channel,
strip_frame_start) strip_frame_start)
elif strip_type == 'SOUND': elif strip_type == 'SOUND':
filepath = bpy.data.sounds[sequence_data['sound']].filepath filepath = bpy.data.sounds[sequence_data['sound']].filepath
sequence = sequence_editor.sequences.new_sound(strip_name, sequence = sequence_editor.sequences.new_sound(strip_name,
filepath, filepath,
strip_channel, strip_channel,
strip_frame_start) strip_frame_start)
elif strip_type == 'IMAGE': elif strip_type == 'IMAGE':
images_name = sequence_data.get('filenames') images_name = sequence_data.get('filenames')
filepath = get_filepath(images_name[0]) filepath = get_filepath(images_name[0])
sequence = sequence_editor.sequences.new_image(strip_name, sequence = sequence_editor.sequences.new_image(strip_name,
filepath, filepath,
strip_channel, strip_channel,
strip_frame_start) strip_frame_start)
# load other images # load other images
if len(images_name)>1: if len(images_name) > 1:
for img_idx in range(1,len(images_name)): for img_idx in range(1, len(images_name)):
sequence.elements.append((images_name[img_idx])) sequence.elements.append((images_name[img_idx]))
else: else:
seq = {} seq = {}
for i in range(sequence_data['input_count']): for i in range(sequence_data['input_count']):
seq[f"seq{i+1}"] = sequence_editor.sequences_all.get(sequence_data.get(f"input_{i+1}", None)) seq[f"seq{i+1}"] = sequence_editor.sequences_all.get(
sequence_data.get(f"input_{i+1}", None))
sequence = sequence_editor.sequences.new_effect(name=strip_name, sequence = sequence_editor.sequences.new_effect(name=strip_name,
type=strip_type, type=strip_type,
channel=strip_channel, channel=strip_channel,
frame_start=strip_frame_start, frame_start=strip_frame_start,
frame_end=sequence_data['frame_final_end'], frame_end=sequence_data['frame_final_end'],
**seq) **seq)
loader = Loader() loader = Loader()
# TODO: Support filepath updates # TODO: Support filepath updates
loader.exclure_filter = ['filepath', 'sound', 'filenames','fps'] loader.exclure_filter = ['filepath', 'sound', 'filenames', 'fps']
loader.load(sequence, sequence_data) loader.load(sequence, sequence_data)
sequence.select = False sequence.select = False
class BlScene(BlDatablock): class BlScene(ReplicatedDatablock):
is_root = True is_root = True
bl_id = "scenes" bl_id = "scenes"
@ -376,58 +382,60 @@ class BlScene(BlDatablock):
bl_icon = 'SCENE_DATA' bl_icon = 'SCENE_DATA'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): @staticmethod
instance = bpy.data.scenes.new(data["name"]) def construct(data: dict) -> object:
instance.uuid = self.uuid datablock = bpy.data.scenes.new(data["name"])
datablock.uuid = data.get("uuid")
return instance return datablock
def _load_implementation(self, data, target): @staticmethod
def load(data: dict, datablock: object):
# Load other meshes metadata # Load other meshes metadata
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(datablock, data)
# Load master collection # Load master collection
load_collection_objects( load_collection_objects(
data['collection']['objects'], target.collection) data['collection']['objects'], datablock.collection)
load_collection_childrens( load_collection_childrens(
data['collection']['children'], target.collection) data['collection']['children'], datablock.collection)
if 'world' in data.keys(): if 'world' in data.keys():
target.world = bpy.data.worlds[data['world']] datablock.world = bpy.data.worlds[data['world']]
# Annotation # Annotation
if 'grease_pencil' in data.keys(): if 'grease_pencil' in data.keys():
target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']] datablock.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
if self.preferences.sync_flags.sync_render_settings: if get_preferences().sync_flags.sync_render_settings:
if 'eevee' in data.keys(): if 'eevee' in data.keys():
loader.load(target.eevee, data['eevee']) loader.load(datablock.eevee, data['eevee'])
if 'cycles' in data.keys(): if 'cycles' in data.keys():
loader.load(target.cycles, data['cycles']) loader.load(datablock.cycles, data['cycles'])
if 'render' in data.keys(): if 'render' in data.keys():
loader.load(target.render, data['render']) loader.load(datablock.render, data['render'])
if 'view_settings' in data.keys(): if 'view_settings' in data.keys():
loader.load(target.view_settings, data['view_settings']) loader.load(datablock.view_settings, data['view_settings'])
if target.view_settings.use_curve_mapping and \ if datablock.view_settings.use_curve_mapping and \
'curve_mapping' in data['view_settings']: 'curve_mapping' in data['view_settings']:
# TODO: change this ugly fix # TODO: change this ugly fix
target.view_settings.curve_mapping.white_level = data[ datablock.view_settings.curve_mapping.white_level = data[
'view_settings']['curve_mapping']['white_level'] 'view_settings']['curve_mapping']['white_level']
target.view_settings.curve_mapping.black_level = data[ datablock.view_settings.curve_mapping.black_level = data[
'view_settings']['curve_mapping']['black_level'] 'view_settings']['curve_mapping']['black_level']
target.view_settings.curve_mapping.update() datablock.view_settings.curve_mapping.update()
# Sequencer # Sequencer
sequences = data.get('sequences') sequences = data.get('sequences')
if sequences: if sequences:
# Create sequencer data # Create sequencer data
target.sequence_editor_create() datablock.sequence_editor_create()
vse = target.sequence_editor vse = datablock.sequence_editor
# Clear removed sequences # Clear removed sequences
for seq in vse.sequences_all: for seq in vse.sequences_all:
@ -437,15 +445,16 @@ class BlScene(BlDatablock):
for seq_name, seq_data in sequences.items(): for seq_name, seq_data in sequences.items():
load_sequence(seq_data, vse) load_sequence(seq_data, vse)
# If the sequence is no longer used, clear it # If the sequence is no longer used, clear it
elif target.sequence_editor and not sequences: elif datablock.sequence_editor and not sequences:
target.sequence_editor_clear() datablock.sequence_editor_clear()
# FIXME: Find a better way after the replication big refacotoring # FIXME: Find a better way after the replication big refacotoring
# Keep other user from deleting collection object by flushing their history # Keep other user from deleting collection object by flushing their history
flush_history() flush_history()
def _dump_implementation(self, data, instance=None): @staticmethod
assert(instance) def dump(datablock: object) -> dict:
stamp_uuid(datablock)
# Metadata # Metadata
scene_dumper = Dumper() scene_dumper = Dumper()
@ -458,41 +467,43 @@ class BlScene(BlDatablock):
'frame_start', 'frame_start',
'frame_end', 'frame_end',
'frame_step', 'frame_step',
'uuid'
] ]
if self.preferences.sync_flags.sync_active_camera: if get_preferences().sync_flags.sync_active_camera:
scene_dumper.include_filter.append('camera') scene_dumper.include_filter.append('camera')
data.update(scene_dumper.dump(instance)) data = scene_dumper.dump(datablock)
dump_animation_data(datablock, data)
# Master collection # Master collection
data['collection'] = {} data['collection'] = {}
data['collection']['children'] = dump_collection_children( data['collection']['children'] = dump_collection_children(
instance.collection) datablock.collection)
data['collection']['objects'] = dump_collection_objects( data['collection']['objects'] = dump_collection_objects(
instance.collection) datablock.collection)
scene_dumper.depth = 1 scene_dumper.depth = 1
scene_dumper.include_filter = None scene_dumper.include_filter = None
# Render settings # Render settings
if self.preferences.sync_flags.sync_render_settings: if get_preferences().sync_flags.sync_render_settings:
scene_dumper.include_filter = RENDER_SETTINGS scene_dumper.include_filter = RENDER_SETTINGS
data['render'] = scene_dumper.dump(instance.render) data['render'] = scene_dumper.dump(datablock.render)
if instance.render.engine == 'BLENDER_EEVEE': if datablock.render.engine == 'BLENDER_EEVEE':
scene_dumper.include_filter = EVEE_SETTINGS scene_dumper.include_filter = EVEE_SETTINGS
data['eevee'] = scene_dumper.dump(instance.eevee) data['eevee'] = scene_dumper.dump(datablock.eevee)
elif instance.render.engine == 'CYCLES': elif datablock.render.engine == 'CYCLES':
scene_dumper.include_filter = CYCLES_SETTINGS scene_dumper.include_filter = CYCLES_SETTINGS
data['cycles'] = scene_dumper.dump(instance.cycles) data['cycles'] = scene_dumper.dump(datablock.cycles)
scene_dumper.include_filter = VIEW_SETTINGS scene_dumper.include_filter = VIEW_SETTINGS
data['view_settings'] = scene_dumper.dump(instance.view_settings) data['view_settings'] = scene_dumper.dump(datablock.view_settings)
if instance.view_settings.use_curve_mapping: if datablock.view_settings.use_curve_mapping:
data['view_settings']['curve_mapping'] = scene_dumper.dump( data['view_settings']['curve_mapping'] = scene_dumper.dump(
instance.view_settings.curve_mapping) datablock.view_settings.curve_mapping)
scene_dumper.depth = 5 scene_dumper.depth = 5
scene_dumper.include_filter = [ scene_dumper.include_filter = [
'curves', 'curves',
@ -500,35 +511,35 @@ class BlScene(BlDatablock):
'location', 'location',
] ]
data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump( data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(
instance.view_settings.curve_mapping.curves) datablock.view_settings.curve_mapping.curves)
# Sequence # Sequence
vse = instance.sequence_editor vse = datablock.sequence_editor
if vse: if vse:
dumped_sequences = {} dumped_sequences = {}
for seq in vse.sequences_all: for seq in vse.sequences_all:
dumped_sequences[seq.name] = dump_sequence(seq) dumped_sequences[seq.name] = dump_sequence(seq)
data['sequences'] = dumped_sequences data['sequences'] = dumped_sequences
return data return data
def _resolve_deps_implementation(self): @staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = [] deps = []
# Master Collection # Master Collection
deps.extend(resolve_collection_dependencies(self.instance.collection)) deps.extend(resolve_collection_dependencies(datablock.collection))
# world # world
if self.instance.world: if datablock.world:
deps.append(self.instance.world) deps.append(datablock.world)
# annotations # annotations
if self.instance.grease_pencil: if datablock.grease_pencil:
deps.append(self.instance.grease_pencil) deps.append(datablock.grease_pencil)
# Sequences # Sequences
vse = self.instance.sequence_editor vse = datablock.sequence_editor
if vse: if vse:
for sequence in vse.sequences_all: for sequence in vse.sequences_all:
if sequence.type == 'MOVIE' and sequence.filepath: if sequence.type == 'MOVIE' and sequence.filepath:
@ -539,20 +550,23 @@ class BlScene(BlDatablock):
for elem in sequence.elements: for elem in sequence.elements:
sequence.append( sequence.append(
Path(bpy.path.abspath(sequence.directory), Path(bpy.path.abspath(sequence.directory),
elem.filename)) elem.filename))
return deps return deps
def diff(self): def diff(self):
exclude_path = [] exclude_path = []
if not self.preferences.sync_flags.sync_render_settings: if not get_preferences().sync_flags.sync_render_settings:
exclude_path.append("root['eevee']") exclude_path.append("root['eevee']")
exclude_path.append("root['cycles']") exclude_path.append("root['cycles']")
exclude_path.append("root['view_settings']") exclude_path.append("root['view_settings']")
exclude_path.append("root['render']") exclude_path.append("root['render']")
if not self.preferences.sync_flags.sync_active_camera: if not get_preferences().sync_flags.sync_active_camera:
exclude_path.append("root['camera']") exclude_path.append("root['camera']")
return DeepDiff(self.data, self._dump(instance=self.instance), exclude_paths=exclude_path) return DeepDiff(self.data, self._dump(datablock=self.datablock), exclude_paths=exclude_path)
_type = bpy.types.Scene
_class = BlScene

View File

@ -34,19 +34,19 @@ class BlSound(BlDatablock):
bl_icon = 'SOUND' bl_icon = 'SOUND'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): def construct(data: dict) -> object:
filename = data.get('filename') filename = data.get('filename')
return bpy.data.sounds.load(get_filepath(filename)) return bpy.data.sounds.load(get_filepath(filename))
def _load(self, data, target): def load(self, data, target):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(target, data)
def diff(self): def diff(self):
return False return False
def _dump(self, instance=None): def dump(self, instance=None):
filename = Path(instance.filepath).name filename = Path(instance.filepath).name
if not filename: if not filename:
@ -57,11 +57,12 @@ class BlSound(BlDatablock):
'name': instance.name 'name': instance.name
} }
def _resolve_deps_implementation(self): @staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = [] deps = []
if self.instance.filepath and self.instance.filepath != '<builtin>': if datablock.filepath and datablock.filepath != '<builtin>':
ensure_unpacked(self.instance) ensure_unpacked(datablock)
deps.append(Path(bpy.path.abspath(self.instance.filepath))) deps.append(Path(bpy.path.abspath(datablock.filepath)))
return deps return deps

View File

@ -30,14 +30,14 @@ class BlSpeaker(BlDatablock):
bl_icon = 'SPEAKER' bl_icon = 'SPEAKER'
bl_reload_parent = False bl_reload_parent = False
def _load_implementation(self, data, target): def load(data: dict, datablock: object):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(target, data)
def _construct(self, data): def construct(data: dict) -> object:
return bpy.data.speakers.new(data["name"]) return bpy.data.speakers.new(data["name"])
def _dump_implementation(self, data, instance=None): def dump(datablock: object) -> dict:
assert(instance) assert(instance)
dumper = Dumper() dumper = Dumper()
@ -60,11 +60,11 @@ class BlSpeaker(BlDatablock):
return dumper.dump(instance) return dumper.dump(instance)
def _resolve_deps_implementation(self): @staticmethod
# TODO: resolve material def resolve_deps(datablock: object) -> [object]:
deps = [] deps = []
sound = self.instance.sound sound = datablock.sound
if sound: if sound:
deps.append(sound) deps.append(sound)

View File

@ -30,14 +30,14 @@ class BlTexture(BlDatablock):
bl_icon = 'TEXTURE' bl_icon = 'TEXTURE'
bl_reload_parent = False bl_reload_parent = False
def _load_implementation(self, data, target): def load(data: dict, datablock: object):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(target, data)
def _construct(self, data): def construct(data: dict) -> object:
return bpy.data.textures.new(data["name"], data["type"]) return bpy.data.textures.new(data["name"], data["type"])
def _dump_implementation(self, data, instance=None): def dump(datablock: object) -> dict:
assert(instance) assert(instance)
dumper = Dumper() dumper = Dumper()
@ -61,11 +61,11 @@ class BlTexture(BlDatablock):
return data return data
def _resolve_deps_implementation(self): @staticmethod
# TODO: resolve material def resolve_deps(datablock: object) -> [object]:
deps = [] deps = []
image = getattr(self.instance,"image", None) image = getattr(datablock,"image", None)
if image: if image:
deps.append(image) deps.append(image)

View File

@ -31,7 +31,7 @@ class BlVolume(BlDatablock):
bl_icon = 'VOLUME_DATA' bl_icon = 'VOLUME_DATA'
bl_reload_parent = False bl_reload_parent = False
def _load_implementation(self, data, target): def load(data: dict, datablock: object):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(target, data)
loader.load(target.display, data['display']) loader.load(target.display, data['display'])
@ -41,10 +41,10 @@ class BlVolume(BlDatablock):
if src_materials: if src_materials:
load_materials_slots(src_materials, target.materials) load_materials_slots(src_materials, target.materials)
def _construct(self, data): def construct(data: dict) -> object:
return bpy.data.volumes.new(data["name"]) return bpy.data.volumes.new(data["name"])
def _dump_implementation(self, data, instance=None): def dump(datablock: object) -> dict:
assert(instance) assert(instance)
dumper = Dumper() dumper = Dumper()
@ -69,15 +69,15 @@ class BlVolume(BlDatablock):
return data return data
def _resolve_deps_implementation(self): @staticmethod
# TODO: resolve material def resolve_deps(datablock: object) -> [object]:
deps = [] deps = []
external_vdb = Path(bpy.path.abspath(self.instance.filepath)) external_vdb = Path(bpy.path.abspath(datablock.filepath))
if external_vdb.exists() and not external_vdb.is_dir(): if external_vdb.exists() and not external_vdb.is_dir():
deps.append(external_vdb) deps.append(external_vdb)
for material in self.instance.materials: for material in datablock.materials:
if material: if material:
deps.append(material) deps.append(material)

View File

@ -33,10 +33,10 @@ class BlWorld(BlDatablock):
bl_icon = 'WORLD_DATA' bl_icon = 'WORLD_DATA'
bl_reload_parent = False bl_reload_parent = False
def _construct(self, data): def construct(data: dict) -> object:
return bpy.data.worlds.new(data["name"]) return bpy.data.worlds.new(data["name"])
def _load_implementation(self, data, target): def load(data: dict, datablock: object):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(target, data)
@ -46,7 +46,7 @@ class BlWorld(BlDatablock):
load_node_tree(data['node_tree'], target.node_tree) load_node_tree(data['node_tree'], target.node_tree)
def _dump_implementation(self, data, instance=None): def dump(datablock: object) -> dict:
assert(instance) assert(instance)
world_dumper = Dumper() world_dumper = Dumper()
@ -62,11 +62,11 @@ class BlWorld(BlDatablock):
return data return data
def _resolve_deps_implementation(self): @staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = [] deps = []
if self.instance.use_nodes: if datablock.use_nodes:
deps.extend(get_node_tree_dependencies(self.instance.node_tree)) deps.extend(get_node_tree_dependencies(datablock.node_tree))
if self.is_library:
deps.append(self.instance.library)
return deps return deps

View File

@ -45,13 +45,13 @@ from bpy.app.handlers import persistent
from bpy_extras.io_utils import ExportHelper, ImportHelper from bpy_extras.io_utils import ExportHelper, ImportHelper
from replication.constants import (COMMITED, FETCHED, RP_COMMON, STATE_ACTIVE, from replication.constants import (COMMITED, FETCHED, RP_COMMON, STATE_ACTIVE,
STATE_INITIAL, STATE_SYNCING, UP) STATE_INITIAL, STATE_SYNCING, UP)
from replication.data import DataTranslationProtocol from replication.protocol import DataTranslationProtocol
from replication.exception import ContextError, NonAuthorizedOperationError from replication.exception import ContextError, NonAuthorizedOperationError
from replication.interface import session from replication.interface import session
from replication.porcelain import add, apply from replication import porcelain
from replication.repository import Repository from replication.repository import Repository
from . import bl_types, environment, timers, ui, utils from . import io_bpy, environment, timers, ui, utils
from .presence import SessionStatusWidget, renderer, view3d_find from .presence import SessionStatusWidget, renderer, view3d_find
from .timers import registry from .timers import registry
@ -59,6 +59,7 @@ background_execution_queue = Queue()
deleyables = [] deleyables = []
stop_modal_executor = False stop_modal_executor = False
def session_callback(name): def session_callback(name):
""" Session callback wrapper """ Session callback wrapper
@ -89,7 +90,7 @@ def initialize_session():
logging.error(f"Can't construct node {node}") logging.error(f"Can't construct node {node}")
elif node_ref.state == FETCHED: elif node_ref.state == FETCHED:
node_ref.resolve() node_ref.resolve()
# Step 2: Load nodes # Step 2: Load nodes
logging.info("Loading nodes") logging.info("Loading nodes")
for node in session.repository.list_ordered(): for node in session.repository.list_ordered():
@ -140,7 +141,8 @@ def on_connection_end(reason="none"):
if isinstance(handler, logging.FileHandler): if isinstance(handler, logging.FileHandler):
logger.removeHandler(handler) logger.removeHandler(handler)
if reason != "user": if reason != "user":
bpy.ops.session.notify('INVOKE_DEFAULT', message=f"Disconnected from session. Reason: {reason}. ") bpy.ops.session.notify(
'INVOKE_DEFAULT', message=f"Disconnected from session. Reason: {reason}. ")
# OPERATORS # OPERATORS
@ -189,108 +191,77 @@ class SessionStartOperator(bpy.types.Operator):
handler.setFormatter(formatter) handler.setFormatter(formatter)
bpy_protocol = DataTranslationProtocol() bpy_protocol = io_bpy.get_data_translation_protocol()
supported_bl_types = []
# init the factory with supported types # Check if supported_datablocks are up to date before starting the
for type in bl_types.types_to_register(): # the session
type_module = getattr(bl_types, type) for impl in bpy_protocol.implementations.values():
name = [e.capitalize() for e in type.split('_')[1:]] if impl.__name__ not in settings.supported_datablocks:
type_impl_name = 'Bl'+''.join(name) logging.info(f"{impl.__name__} not found, \
type_module_class = getattr(type_module, type_impl_name)
supported_bl_types.append(type_module_class.bl_id)
if type_impl_name not in settings.supported_datablocks:
logging.info(f"{type_impl_name} not found, \
regenerate type settings...") regenerate type settings...")
settings.generate_supported_types() settings.generate_supported_types()
type_local_config = settings.supported_datablocks[type_impl_name] # Ensure blender 2.8 compatibility
bpy_protocol.register_type(
type_module_class.bl_class,
type_module_class,
check_common=type_module_class.bl_check_common)
if bpy.app.version[1] >= 91: if bpy.app.version[1] >= 91:
python_binary_path = sys.executable python_binary_path = sys.executable
else: else:
python_binary_path = bpy.app.binary_path_python python_binary_path = bpy.app.binary_path_python
repo = Repository(data_protocol=bpy_protocol) # HOST
# Host a session
if self.host: if self.host:
if settings.init_method == 'EMPTY': if settings.init_method == 'EMPTY':
utils.clean_scene() utils.clean_scene()
runtime_settings.is_host = True # Start the server locally
runtime_settings.internet_ip = environment.get_ip() server = porcelain.serve(port=settings.port,
timeout=settings.connection_timeout,
admin_password=admin_pass,
log_directory=settings.cache_directory)
try: # Init repository
# Init repository repo = porcelain.init(bare=False,
for scene in bpy.data.scenes: data_protocol=bpy_protocol)
add(repo, scene)
session.host( # Add the existing scenes
repository= repo, for scene in bpy.data.scenes:
id=settings.username, porcelain.add(repo, scene)
port=settings.port,
timeout=settings.connection_timeout, porcelain.remote_add(repo,
password=admin_pass, 'server',
cache_directory=settings.cache_directory, '127.0.0.1',
server_log_level=logging.getLevelName( settings.port)
logging.getLogger().level), porcelain.sync(repo, 'server')
) porcelain.push(repo, 'server')
except Exception as e: # JOIN
self.report({'ERROR'}, repr(e))
logging.error(f"Error: {e}")
traceback.print_exc()
# Join a session
else: else:
if not runtime_settings.admin: utils.clean_scene()
utils.clean_scene()
# regular session, no password needed
admin_pass = None
try: repo = porcelain.clone(settings.ip, settings.ip)
session.connect(
repository= repo,
id=settings.username,
address=settings.ip,
port=settings.port,
timeout=settings.connection_timeout,
password=admin_pass
)
except Exception as e:
self.report({'ERROR'}, str(e))
logging.error(str(e))
# Background client updates service # Background client updates service
deleyables.append(timers.ClientUpdate()) # deleyables.append(timers.ClientUpdate())
deleyables.append(timers.DynamicRightSelectTimer()) # deleyables.append(timers.DynamicRightSelectTimer())
deleyables.append(timers.ApplyTimer(timeout=settings.depsgraph_update_rate)) # deleyables.append(timers.ApplyTimer(
# timeout=settings.depsgraph_update_rate))
# deleyables.append(timers.PushTimer( # deleyables.append(timers.PushTimer(
# queue=stagging, # queue=stagging,
# timeout=settings.depsgraph_update_rate # timeout=settings.depsgraph_update_rate
# )) # ))
session_update = timers.SessionStatusUpdate() # session_update = timers.SessionStatusUpdate()
session_user_sync = timers.SessionUserSync() # session_user_sync = timers.SessionUserSync()
session_background_executor = timers.MainThreadExecutor( # session_background_executor = timers.MainThreadExecutor(
execution_queue=background_execution_queue) # execution_queue=background_execution_queue)
session_listen = timers.SessionListenTimer(timeout=0.001) # session_listen = timers.SessionListenTimer(timeout=0.001)
session_listen.register() # session_listen.register()
session_update.register() # session_update.register()
session_user_sync.register() # session_user_sync.register()
session_background_executor.register() # session_background_executor.register()
deleyables.append(session_background_executor) # deleyables.append(session_background_executor)
deleyables.append(session_update) # deleyables.append(session_update)
deleyables.append(session_user_sync) # deleyables.append(session_user_sync)
deleyables.append(session_listen) # deleyables.append(session_listen)
self.report( self.report(
{'INFO'}, {'INFO'},
@ -330,7 +301,7 @@ class SessionInitOperator(bpy.types.Operator):
utils.clean_scene() utils.clean_scene()
for scene in bpy.data.scenes: for scene in bpy.data.scenes:
add(session.repository, scene) porcelain.add(session.repository, scene)
session.init() session.init()
@ -602,21 +573,21 @@ class SessionApply(bpy.types.Operator):
logging.debug(f"Running apply on {self.target}") logging.debug(f"Running apply on {self.target}")
try: try:
node_ref = session.repository.get_node(self.target) node_ref = session.repository.get_node(self.target)
apply(session.repository, porcelain.apply(session.repository,
self.target, self.target,
force=True, force=True,
force_dependencies=self.reset_dependencies) force_dependencies=self.reset_dependencies)
if node_ref.bl_reload_parent: if node_ref.bl_reload_parent:
for parent in session.repository.get_parents(self.target): for parent in session.repository.get_parents(self.target):
logging.debug(f"Refresh parent {parent}") logging.debug(f"Refresh parent {parent}")
apply(session.repository, porcelain.apply(session.repository,
parent.uuid, parent.uuid,
force=True) force=True)
except Exception as e: except Exception as e:
self.report({'ERROR'}, repr(e)) self.report({'ERROR'}, repr(e))
traceback.print_exc() traceback.print_exc()
return {"CANCELLED"} return {"CANCELLED"}
return {"FINISHED"} return {"FINISHED"}
@ -635,13 +606,14 @@ class SessionCommit(bpy.types.Operator):
def execute(self, context): def execute(self, context):
try: try:
session.commit(uuid=self.target) porcelain.commit(session.repository, uuid=self.target)
session.push(self.target) session.push(self.target)
return {"FINISHED"} return {"FINISHED"}
except Exception as e: except Exception as e:
self.report({'ERROR'}, repr(e)) self.report({'ERROR'}, repr(e))
return {"CANCELED"} return {"CANCELED"}
class ApplyArmatureOperator(bpy.types.Operator): class ApplyArmatureOperator(bpy.types.Operator):
"""Operator which runs its self from a timer""" """Operator which runs its self from a timer"""
bl_idname = "session.apply_armature_operator" bl_idname = "session.apply_armature_operator"
@ -657,14 +629,14 @@ class ApplyArmatureOperator(bpy.types.Operator):
if event.type == 'TIMER': if event.type == 'TIMER':
if session and session.state == STATE_ACTIVE: if session and session.state == STATE_ACTIVE:
nodes = session.list(filter=bl_types.bl_armature.BlArmature) nodes = session.list(filter=io_bpy.bl_armature.BlArmature)
for node in nodes: for node in nodes:
node_ref = session.repository.get_node(node) node_ref = session.repository.get_node(node)
if node_ref.state == FETCHED: if node_ref.state == FETCHED:
try: try:
apply(session.repository, node) porcelain.apply(session.repository, node)
except Exception as e: except Exception as e:
logging.error("Fail to apply armature: {e}") logging.error("Fail to apply armature: {e}")
@ -713,6 +685,7 @@ class SessionClearCache(bpy.types.Operator):
row = self.layout row = self.layout
row.label(text=f" Do you really want to remove local cache ? ") row.label(text=f" Do you really want to remove local cache ? ")
class SessionPurgeOperator(bpy.types.Operator): class SessionPurgeOperator(bpy.types.Operator):
"Remove node with lost references" "Remove node with lost references"
bl_idname = "session.purge" bl_idname = "session.purge"
@ -757,7 +730,6 @@ class SessionNotifyOperator(bpy.types.Operator):
layout = self.layout layout = self.layout
layout.row().label(text=self.message) layout.row().label(text=self.message)
def invoke(self, context, event): def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self) return context.window_manager.invoke_props_dialog(self)
@ -803,6 +775,7 @@ class SessionSaveBackupOperator(bpy.types.Operator, ExportHelper):
def poll(cls, context): def poll(cls, context):
return session.state == STATE_ACTIVE return session.state == STATE_ACTIVE
class SessionStopAutoSaveOperator(bpy.types.Operator): class SessionStopAutoSaveOperator(bpy.types.Operator):
bl_idname = "session.cancel_autosave" bl_idname = "session.cancel_autosave"
bl_label = "Cancel auto-save" bl_label = "Cancel auto-save"
@ -837,36 +810,31 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
def execute(self, context): def execute(self, context):
from replication.repository import Repository from replication.repository import Repository
# TODO: add filechecks
try: try:
f = gzip.open(self.filepath, "rb") f = gzip.open(self.filepath, "rb")
db = pickle.load(f) db = pickle.load(f)
except OSError as e: except OSError as e:
f = open(self.filepath, "rb") f = open(self.filepath, "rb")
db = pickle.load(f) db = pickle.load(f)
if db: if db:
logging.info(f"Reading {self.filepath}") logging.info(f"Reading {self.filepath}")
nodes = db.get("nodes") nodes = db.get("nodes")
logging.info(f"{len(nodes)} Nodes to load") logging.info(f"{len(nodes)} Nodes to load")
# init the factory with supported types # init the factory with supported types
bpy_protocol = DataTranslationProtocol() bpy_protocol = DataTranslationProtocol()
for type in bl_types.types_to_register(): for type in io_bpy.types_to_register():
type_module = getattr(bl_types, type) type_module = getattr(io_bpy, type)
name = [e.capitalize() for e in type.split('_')[1:]] name = [e.capitalize() for e in type.split('_')[1:]]
type_impl_name = 'Bl'+''.join(name) type_impl_name = 'Bl'+''.join(name)
type_module_class = getattr(type_module, type_impl_name) type_module_class = getattr(type_module, type_impl_name)
bpy_protocol.register_type( bpy_protocol.register_type(
type_module_class.bl_class, type_module_class.bl_class,
type_module_class) type_module_class)
graph = Repository() graph = Repository()
for node, node_data in nodes: for node, node_data in nodes:
@ -882,7 +850,7 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
data=node_data['data']) data=node_data['data'])
graph.do_commit(instance) graph.do_commit(instance)
instance.state = FETCHED instance.state = FETCHED
logging.info("Graph succefully loaded") logging.info("Graph succefully loaded")
utils.clean_scene() utils.clean_scene()
@ -895,15 +863,16 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
for node in graph.list_ordered(): for node in graph.list_ordered():
graph[node].apply() graph[node].apply()
return {'FINISHED'} return {'FINISHED'}
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return True return True
def menu_func_import(self, context): def menu_func_import(self, context):
self.layout.operator(SessionLoadSaveOperator.bl_idname, text='Multi-user session snapshot (.db)') self.layout.operator(SessionLoadSaveOperator.bl_idname,
text='Multi-user session snapshot (.db)')
classes = ( classes = (
@ -926,15 +895,17 @@ classes = (
SessionPurgeOperator, SessionPurgeOperator,
) )
def update_external_dependencies(): def update_external_dependencies():
nodes_ids = session.list(filter=bl_types.bl_file.BlFile) nodes_ids = session.list(filter=io_bpy.bl_file.BlFile)
for node_id in nodes_ids: for node_id in nodes_ids:
node = session.repository.get_node(node_id) node = session.repository.get_node(node_id)
if node and node.owner in [session.id, RP_COMMON] \ if node and node.owner in [session.id, RP_COMMON] \
and node.has_changed(): and node.has_changed():
session.commit(node_id) porcelain.commit(session.repository, node_id)
session.push(node_id, check_data=False) session.push(node_id, check_data=False)
def sanitize_deps_graph(remove_nodes: bool = False): def sanitize_deps_graph(remove_nodes: bool = False):
""" Cleanup the replication graph """ Cleanup the replication graph
""" """
@ -966,6 +937,7 @@ def resolve_deps_graph(dummy):
if session and session.state == STATE_ACTIVE: if session and session.state == STATE_ACTIVE:
sanitize_deps_graph(remove_nodes=True) sanitize_deps_graph(remove_nodes=True)
@persistent @persistent
def load_pre_handler(dummy): def load_pre_handler(dummy):
if session and session.state in [STATE_ACTIVE, STATE_SYNCING]: if session and session.state in [STATE_ACTIVE, STATE_SYNCING]:
@ -996,7 +968,7 @@ def depsgraph_evaluation(scene):
if update.id.uuid: if update.id.uuid:
# Retrieve local version # Retrieve local version
node = session.repository.get_node(update.id.uuid) node = session.repository.get_node(update.id.uuid)
# Check our right on this update: # Check our right on this update:
# - if its ours or ( under common and diff), launch the # - if its ours or ( under common and diff), launch the
# update process # update process
@ -1005,34 +977,33 @@ def depsgraph_evaluation(scene):
if node.state == UP: if node.state == UP:
try: try:
if node.has_changed(): if node.has_changed():
session.commit(node.uuid) porcelain.commit(session.repository, node.uuid)
session.push(node.uuid, check_data=False) session.push(node.uuid, check_data=False)
except ReferenceError: except ReferenceError:
logging.debug(f"Reference error {node.uuid}") logging.debug(f"Reference error {node.uuid}")
if not node.is_valid():
session.remove(node.uuid)
except ContextError as e: except ContextError as e:
logging.debug(e) logging.debug(e)
except Exception as e: except Exception as e:
logging.error(e) logging.error(e)
else: else:
continue continue
# A new scene is created # A new scene is created
elif isinstance(update.id, bpy.types.Scene): elif isinstance(update.id, bpy.types.Scene):
ref = session.repository.get_node_by_datablock(update.id) ref = session.repository.get_node_by_datablock(update.id)
if ref: if ref:
ref.resolve() ref.resolve()
else: else:
scn_uuid = add(session.repository, update.id) scn_uuid = porcelain.add(session.repository, update.id)
session.commit(scn_uuid) porcelain.commit(session.repository, scn_uuid)
session.push(scn_uuid, check_data=False) porcelain.push(session.repository)
def register(): def register():
from bpy.utils import register_class from bpy.utils import register_class
for cls in classes: for cls in classes:
register_class(cls) register_class(cls)
bpy.app.handlers.undo_post.append(resolve_deps_graph) bpy.app.handlers.undo_post.append(resolve_deps_graph)
bpy.app.handlers.redo_post.append(resolve_deps_graph) bpy.app.handlers.redo_post.append(resolve_deps_graph)

View File

@ -24,7 +24,7 @@ import os
from pathlib import Path from pathlib import Path
from . import bl_types, environment, addon_updater_ops, presence, ui from . import io_bpy, environment, addon_updater_ops, presence, ui
from .utils import get_preferences, get_expanded_icon from .utils import get_preferences, get_expanded_icon
from replication.constants import RP_COMMON from replication.constants import RP_COMMON
from replication.interface import session from replication.interface import session
@ -407,18 +407,17 @@ class SessionPrefs(bpy.types.AddonPreferences):
def generate_supported_types(self): def generate_supported_types(self):
self.supported_datablocks.clear() self.supported_datablocks.clear()
for type in bl_types.types_to_register(): bpy_protocol = io_bpy.get_data_translation_protocol()
# init the factory with supported types
for impl in bpy_protocol.implementations.values():
new_db = self.supported_datablocks.add() new_db = self.supported_datablocks.add()
type_module = getattr(bl_types, type) new_db.name = impl.__name__
name = [e.capitalize() for e in type.split('_')[1:]] new_db.type_name = impl.__name__
type_impl_name = 'Bl'+''.join(name)
type_module_class = getattr(type_module, type_impl_name)
new_db.name = type_impl_name
new_db.type_name = type_impl_name
new_db.use_as_filter = True new_db.use_as_filter = True
new_db.icon = type_module_class.bl_icon new_db.icon = impl.bl_icon
new_db.bl_name = type_module_class.bl_id new_db.bl_name = impl.bl_id
def client_list_callback(scene, context): def client_list_callback(scene, context):
@ -502,21 +501,15 @@ class SessionProps(bpy.types.PropertyGroup):
description='Session password', description='Session password',
subtype='PASSWORD' subtype='PASSWORD'
) )
internet_ip: bpy.props.StringProperty(
name="internet ip",
default="no found",
description='Internet interface ip',
)
user_snap_running: bpy.props.BoolProperty( user_snap_running: bpy.props.BoolProperty(
default=False default=False
) )
time_snap_running: bpy.props.BoolProperty( time_snap_running: bpy.props.BoolProperty(
default=False default=False
) )
is_host: bpy.props.BoolProperty(
default=False
)
def get_preferences():
return bpy.context.preferences.addons[__package__].preferences
classes = ( classes = (
SessionUser, SessionUser,
@ -535,7 +528,7 @@ def register():
prefs = bpy.context.preferences.addons[__package__].preferences prefs = bpy.context.preferences.addons[__package__].preferences
if len(prefs.supported_datablocks) == 0: if len(prefs.supported_datablocks) == 0:
logging.debug('Generating bl_types preferences') logging.debug('Generating io_bpy preferences')
prefs.generate_supported_types() prefs.generate_supported_types()

View File

@ -24,7 +24,7 @@ from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE,
STATE_SRV_SYNC, STATE_SYNCING, UP) STATE_SRV_SYNC, STATE_SYNCING, UP)
from replication.exception import NonAuthorizedOperationError, ContextError from replication.exception import NonAuthorizedOperationError, ContextError
from replication.interface import session from replication.interface import session
from replication.porcelain import apply, add from replication import porcelain
from . import operators, utils from . import operators, utils
from .presence import (UserFrustumWidget, UserNameWidget, UserSelectionWidget, from .presence import (UserFrustumWidget, UserNameWidget, UserSelectionWidget,
@ -115,7 +115,7 @@ class ApplyTimer(Timer):
if node_ref.state == FETCHED: if node_ref.state == FETCHED:
try: try:
apply(session.repository, node) porcelain.apply(session.repository, node)
except Exception as e: except Exception as e:
logging.error(f"Fail to apply {node_ref.uuid}") logging.error(f"Fail to apply {node_ref.uuid}")
traceback.print_exc() traceback.print_exc()
@ -123,7 +123,7 @@ class ApplyTimer(Timer):
if node_ref.bl_reload_parent: if node_ref.bl_reload_parent:
for parent in session.repository.get_parents(node): for parent in session.repository.get_parents(node):
logging.debug("Refresh parent {node}") logging.debug("Refresh parent {node}")
apply(session.repository, porcelain.apply(session.repository,
parent.uuid, parent.uuid,
force=True) force=True)
@ -168,7 +168,7 @@ class DynamicRightSelectTimer(Timer):
if registered_gp.owner == settings.username: if registered_gp.owner == settings.username:
gp_node = session.repository.get_node(annotation_gp.uuid) gp_node = session.repository.get_node(annotation_gp.uuid)
if gp_node.has_changed(): if gp_node.has_changed():
session.commit(gp_node.uuid) porcelain.commit(session.repository, gp_node.uuid)
session.push(gp_node.uuid, check_data=False) session.push(gp_node.uuid, check_data=False)
elif self._annotating: elif self._annotating:
@ -255,7 +255,8 @@ class DynamicRightSelectTimer(Timer):
for obj in bpy.data.objects: for obj in bpy.data.objects:
object_uuid = getattr(obj, 'uuid', None) object_uuid = getattr(obj, 'uuid', None)
if object_uuid: if object_uuid:
is_selectable = not session.is_readonly(object_uuid) node = session.repository.get_node(object_uuid)
is_selectable = not node.owner in [settings.username, RP_COMMON]
if obj.hide_select != is_selectable: if obj.hide_select != is_selectable:
obj.hide_select = is_selectable obj.hide_select = is_selectable
@ -271,8 +272,7 @@ class ClientUpdate(Timer):
if session and renderer: if session and renderer:
if session.state in [STATE_ACTIVE, STATE_LOBBY]: if session.state in [STATE_ACTIVE, STATE_LOBBY]:
local_user = session.online_users.get( local_user = session.online_users.get(settings.username)
settings.username)
if not local_user: if not local_user:
return return

View File

@ -111,8 +111,6 @@ class SESSION_PT_settings(bpy.types.Panel):
row= layout.row() row= layout.row()
if current_state in [STATE_ACTIVE] and runtime_settings.is_host:
info_msg = f"LAN: {runtime_settings.internet_ip}"
if current_state == STATE_LOBBY: if current_state == STATE_LOBBY:
info_msg = "Waiting for the session to start." info_msg = "Waiting for the session to start."

View File

@ -5,10 +5,9 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.bl_types.bl_action import BlAction from multi_user.io_bpy.bl_action import BlAction
INTERPOLATION = ['CONSTANT', 'LINEAR', 'BEZIER', 'SINE', 'QUAD', 'CUBIC', 'QUART', 'QUINT', 'EXPO', 'CIRC', 'BACK', 'BOUNCE', 'ELASTIC'] INTERPOLATION = ['CONSTANT', 'LINEAR', 'BEZIER', 'SINE', 'QUAD', 'CUBIC', 'QUART', 'QUINT', 'EXPO', 'CIRC', 'BACK', 'BOUNCE', 'ELASTIC']
FMODIFIERS = ['GENERATOR', 'FNGENERATOR', 'ENVELOPE', 'CYCLES', 'NOISE', 'LIMITS', 'STEPPED']
# @pytest.mark.parametrize('blendname', ['test_action.blend']) # @pytest.mark.parametrize('blendname', ['test_action.blend'])
def test_action(clear_blend): def test_action(clear_blend):
@ -23,9 +22,6 @@ def test_action(clear_blend):
point.co[1] = random.randint(-10,10) point.co[1] = random.randint(-10,10)
point.interpolation = INTERPOLATION[random.randint(0, len(INTERPOLATION)-1)] point.interpolation = INTERPOLATION[random.randint(0, len(INTERPOLATION)-1)]
for mod_type in FMODIFIERS:
fcurve_sample.modifiers.new(mod_type)
bpy.ops.mesh.primitive_plane_add() bpy.ops.mesh.primitive_plane_add()
bpy.data.objects[0].animation_data_create() bpy.data.objects[0].animation_data_create()
bpy.data.objects[0].animation_data.action = datablock bpy.data.objects[0].animation_data.action = datablock

View File

@ -5,7 +5,7 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.bl_types.bl_armature import BlArmature from multi_user.io_bpy.bl_armature import BlArmature
def test_armature(clear_blend): def test_armature(clear_blend):
bpy.ops.object.armature_add() bpy.ops.object.armature_add()

View File

@ -4,7 +4,7 @@ import pytest
from deepdiff import DeepDiff from deepdiff import DeepDiff
import bpy import bpy
from multi_user.bl_types.bl_camera import BlCamera from multi_user.io_bpy.bl_camera import BlCamera
@pytest.mark.parametrize('camera_type', ['PANO','PERSP','ORTHO']) @pytest.mark.parametrize('camera_type', ['PANO','PERSP','ORTHO'])

View File

@ -5,7 +5,7 @@ from deepdiff import DeepDiff
from uuid import uuid4 from uuid import uuid4
import bpy import bpy
import random import random
from multi_user.bl_types.bl_collection import BlCollection from multi_user.io_bpy.bl_collection import BlCollection
def test_collection(clear_blend): def test_collection(clear_blend):
# Generate a collection with childrens and a cube # Generate a collection with childrens and a cube

View File

@ -5,7 +5,7 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.bl_types.bl_curve import BlCurve from multi_user.io_bpy.bl_curve import BlCurve
@pytest.mark.parametrize('curve_type', ['TEXT','BEZIER']) @pytest.mark.parametrize('curve_type', ['TEXT','BEZIER'])
def test_curve(clear_blend, curve_type): def test_curve(clear_blend, curve_type):

View File

@ -4,7 +4,7 @@ import pytest
from deepdiff import DeepDiff from deepdiff import DeepDiff
import bpy import bpy
from multi_user.bl_types.bl_gpencil import BlGpencil from multi_user.io_bpy.bl_gpencil import BlGpencil
def test_gpencil(clear_blend): def test_gpencil(clear_blend):

View File

@ -4,7 +4,7 @@ import pytest
from deepdiff import DeepDiff from deepdiff import DeepDiff
import bpy import bpy
from multi_user.bl_types.bl_lattice import BlLattice from multi_user.io_bpy.bl_lattice import BlLattice
def test_lattice(clear_blend): def test_lattice(clear_blend):

View File

@ -4,7 +4,7 @@ import pytest
from deepdiff import DeepDiff from deepdiff import DeepDiff
import bpy import bpy
from multi_user.bl_types.bl_lightprobe import BlLightprobe from multi_user.io_bpy.bl_lightprobe import BlLightprobe
@pytest.mark.skipif(bpy.app.version[1] < 83, reason="requires blender 2.83 or higher") @pytest.mark.skipif(bpy.app.version[1] < 83, reason="requires blender 2.83 or higher")

View File

@ -4,7 +4,7 @@ import pytest
from deepdiff import DeepDiff from deepdiff import DeepDiff
import bpy import bpy
from multi_user.bl_types.bl_light import BlLight from multi_user.io_bpy.bl_light import BlLight
@pytest.mark.parametrize('light_type', ['SPOT','SUN','POINT','AREA']) @pytest.mark.parametrize('light_type', ['SPOT','SUN','POINT','AREA'])

View File

@ -4,7 +4,7 @@ import pytest
from deepdiff import DeepDiff from deepdiff import DeepDiff
import bpy import bpy
from multi_user.bl_types.bl_material import BlMaterial from multi_user.io_bpy.bl_material import BlMaterial
def test_material_nodes(clear_blend): def test_material_nodes(clear_blend):

View File

@ -5,7 +5,7 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.bl_types.bl_mesh import BlMesh from multi_user.io_bpy.bl_mesh import BlMesh
@pytest.mark.parametrize('mesh_type', ['EMPTY','FILLED']) @pytest.mark.parametrize('mesh_type', ['EMPTY','FILLED'])
def test_mesh(clear_blend, mesh_type): def test_mesh(clear_blend, mesh_type):

View File

@ -4,7 +4,7 @@ import pytest
from deepdiff import DeepDiff from deepdiff import DeepDiff
import bpy import bpy
from multi_user.bl_types.bl_metaball import BlMetaball from multi_user.io_bpy.bl_metaball import BlMetaball
@pytest.mark.parametrize('metaballs_type', ['PLANE','CAPSULE','BALL','ELLIPSOID','CUBE']) @pytest.mark.parametrize('metaballs_type', ['PLANE','CAPSULE','BALL','ELLIPSOID','CUBE'])

View File

@ -5,7 +5,7 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.bl_types.bl_object import BlObject from multi_user.io_bpy.bl_object import BlObject
# Removed 'BUILD', 'SOFT_BODY' modifier because the seed doesn't seems to be # Removed 'BUILD', 'SOFT_BODY' modifier because the seed doesn't seems to be
# correctly initialized (#TODO: report the bug) # correctly initialized (#TODO: report the bug)

View File

@ -5,7 +5,7 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.bl_types.bl_scene import BlScene from multi_user.io_bpy.bl_scene import BlScene
from multi_user.utils import get_preferences from multi_user.utils import get_preferences
def test_scene(clear_blend): def test_scene(clear_blend):

View File

@ -5,7 +5,7 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.bl_types.bl_speaker import BlSpeaker from multi_user.io_bpy.bl_speaker import BlSpeaker
def test_speaker(clear_blend): def test_speaker(clear_blend):
bpy.ops.object.speaker_add() bpy.ops.object.speaker_add()

View File

@ -5,7 +5,7 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.bl_types.bl_texture import BlTexture from multi_user.io_bpy.bl_texture import BlTexture
TEXTURE_TYPES = ['NONE', 'BLEND', 'CLOUDS', 'DISTORTED_NOISE', 'IMAGE', 'MAGIC', 'MARBLE', 'MUSGRAVE', 'NOISE', 'STUCCI', 'VORONOI', 'WOOD'] TEXTURE_TYPES = ['NONE', 'BLEND', 'CLOUDS', 'DISTORTED_NOISE', 'IMAGE', 'MAGIC', 'MARBLE', 'MUSGRAVE', 'NOISE', 'STUCCI', 'VORONOI', 'WOOD']

View File

@ -5,7 +5,7 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.bl_types.bl_volume import BlVolume from multi_user.io_bpy.bl_volume import BlVolume
def test_volume(clear_blend): def test_volume(clear_blend):
datablock = bpy.data.volumes.new("Test") datablock = bpy.data.volumes.new("Test")

View File

@ -5,7 +5,7 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.bl_types.bl_world import BlWorld from multi_user.io_bpy.bl_world import BlWorld
def test_world(clear_blend): def test_world(clear_blend):
datablock = bpy.data.worlds.new('test') datablock = bpy.data.worlds.new('test')