Compare commits

...

13 Commits

24 changed files with 301 additions and 152 deletions

View File

@@ -59,6 +59,7 @@ def register():
         from . import presence
         from . import operators
+        from . import handlers
         from . import ui
         from . import preferences
         from . import addon_updater_ops
@@ -67,6 +68,7 @@ def register():
         addon_updater_ops.register(bl_info)
         presence.register()
         operators.register()
+        handlers.register()
         ui.register()
     except ModuleNotFoundError as e:
         raise Exception(module_error_msg)
@@ -87,6 +89,7 @@ def register():
 def unregister():
     from . import presence
     from . import operators
+    from . import handlers
     from . import ui
     from . import preferences
     from . import addon_updater_ops
@@ -96,6 +99,7 @@ def unregister():
     presence.unregister()
     addon_updater_ops.unregister()
     ui.unregister()
+    handlers.unregister()
     operators.unregister()
     preferences.unregister()

View File

@@ -219,7 +219,7 @@ def load_fcurve(fcurve_data, fcurve):
 def dump_animation_data(datablock):
     animation_data = {}
     if has_action(datablock):
-        animation_data['action'] = datablock.animation_data.action.name
+        animation_data['action'] = datablock.animation_data.action.uuid
     if has_driver(datablock):
         animation_data['drivers'] = []
         for driver in datablock.animation_data.drivers:
@@ -241,8 +241,10 @@ def load_animation_data(animation_data, datablock):
         for driver in animation_data['drivers']:
             load_driver(datablock, driver)

-    if 'action' in animation_data:
-        datablock.animation_data.action = bpy.data.actions[animation_data['action']]
+    action = animation_data.get('action')
+    if action:
+        action = resolve_datablock_from_uuid(action, bpy.data.actions)
+        datablock.animation_data.action = action
     elif datablock.animation_data.action:
         datablock.animation_data.action = None
@@ -259,6 +261,8 @@ def resolve_animation_dependencies(datablock):
 class BlAction(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "actions"
     bl_class = bpy.types.Action
     bl_check_common = False
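
The load path above now resolves the action through its replication uuid rather than its rename-prone datablock name. resolve_datablock_from_uuid itself lives in bl_datablock (this changeset only imports it), so the snippet below is just a minimal sketch of the lookup it presumably performs, not the actual implementation:

# Hypothetical sketch only: find the datablock carrying a given replication
# uuid inside a bpy.data collection such as bpy.data.actions.
def resolve_datablock_from_uuid(uuid, collection):
    for datablock in collection:
        if getattr(datablock, 'uuid', None) == uuid:
            return datablock
    return None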

View File

@@ -37,6 +37,8 @@ def get_roll(bone: bpy.types.Bone) -> float:
 class BlArmature(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "armatures"
     bl_class = bpy.types.Armature
     bl_check_common = False

View File

@@ -26,6 +26,8 @@ from .bl_action import dump_animation_data, load_animation_data, resolve_animati
 class BlCamera(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "cameras"
     bl_class = bpy.types.Camera
     bl_check_common = False

View File

@@ -137,6 +137,8 @@ SPLINE_METADATA = [
 class BlCurve(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "curves"
     bl_class = bpy.types.Curve
     bl_check_common = False

View File

@@ -29,6 +29,8 @@ POINT = ['co', 'weight_softbody', 'co_deform']
 class BlLattice(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "lattices"
     bl_class = bpy.types.Lattice
     bl_check_common = False

View File

@@ -26,6 +26,8 @@ from .bl_action import dump_animation_data, load_animation_data, resolve_animati
 class BlLight(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "lights"
     bl_class = bpy.types.Light
     bl_check_common = False

View File

@@ -25,6 +25,8 @@ from replication.protocol import ReplicatedDatablock
 from .bl_datablock import resolve_datablock_from_uuid

 class BlLightprobe(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "lightprobes"
     bl_class = bpy.types.LightProbe
     bl_check_common = False

View File

@@ -397,11 +397,14 @@ def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_
 class BlMaterial(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "materials"
     bl_class = bpy.types.Material
     bl_check_common = False
     bl_icon = 'MATERIAL_DATA'
     bl_reload_parent = False
+    bl_reload_child = True

     @staticmethod
     def construct(data: dict) -> object:
@@ -409,8 +412,6 @@ class BlMaterial(ReplicatedDatablock):
     @staticmethod
     def load(data: dict, datablock: object):
-        load_animation_data(data.get('animation_data'), datablock)
-
         loader = Loader()
         is_grease_pencil = data.get('is_grease_pencil')
@@ -427,6 +428,8 @@ class BlMaterial(ReplicatedDatablock):
             datablock.use_nodes = True
             load_node_tree(data['node_tree'], datablock.node_tree)
+            load_animation_data(data.get('nodes_animation_data'), datablock.node_tree)
+        load_animation_data(data.get('animation_data'), datablock)

     @staticmethod
     def dump(datablock: object) -> dict:
@@ -494,8 +497,10 @@ class BlMaterial(ReplicatedDatablock):
             data['grease_pencil'] = gp_mat_dumper.dump(datablock.grease_pencil)
         elif datablock.use_nodes:
             data['node_tree'] = dump_node_tree(datablock.node_tree)
+            data['nodes_animation_data'] = dump_animation_data(datablock.node_tree)
+
         data['animation_data'] = dump_animation_data(datablock)

         return data

     @staticmethod
@@ -509,7 +514,7 @@ class BlMaterial(ReplicatedDatablock):
         if datablock.use_nodes:
             deps.extend(get_node_tree_dependencies(datablock.node_tree))
-
+            deps.extend(resolve_animation_dependencies(datablock.node_tree))

         deps.extend(resolve_animation_dependencies(datablock))

         return deps
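
Besides use_delta, BlMaterial now declares bl_reload_child = True next to the existing bl_reload_parent = False. The flag is consumed by the apply paths later in this changeset (the SessionApply operator and the ApplyTimer), which re-apply a node's dependencies after the node itself. A condensed sketch of that decision, with porcelain.apply replaced by a stand-in:

# Condensed sketch of the parent/child reload logic added further down;
# `apply_node` stands in for porcelain.apply(repository, uuid, force=True).
def reload_related(repository, impl, node_ref, target_uuid, apply_node):
    if impl.bl_reload_parent:
        # e.g. a mesh changed: refresh the objects that embed it
        for parent in repository.graph.get_parents(target_uuid):
            apply_node(parent.uuid)
    if getattr(impl, 'bl_reload_child', False):
        # e.g. a material changed: refresh its dependencies (node trees, images)
        for dep_uuid in node_ref.dependencies:
            apply_node(dep_uuid)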

View File

@@ -55,6 +55,8 @@ POLYGON = [
 ]

 class BlMesh(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "meshes"
     bl_class = bpy.types.Mesh
     bl_check_common = False

View File

@@ -65,6 +65,8 @@ def load_metaball_elements(elements_data, elements):
 class BlMetaball(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "metaballs"
     bl_class = bpy.types.MetaBall
     bl_check_common = False

View File

@@ -28,6 +28,8 @@ from .bl_datablock import resolve_datablock_from_uuid
 from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

 class BlNodeGroup(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "node_groups"
     bl_class = bpy.types.NodeTree
     bl_check_common = False

View File

@@ -493,6 +493,8 @@ def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_
 class BlObject(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "objects"
     bl_class = bpy.types.Object
     bl_check_common = False

View File

@@ -41,6 +41,8 @@ IGNORED_ATTR = [
 ]

 class BlParticle(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "particles"
     bl_class = bpy.types.ParticleSettings
     bl_icon = "PARTICLES"

View File

@@ -25,6 +25,8 @@ from .bl_datablock import resolve_datablock_from_uuid
 from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

 class BlSpeaker(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "speakers"
     bl_class = bpy.types.Speaker
     bl_check_common = False

View File

@@ -26,6 +26,8 @@ from .bl_action import dump_animation_data, load_animation_data, resolve_animati
 import bpy.types as T

 class BlTexture(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "textures"
     bl_class = bpy.types.Texture
     bl_check_common = False

View File

@@ -27,6 +27,8 @@ from .bl_material import dump_materials_slots, load_materials_slots
 from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

 class BlVolume(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "volumes"
     bl_class = bpy.types.Volume
     bl_check_common = False

View File

@@ -30,6 +30,8 @@ from .bl_action import dump_animation_data, load_animation_data, resolve_animati
 class BlWorld(ReplicatedDatablock):
+    use_delta = True
+
     bl_id = "worlds"
     bl_class = bpy.types.World
     bl_check_common = True

multi_user/handlers.py (new file, 150 lines)
View File

@@ -0,0 +1,150 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####

import logging

import bpy
from bpy.app.handlers import persistent
from replication import porcelain
from replication.constants import RP_COMMON, STATE_ACTIVE, STATE_SYNCING, UP
from replication.exception import ContextError, NonAuthorizedOperationError
from replication.interface import session

from . import shared_data, utils


def sanitize_deps_graph(remove_nodes: bool = False):
    """ Cleanup the replication graph
    """
    if session and session.state == STATE_ACTIVE:
        start = utils.current_milli_time()
        rm_cpt = 0
        for node in session.repository.graph.values():
            node.instance = session.repository.rdp.resolve(node.data)
            if node is None \
                    or (node.state == UP and not node.instance):
                if remove_nodes:
                    try:
                        porcelain.rm(session.repository,
                                     node.uuid,
                                     remove_dependencies=False)
                        logging.info(f"Removing {node.uuid}")
                        rm_cpt += 1
                    except NonAuthorizedOperationError:
                        continue
        logging.info(f"Sanitize took { utils.current_milli_time()-start} ms, removed {rm_cpt} nodes")


def update_external_dependencies():
    """Force external dependencies (files such as images) evaluation
    """
    nodes_ids = [n.uuid for n in session.repository.graph.values() if n.data['type_id'] in ['WindowsPath', 'PosixPath']]
    for node_id in nodes_ids:
        node = session.repository.graph.get(node_id)
        if node and node.owner in [session.repository.username, RP_COMMON]:
            porcelain.commit(session.repository, node_id)
            porcelain.push(session.repository, 'origin', node_id)


@persistent
def on_scene_update(scene):
    """Forward blender depsgraph update to replication
    """
    if session and session.state == STATE_ACTIVE:
        context = bpy.context
        blender_depsgraph = bpy.context.view_layer.depsgraph
        dependency_updates = [u for u in blender_depsgraph.updates]
        settings = utils.get_preferences()

        incoming_updates = shared_data.session.applied_updates
        distant_update = [getattr(u.id, 'uuid', None) for u in dependency_updates if getattr(u.id, 'uuid', None) in incoming_updates]
        if distant_update:
            for u in distant_update:
                shared_data.session.applied_updates.remove(u)
            logging.debug(f"Ignoring distant update of {dependency_updates[0].id.name}")
            return

        update_external_dependencies()

        # NOTE: maybe we don't need to check each update but only the first
        for update in reversed(dependency_updates):
            update_uuid = getattr(update.id, 'uuid', None)
            if update_uuid:
                node = session.repository.graph.get(update.id.uuid)
                check_common = session.repository.rdp.get_implementation(update.id).bl_check_common

                if node and (node.owner == session.repository.username or check_common):
                    logging.debug(f"Evaluate {update.id.name}")
                    if node.state == UP:
                        try:
                            porcelain.commit(session.repository, node.uuid)
                            porcelain.push(session.repository,
                                           'origin', node.uuid)
                        except ReferenceError:
                            logging.debug(f"Reference error {node.uuid}")
                        except ContextError as e:
                            logging.debug(e)
                        except Exception as e:
                            logging.error(e)
                    else:
                        continue
            elif isinstance(update.id, bpy.types.Scene):
                scn_uuid = porcelain.add(session.repository, update.id)
                porcelain.commit(session.repository, scn_uuid)
                porcelain.push(session.repository, 'origin', scn_uuid)


@persistent
def resolve_deps_graph(dummy):
    """Resolve deps graph

    Temporary solution to resolve each node's pointers after an Undo.
    A future solution should be to avoid storing datablock references...
    """
    if session and session.state == STATE_ACTIVE:
        sanitize_deps_graph(remove_nodes=True)


@persistent
def load_pre_handler(dummy):
    if session and session.state in [STATE_ACTIVE, STATE_SYNCING]:
        bpy.ops.session.stop()


@persistent
def update_client_frame(scene):
    if session and session.state == STATE_ACTIVE:
        porcelain.update_user_metadata(session.repository, {
            'frame_current': scene.frame_current
        })


def register():
    bpy.app.handlers.undo_post.append(resolve_deps_graph)
    bpy.app.handlers.redo_post.append(resolve_deps_graph)
    bpy.app.handlers.load_pre.append(load_pre_handler)
    bpy.app.handlers.frame_change_pre.append(update_client_frame)


def unregister():
    bpy.app.handlers.undo_post.remove(resolve_deps_graph)
    bpy.app.handlers.redo_post.remove(resolve_deps_graph)
    bpy.app.handlers.load_pre.remove(load_pre_handler)
    bpy.app.handlers.frame_change_pre.remove(update_client_frame)
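
on_scene_update has to distinguish edits made locally from depsgraph events that were merely triggered by applying a remote update; otherwise every received change would immediately be committed and pushed back. The marker list shared_data.session.applied_updates, filled by ApplyTimer in timers.py further down in this changeset, carries that information. A stripped-down illustration of the handshake, with the session and porcelain plumbing replaced by stand-ins:

# Illustration only: the echo-suppression handshake, reduced to plain lists.
# `commit_and_push` stands in for the porcelain calls; nothing here is addon API.
applied_updates = []

def apply_remote_update(uuid):
    applied_updates.append(uuid)       # mark: this change is about to arrive from the network
    # ... porcelain.apply(...) happens here and triggers a depsgraph update ...

def on_depsgraph_update(updated_uuids, commit_and_push):
    echoes = [u for u in updated_uuids if u in applied_updates]
    if echoes:
        for u in echoes:
            applied_updates.remove(u)  # consume the marker
        return                         # skip: these updates were received, not authored locally
    for u in updated_uuids:
        commit_and_push(u)             # genuine local edits get replicated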

View File

@@ -27,12 +27,12 @@ import shutil
 import string
 import sys
 import time
+import traceback
 from datetime import datetime
 from operator import itemgetter
 from pathlib import Path
 from queue import Queue
 from time import gmtime, strftime
-import traceback

 from bpy.props import FloatProperty
@@ -45,16 +45,17 @@ import bpy
 import mathutils
 from bpy.app.handlers import persistent
 from bpy_extras.io_utils import ExportHelper, ImportHelper
+from replication import porcelain
 from replication.constants import (COMMITED, FETCHED, RP_COMMON, STATE_ACTIVE,
                                    STATE_INITIAL, STATE_SYNCING, UP)
-from replication.protocol import DataTranslationProtocol
 from replication.exception import ContextError, NonAuthorizedOperationError
 from replication.interface import session
-from replication import porcelain
-from replication.repository import Repository
 from replication.objects import Node
+from replication.protocol import DataTranslationProtocol
+from replication.repository import Repository

-from . import bl_types, environment, timers, ui, utils
+from . import bl_types, environment, shared_data, timers, ui, utils
+from .handlers import on_scene_update, sanitize_deps_graph
 from .presence import SessionStatusWidget, renderer, view3d_find
 from .timers import registry
@@ -99,7 +100,7 @@ def initialize_session():
     # Step 2: Load nodes
     logging.info("Applying nodes")
-    for node in session.repository.index_sorted:
+    for node in session.repository.heads:
         porcelain.apply(session.repository, node)

     logging.info("Registering timers")
@@ -112,7 +113,7 @@ def initialize_session():
     utils.flush_history()

     # Step 6: Launch deps graph update handling
-    bpy.app.handlers.depsgraph_update_post.append(depsgraph_evaluation)
+    bpy.app.handlers.depsgraph_update_post.append(on_scene_update)


 @session_callback('on_exit')
@@ -132,8 +133,8 @@ def on_connection_end(reason="none"):
     stop_modal_executor = True

-    if depsgraph_evaluation in bpy.app.handlers.depsgraph_update_post:
-        bpy.app.handlers.depsgraph_update_post.remove(depsgraph_evaluation)
+    if on_scene_update in bpy.app.handlers.depsgraph_update_post:
+        bpy.app.handlers.depsgraph_update_post.remove(on_scene_update)

     # Step 3: remove file handled
     logger = logging.getLogger()
@@ -603,9 +604,9 @@ class SessionApply(bpy.types.Operator):
             node_ref = session.repository.graph.get(self.target)
             porcelain.apply(session.repository,
                             self.target,
-                            force=True,
-                            force_dependencies=self.reset_dependencies)
+                            force=True)
             impl = session.repository.rdp.get_implementation(node_ref.instance)
+            # NOTE: find another way to handle child and parent automatic reloading
             if impl.bl_reload_parent:
                 for parent in session.repository.graph.get_parents(self.target):
                     logging.debug(f"Refresh parent {parent}")
@@ -613,6 +614,11 @@ class SessionApply(bpy.types.Operator):
                     porcelain.apply(session.repository,
                                     parent.uuid,
                                     force=True)
+            if hasattr(impl, 'bl_reload_child') and impl.bl_reload_child:
+                for dep in node_ref.dependencies:
+                    porcelain.apply(session.repository,
+                                    dep,
+                                    force=True)
         except Exception as e:
             self.report({'ERROR'}, repr(e))
             traceback.print_exc()
@@ -636,7 +642,7 @@ class SessionCommit(bpy.types.Operator):
     def execute(self, context):
         try:
             porcelain.commit(session.repository, self.target)
-            porcelain.push(session.repository, 'origin', self.target)
+            porcelain.push(session.repository, 'origin', self.target, force=True)
             return {"FINISHED"}
         except Exception as e:
             self.report({'ERROR'}, repr(e))
@@ -684,6 +690,7 @@ class SessionPurgeOperator(bpy.types.Operator):
     def execute(self, context):
         try:
             sanitize_deps_graph(remove_nodes=True)
+            porcelain.purge_orphan_nodes(session.repository)
         except Exception as e:
             self.report({'ERROR'}, repr(e))
@@ -716,7 +723,6 @@ class SessionNotifyOperator(bpy.types.Operator):
         layout = self.layout
         layout.row().label(text=self.message)
-
     def invoke(self, context, event):
         return context.window_manager.invoke_props_dialog(self)
@@ -919,110 +925,6 @@ classes = (
 )


-def update_external_dependencies():
-    nodes_ids = [n.uuid for n in session.repository.graph.values() if n.data['type_id'] in ['WindowsPath', 'PosixPath']]
-    for node_id in nodes_ids:
-        node = session.repository.graph.get(node_id)
-        if node and node.owner in [session.repository.username, RP_COMMON]:
-            porcelain.commit(session.repository, node_id)
-            porcelain.push(session.repository,'origin', node_id)
-
-
-def sanitize_deps_graph(remove_nodes: bool = False):
-    """ Cleanup the replication graph
-    """
-    if session and session.state == STATE_ACTIVE:
-        start = utils.current_milli_time()
-        rm_cpt = 0
-        for node in session.repository.graph.values():
-            node.instance = session.repository.rdp.resolve(node.data)
-            if node is None \
-                    or (node.state == UP and not node.instance):
-                if remove_nodes:
-                    try:
-                        porcelain.rm(session.repository,
-                                     node.uuid,
-                                     remove_dependencies=False)
-                        logging.info(f"Removing {node.uuid}")
-                        rm_cpt += 1
-                    except NonAuthorizedOperationError:
-                        continue
-        logging.info(f"Sanitize took { utils.current_milli_time()-start} ms, removed {rm_cpt} nodes")
-
-
-@persistent
-def resolve_deps_graph(dummy):
-    """Resolve deps graph
-    Temporary solution to resolve each node pointers after a Undo.
-    A future solution should be to avoid storing dataclock reference...
-    """
-    if session and session.state == STATE_ACTIVE:
-        sanitize_deps_graph(remove_nodes=True)
-
-
-@persistent
-def load_pre_handler(dummy):
-    if session and session.state in [STATE_ACTIVE, STATE_SYNCING]:
-        bpy.ops.session.stop()
-
-
-@persistent
-def update_client_frame(scene):
-    if session and session.state == STATE_ACTIVE:
-        porcelain.update_user_metadata(session.repository, {
-            'frame_current': scene.frame_current
-        })
-
-
-@persistent
-def depsgraph_evaluation(scene):
-    if session and session.state == STATE_ACTIVE:
-        context = bpy.context
-        blender_depsgraph = bpy.context.view_layer.depsgraph
-        dependency_updates = [u for u in blender_depsgraph.updates]
-        settings = utils.get_preferences()
-
-        update_external_dependencies()
-
-        is_internal = [u for u in dependency_updates if u.is_updated_geometry or u.is_updated_shading or u.is_updated_transform]
-
-        # NOTE: maybe we don't need to check each update but only the first
-        if not is_internal:
-            return
-
-        for update in reversed(dependency_updates):
-            # Is the object tracked ?
-            if update.id.uuid:
-                # Retrieve local version
-                node = session.repository.graph.get(update.id.uuid)
-                check_common = session.repository.rdp.get_implementation(update.id).bl_check_common
-                # Check our right on this update:
-                #   - if its ours or ( under common and diff), launch the
-                #     update process
-                #   - if its to someone else, ignore the update
-                if node and (node.owner == session.repository.username or check_common):
-                    if node.state == UP:
-                        try:
-                            porcelain.commit(session.repository, node.uuid)
-                            porcelain.push(session.repository, 'origin', node.uuid)
-                        except ReferenceError:
-                            logging.debug(f"Reference error {node.uuid}")
-                        except ContextError as e:
-                            logging.debug(e)
-                        except Exception as e:
-                            logging.error(e)
-                    else:
-                        continue
-            # A new scene is created
-            elif isinstance(update.id, bpy.types.Scene):
-                ref = session.repository.get_node_by_datablock(update.id)
-                if ref:
-                    pass
-                else:
-                    scn_uuid = porcelain.add(session.repository, update.id)
-                    porcelain.commit(session.node_id, scn_uuid)
-                    porcelain.push(session.repository,'origin', scn_uuid)
-
-
 def register():
     from bpy.utils import register_class
@@ -1030,13 +932,6 @@ def register():
         register_class(cls)

-    bpy.app.handlers.undo_post.append(resolve_deps_graph)
-    bpy.app.handlers.redo_post.append(resolve_deps_graph)
-    bpy.app.handlers.load_pre.append(load_pre_handler)
-    bpy.app.handlers.frame_change_pre.append(update_client_frame)
-

 def unregister():
     if session and session.state == STATE_ACTIVE:
         session.disconnect()
@@ -1044,9 +939,3 @@ def unregister():
     from bpy.utils import unregister_class
     for cls in reversed(classes):
         unregister_class(cls)
-
-    bpy.app.handlers.undo_post.remove(resolve_deps_graph)
-    bpy.app.handlers.redo_post.remove(resolve_deps_graph)
-    bpy.app.handlers.load_pre.remove(load_pre_handler)
-    bpy.app.handlers.frame_change_pre.remove(update_client_frame)

multi_user/shared_data.py (new file, 48 lines)
View File

@@ -0,0 +1,48 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####

from replication.constants import STATE_INITIAL


class SessionData():
    """ A structure to easily share the current session data across the
        addon modules.

        This object will completely replace the singleton living in the
        replication interface module.
    """
    def __init__(self):
        self.repository = None  # The current repository
        self.remote = None      # The active remote
        self.server = None
        self.applied_updates = []

    @property
    def state(self):
        if self.remote is None:
            return STATE_INITIAL
        else:
            return self.remote.connection_status

    def clear(self):
        self.remote = None
        self.repository = None
        self.server = None
        self.applied_updates = []


session = SessionData()
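
Because `session` is a module-level instance, every addon module that imports shared_data reads and mutates the same state; this changeset only exercises applied_updates (appended to by the timers, consumed by handlers.on_scene_update). A hypothetical consumer, purely to illustrate the import pattern:

# Hypothetical example (not part of the changeset): any module in the addon
# can reach the same SessionData instance through the module-level `session`.
from . import shared_data
from replication.constants import STATE_INITIAL

def has_pending_remote_updates() -> bool:
    # True while updates applied from the network have not yet been
    # acknowledged by the depsgraph handler.
    return bool(shared_data.session.applied_updates)

def is_session_started() -> bool:
    return shared_data.session.state != STATE_INITIAL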

View File

@@ -31,6 +31,8 @@ from .presence import (UserFrustumWidget, UserNameWidget, UserSelectionWidget,
                        generate_user_camera, get_view_matrix, refresh_3d_view,
                        refresh_sidebar_view, renderer)

+from . import shared_data
+
 this = sys.modules[__name__]

 # Registered timers
@@ -114,6 +116,7 @@ class ApplyTimer(Timer):
                 if node_ref.state == FETCHED:
                     try:
+                        shared_data.session.applied_updates.append(node)
                         porcelain.apply(session.repository, node)
                     except Exception as e:
                         logging.error(f"Fail to apply {node_ref.uuid}")
@@ -126,6 +129,11 @@ class ApplyTimer(Timer):
                                 porcelain.apply(session.repository,
                                                 parent.uuid,
                                                 force=True)
+                        if hasattr(impl, 'bl_reload_child') and impl.bl_reload_child:
+                            for dep in node_ref.dependencies:
+                                porcelain.apply(session.repository,
+                                                dep,
+                                                force=True)


 class DynamicRightSelectTimer(Timer):
@@ -251,6 +259,7 @@ class DynamicRightSelectTimer(Timer):
                     is_selectable = not session.repository.is_node_readonly(object_uuid)
                     if obj.hide_select != is_selectable:
                         obj.hide_select = is_selectable
+                        shared_data.session.applied_updates.append(object_uuid)


 class ClientUpdate(Timer):

View File

@@ -38,6 +38,14 @@ from replication.constants import (STATE_ACTIVE, STATE_AUTH,
                                    STATE_LOBBY,
                                    CONNECTING)

+CLEARED_DATABLOCKS = ['actions', 'armatures', 'cache_files', 'cameras',
+                      'collections', 'curves', 'filepath', 'fonts',
+                      'grease_pencils', 'images', 'lattices', 'libraries',
+                      'lightprobes', 'lights', 'linestyles', 'masks',
+                      'materials', 'meshes', 'metaballs', 'movieclips',
+                      'node_groups', 'objects', 'paint_curves', 'particles',
+                      'scenes', 'shape_keys', 'sounds', 'speakers', 'texts',
+                      'textures', 'volumes', 'worlds']

 def find_from_attr(attr_name, attr_value, list):
     for item in list:
@@ -101,23 +109,25 @@ def get_state_str(state):
 def clean_scene():
-    to_delete = [f for f in dir(bpy.data) if f not in ['brushes', 'palettes']]
-    for type_name in to_delete:
-        try:
-            sub_collection_to_avoid = [bpy.data.linestyles['LineStyle'], bpy.data.materials['Dots Stroke']]
+    for type_name in CLEARED_DATABLOCKS:
+        sub_collection_to_avoid = [
+            bpy.data.linestyles.get('LineStyle'),
+            bpy.data.materials.get('Dots Stroke')
+        ]
         type_collection = getattr(bpy.data, type_name)
         items_to_remove = [i for i in type_collection if i not in sub_collection_to_avoid]
         for item in items_to_remove:
             try:
                 type_collection.remove(item)
-            except:
-                continue
+                logging.info(item.name)
             except:
                 continue

     # Clear sequencer
     bpy.context.scene.sequence_editor_clear()


 def get_selected_objects(scene, active_view_layer):
     return [obj.uuid for obj in scene.objects if obj.select_get(view_layer=active_view_layer)]