Compare commits

..

2 Commits

Author SHA1 Message Date
c06febed45 fix: compositor working 2021-06-18 17:20:12 +02:00
8c3e510231 feat: add bl_compositor + node_tree 2021-06-17 16:10:42 +02:00
45 changed files with 906 additions and 1118 deletions

View File

@ -187,33 +187,3 @@ All notable changes to this project will be documented in this file.
- Sync missing armature bone Roll - Sync missing armature bone Roll
- Sync missing driver data_path - Sync missing driver data_path
- Constraint replication - Constraint replication
## [0.4.0] - 2021-07-20
### Added
- Connection preset system (@Kysios)
- Display connected users active mode (users panel and viewport) (@Kysios)
- Delta-based replication
- Sync timeline marker
- Sync images settings (@Kysios)
- Sync parent relation type (@Kysios)
- Sync uv project modifier
- Sync FCurves modifiers
### Changed
- User selection optimizations (draw and sync) (@Kysios)
- Improved shapekey syncing performances
- Improved gpencil syncing performances
- Integrate replication as a submodule
- The dependencies are now installed in a folder(blender addon folder) that no longer requires administrative rights
- Presence overlay UI optimization (@Kysios)
### Fixed
- User selection bounding box glitches for non-mesh objects (@Kysios)
- Transforms replication for animated objects
- GPencil fill stroke
- Sculpt and GPencil brushes deleted when joining a session (@Kysios)
- Auto-updater doesn't work for master and develop builds

View File

@ -11,8 +11,9 @@ This tool aims to allow multiple users to work on the same scene over the networ
## Quick installation ## Quick installation
1. Download [latest build](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/develop/download?job=build) or [stable build](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build). 1. Download latest release [multi_user.zip](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build).
2. Install last_version.zip from your addon preferences. 2. Run blender as administrator (dependencies installation).
3. Install last_version.zip from your addon preferences.
[Dependencies](#dependencies) will be automatically added to your blender python during installation. [Dependencies](#dependencies) will be automatically added to your blender python during installation.
@ -28,35 +29,35 @@ See the [troubleshooting guide](https://slumber.gitlab.io/multi-user/getting_sta
Currently, not all data-block are supported for replication over the wire. The following list summarizes the status for each ones. Currently, not all data-block are supported for replication over the wire. The following list summarizes the status for each ones.
| Name | Status | Comment | | Name | Status | Comment |
| -------------- | :----: | :---------------------------------------------------------------------: | | -------------- | :----: | :----------------------------------------------------------: |
| action | ✔️ | | | action | ✔️ | |
| camera | ✔️ | | | camera | ✔️ | |
| collection | ✔️ | | | collection | ✔️ | |
| gpencil | ✔️ | | | gpencil | ✔️ | |
| image | ✔️ | | | image | ✔️ | |
| mesh | ✔️ | | | mesh | ✔️ | |
| material | ✔️ | | | material | ✔️ | |
| node_groups | ✔️ | Material & Geometry only | | node_groups | ✔️ | Material & Geometry only |
| geometry nodes | ✔️ | | | geometry nodes | ✔️ | |
| metaball | ✔️ | | | metaball | ✔️ | |
| object | ✔️ | | | object | ✔️ | |
| texts | ✔️ | | | texts | ✔️ | |
| scene | ✔️ | | | scene | ✔️ | |
| world | ✔️ | | | world | ✔️ | |
| volumes | ✔️ | | | volumes | ✔️ | |
| lightprobes | ✔️ | | | lightprobes | ✔️ | |
| physics | ✔️ | | | physics | ✔️ | |
| textures | ✔️ | | | curve | ❗ | Nurbs surfaces not supported |
| curve | ❗ | Nurbs surfaces not supported | | textures | ❗ | Supported for modifiers/materials/geo nodes only |
| armature | ❗ | Only for Mesh. [Planned for GPencil](https://gitlab.com/slumber/multi-user/-/issues/161). Not stable yet | | armature | ❗ | Not stable |
| particles | ❗ | The cache isn't syncing. | | particles | ❗ | The cache isn't syncing. |
| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) | | speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
| vse | ❗ | Mask and Clip not supported yet | | vse | ❗ | Mask and Clip not supported yet |
| libraries | | | | libraries | | Partial |
| nla | ❌ | | | nla | ❌ | |
| texts | ❌ | [Planned for v0.5.0](https://gitlab.com/slumber/multi-user/-/issues/81) | | texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
| compositing | ❌ | [Planned for v0.5.0](https://gitlab.com/slumber/multi-user/-/issues/46) | | compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |

View File

@ -19,10 +19,10 @@ import sys
project = 'multi-user' project = 'multi-user'
copyright = '2020, Swann Martinez' copyright = '2020, Swann Martinez'
author = 'Swann Martinez, Poochy, Fabian' author = 'Swann Martinez, with contributions from Poochy'
# The full version, including alpha/beta/rc tags # The full version, including alpha/beta/rc tags
release = '0.5.0-develop' release = '0.2.0'
# -- General configuration --------------------------------------------------- # -- General configuration ---------------------------------------------------

Binary file not shown.

Before

Width:  |  Height:  |  Size: 15 KiB

After

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 22 KiB

After

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 18 KiB

After

Width:  |  Height:  |  Size: 17 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 20 KiB

After

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 365 KiB

After

Width:  |  Height:  |  Size: 70 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 26 KiB

After

Width:  |  Height:  |  Size: 18 KiB

View File

@ -215,10 +215,8 @@ One of the most vital tools is the **Online user panel**. It lists all connected
users' information including your own: users' information including your own:
* **Role** : if a user is an admin or a regular user. * **Role** : if a user is an admin or a regular user.
* **Username** : Name of the user.
* **Mode** : User's active editing mode (edit_mesh, paint,etc.).
* **Frame**: When (on which frame) the user is working.
* **Location**: Where the user is actually working. * **Location**: Where the user is actually working.
* **Frame**: When (on which frame) the user is working.
* **Ping**: user's connection delay in milliseconds * **Ping**: user's connection delay in milliseconds
.. figure:: img/quickstart_users.png .. figure:: img/quickstart_users.png
@ -275,7 +273,6 @@ it draw users' related information in your viewport such as:
* Username * Username
* User point of view * User point of view
* User active mode
* User selection * User selection
.. figure:: img/quickstart_presence.png .. figure:: img/quickstart_presence.png

View File

@ -19,9 +19,9 @@
bl_info = { bl_info = {
"name": "Multi-User", "name": "Multi-User",
"author": "Swann Martinez", "author": "Swann Martinez",
"version": (0, 4, 0), "version": (0, 5, 0),
"description": "Enable real-time collaborative workflow inside blender", "description": "Enable real-time collaborative workflow inside blender",
"blender": (2, 82, 0), "blender": (2, 93, 0),
"location": "3D View > Sidebar > Multi-User tab", "location": "3D View > Sidebar > Multi-User tab",
"warning": "Unstable addon, use it at your own risks", "warning": "Unstable addon, use it at your own risks",
"category": "Collaboration", "category": "Collaboration",
@ -59,7 +59,6 @@ def register():
from . import presence from . import presence
from . import operators from . import operators
from . import handlers
from . import ui from . import ui
from . import preferences from . import preferences
from . import addon_updater_ops from . import addon_updater_ops
@ -68,7 +67,6 @@ def register():
addon_updater_ops.register(bl_info) addon_updater_ops.register(bl_info)
presence.register() presence.register()
operators.register() operators.register()
handlers.register()
ui.register() ui.register()
except ModuleNotFoundError as e: except ModuleNotFoundError as e:
raise Exception(module_error_msg) raise Exception(module_error_msg)
@ -89,7 +87,6 @@ def register():
def unregister(): def unregister():
from . import presence from . import presence
from . import operators from . import operators
from . import handlers
from . import ui from . import ui
from . import preferences from . import preferences
from . import addon_updater_ops from . import addon_updater_ops
@ -99,7 +96,6 @@ def unregister():
presence.unregister() presence.unregister()
addon_updater_ops.unregister() addon_updater_ops.unregister()
ui.unregister() ui.unregister()
handlers.unregister()
operators.unregister() operators.unregister()
preferences.unregister() preferences.unregister()

View File

@ -41,6 +41,7 @@ __all__ = [
'bl_node_group', 'bl_node_group',
'bl_texture', 'bl_texture',
"bl_particle", "bl_particle",
# 'bl_compositor',
] # Order here defines execution order ] # Order here defines execution order
if bpy.app.version[1] >= 91: if bpy.app.version[1] >= 91:

View File

@ -219,7 +219,7 @@ def load_fcurve(fcurve_data, fcurve):
def dump_animation_data(datablock): def dump_animation_data(datablock):
animation_data = {} animation_data = {}
if has_action(datablock): if has_action(datablock):
animation_data['action'] = datablock.animation_data.action.uuid animation_data['action'] = datablock.animation_data.action.name
if has_driver(datablock): if has_driver(datablock):
animation_data['drivers'] = [] animation_data['drivers'] = []
for driver in datablock.animation_data.drivers: for driver in datablock.animation_data.drivers:
@ -241,10 +241,8 @@ def load_animation_data(animation_data, datablock):
for driver in animation_data['drivers']: for driver in animation_data['drivers']:
load_driver(datablock, driver) load_driver(datablock, driver)
action = animation_data.get('action') if 'action' in animation_data:
if action: datablock.animation_data.action = bpy.data.actions[animation_data['action']]
action = resolve_datablock_from_uuid(action, bpy.data.actions)
datablock.animation_data.action = action
elif datablock.animation_data.action: elif datablock.animation_data.action:
datablock.animation_data.action = None datablock.animation_data.action = None
@ -261,8 +259,6 @@ def resolve_animation_dependencies(datablock):
class BlAction(ReplicatedDatablock): class BlAction(ReplicatedDatablock):
use_delta = True
bl_id = "actions" bl_id = "actions"
bl_class = bpy.types.Action bl_class = bpy.types.Action
bl_check_common = False bl_check_common = False

View File

@ -37,8 +37,6 @@ def get_roll(bone: bpy.types.Bone) -> float:
class BlArmature(ReplicatedDatablock): class BlArmature(ReplicatedDatablock):
use_delta = True
bl_id = "armatures" bl_id = "armatures"
bl_class = bpy.types.Armature bl_class = bpy.types.Armature
bl_check_common = False bl_check_common = False

View File

@ -26,8 +26,6 @@ from .bl_action import dump_animation_data, load_animation_data, resolve_animati
class BlCamera(ReplicatedDatablock): class BlCamera(ReplicatedDatablock):
use_delta = True
bl_id = "cameras" bl_id = "cameras"
bl_class = bpy.types.Camera bl_class = bpy.types.Camera
bl_check_common = False bl_check_common = False
@ -56,7 +54,7 @@ class BlCamera(ReplicatedDatablock):
background_images = data.get('background_images') background_images = data.get('background_images')
datablock.background_images.clear() datablock.background_images.clear()
# TODO: Use image uuid
if background_images: if background_images:
for img_name, img_data in background_images.items(): for img_name, img_data in background_images.items():
img_id = img_data.get('image') img_id = img_data.get('image')

View File

@ -0,0 +1,81 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
import mathutils
import logging
import re
from uuid import uuid4
from .dump_anything import Loader, Dumper
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
from .node_tree import load_node_tree, dump_node_tree, get_node_tree_dependencies
class BlCompositor(ReplicatedDatablock):
    """Replication wrapper for a scene's compositor node tree.

    Serializes the compositor graph (nodes, links, sockets — via the shared
    node_tree helpers) together with its animation data so it can be synced
    between session peers.
    """
    bl_id = "compositor"
    bl_class = bpy.types.CompositorNodeTree
    # Compositor state is scene-wide rather than per-user, hence common data.
    bl_check_common = True
    bl_icon = 'COMPOSITOR_NODE'
    bl_reload_parent = False
    @staticmethod
    def construct(data: dict) -> object:
        # NOTE(review): hard-coded to the scene literally named "Scene" —
        # fails for renamed scenes and multi-scene files (see inline TODO).
        return bpy.data.scenes["Scene"].node_tree # TODO: resolve_datablock_from_uuid for multiple scenes
    @staticmethod
    def load(data: dict, datablock: object):
        # Restore animation first, then plain properties, then the node graph.
        load_animation_data(data.get('animation_data'), datablock)
        loader = Loader()
        loader.load(datablock, data)
        # 'node_tree' is a required key here — dump() always writes it.
        load_node_tree(data['node_tree'], datablock)
    @staticmethod
    def dump(datablock: object) -> dict:
        comp_dumper = Dumper()
        # Depth 1: only dump the whitelisted scalar properties directly;
        # the node graph is serialized separately below.
        comp_dumper.depth = 1
        comp_dumper.include_filter = [
            'use_nodes',
            'name',
        ]
        data = comp_dumper.dump(datablock)
        data['node_tree'] = dump_node_tree(datablock)
        data['animation_data'] = dump_animation_data(datablock)
        return data
    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        # NOTE(review): elsewhere resolve_datablock_from_uuid takes a datablock
        # *collection* as its second argument, but a single node_tree is passed
        # here — confirm this resolves correctly (and see the "Scene" TODO above).
        return resolve_datablock_from_uuid(uuid, bpy.data.scenes["Scene"].node_tree)
    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        # Dependencies: datablocks referenced by nodes (images, groups,
        # textures) plus anything the animation data drives.
        deps = []
        deps.extend(get_node_tree_dependencies(datablock))
        deps.extend(resolve_animation_dependencies(datablock))
        return deps
# Registration pair consumed by the replication protocol loader.
_type = bpy.types.CompositorNodeTree
_class = BlCompositor

View File

@ -137,8 +137,6 @@ SPLINE_METADATA = [
class BlCurve(ReplicatedDatablock): class BlCurve(ReplicatedDatablock):
use_delta = True
bl_id = "curves" bl_id = "curves"
bl_class = bpy.types.Curve bl_class = bpy.types.Curve
bl_check_common = False bl_check_common = False

View File

@ -28,8 +28,7 @@ from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
from ..utils import get_preferences from ..utils import get_preferences
from ..timers import is_annotating
from .bl_material import load_materials_slots, dump_materials_slots
STROKE_POINT = [ STROKE_POINT = [
'co', 'co',
@ -66,9 +65,36 @@ def dump_stroke(stroke):
:param stroke: target grease pencil stroke :param stroke: target grease pencil stroke
:type stroke: bpy.types.GPencilStroke :type stroke: bpy.types.GPencilStroke
:return: (p_count, p_data) :return: dict
""" """
return (len(stroke.points), np_dump_collection(stroke.points, STROKE_POINT))
assert(stroke)
dumper = Dumper()
dumper.include_filter = [
"aspect",
"display_mode",
"draw_cyclic",
"end_cap_mode",
"hardeness",
"line_width",
"material_index",
"start_cap_mode",
"uv_rotation",
"uv_scale",
"uv_translation",
"vertex_color_fill",
]
dumped_stroke = dumper.dump(stroke)
# Stoke points
p_count = len(stroke.points)
dumped_stroke['p_count'] = p_count
dumped_stroke['points'] = np_dump_collection(stroke.points, STROKE_POINT)
# TODO: uv_factor, uv_rotation
return dumped_stroke
def load_stroke(stroke_data, stroke): def load_stroke(stroke_data, stroke):
@ -81,12 +107,12 @@ def load_stroke(stroke_data, stroke):
""" """
assert(stroke and stroke_data) assert(stroke and stroke_data)
stroke.points.add(stroke_data[0]) stroke.points.add(stroke_data["p_count"])
np_load_collection(stroke_data[1], stroke.points, STROKE_POINT) np_load_collection(stroke_data['points'], stroke.points, STROKE_POINT)
# HACK: Temporary fix to trigger a BKE_gpencil_stroke_geometry_update to # HACK: Temporary fix to trigger a BKE_gpencil_stroke_geometry_update to
# fix fill issues # fix fill issues
stroke.uv_scale = 1.0 stroke.uv_scale = stroke_data["uv_scale"]
def dump_frame(frame): def dump_frame(frame):
@ -121,12 +147,10 @@ def load_frame(frame_data, frame):
assert(frame and frame_data) assert(frame and frame_data)
# Load stroke points
for stroke_data in frame_data['strokes_points']: for stroke_data in frame_data['strokes_points']:
target_stroke = frame.strokes.new() target_stroke = frame.strokes.new()
load_stroke(stroke_data, target_stroke) load_stroke(stroke_data, target_stroke)
# Load stroke metadata
np_load_collection(frame_data['strokes'], frame.strokes, STROKE) np_load_collection(frame_data['strokes'], frame.strokes, STROKE)
@ -146,6 +170,7 @@ def dump_layer(layer):
'opacity', 'opacity',
'channel_color', 'channel_color',
'color', 'color',
# 'thickness', #TODO: enabling only for annotation
'tint_color', 'tint_color',
'tint_factor', 'tint_factor',
'vertex_paint_opacity', 'vertex_paint_opacity',
@ -162,7 +187,7 @@ def dump_layer(layer):
'hide', 'hide',
'annotation_hide', 'annotation_hide',
'lock', 'lock',
'lock_frame', # 'lock_frame',
# 'lock_material', # 'lock_material',
# 'use_mask_layer', # 'use_mask_layer',
'use_lights', 'use_lights',
@ -170,13 +195,12 @@ def dump_layer(layer):
'select', 'select',
'show_points', 'show_points',
'show_in_front', 'show_in_front',
# 'thickness'
# 'parent', # 'parent',
# 'parent_type', # 'parent_type',
# 'parent_bone', # 'parent_bone',
# 'matrix_inverse', # 'matrix_inverse',
] ]
if layer.thickness != 0: if layer.id_data.is_annotation:
dumper.include_filter.append('thickness') dumper.include_filter.append('thickness')
dumped_layer = dumper.dump(layer) dumped_layer = dumper.dump(layer)
@ -231,10 +255,10 @@ class BlGpencil(ReplicatedDatablock):
@staticmethod @staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
# MATERIAL SLOTS datablock.materials.clear()
src_materials = data.get('materials', None) if "materials" in data.keys():
if src_materials: for mat in data['materials']:
load_materials_slots(src_materials, datablock.materials) datablock.materials.append(bpy.data.materials[mat])
loader = Loader() loader = Loader()
loader.load(datablock, data) loader.load(datablock, data)
@ -262,6 +286,7 @@ class BlGpencil(ReplicatedDatablock):
dumper = Dumper() dumper = Dumper()
dumper.depth = 2 dumper.depth = 2
dumper.include_filter = [ dumper.include_filter = [
'materials',
'name', 'name',
'zdepth_offset', 'zdepth_offset',
'stroke_thickness_space', 'stroke_thickness_space',
@ -269,7 +294,7 @@ class BlGpencil(ReplicatedDatablock):
'stroke_depth_order' 'stroke_depth_order'
] ]
data = dumper.dump(datablock) data = dumper.dump(datablock)
data['materials'] = dump_materials_slots(datablock.materials)
data['layers'] = {} data['layers'] = {}
for layer in datablock.layers: for layer in datablock.layers:
@ -298,8 +323,7 @@ class BlGpencil(ReplicatedDatablock):
return bpy.context.mode == 'OBJECT' \ return bpy.context.mode == 'OBJECT' \
or layer_changed(datablock, data) \ or layer_changed(datablock, data) \
or frame_changed(data) \ or frame_changed(data) \
or get_preferences().sync_flags.sync_during_editmode \ or get_preferences().sync_flags.sync_during_editmode
or is_annotating(bpy.context)
_type = bpy.types.GreasePencil _type = bpy.types.GreasePencil
_class = BlGpencil _class = BlGpencil

View File

@ -69,12 +69,11 @@ class BlImage(ReplicatedDatablock):
@staticmethod @staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
loader = Loader() loader = Loader()
loader.load(datablock, data) loader.load(data, datablock)
# datablock.name = data.get('name')
datablock.source = 'FILE' datablock.source = 'FILE'
datablock.filepath_raw = get_filepath(data['filename']) datablock.filepath_raw = get_filepath(data['filename'])
color_space_name = data.get("colorspace") color_space_name = data["colorspace_settings"]["name"]
if color_space_name: if color_space_name:
datablock.colorspace_settings.name = color_space_name datablock.colorspace_settings.name = color_space_name
@ -93,10 +92,12 @@ class BlImage(ReplicatedDatablock):
"name", "name",
# 'source', # 'source',
'size', 'size',
'alpha_mode'] 'height',
'alpha',
'float_buffer',
'alpha_mode',
'colorspace_settings']
data.update(dumper.dump(datablock)) data.update(dumper.dump(datablock))
data['colorspace'] = datablock.colorspace_settings.name
return data return data
@staticmethod @staticmethod
@ -131,7 +132,10 @@ class BlImage(ReplicatedDatablock):
if datablock.is_dirty: if datablock.is_dirty:
datablock.save() datablock.save()
return True if not data or (datablock and (datablock.name != data.get('name'))):
return True
else:
return False
_type = bpy.types.Image _type = bpy.types.Image
_class = BlImage _class = BlImage

View File

@ -29,8 +29,6 @@ POINT = ['co', 'weight_softbody', 'co_deform']
class BlLattice(ReplicatedDatablock): class BlLattice(ReplicatedDatablock):
use_delta = True
bl_id = "lattices" bl_id = "lattices"
bl_class = bpy.types.Lattice bl_class = bpy.types.Lattice
bl_check_common = False bl_check_common = False

View File

@ -26,8 +26,6 @@ from .bl_action import dump_animation_data, load_animation_data, resolve_animati
class BlLight(ReplicatedDatablock): class BlLight(ReplicatedDatablock):
use_delta = True
bl_id = "lights" bl_id = "lights"
bl_class = bpy.types.Light bl_class = bpy.types.Light
bl_check_common = False bl_check_common = False

View File

@ -25,8 +25,6 @@ from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid from .bl_datablock import resolve_datablock_from_uuid
class BlLightprobe(ReplicatedDatablock): class BlLightprobe(ReplicatedDatablock):
use_delta = True
bl_id = "lightprobes" bl_id = "lightprobes"
bl_class = bpy.types.LightProbe bl_class = bpy.types.LightProbe
bl_check_common = False bl_check_common = False

View File

@ -28,341 +28,7 @@ from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
from .node_tree import load_node_tree, dump_node_tree, get_node_tree_dependencies
NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM']
def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
""" Load a node into a node_tree from a dict
:arg node_data: dumped node data
:type node_data: dict
:arg node_tree: target node_tree
:type node_tree: bpy.types.NodeTree
"""
loader = Loader()
target_node = node_tree.nodes.new(type=node_data["bl_idname"])
target_node.select = False
loader.load(target_node, node_data)
image_uuid = node_data.get('image_uuid', None)
node_tree_uuid = node_data.get('node_tree_uuid', None)
if image_uuid and not target_node.image:
image = resolve_datablock_from_uuid(image_uuid, bpy.data.images)
if image is None:
logging.error(f"Fail to find material image from uuid {image_uuid}")
else:
target_node.image = image
if node_tree_uuid:
target_node.node_tree = get_datablock_from_uuid(node_tree_uuid, None)
inputs_data = node_data.get('inputs')
if inputs_data:
inputs = [i for i in target_node.inputs if i.type not in IGNORED_SOCKETS]
for idx, inpt in enumerate(inputs):
if idx < len(inputs_data) and hasattr(inpt, "default_value"):
loaded_input = inputs_data[idx]
try:
if inpt.type in ['OBJECT', 'COLLECTION']:
inpt.default_value = get_datablock_from_uuid(loaded_input, None)
else:
inpt.default_value = loaded_input
except Exception as e:
logging.warning(f"Node {target_node.name} input {inpt.name} parameter not supported, skipping ({e})")
else:
logging.warning(f"Node {target_node.name} input length mismatch.")
outputs_data = node_data.get('outputs')
if outputs_data:
outputs = [o for o in target_node.outputs if o.type not in IGNORED_SOCKETS]
for idx, output in enumerate(outputs):
if idx < len(outputs_data) and hasattr(output, "default_value"):
loaded_output = outputs_data[idx]
try:
if output.type in ['OBJECT', 'COLLECTION']:
output.default_value = get_datablock_from_uuid(loaded_output, None)
else:
output.default_value = loaded_output
except Exception as e:
logging.warning(
f"Node {target_node.name} output {output.name} parameter not supported, skipping ({e})")
else:
logging.warning(
f"Node {target_node.name} output length mismatch.")
def dump_node(node: bpy.types.ShaderNode) -> dict:
""" Dump a single node to a dict
:arg node: target node
:type node: bpy.types.Node
:retrun: dict
"""
node_dumper = Dumper()
node_dumper.depth = 1
node_dumper.exclude_filter = [
"dimensions",
"show_expanded",
"name_full",
"select",
"bl_label",
"bl_height_min",
"bl_height_max",
"bl_height_default",
"bl_width_min",
"bl_width_max",
"type",
"bl_icon",
"bl_width_default",
"bl_static_type",
"show_tetxure",
"is_active_output",
"hide",
"show_options",
"show_preview",
"show_texture",
"outputs",
"width_hidden"
]
dumped_node = node_dumper.dump(node)
if node.parent:
dumped_node['parent'] = node.parent.name
dump_io_needed = (node.type not in ['REROUTE', 'OUTPUT_MATERIAL'])
if dump_io_needed:
io_dumper = Dumper()
io_dumper.depth = 2
io_dumper.include_filter = ["default_value"]
if hasattr(node, 'inputs'):
dumped_node['inputs'] = []
inputs = [i for i in node.inputs if i.type not in IGNORED_SOCKETS]
for idx, inpt in enumerate(inputs):
if hasattr(inpt, 'default_value'):
if isinstance(inpt.default_value, bpy.types.ID):
dumped_input = inpt.default_value.uuid
else:
dumped_input = io_dumper.dump(inpt.default_value)
dumped_node['inputs'].append(dumped_input)
if hasattr(node, 'outputs'):
dumped_node['outputs'] = []
for idx, output in enumerate(node.outputs):
if output.type not in IGNORED_SOCKETS:
if hasattr(output, 'default_value'):
dumped_node['outputs'].append(
io_dumper.dump(output.default_value))
if hasattr(node, 'color_ramp'):
ramp_dumper = Dumper()
ramp_dumper.depth = 4
ramp_dumper.include_filter = [
'elements',
'alpha',
'color',
'position',
'interpolation',
'hue_interpolation',
'color_mode'
]
dumped_node['color_ramp'] = ramp_dumper.dump(node.color_ramp)
if hasattr(node, 'mapping'):
curve_dumper = Dumper()
curve_dumper.depth = 5
curve_dumper.include_filter = [
'curves',
'points',
'location'
]
dumped_node['mapping'] = curve_dumper.dump(node.mapping)
if hasattr(node, 'image') and getattr(node, 'image'):
dumped_node['image_uuid'] = node.image.uuid
if hasattr(node, 'node_tree') and getattr(node, 'node_tree'):
dumped_node['node_tree_uuid'] = node.node_tree.uuid
return dumped_node
def load_links(links_data, node_tree):
""" Load node_tree links from a list
:arg links_data: dumped node links
:type links_data: list
:arg node_tree: node links collection
:type node_tree: bpy.types.NodeTree
"""
for link in links_data:
input_socket = node_tree.nodes[link['to_node']
].inputs[int(link['to_socket'])]
output_socket = node_tree.nodes[link['from_node']].outputs[int(
link['from_socket'])]
node_tree.links.new(input_socket, output_socket)
def dump_links(links):
""" Dump node_tree links collection to a list
:arg links: node links collection
:type links: bpy.types.NodeLinks
:retrun: list
"""
links_data = []
for link in links:
to_socket = NODE_SOCKET_INDEX.search(
link.to_socket.path_from_id()).group(1)
from_socket = NODE_SOCKET_INDEX.search(
link.from_socket.path_from_id()).group(1)
links_data.append({
'to_node': link.to_node.name,
'to_socket': to_socket,
'from_node': link.from_node.name,
'from_socket': from_socket,
})
return links_data
def dump_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict:
""" Dump a shader node_tree to a dict including links and nodes
:arg node_tree: dumped shader node tree
:type node_tree: bpy.types.ShaderNodeTree
:return: dict
"""
node_tree_data = {
'nodes': {node.name: dump_node(node) for node in node_tree.nodes},
'links': dump_links(node_tree.links),
'name': node_tree.name,
'type': type(node_tree).__name__
}
for socket_id in ['inputs', 'outputs']:
socket_collection = getattr(node_tree, socket_id)
node_tree_data[socket_id] = dump_node_tree_sockets(socket_collection)
return node_tree_data
def dump_node_tree_sockets(sockets: bpy.types.Collection) -> dict:
""" dump sockets of a shader_node_tree
:arg target_node_tree: target node_tree
:type target_node_tree: bpy.types.NodeTree
:arg socket_id: socket identifer
:type socket_id: str
:return: dict
"""
sockets_data = []
for socket in sockets:
try:
socket_uuid = socket['uuid']
except Exception:
socket_uuid = str(uuid4())
socket['uuid'] = socket_uuid
sockets_data.append((socket.name, socket.bl_socket_idname, socket_uuid))
return sockets_data
def load_node_tree_sockets(sockets: bpy.types.Collection,
sockets_data: dict):
""" load sockets of a shader_node_tree
:arg target_node_tree: target node_tree
:type target_node_tree: bpy.types.NodeTree
:arg socket_id: socket identifer
:type socket_id: str
:arg socket_data: dumped socket data
:type socket_data: dict
"""
# Check for removed sockets
for socket in sockets:
if not [s for s in sockets_data if 'uuid' in socket and socket['uuid'] == s[2]]:
sockets.remove(socket)
# Check for new sockets
for idx, socket_data in enumerate(sockets_data):
try:
checked_socket = sockets[idx]
if checked_socket.name != socket_data[0]:
checked_socket.name = socket_data[0]
except Exception:
s = sockets.new(socket_data[1], socket_data[0])
s['uuid'] = socket_data[2]
def load_node_tree(node_tree_data: dict, target_node_tree: bpy.types.ShaderNodeTree) -> dict:
"""Load a shader node_tree from dumped data
:arg node_tree_data: dumped node data
:type node_tree_data: dict
:arg target_node_tree: target node_tree
:type target_node_tree: bpy.types.NodeTree
"""
# TODO: load only required nodes
target_node_tree.nodes.clear()
if not target_node_tree.is_property_readonly('name'):
target_node_tree.name = node_tree_data['name']
if 'inputs' in node_tree_data:
socket_collection = getattr(target_node_tree, 'inputs')
load_node_tree_sockets(socket_collection, node_tree_data['inputs'])
if 'outputs' in node_tree_data:
socket_collection = getattr(target_node_tree, 'outputs')
load_node_tree_sockets(socket_collection, node_tree_data['outputs'])
# Load nodes
for node in node_tree_data["nodes"]:
load_node(node_tree_data["nodes"][node], target_node_tree)
for node_id, node_data in node_tree_data["nodes"].items():
target_node = target_node_tree.nodes.get(node_id, None)
if target_node is None:
continue
elif 'parent' in node_data:
target_node.parent = target_node_tree.nodes[node_data['parent']]
else:
target_node.parent = None
# TODO: load only required nodes links
# Load nodes links
target_node_tree.links.clear()
load_links(node_tree_data["links"], target_node_tree)
def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
def has_image(node): return (
node.type in ['TEX_IMAGE', 'TEX_ENVIRONMENT'] and node.image)
def has_node_group(node): return (
hasattr(node, 'node_tree') and node.node_tree)
def has_texture(node): return (
node.type in ['ATTRIBUTE_SAMPLE_TEXTURE','TEXTURE'] and node.texture)
deps = []
for node in node_tree.nodes:
if has_image(node):
deps.append(node.image)
elif has_node_group(node):
deps.append(node.node_tree)
elif has_texture(node):
deps.append(node.texture)
return deps
def dump_materials_slots(materials: bpy.types.bpy_prop_collection) -> list: def dump_materials_slots(materials: bpy.types.bpy_prop_collection) -> list:
""" Dump material slots collection """ Dump material slots collection
@ -387,22 +53,20 @@ def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_
for mat_uuid, mat_name in src_materials: for mat_uuid, mat_name in src_materials:
mat_ref = None mat_ref = None
if mat_uuid: if mat_uuid is not None:
mat_ref = get_datablock_from_uuid(mat_uuid, None) mat_ref = get_datablock_from_uuid(mat_uuid, None)
else: else:
mat_ref = bpy.data.materials[mat_name] mat_ref = bpy.data.materials[mat_name]
dst_materials.append(mat_ref) dst_materials.append(mat_ref)
class BlMaterial(ReplicatedDatablock): class BlMaterial(ReplicatedDatablock):
use_delta = True
bl_id = "materials" bl_id = "materials"
bl_class = bpy.types.Material bl_class = bpy.types.Material
bl_check_common = False bl_check_common = False
bl_icon = 'MATERIAL_DATA' bl_icon = 'MATERIAL_DATA'
bl_reload_parent = False bl_reload_parent = False
bl_reload_child = True
@staticmethod @staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
@ -410,6 +74,8 @@ class BlMaterial(ReplicatedDatablock):
@staticmethod @staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
load_animation_data(data.get('animation_data'), datablock)
loader = Loader() loader = Loader()
is_grease_pencil = data.get('is_grease_pencil') is_grease_pencil = data.get('is_grease_pencil')
@ -426,8 +92,6 @@ class BlMaterial(ReplicatedDatablock):
datablock.use_nodes = True datablock.use_nodes = True
load_node_tree(data['node_tree'], datablock.node_tree) load_node_tree(data['node_tree'], datablock.node_tree)
load_animation_data(data.get('nodes_animation_data'), datablock.node_tree)
load_animation_data(data.get('animation_data'), datablock)
@staticmethod @staticmethod
def dump(datablock: object) -> dict: def dump(datablock: object) -> dict:
@ -495,10 +159,8 @@ class BlMaterial(ReplicatedDatablock):
data['grease_pencil'] = gp_mat_dumper.dump(datablock.grease_pencil) data['grease_pencil'] = gp_mat_dumper.dump(datablock.grease_pencil)
elif datablock.use_nodes: elif datablock.use_nodes:
data['node_tree'] = dump_node_tree(datablock.node_tree) data['node_tree'] = dump_node_tree(datablock.node_tree)
data['nodes_animation_data'] = dump_animation_data(datablock.node_tree)
data['animation_data'] = dump_animation_data(datablock) data['animation_data'] = dump_animation_data(datablock)
return data return data
@staticmethod @staticmethod
@ -512,7 +174,7 @@ class BlMaterial(ReplicatedDatablock):
if datablock.use_nodes: if datablock.use_nodes:
deps.extend(get_node_tree_dependencies(datablock.node_tree)) deps.extend(get_node_tree_dependencies(datablock.node_tree))
deps.extend(resolve_animation_dependencies(datablock.node_tree))
deps.extend(resolve_animation_dependencies(datablock)) deps.extend(resolve_animation_dependencies(datablock))
return deps return deps

View File

@ -55,8 +55,6 @@ POLYGON = [
] ]
class BlMesh(ReplicatedDatablock): class BlMesh(ReplicatedDatablock):
use_delta = True
bl_id = "meshes" bl_id = "meshes"
bl_class = bpy.types.Mesh bl_class = bpy.types.Mesh
bl_check_common = False bl_check_common = False

View File

@ -65,8 +65,6 @@ def load_metaball_elements(elements_data, elements):
class BlMetaball(ReplicatedDatablock): class BlMetaball(ReplicatedDatablock):
use_delta = True
bl_id = "metaballs" bl_id = "metaballs"
bl_class = bpy.types.MetaBall bl_class = bpy.types.MetaBall
bl_check_common = False bl_check_common = False

View File

@ -28,8 +28,6 @@ from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlNodeGroup(ReplicatedDatablock): class BlNodeGroup(ReplicatedDatablock):
use_delta = True
bl_id = "node_groups" bl_id = "node_groups"
bl_class = bpy.types.NodeTree bl_class = bpy.types.NodeTree
bl_check_common = False bl_check_common = False

View File

@ -24,7 +24,7 @@ from replication.exception import ContextError
from replication.protocol import ReplicatedDatablock from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_material import IGNORED_SOCKETS from .node_tree import IGNORED_SOCKETS
from ..utils import get_preferences from ..utils import get_preferences
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
from .dump_anything import ( from .dump_anything import (
@ -493,8 +493,6 @@ def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_
class BlObject(ReplicatedDatablock): class BlObject(ReplicatedDatablock):
use_delta = True
bl_id = "objects" bl_id = "objects"
bl_class = bpy.types.Object bl_class = bpy.types.Object
bl_check_common = False bl_check_common = False
@ -620,8 +618,10 @@ class BlObject(ReplicatedDatablock):
transform = data.get('transforms', None) transform = data.get('transforms', None)
if transform: if transform:
datablock.matrix_parent_inverse = mathutils.Matrix(transform['matrix_parent_inverse']) datablock.matrix_parent_inverse = mathutils.Matrix(
transform['matrix_parent_inverse'])
datablock.matrix_basis = mathutils.Matrix(transform['matrix_basis']) datablock.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
datablock.matrix_local = mathutils.Matrix(transform['matrix_local'])
@staticmethod @staticmethod

View File

@ -3,8 +3,7 @@ import mathutils
from . import dump_anything from . import dump_anything
from replication.protocol import ReplicatedDatablock from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
@ -41,8 +40,6 @@ IGNORED_ATTR = [
] ]
class BlParticle(ReplicatedDatablock): class BlParticle(ReplicatedDatablock):
use_delta = True
bl_id = "particles" bl_id = "particles"
bl_class = bpy.types.ParticleSettings bl_class = bpy.types.ParticleSettings
bl_icon = "PARTICLES" bl_icon = "PARTICLES"

View File

@ -19,6 +19,7 @@
import logging import logging
from pathlib import Path from pathlib import Path
from uuid import uuid4 from uuid import uuid4
import re
import bpy import bpy
import mathutils import mathutils
@ -29,10 +30,12 @@ from replication.protocol import ReplicatedDatablock
from ..utils import flush_history, get_preferences from ..utils import flush_history, get_preferences
from .bl_action import (dump_animation_data, load_animation_data, from .bl_action import (dump_animation_data, load_animation_data,
resolve_animation_dependencies) resolve_animation_dependencies)
from .node_tree import (get_node_tree_dependencies, load_node_tree,
dump_node_tree)
from .bl_collection import (dump_collection_children, dump_collection_objects, from .bl_collection import (dump_collection_children, dump_collection_objects,
load_collection_childrens, load_collection_objects, load_collection_childrens, load_collection_objects,
resolve_collection_dependencies) resolve_collection_dependencies)
from .bl_datablock import resolve_datablock_from_uuid from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_file import get_filepath from .bl_file import get_filepath
from .dump_anything import Dumper, Loader from .dump_anything import Dumper, Loader
@ -303,7 +306,6 @@ def dump_sequence(sequence: bpy.types.Sequence) -> dict:
return data return data
def load_sequence(sequence_data: dict, def load_sequence(sequence_data: dict,
sequence_editor: bpy.types.SequenceEditor): sequence_editor: bpy.types.SequenceEditor):
""" Load sequence from dumped data """ Load sequence from dumped data
@ -370,7 +372,6 @@ def load_sequence(sequence_data: dict,
loader.load(sequence, sequence_data) loader.load(sequence, sequence_data)
sequence.select = False sequence.select = False
class BlScene(ReplicatedDatablock): class BlScene(ReplicatedDatablock):
is_root = True is_root = True
use_delta = True use_delta = True
@ -403,9 +404,8 @@ class BlScene(ReplicatedDatablock):
datablock.world = bpy.data.worlds[data['world']] datablock.world = bpy.data.worlds[data['world']]
# Annotation # Annotation
gpencil_uid = data.get('grease_pencil') if 'grease_pencil' in data.keys():
if gpencil_uid: datablock.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
datablock.grease_pencil = resolve_datablock_from_uuid(gpencil_uid, bpy.data.grease_pencils)
if get_preferences().sync_flags.sync_render_settings: if get_preferences().sync_flags.sync_render_settings:
if 'eevee' in data.keys(): if 'eevee' in data.keys():
@ -446,17 +446,16 @@ class BlScene(ReplicatedDatablock):
elif datablock.sequence_editor and not sequences: elif datablock.sequence_editor and not sequences:
datablock.sequence_editor_clear() datablock.sequence_editor_clear()
# Timeline markers
markers = data.get('timeline_markers')
if markers:
datablock.timeline_markers.clear()
for name, frame, camera in markers:
marker = datablock.timeline_markers.new(name, frame=frame)
if camera:
marker.camera = resolve_datablock_from_uuid(camera, bpy.data.objects)
marker.select = False
# FIXME: Find a better way after the replication big refacotoring # FIXME: Find a better way after the replication big refacotoring
# Keep other user from deleting collection object by flushing their history # Keep other user from deleting collection object by flushing their history
# Compositor
if data["use_nodes"]:
if datablock.node_tree is None:
datablock.use_nodes = True
load_node_tree(data['node_tree'], datablock.node_tree)
flush_history() flush_history()
@staticmethod @staticmethod
@ -468,9 +467,11 @@ class BlScene(ReplicatedDatablock):
scene_dumper = Dumper() scene_dumper = Dumper()
scene_dumper.depth = 1 scene_dumper.depth = 1
scene_dumper.include_filter = [ scene_dumper.include_filter = [
'use_nodes',
'name', 'name',
'world', 'world',
'id', 'id',
'grease_pencil',
'frame_start', 'frame_start',
'frame_end', 'frame_end',
'frame_step', 'frame_step',
@ -526,12 +527,10 @@ class BlScene(ReplicatedDatablock):
dumped_sequences[seq.name] = dump_sequence(seq) dumped_sequences[seq.name] = dump_sequence(seq)
data['sequences'] = dumped_sequences data['sequences'] = dumped_sequences
# Timeline markers # Compositor
if datablock.timeline_markers: if datablock.use_nodes:
data['timeline_markers'] = [(m.name, m.frame, getattr(m.camera, 'uuid', None)) for m in datablock.timeline_markers] data['node_tree'] = dump_node_tree(datablock.node_tree)
data['animation_data'] = dump_animation_data(datablock)
if datablock.grease_pencil:
data['grease_pencil'] = datablock.grease_pencil.uuid
return data return data
@ -566,6 +565,12 @@ class BlScene(ReplicatedDatablock):
Path(bpy.path.abspath(sequence.directory), Path(bpy.path.abspath(sequence.directory),
elem.filename)) elem.filename))
# Compositor
if datablock.use_nodes:
deps.extend(get_node_tree_dependencies(datablock.node_tree))
deps.extend(resolve_animation_dependencies(datablock))
return deps return deps
@staticmethod @staticmethod

View File

@ -25,8 +25,6 @@ from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlSpeaker(ReplicatedDatablock): class BlSpeaker(ReplicatedDatablock):
use_delta = True
bl_id = "speakers" bl_id = "speakers"
bl_class = bpy.types.Speaker bl_class = bpy.types.Speaker
bl_check_common = False bl_check_common = False

View File

@ -26,8 +26,6 @@ from .bl_action import dump_animation_data, load_animation_data, resolve_animati
import bpy.types as T import bpy.types as T
class BlTexture(ReplicatedDatablock): class BlTexture(ReplicatedDatablock):
use_delta = True
bl_id = "textures" bl_id = "textures"
bl_class = bpy.types.Texture bl_class = bpy.types.Texture
bl_check_common = False bl_check_common = False

View File

@ -27,8 +27,6 @@ from .bl_material import dump_materials_slots, load_materials_slots
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlVolume(ReplicatedDatablock): class BlVolume(ReplicatedDatablock):
use_delta = True
bl_id = "volumes" bl_id = "volumes"
bl_class = bpy.types.Volume bl_class = bpy.types.Volume
bl_check_common = False bl_check_common = False

View File

@ -21,7 +21,7 @@ import mathutils
from .dump_anything import Loader, Dumper from .dump_anything import Loader, Dumper
from replication.protocol import ReplicatedDatablock from replication.protocol import ReplicatedDatablock
from .bl_material import (load_node_tree, from .node_tree import (load_node_tree,
dump_node_tree, dump_node_tree,
get_node_tree_dependencies) get_node_tree_dependencies)
@ -30,8 +30,6 @@ from .bl_action import dump_animation_data, load_animation_data, resolve_animati
class BlWorld(ReplicatedDatablock): class BlWorld(ReplicatedDatablock):
use_delta = True
bl_id = "worlds" bl_id = "worlds"
bl_class = bpy.types.World bl_class = bpy.types.World
bl_check_common = True bl_check_common = True

View File

@ -0,0 +1,362 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
import mathutils
import logging
import re
from uuid import uuid4
from .dump_anything import Loader, Dumper
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
# Socket types that carry no serializable default value (links only).
IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM']

# Matches the trailing "[<index>]" of a socket's RNA path (e.g. "inputs[3]").
# Raw string avoids the invalid escape sequence warning for "\[" / "\d".
NODE_SOCKET_INDEX = re.compile(r'\[(\d*)\]')
def load_node(node_data: dict, node_tree: bpy.types.NodeTree):
    """ Load a node into a node_tree from a dict

    Creates the node, restores its plain properties, then resolves the
    image / node-group datablocks it references by uuid and finally
    restores socket default values positionally.

    :arg node_data: dumped node data
    :type node_data: dict
    :arg node_tree: target node_tree
    :type node_tree: bpy.types.NodeTree
    """
    loader = Loader()
    target_node = node_tree.nodes.new(type=node_data["bl_idname"])
    target_node.select = False
    # Restore plain properties first; datablock references are fixed below.
    loader.load(target_node, node_data)
    image_uuid = node_data.get('image_uuid', None)
    node_tree_uuid = node_data.get('node_tree_uuid', None)
    if image_uuid and not target_node.image:
        image = resolve_datablock_from_uuid(image_uuid, bpy.data.images)
        if image is None:
            logging.error(f"Fail to find material image from uuid {image_uuid}")
        else:
            target_node.image = image
    if node_tree_uuid:
        target_node.node_tree = get_datablock_from_uuid(node_tree_uuid, None)
    # Socket defaults were dumped positionally over the non-ignored sockets,
    # so the same filter must be applied here to keep indices aligned.
    inputs_data = node_data.get('inputs')
    if inputs_data:
        inputs = [i for i in target_node.inputs if i.type not in IGNORED_SOCKETS]
        for idx, inpt in enumerate(inputs):
            if idx < len(inputs_data) and hasattr(inpt, "default_value"):
                loaded_input = inputs_data[idx]
                try:
                    # OBJECT/COLLECTION sockets store a datablock uuid.
                    if inpt.type in ['OBJECT', 'COLLECTION']:
                        inpt.default_value = get_datablock_from_uuid(loaded_input, None)
                    else:
                        inpt.default_value = loaded_input
                except Exception as e:
                    logging.warning(f"Node {target_node.name} input {inpt.name} parameter not supported, skipping ({e})")
            else:
                logging.warning(f"Node {target_node.name} input length mismatch.")
    outputs_data = node_data.get('outputs')
    if outputs_data:
        outputs = [o for o in target_node.outputs if o.type not in IGNORED_SOCKETS]
        for idx, output in enumerate(outputs):
            if idx < len(outputs_data) and hasattr(output, "default_value"):
                loaded_output = outputs_data[idx]
                try:
                    if output.type in ['OBJECT', 'COLLECTION']:
                        output.default_value = get_datablock_from_uuid(loaded_output, None)
                    else:
                        output.default_value = loaded_output
                except Exception as e:
                    logging.warning(
                        f"Node {target_node.name} output {output.name} parameter not supported, skipping ({e})")
            else:
                logging.warning(
                    f"Node {target_node.name} output length mismatch.")
def dump_node(node: bpy.types.Node) -> dict:
    """ Dump a single node to a dict

    Serializes the node's writable properties plus, when present, its
    frame parent (by name), socket default values, color ramp, curve
    mapping and the uuids of referenced image / node-group datablocks.

    :arg node: target node
    :type node: bpy.types.Node
    :return: dict
    """
    node_dumper = Dumper()
    node_dumper.depth = 1
    # Read-only, UI-only or separately-handled attributes are skipped.
    # (Removed the dead, misspelled "show_tetxure" entry — the correct
    # "show_texture" is already listed below.)
    node_dumper.exclude_filter = [
        "dimensions",
        "show_expanded",
        "name_full",
        "select",
        "bl_label",
        "bl_height_min",
        "bl_height_max",
        "bl_height_default",
        "bl_width_min",
        "bl_width_max",
        "type",
        "bl_icon",
        "bl_width_default",
        "bl_static_type",
        "is_active_output",
        "hide",
        "show_options",
        "show_preview",
        "show_texture",
        "outputs",
        "width_hidden",
        "image"
    ]
    dumped_node = node_dumper.dump(node)
    # Frame parenting is restored by name in load_node_tree().
    if node.parent:
        dumped_node['parent'] = node.parent.name
    dump_io_needed = (node.type not in ['REROUTE', 'OUTPUT_MATERIAL'])
    if dump_io_needed:
        io_dumper = Dumper()
        io_dumper.depth = 2
        io_dumper.include_filter = ["default_value"]
        if hasattr(node, 'inputs'):
            # Socket defaults are stored positionally over the non-ignored
            # sockets; datablock values are stored as uuids.
            dumped_node['inputs'] = []
            inputs = [i for i in node.inputs if i.type not in IGNORED_SOCKETS]
            for idx, inpt in enumerate(inputs):
                if hasattr(inpt, 'default_value'):
                    if isinstance(inpt.default_value, bpy.types.ID):
                        dumped_input = inpt.default_value.uuid
                    else:
                        dumped_input = io_dumper.dump(inpt.default_value)
                    dumped_node['inputs'].append(dumped_input)
        if hasattr(node, 'outputs'):
            dumped_node['outputs'] = []
            for idx, output in enumerate(node.outputs):
                if output.type not in IGNORED_SOCKETS:
                    if hasattr(output, 'default_value'):
                        dumped_node['outputs'].append(
                            io_dumper.dump(output.default_value))
    if hasattr(node, 'color_ramp'):
        ramp_dumper = Dumper()
        ramp_dumper.depth = 4
        ramp_dumper.include_filter = [
            'elements',
            'alpha',
            'color',
            'position',
            'interpolation',
            'hue_interpolation',
            'color_mode'
        ]
        dumped_node['color_ramp'] = ramp_dumper.dump(node.color_ramp)
    if hasattr(node, 'mapping'):
        curve_dumper = Dumper()
        curve_dumper.depth = 5
        curve_dumper.include_filter = [
            'curves',
            'points',
            'location'
        ]
        dumped_node['mapping'] = curve_dumper.dump(node.mapping)
    # Datablock references are stored as uuids and resolved on load.
    if hasattr(node, 'image') and getattr(node, 'image'):
        dumped_node['image_uuid'] = node.image.uuid
    if hasattr(node, 'node_tree') and getattr(node, 'node_tree'):
        dumped_node['node_tree_uuid'] = node.node_tree.uuid
    return dumped_node
def load_links(links_data, node_tree):
    """ Recreate a node tree's links from their dumped form

    :arg links_data: dumped node links
    :type links_data: list
    :arg node_tree: target node tree
    :type node_tree: bpy.types.NodeTree
    """
    for link_data in links_data:
        # Nodes are addressed by name, sockets by positional index.
        dst_node = node_tree.nodes[link_data['to_node']]
        src_node = node_tree.nodes[link_data['from_node']]
        dst_socket = dst_node.inputs[int(link_data['to_socket'])]
        src_socket = src_node.outputs[int(link_data['from_socket'])]
        node_tree.links.new(dst_socket, src_socket)
def dump_links(links):
    """ Serialize a node tree link collection to a list of dicts

    Each entry holds the endpoint node names and the socket indices
    extracted from the sockets' RNA paths.

    :arg links: node links collection
    :type links: bpy.types.NodeLinks
    :return: list
    """
    dumped = []
    for link in links:
        to_index = NODE_SOCKET_INDEX.search(
            link.to_socket.path_from_id()).group(1)
        from_index = NODE_SOCKET_INDEX.search(
            link.from_socket.path_from_id()).group(1)
        dumped.append({
            'to_node': link.to_node.name,
            'to_socket': to_index,
            'from_node': link.from_node.name,
            'from_socket': from_index,
        })
    return dumped
def dump_node_tree(node_tree: bpy.types.NodeTree) -> dict:
    """ Serialize a whole node tree: nodes, links and interface sockets

    :arg node_tree: node tree to dump
    :type node_tree: bpy.types.NodeTree
    :return: dict
    """
    dumped_tree = {
        'nodes': {node.name: dump_node(node) for node in node_tree.nodes},
        'links': dump_links(node_tree.links),
        'name': node_tree.name,
        'type': type(node_tree).__name__
    }
    for socket_id in ('inputs', 'outputs'):
        dumped_tree[socket_id] = dump_node_tree_sockets(
            getattr(node_tree, socket_id))
    return dumped_tree
def dump_node_tree_sockets(sockets) -> list:
    """ Dump the interface sockets of a node tree.

    Ensures every socket carries a persistent uuid (generating and
    storing one when missing) so sockets can be matched across updates.

    Note: the previous annotations were wrong — ``sockets`` is a
    ``bpy_prop_collection`` of ``NodeSocketInterface`` (not
    ``bpy.types.Collection``) and the function returns a list, not a dict.

    :arg sockets: node tree socket collection (tree ``inputs`` or ``outputs``)
    :return: list of ``(name, bl_socket_idname, uuid)`` tuples
    """
    sockets_data = []
    for socket in sockets:
        try:
            socket_uuid = socket['uuid']
        except Exception:
            # Missing custom property: assign a fresh persistent uuid.
            socket_uuid = str(uuid4())
            socket['uuid'] = socket_uuid

        sockets_data.append((socket.name, socket.bl_socket_idname, socket_uuid))
    return sockets_data
def load_node_tree_sockets(sockets, sockets_data: list):
    """ Load the interface sockets of a node tree.

    Removes sockets whose uuid no longer appears in the dumped data,
    renames surviving sockets that changed name, and creates the missing
    ones (restoring their persistent uuid).

    :arg sockets: node tree socket collection (tree ``inputs`` or ``outputs``)
    :arg sockets_data: dumped sockets, a list of
        ``(name, bl_socket_idname, uuid)`` tuples
    """
    # Check for removed sockets.
    # Iterate over a snapshot: removing from the collection while
    # iterating it directly skips the element after each removal.
    known_uuids = {dumped[2] for dumped in sockets_data}
    for socket in list(sockets):
        if 'uuid' not in socket or socket['uuid'] not in known_uuids:
            sockets.remove(socket)

    # Check for new sockets; sync names of existing ones positionally.
    for idx, socket_data in enumerate(sockets_data):
        try:
            checked_socket = sockets[idx]
            if checked_socket.name != socket_data[0]:
                checked_socket.name = socket_data[0]
        except Exception:
            s = sockets.new(socket_data[1], socket_data[0])
            s['uuid'] = socket_data[2]
def load_node_tree(node_tree_data: dict, target_node_tree: bpy.types.NodeTree) -> dict:
    """Rebuild a node tree in place from its dumped representation.

    :arg node_tree_data: dumped node data
    :type node_tree_data: dict
    :arg target_node_tree: target node_tree
    :type target_node_tree: bpy.types.NodeTree
    """
    # TODO: load only required nodes
    target_node_tree.nodes.clear()

    if not target_node_tree.is_property_readonly('name'):
        target_node_tree.name = node_tree_data['name']

    # Restore the tree interface sockets before the nodes that use them.
    for socket_id in ('inputs', 'outputs'):
        if socket_id in node_tree_data:
            load_node_tree_sockets(getattr(target_node_tree, socket_id),
                                   node_tree_data[socket_id])

    # First pass: recreate every node.
    dumped_nodes = node_tree_data["nodes"]
    for dumped_node in dumped_nodes.values():
        load_node(dumped_node, target_node_tree)

    # Second pass: restore frame parenting by name (a parent frame may be
    # created after its children in the first pass).
    for node_id, dumped_node in dumped_nodes.items():
        target_node = target_node_tree.nodes.get(node_id, None)
        if target_node is None:
            continue
        if 'parent' in dumped_node:
            target_node.parent = target_node_tree.nodes[dumped_node['parent']]
        else:
            target_node.parent = None

    # TODO: load only required nodes links
    # Relink everything from scratch.
    target_node_tree.links.clear()
    load_links(node_tree_data["links"], target_node_tree)
def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
    """Collect the external datablocks referenced by a node tree.

    Returns the images, nested node groups and textures used by the
    tree's nodes (shader and compositor node types are covered).
    """
    # NOTE(review): 'R_LAYER' nodes are included in the image check —
    # confirm render-layer nodes actually expose an `image` attribute.
    def uses_image(node):
        return (node.type in ('TEX_IMAGE', 'TEX_ENVIRONMENT', 'IMAGE', 'R_LAYER')
                and node.image)

    def uses_node_group(node):
        return hasattr(node, 'node_tree') and node.node_tree

    def uses_texture(node):
        return node.type in ('ATTRIBUTE_SAMPLE_TEXTURE', 'TEXTURE') and node.texture

    deps = []
    for node in node_tree.nodes:
        if uses_image(node):
            deps.append(node.image)
        elif uses_node_group(node):
            deps.append(node.node_tree)
        elif uses_texture(node):
            deps.append(node.texture)
    return deps

View File

@ -1,152 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import logging
import bpy
from bpy.app.handlers import persistent
from replication import porcelain
from replication.constants import RP_COMMON, STATE_ACTIVE, STATE_SYNCING, UP
from replication.exception import ContextError, NonAuthorizedOperationError
from replication.interface import session
from . import shared_data, utils
def sanitize_deps_graph(remove_nodes: bool = False):
    """ Cleanup the replication graph

    Re-resolves every graph node's datablock pointer and, when
    *remove_nodes* is True, removes nodes whose datablock can no longer
    be resolved in the blend file.

    :arg remove_nodes: remove orphan nodes instead of only re-resolving
    :type remove_nodes: bool
    """
    if session and session.state == STATE_ACTIVE:
        start = utils.current_milli_time()
        rm_cpt = 0
        for node in session.repository.graph.values():
            # Refresh the cached datablock pointer (may be stale after undo).
            node.instance = session.repository.rdp.resolve(node.data)
            # NOTE(review): `node is None` can never be true here (node was
            # just dereferenced above) — was `node.instance is None` intended?
            if node is None \
                    or (node.state == UP and not node.instance):
                if remove_nodes:
                    try:
                        porcelain.rm(session.repository,
                                     node.uuid,
                                     remove_dependencies=False)
                        logging.info(f"Removing {node.uuid}")
                        rm_cpt += 1
                    except NonAuthorizedOperationError:
                        # Keep nodes owned by other users.
                        continue
        logging.info(f"Sanitize took { utils.current_milli_time()-start} ms, removed {rm_cpt} nodes")
def update_external_dependencies():
    """Commit and push datablocks backed by external files (e.g. images)."""
    EXTERNAL_TYPES = ('WindowsPath', 'PosixPath', 'Image')
    graph = session.repository.graph
    candidates = [node.uuid for node in graph.values()
                  if node.data['type_id'] in EXTERNAL_TYPES]
    for uuid in candidates:
        node = graph.get(uuid)
        if node is None:
            continue
        # Only the owner (or common data) may be committed by this client.
        if node.owner not in (session.repository.username, RP_COMMON):
            continue
        porcelain.commit(session.repository, uuid)
        porcelain.push(session.repository, 'origin', uuid)
@persistent
def on_scene_update(scene):
    """Forward blender depsgraph update to replication

    Skips updates that were just applied from the network (to avoid echo
    loops), then commits and pushes every locally-owned datablock touched
    by the depsgraph update.
    """
    if session and session.state == STATE_ACTIVE:
        # NOTE(review): `context` and `settings` are unused below — confirm
        # they can be dropped.
        context = bpy.context
        blender_depsgraph = bpy.context.view_layer.depsgraph
        dependency_updates = [u for u in blender_depsgraph.updates]
        settings = utils.get_preferences()
        incoming_updates = shared_data.session.applied_updates
        # Updates triggered by applying remote data: consume and ignore them.
        distant_update = [getattr(u.id, 'uuid', None) for u in dependency_updates if getattr(u.id, 'uuid', None) in incoming_updates]
        if distant_update:
            for u in distant_update:
                shared_data.session.applied_updates.remove(u)
            logging.debug(f"Ignoring distant update of {dependency_updates[0].id.name}")
            return
        update_external_dependencies()
        # NOTE: maybe we don't need to check each update but only the first
        for update in reversed(dependency_updates):
            update_uuid = getattr(update.id, 'uuid', None)
            if update_uuid:
                node = session.repository.graph.get(update.id.uuid)
                check_common = session.repository.rdp.get_implementation(update.id).bl_check_common
                # Only push datablocks we own (or common ones).
                if node and (node.owner == session.repository.username or check_common):
                    logging.debug(f"Evaluate {update.id.name}")
                    if node.state == UP:
                        try:
                            porcelain.commit(session.repository, node.uuid)
                            porcelain.push(session.repository,
                                           'origin', node.uuid)
                        except ReferenceError:
                            logging.debug(f"Reference error {node.uuid}")
                        except ContextError as e:
                            logging.debug(e)
                        except Exception as e:
                            logging.error(e)
                else:
                    continue
            elif isinstance(update.id, bpy.types.Scene):
                # Scene not yet tracked: add it to the repository and push.
                scene = bpy.data.scenes.get(update.id.name)
                scn_uuid = porcelain.add(session.repository, scene)
                porcelain.commit(session.repository, scn_uuid)
                porcelain.push(session.repository, 'origin', scn_uuid)
@persistent
def resolve_deps_graph(dummy):
    """Resolve deps graph

    Temporary solution to resolve each node's datablock pointer after an
    undo/redo. A future solution should be to avoid storing datablock
    references altogether.
    """
    if session and session.state == STATE_ACTIVE:
        sanitize_deps_graph(remove_nodes=True)
@persistent
def load_pre_handler(dummy):
    """Stop the running session before a new blend file is loaded."""
    if session and session.state in [STATE_ACTIVE, STATE_SYNCING]:
        bpy.ops.session.stop()
@persistent
def update_client_frame(scene):
    """Broadcast the local user's current frame to other session members."""
    if session and session.state == STATE_ACTIVE:
        porcelain.update_user_metadata(session.repository, {
            'frame_current': scene.frame_current
        })
def register():
    """Attach the session's persistent application handlers."""
    # Undo/redo invalidates stored datablock pointers — re-resolve them.
    bpy.app.handlers.undo_post.append(resolve_deps_graph)
    bpy.app.handlers.redo_post.append(resolve_deps_graph)
    bpy.app.handlers.load_pre.append(load_pre_handler)
    bpy.app.handlers.frame_change_pre.append(update_client_frame)
def unregister():
    """Detach the handlers installed by :func:`register`."""
    bpy.app.handlers.undo_post.remove(resolve_deps_graph)
    bpy.app.handlers.redo_post.remove(resolve_deps_graph)
    bpy.app.handlers.load_pre.remove(load_pre_handler)
    bpy.app.handlers.frame_change_pre.remove(update_client_frame)

View File

@ -27,12 +27,12 @@ import shutil
import string import string
import sys import sys
import time import time
import traceback
from datetime import datetime from datetime import datetime
from operator import itemgetter from operator import itemgetter
from pathlib import Path from pathlib import Path
from queue import Queue from queue import Queue
from time import gmtime, strftime from time import gmtime, strftime
import traceback
from bpy.props import FloatProperty from bpy.props import FloatProperty
@ -45,17 +45,16 @@ import bpy
import mathutils import mathutils
from bpy.app.handlers import persistent from bpy.app.handlers import persistent
from bpy_extras.io_utils import ExportHelper, ImportHelper from bpy_extras.io_utils import ExportHelper, ImportHelper
from replication import porcelain
from replication.constants import (COMMITED, FETCHED, RP_COMMON, STATE_ACTIVE, from replication.constants import (COMMITED, FETCHED, RP_COMMON, STATE_ACTIVE,
STATE_INITIAL, STATE_SYNCING, UP) STATE_INITIAL, STATE_SYNCING, UP)
from replication.protocol import DataTranslationProtocol
from replication.exception import ContextError, NonAuthorizedOperationError from replication.exception import ContextError, NonAuthorizedOperationError
from replication.interface import session from replication.interface import session
from replication.objects import Node from replication import porcelain
from replication.protocol import DataTranslationProtocol
from replication.repository import Repository from replication.repository import Repository
from replication.objects import Node
from . import bl_types, environment, shared_data, timers, ui, utils from . import bl_types, environment, timers, ui, utils
from .handlers import on_scene_update, sanitize_deps_graph
from .presence import SessionStatusWidget, renderer, view3d_find from .presence import SessionStatusWidget, renderer, view3d_find
from .timers import registry from .timers import registry
@ -100,7 +99,7 @@ def initialize_session():
# Step 2: Load nodes # Step 2: Load nodes
logging.info("Applying nodes") logging.info("Applying nodes")
for node in session.repository.heads: for node in session.repository.index_sorted:
porcelain.apply(session.repository, node) porcelain.apply(session.repository, node)
logging.info("Registering timers") logging.info("Registering timers")
@ -113,7 +112,7 @@ def initialize_session():
utils.flush_history() utils.flush_history()
# Step 6: Launch deps graph update handling # Step 6: Launch deps graph update handling
bpy.app.handlers.depsgraph_update_post.append(on_scene_update) bpy.app.handlers.depsgraph_update_post.append(depsgraph_evaluation)
@session_callback('on_exit') @session_callback('on_exit')
@ -133,8 +132,8 @@ def on_connection_end(reason="none"):
stop_modal_executor = True stop_modal_executor = True
if on_scene_update in bpy.app.handlers.depsgraph_update_post: if depsgraph_evaluation in bpy.app.handlers.depsgraph_update_post:
bpy.app.handlers.depsgraph_update_post.remove(on_scene_update) bpy.app.handlers.depsgraph_update_post.remove(depsgraph_evaluation)
# Step 3: remove file handled # Step 3: remove file handled
logger = logging.getLogger() logger = logging.getLogger()
@ -273,7 +272,8 @@ class SessionStartOperator(bpy.types.Operator):
session_update = timers.SessionStatusUpdate() session_update = timers.SessionStatusUpdate()
session_user_sync = timers.SessionUserSync() session_user_sync = timers.SessionUserSync()
session_background_executor = timers.MainThreadExecutor(execution_queue=background_execution_queue) session_background_executor = timers.MainThreadExecutor(
execution_queue=background_execution_queue)
session_listen = timers.SessionListenTimer(timeout=0.001) session_listen = timers.SessionListenTimer(timeout=0.001)
session_listen.register() session_listen.register()
@ -285,7 +285,6 @@ class SessionStartOperator(bpy.types.Operator):
deleyables.append(session_update) deleyables.append(session_update)
deleyables.append(session_user_sync) deleyables.append(session_user_sync)
deleyables.append(session_listen) deleyables.append(session_listen)
deleyables.append(timers.AnnotationUpdates())
return {"FINISHED"} return {"FINISHED"}
@ -604,9 +603,9 @@ class SessionApply(bpy.types.Operator):
node_ref = session.repository.graph.get(self.target) node_ref = session.repository.graph.get(self.target)
porcelain.apply(session.repository, porcelain.apply(session.repository,
self.target, self.target,
force=True) force=True,
force_dependencies=self.reset_dependencies)
impl = session.repository.rdp.get_implementation(node_ref.instance) impl = session.repository.rdp.get_implementation(node_ref.instance)
# NOTE: find another way to handle child and parent automatic reloading
if impl.bl_reload_parent: if impl.bl_reload_parent:
for parent in session.repository.graph.get_parents(self.target): for parent in session.repository.graph.get_parents(self.target):
logging.debug(f"Refresh parent {parent}") logging.debug(f"Refresh parent {parent}")
@ -614,11 +613,6 @@ class SessionApply(bpy.types.Operator):
porcelain.apply(session.repository, porcelain.apply(session.repository,
parent.uuid, parent.uuid,
force=True) force=True)
if hasattr(impl, 'bl_reload_child') and impl.bl_reload_child:
for dep in node_ref.dependencies:
porcelain.apply(session.repository,
dep,
force=True)
except Exception as e: except Exception as e:
self.report({'ERROR'}, repr(e)) self.report({'ERROR'}, repr(e))
traceback.print_exc() traceback.print_exc()
@ -642,7 +636,7 @@ class SessionCommit(bpy.types.Operator):
def execute(self, context): def execute(self, context):
try: try:
porcelain.commit(session.repository, self.target) porcelain.commit(session.repository, self.target)
porcelain.push(session.repository, 'origin', self.target, force=True) porcelain.push(session.repository, 'origin', self.target)
return {"FINISHED"} return {"FINISHED"}
except Exception as e: except Exception as e:
self.report({'ERROR'}, repr(e)) self.report({'ERROR'}, repr(e))
@ -690,7 +684,6 @@ class SessionPurgeOperator(bpy.types.Operator):
def execute(self, context): def execute(self, context):
try: try:
sanitize_deps_graph(remove_nodes=True) sanitize_deps_graph(remove_nodes=True)
porcelain.purge_orphan_nodes(session.repository)
except Exception as e: except Exception as e:
self.report({'ERROR'}, repr(e)) self.report({'ERROR'}, repr(e))
@ -723,6 +716,7 @@ class SessionNotifyOperator(bpy.types.Operator):
layout = self.layout layout = self.layout
layout.row().label(text=self.message) layout.row().label(text=self.message)
def invoke(self, context, event): def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self) return context.window_manager.invoke_props_dialog(self)
@ -925,6 +919,110 @@ classes = (
) )
def update_external_dependencies():
nodes_ids = [n.uuid for n in session.repository.graph.values() if n.data['type_id'] in ['WindowsPath', 'PosixPath']]
for node_id in nodes_ids:
node = session.repository.graph.get(node_id)
if node and node.owner in [session.repository.username, RP_COMMON]:
porcelain.commit(session.repository, node_id)
porcelain.push(session.repository,'origin', node_id)
def sanitize_deps_graph(remove_nodes: bool = False):
""" Cleanup the replication graph
"""
if session and session.state == STATE_ACTIVE:
start = utils.current_milli_time()
rm_cpt = 0
for node in session.repository.graph.values():
node.instance = session.repository.rdp.resolve(node.data)
if node is None \
or (node.state == UP and not node.instance):
if remove_nodes:
try:
porcelain.rm(session.repository,
node.uuid,
remove_dependencies=False)
logging.info(f"Removing {node.uuid}")
rm_cpt += 1
except NonAuthorizedOperationError:
continue
logging.info(f"Sanitize took { utils.current_milli_time()-start} ms, removed {rm_cpt} nodes")
@persistent
def resolve_deps_graph(dummy):
"""Resolve deps graph
Temporary solution to resolve each node pointers after a Undo.
A future solution should be to avoid storing dataclock reference...
"""
if session and session.state == STATE_ACTIVE:
sanitize_deps_graph(remove_nodes=True)
@persistent
def load_pre_handler(dummy):
if session and session.state in [STATE_ACTIVE, STATE_SYNCING]:
bpy.ops.session.stop()
@persistent
def update_client_frame(scene):
if session and session.state == STATE_ACTIVE:
porcelain.update_user_metadata(session.repository, {
'frame_current': scene.frame_current
})
@persistent
def depsgraph_evaluation(scene):
if session and session.state == STATE_ACTIVE:
context = bpy.context
blender_depsgraph = bpy.context.view_layer.depsgraph
dependency_updates = [u for u in blender_depsgraph.updates]
settings = utils.get_preferences()
update_external_dependencies()
is_internal = [u for u in dependency_updates if u.is_updated_geometry or u.is_updated_shading or u.is_updated_transform]
# NOTE: maybe we don't need to check each update but only the first
if not is_internal:
return
for update in reversed(dependency_updates):
# Is the object tracked ?
if update.id.uuid:
# Retrieve local version
node = session.repository.graph.get(update.id.uuid)
check_common = session.repository.rdp.get_implementation(update.id).bl_check_common
# Check our right on this update:
# - if its ours or ( under common and diff), launch the
# update process
# - if its to someone else, ignore the update
if node and (node.owner == session.repository.username or check_common):
if node.state == UP:
try:
porcelain.commit(session.repository, node.uuid)
porcelain.push(session.repository, 'origin', node.uuid)
except ReferenceError:
logging.debug(f"Reference error {node.uuid}")
except ContextError as e:
logging.debug(e)
except Exception as e:
logging.error(e)
else:
continue
# A new scene is created
elif isinstance(update.id, bpy.types.Scene):
ref = session.repository.get_node_by_datablock(update.id)
if ref:
pass
else:
scn_uuid = porcelain.add(session.repository, update.id)
porcelain.commit(session.node_id, scn_uuid)
porcelain.push(session.repository,'origin', scn_uuid)
def register(): def register():
from bpy.utils import register_class from bpy.utils import register_class
@ -932,6 +1030,13 @@ def register():
register_class(cls) register_class(cls)
bpy.app.handlers.undo_post.append(resolve_deps_graph)
bpy.app.handlers.redo_post.append(resolve_deps_graph)
bpy.app.handlers.load_pre.append(load_pre_handler)
bpy.app.handlers.frame_change_pre.append(update_client_frame)
def unregister(): def unregister():
if session and session.state == STATE_ACTIVE: if session and session.state == STATE_ACTIVE:
session.disconnect() session.disconnect()
@ -939,3 +1044,9 @@ def unregister():
from bpy.utils import unregister_class from bpy.utils import unregister_class
for cls in reversed(classes): for cls in reversed(classes):
unregister_class(cls) unregister_class(cls)
bpy.app.handlers.undo_post.remove(resolve_deps_graph)
bpy.app.handlers.redo_post.remove(resolve_deps_graph)
bpy.app.handlers.load_pre.remove(load_pre_handler)
bpy.app.handlers.frame_change_pre.remove(update_client_frame)

View File

@ -273,13 +273,6 @@ class SessionPrefs(bpy.types.AddonPreferences):
step=1, step=1,
subtype='PERCENTAGE', subtype='PERCENTAGE',
) )
presence_mode_distance: bpy.props.FloatProperty(
name="Distance mode visibilty",
description="Adjust the distance visibilty of user's mode",
min=0.1,
max=1000,
default=100,
)
conf_session_identity_expanded: bpy.props.BoolProperty( conf_session_identity_expanded: bpy.props.BoolProperty(
name="Identity", name="Identity",
description="Identity", description="Identity",
@ -453,11 +446,10 @@ class SessionPrefs(bpy.types.AddonPreferences):
col = box.column(align=True) col = box.column(align=True)
col.prop(self, "presence_hud_scale", expand=True) col.prop(self, "presence_hud_scale", expand=True)
col.prop(self, "presence_hud_hpos", expand=True) col.prop(self, "presence_hud_hpos", expand=True)
col.prop(self, "presence_hud_vpos", expand=True) col.prop(self, "presence_hud_vpos", expand=True)
col.prop(self, "presence_mode_distance", expand=True)
if self.category == 'UPDATE': if self.category == 'UPDATE':
from . import addon_updater_ops from . import addon_updater_ops
addon_updater_ops.update_settings_ui(self, context) addon_updater_ops.update_settings_ui(self, context)
@ -546,11 +538,6 @@ class SessionProps(bpy.types.PropertyGroup):
description='Enable user overlay ', description='Enable user overlay ',
default=True, default=True,
) )
presence_show_mode: bpy.props.BoolProperty(
name="Show users current mode",
description='Enable user mode overlay ',
default=False,
)
presence_show_far_user: bpy.props.BoolProperty( presence_show_far_user: bpy.props.BoolProperty(
name="Show users on different scenes", name="Show users on different scenes",
description="Show user on different scenes", description="Show user on different scenes",

View File

@ -94,41 +94,15 @@ def project_to_viewport(region: bpy.types.Region, rv3d: bpy.types.RegionView3D,
return [target.x, target.y, target.z] return [target.x, target.y, target.z]
def bbox_from_obj(obj: bpy.types.Object, index: int = 1) -> list: def bbox_from_obj(obj: bpy.types.Object, radius: float) -> list:
""" Generate a bounding box for a given object by using its world matrix """ Generate a bounding box for a given object by using its world matrix
:param obj: target object :param obj: target object
:type obj: bpy.types.Object :type obj: bpy.types.Object
:param index: indice offset :param radius: bounding box radius
:type index: int :type radius: float
:return: list of 8 points [(x,y,z),...], list of 12 link between these points [(1,2),...] :return: list of 8 points [(x,y,z),...]
""" """
radius = 1.0 # Radius of the bounding box
index = 8*index
vertex_indices = (
(0+index, 1+index), (0+index, 2+index), (1+index, 3+index), (2+index, 3+index),
(4+index, 5+index), (4+index, 6+index), (5+index, 7+index), (6+index, 7+index),
(0+index, 4+index), (1+index, 5+index), (2+index, 6+index), (3+index, 7+index))
if obj.type == 'EMPTY':
radius = obj.empty_display_size
elif obj.type == 'LIGHT':
radius = obj.data.shadow_soft_size
elif obj.type == 'LIGHT_PROBE':
radius = obj.data.influence_distance
elif obj.type == 'CAMERA':
radius = obj.data.display_size
elif hasattr(obj, 'bound_box'):
vertex_indices = (
(0+index, 1+index), (1+index, 2+index),
(2+index, 3+index), (0+index, 3+index),
(4+index, 5+index), (5+index, 6+index),
(6+index, 7+index), (4+index, 7+index),
(0+index, 4+index), (1+index, 5+index),
(2+index, 6+index), (3+index, 7+index))
vertex_pos = get_bb_coords_from_obj(obj)
return vertex_pos, vertex_indices
coords = [ coords = [
(-radius, -radius, -radius), (+radius, -radius, -radius), (-radius, -radius, -radius), (+radius, -radius, -radius),
(-radius, +radius, -radius), (+radius, +radius, -radius), (-radius, +radius, -radius), (+radius, +radius, -radius),
@ -138,32 +112,9 @@ def bbox_from_obj(obj: bpy.types.Object, index: int = 1) -> list:
base = obj.matrix_world base = obj.matrix_world
bbox_corners = [base @ mathutils.Vector(corner) for corner in coords] bbox_corners = [base @ mathutils.Vector(corner) for corner in coords]
vertex_pos = [(point.x, point.y, point.z) for point in bbox_corners] return [(point.x, point.y, point.z)
for point in bbox_corners]
return vertex_pos, vertex_indices
def bbox_from_instance_collection(ic: bpy.types.Object, index: int = 0) -> list:
""" Generate a bounding box for a given instance collection by using its objects
:param ic: target instance collection
:type ic: bpy.types.Object
:param index: indice offset
:type index: int
:return: list of 8*objs points [(x,y,z),...], tuple of 12*objs link between these points [(1,2),...]
"""
vertex_pos = []
vertex_indices = ()
for obj_index, obj in enumerate(ic.instance_collection.objects):
vertex_pos_temp, vertex_indices_temp = bbox_from_obj(obj, index=index+obj_index)
vertex_pos += vertex_pos_temp
vertex_indices += vertex_indices_temp
bbox_corners = [ic.matrix_world @ mathutils.Vector(vertex) for vertex in vertex_pos]
vertex_pos = [(point.x, point.y, point.z) for point in bbox_corners]
return vertex_pos, vertex_indices
def generate_user_camera() -> list: def generate_user_camera() -> list:
""" Generate a basic camera represention of the user point of view """ Generate a basic camera represention of the user point of view
@ -252,13 +203,6 @@ class Widget(object):
""" """
return True return True
def configure_bgl(self):
bgl.glLineWidth(2.)
bgl.glEnable(bgl.GL_DEPTH_TEST)
bgl.glEnable(bgl.GL_BLEND)
bgl.glEnable(bgl.GL_LINE_SMOOTH)
def draw(self): def draw(self):
"""How to draw the widget """How to draw the widget
""" """
@ -312,6 +256,11 @@ class UserFrustumWidget(Widget):
{"pos": positions}, {"pos": positions},
indices=self.indices) indices=self.indices)
bgl.glLineWidth(2.)
bgl.glEnable(bgl.GL_DEPTH_TEST)
bgl.glEnable(bgl.GL_BLEND)
bgl.glEnable(bgl.GL_LINE_SMOOTH)
shader.bind() shader.bind()
shader.uniform_float("color", self.data.get('color')) shader.uniform_float("color", self.data.get('color'))
batch.draw(shader) batch.draw(shader)
@ -323,8 +272,6 @@ class UserSelectionWidget(Widget):
username): username):
self.username = username self.username = username
self.settings = bpy.context.window_manager.session self.settings = bpy.context.window_manager.session
self.current_selection_ids = []
self.current_selected_objects = []
@property @property
def data(self): def data(self):
@ -334,15 +281,6 @@ class UserSelectionWidget(Widget):
else: else:
return None return None
@property
def selected_objects(self):
user_selection = self.data.get('selected_objects')
if self.current_selection_ids != user_selection:
self.current_selected_objects = [find_from_attr("uuid", uid, bpy.data.objects) for uid in user_selection]
self.current_selection_ids = user_selection
return self.current_selected_objects
def poll(self): def poll(self):
if self.data is None: if self.data is None:
return False return False
@ -357,31 +295,49 @@ class UserSelectionWidget(Widget):
self.settings.enable_presence self.settings.enable_presence
def draw(self): def draw(self):
vertex_pos = [] user_selection = self.data.get('selected_objects')
vertex_ind = [] for select_ob in user_selection:
collection_offset = 0 ob = find_from_attr("uuid", select_ob, bpy.data.objects)
for obj_index, obj in enumerate(self.selected_objects): if not ob:
if obj is None: return
continue
obj_index+=collection_offset
if hasattr(obj, 'instance_collection') and obj.instance_collection:
bbox_pos, bbox_ind = bbox_from_instance_collection(obj, index=obj_index)
collection_offset+=len(obj.instance_collection.objects)-1
else :
bbox_pos, bbox_ind = bbox_from_obj(obj, index=obj_index)
vertex_pos += bbox_pos
vertex_ind += bbox_ind
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR') vertex_pos = bbox_from_obj(ob, 1.0)
batch = batch_for_shader( vertex_indices = (
shader, (0, 1), (1, 2), (2, 3), (0, 3),
'LINES', (4, 5), (5, 6), (6, 7), (4, 7),
{"pos": vertex_pos}, (0, 4), (1, 5), (2, 6), (3, 7))
indices=vertex_ind)
if ob.instance_collection:
for obj in ob.instance_collection.objects:
if obj.type == 'MESH' and hasattr(obj, 'bound_box'):
vertex_pos = get_bb_coords_from_obj(obj, instance=ob)
break
elif ob.type == 'EMPTY':
vertex_pos = bbox_from_obj(ob, ob.empty_display_size)
elif ob.type == 'LIGHT':
vertex_pos = bbox_from_obj(ob, ob.data.shadow_soft_size)
elif ob.type == 'LIGHT_PROBE':
vertex_pos = bbox_from_obj(ob, ob.data.influence_distance)
elif ob.type == 'CAMERA':
vertex_pos = bbox_from_obj(ob, ob.data.display_size)
elif hasattr(ob, 'bound_box'):
vertex_indices = (
(0, 1), (1, 2), (2, 3), (0, 3),
(4, 5), (5, 6), (6, 7), (4, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
vertex_pos = get_bb_coords_from_obj(ob)
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
batch = batch_for_shader(
shader,
'LINES',
{"pos": vertex_pos},
indices=vertex_indices)
shader.bind()
shader.uniform_float("color", self.data.get('color'))
batch.draw(shader)
shader.bind()
shader.uniform_float("color", self.data.get('color'))
batch.draw(shader)
class UserNameWidget(Widget): class UserNameWidget(Widget):
draw_type = 'POST_PIXEL' draw_type = 'POST_PIXEL'
@ -425,62 +381,6 @@ class UserNameWidget(Widget):
blf.color(0, color[0], color[1], color[2], color[3]) blf.color(0, color[0], color[1], color[2], color[3])
blf.draw(0, self.username) blf.draw(0, self.username)
class UserModeWidget(Widget):
draw_type = 'POST_PIXEL'
def __init__(
self,
username):
self.username = username
self.settings = bpy.context.window_manager.session
self.preferences = get_preferences()
@property
def data(self):
user = session.online_users.get(self.username)
if user:
return user.get('metadata')
else:
return None
def poll(self):
if self.data is None:
return False
scene_current = self.data.get('scene_current')
mode_current = self.data.get('mode_current')
user_selection = self.data.get('selected_objects')
return (scene_current == bpy.context.scene.name or
mode_current == bpy.context.mode or
self.settings.presence_show_far_user) and \
user_selection and \
self.settings.presence_show_mode and \
self.settings.enable_presence
def draw(self):
user_selection = self.data.get('selected_objects')
area, region, rv3d = view3d_find()
viewport_coord = project_to_viewport(region, rv3d, (0, 0))
obj = find_from_attr("uuid", user_selection[0], bpy.data.objects)
if not obj:
return
mode_current = self.data.get('mode_current')
color = self.data.get('color')
origin_coord = project_to_screen(obj.location)
distance_viewport_object = math.sqrt((viewport_coord[0]-obj.location[0])**2+(viewport_coord[1]-obj.location[1])**2+(viewport_coord[2]-obj.location[2])**2)
if distance_viewport_object > self.preferences.presence_mode_distance :
return
if origin_coord :
blf.position(0, origin_coord[0]+8, origin_coord[1]-15, 0)
blf.size(0, 16, 72)
blf.color(0, color[0], color[1], color[2], color[3])
blf.draw(0, mode_current)
class SessionStatusWidget(Widget): class SessionStatusWidget(Widget):
draw_type = 'POST_PIXEL' draw_type = 'POST_PIXEL'
@ -563,7 +463,6 @@ class DrawFactory(object):
try: try:
for widget in self.widgets.values(): for widget in self.widgets.values():
if widget.draw_type == 'POST_VIEW' and widget.poll(): if widget.draw_type == 'POST_VIEW' and widget.poll():
widget.configure_bgl()
widget.draw() widget.draw()
except Exception as e: except Exception as e:
logging.error( logging.error(
@ -573,7 +472,6 @@ class DrawFactory(object):
try: try:
for widget in self.widgets.values(): for widget in self.widgets.values():
if widget.draw_type == 'POST_PIXEL' and widget.poll(): if widget.draw_type == 'POST_PIXEL' and widget.poll():
widget.configure_bgl()
widget.draw() widget.draw()
except Exception as e: except Exception as e:
logging.error( logging.error(
@ -587,7 +485,6 @@ this.renderer = DrawFactory()
def register(): def register():
this.renderer.register_handlers() this.renderer.register_handlers()
this.renderer.add_widget("session_status", SessionStatusWidget()) this.renderer.add_widget("session_status", SessionStatusWidget())

View File

@ -1,48 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
from replication.constants import STATE_INITIAL
class SessionData():
""" A structure to share easily the current session data across the addon
modules.
This object will completely replace the Singleton lying in replication
interface module.
"""
def __init__(self):
self.repository = None # The current repository
self.remote = None # The active remote
self.server = None
self.applied_updates = []
@property
def state(self):
if self.remote is None:
return STATE_INITIAL
else:
return self.remote.connection_status
def clear(self):
self.remote = None
self.repository = None
self.server = None
self.applied_updates = []
session = SessionData()

View File

@ -27,12 +27,10 @@ from replication.interface import session
from replication import porcelain from replication import porcelain
from . import operators, utils from . import operators, utils
from .presence import (UserFrustumWidget, UserNameWidget, UserModeWidget, UserSelectionWidget, from .presence import (UserFrustumWidget, UserNameWidget, UserSelectionWidget,
generate_user_camera, get_view_matrix, refresh_3d_view, generate_user_camera, get_view_matrix, refresh_3d_view,
refresh_sidebar_view, renderer) refresh_sidebar_view, renderer)
from . import shared_data
this = sys.modules[__name__] this = sys.modules[__name__]
# Registered timers # Registered timers
@ -41,8 +39,7 @@ this.registry = dict()
def is_annotating(context: bpy.types.Context): def is_annotating(context: bpy.types.Context):
""" Check if the annotate mode is enabled """ Check if the annotate mode is enabled
""" """
active_tool = bpy.context.workspace.tools.from_space_view3d_mode('OBJECT', create=False) return bpy.context.workspace.tools.from_space_view3d_mode('OBJECT', create=False).idname == 'builtin.annotate'
return (active_tool and active_tool.idname == 'builtin.annotate')
class Timer(object): class Timer(object):
@ -117,7 +114,6 @@ class ApplyTimer(Timer):
if node_ref.state == FETCHED: if node_ref.state == FETCHED:
try: try:
shared_data.session.applied_updates.append(node)
porcelain.apply(session.repository, node) porcelain.apply(session.repository, node)
except Exception as e: except Exception as e:
logging.error(f"Fail to apply {node_ref.uuid}") logging.error(f"Fail to apply {node_ref.uuid}")
@ -130,58 +126,14 @@ class ApplyTimer(Timer):
porcelain.apply(session.repository, porcelain.apply(session.repository,
parent.uuid, parent.uuid,
force=True) force=True)
if hasattr(impl, 'bl_reload_child') and impl.bl_reload_child:
for dep in node_ref.dependencies:
porcelain.apply(session.repository,
dep,
force=True)
class AnnotationUpdates(Timer):
def __init__(self, timeout=1):
self._annotating = False
self._settings = utils.get_preferences()
super().__init__(timeout)
def execute(self):
if session and session.state == STATE_ACTIVE:
ctx = bpy.context
annotation_gp = ctx.scene.grease_pencil
if annotation_gp and not annotation_gp.uuid:
ctx.scene.update_tag()
# if an annotation exist and is tracked
if annotation_gp and annotation_gp.uuid:
registered_gp = session.repository.graph.get(annotation_gp.uuid)
if is_annotating(bpy.context):
# try to get the right on it
if registered_gp.owner == RP_COMMON:
self._annotating = True
logging.debug(
"Getting the right on the annotation GP")
porcelain.lock(session.repository,
[registered_gp.uuid],
ignore_warnings=True,
affect_dependencies=False)
if registered_gp.owner == self._settings.username:
porcelain.commit(session.repository, annotation_gp.uuid)
porcelain.push(session.repository, 'origin', annotation_gp.uuid)
elif self._annotating:
porcelain.unlock(session.repository,
[registered_gp.uuid],
ignore_warnings=True,
affect_dependencies=False)
self._annotating = False
class DynamicRightSelectTimer(Timer): class DynamicRightSelectTimer(Timer):
def __init__(self, timeout=.1): def __init__(self, timeout=.1):
super().__init__(timeout) super().__init__(timeout)
self._last_selection = set() self._last_selection = []
self._user = None self._user = None
self._annotating = False
def execute(self): def execute(self):
settings = utils.get_preferences() settings = utils.get_preferences()
@ -192,46 +144,83 @@ class DynamicRightSelectTimer(Timer):
self._user = session.online_users.get(settings.username) self._user = session.online_users.get(settings.username)
if self._user: if self._user:
current_selection = set(utils.get_selected_objects( ctx = bpy.context
bpy.context.scene, annotation_gp = ctx.scene.grease_pencil
bpy.data.window_managers['WinMan'].windows[0].view_layer
))
if current_selection != self._last_selection:
to_lock = list(current_selection.difference(self._last_selection))
to_release = list(self._last_selection.difference(current_selection))
instances_to_lock = list()
for node_id in to_lock: if annotation_gp and not annotation_gp.uuid:
node = session.repository.graph.get(node_id) ctx.scene.update_tag()
instance_mode = node.data.get('instance_type')
if instance_mode and instance_mode == 'COLLECTION': # if an annotation exist and is tracked
to_lock.remove(node_id) if annotation_gp and annotation_gp.uuid:
instances_to_lock.append(node_id) registered_gp = session.repository.graph.get(annotation_gp.uuid)
if instances_to_lock: if is_annotating(bpy.context):
try: # try to get the right on it
if registered_gp.owner == RP_COMMON:
self._annotating = True
logging.debug(
"Getting the right on the annotation GP")
porcelain.lock(session.repository, porcelain.lock(session.repository,
instances_to_lock, registered_gp.uuid,
ignore_warnings=True, ignore_warnings=True,
affect_dependencies=False) affect_dependencies=False)
except NonAuthorizedOperationError as e:
logging.warning(e)
if to_release: if registered_gp.owner == settings.username:
try: gp_node = session.repository.graph.get(annotation_gp.uuid)
porcelain.unlock(session.repository, porcelain.commit(session.repository, gp_node.uuid)
to_release, porcelain.push(session.repository, 'origin', gp_node.uuid)
ignore_warnings=True,
affect_dependencies=True) elif self._annotating:
except NonAuthorizedOperationError as e: porcelain.unlock(session.repository,
logging.warning(e) registered_gp.uuid,
if to_lock: ignore_warnings=True,
try: affect_dependencies=False)
porcelain.lock(session.repository,
to_lock, current_selection = utils.get_selected_objects(
ignore_warnings=True, bpy.context.scene,
affect_dependencies=True) bpy.data.window_managers['WinMan'].windows[0].view_layer
except NonAuthorizedOperationError as e: )
logging.warning(e) if current_selection != self._last_selection:
obj_common = [
o for o in self._last_selection if o not in current_selection]
obj_ours = [
o for o in current_selection if o not in self._last_selection]
# change old selection right to common
for obj in obj_common:
node = session.repository.graph.get(obj)
if node and (node.owner == settings.username or node.owner == RP_COMMON):
recursive = True
if node.data and 'instance_type' in node.data.keys():
recursive = node.data['instance_type'] != 'COLLECTION'
try:
porcelain.unlock(session.repository,
node.uuid,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(
f"Not authorized to change {node} owner")
# change new selection to our
for obj in obj_ours:
node = session.repository.graph.get(obj)
if node and node.owner == RP_COMMON:
recursive = True
if node.data and 'instance_type' in node.data.keys():
recursive = node.data['instance_type'] != 'COLLECTION'
try:
porcelain.lock(session.repository,
node.uuid,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(
f"Not authorized to change {node} owner")
else:
return
self._last_selection = current_selection self._last_selection = current_selection
@ -245,23 +234,23 @@ class DynamicRightSelectTimer(Timer):
# Fix deselection until right managment refactoring (with Roles concepts) # Fix deselection until right managment refactoring (with Roles concepts)
if len(current_selection) == 0 : if len(current_selection) == 0 :
owned_keys = [k for k, v in session.repository.graph.items() if v.owner==settings.username] owned_keys = [k for k, v in session.repository.graph.items() if v.owner==settings.username]
if owned_keys: for key in owned_keys:
node = session.repository.graph.get(key)
try: try:
porcelain.unlock(session.repository, porcelain.unlock(session.repository,
owned_keys, key,
ignore_warnings=True, ignore_warnings=True,
affect_dependencies=True) affect_dependencies=True)
except NonAuthorizedOperationError as e: except NonAuthorizedOperationError:
logging.warning(e) logging.warning(
f"Not authorized to change {key} owner")
# Objects selectability
for obj in bpy.data.objects: for obj in bpy.data.objects:
object_uuid = getattr(obj, 'uuid', None) object_uuid = getattr(obj, 'uuid', None)
if object_uuid: if object_uuid:
is_selectable = not session.repository.is_node_readonly(object_uuid) is_selectable = not session.repository.is_node_readonly(object_uuid)
if obj.hide_select != is_selectable: if obj.hide_select != is_selectable:
obj.hide_select = is_selectable obj.hide_select = is_selectable
shared_data.session.applied_updates.append(object_uuid)
class ClientUpdate(Timer): class ClientUpdate(Timer):
@ -311,8 +300,7 @@ class ClientUpdate(Timer):
settings.client_color.b, settings.client_color.b,
1), 1),
'frame_current': bpy.context.scene.frame_current, 'frame_current': bpy.context.scene.frame_current,
'scene_current': scene_current, 'scene_current': scene_current
'mode_current': bpy.context.mode
} }
porcelain.update_user_metadata(session.repository, metadata) porcelain.update_user_metadata(session.repository, metadata)
@ -326,9 +314,6 @@ class ClientUpdate(Timer):
local_user_metadata['view_matrix'] = get_view_matrix( local_user_metadata['view_matrix'] = get_view_matrix(
) )
porcelain.update_user_metadata(session.repository, local_user_metadata) porcelain.update_user_metadata(session.repository, local_user_metadata)
elif bpy.context.mode != local_user_metadata['mode_current']:
local_user_metadata['mode_current'] = bpy.context.mode
porcelain.update_user_metadata(session.repository, local_user_metadata)
class SessionStatusUpdate(Timer): class SessionStatusUpdate(Timer):
@ -356,7 +341,6 @@ class SessionUserSync(Timer):
renderer.remove_widget(f"{user.username}_cam") renderer.remove_widget(f"{user.username}_cam")
renderer.remove_widget(f"{user.username}_select") renderer.remove_widget(f"{user.username}_select")
renderer.remove_widget(f"{user.username}_name") renderer.remove_widget(f"{user.username}_name")
renderer.remove_widget(f"{user.username}_mode")
ui_users.remove(index) ui_users.remove(index)
break break
@ -372,8 +356,6 @@ class SessionUserSync(Timer):
f"{user}_select", UserSelectionWidget(user)) f"{user}_select", UserSelectionWidget(user))
renderer.add_widget( renderer.add_widget(
f"{user}_name", UserNameWidget(user)) f"{user}_name", UserNameWidget(user))
renderer.add_widget(
f"{user}_mode", UserModeWidget(user))
class MainThreadExecutor(Timer): class MainThreadExecutor(Timer):

View File

@ -107,7 +107,7 @@ class SESSION_PT_settings(bpy.types.Panel):
row = row.grid_flow(row_major=True, columns=0, even_columns=True, even_rows=False, align=True) row = row.grid_flow(row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
row.prop(settings.sync_flags, "sync_render_settings",text="",icon_only=True, icon='SCENE') row.prop(settings.sync_flags, "sync_render_settings",text="",icon_only=True, icon='SCENE')
row.prop(settings.sync_flags, "sync_during_editmode", text="",icon_only=True, icon='EDITMODE_HLT') row.prop(settings.sync_flags, "sync_during_editmode", text="",icon_only=True, icon='EDITMODE_HLT')
row.prop(settings.sync_flags, "sync_active_camera", text="",icon_only=True, icon='VIEW_CAMERA') row.prop(settings.sync_flags, "sync_active_camera", text="",icon_only=True, icon='OBJECT_DATAMODE')
row= layout.row() row= layout.row()
@ -343,10 +343,9 @@ class SESSION_PT_user(bpy.types.Panel):
box = row.box() box = row.box()
split = box.split(factor=0.35) split = box.split(factor=0.35)
split.label(text="user") split.label(text="user")
split = split.split(factor=0.3) split = split.split(factor=0.5)
split.label(text="mode")
split.label(text="frame")
split.label(text="location") split.label(text="location")
split.label(text="frame")
split.label(text="ping") split.label(text="ping")
row = layout.row() row = layout.row()
@ -384,8 +383,6 @@ class SESSION_UL_users(bpy.types.UIList):
ping = '-' ping = '-'
frame_current = '-' frame_current = '-'
scene_current = '-' scene_current = '-'
mode_current = '-'
mode_icon = 'BLANK1'
status_icon = 'BLANK1' status_icon = 'BLANK1'
if session: if session:
user = session.online_users.get(item.username) user = session.online_users.get(item.username)
@ -395,45 +392,13 @@ class SESSION_UL_users(bpy.types.UIList):
if metadata and 'frame_current' in metadata: if metadata and 'frame_current' in metadata:
frame_current = str(metadata.get('frame_current','-')) frame_current = str(metadata.get('frame_current','-'))
scene_current = metadata.get('scene_current','-') scene_current = metadata.get('scene_current','-')
mode_current = metadata.get('mode_current','-')
if mode_current == "OBJECT" :
mode_icon = "OBJECT_DATAMODE"
elif mode_current == "EDIT_MESH" :
mode_icon = "EDITMODE_HLT"
elif mode_current == 'EDIT_CURVE':
mode_icon = "CURVE_DATA"
elif mode_current == 'EDIT_SURFACE':
mode_icon = "SURFACE_DATA"
elif mode_current == 'EDIT_TEXT':
mode_icon = "FILE_FONT"
elif mode_current == 'EDIT_ARMATURE':
mode_icon = "ARMATURE_DATA"
elif mode_current == 'EDIT_METABALL':
mode_icon = "META_BALL"
elif mode_current == 'EDIT_LATTICE':
mode_icon = "LATTICE_DATA"
elif mode_current == 'POSE':
mode_icon = "POSE_HLT"
elif mode_current == 'SCULPT':
mode_icon = "SCULPTMODE_HLT"
elif mode_current == 'PAINT_WEIGHT':
mode_icon = "WPAINT_HLT"
elif mode_current == 'PAINT_VERTEX':
mode_icon = "VPAINT_HLT"
elif mode_current == 'PAINT_TEXTURE':
mode_icon = "TPAINT_HLT"
elif mode_current == 'PARTICLE':
mode_icon = "PARTICLES"
elif mode_current == 'PAINT_GPENCIL' or mode_current =='EDIT_GPENCIL' or mode_current =='SCULPT_GPENCIL' or mode_current =='WEIGHT_GPENCIL' or mode_current =='VERTEX_GPENCIL':
mode_icon = "GREASEPENCIL"
if user['admin']: if user['admin']:
status_icon = 'FAKE_USER_ON' status_icon = 'FAKE_USER_ON'
split = layout.split(factor=0.35) split = layout.split(factor=0.35)
split.label(text=item.username, icon=status_icon) split.label(text=item.username, icon=status_icon)
split = split.split(factor=0.3) split = split.split(factor=0.5)
split.label(icon=mode_icon)
split.label(text=frame_current)
split.label(text=scene_current) split.label(text=scene_current)
split.label(text=frame_current)
split.label(text=ping) split.label(text=ping)
@ -460,29 +425,20 @@ class SESSION_PT_presence(bpy.types.Panel):
settings = context.window_manager.session settings = context.window_manager.session
pref = get_preferences() pref = get_preferences()
layout.active = settings.enable_presence layout.active = settings.enable_presence
row = layout.row()
row = row.grid_flow(row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
row.prop(settings, "presence_show_selected",text="",icon_only=True, icon='CUBE')
row.prop(settings, "presence_show_user", text="",icon_only=True, icon='CAMERA_DATA')
row.prop(settings, "presence_show_mode", text="",icon_only=True, icon='OBJECT_DATAMODE')
row.prop(settings, "presence_show_far_user", text="",icon_only=True, icon='SCENE_DATA')
col = layout.column() col = layout.column()
if settings.presence_show_mode :
row = col.column()
row.prop(pref, "presence_mode_distance", expand=True)
col.prop(settings, "presence_show_session_status") col.prop(settings, "presence_show_session_status")
if settings.presence_show_session_status : row = col.column()
row = col.column() row.active = settings.presence_show_session_status
row.active = settings.presence_show_session_status row.prop(pref, "presence_hud_scale", expand=True)
row.prop(pref, "presence_hud_scale", expand=True) row = col.column(align=True)
row = col.column(align=True) row.active = settings.presence_show_session_status
row.active = settings.presence_show_session_status row.prop(pref, "presence_hud_hpos", expand=True)
row.prop(pref, "presence_hud_hpos", expand=True) row.prop(pref, "presence_hud_vpos", expand=True)
row.prop(pref, "presence_hud_vpos", expand=True) col.prop(settings, "presence_show_selected")
col.prop(settings, "presence_show_user")
row = layout.column()
row.active = settings.presence_show_user
row.prop(settings, "presence_show_far_user")
def draw_property(context, parent, property_uuid, level=0): def draw_property(context, parent, property_uuid, level=0):
settings = get_preferences() settings = get_preferences()
@ -599,15 +555,20 @@ class SESSION_PT_repository(bpy.types.Panel):
# Properties # Properties
owned_nodes = [k for k, v in session.repository.graph.items() if v.owner==settings.username] owned_nodes = [k for k, v in session.repository.graph.items() if v.owner==settings.username]
filtered_node = owned_nodes if runtime_settings.filter_owned else list(session.repository.graph.keys()) filtered_node = owned_nodes if runtime_settings.filter_owned else session.repository.graph.keys()
if runtime_settings.filter_name: if runtime_settings.filter_name:
filtered_node = [n for n in filtered_node if runtime_settings.filter_name.lower() in session.repository.graph.get(n).data.get('name').lower()] for node_id in filtered_node:
node_instance = session.repository.graph.get(node_id)
name = node_instance.data.get('name')
if runtime_settings.filter_name not in name:
filtered_node.remove(node_id)
if filtered_node: if filtered_node:
col = layout.column(align=True) col = layout.column(align=True)
for key in filtered_node: for key in filtered_node:
draw_property(context, col, key) draw_property(context, col, key)
else: else:
layout.row().label(text="Empty") layout.row().label(text="Empty")
@ -629,32 +590,23 @@ class VIEW3D_PT_overlay_session(bpy.types.Panel):
def draw(self, context): def draw(self, context):
layout = self.layout layout = self.layout
settings = context.window_manager.session view = context.space_data
pref = get_preferences() overlay = view.overlay
layout.active = settings.enable_presence display_all = overlay.show_overlays
row = layout.row()
row = row.grid_flow(row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
row.prop(settings, "presence_show_selected",text="",icon_only=True, icon='CUBE')
row.prop(settings, "presence_show_user", text="",icon_only=True, icon='CAMERA_DATA')
row.prop(settings, "presence_show_mode", text="",icon_only=True, icon='OBJECT_DATAMODE')
row.prop(settings, "presence_show_far_user", text="",icon_only=True, icon='SCENE_DATA')
col = layout.column() col = layout.column()
if settings.presence_show_mode :
row = col.column()
row.prop(pref, "presence_mode_distance", expand=True)
row = col.row(align=True)
settings = context.window_manager.session
layout.active = settings.enable_presence
col = layout.column()
col.prop(settings, "presence_show_session_status") col.prop(settings, "presence_show_session_status")
if settings.presence_show_session_status : col.prop(settings, "presence_show_selected")
row = col.column() col.prop(settings, "presence_show_user")
row.active = settings.presence_show_session_status
row.prop(pref, "presence_hud_scale", expand=True)
row = col.column(align=True)
row.active = settings.presence_show_session_status
row.prop(pref, "presence_hud_hpos", expand=True)
row.prop(pref, "presence_hud_vpos", expand=True)
row = layout.column()
row.active = settings.presence_show_user
row.prop(settings, "presence_show_far_user")
classes = ( classes = (
SESSION_UL_users, SESSION_UL_users,

View File

@ -38,14 +38,6 @@ from replication.constants import (STATE_ACTIVE, STATE_AUTH,
STATE_LOBBY, STATE_LOBBY,
CONNECTING) CONNECTING)
CLEARED_DATABLOCKS = ['actions', 'armatures', 'cache_files', 'cameras',
'collections', 'curves', 'filepath', 'fonts',
'grease_pencils', 'images', 'lattices', 'libraries',
'lightprobes', 'lights', 'linestyles', 'masks',
'materials', 'meshes', 'metaballs', 'movieclips',
'node_groups', 'objects', 'paint_curves', 'particles',
'scenes', 'shape_keys', 'sounds', 'speakers', 'texts',
'textures', 'volumes', 'worlds']
def find_from_attr(attr_name, attr_value, list): def find_from_attr(attr_name, attr_value, list):
for item in list: for item in list:
@ -109,25 +101,23 @@ def get_state_str(state):
def clean_scene(): def clean_scene():
for type_name in CLEARED_DATABLOCKS: to_delete = [f for f in dir(bpy.data) if f not in ['brushes', 'palettes']]
sub_collection_to_avoid = [ for type_name in to_delete:
bpy.data.linestyles.get('LineStyle'), try:
bpy.data.materials.get('Dots Stroke') sub_collection_to_avoid = [bpy.data.linestyles['LineStyle'], bpy.data.materials['Dots Stroke']]
] type_collection = getattr(bpy.data, type_name)
items_to_remove = [i for i in type_collection if i not in sub_collection_to_avoid]
type_collection = getattr(bpy.data, type_name) for item in items_to_remove:
items_to_remove = [i for i in type_collection if i not in sub_collection_to_avoid] try:
for item in items_to_remove: type_collection.remove(item)
try: except:
type_collection.remove(item) continue
logging.info(item.name) except:
except: continue
continue
# Clear sequencer # Clear sequencer
bpy.context.scene.sequence_editor_clear() bpy.context.scene.sequence_editor_clear()
def get_selected_objects(scene, active_view_layer): def get_selected_objects(scene, active_view_layer):
return [obj.uuid for obj in scene.objects if obj.select_get(view_layer=active_view_layer)] return [obj.uuid for obj in scene.objects if obj.select_get(view_layer=active_view_layer)]

View File

@ -8,7 +8,7 @@ from multi_user.bl_types.bl_material import BlMaterial
def test_material_nodes(clear_blend): def test_material_nodes(clear_blend):
nodes_types = [node.bl_rna.identifier for node in bpy.types.ShaderNode.__subclasses__()] nodes_types = [node.bl_rna.identifier for node in bpy.types.ShaderNode.__subclasses__()] # Faire un peu comme ici
datablock = bpy.data.materials.new("test") datablock = bpy.data.materials.new("test")
datablock.use_nodes = True datablock.use_nodes = True

View File

@ -11,9 +11,8 @@ from multi_user.utils import get_preferences
def test_scene(clear_blend): def test_scene(clear_blend):
get_preferences().sync_flags.sync_render_settings = True get_preferences().sync_flags.sync_render_settings = True
datablock = bpy.data.scenes.new("toto") # datablock = bpy.data.scenes.new("toto") # TODO: trouver datablock -> active compositing 'Use nodes'
datablock.timeline_markers.new('toto', frame=10) datablock = bpy.data.scenes["Scene"].use_nodes
datablock.timeline_markers.new('tata', frame=1)
datablock.view_settings.use_curve_mapping = True datablock.view_settings.use_curve_mapping = True
# Test # Test
implementation = BlScene() implementation = BlScene()