# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import asyncio
import copy
import gzip
import logging
import os
import queue
import random
import shutil
import string
import sys
import time
from datetime import datetime
from operator import itemgetter
from pathlib import Path
from queue import Queue
from time import gmtime, strftime

from numpy import interp

try:
    import _pickle as pickle
except ImportError:
    import pickle

import bpy
import bmesh
import mathutils
from bpy.app.handlers import persistent
from bpy_extras.io_utils import ExportHelper, ImportHelper
from replication.constants import (COMMITED, FETCHED, RP_COMMON, STATE_ACTIVE,
                                   STATE_INITIAL, STATE_SYNCING, UP)
from replication.data import ReplicatedDataFactory
from replication.exception import NonAuthorizedOperationError, ContextError
from replication.interface import session

from . import bl_types, environment, timers, ui, utils
from .presence import SessionStatusWidget, renderer, view3d_find
from .timers import registry

background_execution_queue = Queue()
deleyables = []
stop_modal_executor = False
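
# `deleyables` collects the timers spawned for a session so they can be
# unregistered when it ends, `stop_modal_executor` asks the armature modal
# operator to stop, and `background_execution_queue` is drained on Blender's
# main thread by timers.MainThreadExecutor.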
CLEARED_DATABLOCKS = ['actions', 'armatures', 'cache_files', 'cameras',
                      'collections', 'curves', 'fonts',
'grease_pencils', 'images', 'lattices', 'libraries',
'lightprobes', 'lights', 'linestyles', 'masks',
'materials', 'meshes', 'metaballs', 'movieclips',
'node_groups', 'objects', 'paint_curves', 'particles',
'scenes', 'shape_keys', 'sounds', 'speakers', 'texts',
'textures', 'volumes', 'worlds']
PERSISTENT_DATABLOCKS = ['LineStyle', 'Dots Stroke', 'replay_action']
def clean_scene(ignored_datablocks: list = None):
    """
    Delete all datablocks of the scene except PERSISTENT_DATABLOCKS and the
    ones listed in ignored_datablocks.
    """
    if ignored_datablocks:
        PERSISTENT_DATABLOCKS.extend(ignored_datablocks)

    # Keep the last scene to avoid triggering a runtime error
    PERSISTENT_DATABLOCKS.append(bpy.data.scenes[0].name)
for type_name in CLEARED_DATABLOCKS:
type_collection = getattr(bpy.data, type_name)
for datablock in type_collection:
if datablock.name in PERSISTENT_DATABLOCKS:
logging.debug(f"Skipping {datablock.name}")
continue
else:
logging.debug(f"Removing {datablock.name}")
type_collection.remove(datablock)
# Clear sequencer
bpy.context.scene.sequence_editor_clear()
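
# clean_scene() is used below when loading a replay snapshot, e.g.
# clean_scene(ignored_datablocks=['multiuser_timelapse', ...]) keeps the
# timelapse helpers alive while wiping the rest of the file.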
def get_bb_coords_from_obj(object: bpy.types.Object, instance: bpy.types.Object = None) -> list:
    """ Generate a bounding box in world coordinates from the object's bound_box
    :param object: target object
    :type object: bpy.types.Object
    :param instance: optional instance
:type instance: bpy.types.Object
:return: list of 8 points [(x,y,z),...]
"""
base = object.matrix_world
if instance:
scale = mathutils.Matrix.Diagonal(object.matrix_world.to_scale())
base = instance.matrix_world @ scale.to_4x4()
bbox_corners = [base @ mathutils.Vector(
corner) for corner in object.bound_box]
return [(point.x, point.y, point.z) for point in bbox_corners]
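
# When `instance` is given (e.g. the empty that instances a collection), the
# bound box of the source object is scaled by its own matrix and transformed
# by the instance's world matrix instead of the object's.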
def bbox_from_obj(obj: bpy.types.Object, index: int = 1) -> list:
""" Generate a bounding box for a given object by using its world matrix
:param obj: target object
:type obj: bpy.types.Object
    :param index: index offset
:type index: int
:return: list of 8 points [(x,y,z),...], list of 12 link between these points [(1,2),...]
"""
radius = 1.0 # Radius of the bounding box
index = 8*index
vertex_indices = (
(0+index, 1+index), (0+index, 2+index), (1+index, 3+index), (2+index, 3+index),
(4+index, 5+index), (4+index, 6+index), (5+index, 7+index), (6+index, 7+index),
(0+index, 4+index), (1+index, 5+index), (2+index, 6+index), (3+index, 7+index))
if obj.type == 'EMPTY':
radius = obj.empty_display_size
elif obj.type == 'LIGHT':
radius = obj.data.shadow_soft_size
elif obj.type == 'LIGHT_PROBE':
radius = obj.data.influence_distance
elif obj.type == 'CAMERA':
radius = obj.data.display_size
elif hasattr(obj, 'bound_box'):
vertex_indices = (
(0+index, 1+index), (1+index, 2+index),
(2+index, 3+index), (0+index, 3+index),
(4+index, 5+index), (5+index, 6+index),
(6+index, 7+index), (4+index, 7+index),
(0+index, 4+index), (1+index, 5+index),
(2+index, 6+index), (3+index, 7+index))
vertex_pos = get_bb_coords_from_obj(obj)
return vertex_pos, vertex_indices
coords = [
(-radius, -radius, -radius), (+radius, -radius, -radius),
(-radius, +radius, -radius), (+radius, +radius, -radius),
(-radius, -radius, +radius), (+radius, -radius, +radius),
(-radius, +radius, +radius), (+radius, +radius, +radius)]
base = obj.matrix_world
bbox_corners = [base @ mathutils.Vector(corner) for corner in coords]
vertex_pos = [(point.x, point.y, point.z) for point in bbox_corners]
return vertex_pos, vertex_indices
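
# Note: the `index` offset shifts the edge indices by blocks of 8, so the boxes
# of several objects can be concatenated into one vertex/edge list (index=0
# references vertices 0..7, index=1 references 8..15, and so on).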
def draw_user(username, metadata, radius=0.01, intensity=10.0):
"""
Generate a mesh representation of a given user frustum and
sight of view.
"""
view_corners = metadata.get('view_corners')
color = metadata.get('color', (1,1,1,0))
objects = metadata.get('selected_objects', None)
scene = metadata.get('scene_current', bpy.context.scene.name)
user_collection = bpy.data.collections.new(username)
# User Color
user_mat = bpy.data.materials.new(username)
user_mat.use_nodes = True
nodes = user_mat.node_tree.nodes
nodes.remove(nodes['Principled BSDF'])
emission_node = nodes.new('ShaderNodeEmission')
emission_node.inputs['Color'].default_value = color
emission_node.inputs['Strength'].default_value = intensity
output_node = nodes['Material Output']
user_mat.node_tree.links.new(
emission_node.outputs['Emission'], output_node.inputs['Surface'])
# Generate camera mesh
camera_vertices = view_corners[:4]
camera_vertices.append(view_corners[6])
camera_mesh = bpy.data.meshes.new(f"{username}_camera")
camera_obj = bpy.data.objects.new(f"{username}_camera", camera_mesh)
frustum_bm = bmesh.new()
frustum_bm.from_mesh(camera_mesh)
for p in camera_vertices:
frustum_bm.verts.new(p)
frustum_bm.verts.ensure_lookup_table()
frustum_bm.edges.new((frustum_bm.verts[0], frustum_bm.verts[2]))
frustum_bm.edges.new((frustum_bm.verts[2], frustum_bm.verts[1]))
frustum_bm.edges.new((frustum_bm.verts[1], frustum_bm.verts[3]))
frustum_bm.edges.new((frustum_bm.verts[3], frustum_bm.verts[0]))
frustum_bm.edges.new((frustum_bm.verts[0], frustum_bm.verts[4]))
frustum_bm.edges.new((frustum_bm.verts[2], frustum_bm.verts[4]))
frustum_bm.edges.new((frustum_bm.verts[1], frustum_bm.verts[4]))
frustum_bm.edges.new((frustum_bm.verts[3], frustum_bm.verts[4]))
frustum_bm.edges.ensure_lookup_table()
frustum_bm.to_mesh(camera_mesh)
frustum_bm.free() # free and prevent further access
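    # The SKIN modifier added below turns the wire edges into renderable tubes
    # whose thickness comes from the per-vertex radius set right after.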
camera_obj.modifiers.new("wireframe", "SKIN")
camera_obj.data.skin_vertices[0].data[0].use_root = True
for v in camera_mesh.skin_vertices[0].data:
v.radius = [radius, radius]
camera_mesh.materials.append(user_mat)
user_collection.objects.link(camera_obj)
# Generate sight mesh
sight_mesh = bpy.data.meshes.new(f"{username}_sight")
sight_obj = bpy.data.objects.new(f"{username}_sight", sight_mesh)
sight_verts = view_corners[4:6]
sight_bm = bmesh.new()
sight_bm.from_mesh(sight_mesh)
for p in sight_verts:
sight_bm.verts.new(p)
sight_bm.verts.ensure_lookup_table()
sight_bm.edges.new((sight_bm.verts[0], sight_bm.verts[1]))
sight_bm.edges.ensure_lookup_table()
sight_bm.to_mesh(sight_mesh)
sight_bm.free()
sight_obj.modifiers.new("wireframe", "SKIN")
sight_obj.data.skin_vertices[0].data[0].use_root = True
for v in sight_mesh.skin_vertices[0].data:
v.radius = [radius, radius]
sight_mesh.materials.append(user_mat)
user_collection.objects.link(sight_obj)
# Draw selected objects
if objects:
for o in list(objects):
instance = bl_types.bl_datablock.get_datablock_from_uuid(o, None)
if instance:
bbox_mesh = bpy.data.meshes.new(f"{instance.name}_bbox")
bbox_obj = bpy.data.objects.new(
f"{instance.name}_bbox", bbox_mesh)
bbox_verts, bbox_ind = bbox_from_obj(instance, index=0)
bbox_bm = bmesh.new()
bbox_bm.from_mesh(bbox_mesh)
for p in bbox_verts:
bbox_bm.verts.new(p)
bbox_bm.verts.ensure_lookup_table()
for e in bbox_ind:
bbox_bm.edges.new(
(bbox_bm.verts[e[0]], bbox_bm.verts[e[1]]))
bbox_bm.to_mesh(bbox_mesh)
bbox_bm.free()
bpy.data.collections[username].objects.link(bbox_obj)
bbox_obj.modifiers.new("wireframe", "SKIN")
bbox_obj.data.skin_vertices[0].data[0].use_root = True
for v in bbox_mesh.skin_vertices[0].data:
v.radius = [radius, radius]
bbox_mesh.materials.append(user_mat)
bpy.data.scenes[scene].collection.children.link(user_collection)
def session_callback(name):
    """ Session callback wrapper

    This allows encapsulating session callbacks in background_execution_queue
    so that callbacks are executed from the main thread.
    """
def func_wrapper(func):
@session.register(name)
def add_background_task(**kwargs):
background_execution_queue.put((func, kwargs))
return add_background_task
return func_wrapper
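
# Usage sketch:
#   @session_callback('on_connection')
#   def my_handler(**kwargs):
#       ...
# registers a thin wrapper on the replication session; when the event fires,
# the handler and its kwargs are queued and later run by timers.MainThreadExecutor.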
@session_callback('on_connection')
def initialize_session():
    """Session connection init handler
    """
    logging.info("Initializing the scene")
    settings = utils.get_preferences()
    runtime_settings = bpy.context.window_manager.session

    # Step 1: Construct nodes
    logging.info("Constructing nodes")
    for node in session._graph.list_ordered():
        node_ref = session.get(uuid=node)
        if node_ref is None:
            logging.error(f"Can't construct node {node}")
        elif node_ref.state == FETCHED:
            node_ref.resolve()

    # Step 2: Load nodes
    logging.info("Loading nodes")
    for node in session._graph.list_ordered():
        node_ref = session.get(uuid=node)
        if node_ref is None:
            logging.error(f"Can't load node {node}")
        elif node_ref.state == FETCHED:
            node_ref.apply()

    # Step 3: Register blender timers
    logging.info("Registering timers")
    for d in deleyables:
        d.register()

    bpy.ops.session.apply_armature_operator('INVOKE_DEFAULT')

    # Step 4: Clear the undo history
    utils.flush_history()

    # Step 5: Launch depsgraph update handling
    bpy.app.handlers.depsgraph_update_post.append(depsgraph_evaluation)
@session_callback('on_exit')
def on_connection_end(reason="none"):
    """Session connection finished handler
    """
    global deleyables, stop_modal_executor
    settings = utils.get_preferences()

    # Step 1: Unregister blender timers
    for d in deleyables:
        try:
            d.unregister()
        except:
            continue
    deleyables.clear()

    stop_modal_executor = True

    # Step 2: Unregister the depsgraph evaluation handler
    if depsgraph_evaluation in bpy.app.handlers.depsgraph_update_post:
        bpy.app.handlers.depsgraph_update_post.remove(depsgraph_evaluation)

    # Step 3: Remove the file logging handler
    logger = logging.getLogger()
    for handler in logger.handlers:
        if isinstance(handler, logging.FileHandler):
            logger.removeHandler(handler)

    if reason != "user":
        bpy.ops.session.notify('INVOKE_DEFAULT', message=f"Disconnected from session. Reason: {reason}. ")
# OPERATORS
class SessionStartOperator(bpy.types.Operator):
bl_idname = "session.start"
bl_label = "start"
    bl_description = "Connect to or host a multi-user session"
host: bpy.props.BoolProperty(default=False)
@classmethod
def poll(cls, context):
return True
def execute(self, context):
global deleyables
settings = utils.get_preferences()
runtime_settings = context.window_manager.session
users = bpy.data.window_managers['WinMan'].online_users
admin_pass = runtime_settings.password
users.clear()
deleyables.clear()
logger = logging.getLogger()
if len(logger.handlers) == 1:
formatter = logging.Formatter(
fmt='%(asctime)s CLIENT %(levelname)-8s %(message)s',
datefmt='%H:%M:%S'
)
start_time = datetime.now().strftime('%Y_%m_%d_%H-%M-%S')
log_directory = os.path.join(
settings.cache_directory,
f"multiuser_{start_time}.log")
os.makedirs(settings.cache_directory, exist_ok=True)
handler = logging.FileHandler(log_directory, mode='w')
logger.addHandler(handler)
for handler in logger.handlers:
if isinstance(handler, logging.NullHandler):
continue
handler.setFormatter(formatter)
bpy_factory = ReplicatedDataFactory()
supported_bl_types = []
# init the factory with supported types
for type in bl_types.types_to_register():
type_module = getattr(bl_types, type)
name = [e.capitalize() for e in type.split('_')[1:]]
type_impl_name = 'Bl'+''.join(name)
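            # e.g. the module 'bl_armature' exposes the class 'BlArmature'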
type_module_class = getattr(type_module, type_impl_name)
supported_bl_types.append(type_module_class.bl_id)
if type_impl_name not in settings.supported_datablocks:
logging.info(f"{type_impl_name} not found, \
regenerate type settings...")
settings.generate_supported_types()
type_local_config = settings.supported_datablocks[type_impl_name]
bpy_factory.register_type(
type_module_class.bl_class,
type_module_class,
check_common=type_module_class.bl_check_common)
        # bpy.app.binary_path_python was deprecated in Blender 2.91 (the
        # bundled interpreter is reported by sys.executable instead), so
        # compare the full version tuple rather than the minor version alone.
        if bpy.app.version >= (2, 91, 0):
            python_binary_path = sys.executable
        else:
            python_binary_path = bpy.app.binary_path_python
session.configure(
factory=bpy_factory,
python_path=python_binary_path,
external_update_handling=True)
# Host a session
if self.host:
if settings.init_method == 'EMPTY':
utils.clean_scene()
runtime_settings.is_host = True
runtime_settings.internet_ip = environment.get_ip()
try:
for scene in bpy.data.scenes:
session.add(scene)
session.host(
id=settings.username,
port=settings.port,
ipc_port=settings.ipc_port,
timeout=settings.connection_timeout,
password=admin_pass,
cache_directory=settings.cache_directory,
server_log_level=logging.getLevelName(
logging.getLogger().level),
)
except Exception as e:
self.report({'ERROR'}, repr(e))
logging.error(f"Error: {e}")
import traceback
traceback.print_exc()
# Join a session
else:
if not runtime_settings.admin:
utils.clean_scene()
# regular session, no password needed
admin_pass = None
try:
session.connect(
id=settings.username,
address=settings.ip,
port=settings.port,
ipc_port=settings.ipc_port,
timeout=settings.connection_timeout,
password=admin_pass
)
except Exception as e:
self.report({'ERROR'}, str(e))
logging.error(str(e))
# Background client updates service
deleyables.append(timers.ClientUpdate())
deleyables.append(timers.DynamicRightSelectTimer())
deleyables.append(timers.ApplyTimer(timeout=settings.depsgraph_update_rate))
# deleyables.append(timers.PushTimer(
# queue=stagging,
# timeout=settings.depsgraph_update_rate
# ))
session_update = timers.SessionStatusUpdate()
session_user_sync = timers.SessionUserSync()
session_background_executor = timers.MainThreadExecutor(
execution_queue=background_execution_queue)
session_update.register()
session_user_sync.register()
session_background_executor.register()
deleyables.append(session_background_executor)
deleyables.append(session_update)
deleyables.append(session_user_sync)
self.report(
{'INFO'},
f"connecting to tcp://{settings.ip}:{settings.port}")
return {"FINISHED"}
class SessionInitOperator(bpy.types.Operator):
bl_idname = "session.init"
    bl_label = "Init session repository from"
bl_description = "Init the current session"
bl_options = {"REGISTER"}
init_method: bpy.props.EnumProperty(
name='init_method',
description='Init repo',
items={
('EMPTY', 'an empty scene', 'start empty'),
('BLEND', 'current scenes', 'use current scenes')},
default='BLEND')
@classmethod
def poll(cls, context):
return True
def draw(self, context):
layout = self.layout
col = layout.column()
col.prop(self, 'init_method', text="")
def invoke(self, context, event):
wm = context.window_manager
return wm.invoke_props_dialog(self)
def execute(self, context):
if self.init_method == 'EMPTY':
utils.clean_scene()
for scene in bpy.data.scenes:
session.add(scene)
session.init()
return {"FINISHED"}
class SessionStopOperator(bpy.types.Operator):
bl_idname = "session.stop"
bl_label = "close"
bl_description = "Exit current session"
bl_options = {"REGISTER"}
@classmethod
def poll(cls, context):
return True
def execute(self, context):
global deleyables, stop_modal_executor
if session:
try:
session.disconnect()
except Exception as e:
self.report({'ERROR'}, repr(e))
else:
self.report({'WARNING'}, "No session to quit.")
return {"FINISHED"}
return {"FINISHED"}
class SessionKickOperator(bpy.types.Operator):
bl_idname = "session.kick"
bl_label = "Kick"
bl_description = "Kick the target user"
bl_options = {"REGISTER"}
user: bpy.props.StringProperty()
@classmethod
def poll(cls, context):
return True
def execute(self, context):
global deleyables, stop_modal_executor
assert(session)
try:
session.kick(self.user)
except Exception as e:
self.report({'ERROR'}, repr(e))
return {"FINISHED"}
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
def draw(self, context):
row = self.layout
        row.label(text=f"Do you really want to kick {self.user}?")
class SessionPropertyRemoveOperator(bpy.types.Operator):
bl_idname = "session.remove_prop"
bl_label = "Delete cache"
    bl_description = "Stop tracking modifications on the target datablock. " + \
        "The datablock will no longer be updated for other clients"
bl_options = {"REGISTER"}
property_path: bpy.props.StringProperty(default="None")
@classmethod
def poll(cls, context):
return True
def execute(self, context):
try:
session.remove(self.property_path)
return {"FINISHED"}
except: # NonAuthorizedOperationError:
self.report(
{'ERROR'},
"Non authorized operation")
return {"CANCELLED"}
class SessionPropertyRightOperator(bpy.types.Operator):
bl_idname = "session.right"
bl_label = "Change modification rights"
bl_description = "Modify the owner of the target datablock"
bl_options = {"REGISTER"}
key: bpy.props.StringProperty(default="None")
recursive: bpy.props.BoolProperty(default=True)
@classmethod
def poll(cls, context):
return True
def invoke(self, context, event):
wm = context.window_manager
return wm.invoke_props_dialog(self)
def draw(self, context):
layout = self.layout
runtime_settings = context.window_manager.session
row = layout.row()
        row.label(text="Give the ownership rights to:")
row.prop(runtime_settings, "clients", text="")
row = layout.row()
row.label(text="Affect dependencies")
row.prop(self, "recursive", text="")
def execute(self, context):
runtime_settings = context.window_manager.session
if session:
session.change_owner(self.key,
runtime_settings.clients,
ignore_warnings=True,
affect_dependencies=self.recursive)
return {"FINISHED"}
class SessionSnapUserOperator(bpy.types.Operator):
bl_idname = "session.snapview"
bl_label = "snap to user"
bl_description = "Snap 3d view to selected user"
bl_options = {"REGISTER"}
_timer = None
target_client: bpy.props.StringProperty(default="None")
@classmethod
def poll(cls, context):
return True
def execute(self, context):
wm = context.window_manager
runtime_settings = context.window_manager.session
if runtime_settings.time_snap_running:
runtime_settings.time_snap_running = False
return {'CANCELLED'}
else:
runtime_settings.time_snap_running = True
self._timer = wm.event_timer_add(0.1, window=context.window)
wm.modal_handler_add(self)
return {'RUNNING_MODAL'}
def cancel(self, context):
wm = context.window_manager
wm.event_timer_remove(self._timer)
def modal(self, context, event):
session_sessings = context.window_manager.session
is_running = session_sessings.time_snap_running
if event.type in {'RIGHTMOUSE', 'ESC'} or not is_running:
self.cancel(context)
return {'CANCELLED'}
if event.type == 'TIMER':
area, region, rv3d = view3d_find()
if session:
target_ref = session.online_users.get(self.target_client)
if target_ref:
target_scene = target_ref['metadata']['scene_current']
# Handle client on other scenes
if target_scene != context.scene.name:
blender_scene = bpy.data.scenes.get(target_scene, None)
if blender_scene is None:
self.report(
{'ERROR'}, f"Scene {target_scene} doesn't exist on the local client.")
session_sessings.time_snap_running = False
return {"CANCELLED"}
bpy.context.window.scene = blender_scene
# Update client viewmatrix
client_vmatrix = target_ref['metadata'].get(
'view_matrix', None)
if client_vmatrix:
rv3d.view_matrix = mathutils.Matrix(client_vmatrix)
else:
self.report({'ERROR'}, f"Client viewport not ready.")
session_sessings.time_snap_running = False
return {"CANCELLED"}
else:
return {"CANCELLED"}
return {'PASS_THROUGH'}
class SessionSnapTimeOperator(bpy.types.Operator):
bl_idname = "session.snaptime"
bl_label = "snap to user time"
    bl_description = "Snap the time to the selected user's current frame"
bl_options = {"REGISTER"}
_timer = None
target_client: bpy.props.StringProperty(default="None")
@classmethod
def poll(cls, context):
return True
def execute(self, context):
runtime_settings = context.window_manager.session
if runtime_settings.user_snap_running:
runtime_settings.user_snap_running = False
return {'CANCELLED'}
else:
runtime_settings.user_snap_running = True
wm = context.window_manager
self._timer = wm.event_timer_add(0.05, window=context.window)
wm.modal_handler_add(self)
return {'RUNNING_MODAL'}
def cancel(self, context):
wm = context.window_manager
wm.event_timer_remove(self._timer)
def modal(self, context, event):
is_running = context.window_manager.session.user_snap_running
if event.type in {'RIGHTMOUSE', 'ESC'} or not is_running:
self.cancel(context)
return {'CANCELLED'}
if event.type == 'TIMER':
if session:
target_ref = session.online_users.get(self.target_client)
if target_ref:
context.scene.frame_current = target_ref['metadata']['frame_current']
else:
return {"CANCELLED"}
return {'PASS_THROUGH'}
class SessionApply(bpy.types.Operator):
bl_idname = "session.apply"
bl_label = "Revert"
    bl_description = "Revert the selected datablock to its cached" + \
        " version."
bl_options = {"REGISTER"}
target: bpy.props.StringProperty()
reset_dependencies: bpy.props.BoolProperty(default=False)
@classmethod
def poll(cls, context):
return True
def execute(self, context):
logging.debug(f"Running apply on {self.target}")
try:
node_ref = session.get(uuid=self.target)
session.apply(self.target,
force=True,
force_dependencies=self.reset_dependencies)
if node_ref.bl_reload_parent:
for parent in session._graph.find_parents(self.target):
logging.debug(f"Refresh parent {parent}")
session.apply(parent, force=True)
except Exception as e:
self.report({'ERROR'}, repr(e))
            return {"CANCELLED"}
return {"FINISHED"}
class SessionCommit(bpy.types.Operator):
bl_idname = "session.commit"
bl_label = "Force server update"
bl_description = "Commit and push the target datablock to server"
bl_options = {"REGISTER"}
target: bpy.props.StringProperty()
@classmethod
def poll(cls, context):
return True
def execute(self, context):
try:
session.commit(uuid=self.target)
session.push(self.target)
return {"FINISHED"}
except Exception as e:
self.report({'ERROR'}, repr(e))
            return {"CANCELLED"}
class ApplyArmatureOperator(bpy.types.Operator):
    """Operator which runs itself from a timer"""
bl_idname = "session.apply_armature_operator"
bl_label = "Modal Executor Operator"
_timer = None
def modal(self, context, event):
global stop_modal_executor, modal_executor_queue
if stop_modal_executor:
self.cancel(context)
return {'CANCELLED'}
if event.type == 'TIMER':
if session and session.state['STATE'] == STATE_ACTIVE:
nodes = session.list(filter=bl_types.bl_armature.BlArmature)
for node in nodes:
node_ref = session.get(uuid=node)
if node_ref.state == FETCHED:
try:
session.apply(node)
except Exception as e:
                            logging.error(f"Failed to apply armature: {e}")
return {'PASS_THROUGH'}
def execute(self, context):
wm = context.window_manager
self._timer = wm.event_timer_add(2, window=context.window)
wm.modal_handler_add(self)
return {'RUNNING_MODAL'}
def cancel(self, context):
global stop_modal_executor
wm = context.window_manager
wm.event_timer_remove(self._timer)
stop_modal_executor = False
class SessionClearCache(bpy.types.Operator):
"Clear local session cache"
bl_idname = "session.clear_cache"
    bl_label = "Clear cache"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
cache_dir = utils.get_preferences().cache_directory
try:
for root, dirs, files in os.walk(cache_dir):
for name in files:
Path(root, name).unlink()
except Exception as e:
self.report({'ERROR'}, repr(e))
return {"FINISHED"}
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
def draw(self, context):
row = self.layout
        row.label(text="Do you really want to remove the local cache?")
class SessionPurgeOperator(bpy.types.Operator):
    "Remove nodes with lost references"
bl_idname = "session.purge"
bl_label = "Purge session data"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
try:
sanitize_deps_graph(remove_nodes=True)
except Exception as e:
self.report({'ERROR'}, repr(e))
return {"FINISHED"}
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
def draw(self, context):
row = self.layout
        row.label(text="Do you really want to purge the session data?")
class SessionNotifyOperator(bpy.types.Operator):
"""Dialog only operator"""
bl_idname = "session.notify"
bl_label = "Multi-user"
bl_description = "multiuser notification"
message: bpy.props.StringProperty()
@classmethod
def poll(cls, context):
return True
def execute(self, context):
return {'FINISHED'}
def draw(self, context):
layout = self.layout
layout.row().label(text=self.message)
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
class SessionSaveBackupOperator(bpy.types.Operator, ExportHelper):
bl_idname = "session.save"
bl_label = "Save session data"
bl_description = "Save a snapshot of the collaborative session"
# ExportHelper mixin class uses this
filename_ext = ".db"
filter_glob: bpy.props.StringProperty(
default="*.db",
options={'HIDDEN'},
maxlen=255, # Max internal buffer length, longer would be clamped.
)
enable_autosave: bpy.props.BoolProperty(
name="Auto-save",
description="Enable session auto-save",
default=True,
)
save_interval: bpy.props.FloatProperty(
name="Auto save interval",
description="auto-save interval (seconds)",
default=10,
)
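    # With auto-save enabled, a SessionBackupTimer re-saves the chosen file
    # every `save_interval` seconds; otherwise a single snapshot is written.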
def execute(self, context):
if self.enable_autosave:
recorder = timers.SessionBackupTimer(
filepath=self.filepath,
timeout=self.save_interval)
recorder.register()
deleyables.append(recorder)
else:
session.save(self.filepath)
return {'FINISHED'}
@classmethod
def poll(cls, context):
return session.state['STATE'] == STATE_ACTIVE
class SessionStopAutoSaveOperator(bpy.types.Operator):
bl_idname = "session.cancel_autosave"
bl_label = "Cancel auto-save"
bl_description = "Cancel session auto-save"
@classmethod
def poll(cls, context):
return (session.state['STATE'] == STATE_ACTIVE and 'SessionBackupTimer' in registry)
def execute(self, context):
autosave_timer = registry.get('SessionBackupTimer')
autosave_timer.unregister()
return {'FINISHED'}
class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
bl_idname = "session.load"
bl_label = "Load session save"
bl_description = "Load a Multi-user session save"
bl_options = {'REGISTER', 'UNDO'}
# ExportHelper mixin class uses this
filename_ext = ".db"
filter_glob: bpy.props.StringProperty(
default="*.db",
options={'HIDDEN'},
maxlen=255, # Max internal buffer length, longer would be clamped.
)
draw_users: bpy.props.BoolProperty(
name="Load users",
description="Draw users in the scene",
default=False,
)
replay: bpy.props.BoolProperty(
name="Replay mode",
description="Enable replay functions",
default=False,
)
user_skin_radius: bpy.props.FloatProperty(
name="Wireframe radius",
description="Wireframe radius",
default=0.005,
)
user_color_intensity: bpy.props.FloatProperty(
name="Shading intensity",
description="Shading intensity",
default=1.0,
)
files: bpy.props.CollectionProperty(
name='File paths',
type=bpy.types.OperatorFileListElement
)
def draw(self, context):
pass
def execute(self, context):
from replication.graph import ReplicationGraph
runtime_settings = context.window_manager.session
# TODO: add filechecks
try:
f = gzip.open(self.filepath, "rb")
db = pickle.load(f)
except OSError as e:
f = open(self.filepath, "rb")
db = pickle.load(f)
if db:
logging.info(f"Reading {self.filepath}")
nodes = db.get("nodes")
logging.info(f"{len(nodes)} Nodes to load")
# init the factory with supported types
bpy_factory = ReplicatedDataFactory()
for type in bl_types.types_to_register():
type_module = getattr(bl_types, type)
name = [e.capitalize() for e in type.split('_')[1:]]
type_impl_name = 'Bl'+''.join(name)
type_module_class = getattr(type_module, type_impl_name)
bpy_factory.register_type(
type_module_class.bl_class,
type_module_class)
graph = ReplicationGraph()
for node, node_data in nodes:
node_type = node_data.get('str_type')
impl = bpy_factory.get_implementation_from_net(node_type)
if impl:
logging.info(f"Loading {node}")
instance = impl(owner=node_data['owner'],
uuid=node,
dependencies=node_data['dependencies'],
data=node_data['data'])
instance.store(graph)
instance.state = FETCHED
            logging.info("Graph successfully loaded")
            # Persistent collection
ignored_datablocks = []
persistent_collection = bpy.data.collections.get("multiuser_timelapse")
if self.replay and \
runtime_settings.replay_persistent_collection and \
persistent_collection:
ignored_datablocks = ['multiuser_timelapse','multiuser_timelapse_cam','multiuser_timelapse_cam_obj','multiuser_timelapse_path','multiuser_timelapse_path_obj', 'multiuser_timelapse_pathAction']
clean_scene(ignored_datablocks=ignored_datablocks)
# Step 1: Construct nodes
for node in graph.list_ordered():
graph[node].resolve()
# Step 2: Load nodes
for node in graph.list_ordered():
graph[node].apply()
if len(self.files) > 1:
runtime_settings.replay_files.clear()
context.scene.active_replay_file = len(self.files)-1
directory = Path(self.filepath).parent
file_list = [f['name'] for f in self.files]
file_list.sort()
for f in file_list:
snap = runtime_settings.replay_files.add()
snap.name = str(Path(directory, f))
print(f)
if runtime_settings.replay_mode == 'TIMELINE':
                    replay_action = bpy.data.actions.get('replay_action')
                    if replay_action is None:
                        replay_action = bpy.data.actions.new('replay_action')
bpy.context.scene.animation_data_create()
bpy.context.scene.animation_data.action = replay_action
if len(replay_action.fcurves) > 0 and replay_action.fcurves[0].data_path == 'active_replay_file':
replay_fcurve = replay_action.fcurves[0]
else:
replay_fcurve = replay_action.fcurves.new('active_replay_file')
for p in reversed(replay_fcurve.keyframe_points):
replay_fcurve.keyframe_points.remove(p, fast=True)
duration = runtime_settings.replay_duration
file_count = len(self.files)-1
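                    # Map snapshot indices linearly from [0, file_count] onto
                    # [frame_start, replay_duration] to spread them over the timeline.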
for index in range(0, file_count):
frame = interp(index, [0, file_count], [bpy.context.scene.frame_start, duration])
replay_fcurve.keyframe_points.insert(frame, index)
if self.draw_users:
f = gzip.open(self.filepath, "rb")
db = pickle.load(f)
users = db.get("users")
for username, user_data in users.items():
metadata = user_data['metadata']
if metadata:
draw_user(username, metadata, radius=self.user_skin_radius, intensity=self.user_color_intensity)
# Relink the persistent collection
if self.replay and persistent_collection:
logging.info(f"Relinking {persistent_collection.name}")
bpy.context.scene.collection.children.link(persistent_collection)
            # Reassign the scene action
if self.replay and \
runtime_settings.replay_mode == 'TIMELINE' and \
not bpy.context.scene.animation_data :
bpy.context.scene.animation_data_create()
bpy.context.scene.animation_data.action = bpy.data.actions.get('replay_action')
bpy.context.scene.frame_end = runtime_settings.replay_duration
            # Reassign the scene camera
if self.replay and \
runtime_settings.replay_persistent_collection and \
runtime_settings.replay_camera:
bpy.context.scene.camera = runtime_settings.replay_camera
return {'FINISHED'}
@classmethod
def poll(cls, context):
return True
def menu_func_import(self, context):
self.layout.operator(SessionLoadSaveOperator.bl_idname, text='Multi-user session snapshot (.db)')
classes = (
SessionStartOperator,
SessionStopOperator,
SessionPropertyRemoveOperator,
SessionSnapUserOperator,
SessionSnapTimeOperator,
SessionPropertyRightOperator,
SessionApply,
SessionCommit,
ApplyArmatureOperator,
SessionKickOperator,
SessionInitOperator,
SessionClearCache,
SessionNotifyOperator,
SessionSaveBackupOperator,
SessionLoadSaveOperator,
SessionStopAutoSaveOperator,
SessionPurgeOperator,
)
def update_external_dependencies():
nodes_ids = session.list(filter=bl_types.bl_file.BlFile)
for node_id in nodes_ids:
node = session.get(node_id)
if node and node.owner in [session.id, RP_COMMON] \
and node.has_changed():
session.commit(node_id)
session.push(node_id, check_data=False)
def sanitize_deps_graph(remove_nodes: bool = False):
""" Cleanup the replication graph
"""
if session and session.state['STATE'] == STATE_ACTIVE:
start = utils.current_milli_time()
rm_cpt = 0
for node_key in session.list():
node = session.get(node_key)
if node is None \
or (node.state == UP and not node.resolve(construct=False)):
if remove_nodes:
try:
session.remove(node.uuid, remove_dependencies=False)
logging.info(f"Removing {node.uuid}")
rm_cpt += 1
except NonAuthorizedOperationError:
continue
        logging.info(f"Sanitize took {utils.current_milli_time()-start} ms ({rm_cpt} nodes removed)")
@persistent
def resolve_deps_graph(dummy):
    """Resolve the dependency graph

    Temporary solution to resolve each node's pointers after an undo.
    A future solution should avoid storing datablock references...
    """
if session and session.state['STATE'] == STATE_ACTIVE:
sanitize_deps_graph(remove_nodes=True)
@persistent
def load_pre_handler(dummy):
if session and session.state['STATE'] in [STATE_ACTIVE, STATE_SYNCING]:
bpy.ops.session.stop()
@persistent
def update_client_frame(scene):
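    # Scrubbing the timeline in TIMELINE replay mode reloads the snapshot
    # keyed to the current frame; during a live session the local frame is
    # also pushed to the other users' metadata.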
setting = bpy.context.window_manager.session
if setting.replay_mode == 'TIMELINE' and \
setting.replay_files and \
scene.active_replay_file != setting.replay_frame_current :
index = bpy.context.scene.active_replay_file
bpy.ops.session.load(filepath=bpy.context.window_manager.session.replay_files[index].name,
draw_users=True,
replay=True)
setting.replay_frame_current = index
if session and session.state['STATE'] == STATE_ACTIVE:
session.update_user_metadata({
'frame_current': scene.frame_current
})
@persistent
def depsgraph_evaluation(scene):
if session and session.state['STATE'] == STATE_ACTIVE:
context = bpy.context
blender_depsgraph = bpy.context.view_layer.depsgraph
dependency_updates = [u for u in blender_depsgraph.updates]
settings = utils.get_preferences()
update_external_dependencies()
# NOTE: maybe we don't need to check each update but only the first
for update in reversed(dependency_updates):
# Is the object tracked ?
if update.id.uuid:
# Retrieve local version
node = session.get(uuid=update.id.uuid)
                # Check our rights on this update:
                # - if it's ours or (under common ownership and changed),
                #   launch the update process
                # - if it belongs to someone else, ignore the update
if node and (node.owner == session.id or node.bl_check_common):
if node.state == UP:
try:
if node.has_changed():
session.commit(node.uuid)
session.push(node.uuid, check_data=False)
except ReferenceError:
logging.debug(f"Reference error {node.uuid}")
if not node.is_valid():
session.remove(node.uuid)
except ContextError as e:
logging.debug(e)
except Exception as e:
logging.error(e)
else:
continue
# A new scene is created
elif isinstance(update.id, bpy.types.Scene):
ref = session.get(reference=update.id)
if ref:
ref.resolve()
else:
scn_uuid = session.add(update.id)
session.commit(scn_uuid)
session.push(scn_uuid, check_data=False)
def register():
from bpy.utils import register_class
for cls in classes:
register_class(cls)
bpy.app.handlers.undo_post.append(resolve_deps_graph)
bpy.app.handlers.redo_post.append(resolve_deps_graph)
bpy.app.handlers.load_pre.append(load_pre_handler)
bpy.app.handlers.frame_change_pre.append(update_client_frame)
def unregister():
if session and session.state['STATE'] == STATE_ACTIVE:
session.disconnect()
from bpy.utils import unregister_class
for cls in reversed(classes):
unregister_class(cls)
bpy.app.handlers.undo_post.remove(resolve_deps_graph)
bpy.app.handlers.redo_post.remove(resolve_deps_graph)
bpy.app.handlers.load_pre.remove(load_pre_handler)
bpy.app.handlers.frame_change_pre.remove(update_client_frame)