Compare commits

...

31 Commits

Author SHA1 Message Date
5ffb05f46a feat: draw user curve 2021-04-08 11:22:01 +02:00
5817c9110b feat: basic collection loading 2021-04-07 10:06:38 +02:00
5e30e215ab fix: empty node 2021-04-02 16:37:47 +02:00
9f167256d0 fix: node frame transform 2021-04-02 16:12:51 +02:00
4e19c169b2 fix: node_groups unordered socket loading
fix: geometry_node sample texture handling
fix: geometry node dependencies
2021-04-02 15:51:31 +02:00
9c633c35ec fix: geometry node socket for blender 2.93 2021-04-02 10:01:45 +02:00
9610b50a49 Merge branch '181-geometry-nodes-int-float-inputs-doesn-t-sync' into 'develop'
Resolve "Geometry nodes int/float inputs doesn't sync"

See merge request slumber/multi-user!116
2021-03-31 13:42:26 +00:00
67d18f08e2 fix: Timer not unregistered error
fix: handle correctly unsupported float parameter for geometry nodes
fix: Material loading
2021-03-31 15:38:35 +02:00
9d0d684589 fix: geometry nodes str, float, int loading 2021-03-31 11:19:03 +02:00
2446df4fe3 feat: raise the default timeout to 5 seconds 2021-03-21 09:28:54 +01:00
07862f1cf0 fix: missing hue_interpolation 2021-03-19 11:07:04 +01:00
480818fe85 Merge branch '180-parent-relation-have-doesn-t-keeps-transform' into 'develop'
Resolve "Parenting objects doesn't keeps transform"

See merge request slumber/multi-user!115
2021-03-13 17:35:42 +00:00
b965c80ba5 fix: parent transform
fix: race condition for COMMON objects

related to #180
2021-03-13 18:32:20 +01:00
b66d0dd4ce Merge branch 'develop' of gitlab.com:slumber/multi-user into develop 2021-03-04 15:49:00 +01:00
9487753307 feat: fix object and collection support for geometry nodes 2021-03-04 15:48:36 +01:00
df1257ca4c Merge branch '179-parent-relation-can-t-be-removed' into 'develop'
Resolve "Parent relation can't be removed"

See merge request slumber/multi-user!113
2021-03-04 13:25:39 +00:00
cc5a87adb8 fix: prevent matrix_parent_inverse from being reset by loading parents only if its necessary 2021-03-03 11:00:47 +01:00
19c56e590b feat: remove parent as node dependency 2021-03-03 10:03:57 +01:00
d0e80da945 fix: object parenting can't be removed
Related to #179
2021-03-03 09:55:48 +01:00
0ccd0563ea feat: testing doc building with python 3.8 2021-03-02 12:56:12 +00:00
1c3394ce56 feat: sphinx-material theme 2021-03-02 12:46:26 +00:00
d2b63df68e Merge branch '178-move-documentation-hosting-to-gitlab-page' into 'develop'
Resolve "Move documentation hosting to gitlab page"

See merge request slumber/multi-user!112
2021-03-02 09:21:57 +00:00
3d9c78c2f9 doc: only build for master/develop 2021-03-02 10:18:11 +01:00
4726a90a4a doc: reflect doc hosting changes to the Readme.md 2021-03-02 10:16:59 +01:00
73b763d85f fix: job ordering error 2021-03-02 09:09:50 +00:00
5e29c6fe26 Update .gitlab/ci/doc.gitlab-ci.yml 2021-03-02 09:08:39 +00:00
113ab81cbf Update .gitlab/ci/doc.gitlab-ci.yml 2021-03-02 09:07:50 +00:00
d2215b662c feat: update jobs dependencies 2021-03-02 09:06:32 +00:00
238a34d023 feat: needs test to success 2021-03-02 09:05:21 +00:00
55ca8a7b84 Update .gitlab/ci/doc.gitlab-ci.yml 2021-03-02 09:03:19 +00:00
7049c1723d feat: initial CI job for building the documentation for gitlab page 2021-03-02 09:58:06 +01:00
13 changed files with 508 additions and 263 deletions

View File

@@ -2,9 +2,12 @@ stages:
- test
- build
- deploy
- doc
include:
- local: .gitlab/ci/test.gitlab-ci.yml
- local: .gitlab/ci/build.gitlab-ci.yml
- local: .gitlab/ci/deploy.gitlab-ci.yml
- local: .gitlab/ci/doc.gitlab-ci.yml

View File

@@ -1,5 +1,6 @@
build:
stage: build
needs: ["test"]
image: debian:stable-slim
script:
- rm -rf tests .git .gitignore script

View File

@@ -1,5 +1,6 @@
deploy:
stage: deploy
needs: ["build"]
image: slumber/docker-python
variables:
DOCKER_DRIVER: overlay2

View File

@@ -0,0 +1,16 @@
pages:
stage: doc
needs: ["deploy"]
image: python
script:
- pip install -U sphinx sphinx_rtd_theme sphinx-material
- sphinx-build -b html ./docs public
artifacts:
paths:
- public
only:
refs:
- master
- develop
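
The `pages` job above is a plain Sphinx build. As a quick local check, the same build can be driven from Python (a minimal sketch, not part of the repository; it assumes `sphinx`, `sphinx_rtd_theme` and `sphinx-material` are installed as in the job's `pip install` line, and that it runs from the repository root):

```python
# Local equivalent of the job's `sphinx-build -b html ./docs public`.
# sphinx.cmd.build.main takes the same argv as the sphinx-build CLI
# and returns its exit code (0 on success).
from sphinx.cmd.build import main as sphinx_build

exit_code = sphinx_build(["-b", "html", "./docs", "public"])
raise SystemExit(exit_code)  # a non-zero code would fail CI the same way
```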

View File

@@ -19,44 +19,46 @@ This tool aims to allow multiple users to work on the same scene over the networ
## Usage
See the [documentation](https://multi-user.readthedocs.io/en/latest/) for details.
See the [documentation](https://slumber.gitlab.io/multi-user/index.html) for details.
## Troubleshooting
See the [troubleshooting guide](https://multi-user.readthedocs.io/en/latest/getting_started/troubleshooting.html) for tips on the most common issues.
See the [troubleshooting guide](https://slumber.gitlab.io/multi-user/getting_started/troubleshooting.html) for tips on the most common issues.
## Current development status
Currently, not all data-blocks are supported for replication over the wire. The following list summarizes the status of each one.
| Name | Status | Comment |
| ----------- | :----: | :--------------------------------------------------------------------------: |
| action | ✔️ | |
| armature | ❗ | Not stable |
| camera | ✔️ | |
| collection | ✔️ | |
| curve | ❗ | Nurbs not supported |
| gpencil | ✔️ | [Airbrush not supported](https://gitlab.com/slumber/multi-user/-/issues/123) |
| image | ✔️ | |
| mesh | ✔️ | |
| material | ✔️ | |
| node_groups | ❗ | Material only |
| metaball | ✔️ | |
| object | ✔️ | |
| textures | ❗ | Supported for modifiers only |
| texts | ✔️ | |
| scene | ✔️ | |
| world | ✔️ | |
| lightprobes | ✔️ | |
| compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
| nla | ❌ | |
| volumes | ✔️ | |
| particles | ❌ | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24) |
| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
| vse | ❗ | Mask and Clip not supported yet |
| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| libraries | ❗ | Partial |
| Name | Status | Comment |
| -------------- | :----: | :--------------------------------------------------------------------------: |
| action | ✔️ | |
| armature | ❗ | Not stable |
| camera | ✔️ | |
| collection | ✔️ | |
| curve | ❗ | Nurbs surfaces not supported |
| gpencil | ✔️ | [Airbrush not supported](https://gitlab.com/slumber/multi-user/-/issues/123) |
| image | ✔️ | |
| mesh | ✔️ | |
| material | ✔️ | |
| node_groups | ❗ | Material only |
| geometry nodes | ✔️ | |
| metaball | ✔️ | |
| object | ✔️ | |
| textures | ❗ | Supported for modifiers/materials only |
| texts | ✔️ | |
| scene | ✔️ | |
| world | ✔️ | |
| lightprobes | ✔️ | |
| compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
| nla | ❌ | |
| volumes | ✔️ | |
| particles | ❌ | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24) |
| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
| vse | ❗ | Mask and Clip not supported yet |
| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| libraries | ❗ | Partial |
### Performance issues
@@ -68,13 +70,13 @@ I'm working on it.
| Dependencies | Version | Needed |
| ------------ | :-----: | -----: |
| Replication | latest | yes |
| Replication | latest | yes |
## Contributing
See [contributing section](https://multi-user.readthedocs.io/en/latest/ways_to_contribute.html) of the documentation.
See [contributing section](https://slumber.gitlab.io/multi-user/ways_to_contribute.html) of the documentation.
Feel free to [join the discord server](https://discord.gg/aBPvGws) to chat, seek help and contribute.

View File

@@ -27,7 +27,7 @@ from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock, get_datablock_from_uuid
NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
IGNORED_SOCKETS = ['GEOMETRY']
IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM']
def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
""" Load a node into a node_tree from a dict
@@ -53,31 +53,136 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
inputs_data = node_data.get('inputs')
if inputs_data:
inputs = [i for i in target_node.inputs if i.type not in IGNORED_SOCKETS]
for idx, inpt in enumerate(inputs_data):
if idx < len(inputs) and hasattr(inputs[idx], "default_value"):
for idx, inpt in enumerate(inputs):
if idx < len(inputs_data) and hasattr(inpt, "default_value"):
loaded_input = inputs_data[idx]
try:
inputs[idx].default_value = inpt
if inpt.type in ['OBJECT', 'COLLECTION']:
inpt.default_value = get_datablock_from_uuid(loaded_input, None)
else:
inpt.default_value = loaded_input
except Exception as e:
logging.warning(f"Node {target_node.name} input {inputs[idx].name} parameter not supported, skipping ({e})")
logging.warning(f"Node {target_node.name} input {inpt.name} parameter not supported, skipping ({e})")
else:
logging.warning(f"Node {target_node.name} input length mismatch.")
outputs_data = node_data.get('outputs')
if outputs_data:
outputs = [o for o in target_node.outputs if o.type not in IGNORED_SOCKETS]
for idx, output in enumerate(outputs_data):
if idx < len(outputs) and hasattr(outputs[idx], "default_value"):
for idx, output in enumerate(outputs):
if idx < len(outputs_data) and hasattr(output, "default_value"):
loaded_output = outputs_data[idx]
try:
outputs[idx].default_value = output
if output.type in ['OBJECT', 'COLLECTION']:
output.default_value = get_datablock_from_uuid(loaded_output, None)
else:
output.default_value = loaded_output
except Exception as e:
logging.warning(
f"Node {target_node.name} output {outputs[idx].name} parameter not supported, skipping ({e})")
f"Node {target_node.name} output {output.name} parameter not supported, skipping ({e})")
else:
logging.warning(
f"Node {target_node.name} output length mismatch.")
def dump_node(node: bpy.types.ShaderNode) -> dict:
""" Dump a single node to a dict
:arg node: target node
:type node: bpy.types.Node
:retrun: dict
"""
node_dumper = Dumper()
node_dumper.depth = 1
node_dumper.exclude_filter = [
"dimensions",
"show_expanded",
"name_full",
"select",
"bl_label",
"bl_height_min",
"bl_height_max",
"bl_height_default",
"bl_width_min",
"bl_width_max",
"type",
"bl_icon",
"bl_width_default",
"bl_static_type",
"show_tetxure",
"is_active_output",
"hide",
"show_options",
"show_preview",
"show_texture",
"outputs",
"width_hidden",
"image"
]
dumped_node = node_dumper.dump(node)
if node.parent:
dumped_node['parent'] = node.parent.name
dump_io_needed = (node.type not in ['REROUTE', 'OUTPUT_MATERIAL'])
if dump_io_needed:
io_dumper = Dumper()
io_dumper.depth = 2
io_dumper.include_filter = ["default_value"]
if hasattr(node, 'inputs'):
dumped_node['inputs'] = []
inputs = [i for i in node.inputs if i.type not in IGNORED_SOCKETS]
for idx, inpt in enumerate(inputs):
if hasattr(inpt, 'default_value'):
if isinstance(inpt.default_value, bpy.types.ID):
dumped_input = inpt.default_value.uuid
else:
dumped_input = io_dumper.dump(inpt.default_value)
dumped_node['inputs'].append(dumped_input)
if hasattr(node, 'outputs'):
dumped_node['outputs'] = []
for idx, output in enumerate(node.outputs):
if output.type not in IGNORED_SOCKETS:
if hasattr(output, 'default_value'):
dumped_node['outputs'].append(
io_dumper.dump(output.default_value))
if hasattr(node, 'color_ramp'):
ramp_dumper = Dumper()
ramp_dumper.depth = 4
ramp_dumper.include_filter = [
'elements',
'alpha',
'color',
'position',
'interpolation',
'hue_interpolation',
'color_mode'
]
dumped_node['color_ramp'] = ramp_dumper.dump(node.color_ramp)
if hasattr(node, 'mapping'):
curve_dumper = Dumper()
curve_dumper.depth = 5
curve_dumper.include_filter = [
'curves',
'points',
'location'
]
dumped_node['mapping'] = curve_dumper.dump(node.mapping)
if hasattr(node, 'image') and getattr(node, 'image'):
dumped_node['image_uuid'] = node.image.uuid
if hasattr(node, 'node_tree') and getattr(node, 'node_tree'):
dumped_node['node_tree_uuid'] = node.node_tree.uuid
return dumped_node
def load_links(links_data, node_tree):
""" Load node_tree links from a list
@@ -120,93 +225,6 @@ def dump_links(links):
return links_data
def dump_node(node: bpy.types.ShaderNode) -> dict:
""" Dump a single node to a dict
:arg node: target node
:type node: bpy.types.Node
:retrun: dict
"""
node_dumper = Dumper()
node_dumper.depth = 1
node_dumper.exclude_filter = [
"dimensions",
"show_expanded",
"name_full",
"select",
"bl_label",
"bl_height_min",
"bl_height_max",
"bl_height_default",
"bl_width_min",
"bl_width_max",
"type",
"bl_icon",
"bl_width_default",
"bl_static_type",
"show_tetxure",
"is_active_output",
"hide",
"show_options",
"show_preview",
"show_texture",
"outputs",
"width_hidden",
"image"
]
dumped_node = node_dumper.dump(node)
dump_io_needed = (node.type not in ['REROUTE', 'OUTPUT_MATERIAL'])
if dump_io_needed:
io_dumper = Dumper()
io_dumper.depth = 2
io_dumper.include_filter = ["default_value"]
if hasattr(node, 'inputs'):
dumped_node['inputs'] = []
for idx, inpt in enumerate(node.inputs):
if hasattr(inpt, 'default_value'):
dumped_node['inputs'].append(
io_dumper.dump(inpt.default_value))
if hasattr(node, 'outputs'):
dumped_node['outputs'] = []
for idx, output in enumerate(node.outputs):
if hasattr(output, 'default_value'):
dumped_node['outputs'].append(
io_dumper.dump(output.default_value))
if hasattr(node, 'color_ramp'):
ramp_dumper = Dumper()
ramp_dumper.depth = 4
ramp_dumper.include_filter = [
'elements',
'alpha',
'color',
'position',
'interpolation',
'color_mode'
]
dumped_node['color_ramp'] = ramp_dumper.dump(node.color_ramp)
if hasattr(node, 'mapping'):
curve_dumper = Dumper()
curve_dumper.depth = 5
curve_dumper.include_filter = [
'curves',
'points',
'location'
]
dumped_node['mapping'] = curve_dumper.dump(node.mapping)
if hasattr(node, 'image') and getattr(node, 'image'):
dumped_node['image_uuid'] = node.image.uuid
if hasattr(node, 'node_tree') and getattr(node, 'node_tree'):
dumped_node['node_tree_uuid'] = node.node_tree.uuid
return dumped_node
def dump_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict:
""" Dump a shader node_tree to a dict including links and nodes
@@ -263,7 +281,7 @@ def load_node_tree_sockets(sockets: bpy.types.Collection,
"""
# Check for removed sockets
for socket in sockets:
if not [s for s in sockets_data if socket['uuid'] == s[2]]:
if not [s for s in sockets_data if 'uuid' in socket and socket['uuid'] == s[2]]:
sockets.remove(socket)
# Check for new sockets
@@ -303,6 +321,14 @@ def load_node_tree(node_tree_data: dict, target_node_tree: bpy.types.ShaderNodeT
for node in node_tree_data["nodes"]:
load_node(node_tree_data["nodes"][node], target_node_tree)
for node_id, node_data in node_tree_data["nodes"].items():
target_node = target_node_tree.nodes.get(node_id, None)
if target_node is None:
continue
elif 'parent' in node_data:
target_node.parent = target_node_tree.nodes[node_data['parent']]
else:
target_node.parent = None
# TODO: load only required nodes links
# Load nodes links
target_node_tree.links.clear()
@@ -317,6 +343,8 @@ def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
def has_node_group(node): return (
hasattr(node, 'node_tree') and node.node_tree)
def has_texture(node): return (
node.type in ['ATTRIBUTE_SAMPLE_TEXTURE','TEXTURE'] and node.texture)
deps = []
for node in node_tree.nodes:
@@ -324,6 +352,8 @@ def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
deps.append(node.image)
elif has_node_group(node):
deps.append(node.node_tree)
elif has_texture(node):
deps.append(node.texture)
return deps
@@ -354,10 +384,7 @@ def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_
if mat_uuid is not None:
mat_ref = get_datablock_from_uuid(mat_uuid, None)
else:
mat_ref = bpy.data.materials.get(mat_name, None)
if mat_ref is None:
raise Exception(f"Material {mat_name} doesn't exist")
mat_ref = bpy.data.materials[mat_name]
dst_materials.append(mat_ref)
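
A note on the `load_node`/`dump_node` changes above: `OBJECT` and `COLLECTION` sockets are now serialized as the addon's `uuid` custom property rather than a raw datablock reference, and resolved back through `get_datablock_from_uuid` on load. A minimal sketch of that round-trip for one socket (Blender-only; the linear search is an illustrative stand-in, not the addon's actual resolver):

```python
import bpy

def dump_socket(socket):
    # ID sockets carry the multi-user uuid string, plain sockets their value
    if isinstance(socket.default_value, bpy.types.ID):
        return socket.default_value.uuid  # uuid property stamped by multi-user
    return socket.default_value

def load_socket(socket, loaded_value):
    if socket.type in ['OBJECT', 'COLLECTION']:
        # stand-in for get_datablock_from_uuid(loaded_value, None)
        socket.default_value = next(
            (o for o in bpy.data.objects if o.get('uuid') == loaded_value), None)
    else:
        socket.default_value = loaded_value
```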

View File

@@ -69,7 +69,7 @@ class BlMesh(BlDatablock):
loader.load(target, data)
# MATERIAL SLOTS
src_materials = data.get('materials', None)
src_materials = data.get('materials', data.get('material_list'))
if src_materials:
load_materials_slots(src_materials, target.materials)

View File

@@ -23,6 +23,7 @@ import mathutils
from replication.exception import ContextError
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from .bl_material import IGNORED_SOCKETS
from .dump_anything import (
Dumper,
Loader,
@@ -30,16 +31,28 @@ from .dump_anything import (
np_dump_collection)
SKIN_DATA = [
'radius',
'use_loose',
'use_root'
]
def get_input_index(e):
return int(re.findall('[0-9]+', e)[0])
if bpy.app.version[1] >= 93:
SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
else:
SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str)
logging.warning("Geometry node Float parameter not supported in \
blender 2.92.")
def get_node_group_inputs(node_group):
inputs = []
for inpt in node_group.inputs:
if inpt.type in IGNORED_SOCKETS:
continue
else:
inputs.append(inpt)
return inputs
# return [inpt.identifer for inpt in node_group.inputs if inpt.type not in IGNORED_SOCKETS]
def dump_modifier_geometry_node_inputs(modifier: bpy.types.Modifier) -> list:
""" Dump geometry node modifier input properties
@@ -47,15 +60,14 @@ def dump_modifier_geometry_node_inputs(modifier: bpy.types.Modifier) -> list:
:arg modifier: geometry node modifier to dump
:type modifier: bpy.type.Modifier
"""
inputs_name = [p for p in dir(modifier) if "Input_" in p]
inputs_name.sort(key=get_input_index)
dumped_inputs = []
for inputs_index, input_name in enumerate(inputs_name):
input_value = modifier[input_name]
for inpt in get_node_group_inputs(modifier.node_group):
input_value = modifier[inpt.identifier]
dumped_input = None
if isinstance(input_value, bpy.types.ID):
dumped_input = input_value.uuid
elif type(input_value) in [int, str, float]:
elif isinstance(input_value, SUPPORTED_GEOMETRY_NODE_PARAMETERS):
dumped_input = input_value
elif hasattr(input_value, 'to_list'):
dumped_input = input_value.to_list()
@@ -73,18 +85,16 @@ def load_modifier_geometry_node_inputs(dumped_modifier: dict, target_modifier: b
:type target_modifier: bpy.type.Modifier
"""
inputs_name = [p for p in dir(target_modifier) if "Input_" in p]
inputs_name.sort(key=get_input_index)
for input_index, input_name in enumerate(inputs_name):
for input_index, inpt in enumerate(get_node_group_inputs(target_modifier.node_group)):
dumped_value = dumped_modifier['inputs'][input_index]
input_value = target_modifier[input_name]
if type(input_value) in [int, str, float]:
input_value = dumped_value
input_value = target_modifier[inpt.identifier]
if isinstance(input_value, SUPPORTED_GEOMETRY_NODE_PARAMETERS):
target_modifier[inpt.identifier] = dumped_value
elif hasattr(input_value, 'to_list'):
for index in range(len(input_value)):
input_value[index] = dumped_value[index]
else:
target_modifier[input_name] = get_datablock_from_uuid(
elif inpt.type in ['COLLECTION', 'OBJECT']:
target_modifier[inpt.identifier] = get_datablock_from_uuid(
dumped_value, None)
@@ -161,19 +171,24 @@ def find_textures_dependencies(modifiers: bpy.types.bpy_prop_collection) -> [bpy
return textures
def find_geometry_nodes(modifiers: bpy.types.bpy_prop_collection) -> [bpy.types.NodeTree]:
""" Find geometry nodes group from a modifier stack
def find_geometry_nodes_dependencies(modifiers: bpy.types.bpy_prop_collection) -> [bpy.types.NodeTree]:
""" Find geometry nodes dependencies from a modifier stack
:arg modifiers: modifiers collection
:type modifiers: bpy.types.bpy_prop_collection
:return: list of bpy.types.NodeTree pointers
"""
nodes_groups = []
for item in modifiers:
if item.type == 'NODES' and item.node_group:
nodes_groups.append(item.node_group)
dependencies = []
for mod in modifiers:
if mod.type == 'NODES' and mod.node_group:
dependencies.append(mod.node_group)
# for inpt in get_node_group_inputs(mod.node_group):
# parameter = mod.get(inpt.identifier)
# if parameter and isinstance(parameter, bpy.types.ID):
# dependencies.append(parameter)
logging.info(dependencies)
return dependencies
return nodes_groups
def dump_vertex_groups(src_object: bpy.types.Object) -> dict:
""" Dump object's vertex groups
@@ -219,6 +234,7 @@ def load_vertex_groups(dumped_vertex_groups: dict, target_object: bpy.types.Obje
for index, weight in vg['vertices']:
vertex_group.add([index], weight, 'REPLACE')
class BlObject(BlDatablock):
bl_id = "objects"
bl_class = bpy.types.Object
@@ -296,9 +312,20 @@
# Load transformation data
loader.load(target, data)
# Object display fields
if 'display' in data:
loader.load(target.display, data['display'])
# Parenting
parent_id = data.get('parent_id')
if parent_id:
parent = bpy.data.objects[parent_id]
# Avoid reloading
if target.parent != parent and parent is not None:
target.parent = parent
elif target.parent:
target.parent = None
# Pose
if 'pose' in data:
if not target.pose:
@@ -343,14 +370,23 @@
SKIN_DATA)
if hasattr(target, 'cycles_visibility') \
and 'cycles_visibility' in data:
and 'cycles_visibility' in data:
loader.load(target.cycles_visibility, data['cycles_visibility'])
# TODO: handle geometry nodes input from dump_anything
if hasattr(target, 'modifiers'):
nodes_modifiers = [mod for mod in target.modifiers if mod.type == 'NODES']
nodes_modifiers = [
mod for mod in target.modifiers if mod.type == 'NODES']
for modifier in nodes_modifiers:
load_modifier_geometry_node_inputs(data['modifiers'][modifier.name], modifier)
load_modifier_geometry_node_inputs(
data['modifiers'][modifier.name], modifier)
transform = data.get('transforms', None)
if transform:
target.matrix_parent_inverse = mathutils.Matrix(
transform['matrix_parent_inverse'])
target.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
target.matrix_local = mathutils.Matrix(transform['matrix_local'])
def _dump_implementation(self, data, instance=None):
assert(instance)
@@ -366,7 +402,6 @@
dumper.include_filter = [
"name",
"rotation_mode",
"parent",
"data",
"library",
"empty_display_type",
@@ -381,8 +416,6 @@
"color",
"instance_collection",
"instance_type",
"location",
"scale",
'lock_location',
'lock_rotation',
'lock_scale',
@@ -396,12 +429,16 @@
'show_all_edges',
'show_texture_space',
'show_in_front',
'type',
'rotation_quaternion' if instance.rotation_mode == 'QUATERNION' else 'rotation_euler',
'type'
]
data = dumper.dump(instance)
dumper.include_filter = [
'matrix_parent_inverse',
'matrix_local',
'matrix_basis']
data['transforms'] = dumper.dump(instance)
dumper.include_filter = [
'show_shadows',
]
@@ -411,6 +448,10 @@
if self.is_library:
return data
# PARENTING
if instance.parent:
data['parent_id'] = instance.parent.name
# MODIFIERS
if hasattr(instance, 'modifiers'):
data["modifiers"] = {}
@@ -422,7 +463,8 @@
data["modifiers"][modifier.name] = dumper.dump(modifier)
# hack to dump geometry nodes inputs
if modifier.type == 'NODES':
dumped_inputs = dump_modifier_geometry_node_inputs(modifier)
dumped_inputs = dump_modifier_geometry_node_inputs(
modifier)
data["modifiers"][modifier.name]['inputs'] = dumped_inputs
gp_modifiers = getattr(instance, 'grease_pencil_modifiers', None)
@@ -489,7 +531,6 @@
bone_groups[group.name] = dumper.dump(group)
data['pose']['bone_groups'] = bone_groups
# VERTEx GROUP
if len(instance.vertex_groups) > 0:
data['vertex_groups'] = dump_vertex_groups(instance)
@@ -526,7 +567,8 @@
if hasattr(object_data, 'skin_vertices') and object_data.skin_vertices:
skin_vertices = list()
for skin_data in object_data.skin_vertices:
skin_vertices.append(np_dump_collection(skin_data.data, SKIN_DATA))
skin_vertices.append(
np_dump_collection(skin_data.data, SKIN_DATA))
data['skin_vertices'] = skin_vertices
# CYCLE SETTINGS
@@ -549,18 +591,19 @@
# Avoid Empty case
if self.instance.data:
deps.append(self.instance.data)
if self.instance.parent :
deps.append(self.instance.parent)
if self.is_library:
deps.append(self.instance.library)
if self.instance.parent:
deps.append(self.instance.parent)
if self.instance.instance_type == 'COLLECTION':
# TODO: uuid based
deps.append(self.instance.instance_collection)
if self.instance.modifiers:
deps.extend(find_textures_dependencies(self.instance.modifiers))
deps.extend(find_geometry_nodes(self.instance.modifiers))
deps.extend(find_geometry_nodes_dependencies(self.instance.modifiers))
return deps
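
For context on `dump_modifier_geometry_node_inputs` / `load_modifier_geometry_node_inputs` above: a geometry-node modifier exposes its group inputs as ID properties keyed by each socket's `identifier` (e.g. `"Input_2"`), which is why iterating `node_group.inputs` replaces the old `dir(modifier)` string matching. A hedged sketch (Blender-only; the object and modifier names are assumptions):

```python
import bpy

IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM']  # as in bl_material.py

obj = bpy.data.objects["Cube"]              # hypothetical object
mod = obj.modifiers["GeometryNodes"]        # hypothetical NODES modifier
for socket in mod.node_group.inputs:
    if socket.type in IGNORED_SOCKETS:      # skip non-value sockets
        continue
    # Sockets keep a stable order, so dumps can be stored as a flat list
    print(socket.identifier, mod[socket.identifier])
```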

View File

@@ -55,6 +55,10 @@ def np_load_collection(dikt: dict, collection: bpy.types.CollectionProperty, att
for attr in attributes:
attr_type = collection[0].bl_rna.properties.get(attr).type
if attr not in dikt:
logging.warning(f"No data for {attr}, skipping.")
continue
if attr_type in PRIMITIVE_TYPES:
np_load_collection_primitives(collection, attr, dikt[attr])
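
The guard added to `np_load_collection` above makes loading tolerant of snapshots dumped before an attribute existed. A tiny illustration of the failure mode it avoids (attribute names are hypothetical):

```python
import logging

dikt = {"radius": [0.25, 0.25]}   # an older dump with no "use_root" data
for attr in ("radius", "use_root"):
    if attr not in dikt:          # without the guard: KeyError on dikt[attr]
        logging.warning(f"No data for {attr}, skipping.")
        continue
    values = dikt[attr]           # safe: only reached when data exists
```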

Submodule multi_user/libs/replication added at 9a02e16d70

View File

@ -39,6 +39,7 @@ except ImportError:
import pickle
import bpy
import bmesh
import mathutils
from bpy.app.handlers import persistent
from bpy_extras.io_utils import ExportHelper, ImportHelper
@@ -56,11 +57,12 @@ background_execution_queue = Queue()
deleyables = []
stop_modal_executor = False
def session_callback(name):
""" Session callback wrapper
This allow to encapsulate session callbacks to background_execution_queue.
By doing this way callback are executed from the main thread.
By doing this way callback are executed from the main thread.
"""
def func_wrapper(func):
@session.register(name)
@@ -72,7 +74,7 @@
@session_callback('on_connection')
def initialize_session():
"""Session connection init hander
"""Session connection init hander
"""
logging.info("Intializing the scene")
settings = utils.get_preferences()
@@ -86,7 +88,7 @@
logging.error(f"Can't construct node {node}")
elif node_ref.state == FETCHED:
node_ref.resolve()
# Step 2: Load nodes
logging.info("Loading nodes")
for node in session._graph.list_ordered():
@@ -113,7 +115,7 @@
@session_callback('on_exit')
def on_connection_end(reason="none"):
"""Session connection finished handler
"""Session connection finished handler
"""
global deleyables, stop_modal_executor
settings = utils.get_preferences()
@@ -137,7 +139,8 @@
if isinstance(handler, logging.FileHandler):
logger.removeHandler(handler)
if reason != "user":
bpy.ops.session.notify('INVOKE_DEFAULT', message=f"Disconnected from session. Reason: {reason}. ")
bpy.ops.session.notify(
'INVOKE_DEFAULT', message=f"Disconnected from session. Reason: {reason}. ")
# OPERATORS
@@ -210,8 +213,6 @@
type_module_class,
check_common=type_module_class.bl_check_common)
deleyables.append(timers.ApplyTimer(timeout=settings.depsgraph_update_rate))
if bpy.app.version[1] >= 91:
python_binary_path = sys.executable
else:
@@ -272,6 +273,8 @@
# Background client updates service
deleyables.append(timers.ClientUpdate())
deleyables.append(timers.DynamicRightSelectTimer())
deleyables.append(timers.ApplyTimer(
timeout=settings.depsgraph_update_rate))
# deleyables.append(timers.PushTimer(
# queue=stagging,
# timeout=settings.depsgraph_update_rate
@@ -289,8 +292,6 @@
deleyables.append(session_update)
deleyables.append(session_user_sync)
self.report(
{'INFO'},
f"connecting to tcp://{settings.ip}:{settings.port}")
@@ -610,7 +611,7 @@
session.apply(parent, force=True)
except Exception as e:
self.report({'ERROR'}, repr(e))
return {"CANCELED"}
return {"CANCELED"}
return {"FINISHED"}
@@ -636,6 +637,7 @@
self.report({'ERROR'}, repr(e))
return {"CANCELED"}
class ApplyArmatureOperator(bpy.types.Operator):
"""Operator which runs its self from a timer"""
bl_idname = "session.apply_armature_operator"
@@ -707,6 +709,7 @@
row = self.layout
row.label(text=f" Do you really want to remove local cache ? ")
class SessionPurgeOperator(bpy.types.Operator):
"Remove node with lost references"
bl_idname = "session.purge"
@@ -751,7 +754,6 @@
layout = self.layout
layout.row().label(text=self.message)
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
@@ -797,6 +799,7 @@
def poll(cls, context):
return session.state['STATE'] == STATE_ACTIVE
class SessionStopAutoSaveOperator(bpy.types.Operator):
bl_idname = "session.cancel_autosave"
bl_label = "Cancel auto-save"
@@ -828,79 +831,222 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
maxlen=255, # Max internal buffer length, longer would be clamped.
)
load_to_collection: bpy.props.BoolProperty(
name="Load to collection",
description="Load the snapshot into a collection",
default=False,
)
draw_users: bpy.props.BoolProperty(
name="Draw users",
description="Draw a mesh representing each user position and selected object",
default=False,
)
animate: bpy.props.BoolProperty(
name="Animate history",
description="Animate history versions",
default=False,
)
clear_datablocks: bpy.props.BoolProperty(
name="Removes existing data",
description="Remove all exisitng datablocks",
default=True,
)
files: bpy.props.CollectionProperty(
type=bpy.types.OperatorFileListElement,
options={'HIDDEN', 'SKIP_SAVE'},
)
def draw_users_as_curves(self, user_list):
users_collection = bpy.data.collections.new("users")
for username, user_data in user_list.items():
metadata = user_data.get('metadata', None)
if metadata:
logging.info(f"Loading user {username}")
# Curve creation
location = metadata.get('view_corners')
positions = [coord for coord in location]
curve = bpy.data.curves.new(username, 'CURVE')
obj = bpy.data.objects.new(username, curve)
for p in positions:
p.append(0.0)
sight = curve.splines.new('POLY')
sight.points.add(1)
sight.points[0].co = positions[4]
sight.points[1].co = positions[5]
cadre = curve.splines.new('POLY')
cadre.points.add(4)
cadre.points[0].co = positions[0]
cadre.points[1].co = positions[2]
cadre.points[2].co = positions[1]
cadre.points[3].co = positions[3]
cadre.points[4].co = positions[0]
frust = curve.splines.new('POLY')
frust.points.add(2)
frust.points[0].co = positions[0]
frust.points[1].co = positions[6]
frust.points[2].co = positions[1]
frust2 = curve.splines.new('POLY')
frust2.points.add(2)
frust2.points[0].co = positions[2]
frust2.points[1].co = positions[6]
frust2.points[2].co = positions[3]
curve.bevel_depth = 0.02
# Material creation
color = metadata.get('color')
material = bpy.data.materials.new(username)
material.use_nodes = True
material.node_tree.nodes[0].inputs['Emission'].default_value = color
curve.materials.append(material)
users_collection.objects.link(obj)
return users_collection
def draw_users_meshes(self, user_list):
for username, user_data in user_list.items():
metadata = user_data.get('metadata', None)
if metadata:
logging.info(f"Loading user {username}")
location = metadata.get('view_corners')
color = metadata.get('color')
positions = [tuple(coord) for coord in location]
edges = ((1, 3), (2, 1), (3, 0),
(2, 0), (4, 5), (1, 6),
(2, 6), (3, 6), (0, 6))
mesh = bpy.data.meshes.new("toto")
obj = bpy.data.objects.new("toto", mesh)
bm = bmesh.new()
for p in positions:
bm.verts.new(p)
bm.verts.ensure_lookup_table()
for v1, v2 in edges:
bm.edges.new((bm.verts[v1], bm.verts[v2]))
bm.to_mesh(mesh)
bpy.context.scene.collection.objects.link(obj)
def execute(self, context):
from replication.graph import ReplicationGraph
# TODO: add filechecks
try:
f = gzip.open(self.filepath, "rb")
db = pickle.load(f)
except OSError as e:
f = open(self.filepath, "rb")
db = pickle.load(f)
if db:
logging.info(f"Reading {self.filepath}")
nodes = db.get("nodes")
logging.info(f"{len(nodes)} Nodes to load")
# init the factory with supported types
bpy_factory = ReplicatedDataFactory()
for type in bl_types.types_to_register():
type_module = getattr(bl_types, type)
name = [e.capitalize() for e in type.split('_')[1:]]
type_impl_name = 'Bl'+''.join(name)
type_module_class = getattr(type_module, type_impl_name)
# Initialisation
# init the factory with supported types
bpy_factory=ReplicatedDataFactory()
for type in bl_types.types_to_register():
type_module=getattr(bl_types, type)
name=[e.capitalize() for e in type.split('_')[1:]]
type_impl_name='Bl'+''.join(name)
type_module_class=getattr(type_module, type_impl_name)
bpy_factory.register_type(
type_module_class.bl_class,
type_module_class)
graph = ReplicationGraph()
for node, node_data in nodes:
node_type = node_data.get('str_type')
impl = bpy_factory.get_implementation_from_net(node_type)
if impl:
logging.info(f"Loading {node}")
instance = impl(owner=node_data['owner'],
uuid=node,
dependencies=node_data['dependencies'],
data=node_data['data'])
instance.store(graph)
instance.state = FETCHED
logging.info("Graph succefully loaded")
bpy_factory.register_type(
type_module_class.bl_class,
type_module_class)
# Optionnaly clear the scene
if self.clear_datablocks:
utils.clean_scene()
# Step 1: Construct nodes
for node in graph.list_ordered():
graph[node].resolve()
dir_path=Path(self.filepath).parent
# Step 2: Load nodes
for node in graph.list_ordered():
graph[node].apply()
for db in self.files:
filepath=os.path.join(dir_path, db.name)
try:
f=gzip.open(filepath, "rb")
db=pickle.load(f)
except OSError as e:
f=open(filepath, "rb")
db=pickle.load(f)
if db:
created=os.path.getctime(filepath)
logging.info(f"Reading {filepath}")
nodes=db.get("nodes")
users=db.get("users")
users_collection = self.draw_users_as_curves(users)
logging.info(f"{len(nodes)} Nodes to load")
graph=ReplicationGraph()
for node, node_data in nodes:
node_type=node_data.get('str_type')
impl=bpy_factory.get_implementation_from_net(node_type)
if impl:
logging.info(f"Loading {node}")
instance=impl(owner=node_data['owner'],
uuid=node,
dependencies=node_data['dependencies'],
data=node_data['data'])
instance.store(graph)
instance.state=FETCHED
logging.info("Graph succefully loaded")
# Find scene
scenes=[n for n in graph.values() if isinstance(
n, bl_types.bl_scene.BlScene)]
scene=scenes[0]
# collection_data = {
# 'instance_offset': [0.0, 0.0, 0.0],
# 'name': str(created),
# 'objects': scene.data['collection']['objects'],
# 'children': scene.data['collection']['children']}
# collection_node = bl_types.bl_collection.BlCollection()
# collection_node.dependencies = scene.dependencies
# collection_node.data = collection_data
# graph[collection_node.uuid] = collection_node
# del graph[scene.uuid]
scene.data['name']=str(created)
# Step 1: Construct nodes
for node in graph.list_ordered():
node_inst=graph[node]
try:
node_inst.instance=node_inst._construct(node_inst.data)
node_inst.instance.uuid=node_inst.uuid
except Exception as e:
continue
# Step 2: Load nodes
for node in graph.list_ordered():
graph[node].state=FETCHED
graph[node].apply()
scene.instance.collection.children.link(users_collection)
# bpy.context.scene.collection.children.link(collection_node.instance)
return {'FINISHED'}
@classmethod
@ classmethod
def poll(cls, context):
return True
def menu_func_import(self, context):
self.layout.operator(SessionLoadSaveOperator.bl_idname, text='Multi-user session snapshot (.db)')
self.layout.operator(SessionLoadSaveOperator.bl_idname,
text='Multi-user session snapshot (.db)')
classes = (
classes=(
SessionStartOperator,
SessionStopOperator,
SessionPropertyRemoveOperator,
@@ -921,22 +1067,22 @@ classes = (
)
def update_external_dependencies():
nodes_ids = session.list(filter=bl_types.bl_file.BlFile)
nodes_ids=session.list(filter=bl_types.bl_file.BlFile)
for node_id in nodes_ids:
node = session.get(node_id)
node=session.get(node_id)
if node and node.owner in [session.id, RP_COMMON] \
and node.has_changed():
session.commit(node_id)
session.push(node_id, check_data=False)
def sanitize_deps_graph(remove_nodes: bool = False):
def sanitize_deps_graph(remove_nodes: bool=False):
""" Cleanup the replication graph
"""
if session and session.state['STATE'] == STATE_ACTIVE:
start = utils.current_milli_time()
rm_cpt = 0
start=utils.current_milli_time()
rm_cpt=0
for node_key in session.list():
node = session.get(node_key)
node=session.get(node_key)
if node is None \
or (node.state == UP and not node.resolve(construct=False)):
if remove_nodes:
@@ -949,7 +1095,7 @@ def sanitize_deps_graph(remove_nodes: bool = False):
logging.info(f"Sanitize took { utils.current_milli_time()-start} ms")
@persistent
@ persistent
def resolve_deps_graph(dummy):
"""Resolve deps graph
@@ -960,13 +1106,13 @@
if session and session.state['STATE'] == STATE_ACTIVE:
sanitize_deps_graph(remove_nodes=True)
@persistent
@ persistent
def load_pre_handler(dummy):
if session and session.state['STATE'] in [STATE_ACTIVE, STATE_SYNCING]:
bpy.ops.session.stop()
@persistent
@ persistent
def update_client_frame(scene):
if session and session.state['STATE'] == STATE_ACTIVE:
session.update_user_metadata({
@@ -974,13 +1120,13 @@
})
@persistent
@ persistent
def depsgraph_evaluation(scene):
if session and session.state['STATE'] == STATE_ACTIVE:
context = bpy.context
blender_depsgraph = bpy.context.view_layer.depsgraph
dependency_updates = [u for u in blender_depsgraph.updates]
settings = utils.get_preferences()
context=bpy.context
blender_depsgraph=bpy.context.view_layer.depsgraph
dependency_updates=[u for u in blender_depsgraph.updates]
settings=utils.get_preferences()
update_external_dependencies()
@@ -989,13 +1135,13 @@
# Is the object tracked ?
if update.id.uuid:
# Retrieve local version
node = session.get(uuid=update.id.uuid)
node=session.get(uuid=update.id.uuid)
# Check our right on this update:
# - if its ours or ( under common and diff), launch the
# update process
# - if its to someone else, ignore the update
if node and node.owner in [session.id, RP_COMMON]:
if node and (node.owner == session.id or node.bl_check_common):
if node.state == UP:
try:
if node.has_changed():
@@ -1006,24 +1152,24 @@
if not node.is_valid():
session.remove(node.uuid)
except ContextError as e:
logging.debug(e)
logging.debug(e)
except Exception as e:
logging.error(e)
else:
continue
# A new scene is created
# A new scene is created
elif isinstance(update.id, bpy.types.Scene):
ref = session.get(reference=update.id)
ref=session.get(reference=update.id)
if ref:
ref.resolve()
else:
scn_uuid = session.add(update.id)
scn_uuid=session.add(update.id)
session.commit(scn_uuid)
session.push(scn_uuid, check_data=False)
def register():
from bpy.utils import register_class
for cls in classes:
for cls in classes:
register_class(cls)
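
On the `session_callback` docstring earlier in this file: callbacks are pushed onto `background_execution_queue` so they execute on Blender's main thread instead of a network thread. A minimal sketch of that drain pattern (the timer body here is an assumption, not the addon's exact code):

```python
import bpy
from queue import Queue

background_execution_queue = Queue()

def main_thread_exec():
    # Drain work queued by background threads; runs on the main thread
    while not background_execution_queue.empty():
        func, kwargs = background_execution_queue.get()
        func(**kwargs)
    return 0.1  # ask bpy.app.timers to call us again in 100 ms

bpy.app.timers.register(main_thread_exec)
```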

View File

@@ -195,7 +195,7 @@ class SessionPrefs(bpy.types.AddonPreferences):
connection_timeout: bpy.props.IntProperty(
name='connection timeout',
description='connection timeout before disconnection',
default=1000
default=5000
)
# Replication update settings
depsgraph_update_rate: bpy.props.FloatProperty(

View File

@@ -17,7 +17,7 @@
import logging
import sys
import traceback
import bpy
from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE,
STATE_INITIAL, STATE_LOBBY, STATE_QUITTING,
@@ -112,7 +112,8 @@ class ApplyTimer(Timer):
try:
session.apply(node)
except Exception as e:
logging.error(f"Fail to apply {node_ref.uuid}: {e}")
logging.error(f"Fail to apply {node_ref.uuid}")
traceback.print_exc()
else:
if node_ref.bl_reload_parent:
for parent in session._graph.find_parents(node):