Compare commits

...

9 Commits

7 changed files with 24 additions and 11 deletions

View File

@@ -264,4 +264,10 @@ All notable changes to this project will be documented in this file.
 - Server crashing during snapshots
 - Blender 3.1 numpy loading error during early connection process
 - Server docker arguments
+## [0.5.5] - 2022-06-12
+### Fixed
+- Numpy mesh serialization error

View File

@@ -16,12 +16,12 @@ import sys
 # -- Project information -----------------------------------------------------
-project = 'Multi-User 0.5.0 Documentation'
+project = 'Multi-User 0.5.x Documentation'
 copyright = '2020, Swann Martinez'
 author = 'Swann Martinez, Poochy, Fabian'
 # The full version, including alpha/beta/rc tags
-version_release = '0.5.1-develop'
+version_release = '0.5.5'
 # -- General configuration ---------------------------------------------------

View File

@@ -19,7 +19,7 @@
 bl_info = {
     "name": "Multi-User",
     "author": "Swann Martinez",
-    "version": (0, 5, 4),
+    "version": (0, 5, 7),
     "description": "Enable real-time collaborative workflow inside blender",
     "blender": (2, 82, 0),
     "location": "3D View > Sidebar > Multi-User tab",

View File

@@ -64,7 +64,7 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
         if idx < len(inputs_data) and hasattr(inpt, "default_value"):
             loaded_input = inputs_data[idx]
             try:
-                if inpt.type in ['OBJECT', 'COLLECTION']:
+                if inpt.type in ['OBJECT', 'COLLECTION', 'MATERIAL']:
                     inpt.default_value = get_datablock_from_uuid(loaded_input, None)
                 else:
                     inpt.default_value = loaded_input
@@ -80,7 +80,7 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
         if idx < len(outputs_data) and hasattr(output, "default_value"):
             loaded_output = outputs_data[idx]
             try:
-                if output.type in ['OBJECT', 'COLLECTION']:
+                if output.type in ['OBJECT', 'COLLECTION', 'MATERIAL']:
                     output.default_value = get_datablock_from_uuid(loaded_output, None)
                 else:
                     output.default_value = loaded_output
@@ -351,6 +351,11 @@ def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
     def has_texture(node): return (
         node.type in ['ATTRIBUTE_SAMPLE_TEXTURE','TEXTURE'] and node.texture)
+    def has_material(node): return (
+        node.type == 'MATERIAL' and node.inputs[2].default_value is not None
+    )
     deps = []
     for node in node_tree.nodes:
@@ -360,7 +365,9 @@ def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
             deps.append(node.node_tree)
         elif has_texture(node):
             deps.append(node.texture)
+        elif has_material(node):
+            print(node.inputs[2].default_value.name)
+            deps.append(node.inputs[2].default_value)
     return deps
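
These hunks share one pattern: socket values that point at other datablocks (objects, collections, and now materials) are stored as UUID strings and resolved back through get_datablock_from_uuid on load, while get_node_tree_dependencies additionally reports the material plugged into a node's third input socket so it can be replicated first. A minimal standalone sketch of that UUID lookup, assuming a custom 'uuid' property on each datablock; the helper name matches the diff, but the body below is illustrative, not the add-on's actual implementation:

import bpy

def get_datablock_from_uuid(uuid, default=None):
    # Illustrative only: scan the datablock collections a node socket can
    # reference and return the first one whose 'uuid' custom property matches.
    if uuid is None:
        return default
    for collection in (bpy.data.materials, bpy.data.objects, bpy.data.collections):
        for datablock in collection:
            if datablock.get('uuid') == uuid:
                return datablock
    return default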

View File

@@ -24,8 +24,8 @@ import numpy as np
 BPY_TO_NUMPY_TYPES = {
-    'FLOAT': np.float,
-    'INT': np.int,
+    'FLOAT': np.float32,
+    'INT': np.int32,
     'BOOL': np.bool,
     'BOOLEAN': np.bool}
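
Context for this hunk: the bare np.float and np.int aliases were deprecated in NumPy 1.20 and removed in 1.24, which is likely what the 0.5.5 changelog entry about numpy mesh serialization refers to; mapping to explicit fixed-width dtypes avoids the breakage. A small self-contained check of the corrected mapping (np.bool_ is used below because plain np.bool is likewise gone on current NumPy; the hunk itself leaves those two entries untouched):

import numpy as np

# Explicit dtypes work on every NumPy release; the old aliases raise
# AttributeError on NumPy >= 1.24.
BPY_TO_NUMPY_TYPES = {
    'FLOAT': np.float32,
    'INT': np.int32,
    'BOOL': np.bool_,      # sketch only: the diff keeps np.bool here
    'BOOLEAN': np.bool_,
}

buffer = np.zeros(8, dtype=BPY_TO_NUMPY_TYPES['FLOAT'])
print(buffer.dtype)  # float32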

View File

@@ -31,7 +31,7 @@ DEFAULT_CACHE_DIR = os.path.join(
     os.path.dirname(os.path.abspath(__file__)), "cache")
 REPLICATION_DEPENDENCIES = {
     "zmq",
-    "deepdiff"
+    "deepdiff==5.7.0"
 }
 LIBS = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
 REPLICATION = os.path.join(LIBS,"replication")
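
Pinning deepdiff==5.7.0 keeps the auto-installed dependency on a version known to work with the bundled replication library instead of whatever pip resolves as latest. A rough sketch of how a pinned set like this is commonly installed into Blender's bundled Python; the add-on's real bootstrap code may differ, and install_dependencies is a hypothetical helper:

import subprocess
import sys

REPLICATION_DEPENDENCIES = {
    "zmq",
    "deepdiff==5.7.0",
}

def install_dependencies(packages):
    # On Blender 2.91+, sys.executable is Blender's own Python, so pip installs
    # the pinned packages where the add-on can import them.
    subprocess.check_call([sys.executable, "-m", "pip", "install", *sorted(packages)])

# install_dependencies(REPLICATION_DEPENDENCIES)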