From d0e80da945a8f0ed6e584b64a449efa917873217 Mon Sep 17 00:00:00 2001
From: Swann
Date: Wed, 3 Mar 2021 09:55:48 +0100
Subject: [PATCH 1/4] fix: object parenting can't be removed

Related to #179
---
 multi_user/bl_types/bl_object.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/multi_user/bl_types/bl_object.py b/multi_user/bl_types/bl_object.py
index 810eb69..430cd7c 100644
--- a/multi_user/bl_types/bl_object.py
+++ b/multi_user/bl_types/bl_object.py
@@ -296,9 +296,14 @@ class BlObject(BlDatablock):
         # Load transformation data
         loader.load(target, data)
 
+        # Object display fields
         if 'display' in data:
             loader.load(target.display, data['display'])
 
+        # Parent
+        if 'parent' not in data and target.parent:
+            target.parent = None
+
         # Pose
         if 'pose' in data:
             if not target.pose:
@@ -367,6 +372,9 @@ class BlObject(BlDatablock):
             "name",
             "rotation_mode",
             "parent",
+            "parent_type",
+            "track_axis",
+            "up_axis",
             "data",
             "library",
             "empty_display_type",

From 19c56e590b2047266e3b6e9852e6baf1f9cac477 Mon Sep 17 00:00:00 2001
From: Swann
Date: Wed, 3 Mar 2021 10:03:57 +0100
Subject: [PATCH 2/4] feat: remove parent as node dependency

---
 multi_user/bl_types/bl_object.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/multi_user/bl_types/bl_object.py b/multi_user/bl_types/bl_object.py
index 430cd7c..b68b24d 100644
--- a/multi_user/bl_types/bl_object.py
+++ b/multi_user/bl_types/bl_object.py
@@ -557,8 +557,6 @@ class BlObject(BlDatablock):
         # Avoid Empty case
         if self.instance.data:
             deps.append(self.instance.data)
-        if self.instance.parent :
-            deps.append(self.instance.parent)
 
         if self.is_library:
             deps.append(self.instance.library)

From cc5a87adb839c9d93679ad540b8aab65f0483059 Mon Sep 17 00:00:00 2001
From: Swann
Date: Wed, 3 Mar 2021 11:00:47 +0100
Subject: [PATCH 3/4] fix: prevent matrix_parent_inverse from being reset by
 loading parents only when necessary

---
 multi_user/bl_types/bl_object.py | 21 +++++++++++++++------
 1 file changed, 15 insertions(+), 6 deletions(-)

diff --git a/multi_user/bl_types/bl_object.py b/multi_user/bl_types/bl_object.py
index b68b24d..d7dde74 100644
--- a/multi_user/bl_types/bl_object.py
+++ b/multi_user/bl_types/bl_object.py
@@ -300,8 +300,14 @@ class BlObject(BlDatablock):
         if 'display' in data:
             loader.load(target.display, data['display'])
 
-        # Parent
-        if 'parent' not in data and target.parent:
+        # Parenting
+        parent_id = data.get('parent_id')
+        if parent_id:
+            parent = bpy.data.objects[parent_id]
+            # Avoid reloading
+            if target.parent != parent and parent is not None:
+                target.parent = parent
+        elif target.parent:
             target.parent = None
 
         # Pose
@@ -371,10 +377,6 @@ class BlObject(BlDatablock):
         dumper.include_filter = [
             "name",
             "rotation_mode",
-            "parent",
-            "parent_type",
-            "track_axis",
-            "up_axis",
             "data",
             "library",
             "empty_display_type",
@@ -419,6 +421,10 @@ class BlObject(BlDatablock):
         if self.is_library:
             return data
 
+        # PARENTING
+        if instance.parent:
+            data['parent_id'] = instance.parent.name
+
         # MODIFIERS
         if hasattr(instance, 'modifiers'):
             data["modifiers"] = {}
@@ -561,6 +567,9 @@ class BlObject(BlDatablock):
        if self.is_library:
             deps.append(self.instance.library)
 
+        if self.instance.parent :
+            deps.append(self.instance.parent)
+
         if self.instance.instance_type == 'COLLECTION':
             # TODO: uuid based
             deps.append(self.instance.instance_collection)
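
NOTE: Patches 1-3 move object parenting to a name-based 'parent_id' field: the
dump side stores instance.parent.name, and the load side re-assigns
Object.parent only when the stored parent differs from the current one,
clearing it when no 'parent_id' is present. Per the patch 3 subject, skipping
redundant re-assignments is what keeps matrix_parent_inverse intact. A minimal
sketch of that round-trip, assuming it runs inside Blender's Python
environment; dump_parenting and load_parenting are illustrative helper names,
not add-on API, and the sketch uses bpy.data.objects.get() where the patch
indexes bpy.data.objects directly:

    import bpy

    def dump_parenting(obj: bpy.types.Object) -> dict:
        """Dump side: reference the parent by name only."""
        data = {}
        if obj.parent:
            data['parent_id'] = obj.parent.name
        return data

    def load_parenting(data: dict, target: bpy.types.Object) -> None:
        """Load side: touch Object.parent only when the value changed,
        leaving matrix_parent_inverse alone on no-op updates."""
        parent_id = data.get('parent_id')
        if parent_id:
            parent = bpy.data.objects.get(parent_id)
            if parent is not None and target.parent != parent:
                target.parent = parent
        elif target.parent:
            target.parent = None
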
From 94877533070e33f2946a1b97f514a29edc9bb54f Mon Sep 17 00:00:00 2001
From: Swann
Date: Thu, 4 Mar 2021 15:48:36 +0100
Subject: [PATCH 4/4] feat: fix object and collection support for geometry nodes

---
 multi_user/bl_types/bl_material.py | 120 ++++++++++++++++-------
 1 file changed, 65 insertions(+), 55 deletions(-)

diff --git a/multi_user/bl_types/bl_material.py b/multi_user/bl_types/bl_material.py
index e815831..12964ed 100644
--- a/multi_user/bl_types/bl_material.py
+++ b/multi_user/bl_types/bl_material.py
@@ -27,7 +27,7 @@ from .dump_anything import Loader, Dumper
 from .bl_datablock import BlDatablock, get_datablock_from_uuid
 
 NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
-IGNORED_SOCKETS = ['GEOMETRY']
+IGNORED_SOCKETS = ['GEOMETRY', 'SHADER']
 
 def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
     """ Load a node into a node_tree from a dict
@@ -53,13 +53,16 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
     inputs_data = node_data.get('inputs')
     if inputs_data:
         inputs = [i for i in target_node.inputs if i.type not in IGNORED_SOCKETS]
-
-        for idx, inpt in enumerate(inputs_data):
-            if idx < len(inputs) and hasattr(inputs[idx], "default_value"):
+        for idx, inpt in enumerate(inputs):
+            loaded_input = inputs_data[idx]
+            if idx < len(inputs_data) and hasattr(inpt, "default_value"):
                 try:
-                    inputs[idx].default_value = inpt
+                    if inpt.type in ['OBJECT', 'COLLECTION']:
+                        inpt.default_value = get_datablock_from_uuid(loaded_input, None)
+                    else:
+                        inpt.default_value = loaded_input
                 except Exception as e:
-                    logging.warning(f"Node {target_node.name} input {inputs[idx].name} parameter not supported, skipping ({e})")
+                    logging.warning(f"Node {target_node.name} input {inpt.name} parameter not supported, skipping ({e})")
             else:
                 logging.warning(f"Node {target_node.name} input length mismatch.")
 
@@ -78,48 +81,6 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
                     f"Node {target_node.name} output length mismatch.")
 
 
-def load_links(links_data, node_tree):
-    """ Load node_tree links from a list
-
-    :arg links_data: dumped node links
-    :type links_data: list
-    :arg node_tree: node links collection
-    :type node_tree: bpy.types.NodeTree
-    """
-
-    for link in links_data:
-        input_socket = node_tree.nodes[link['to_node']
-                                       ].inputs[int(link['to_socket'])]
-        output_socket = node_tree.nodes[link['from_node']].outputs[int(
-            link['from_socket'])]
-        node_tree.links.new(input_socket, output_socket)
-
-
-def dump_links(links):
-    """ Dump node_tree links collection to a list
-
-    :arg links: node links collection
-    :type links: bpy.types.NodeLinks
-    :retrun: list
-    """
-
-    links_data = []
-
-    for link in links:
-        to_socket = NODE_SOCKET_INDEX.search(
-            link.to_socket.path_from_id()).group(1)
-        from_socket = NODE_SOCKET_INDEX.search(
-            link.from_socket.path_from_id()).group(1)
-        links_data.append({
-            'to_node': link.to_node.name,
-            'to_socket': to_socket,
-            'from_node': link.from_node.name,
-            'from_socket': from_socket,
-        })
-
-    return links_data
-
-
 def dump_node(node: bpy.types.ShaderNode) -> dict:
     """ Dump a single node to a dict
 
@@ -167,17 +128,23 @@ def dump_node(node: bpy.types.ShaderNode) -> dict:
 
     if hasattr(node, 'inputs'):
         dumped_node['inputs'] = []
-        for idx, inpt in enumerate(node.inputs):
+        inputs = [i for i in node.inputs if i.type not in IGNORED_SOCKETS]
+        for idx, inpt in enumerate(inputs):
             if hasattr(inpt, 'default_value'):
-                dumped_node['inputs'].append(
-                    io_dumper.dump(inpt.default_value))
+                if isinstance(inpt.default_value, bpy.types.ID):
+                    dumped_input = inpt.default_value.uuid
+                else:
+                    dumped_input = io_dumper.dump(inpt.default_value)
+
+                dumped_node['inputs'].append(dumped_input)
 
     if hasattr(node, 'outputs'):
         dumped_node['outputs'] = []
         for idx, output in enumerate(node.outputs):
-            if hasattr(output, 'default_value'):
-                dumped_node['outputs'].append(
-                    io_dumper.dump(output.default_value))
+            if output.type not in IGNORED_SOCKETS:
+                if hasattr(output, 'default_value'):
+                    dumped_node['outputs'].append(
+                        io_dumper.dump(output.default_value))
 
     if hasattr(node, 'color_ramp'):
         ramp_dumper = Dumper()
@@ -207,6 +174,49 @@ def dump_node(node: bpy.types.ShaderNode) -> dict:
     return dumped_node
 
 
+
+def load_links(links_data, node_tree):
+    """ Load node_tree links from a list
+
+    :arg links_data: dumped node links
+    :type links_data: list
+    :arg node_tree: node links collection
+    :type node_tree: bpy.types.NodeTree
+    """
+
+    for link in links_data:
+        input_socket = node_tree.nodes[link['to_node']
+                                       ].inputs[int(link['to_socket'])]
+        output_socket = node_tree.nodes[link['from_node']].outputs[int(
+            link['from_socket'])]
+        node_tree.links.new(input_socket, output_socket)
+
+
+def dump_links(links):
+    """ Dump node_tree links collection to a list
+
+    :arg links: node links collection
+    :type links: bpy.types.NodeLinks
+    :retrun: list
+    """
+
+    links_data = []
+
+    for link in links:
+        to_socket = NODE_SOCKET_INDEX.search(
+            link.to_socket.path_from_id()).group(1)
+        from_socket = NODE_SOCKET_INDEX.search(
+            link.from_socket.path_from_id()).group(1)
+        links_data.append({
+            'to_node': link.to_node.name,
+            'to_socket': to_socket,
+            'from_node': link.from_node.name,
+            'from_socket': from_socket,
+        })
+
+    return links_data
+
+
 def dump_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict:
     """ Dump a shader node_tree to a dict including links and nodes
 
@@ -263,7 +273,7 @@ def load_node_tree_sockets(sockets: bpy.types.Collection,
     """
     # Check for removed sockets
     for socket in sockets:
-        if not [s for s in sockets_data if socket['uuid'] == s[2]]:
+        if not [s for s in sockets_data if 'uuid' in socket and socket['uuid'] == s[2]]:
             sockets.remove(socket)
 
     # Check for new sockets
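
NOTE: Patch 4 makes geometry-node Object and Collection inputs survive
replication: their socket default_value is an ID datablock, so dump_node()
stores the add-on's per-datablock uuid property instead of trying to serialize
the datablock, load_node() resolves it back with get_datablock_from_uuid(),
and GEOMETRY/SHADER sockets are ignored on both sides. A condensed sketch of
that per-socket rule, assuming it runs inside Blender with the add-on's uuid
property registered on datablocks; find_datablock_by_uuid is a simplified
stand-in for the add-on's get_datablock_from_uuid, and plain values are passed
through untouched rather than going through the Dumper:

    import bpy

    IGNORED_SOCKETS = ['GEOMETRY', 'SHADER']

    def find_datablock_by_uuid(uuid: str):
        """Simplified stand-in: scan objects and collections for a matching uuid."""
        for pool in (bpy.data.objects, bpy.data.collections):
            for datablock in pool:
                if getattr(datablock, 'uuid', None) == uuid:
                    return datablock
        return None

    def dump_socket(socket):
        """Return a serializable value for one socket, or None to skip it."""
        if socket.type in IGNORED_SOCKETS or not hasattr(socket, 'default_value'):
            return None
        if isinstance(socket.default_value, bpy.types.ID):
            # Object/Collection sockets: keep only a uuid reference.
            return socket.default_value.uuid
        return socket.default_value

    def load_socket(socket, dumped_value) -> None:
        """Write a dumped value back, resolving uuid references to datablocks."""
        if socket.type in ('OBJECT', 'COLLECTION'):
            socket.default_value = find_datablock_by_uuid(dumped_value)
        else:
            socket.default_value = dumped_value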