feat(rcf): last commit before replication refactoring using json datastructure
libs/dump_anything.py (new file, 333 lines)
@@ -0,0 +1,333 @@
import bpy
import bpy.types as T
import mathutils


def remove_items_from_dict(d, keys, recursive=False):
    copy = dict(d)
    for k in keys:
        copy.pop(k, None)
    if recursive:
        for k in [k for k in copy.keys() if isinstance(copy[k], dict)]:
            copy[k] = remove_items_from_dict(copy[k], keys, recursive)
    return copy
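
# Illustrative example (not part of the original file): stripping a key recursively
#   remove_items_from_dict({"a": 1, "bl_rna": 2, "sub": {"bl_rna": 3}}, ["bl_rna"], recursive=True)
#   -> {"a": 1, "sub": {}}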


def _is_dictionnary(v):
    return hasattr(v, "items") and callable(v.items)


def _dump_filter_type(t):
    return lambda x: isinstance(x, t)


def _dump_filter_array(array):
    # only primitive type array
    if not isinstance(array, T.bpy_prop_array):
        return False
    if len(array) > 0 and type(array[0]) not in [bool, float, int]:
        return False
    return True


def _dump_filter_default(default):
    if default is None:
        return False
    if type(default) is list:
        return False
    return True


def _load_filter_type(t, use_bl_rna=True):
    def filter_function(x):
        if use_bl_rna and x.bl_rna_property:
            return isinstance(x.bl_rna_property, t)
        else:
            return isinstance(x.read(), t)
    return filter_function


def _load_filter_array(array):
    # only primitive type array
    if not isinstance(array.read(), T.bpy_prop_array):
        return False
    if len(array.read()) > 0 and type(array.read()[0]) not in [bool, float, int]:
        return False
    return True


def _load_filter_default(default):
    if default.read() is None:
        return False
    if type(default.read()) is list:
        return False
    return True


class Dumper:

    def __init__(self):
        self.verbose = False
        self.depth = 1
        self.keep_compounds_as_leaves = False
        self.accept_read_only = True
        self._build_inline_dump_functions()
        self._build_match_elements()
        self.type_subset = self.match_subset_all
        # self._atomic_types = []  # TODO future option?

    def dump(self, any):
        return self._dump_any(any, 0)

    def _dump_any(self, any, depth):
        for filter_function, dump_function in self.type_subset:
            if filter_function(any):
                return dump_function[not (depth >= self.depth)](any, depth + 1)

    def _build_inline_dump_functions(self):
        # each entry is a (dump_as_leaf, dump_as_branch) pair; _dump_any picks one
        # depending on whether the maximum depth has been reached
        self._dump_identity = (lambda x, depth: x, lambda x, depth: x)
        self._dump_ID = (lambda x, depth: x.name, self._dump_default_as_branch)
        self._dump_collection = (self._dump_default_as_leaf, self._dump_collection_as_branch)
        self._dump_array = (self._dump_default_as_leaf, self._dump_array_as_branch)
        self._dump_matrix = (self._dump_matrix_as_leaf, self._dump_matrix_as_leaf)
        self._dump_vector = (self._dump_vector_as_leaf, self._dump_vector_as_leaf)
        self._dump_default = (self._dump_default_as_leaf, self._dump_default_as_branch)

    def _build_match_elements(self):
        self._match_type_bool = (_dump_filter_type(bool), self._dump_identity)
        self._match_type_int = (_dump_filter_type(int), self._dump_identity)
        self._match_type_float = (_dump_filter_type(float), self._dump_identity)
        self._match_type_string = (_dump_filter_type(str), self._dump_identity)
        self._match_type_ID = (_dump_filter_type(T.ID), self._dump_ID)
        self._match_type_bpy_prop_collection = (_dump_filter_type(T.bpy_prop_collection), self._dump_collection)
        self._match_type_array = (_dump_filter_array, self._dump_array)
        self._match_type_matrix = (_dump_filter_type(mathutils.Matrix), self._dump_matrix)
        self._match_type_vector = (_dump_filter_type(mathutils.Vector), self._dump_vector)
        self._match_default = (_dump_filter_default, self._dump_default)

    def _dump_collection_as_branch(self, collection, depth):
        dump = {}
        for i in collection.items():
            dv = self._dump_any(i[1], depth)
            if not (dv is None):
                dump[i[0]] = dv
        return dump

    def _dump_default_as_leaf(self, default, depth):
        if self.keep_compounds_as_leaves:
            return str(type(default))
        else:
            return None

    def _dump_array_as_branch(self, array, depth):
        return [i for i in array]

    def _dump_matrix_as_leaf(self, matrix, depth):
        return [list(v) for v in matrix]

    def _dump_vector_as_leaf(self, vector, depth):
        return list(vector)

    def _dump_default_as_branch(self, default, depth):
        def is_valid_property(p):
            try:
                getattr(default, p)
            except AttributeError:
                return False
            if p.startswith("__"):
                return False
            if callable(getattr(default, p)):
                return False
            if p in ["bl_rna", "rna_type"]:
                return False
            return True

        all_property_names = [p for p in dir(default) if is_valid_property(p)]
        dump = {}
        for p in all_property_names:
            dp = self._dump_any(getattr(default, p), depth)
            if not (dp is None):
                dump[p] = dp
        return dump

    @property
    def match_subset_all(self):
        return [
            self._match_type_bool,
            self._match_type_int,
            self._match_type_float,
            self._match_type_string,
            self._match_type_ID,
            self._match_type_bpy_prop_collection,
            self._match_type_array,
            self._match_type_matrix,
            self._match_type_vector,
            self._match_default
        ]

    @property
    def match_subset_primitives(self):
        return [
            self._match_type_bool,
            self._match_type_int,
            self._match_type_float,
            self._match_type_string,
            self._match_default
        ]
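
# Illustrative usage (assumes a running Blender session; names are placeholders):
#   dumper = Dumper()
#   dumper.depth = 4
#   data = dumper.dump(bpy.data.meshes["Cube"])   # -> nested dict/list of primitives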


class BlenderAPIElement:
    def __init__(self, api_element, sub_element_name="", occlude_read_only=True):
        self.api_element = api_element
        self.sub_element_name = sub_element_name
        self.occlude_read_only = occlude_read_only

    def read(self):
        return getattr(self.api_element, self.sub_element_name) if self.sub_element_name else self.api_element

    def write(self, value):
        # take precaution if property is read-only
        try:
            if self.sub_element_name:
                setattr(self.api_element, self.sub_element_name, value)
            else:
                self.api_element = value
        except AttributeError as err:
            if not self.occlude_read_only:
                raise err

    def extend(self, element_name):
        return BlenderAPIElement(self.read(), element_name)

    @property
    def bl_rna_property(self):
        if not hasattr(self.api_element, "bl_rna"):
            return False
        if not self.sub_element_name:
            return False
        return self.api_element.bl_rna.properties[self.sub_element_name]
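
# Illustrative: a uniform read/write wrapper around a (datablock, property) pair, e.g.
#   elem = BlenderAPIElement(bpy.context.object, "location")
#   elem.read()              # -> mathutils.Vector
#   elem.extend("x").read()  # -> float component of that vector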


class Loader:

    def __init__(self):
        self.type_subset = self.match_subset_all
        self.occlude_read_only = True
        self.order = ['*']

    def load(self, dst_data, src_dumped_data):
        self._load_any(
            BlenderAPIElement(dst_data, occlude_read_only=self.occlude_read_only),
            src_dumped_data
        )

    def _load_any(self, any, dump):
        for filter_function, load_function in self.type_subset:
            if filter_function(any):
                load_function(any, dump)
                return

    def _load_identity(self, element, dump):
        element.write(dump)

    def _load_array(self, element, dump):
        # supports only primitive types currently
        try:
            for i in range(len(dump)):
                element.read()[i] = dump[i]
        except AttributeError as err:
            if not self.occlude_read_only:
                raise err

    def _load_collection(self, element, dump):
        if not element.bl_rna_property:
            return
        # local enum
        CONSTRUCTOR_NEW = "new"
        CONSTRUCTOR_ADD = "add"

        constructors = {
            T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]),
            T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, [])
        }
        element_type = element.bl_rna_property.fixed_type
        constructor = constructors.get(type(element_type))
        if constructor is None:  # collection type not supported
            return
        for dumped_element in dump.values():
            try:
                constructor_parameters = [dumped_element[name] for name in constructor[1]]
            except KeyError:
                print("Collection load error, missing parameters.")
                continue  # TODO handle error
            new_element = getattr(element.read(), constructor[0])(*constructor_parameters)
            self._load_any(
                BlenderAPIElement(new_element, occlude_read_only=self.occlude_read_only),
                dumped_element
            )

    def _load_pointer(self, pointer, dump):
        rna_property_type = pointer.bl_rna_property.fixed_type
        if not rna_property_type:
            return
        if isinstance(rna_property_type, T.Image):
            pointer.write(bpy.data.images.get(dump))
        elif isinstance(rna_property_type, T.Texture):
            pointer.write(bpy.data.textures.get(dump))
        elif isinstance(rna_property_type, T.ColorRamp):
            self._load_default(pointer, dump)

    def _load_matrix(self, matrix, dump):
        matrix.write(mathutils.Matrix(dump))

    def _load_vector(self, vector, dump):
        vector.write(mathutils.Vector(dump))

    def _ordered_keys(self, keys):
        ordered_keys = []
        for order_element in self.order:
            if order_element == '*':
                ordered_keys += [k for k in keys if k not in self.order]
            else:
                if order_element in keys:
                    ordered_keys.append(order_element)
        return ordered_keys

    def _load_default(self, default, dump):
        if not _is_dictionnary(dump):
            return  # TODO error handling
        for k in self._ordered_keys(dump.keys()):
            v = dump[k]
            if not hasattr(default.read(), k):
                continue  # TODO error handling
            self._load_any(default.extend(k), v)

    @property
    def match_subset_all(self):
        return [
            (_load_filter_type(T.BoolProperty), self._load_identity),
            (_load_filter_type(T.IntProperty), self._load_identity),
            # matrix and vector must come before float, because their bl_rna type is FloatProperty
            (_load_filter_type(mathutils.Matrix, use_bl_rna=False), self._load_matrix),
            (_load_filter_type(mathutils.Vector, use_bl_rna=False), self._load_vector),
            (_load_filter_type(T.FloatProperty), self._load_identity),
            (_load_filter_type(T.StringProperty), self._load_identity),
            (_load_filter_type(T.EnumProperty), self._load_identity),
            (_load_filter_type(T.PointerProperty), self._load_pointer),
            (_load_filter_array, self._load_array),
            (_load_filter_type(T.CollectionProperty), self._load_collection),
            (_load_filter_default, self._load_default)
        ]
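
# Illustrative: the `order` list controls which dumped keys are written first,
# e.g. to make sure a name is applied before the remaining properties:
#   loader = Loader()
#   loader.order = ['name', '*']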


# Utility functions


def dump(any, depth=1):
    dumper = Dumper()
    dumper.depth = depth
    return dumper.dump(any)


def load(dst, src):
    loader = Loader()
    loader.load(dst, src)
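
# Round-trip sketch (illustrative, not part of this module): the dumped structure
# contains only dicts, lists and primitives, so it can travel as JSON between peers.
#   import json
#   src_mesh = bpy.data.meshes["Cube"]
#   payload = json.dumps(dump(src_mesh, depth=4))
#   load(bpy.data.meshes.new("Copy"), json.loads(payload))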


@@ -3,6 +3,7 @@ import bpy
 from . import net_components
 from . import net_ui
 from . import rna_translation
+from .libs import dump_anything
 import time
 import logging
 import mathutils

@@ -673,20 +674,35 @@ class load_data(bpy.types.Operator):
     bl_idname = "session.load_data"
     bl_label = "Get bpy data"
     bl_description = "Description that shows in blender tooltips"
-    bl_options = {"REGISTER"}
+    bl_options = {"REGISTER","UNDO"}

     @classmethod
     def poll(cls, context):
         return True

     def explore(self, root):
         for item in root.bl_rna.properties:
             if item.name not in ['RNA']:
-                print(item.name)
+
+                print((item.name))
                 # self.explore(item)

     def execute(self, context):
-        self.explore(bpy.data)
+        obj = bpy.data.meshes['Cube']
+
+        dumper = dump_anything.Dumper()
+        dumper.type_subset = dumper.match_subset_all
+        dumper.depth = 4
+
+        c = dumper.dump(obj)
+        bpy.data.meshes.remove(obj)
+
+        newo = bpy.data.meshes.new(c["name"])
+        print(c)
+        loader = dump_anything.Loader()
+        loader.load(newo, c)  # load(dst, src): write the dumped dict back onto the new mesh
+        # bpy.data.collections['Collection'].objects.link(newo)
+        # self.explore(bpy.data.objects)
         # for datablock in getattr(bpy.data,item):
         # print(": {}:{}".format(item,datablock.name))
         return {"FINISHED"}

@@ -705,6 +721,16 @@ classes = (
     load_data,
 )

+
+def depsgraph_update(scene):
+    for c in bpy.context.depsgraph.updates.items():
+        # print(c[1].id)
+        if c[1].is_updated_geometry:
+            pass
+        if c[1].is_updated_transform:
+            pass
+
+        # print(dumper.dump(c[1]))

 def register():
     from bpy.utils import register_class

@@ -714,6 +740,7 @@ def register():
     bpy.types.Scene.session_settings = bpy.props.PointerProperty(
         type=session_settings)

+    # bpy.app.handlers.depsgraph_update_post.append(depsgraph_update)
     # bpy.app.handlers.depsgraph_update_post.append(observer)


@@ -724,8 +751,11 @@ def unregister():
     pass
     global server
     global client
-    # bpy.app.handlers.depsgraph_update_post.remove(observer)
+
+    # bpy.app.handlers.depsgraph_update_post.remove(depsgraph_update)
+    # bpy.app.handlers.depsgraph_update_post.remove(observer)
     # bpy.app.handlers.depsgraph_update_post.remove(mark_objects_for_update)
+
     if server:
         server.stop()
         del server