Merge branch '29-differential-revision' into 'develop'

Feat: implementation refactor progress

See merge request slumber/multi-user!31
@@ -23,54 +23,26 @@ import numpy as np

from enum import Enum

from .. import utils
from ..libs.dump_anything import (
    Dumper, Loader, dump_collection_attr, load_collection_attr)
from .dump_anything import (
    Dumper, Loader, np_dump_collection, np_load_collection, remove_items_from_dict)
from .bl_datablock import BlDatablock


ENUM_EASING_TYPE = [
    'AUTO',
    'EAS_IN',
    'EASE_OUT',
    'EASE_IN_OUT']

KEYFRAME = [
    'amplitude',
    'co',
    'back',
    'handle_left',
    'handle_right',
    'easing',
    'handle_left_type',
    'handle_right_type',
    'type',
    'interpolation',
]


ENUM_HANDLE_TYPE = [
    'FREE',
    'ALIGNED',
    'VECTOR',
    'AUTO',
    'AUTO_CLAMPED']


ENUM_INTERPOLATION_TYPE = [
    'CONSTANT',
    'LINEAR',
    'BEZIER',
    'SINE',
    'QUAD',
    'CUBIC',
    'QUART',
    'QUINT',
    'EXPO',
    'CIRC',
    'BACK',
    'BOUNCE',
    'ELASTIC']


ENUM_KEY_TYPE = [
    'KEYFRAME',
    'BREAKDOWN',
    'MOVING_HOLD',
    'EXTREME',
    'JITTER']


# TODO: Automatic enum and numpy dump and loading


def dump_fcurve(fcurve, use_numpy=True):
def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
    """ Dump a single curve to a dict

    :arg fcurve: fcurve to dump

@@ -88,26 +60,10 @@ def dump_fcurve(fcurve, use_numpy=True):
    if use_numpy:
        points = fcurve.keyframe_points
        fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)

        fcurve_data['amplitude'] = dump_collection_attr(points, 'amplitude')
        fcurve_data['co'] = dump_collection_attr(points, 'co')
        fcurve_data['back'] = dump_collection_attr(points, 'back')
        fcurve_data['handle_left'] = dump_collection_attr(points, 'handle_left')
        fcurve_data['handle_right'] = dump_collection_attr(points, 'handle_right')

        fcurve_data['easing'] = [ENUM_EASING_TYPE.index(
            p.easing) for p in fcurve.keyframe_points]
        fcurve_data['handle_left_type'] = [ENUM_HANDLE_TYPE.index(
            p.handle_left_type) for p in fcurve.keyframe_points]
        fcurve_data['handle_right_type'] = [ENUM_HANDLE_TYPE.index(
            p.handle_right_type) for p in fcurve.keyframe_points]
        fcurve_data['type'] = [ENUM_KEY_TYPE.index(
            p.type) for p in fcurve.keyframe_points]
        fcurve_data['interpolation'] = [ENUM_INTERPOLATION_TYPE.index(
            p.interpolation) for p in fcurve.keyframe_points]
        fcurve_data['keyframe_points'] = np_dump_collection(points, KEYFRAME)

    else:  # Legacy method
        dumper = utils.dump_anything.Dumper()
        dumper = Dumper()
        fcurve_data["keyframe_points"] = []

        for k in fcurve.keyframe_points:

@@ -136,19 +92,7 @@ def load_fcurve(fcurve_data, fcurve):

    if use_numpy:
        keyframe_points.add(fcurve_data['keyframes_count'])

        load_collection_attr(keyframe_points, 'co', fcurve_data['co'])
        load_collection_attr(keyframe_points, 'back', fcurve_data['back'])
        load_collection_attr(keyframe_points, 'amplitude', fcurve_data['amplitude'])
        load_collection_attr(keyframe_points, 'handle_left', fcurve_data['handle_left'])
        load_collection_attr(keyframe_points, 'handle_right', fcurve_data['handle_right'])

        for index, point in enumerate(keyframe_points):
            point.type = ENUM_KEY_TYPE[fcurve_data['type'][index]]
            point.easing = ENUM_EASING_TYPE[fcurve_data['easing'][index]]
            point.handle_left_type = ENUM_HANDLE_TYPE[fcurve_data['handle_left_type'][index]]
            point.handle_right_type = ENUM_HANDLE_TYPE[fcurve_data['handle_right_type'][index]]
            point.interpolation = ENUM_INTERPOLATION_TYPE[fcurve_data['interpolation'][index]]
        np_load_collection(fcurve_data["keyframe_points"], keyframe_points, KEYFRAME)

    else:
        # paste dumped keyframes

@@ -163,12 +107,13 @@ def load_fcurve(fcurve_data, fcurve):
        )

        keycache = copy.copy(dumped_keyframe_point)
        keycache = utils.dump_anything.remove_items_from_dict(
        keycache = remove_items_from_dict(
            keycache,
            ["co", "handle_left", "handle_right", 'type']
        )

        utils.dump_anything.load(new_kf, keycache)
        loader = Loader()
        loader.load(new_kf, keycache)

        new_kf.type = dumped_keyframe_point['type']
        new_kf.handle_left = [

@@ -211,7 +156,7 @@ class BlAction(BlDatablock):

    def _dump(self, pointer=None):
        assert(pointer)
        dumper = utils.dump_anything.Dumper()
        dumper = Dumper()
        dumper.exclude_filter = [
            'name_full',
            'original',
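Note: in the numpy branch above, enum-typed keyframe attributes were serialized as integer indices into the module-level lists before np_dump_collection took over that job. A standalone sketch of the index round trip, with a hypothetical `points` stand-in for `fcurve.keyframe_points` (the helper names are illustrative, not part of the diff):

    # Hedged sketch: enum identifiers stored as list indices, restored by lookup.
    def dump_handle_types(points):
        return [ENUM_HANDLE_TYPE.index(p.handle_left_type) for p in points]

    def load_handle_types(points, indices):
        for point, idx in zip(points, indices):
            point.handle_left_type = ENUM_HANDLE_TYPE[idx]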
@@ -19,8 +19,9 @@
import bpy
import mathutils

from .. import utils
from .. import presence, operators
from .dump_anything import Loader, Dumper
from .. import presence, operators, utils
from .bl_datablock import BlDatablock


@@ -35,7 +36,7 @@ class BlArmature(BlDatablock):
    def _construct(self, data):
        return bpy.data.armatures.new(data["name"])

    def load_implementation(self, data, target):
    def _load_implementation(self, data, target):
        # Load parent object
        parent_object = utils.find_from_attr(
            'uuid',

@@ -97,7 +98,8 @@ class BlArmature(BlDatablock):
                [bone]['parent']]
            new_bone.use_connect = bone_data['use_connect']

            utils.dump_anything.load(new_bone, bone_data)
            loader = Loader()
            loader.load(new_bone, bone_data)

        if bpy.context.mode != 'OBJECT':
            bpy.ops.object.mode_set(mode='OBJECT')

@@ -107,10 +109,10 @@ class BlArmature(BlDatablock):
        if 'EDIT' in current_mode:
            bpy.ops.object.mode_set(mode='EDIT')

    def dump_implementation(self, data, pointer=None):
    def _dump_implementation(self, data, pointer=None):
        assert(pointer)

        dumper = utils.dump_anything.Dumper()
        dumper = Dumper()
        dumper.depth = 4
        dumper.include_filter = [
            'bones',
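Note: the recurring change in this revision is the move from the module-level `utils.dump_anything.load()` / `utils.dump_anything.dump()` helpers to `Loader` and `Dumper` instances imported from the relocated `.dump_anything` module. A minimal sketch of the new calling convention (the `datablock` name is a placeholder):

    from .dump_anything import Dumper, Loader

    dumper = Dumper()
    dumper.depth = 2                    # how deep to walk nested properties
    dumper.include_filter = ['name']    # whitelist of properties to dump
    data = dumper.dump(datablock)       # -> plain dict

    loader = Loader()
    loader.load(datablock, data)        # writes the dict back onto the datablock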
@@ -19,7 +19,7 @@
import bpy
import mathutils

from .. import utils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock


@@ -35,21 +35,22 @@ class BlCamera(BlDatablock):
        return bpy.data.cameras.new(data["name"])

    def load_implementation(self, data, target):
        utils.dump_anything.load(target, data)
    def _load_implementation(self, data, target):
        loader = Loader()
        loader.load(target, data)

        dof_settings = data.get('dof')

        # DOF settings
        if dof_settings:
            utils.dump_anything.load(target.dof, dof_settings)
            loader.load(target.dof, dof_settings)

    def dump_implementation(self, data, pointer=None):
    def _dump_implementation(self, data, pointer=None):
        assert(pointer)

        # TODO: background image support

        dumper = utils.dump_anything.Dumper()
        dumper = Dumper()
        dumper.depth = 2
        dumper.include_filter = [
            "name",
@@ -46,9 +46,8 @@ class BlCollection(BlDatablock):
        instance.uuid = self.uuid
        return instance

    def load_implementation(self, data, target):
    def _load_implementation(self, data, target):
        # Load other meshes metadata
        # dump_anything.load(target, data)
        target.name = data["name"]

        # link objects

@@ -72,7 +71,7 @@ class BlCollection(BlDatablock):
            if collection.uuid not in data["children"]:
                target.children.unlink(collection)

    def dump_implementation(self, data, pointer=None):
    def _dump_implementation(self, data, pointer=None):
        assert(pointer)
        data = {}
        data['name'] = pointer.name

@@ -95,7 +94,7 @@ class BlCollection(BlDatablock):

        return data

    def resolve_deps_implementation(self):
    def _resolve_deps_implementation(self):
        deps = []

        for child in self.pointer.children:
@@ -19,11 +19,33 @@
import bpy
import bpy.types as T
import mathutils
import logging

from .. import utils
from .bl_datablock import BlDatablock
from ..libs import dump_anything
from .dump_anything import (Dumper, Loader,
                            np_load_collection,
                            np_dump_collection)

logger = logging.getLogger(__name__)

SPLINE_BEZIER_POINT = [
    # "handle_left_type",
    # "handle_right_type",
    "handle_left",
    "co",
    "handle_right",
    "tilt",
    "weight_softbody",
    "radius",
]

SPLINE_POINT = [
    "co",
    "tilt",
    "weight_softbody",
    "radius",
]

class BlCurve(BlDatablock):
    bl_id = "curves"

@@ -36,49 +58,47 @@ class BlCurve(BlDatablock):
    def _construct(self, data):
        return bpy.data.curves.new(data["name"], data["type"])

    def load_implementation(self, data, target):
        dump_anything.load(target, data)
    def _load_implementation(self, data, target):
        loader = Loader()
        loader.load(target, data)

        target.splines.clear()
        # load splines
        for spline in data['splines']:
            new_spline = target.splines.new(data['splines'][spline]['type'])
            dump_anything.load(new_spline, data['splines'][spline])
        for spline in data['splines'].values():
            new_spline = target.splines.new(spline['type'])

            # Load curve geometry data
            if new_spline.type == 'BEZIER':
                for bezier_point_index in data['splines'][spline]["bezier_points"]:
                    if bezier_point_index != 0:
                        new_spline.bezier_points.add(1)
                    dump_anything.load(
                        new_spline.bezier_points[bezier_point_index], data['splines'][spline]["bezier_points"][bezier_point_index])

                bezier_points = new_spline.bezier_points
                bezier_points.add(spline['bezier_points_count'])
                np_load_collection(spline['bezier_points'], bezier_points, SPLINE_BEZIER_POINT)

            # Not really working for now...
            # See https://blender.stackexchange.com/questions/7020/create-nurbs-surface-with-python
            if new_spline.type == 'NURBS':
                new_spline.points.add(len(data['splines'][spline]["points"])-1)
                for point_index in data['splines'][spline]["points"]:
                    dump_anything.load(
                        new_spline.points[point_index], data['splines'][spline]["points"][point_index])
                logger.error("NURBS not supported.")
                # new_spline.points.add(len(data['splines'][spline]["points"])-1)
                # for point_index in data['splines'][spline]["points"]:
                #     loader.load(
                #         new_spline.points[point_index], data['splines'][spline]["points"][point_index])

    def dump_implementation(self, data, pointer=None):
            loader.load(new_spline, spline)
    def _dump_implementation(self, data, pointer=None):
        assert(pointer)
        dumper = dump_anything.Dumper()
        dumper = Dumper()

        data = dumper.dump(pointer)
        data['splines'] = {}

        dumper = utils.dump_anything.Dumper()
        dumper.depth = 3

        for index, spline in enumerate(pointer.splines):
            spline_data = dump_anything.dump(spline)
            spline_data['points'] = dumper.dump(spline.points)
            spline_data['bezier_points'] = dumper.dump(spline.bezier_points)
            spline_data['type'] = dumper.dump(spline.type)
            dumper.depth = 2
            spline_data = dumper.dump(spline)
            # spline_data['points'] = np_dump_collection(spline.points, SPLINE_POINT)
            spline_data['bezier_points_count'] = len(spline.bezier_points)-1
            spline_data['bezier_points'] = np_dump_collection(spline.bezier_points, SPLINE_BEZIER_POINT)
            data['splines'][index] = spline_data

        if isinstance(pointer, T.SurfaceCurve):
            data['type'] = 'SURFACE'
        elif isinstance(pointer, T.TextCurve):

@@ -86,4 +106,3 @@ class BlCurve(BlDatablock):
        elif isinstance(pointer, T.Curve):
            data['type'] = 'CURVE'
        return data
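Note: the off-by-one in `bezier_points_count` looks deliberate: `splines.new('BEZIER')` already creates a spline holding one point, so the dump stores `len(spline.bezier_points) - 1` and the loader only adds the difference before `np_load_collection` fills the attribute buffers. A small sketch of the arithmetic (illustrative numbers):

    new_spline = target.splines.new('BEZIER')   # starts with 1 point
    new_spline.bezier_points.add(3)             # 1 + 3 = 4 points, matching the dump
    # np_load_collection then fills co/handle_left/handle_right/... in bulk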
@@ -20,9 +20,9 @@ import bpy
import mathutils

from .. import utils
from .dump_anything import Loader, Dumper
from ..libs.replication.replication.data import ReplicatedDatablock
from ..libs.replication.replication.constants import (UP, DIFF_BINARY)
from ..libs import dump_anything


def has_action(target):

@@ -42,7 +42,7 @@ def has_driver(target):


def dump_driver(driver):
    dumper = dump_anything.Dumper()
    dumper = Dumper()
    dumper.depth = 6
    data = dumper.dump(driver)

@@ -50,6 +50,7 @@ def dump_driver(driver):


def load_driver(target_datablock, src_driver):
    loader = Loader()
    drivers = target_datablock.animation_data.drivers
    src_driver_data = src_driver['driver']
    new_driver = drivers.new(src_driver['data_path'])

@@ -57,7 +58,7 @@ def load_driver(target_datablock, src_driver):
    # Settings
    new_driver.driver.type = src_driver_data['type']
    new_driver.driver.expression = src_driver_data['expression']
    dump_anything.load(new_driver, src_driver)
    loader.load(new_driver, src_driver)

    # Variables
    for src_variable in src_driver_data['variables']:

@@ -70,7 +71,7 @@ def load_driver(target_datablock, src_driver):
            src_target_data = src_var_data['targets'][src_target]
            new_var.targets[src_target].id = utils.resolve_from_id(
                src_target_data['id'], src_target_data['id_type'])
            dump_anything.load(
            loader.load(
                new_var.targets[src_target], src_target_data)

    # Fcurve

@@ -82,8 +83,7 @@ def load_driver(target_datablock, src_driver):

    for index, src_point in enumerate(src_driver['keyframe_points']):
        new_point = new_fcurve[index]
        dump_anything.load(
            new_point, src_driver['keyframe_points'][src_point])
        loader.load(new_point, src_driver['keyframe_points'][src_point])


class BlDatablock(ReplicatedDatablock):

@@ -127,10 +127,11 @@ class BlDatablock(ReplicatedDatablock):
        self.pointer = datablock_ref

    def _dump(self, pointer=None):
        dumper = Dumper()
        data = {}
        # Dump animation data
        if has_action(pointer):
            dumper = utils.dump_anything.Dumper()
            dumper = Dumper()
            dumper.include_filter = ['action']
            data['animation_data'] = dumper.dump(pointer.animation_data)

@@ -143,13 +144,13 @@ class BlDatablock(ReplicatedDatablock):
            data.update(dumped_drivers)

        if self.is_library:
            data.update(dump_anything.dump(pointer))
            data.update(dumper.dump(pointer))
        else:
            data.update(self.dump_implementation(data, pointer=pointer))
            data.update(self._dump_implementation(data, pointer=pointer))

        return data

    def dump_implementation(self, data, target):
    def _dump_implementation(self, data, target):
        raise NotImplementedError

    def _load(self, data, target):

@@ -171,9 +172,9 @@ class BlDatablock(ReplicatedDatablock):
        if self.is_library:
            return
        else:
            self.load_implementation(data, target)
            self._load_implementation(data, target)

    def load_implementation(self, data, target):
    def _load_implementation(self, data, target):
        raise NotImplementedError

    def resolve_deps(self):

@@ -183,11 +184,11 @@ class BlDatablock(ReplicatedDatablock):
            dependencies.append(self.pointer.animation_data.action)

        if not self.is_library:
            dependencies.extend(self.resolve_deps_implementation())
            dependencies.extend(self._resolve_deps_implementation())

        return dependencies

    def resolve_deps_implementation(self):
    def _resolve_deps_implementation(self):
        return []

    def is_valid(self):
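Note: BlDatablock now follows a template-method layout: the public `_dump`/`_load`/`resolve_deps` machinery calls underscore-prefixed hooks that subclasses override. A hedged sketch of the subclass contract (the `BlFoo` type and `bpy.data.foos` collection are invented for illustration):

    class BlFoo(BlDatablock):
        def _construct(self, data):
            return bpy.data.foos.new(data["name"])   # hypothetical collection

        def _load_implementation(self, data, target):
            loader = Loader()
            loader.load(target, data)

        def _dump_implementation(self, data, pointer=None):
            dumper = Dumper()
            dumper.depth = 1
            return dumper.dump(pointer)

        def _resolve_deps_implementation(self):
            return []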
@@ -20,15 +20,27 @@ import bpy
import mathutils
import numpy as np

from ..libs.dump_anything import (Dumper,
from .dump_anything import (Dumper,
                            Loader,
                            dump_collection_attr,
                            load_collection_attr)
                            np_dump_collection,
                            np_load_collection)
from .bl_datablock import BlDatablock

# The GPencil data API is structured as follows:
# GP-Object --> GP-Layers --> GP-Frames --> GP-Strokes --> GP-Stroke-Points

STROKE_POINT = [
    'co',
    'pressure',
    'strength',
    'uv_factor',
    'uv_rotation'

]

if bpy.app.version[1] >= 83:
    STROKE_POINT.append('vertex_color')

def dump_stroke(stroke):
    """ Dump a grease pencil stroke to a dict

@@ -59,12 +71,7 @@ def dump_stroke(stroke):
    # Stroke points
    p_count = len(stroke.points)
    dumped_stroke['p_count'] = p_count
    dumped_stroke['p_co'] = dump_collection_attr(stroke.points,'co')
    dumped_stroke['p_pressure'] = dump_collection_attr(stroke.points,'pressure')
    dumped_stroke['p_strength'] = dump_collection_attr(stroke.points,'strength')

    if bpy.app.version[1] >= 83:  # new in blender 2.83
        dumped_stroke['p_vertex_color'] = dump_collection_attr(stroke.points,'vertex_color')
    dumped_stroke['points'] = np_dump_collection(stroke.points, STROKE_POINT)

    # TODO: uv_factor, uv_rotation

@@ -86,13 +93,7 @@ def load_stroke(stroke_data, stroke):

    stroke.points.add(stroke_data["p_count"])

    load_collection_attr(stroke.points, 'co',stroke_data["p_co"])
    load_collection_attr(stroke.points, 'pressure',stroke_data["p_pressure"])
    load_collection_attr(stroke.points, 'strength',stroke_data["p_strength"])

    if "p_vertex_color" in stroke_data:
        load_collection_attr(stroke.points, 'vertex_color',stroke_data["p_vertex_color"])

    np_load_collection(stroke_data['points'], stroke.points, STROKE_POINT)

def dump_frame(frame):
    """ Dump a grease pencil frame to a dict

@@ -221,12 +222,15 @@ class BlGpencil(BlDatablock):
    def _construct(self, data):
        return bpy.data.grease_pencils.new(data["name"])

    def load_implementation(self, data, target):
    def _load_implementation(self, data, target):
        target.materials.clear()
        if "materials" in data.keys():
            for mat in data['materials']:
                target.materials.append(bpy.data.materials[mat])

        loader = Loader()
        loader.load(target, data)

        # TODO: reuse existing layer
        for layer in target.layers:
            target.layers.remove(layer)

@@ -243,15 +247,22 @@ class BlGpencil(BlDatablock):

            load_layer(layer_data, target_layer)

        loader = Loader()
        loader.load(target, data)

    def dump_implementation(self, data, pointer=None):
    def _dump_implementation(self, data, pointer=None):
        assert(pointer)
        dumper = Dumper()
        dumper.depth = 2
        dumper.include_filter = [
            'materials',
            'name',
            'zdepth_offset',
            'stroke_thickness_space',
            'pixel_factor',
            'stroke_depth_order'
        ]
        data = dumper.dump(pointer)

        data['layers'] = {}

@@ -261,7 +272,7 @@ class BlGpencil(BlDatablock):

        return data

    def resolve_deps_implementation(self):
    def _resolve_deps_implementation(self):
        deps = []

        for material in self.pointer.materials:
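Note: the `bpy.app.version[1] >= 83` gate here (and its `< 83` counterpart in the lightprobe hunk below) compares only the minor version, which would misfire once a future major release resets the minor number. Since `bpy.app.version` is a comparable (major, minor, patch) tuple, a more robust variant of the same gate would be:

    import bpy

    # Lexicographic tuple comparison stays correct across major bumps.
    if bpy.app.version >= (2, 83, 0):
        STROKE_POINT.append('vertex_color')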
@@ -21,6 +21,7 @@ import mathutils
import os

from .. import utils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock

def dump_image(image):

@@ -83,7 +84,7 @@ class BlImage(BlDatablock):
        assert(pointer)
        data = {}
        data['pixels'] = dump_image(pointer)
        dumper = utils.dump_anything.Dumper()
        dumper = Dumper()
        dumper.depth = 2
        dumper.include_filter = [
            "name",
@@ -19,9 +19,11 @@
import bpy
import mathutils

from .. import utils
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
from .bl_datablock import BlDatablock

POINT = ['co', 'weight_softbody', 'co_deform']


class BlLattice(BlDatablock):
    bl_id = "lattices"

@@ -31,19 +33,20 @@ class BlLattice(BlDatablock):
    bl_automatic_push = True
    bl_icon = 'LATTICE_DATA'

    def load_implementation(self, data, target):
        utils.dump_anything.load(target, data)
    def _load_implementation(self, data, target):
        loader = Loader()
        loader.load(target, data)

        np_load_collection(data['points'], target.points, POINT)

        for point in data['points']:
            utils.dump_anything.load(target.points[point], data["points"][point])
    def _construct(self, data):
        return bpy.data.lattices.new(data["name"])

    def dump_implementation(self, data, pointer=None):
    def _dump_implementation(self, data, pointer=None):
        assert(pointer)

        dumper = utils.dump_anything.Dumper()
        dumper.depth = 3
        dumper = Dumper()
        dumper.depth = 1
        dumper.include_filter = [
            "name",
            'type',

@@ -53,17 +56,10 @@ class BlLattice(BlDatablock):
            'interpolation_type_u',
            'interpolation_type_v',
            'interpolation_type_w',
            'use_outside',
            'points',
            'co',
            'weight_softbody',
            'co_deform'
            'use_outside'
        ]
        data = dumper.dump(pointer)

        data['points'] = np_dump_collection(pointer.points, POINT)
        return data
@@ -19,7 +19,7 @@
import bpy
import mathutils

from ..libs import dump_anything
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock


@@ -40,6 +40,7 @@ class BlLibrary(BlDatablock):

    def dump(self, pointer=None):
        assert(pointer)
        return dump_anything.dump(pointer, 1)
        dumper = Dumper()
        return dumper.dump(pointer)
@@ -19,7 +19,7 @@
import bpy
import mathutils

from .. import utils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock


@@ -34,12 +34,13 @@ class BlLight(BlDatablock):
    def _construct(self, data):
        return bpy.data.lights.new(data["name"], data["type"])

    def load_implementation(self, data, target):
        utils.dump_anything.load(target, data)
    def _load_implementation(self, data, target):
        loader = Loader()
        loader.load(target, data)

    def dump_implementation(self, data, pointer=None):
    def _dump_implementation(self, data, pointer=None):
        assert(pointer)
        dumper = utils.dump_anything.Dumper()
        dumper = Dumper()
        dumper.depth = 3
        dumper.include_filter = [
            "name",
@@ -20,7 +20,7 @@ import bpy
import mathutils
import logging

from .. import utils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock

logger = logging.getLogger(__name__)

@@ -41,15 +41,16 @@ class BlLightprobe(BlDatablock):
        else:
            logger.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")

    def load_implementation(self, data, target):
        utils.dump_anything.load(target, data)
    def _load_implementation(self, data, target):
        loader = Loader()
        loader.load(target, data)

    def dump_implementation(self, data, pointer=None):
    def _dump_implementation(self, data, pointer=None):
        assert(pointer)
        if bpy.app.version[1] < 83:
            logger.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")

        dumper = utils.dump_anything.Dumper()
        dumper = Dumper()
        dumper.depth = 1
        dumper.include_filter = [
            "name",
@@ -21,7 +21,7 @@ import mathutils
import logging

from .. import utils
from ..libs import dump_anything
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock

logger = logging.getLogger(__name__)

@@ -34,9 +34,10 @@ def load_node(node_data, node_tree):
    :arg node_tree: target node_tree
    :type node_tree: bpy.types.NodeTree
    """
    loader = Loader()
    target_node = node_tree.nodes.new(type=node_data["bl_idname"])

    dump_anything.load(target_node, node_data)
    loader.load(target_node, node_data)


@@ -47,6 +48,7 @@ def load_node(node_data, node_tree):
        except:
            logger.error("{} not supported, skipping".format(input))


def load_links(links_data, node_tree):
    """ Load node_tree links from a list

@@ -62,6 +64,7 @@ def load_links(links_data, node_tree):

        node_tree.links.new(input_socket, output_socket)


def dump_links(links):
    """ Dump node_tree links collection to a list

@@ -82,34 +85,106 @@ def dump_links(links):

    return links_data


def dump_node(node):
    """ Dump a single node to a dict

    :arg node: target node
    :type node: bpy.types.Node
    :return: dict
    """

    node_dumper = Dumper()
    node_dumper.depth = 1
    node_dumper.exclude_filter = [
        "dimensions",
        "show_expanded",
        "name_full",
        "select",
        "bl_height_min",
        "bl_height_max",
        "bl_height_default",
        "bl_width_min",
        "bl_width_max",
        "type",
        "bl_icon",
        "bl_width_default",
        "bl_static_type",
        "show_tetxure",
        "is_active_output",
        "hide",
        "show_options",
        "show_preview",
        "show_texture",
        "outputs",
        "width_hidden"
    ]

    dumped_node = node_dumper.dump(node)

    if hasattr(node, 'inputs'):
        dumped_node['inputs'] = {}

        for i in node.inputs:
            input_dumper = Dumper()
            input_dumper.depth = 2
            input_dumper.include_filter = ["default_value"]

            if hasattr(i, 'default_value'):
                dumped_node['inputs'][i.name] = input_dumper.dump(
                    i)
    if hasattr(node, 'color_ramp'):
        ramp_dumper = Dumper()
        ramp_dumper.depth = 4
        ramp_dumper.include_filter = [
            'elements',
            'alpha',
            'color',
            'position'
        ]
        dumped_node['color_ramp'] = ramp_dumper.dump(node.color_ramp)
    if hasattr(node, 'mapping'):
        curve_dumper = Dumper()
        curve_dumper.depth = 5
        curve_dumper.include_filter = [
            'curves',
            'points',
            'location'
        ]
        dumped_node['mapping'] = curve_dumper.dump(node.mapping)

    return dumped_node


class BlMaterial(BlDatablock):
    bl_id = "materials"
    bl_class = bpy.types.Material
    bl_delay_refresh = 10
    bl_delay_apply = 10
    bl_delay_refresh = 1
    bl_delay_apply = 1
    bl_automatic_push = True
    bl_icon = 'MATERIAL_DATA'

    def _construct(self, data):
        return bpy.data.materials.new(data["name"])

    def load_implementation(self, data, target):
    def _load_implementation(self, data, target):
        loader = Loader()
        target.name = data['name']
        if data['is_grease_pencil']:
            if not target.is_grease_pencil:
                bpy.data.materials.create_gpencil_data(target)

            dump_anything.load(
            loader.load(
                target.grease_pencil, data['grease_pencil'])

        elif data["use_nodes"]:
        if data["use_nodes"]:
            if target.node_tree is None:
                target.use_nodes = True

            target.node_tree.nodes.clear()

            dump_anything.load(target,data)
            loader.load(target,data)

            # Load nodes
            for node in data["node_tree"]["nodes"]:

@@ -120,9 +195,9 @@ class BlMaterial(BlDatablock):

            load_links(data["node_tree"]["links"], target.node_tree)

    def dump_implementation(self, data, pointer=None):
    def _dump_implementation(self, data, pointer=None):
        assert(pointer)
        mat_dumper = dump_anything.Dumper()
        mat_dumper = Dumper()
        mat_dumper.depth = 2
        mat_dumper.exclude_filter = [
            "is_embed_data",

@@ -144,70 +219,14 @@ class BlMaterial(BlDatablock):

        if pointer.use_nodes:
            nodes = {}
            node_dumper = dump_anything.Dumper()
            node_dumper.depth = 1
            node_dumper.exclude_filter = [
                "dimensions",
                "show_expanded",
                "name_full",
                "select",
                "bl_height_min",
                "bl_height_max",
                "bl_width_min",
                "bl_width_max",
                "type",
                "bl_icon",
                "bl_width_default",
                "bl_static_type",
                "show_tetxure",
                "hide",
                "show_options",
                "show_preview",
                "outputs",
                "width_hidden"
            ]
            for node in pointer.node_tree.nodes:

                nodes[node.name] = node_dumper.dump(node)

                if hasattr(node, 'inputs'):
                    nodes[node.name]['inputs'] = {}

                    for i in node.inputs:
                        input_dumper = dump_anything.Dumper()
                        input_dumper.depth = 2
                        input_dumper.include_filter = ["default_value"]

                        if hasattr(i, 'default_value'):
                            nodes[node.name]['inputs'][i.name] = input_dumper.dump(
                                i)
                if hasattr(node, 'color_ramp'):
                    ramp_dumper = dump_anything.Dumper()
                    ramp_dumper.depth = 4
                    ramp_dumper.include_filter = [
                        'elements',
                        'alpha',
                        'color',
                        'position'
                    ]
                    nodes[node.name]['color_ramp'] = ramp_dumper.dump(node.color_ramp)
                if hasattr(node, 'mapping'):
                    curve_dumper = dump_anything.Dumper()
                    curve_dumper.depth = 5
                    curve_dumper.include_filter = [
                        'curves',
                        'points',
                        'location'
                    ]
                    nodes[node.name]['mapping'] = curve_dumper.dump(node.mapping)

                nodes[node.name] = dump_node(node)
            data["node_tree"]['nodes'] = nodes

            data["node_tree"]["links"] = dump_links(pointer.node_tree.links)

        elif pointer.is_grease_pencil:
            gp_mat_dumper = dump_anything.Dumper()
        if pointer.is_grease_pencil:
            gp_mat_dumper = Dumper()
            gp_mat_dumper.depth = 3

            gp_mat_dumper.include_filter = [

@@ -235,7 +254,7 @@ class BlMaterial(BlDatablock):
        data['grease_pencil'] = gp_mat_dumper.dump(pointer.grease_pencil)
        return data

    def resolve_deps_implementation(self):
    def _resolve_deps_implementation(self):
        # TODO: resolve node group deps
        deps = []
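Note: `dump_node` factors out the per-node serialization that was previously inlined here and duplicated in the worlds hunk further down: a depth-1 Dumper with a UI-oriented exclude list, plus special cases for inputs, color ramps and curve mappings. A sketch of how a node tree dump is assembled with it (dict shape inferred from the surrounding hunks, not guaranteed):

    nodes = {}
    for node in material.node_tree.nodes:
        nodes[node.name] = dump_node(node)
    # e.g. nodes['Principled BSDF']['inputs']['Roughness'] -> {'default_value': 0.5}
    data["node_tree"] = {
        'nodes': nodes,
        'links': dump_links(material.node_tree.links),
    }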
@@ -22,12 +22,30 @@ import mathutils
import logging
import numpy as np

from ..libs.dump_anything import Dumper, Loader, load_collection_attr, dump_collection_attr
from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
from ..libs.replication.replication.constants import DIFF_BINARY
from .bl_datablock import BlDatablock

logger = logging.getLogger(__name__)

VERTICE = ['co']

EDGE = [
    'vertices',
    'crease',
    'bevel_weight',
]
LOOP = [
    'vertex_index',
    'normal',
]

POLYGON = [
    'loop_total',
    'loop_start',
    'use_smooth',
    'material_index',
]

class BlMesh(BlDatablock):
    bl_id = "meshes"

@@ -42,7 +60,7 @@ class BlMesh(BlDatablock):
        instance.uuid = self.uuid
        return instance

    def load_implementation(self, data, target):
    def _load_implementation(self, data, target):
        if not target or not target.is_editmode:
            loader = Loader()
            loader.load(target, data)

@@ -63,41 +81,35 @@ class BlMesh(BlDatablock):
            target.polygons.add(data["poly_count"])

            # LOADING
            load_collection_attr(target.vertices, 'co', data["verts_co"])
            load_collection_attr(target.edges, "vertices", data["egdes_vert"])
            if data['use_customdata_edge_crease']:
                load_collection_attr(
                    target.edges, "crease", data["edges_crease"])

            if data['use_customdata_edge_bevel']:
                load_collection_attr(
                    target.edges, "bevel_weight", data["edges_bevel"])

            load_collection_attr(
                target.loops, 'vertex_index', data["loop_vertex_index"])
            load_collection_attr(target.loops, 'normal', data["loop_normal"])
            load_collection_attr(
                target.polygons, 'loop_total', data["poly_loop_total"])
            load_collection_attr(
                target.polygons, 'loop_start', data["poly_loop_start"])
            load_collection_attr(
                target.polygons, 'use_smooth', data["poly_smooth"])
            load_collection_attr(
                target.polygons, 'material_index', data["poly_mat"])
            np_load_collection(data['vertices'], target.vertices, VERTICE)
            np_load_collection(data['edges'], target.edges, EDGE)
            np_load_collection(data['loops'], target.loops, LOOP)
            np_load_collection(data["polygons"],target.polygons, POLYGON)

            # UV Layers
            for layer in data['uv_layers']:
                if layer not in target.uv_layers:
                    target.uv_layers.new(name=layer)

                uv_buffer = np.frombuffer(data["uv_layers"][layer]['data'])
                np_load_collection_primitives(
                    target.uv_layers[layer].data,
                    'uv',
                    data["uv_layers"][layer]['data'])

            # Vertex color
            for color_layer in data['vertex_colors']:
                if color_layer not in target.vertex_colors:
                    target.vertex_colors.new(name=color_layer)

                target.uv_layers[layer].data.foreach_set('uv', uv_buffer)
                np_load_collection_primitives(
                    target.vertex_colors[color_layer].data,
                    'color',
                    data["vertex_colors"][color_layer]['data'])

            target.validate()
            target.update()

    def dump_implementation(self, data, pointer=None):
    def _dump_implementation(self, data, pointer=None):
        assert(pointer)

        mesh = pointer

@@ -116,41 +128,31 @@ class BlMesh(BlDatablock):

        # VERTICES
        data["vertex_count"] = len(mesh.vertices)
        data["verts_co"] = dump_collection_attr(mesh.vertices, 'co')
        data["vertices"] = np_dump_collection(mesh.vertices, VERTICE)

        # EDGES
        data["egdes_count"] = len(mesh.edges)
        data["egdes_vert"] = dump_collection_attr(mesh.edges, 'vertices')

        if mesh.use_customdata_edge_crease:
            data["edges_crease"] = dump_collection_attr(mesh.edges, 'crease')

        if mesh.use_customdata_edge_bevel:
            data["edges_bevel"] = dump_collection_attr(
                mesh.edges, 'edges_bevel')
        data["edges"] = np_dump_collection(mesh.edges, EDGE)

        # POLYGONS
        data["poly_count"] = len(mesh.polygons)
        data["poly_mat"] = dump_collection_attr(
            mesh.polygons, 'material_index')
        data["poly_loop_start"] = dump_collection_attr(
            mesh.polygons, 'loop_start')
        data["poly_loop_total"] = dump_collection_attr(
            mesh.polygons, 'loop_total')
        data["poly_smooth"] = dump_collection_attr(mesh.polygons, 'use_smooth')
        data["polygons"] = np_dump_collection(mesh.polygons, POLYGON)

        # LOOPS
        data["loop_count"] = len(mesh.loops)
        data["loop_normal"] = dump_collection_attr(mesh.loops, 'normal')
        data["loop_vertex_index"] = dump_collection_attr(
            mesh.loops, 'vertex_index')
        data["loops"] = np_dump_collection(mesh.loops, LOOP)

        # UV Layers
        data['uv_layers'] = {}
        for layer in mesh.uv_layers:
            data['uv_layers'][layer.name] = {}
            data['uv_layers'][layer.name]['data'] = dump_collection_attr(
                layer.data, 'uv')
            data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive(layer.data, 'uv')

        # Vertex color
        data['vertex_colors'] = {}
        for color_map in mesh.vertex_colors:
            data['vertex_colors'][color_map.name] = {}
            data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')

        # Fix material index
        m_list = []

@@ -162,7 +164,7 @@ class BlMesh(BlDatablock):

        return data

    def resolve_deps_implementation(self):
    def _resolve_deps_implementation(self):
        deps = []

        for material in self.pointer.materials:
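Note: the np_* helpers used above wrap Blender's `foreach_get`/`foreach_set` bulk accessors, which is where the speedup over per-item Python loops comes from. A minimal sketch of the underlying round trip for one attribute (`co` is a 3-float vector; the float64 dtype mirrors the BPY_TO_NUMPY_TYPES mapping in dump_anything.py):

    import numpy as np

    buf = np.empty(len(mesh.vertices) * 3, dtype=np.float64)
    mesh.vertices.foreach_get('co', buf)        # bulk read, no Python loop
    payload = buf.tobytes()                     # what the dump stores

    restored = np.frombuffer(payload, dtype=np.float64)
    mesh.vertices.foreach_set('co', restored)   # bulk write on load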
@@ -19,10 +19,49 @@
import bpy
import mathutils

from .. import utils
from .dump_anything import (
    Dumper, Loader, np_dump_collection_primitive, np_load_collection_primitives,
    np_dump_collection, np_load_collection)

from .bl_datablock import BlDatablock


ELEMENT = [
    'co',
    'hide',
    'radius',
    'rotation',
    'size_x',
    'size_y',
    'size_z',
    'stiffness',
    'type'
]


def dump_metaball_elements(elements):
    """ Dump metaball elements

    :arg elements: metaball elements
    :type elements: bpy.types.MetaElement
    :return: dict
    """

    dumped_elements = np_dump_collection(elements, ELEMENT)

    return dumped_elements


def load_metaball_elements(elements_data, elements):
    """ Load metaball elements

    :arg elements: metaball elements
    :type elements: bpy.types.MetaElement
    """
    np_load_collection(elements_data, elements, ELEMENT)


class BlMetaball(BlDatablock):
    bl_id = "metaballs"
    bl_class = bpy.types.MetaBall

@@ -34,22 +73,30 @@ class BlMetaball(BlDatablock):
    def _construct(self, data):
        return bpy.data.metaballs.new(data["name"])

    def load(self, data, target):
        utils.dump_anything.load(target, data)

        target.elements.clear()
        for element in data["elements"]:
            new_element = target.elements.new(type=data["elements"][element]['type'])
            utils.dump_anything.load(new_element, data["elements"][element])
    def _load_implementation(self, data, target):
        loader = Loader()
        loader.load(target, data)

    def dump_implementation(self, data, pointer=None):
        target.elements.clear()

        for mtype in data["elements"]['type']:
            new_element = target.elements.new()

        load_metaball_elements(data['elements'], target.elements)

    def _dump_implementation(self, data, pointer=None):
        assert(pointer)
        dumper = utils.dump_anything.Dumper()
        dumper.depth = 3
        dumper.exclude_filter = ["is_editmode"]
        dumper = Dumper()
        dumper.depth = 1
        dumper.exclude_filter = [
            "is_editmode",
            "is_evaluated",
            "is_embedded_data",
            "is_library_indirect",
            "name_full"
        ]

        data = dumper.dump(pointer)
        data['elements'] = dump_metaball_elements(pointer.elements)

        return data
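Note: `MetaBall.elements.new()` takes no count argument, so the loader recreates one element per entry of the dumped `type` list before `load_metaball_elements` fills every attribute in bulk. The two-step restore, condensed (shapes as produced by np_dump_collection):

    target.elements.clear()
    # data['elements']['type'] holds one entry per element, so it doubles
    # as the element count.
    for _ in data['elements']['type']:
        target.elements.new()
    load_metaball_elements(data['elements'], target.elements)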
@@ -20,7 +20,7 @@ import bpy
import mathutils
import logging

from .. import utils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock

logger = logging.getLogger(__name__)

@@ -28,8 +28,8 @@ logger = logging.getLogger(__name__)

def load_pose(target_bone, data):
    target_bone.rotation_mode = data['rotation_mode']

    utils.dump_anything.load(target_bone, data)
    loader = Loader()
    loader.load(target_bone, data)


class BlObject(BlDatablock):

@@ -87,9 +87,10 @@ class BlObject(BlDatablock):

        return instance

    def load_implementation(self, data, target):
    def _load_implementation(self, data, target):
        # Load transformation data
        utils.dump_anything.load(target, data)
        loader = Loader()
        loader.load(target, data)

        # Pose
        if 'pose' in data:

@@ -103,7 +104,7 @@ class BlObject(BlDatablock):
                if not bg_target:
                    bg_target = target.pose.bone_groups.new(name=bg_name)

                utils.dump_anything.load(bg_target, bg_data)
                loader.load(bg_target, bg_data)
            # target.pose.bone_groups.get

            # Bones

@@ -112,7 +113,7 @@ class BlObject(BlDatablock):
                bone_data = data['pose']['bones'].get(bone)

                if 'constraints' in bone_data.keys():
                    utils.dump_anything.load(target_bone, bone_data['constraints'])
                    loader.load(target_bone, bone_data['constraints'])

                load_pose(target_bone, bone_data)

@@ -140,7 +141,7 @@ class BlObject(BlDatablock):
                key_data = data['shape_keys']['key_blocks'][key_block]
                target.shape_key_add(name=key_block)

                utils.dump_anything.load(
                loader.load(
                    target.data.shape_keys.key_blocks[key_block], key_data)
                for vert in key_data['data']:
                    target.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']

@@ -151,9 +152,9 @@ class BlObject(BlDatablock):

                target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference]

    def dump_implementation(self, data, pointer=None):
    def _dump_implementation(self, data, pointer=None):
        assert(pointer)
        dumper = utils.dump_anything.Dumper()
        dumper = Dumper()
        dumper.depth = 1
        dumper.include_filter = [
            "name",

@@ -263,7 +264,7 @@ class BlObject(BlDatablock):
        # SHAPE KEYS
        object_data = pointer.data
        if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
            dumper = utils.dump_anything.Dumper()
            dumper = Dumper()
            dumper.depth = 2
            dumper.include_filter = [
                'reference_key',

@@ -290,7 +291,7 @@ class BlObject(BlDatablock):

        return data

    def resolve_deps_implementation(self):
    def _resolve_deps_implementation(self):
        deps = []

        # Avoid Empty case
@@ -19,7 +19,7 @@
import bpy
import mathutils

from .. import utils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock

class BlScene(BlDatablock):

@@ -35,10 +35,11 @@ class BlScene(BlDatablock):
        instance.uuid = self.uuid
        return instance

    def load_implementation(self, data, target):
    def _load_implementation(self, data, target):
        target = self.pointer
        # Load other meshes metadata
        utils.dump_anything.load(target, data)
        loader = Loader()
        loader.load(target, data)

        # Load master collection
        for object in data["collection"]["objects"]:

@@ -67,11 +68,11 @@ class BlScene(BlDatablock):
        if 'grease_pencil' in data.keys():
            target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]

    def dump_implementation(self, data, pointer=None):
    def _dump_implementation(self, data, pointer=None):
        assert(pointer)
        data = {}

        scene_dumper = utils.dump_anything.Dumper()
        scene_dumper = Dumper()
        scene_dumper.depth = 1
        scene_dumper.include_filter = [
            'name',

@@ -89,7 +90,7 @@ class BlScene(BlDatablock):

        return data

    def resolve_deps_implementation(self):
    def _resolve_deps_implementation(self):
        deps = []

        # child collections
@@ -19,7 +19,7 @@
import bpy
import mathutils

from .. import utils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock


@@ -31,16 +31,17 @@ class BlSpeaker(BlDatablock):
    bl_automatic_push = True
    bl_icon = 'SPEAKER'

    def load_implementation(self, data, target):
        utils.dump_anything.load(target, data)
    def _load_implementation(self, data, target):
        loader = Loader()
        loader.load(target, data)

    def _construct(self, data):
        return bpy.data.speakers.new(data["name"])

    def dump_implementation(self, data, pointer=None):
    def _dump_implementation(self, data, pointer=None):
        assert(pointer)

        dumper = utils.dump_anything.Dumper()
        dumper = Dumper()
        dumper.depth = 1
        dumper.include_filter = [
            "muted",
@@ -19,23 +19,23 @@
import bpy
import mathutils

from .. import utils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from .bl_material import load_links, load_node, dump_links
from .bl_material import load_links, load_node, dump_node, dump_links


class BlWorld(BlDatablock):
    bl_id = "worlds"
    bl_class = bpy.types.World
    bl_delay_refresh = 4
    bl_delay_apply = 4
    bl_delay_refresh = 1
    bl_delay_apply = 1
    bl_automatic_push = True
    bl_icon = 'WORLD_DATA'

    def _construct(self, data):
        return bpy.data.worlds.new(data["name"])

    def load_implementation(self, data, target):
    def _load_implementation(self, data, target):
        if data["use_nodes"]:
            if target.node_tree is None:
                target.use_nodes = True

@@ -51,10 +51,10 @@ class BlWorld(BlDatablock):

            load_links(data["node_tree"]["links"], target.node_tree)

    def dump_implementation(self, data, pointer=None):
    def _dump_implementation(self, data, pointer=None):
        assert(pointer)

        world_dumper = utils.dump_anything.Dumper()
        world_dumper = Dumper()
        world_dumper.depth = 2
        world_dumper.exclude_filter = [
            "preview",

@@ -69,47 +69,17 @@ class BlWorld(BlDatablock):
        data = world_dumper.dump(pointer)
        if pointer.use_nodes:
            nodes = {}
            dumper = utils.dump_anything.Dumper()
            dumper.depth = 2
            dumper.exclude_filter = [
                "dimensions",
                "select",
                "bl_height_min",
                "bl_height_max",
                "bl_width_min",
                "bl_width_max",
                "bl_width_default",
                "hide",
                "show_options",
                "show_tetxures",
                "show_preview",
                "outputs",
                "preview",
                "original",
                "width_hidden",

            ]

            for node in pointer.node_tree.nodes:
                nodes[node.name] = dumper.dump(node)
                nodes[node.name] = dump_node(node)

                if hasattr(node, 'inputs'):
                    nodes[node.name]['inputs'] = {}

                    for i in node.inputs:
                        input_dumper = utils.dump_anything.Dumper()
                        input_dumper.depth = 2
                        input_dumper.include_filter = ["default_value"]
                        if hasattr(i, 'default_value'):
                            nodes[node.name]['inputs'][i.name] = input_dumper.dump(
                                i)
            data["node_tree"]['nodes'] = nodes

            data["node_tree"]['links'] = dump_links(pointer.node_tree.links)

        return data

    def resolve_deps_implementation(self):
    def _resolve_deps_implementation(self):
        deps = []

        if self.pointer.use_nodes:
@@ -27,11 +27,79 @@ logger = logging.getLogger(__name__)
BPY_TO_NUMPY_TYPES = {
    'FLOAT': np.float,
    'INT': np.int,
    'BOOL': np.bool
}
    'BOOL': np.bool}

PRIMITIVE_TYPES = ['FLOAT', 'INT', 'BOOLEAN']

NP_COMPATIBLE_TYPES = ['FLOAT', 'INT', 'BOOLEAN', 'ENUM']


def dump_collection_attr(collection, attribute):
def np_load_collection(dikt: dict, collection: bpy.types.CollectionProperty, attributes: list = None):
    """ Load a list of attributes from the source dict
    into the target collection.

    Without attributes given, it tries to load every entry from dikt.

    :arg dikt: source dict
    :type dikt: dict
    :arg collection: target collection
    :type collection: bpy.types.CollectionProperty
    :arg attributes: list of attribute names
    :type attributes: list
    """
    if attributes is None:
        attributes = dikt.keys()

    for attr in attributes:
        attr_type = collection[0].bl_rna.properties.get(attr).type

        if attr_type in PRIMITIVE_TYPES:
            np_load_collection_primitives(collection, attr, dikt[attr])
        elif attr_type == 'ENUM':
            np_load_collection_enum(collection, attr, dikt[attr])
        else:
            logger.error(f"{attr} of type {attr_type} not supported.")


def np_dump_collection(collection: bpy.types.CollectionProperty, attributes: list = None) -> dict:
    """ Dump a list of attributes from the source collection
    to a dict.

    Without attributes given, it tries to dump all properties
    that match NP_COMPATIBLE_TYPES.

    :arg collection: source collection
    :type collection: bpy.types.CollectionProperty
    :arg attributes: list of attribute names
    :type attributes: list
    :return: dict
    """
    dumped_collection = {}

    if len(collection) == 0:
        return dumped_collection

    # TODO: find a way without getting the first item
    properties = collection[0].bl_rna.properties

    if attributes is None:
        attributes = [p.identifier for p in properties if p.type in NP_COMPATIBLE_TYPES and not p.is_readonly]

    for attr in attributes:
        attr_type = properties[attr].type

        if attr_type in PRIMITIVE_TYPES:
            dumped_collection[attr] = np_dump_collection_primitive(
                collection, attr)
        elif attr_type == 'ENUM':
            dumped_collection[attr] = np_dump_collection_enum(collection, attr)
        else:
            logger.error(f"{attr} of type {attr_type} not supported. Only {PRIMITIVE_TYPES} and ENUM supported. Skipping it.")

    return dumped_collection
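Note: together, np_dump_collection and np_load_collection form a symmetric bulk-serialization API: primitive attributes become packed byte strings, enums become index lists, and the same attribute list drives both directions. A hedged usage sketch on mesh vertices (attribute list as used by the meshes hunk above):

    VERTICE = ['co']

    dumped = np_dump_collection(mesh.vertices, VERTICE)   # {'co': b'...'}

    # The target collection must already hold the right number of items:
    # foreach_set fills attributes, it never resizes.
    np_load_collection(dumped, other_mesh.vertices, VERTICE)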
def np_dump_collection_primitive(collection: bpy.types.CollectionProperty, attribute: str) -> str:
    """ Dump a collection attribute as a sequence

    !!! warning

@@ -59,8 +127,52 @@ def dump_collection_attr(collection, attribute):
    return dumped_sequence.tobytes()


def load_collection_attr(collection, attribute, sequence):
    """ Load a collection attribute from a bytes sequence
def np_dump_collection_enum(collection: bpy.types.CollectionProperty, attribute: str) -> list:
    """ Dump a collection enum attribute to an index list

    :arg collection: target collection
    :type collection: bpy.types.CollectionProperty
    :arg attribute: target attribute
    :type attribute: bpy.types.EnumProperty
    :return: list of int
    """
    attr_infos = collection[0].bl_rna.properties.get(attribute)

    assert(attr_infos.type == 'ENUM')

    enum_items = attr_infos.enum_items
    return [enum_items[getattr(i, attribute)].value for i in collection]


def np_load_collection_enum(collection: bpy.types.CollectionProperty, attribute: str, sequence: list):
    """ Load a collection enum attribute from a list sequence

    !!! warning
        Only works with enum attributes

    :arg collection: target collection
    :type collection: bpy.types.CollectionProperty
    :arg attribute: target attribute
    :type attribute: str
    :arg sequence: enum data buffer
    :type sequence: list
    """

    attr_infos = collection[0].bl_rna.properties.get(attribute)

    assert(attr_infos.type == 'ENUM')

    enum_items = attr_infos.enum_items
    enum_idx = [i.value for i in enum_items]

    for index, item in enumerate(sequence):
        setattr(collection[index], attribute,
                enum_items[enum_idx.index(item)].identifier)
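Note: the enum helpers persist an EnumProperty through its numeric item value rather than its string identifier, mapping back through `enum_items` on load. A standalone illustration of the mapping with plain lists standing in for the `bl_rna` enum metadata (no bpy required):

    enum_identifiers = ['FREE', 'ALIGNED', 'VECTOR']   # stand-in for enum_items order
    enum_values = [0, 1, 2]                            # stand-in for item.value

    dumped = [enum_identifiers.index(n) for n in ['FREE', 'VECTOR']]      # [0, 2]
    restored = [enum_identifiers[enum_values.index(v)] for v in dumped]   # ['FREE', 'VECTOR']
    assert restored == ['FREE', 'VECTOR']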
def np_load_collection_primitives(collection: bpy.types.CollectionProperty, attribute: str, sequence: str):
    """ Load a collection attribute from a str bytes sequence

    !!! warning
        Only works with int, float and bool attributes

@@ -69,6 +181,8 @@ def load_collection_attr(collection, attribute, sequence):
    :type collection: bpy.types.CollectionProperty
    :arg attribute: target attribute
    :type attribute: str
    :arg sequence: data buffer
    :type sequence: str
    """

@@ -76,11 +190,10 @@ def load_collection_attr(collection, attribute, sequence):

    assert(attr_infos.type in ['FLOAT', 'INT', 'BOOLEAN'])

    # TODO: check types match
    collection.foreach_set(
        attribute,
        attribute,
        np.frombuffer(sequence, dtype=BPY_TO_NUMPY_TYPES.get(attr_infos.type)))


def remove_items_from_dict(d, keys, recursive=False):
    copy = dict(d)

@@ -91,7 +204,7 @@ def remove_items_from_dict(d, keys, recursive=False):
            copy[k] = remove_items_from_dict(copy[k], keys, recursive)
    return copy


def _is_dictionnary(v):
    return hasattr(v, "items") and callable(v.items)

@@ -152,7 +265,7 @@ def _load_filter_default(default):


class Dumper:
    # TODO: support occlude readonly
    # TODO: support occlude readonly
    # TODO: use foreach_set/get on collection compatible properties
    def __init__(self):
        self.verbose = True

@@ -320,8 +433,8 @@ class BlenderAPIElement:
    def write(self, value):
        # take precaution if property is read-only
        if self.sub_element_name and \
            not self.api_element.is_property_readonly(self.sub_element_name):

                not self.api_element.is_property_readonly(self.sub_element_name):

            setattr(self.api_element, self.sub_element_name, value)
        else:
            self.api_element = value

@@ -379,7 +492,7 @@ class Loader:

    DESTRUCTOR_REMOVE = "remove"
    DESTRUCTOR_CLEAR = "clear"

    constructors = {
        T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]),
        T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []),

@@ -388,20 +501,20 @@ class Loader:
    }

    destructors = {
        T.ColorRampElement:DESTRUCTOR_REMOVE,
        T.ColorRampElement: DESTRUCTOR_REMOVE,
        T.Modifier: DESTRUCTOR_CLEAR,
        T.Constraint: CONSTRUCTOR_NEW,
    }
    element_type = element.bl_rna_property.fixed_type

    constructor = constructors.get(type(element_type))

    if constructor is None:  # collection type not supported
        return

    destructor = destructors.get(type(element_type))
    destructor = destructors.get(type(element_type))

    # Try to clear existing
    # Try to clear existing
    if destructor:
        if destructor == DESTRUCTOR_REMOVE:
            collection = element.read()

@@ -409,18 +522,18 @@ class Loader:
                collection.remove(collection[0])
        else:
            getattr(element.read(), DESTRUCTOR_CLEAR)()

    for dump_idx, dumped_element in enumerate(dump.values()):
        if dump_idx == 0 and len(element.read())>0:
            new_element = element.read()[0]
        if dump_idx == 0 and len(element.read()) > 0:
            new_element = element.read()[0]
        else:
            try:
                constructor_parameters = [dumped_element[name]
                                          for name in constructor[1]]
                                          for name in constructor[1]]
            except KeyError:
                logger.debug("Collection load error, missing parameters.")
                continue  # TODO handle error

            new_element = getattr(element.read(), constructor[0])(
                *constructor_parameters)
        self._load_any(

@@ -441,11 +554,12 @@ class Loader:
        for curve_index, curve in dump['curves'].items():
            for point_idx, point in curve['points'].items():
                pos = point['location']

                if len(mapping.curves[curve_index].points) == 1:
                    mapping.curves[curve_index].points[int(point_idx)].location = pos
                    mapping.curves[curve_index].points[int(
                        point_idx)].location = pos
                else:
                    mapping.curves[curve_index].points.new(pos[0],pos[1])
                    mapping.curves[curve_index].points.new(pos[0], pos[1])

    def _load_pointer(self, pointer, dump):
        rna_property_type = pointer.bl_rna_property.fixed_type

@@ -509,9 +623,11 @@ class Loader:
            (_load_filter_type(mathutils.Matrix, use_bl_rna=False), self._load_matrix),
            # before float because bl_rna type of vector is FloatProperty
            (_load_filter_type(mathutils.Vector, use_bl_rna=False), self._load_vector),
            (_load_filter_type(mathutils.Quaternion, use_bl_rna=False), self._load_quaternion),
            (_load_filter_type(mathutils.Quaternion,
                               use_bl_rna=False), self._load_quaternion),
            (_load_filter_type(mathutils.Euler, use_bl_rna=False), self._load_euler),
            (_load_filter_type(T.CurveMapping, use_bl_rna=False), self._load_curve_mapping),
            (_load_filter_type(T.CurveMapping, use_bl_rna=False),
             self._load_curve_mapping),
            (_load_filter_type(T.FloatProperty), self._load_identity),
            (_load_filter_type(T.StringProperty), self._load_identity),
            (_load_filter_type(T.EnumProperty), self._load_identity),
@@ -135,10 +135,8 @@ def get_bb_coords_from_obj(object, parent=None):
def get_view_matrix():
    area, region, rv3d = view3d_find()

    if area and region and rv3d:
        matrix_dumper = utils.dump_anything.Dumper()

        return matrix_dumper.dump(rv3d.view_matrix)
    if area and region and rv3d:
        return [list(v) for v in rv3d.view_matrix]

def update_presence(self, context):
    global renderer
@@ -28,7 +28,6 @@ import bpy
import mathutils

from . import environment, presence
from .libs import dump_anything

logger = logging.getLogger(__name__)
logger.setLevel(logging.WARNING)