Merge branch '29-differential-revision' into 'develop'
Resolve "Implementation cleanup"

See merge request slumber/multi-user!32
.gitignore  (vendored, 3 changes)
@@ -11,3 +11,6 @@ multi_user_updater/
 
 # sphinx build folder
 _build
+
+# ignore generated zip generated from blender_addon_tester
+*.zip
@@ -1,2 +1,8 @@
+stages:
+  - test
+  - build
+
 include:
+  - local: .gitlab/ci/test.gitlab-ci.yml
   - local: .gitlab/ci/build.gitlab-ci.yml
+
@@ -1,17 +1,14 @@
-image: python:latest
-
 build:
+  stage: build
+  image: python:latest
   script:
     - git submodule init
     - git submodule update
     - cd multi_user/libs/replication
-    - rm -rf tests .git .gitignore
+    - rm -rf tests .git .gitignore script
 
   artifacts:
     name: multi_user
     paths:
       - multi_user
 
-  only:
-    - master
-    - develop
.gitlab/ci/test.gitlab-ci.yml  (new file, 13 changes)
@@ -0,0 +1,13 @@
+test:
+  stage: test
+  image: python:latest
+  script:
+    - git submodule init
+    - git submodule update
+    - apt update
+    # install blender to get all required dependencies
+    # TODO: indtall only dependencies
+    - apt install -f -y blender
+    - pip install blender-addon-tester
+    - python scripts/test_addon.py
+
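The test job above finishes by calling `scripts/test_addon.py`, which is not part of this merge request. A minimal sketch of such an entry point, assuming the blender-addon-tester package installed by the job and a purely illustrative addon path and Blender release, could look like this:

    # Hypothetical scripts/test_addon.py; the real script is not shown in this diff.
    import sys

    import blender_addon_tester as BAT  # installed by "pip install blender-addon-tester"

    if __name__ == "__main__":
        try:
            # Addon folder and Blender release below are assumptions for illustration only.
            BAT.test_blender_addon(addon_path="multi_user", blender_rev="2.83")
        except Exception as err:
            print(f"Addon test failed: {err}")
            sys.exit(1)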
@@ -46,21 +46,25 @@ from . import environment, utils
 # TODO: remove dependency as soon as replication will be installed as a module
 DEPENDENCIES = {
     ("zmq","zmq"),
-    ("jsondiff","jsondiff")
+    ("jsondiff","jsondiff"),
+    ("deepdiff", "deepdiff")
 }
 
 
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.WARNING)
 
 
 libs = os.path.dirname(os.path.abspath(__file__))+"\\libs\\replication\\replication"
 
 def register():
+    # Setup logging policy
+    logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
+
     if libs not in sys.path:
         sys.path.append(libs)
 
+    try:
         environment.setup(DEPENDENCIES, bpy.app.binary_path_python)
+    except ModuleNotFoundError:
+        logging.fatal("Fail to install multi-user dependencies, try to execute blender with admin rights.")
+        return
 
 from . import presence
 from . import operators
@@ -139,7 +139,7 @@ class BlAction(BlDatablock):
     def _construct(self, data):
         return bpy.data.actions.new(data["name"])
 
-    def _load(self, data, target):
+    def _load_implementation(self, data, target):
         for dumped_fcurve in data["fcurves"]:
             dumped_data_path = dumped_fcurve["data_path"]
             dumped_array_index = dumped_fcurve["dumped_array_index"]
@@ -154,8 +154,7 @@ class BlAction(BlDatablock):
             load_fcurve(dumped_fcurve, fcurve)
         target.id_root = data['id_root']
 
-    def _dump(self, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
         dumper = Dumper()
         dumper.exclude_filter = [
             'name_full',
@@ -170,11 +169,11 @@ class BlAction(BlDatablock):
             'users'
         ]
         dumper.depth = 1
-        data = dumper.dump(pointer)
+        data = dumper.dump(instance)
 
         data["fcurves"] = []
 
-        for fcurve in self.pointer.fcurves:
+        for fcurve in instance.fcurves:
             data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True))
 
         return data
@@ -46,7 +46,7 @@ class BlArmature(BlDatablock):
 
         if parent_object is None:
             parent_object = bpy.data.objects.new(
-                data['user_name'], self.pointer)
+                data['user_name'], target)
             parent_object.uuid = data['user']
 
         is_object_in_master = (
@@ -81,10 +81,10 @@ class BlArmature(BlDatablock):
         bpy.ops.object.mode_set(mode='EDIT')
 
         for bone in data['bones']:
-            if bone not in self.pointer.edit_bones:
-                new_bone = self.pointer.edit_bones.new(bone)
+            if bone not in target.edit_bones:
+                new_bone = target.edit_bones.new(bone)
             else:
-                new_bone = self.pointer.edit_bones[bone]
+                new_bone = target.edit_bones[bone]
 
             bone_data = data['bones'].get(bone)
 
@@ -94,7 +94,7 @@ class BlArmature(BlDatablock):
             new_bone.head_radius = bone_data['head_radius']
 
             if 'parent' in bone_data:
-                new_bone.parent = self.pointer.edit_bones[data['bones']
+                new_bone.parent = target.edit_bones[data['bones']
                                                           [bone]['parent']]
             new_bone.use_connect = bone_data['use_connect']
 
@@ -109,8 +109,8 @@ class BlArmature(BlDatablock):
         if 'EDIT' in current_mode:
             bpy.ops.object.mode_set(mode='EDIT')
 
-    def _dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
 
         dumper = Dumper()
         dumper.depth = 4
@@ -126,13 +126,13 @@ class BlArmature(BlDatablock):
             'layers'
 
         ]
-        data = dumper.dump(pointer)
+        data = dumper.dump(instance)
 
-        for bone in pointer.bones:
+        for bone in instance.bones:
             if bone.parent:
                 data['bones'][bone.name]['parent'] = bone.parent.name
         # get the parent Object
-        object_users = utils.get_datablock_users(pointer)[0]
+        object_users = utils.get_datablock_users(instance)[0]
         data['user'] = object_users.uuid
         data['user_name'] = object_users.name
 
@@ -45,8 +45,8 @@ class BlCamera(BlDatablock):
         if dof_settings:
             loader.load(target.dof, dof_settings)
 
-    def _dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
 
         # TODO: background image support
 
@@ -80,6 +80,6 @@ class BlCamera(BlDatablock):
             'sensor_height',
             'sensor_width',
         ]
-        return dumper.dump(pointer)
+        return dumper.dump(instance)
 
 
@@ -38,57 +38,61 @@ class BlCollection(BlDatablock):
                 name for name in sourceData.collections if name == self.data['name']]
 
             instance = bpy.data.collections[self.data['name']]
-            instance.uuid = self.uuid
 
             return instance
 
         instance = bpy.data.collections.new(data["name"])
-        instance.uuid = self.uuid
         return instance
 
     def _load_implementation(self, data, target):
         # Load other meshes metadata
         target.name = data["name"]
 
-        # link objects
+        # Objects
         for object in data["objects"]:
-            object_ref = utils.find_from_attr('uuid', object, bpy.data.objects)
-            if object_ref and object_ref.name not in target.objects.keys():
+            object_ref = bpy.data.objects.get(object)
+
+            if object_ref is None:
+                continue
+
+            if object not in target.objects.keys():
                 target.objects.link(object_ref)
 
         for object in target.objects:
-            if object.uuid not in data["objects"]:
+            if object.name not in data["objects"]:
                 target.objects.unlink(object)
 
         # Link childrens
         for collection in data["children"]:
-            collection_ref = utils.find_from_attr(
-                'uuid', collection, bpy.data.collections)
-            if collection_ref and collection_ref.name not in target.children.keys():
+            collection_ref = bpy.data.collections.get(collection)
+
+            if collection_ref is None:
+                continue
+            if collection_ref.name not in target.children.keys():
                 target.children.link(collection_ref)
 
         for collection in target.children:
-            if collection.uuid not in data["children"]:
+            if collection.name not in data["children"]:
                 target.children.unlink(collection)
 
-    def _dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
         data = {}
-        data['name'] = pointer.name
+        data['name'] = instance.name
 
         # dump objects
         collection_objects = []
-        for object in pointer.objects:
+        for object in instance.objects:
             if object not in collection_objects:
-                collection_objects.append(object.uuid)
+                collection_objects.append(object.name)
 
         data['objects'] = collection_objects
 
         # dump children collections
         collection_children = []
-        for child in pointer.children:
+        for child in instance.children:
             if child not in collection_children:
-                collection_children.append(child.uuid)
+                collection_children.append(child.name)
 
         data['children'] = collection_children
 
@@ -97,10 +101,9 @@ class BlCollection(BlDatablock):
     def _resolve_deps_implementation(self):
         deps = []
 
-        for child in self.pointer.children:
+        for child in self.instance.children:
             deps.append(child)
-        for object in self.pointer.objects:
+        for object in self.instance.objects:
             deps.append(object)
 
         return deps
 
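The hunk above moves BlCollection from uuid-based lookups to plain name-based lookups when linking and unlinking members, and dumps names instead of uuids. As a standalone illustration of that reconciliation pattern (not additional addon code), assuming `target` is a bpy.types.Collection and `wanted_names` is the dumped list of object names:

    import bpy

    def sync_collection_objects(target, wanted_names):
        # Link any wanted object that exists in the file but is not linked yet.
        for name in wanted_names:
            object_ref = bpy.data.objects.get(name)
            if object_ref is None:
                continue  # that object datablock has not been replicated yet
            if name not in target.objects.keys():
                target.objects.link(object_ref)

        # Unlink objects that are no longer part of the replicated collection.
        for obj in list(target.objects):
            if obj.name not in wanted_names:
                target.objects.unlink(obj)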
@@ -27,7 +27,6 @@ from .dump_anything import (Dumper, Loader,
                             np_load_collection,
                             np_dump_collection)
 
-logger = logging.getLogger(__name__)
 
 SPLINE_BEZIER_POINT = [
     # "handle_left_type",
@@ -77,21 +76,34 @@ class BlCurve(BlDatablock):
             # Not really working for now...
             # See https://blender.stackexchange.com/questions/7020/create-nurbs-surface-with-python
             if new_spline.type == 'NURBS':
-                logger.error("NURBS not supported.")
+                logging.error("NURBS not supported.")
                 # new_spline.points.add(len(data['splines'][spline]["points"])-1)
                 # for point_index in data['splines'][spline]["points"]:
                 #     loader.load(
                 #         new_spline.points[point_index], data['splines'][spline]["points"][point_index])
 
             loader.load(new_spline, spline)
-    def _dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
         dumper = Dumper()
-        data = dumper.dump(pointer)
+        # Conflicting attributes
+        # TODO: remove them with the NURBS support
+        dumper.exclude_filter = [
+            'users',
+            'order_u',
+            'order_v',
+            'point_count_v',
+            'point_count_u',
+            'active_textbox'
+        ]
+        if instance.use_auto_texspace:
+            dumper.exclude_filter.extend([
+                'texspace_location',
+                'texspace_size'])
+        data = dumper.dump(instance)
         data['splines'] = {}
 
-        for index, spline in enumerate(pointer.splines):
+        for index, spline in enumerate(instance.splines):
             dumper.depth = 2
             spline_data = dumper.dump(spline)
             # spline_data['points'] = np_dump_collection(spline.points, SPLINE_POINT)
@@ -99,10 +111,10 @@ class BlCurve(BlDatablock):
             spline_data['bezier_points'] = np_dump_collection(spline.bezier_points, SPLINE_BEZIER_POINT)
             data['splines'][index] = spline_data
 
-        if isinstance(pointer, T.SurfaceCurve):
+        if isinstance(instance, T.SurfaceCurve):
             data['type'] = 'SURFACE'
-        elif isinstance(pointer, T.TextCurve):
+        elif isinstance(instance, T.TextCurve):
             data['type'] = 'FONT'
-        elif isinstance(pointer, T.Curve):
+        elif isinstance(instance, T.Curve):
             data['type'] = 'CURVE'
         return data
@@ -99,54 +99,54 @@ class BlDatablock(ReplicatedDatablock):
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        pointer = kwargs.get('pointer', None)
+        instance = kwargs.get('instance', None)
 
         # TODO: use is_library_indirect
-        self.is_library = (pointer and hasattr(pointer, 'library') and
-                           pointer.library) or \
+        self.is_library = (instance and hasattr(instance, 'library') and
+                           instance.library) or \
             (self.data and 'library' in self.data)
 
-        if self.pointer and hasattr(self.pointer, 'uuid'):
-            self.pointer.uuid = self.uuid
+        if instance and hasattr(instance, 'uuid'):
+            instance.uuid = self.uuid
 
-        self.diff_method = DIFF_BINARY
+        # self.diff_method = DIFF_BINARY
 
-    def _resolve(self):
+    @property
+    def instance(self):
         datablock_ref = None
         datablock_root = getattr(bpy.data, self.bl_id)
         datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root)
 
         # In case of lost uuid (ex: undo), resolve by name and reassign it
-        # TODO: avoid reference storing
         if not datablock_ref:
-            datablock_ref = getattr(bpy.data, self.bl_id).get(self.data['name'])
+            datablock_ref = datablock_root.get(self.data['name'])
 
             if datablock_ref:
                 setattr(datablock_ref, 'uuid', self.uuid)
 
-        self.pointer = datablock_ref
+        return datablock_ref
 
-    def _dump(self, pointer=None):
+    def _dump(self, instance=None):
         dumper = Dumper()
         data = {}
         # Dump animation data
-        if has_action(pointer):
+        if has_action(instance):
             dumper = Dumper()
             dumper.include_filter = ['action']
-            data['animation_data'] = dumper.dump(pointer.animation_data)
+            data['animation_data'] = dumper.dump(instance.animation_data)
 
-        if has_driver(pointer):
+        if has_driver(instance):
             dumped_drivers = {'animation_data': {'drivers': []}}
-            for driver in pointer.animation_data.drivers:
+            for driver in instance.animation_data.drivers:
                 dumped_drivers['animation_data']['drivers'].append(
                     dump_driver(driver))
 
             data.update(dumped_drivers)
 
         if self.is_library:
-            data.update(dumper.dump(pointer))
+            data.update(dumper.dump(instance))
         else:
-            data.update(self._dump_implementation(data, pointer=pointer))
+            data.update(self._dump_implementation(data, instance=instance))
 
         return data
 
@@ -180,8 +180,8 @@ class BlDatablock(ReplicatedDatablock):
     def resolve_deps(self):
         dependencies = []
 
-        if has_action(self.pointer):
-            dependencies.append(self.pointer.animation_data.action)
+        if has_action(self.instance):
+            dependencies.append(self.instance.animation_data.action)
 
         if not self.is_library:
             dependencies.extend(self._resolve_deps_implementation())
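The change above turns the stored `self.pointer` reference into an `instance` property that is re-resolved on every access, first through the addon's custom `uuid` attribute and then by name as a fallback. A self-contained sketch of that resolve-on-access idea, with the uuid search written inline instead of the addon's `utils.find_from_attr` helper, could look like this:

    import bpy

    class ResolveOnAccess:
        """Toy stand-in for BlDatablock's new 'instance' property."""
        bl_id = "objects"          # name of the bpy.data collection to search

        def __init__(self, uuid, data):
            self.uuid = uuid       # replication uuid assigned by the addon
            self.data = data       # last dumped state, used for the name fallback

        @property
        def instance(self):
            root = getattr(bpy.data, self.bl_id)
            # primary lookup: the custom 'uuid' property the addon registers on datablocks
            ref = next((d for d in root if getattr(d, 'uuid', '') == self.uuid), None)
            if ref is None:
                # fallback after undo wiped the uuid: resolve by name, restore the uuid
                ref = root.get(self.data['name'])
                if ref:
                    ref.uuid = self.uuid
            return ref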
@@ -95,6 +95,7 @@ def load_stroke(stroke_data, stroke):
 
     np_load_collection(stroke_data['points'], stroke.points, STROKE_POINT)
 
+
 def dump_frame(frame):
     """ Dump a grease pencil frame to a dict
 
@@ -152,7 +153,7 @@ def dump_layer(layer):
         'opacity',
         'channel_color',
         'color',
-        'thickness',
+        # 'thickness', #TODO: enabling only for annotation
         'tint_color',
         'tint_factor',
         'vertex_paint_opacity',
@@ -251,8 +252,8 @@ class BlGpencil(BlDatablock):
 
 
 
-    def _dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
         dumper = Dumper()
         dumper.depth = 2
         dumper.include_filter = [
@@ -263,11 +264,11 @@ class BlGpencil(BlDatablock):
             'pixel_factor',
             'stroke_depth_order'
         ]
-        data = dumper.dump(pointer)
+        data = dumper.dump(instance)
 
         data['layers'] = {}
 
-        for layer in pointer.layers:
+        for layer in instance.layers:
             data['layers'][layer.info] = dump_layer(layer)
 
         return data
@@ -275,7 +276,7 @@ class BlGpencil(BlDatablock):
     def _resolve_deps_implementation(self):
         deps = []
 
-        for material in self.pointer.materials:
+        for material in self.instance.materials:
             deps.append(material)
 
         return deps
@@ -28,7 +28,7 @@ def dump_image(image):
     pixels = None
     if image.source == "GENERATED":
         prefs = utils.get_preferences()
-        img_name = "{}.png".format(image.name)
+        img_name = f"{image.name}.png"
 
         # Cache the image on the disk
         image.filepath_raw = os.path.join(prefs.cache_directory, img_name)
@@ -67,7 +67,7 @@ class BlImage(BlDatablock):
         image = target
         prefs = utils.get_preferences()
 
-        img_name = "{}.png".format(image.name)
+        img_name = f"{image.name}.png"
 
         img_path = os.path.join(prefs.cache_directory, img_name)
 
@@ -80,10 +80,10 @@ class BlImage(BlDatablock):
         image.colorspace_settings.name = data["colorspace_settings"]["name"]
 
 
-    def _dump(self, pointer=None):
-        assert(pointer)
+    def _dump(self, instance=None):
+        assert(instance)
         data = {}
-        data['pixels'] = dump_image(pointer)
+        data['pixels'] = dump_image(instance)
         dumper = Dumper()
         dumper.depth = 2
         dumper.include_filter = [
@@ -95,7 +95,7 @@ class BlImage(BlDatablock):
             'filepath',
             'source',
             'colorspace_settings']
-        data.update(dumper.dump(pointer))
+        data.update(dumper.dump(instance))
 
         return data
 
@@ -21,6 +21,7 @@ import mathutils
 
 from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
 from .bl_datablock import BlDatablock
+from ..libs.replication.replication.exception import ContextError
 
 POINT = ['co', 'weight_softbody', 'co_deform']
 
@@ -33,17 +34,21 @@ class BlLattice(BlDatablock):
     bl_automatic_push = True
     bl_icon = 'LATTICE_DATA'
 
+    def _construct(self, data):
+        return bpy.data.lattices.new(data["name"])
+
     def _load_implementation(self, data, target):
+        if target.is_editmode:
+            raise ContextError("lattice is in edit mode")
+
         loader = Loader()
         loader.load(target, data)
 
         np_load_collection(data['points'], target.points, POINT)
 
-    def _construct(self, data):
-        return bpy.data.lattices.new(data["name"])
-
-    def _dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        if instance.is_editmode:
+            raise ContextError("lattice is in edit mode")
 
         dumper = Dumper()
         dumper.depth = 1
@@ -58,8 +63,9 @@ class BlLattice(BlDatablock):
             'interpolation_type_w',
             'use_outside'
         ]
-        data = dumper.dump(pointer)
+        data = dumper.dump(instance)
 
-        data['points'] = np_dump_collection(pointer.points, POINT)
+        data['points'] = np_dump_collection(instance.points, POINT)
+
         return data
 
@@ -38,9 +38,9 @@ class BlLibrary(BlDatablock):
     def _load(self, data, target):
         pass
 
-    def dump(self, pointer=None):
-        assert(pointer)
+    def _dump(self, instance=None):
+        assert(instance)
         dumper = Dumper()
-        return dumper.dump(pointer)
+        return dumper.dump(instance)
 
 
@@ -38,8 +38,8 @@ class BlLight(BlDatablock):
         loader = Loader()
         loader.load(target, data)
 
-    def _dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
         dumper = Dumper()
         dumper.depth = 3
         dumper.include_filter = [
@@ -61,9 +61,12 @@ class BlLight(BlDatablock):
             "contact_shadow_soft_size",
             "contact_shadow_bias",
             "contact_shadow_thickness",
-            "shape"
+            "shape",
+            "size_y",
+            "size",
+            "angle"
         ]
-        data = dumper.dump(pointer)
+        data = dumper.dump(instance)
         return data
 
 
@@ -23,7 +23,6 @@ import logging
 from .dump_anything import Loader, Dumper
 from .bl_datablock import BlDatablock
 
-logger = logging.getLogger(__name__)
 
 class BlLightprobe(BlDatablock):
     bl_id = "lightprobes"
@@ -39,16 +38,16 @@ class BlLightprobe(BlDatablock):
         if bpy.app.version[1] >= 83:
             return bpy.data.lightprobes.new(data["name"], type)
         else:
-            logger.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
+            logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
 
     def _load_implementation(self, data, target):
         loader = Loader()
         loader.load(target, data)
 
-    def _dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
         if bpy.app.version[1] < 83:
-            logger.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
+            logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
 
         dumper = Dumper()
         dumper.depth = 1
@@ -73,7 +72,7 @@ class BlLightprobe(BlDatablock):
             'visibility_blur'
         ]
 
-        return dumper.dump(pointer)
+        return dumper.dump(instance)
 
 
 
@@ -24,7 +24,6 @@ from .. import utils
 from .dump_anything import Loader, Dumper
 from .bl_datablock import BlDatablock
 
-logger = logging.getLogger(__name__)
 
 def load_node(node_data, node_tree):
     """ Load a node into a node_tree from a dict
@@ -46,7 +45,7 @@ def load_node(node_data, node_tree):
         try:
             target_node.inputs[input].default_value = node_data["inputs"][input]["default_value"]
         except:
-            logger.error("{} not supported, skipping".format(input))
+            logging.error(f"Material {input} parameter not supported, skipping")
 
 
 def load_links(links_data, node_tree):
@@ -195,8 +194,8 @@ class BlMaterial(BlDatablock):
 
             load_links(data["node_tree"]["links"], target.node_tree)
 
-    def _dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
         mat_dumper = Dumper()
         mat_dumper.depth = 2
         mat_dumper.exclude_filter = [
@@ -215,17 +214,17 @@ class BlMaterial(BlDatablock):
             "line_color",
             "view_center",
         ]
-        data = mat_dumper.dump(pointer)
+        data = mat_dumper.dump(instance)
 
-        if pointer.use_nodes:
+        if instance.use_nodes:
             nodes = {}
-            for node in pointer.node_tree.nodes:
+            for node in instance.node_tree.nodes:
                 nodes[node.name] = dump_node(node)
             data["node_tree"]['nodes'] = nodes
 
-            data["node_tree"]["links"] = dump_links(pointer.node_tree.links)
+            data["node_tree"]["links"] = dump_links(instance.node_tree.links)
 
-        if pointer.is_grease_pencil:
+        if instance.is_grease_pencil:
             gp_mat_dumper = Dumper()
             gp_mat_dumper.depth = 3
 
@@ -251,19 +250,19 @@ class BlMaterial(BlDatablock):
                 'mix_color',
                 'flip'
             ]
-            data['grease_pencil'] = gp_mat_dumper.dump(pointer.grease_pencil)
+            data['grease_pencil'] = gp_mat_dumper.dump(instance.grease_pencil)
         return data
 
     def _resolve_deps_implementation(self):
         # TODO: resolve node group deps
         deps = []
 
-        if self.pointer.use_nodes:
-            for node in self.pointer.node_tree.nodes:
+        if self.instance.use_nodes:
+            for node in self.instance.node_tree.nodes:
                 if node.type == 'TEX_IMAGE':
                     deps.append(node.image)
         if self.is_library:
-            deps.append(self.pointer.library)
+            deps.append(self.instance.library)
 
         return deps
 
@@ -24,9 +24,9 @@ import numpy as np
 
 from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
 from ..libs.replication.replication.constants import DIFF_BINARY
+from ..libs.replication.replication.exception import ContextError
 from .bl_datablock import BlDatablock
 
-logger = logging.getLogger(__name__)
 
 VERTICE = ['co']
 
@@ -109,10 +109,12 @@ class BlMesh(BlDatablock):
         target.validate()
         target.update()
 
-    def _dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
 
-        mesh = pointer
+        if instance.is_editmode:
+            raise ContextError("Mesh is in edit mode")
+        mesh = instance
 
         dumper = Dumper()
         dumper.depth = 1
@@ -156,7 +158,7 @@ class BlMesh(BlDatablock):
 
         # Fix material index
         m_list = []
-        for material in pointer.materials:
+        for material in instance.materials:
             if material:
                 m_list.append(material.name)
 
@@ -167,7 +169,7 @@ class BlMesh(BlDatablock):
     def _resolve_deps_implementation(self):
        deps = []
 
-        for material in self.pointer.materials:
+        for material in self.instance.materials:
            if material:
                deps.append(material)
 
@@ -84,19 +84,22 @@ class BlMetaball(BlDatablock):
 
         load_metaball_elements(data['elements'], target.elements)
 
-    def _dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
         dumper = Dumper()
         dumper.depth = 1
-        dumper.exclude_filter = [
-            "is_editmode",
-            "is_evaluated",
-            "is_embedded_data",
-            "is_library_indirect",
-            "name_full"
+        dumper.include_filter = [
+            'name',
+            'resolution',
+            'render_resolution',
+            'threshold',
+            'update_method',
+            'use_auto_texspace',
+            'texspace_location',
+            'texspace_size'
         ]
 
-        data = dumper.dump(pointer)
-        data['elements'] = dump_metaball_elements(pointer.elements)
+        data = dumper.dump(instance)
+        data['elements'] = dump_metaball_elements(instance.elements)
 
         return data
@@ -22,8 +22,7 @@ import logging
 
 from .dump_anything import Loader, Dumper
 from .bl_datablock import BlDatablock
+from ..libs.replication.replication.exception import ContextError
 
-logger = logging.getLogger(__name__)
 
 
 def load_pose(target_bone, data):
@@ -41,7 +40,7 @@ class BlObject(BlDatablock):
     bl_icon = 'OBJECT_DATA'
 
     def _construct(self, data):
-        pointer = None
+        instance = None
 
         if self.is_library:
             with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
@@ -56,33 +55,33 @@ class BlObject(BlDatablock):
         if "data" not in data:
             pass
         elif data["data"] in bpy.data.meshes.keys():
-            pointer = bpy.data.meshes[data["data"]]
+            instance = bpy.data.meshes[data["data"]]
         elif data["data"] in bpy.data.lights.keys():
-            pointer = bpy.data.lights[data["data"]]
+            instance = bpy.data.lights[data["data"]]
        elif data["data"] in bpy.data.cameras.keys():
-            pointer = bpy.data.cameras[data["data"]]
+            instance = bpy.data.cameras[data["data"]]
        elif data["data"] in bpy.data.curves.keys():
-            pointer = bpy.data.curves[data["data"]]
+            instance = bpy.data.curves[data["data"]]
        elif data["data"] in bpy.data.metaballs.keys():
-            pointer = bpy.data.metaballs[data["data"]]
+            instance = bpy.data.metaballs[data["data"]]
        elif data["data"] in bpy.data.armatures.keys():
-            pointer = bpy.data.armatures[data["data"]]
+            instance = bpy.data.armatures[data["data"]]
        elif data["data"] in bpy.data.grease_pencils.keys():
-            pointer = bpy.data.grease_pencils[data["data"]]
+            instance = bpy.data.grease_pencils[data["data"]]
        elif data["data"] in bpy.data.curves.keys():
-            pointer = bpy.data.curves[data["data"]]
+            instance = bpy.data.curves[data["data"]]
        elif data["data"] in bpy.data.lattices.keys():
-            pointer = bpy.data.lattices[data["data"]]
+            instance = bpy.data.lattices[data["data"]]
        elif data["data"] in bpy.data.speakers.keys():
-            pointer = bpy.data.speakers[data["data"]]
+            instance = bpy.data.speakers[data["data"]]
        elif data["data"] in bpy.data.lightprobes.keys():
            # Only supported since 2.83
            if bpy.app.version[1] >= 83:
-                pointer = bpy.data.lightprobes[data["data"]]
+                instance = bpy.data.lightprobes[data["data"]]
            else:
-                logger.warning(
+                logging.warning(
                    "Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
-        instance = bpy.data.objects.new(data["name"], pointer)
+        instance = bpy.data.objects.new(data["name"], instance)
         instance.uuid = self.uuid
 
         return instance
@@ -126,7 +125,8 @@ class BlObject(BlDatablock):
         target.vertex_groups.clear()
         for vg in data['vertex_groups']:
             vertex_group = target.vertex_groups.new(name=vg['name'])
-            for vert in vg['vertices']:
+            point_attr = 'vertices' if 'vertices' in vg else 'points'
+            for vert in vg[point_attr]:
                 vertex_group.add(
                     [vert['index']], vert['weight'], 'REPLACE')
 
@@ -152,8 +152,14 @@ class BlObject(BlDatablock):
 
             target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference]
 
-    def _dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
+
+        child_data = getattr(instance, 'data', None)
+
+        if child_data and hasattr(child_data, 'is_editmode') and child_data.is_editmode:
+            raise ContextError("Object is in edit-mode.")
 
         dumper = Dumper()
         dumper.depth = 1
         dumper.include_filter = [
@@ -169,33 +175,33 @@ class BlObject(BlDatablock):
             "instance_type",
             "location",
             "scale",
-            'rotation_quaternion' if pointer.rotation_mode == 'QUATERNION' else 'rotation_euler',
+            'rotation_quaternion' if instance.rotation_mode == 'QUATERNION' else 'rotation_euler',
         ]
 
-        data = dumper.dump(pointer)
+        data = dumper.dump(instance)
 
         if self.is_library:
             return data
 
         # MODIFIERS
-        if hasattr(pointer, 'modifiers'):
+        if hasattr(instance, 'modifiers'):
             dumper.include_filter = None
             dumper.depth = 2
             data["modifiers"] = {}
-            for index, modifier in enumerate(pointer.modifiers):
+            for index, modifier in enumerate(instance.modifiers):
                 data["modifiers"][modifier.name] = dumper.dump(modifier)
 
         # CONSTRAINTS
         # OBJECT
-        if hasattr(pointer, 'constraints'):
+        if hasattr(instance, 'constraints'):
             dumper.depth = 3
-            data["constraints"] = dumper.dump(pointer.constraints)
+            data["constraints"] = dumper.dump(instance.constraints)
 
         # POSE
-        if hasattr(pointer, 'pose') and pointer.pose:
+        if hasattr(instance, 'pose') and instance.pose:
             # BONES
             bones = {}
-            for bone in pointer.pose.bones:
+            for bone in instance.pose.bones:
                 bones[bone.name] = {}
                 dumper.depth = 1
                 rotation = 'rotation_quaternion' if bone.rotation_mode == 'QUATERNION' else 'rotation_euler'
@@ -220,7 +226,7 @@ class BlObject(BlDatablock):
 
             # GROUPS
             bone_groups = {}
-            for group in pointer.pose.bone_groups:
+            for group in instance.pose.bone_groups:
                 dumper.depth = 3
                 dumper.include_filter = [
                     'name',
@@ -230,28 +236,29 @@ class BlObject(BlDatablock):
             data['pose']['bone_groups'] = bone_groups
 
         # CHILDS
-        if len(pointer.children) > 0:
+        if len(instance.children) > 0:
             childs = []
-            for child in pointer.children:
+            for child in instance.children:
                 childs.append(child.name)
 
             data["children"] = childs
 
         # VERTEx GROUP
-        if len(pointer.vertex_groups) > 0:
+        if len(instance.vertex_groups) > 0:
+            points_attr = 'vertices' if isinstance(instance.data, bpy.types.Mesh) else 'points'
             vg_data = []
-            for vg in pointer.vertex_groups:
+            for vg in instance.vertex_groups:
                 vg_idx = vg.index
                 dumped_vg = {}
                 dumped_vg['name'] = vg.name
 
                 vertices = []
 
-                for v in pointer.data.vertices:
+                for i, v in enumerate(getattr(instance.data, points_attr)):
                     for vg in v.groups:
                         if vg.group == vg_idx:
                             vertices.append({
-                                'index': v.index,
+                                'index': i,
                                 'weight': vg.weight
                             })
 
@@ -262,7 +269,7 @@ class BlObject(BlDatablock):
             data['vertex_groups'] = vg_data
 
         # SHAPE KEYS
-        object_data = pointer.data
+        object_data = instance.data
         if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
             dumper = Dumper()
             dumper.depth = 2
@@ -295,17 +302,17 @@ class BlObject(BlDatablock):
         deps = []
 
         # Avoid Empty case
-        if self.pointer.data:
-            deps.append(self.pointer.data)
-        if len(self.pointer.children) > 0:
-            deps.extend(list(self.pointer.children))
+        if self.instance.data:
+            deps.append(self.instance.data)
+        if len(self.instance.children) > 0:
+            deps.extend(list(self.instance.children))
 
         if self.is_library:
-            deps.append(self.pointer.library)
+            deps.append(self.instance.library)
 
-        if self.pointer.instance_type == 'COLLECTION':
+        if self.instance.instance_type == 'COLLECTION':
             # TODO: uuid based
-            deps.append(self.pointer.instance_collection)
+            deps.append(self.instance.instance_collection)
 
         return deps
 
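The vertex-group changes above stop assuming mesh data: the dump side now walks `data.vertices` for meshes and `data.points` for other point-based data such as lattices, and stores the enumeration index instead of `v.index`. In isolation, that dump step looks roughly like the following sketch (illustration only, not extra addon code), for object data that supports vertex groups:

    import bpy

    def dump_vertex_group(obj, vg_index):
        # Meshes expose their points as 'vertices'; lattices expose 'points'.
        points_attr = 'vertices' if isinstance(obj.data, bpy.types.Mesh) else 'points'
        weights = []
        for i, point in enumerate(getattr(obj.data, points_attr)):
            for assignment in point.groups:
                if assignment.group == vg_index:
                    # Store the loop index: lattice points have no 'index' field.
                    weights.append({'index': i, 'weight': assignment.weight})
        return weights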
@@ -34,11 +34,9 @@ class BlScene(BlDatablock):
 
     def _construct(self, data):
         instance = bpy.data.scenes.new(data["name"])
-        instance.uuid = self.uuid
         return instance
 
     def _load_implementation(self, data, target):
-        target = self.pointer
         # Load other meshes metadata
         loader = Loader()
         loader.load(target, data)
@@ -84,8 +82,8 @@ class BlScene(BlDatablock):
             target.view_settings.curve_mapping.black_level = data['view_settings']['curve_mapping']['black_level']
             target.view_settings.curve_mapping.update()
 
-    def _dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
         data = {}
 
         scene_dumper = Dumper()
@@ -97,12 +95,12 @@ class BlScene(BlDatablock):
             'camera',
             'grease_pencil',
         ]
-        data = scene_dumper.dump(pointer)
+        data = scene_dumper.dump(instance)
 
         scene_dumper.depth = 3
 
         scene_dumper.include_filter = ['children','objects','name']
-        data['collection'] = scene_dumper.dump(pointer.collection)
+        data['collection'] = scene_dumper.dump(instance.collection)
 
         scene_dumper.depth = 1
         scene_dumper.include_filter = None
@@ -125,19 +123,19 @@ class BlScene(BlDatablock):
             'samples',
             'volume_bounces'
         ]
-        data['eevee'] = scene_dumper.dump(pointer.eevee)
-        data['cycles'] = scene_dumper.dump(pointer.cycles)
-        data['view_settings'] = scene_dumper.dump(pointer.view_settings)
-        data['view_settings']['curve_mapping'] = scene_dumper.dump(pointer.view_settings.curve_mapping)
+        data['eevee'] = scene_dumper.dump(instance.eevee)
+        data['cycles'] = scene_dumper.dump(instance.cycles)
+        data['view_settings'] = scene_dumper.dump(instance.view_settings)
 
-        if pointer.view_settings.use_curve_mapping:
+        if instance.view_settings.use_curve_mapping:
+            data['view_settings']['curve_mapping'] = scene_dumper.dump(instance.view_settings.curve_mapping)
             scene_dumper.depth = 5
             scene_dumper.include_filter = [
                 'curves',
                 'points',
                 'location'
             ]
-            data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(pointer.view_settings.curve_mapping.curves)
+            data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(instance.view_settings.curve_mapping.curves)
 
 
         return data
@@ -146,19 +144,19 @@ class BlScene(BlDatablock):
         deps = []
 
         # child collections
-        for child in self.pointer.collection.children:
+        for child in self.instance.collection.children:
             deps.append(child)
 
         # childs objects
-        for object in self.pointer.objects:
+        for object in self.instance.objects:
             deps.append(object)
 
         # world
-        if self.pointer.world:
-            deps.append(self.pointer.world)
+        if self.instance.world:
+            deps.append(self.instance.world)
 
         # annotations
-        if self.pointer.grease_pencil:
-            deps.append(self.pointer.grease_pencil)
+        if self.instance.grease_pencil:
+            deps.append(self.instance.grease_pencil)
 
         return deps
 
@@ -38,8 +38,8 @@ class BlSpeaker(BlDatablock):
     def _construct(self, data):
         return bpy.data.speakers.new(data["name"])
 
-    def _dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
 
         dumper = Dumper()
         dumper.depth = 1
@@ -58,7 +58,7 @@ class BlSpeaker(BlDatablock):
             'cone_volume_outer'
         ]
 
-        return dumper.dump(pointer)
+        return dumper.dump(instance)
 
 
 
@@ -51,8 +51,8 @@ class BlWorld(BlDatablock):
 
             load_links(data["node_tree"]["links"], target.node_tree)
 
-    def _dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
 
         world_dumper = Dumper()
         world_dumper.depth = 2
@@ -66,27 +66,27 @@ class BlWorld(BlDatablock):
             "users",
             "view_center"
         ]
-        data = world_dumper.dump(pointer)
-        if pointer.use_nodes:
+        data = world_dumper.dump(instance)
+        if instance.use_nodes:
             nodes = {}
 
-            for node in pointer.node_tree.nodes:
+            for node in instance.node_tree.nodes:
                 nodes[node.name] = dump_node(node)
 
             data["node_tree"]['nodes'] = nodes
 
-            data["node_tree"]['links'] = dump_links(pointer.node_tree.links)
+            data["node_tree"]['links'] = dump_links(instance.node_tree.links)
 
         return data
 
     def _resolve_deps_implementation(self):
         deps = []
 
-        if self.pointer.use_nodes:
-            for node in self.pointer.node_tree.nodes:
+        if self.instance.use_nodes:
+            for node in self.instance.node_tree.nodes:
                 if node.type == 'TEX_IMAGE':
                     deps.append(node.image)
         if self.is_library:
-            deps.append(self.pointer.library)
+            deps.append(self.instance.library)
         return deps
 
@@ -22,7 +22,6 @@ import bpy.types as T
 import mathutils
 import numpy as np
 
-logger = logging.getLogger(__name__)
 
 BPY_TO_NUMPY_TYPES = {
     'FLOAT': np.float,
@@ -47,6 +46,10 @@ def np_load_collection(dikt: dict, collection: bpy.types.CollectionProperty, att
     :arg attributes: list of attributes name
     :type attributes: list
     """
+    if not dikt or len(collection) == 0:
+        logging.warning(f'Skipping collection')
+        return
+
     if attributes is None:
         attributes = dikt.keys()
 
@@ -58,7 +61,7 @@ def np_load_collection(dikt: dict, collection: bpy.types.CollectionProperty, att
         elif attr_type == 'ENUM':
             np_load_collection_enum(collection, attr, dikt[attr])
         else:
-            logger.error(f"{attr} of type {attr_type} not supported.")
+            logging.error(f"{attr} of type {attr_type} not supported.")
 
 
 def np_dump_collection(collection: bpy.types.CollectionProperty, attributes: list = None) -> dict:
@@ -94,7 +97,7 @@ def np_dump_collection(collection: bpy.types.CollectionProperty, attributes: lis
         elif attr_type == 'ENUM':
             dumped_collection[attr] = np_dump_collection_enum(collection, attr)
         else:
-            logger.error(f"{attr} of type {attr_type} not supported. Only {PRIMITIVE_TYPES} and ENUM supported. Skipping it.")
+            logging.error(f"{attr} of type {attr_type} not supported. Only {PRIMITIVE_TYPES} and ENUM supported. Skipping it.")
 
     return dumped_collection
 
@@ -111,11 +114,15 @@ def np_dump_collection_primitive(collection: bpy.types.CollectionProperty, attri
     :type attribute: str
     :return: numpy byte buffer
     """
+    if len(collection) == 0:
+        logging.warning(f'Skipping empty {attribute} attribute')
+        return {}
+
     attr_infos = collection[0].bl_rna.properties.get(attribute)
 
     assert(attr_infos.type in ['FLOAT', 'INT', 'BOOLEAN'])
 
 
     size = sum(attr_infos.array_dimensions) if attr_infos.is_array else 1
 
     dumped_sequence = np.zeros(
@@ -182,9 +189,11 @@ def np_load_collection_primitives(collection: bpy.types.CollectionProperty, attr
     :arg attribute: target attribute
     :type attribute: str
     :arg sequence: data buffer
-    :type sequence: str
-    :return: numpy byte buffer
+    :type sequence: strr
     """
+    if len(collection) == 0 or not sequence:
+        logging.warning(f"Skipping loadin {attribute}")
+        return
+
     attr_infos = collection[0].bl_rna.properties.get(attribute)
 
@@ -369,7 +378,7 @@ class Dumper:
                 return False
             getattr(default, p)
         except AttributeError as err:
-            logger.debug(err)
+            logging.debug(err)
             return False
         if p.startswith("__"):
             return False
@@ -479,7 +488,7 @@ class Loader:
                 for i in range(len(dump)):
                     element.read()[i] = dump[i]
             except AttributeError as err:
-                logger.debug(err)
+                logging.debug(err)
                 if not self.occlude_read_only:
                     raise err
 
@@ -493,7 +502,7 @@ class Loader:
         DESTRUCTOR_REMOVE = "remove"
         DESTRUCTOR_CLEAR = "clear"
 
-        constructors = {
+        _constructors = {
             T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]),
             T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []),
             T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"]),
@@ -507,9 +516,9 @@ class Loader:
         }
         element_type = element.bl_rna_property.fixed_type
 
-        constructor = constructors.get(type(element_type))
+        _constructor = _constructors.get(type(element_type))
 
-        if constructor is None: # collection type not supported
+        if _constructor is None: # collection type not supported
             return
 
         destructor = destructors.get(type(element_type))
@@ -528,14 +537,14 @@ class Loader:
             new_element = element.read()[0]
         else:
             try:
-                constructor_parameters = [dumped_element[name]
-                                          for name in constructor[1]]
+                _constructor_parameters = [dumped_element[name]
+                                           for name in _constructor[1]]
             except KeyError:
-                logger.debug("Collection load error, missing parameters.")
+                logging.debug("Collection load error, missing parameters.")
                 continue # TODO handle error
 
-            new_element = getattr(element.read(), constructor[0])(
+            new_element = getattr(element.read(), _constructor[0])(
|
||||||
*constructor_parameters)
|
*_constructor_parameters)
|
||||||
self._load_any(
|
self._load_any(
|
||||||
BlenderAPIElement(
|
BlenderAPIElement(
|
||||||
new_element, occlude_read_only=self.occlude_read_only),
|
new_element, occlude_read_only=self.occlude_read_only),
|
||||||
@ -566,24 +575,24 @@ class Loader:
|
|||||||
else:
|
else:
|
||||||
dst_curve.points.new(pos[0], pos[1])
|
dst_curve.points.new(pos[0], pos[1])
|
||||||
|
|
||||||
def _load_pointer(self, pointer, dump):
|
def _load_pointer(self, instance, dump):
|
||||||
rna_property_type = pointer.bl_rna_property.fixed_type
|
rna_property_type = instance.bl_rna_property.fixed_type
|
||||||
if not rna_property_type:
|
if not rna_property_type:
|
||||||
return
|
return
|
||||||
if isinstance(rna_property_type, T.Image):
|
if isinstance(rna_property_type, T.Image):
|
||||||
pointer.write(bpy.data.images.get(dump))
|
instance.write(bpy.data.images.get(dump))
|
||||||
elif isinstance(rna_property_type, T.Texture):
|
elif isinstance(rna_property_type, T.Texture):
|
||||||
pointer.write(bpy.data.textures.get(dump))
|
instance.write(bpy.data.textures.get(dump))
|
||||||
elif isinstance(rna_property_type, T.ColorRamp):
|
elif isinstance(rna_property_type, T.ColorRamp):
|
||||||
self._load_default(pointer, dump)
|
self._load_default(instance, dump)
|
||||||
elif isinstance(rna_property_type, T.Object):
|
elif isinstance(rna_property_type, T.Object):
|
||||||
pointer.write(bpy.data.objects.get(dump))
|
instance.write(bpy.data.objects.get(dump))
|
||||||
elif isinstance(rna_property_type, T.Mesh):
|
elif isinstance(rna_property_type, T.Mesh):
|
||||||
pointer.write(bpy.data.meshes.get(dump))
|
instance.write(bpy.data.meshes.get(dump))
|
||||||
elif isinstance(rna_property_type, T.Material):
|
elif isinstance(rna_property_type, T.Material):
|
||||||
pointer.write(bpy.data.materials.get(dump))
|
instance.write(bpy.data.materials.get(dump))
|
||||||
elif isinstance(rna_property_type, T.Collection):
|
elif isinstance(rna_property_type, T.Collection):
|
||||||
pointer.write(bpy.data.collections.get(dump))
|
instance.write(bpy.data.collections.get(dump))
|
||||||
|
|
||||||
def _load_matrix(self, matrix, dump):
|
def _load_matrix(self, matrix, dump):
|
||||||
matrix.write(mathutils.Matrix(dump))
|
matrix.write(mathutils.Matrix(dump))
|
||||||
@ -613,11 +622,11 @@ class Loader:
|
|||||||
for k in self._ordered_keys(dump.keys()):
|
for k in self._ordered_keys(dump.keys()):
|
||||||
v = dump[k]
|
v = dump[k]
|
||||||
if not hasattr(default.read(), k):
|
if not hasattr(default.read(), k):
|
||||||
logger.debug(f"Load default, skipping {default} : {k}")
|
logging.debug(f"Load default, skipping {default} : {k}")
|
||||||
try:
|
try:
|
||||||
self._load_any(default.extend(k), v)
|
self._load_any(default.extend(k), v)
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
logger.debug(f"Cannot load {k}: {err}")
|
logging.debug(f"Cannot load {k}: {err}")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def match_subset_all(self):
|
def match_subset_all(self):
|
||||||
|
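Note: the np_dump_collection / np_load_collection helpers touched above round-trip a Blender collection property through a dict of raw numpy buffers. A minimal sketch of that round trip follows; the module path multi_user.bl_types.dump_anything is an assumption inferred from the test imports further down, not something this diff states.

# Sketch only: dumps the 'co' FLOAT attribute of mesh vertices and writes it back.
import bpy
from multi_user.bl_types import dump_anything  # assumed module path

bpy.ops.mesh.primitive_cube_add()
vertices = bpy.data.meshes[0].vertices

dumped = dump_anything.np_dump_collection(vertices, ['co'])   # dict of byte buffers
dump_anything.np_load_collection(dumped, vertices, ['co'])    # loads them back in place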
@ -22,9 +22,6 @@ import bpy
from . import operators, presence, utils
from .libs.replication.replication.constants import FETCHED, RP_COMMON, STATE_INITIAL,STATE_QUITTING, STATE_ACTIVE, STATE_SYNCING, STATE_SRV_SYNC

-logger = logging.getLogger(__name__)
-logger.setLevel(logging.WARNING)


class Delayable():
    """Delayable task interface
@ -92,8 +89,7 @@ class ApplyTimer(Timer):
            try:
                client.apply(node)
            except Exception as e:
-                logger.error(
-                    "fail to apply {}: {}".format(node_ref.uuid, e))
+                logging.error(f"Fail to apply {node_ref.uuid}: {e}")


class DynamicRightSelectTimer(Timer):
@ -164,7 +160,7 @@ class DynamicRightSelectTimer(Timer):
                    }

                    session.update_user_metadata(user_metadata)
-                    logger.info("Update selection")
+                    logging.debug("Update selection")

                # Fix deselection until right managment refactoring (with Roles concepts)
                if len(current_selection) == 0 and self._right_strategy == RP_COMMON:
@ -214,11 +210,13 @@ class DrawClient(Draw):
    def execute(self):
        session = getattr(operators, 'client', None)
        renderer = getattr(presence, 'renderer', None)
+        prefs = utils.get_preferences()

        if session and renderer and session.state['STATE'] == STATE_ACTIVE:
            settings = bpy.context.window_manager.session
            users = session.online_users

+            # Update users
            for user in users.values():
                metadata = user.get('metadata')
                color = metadata.get('color')
@ -237,32 +235,52 @@ class DrawClient(Draw):
            renderer.flush_selection()
            renderer.flush_users()


class ClientUpdate(Timer):
-    def __init__(self, timout=.5):
+    def __init__(self, timout=.016):
        super().__init__(timout)
        self.handle_quit = False
+        self.users_metadata = {}

    def execute(self):
        settings = utils.get_preferences()
        session = getattr(operators, 'client', None)
        renderer = getattr(presence, 'renderer', None)

-        if session and renderer and session.state['STATE'] == STATE_ACTIVE:
+        if session and renderer:
+            if session.state['STATE'] == STATE_ACTIVE:
                # Check if session has been closes prematurely
                if session.state['STATE'] == 0:
                    bpy.ops.session.stop()

-                local_user = operators.client.online_users.get(
-                    settings.username)
+                local_user = operators.client.online_users.get(settings.username)
                if not local_user:
                    return
+                else:
+                    for username, user_data in operators.client.online_users.items():
+                        if username != settings.username:
+                            cached_user_data = self.users_metadata.get(username)
+                            new_user_data = operators.client.online_users[username]['metadata']
+
+                            if cached_user_data is None:
+                                self.users_metadata[username] = user_data['metadata']
+                            elif 'view_matrix' in cached_user_data and 'view_matrix' in new_user_data and cached_user_data['view_matrix'] != new_user_data['view_matrix']:
+                                presence.refresh_3d_view()
+                                self.users_metadata[username] = user_data['metadata']
+                                break
+                            else:
+                                self.users_metadata[username] = user_data['metadata']

                local_user_metadata = local_user.get('metadata')
-                current_view_corners = presence.get_view_corners()
                scene_current = bpy.context.scene.name
+                local_user = session.online_users.get(settings.username)
+                current_view_corners = presence.get_view_corners()

+                # Init client metadata
                if not local_user_metadata or 'color' not in local_user_metadata.keys():
                    metadata = {
-                        'view_corners': current_view_corners,
+                        'view_corners': presence.get_view_matrix(),
                        'view_matrix': presence.get_view_matrix(),
                        'color': (settings.client_color.r,
                                  settings.client_color.g,
@ -272,15 +290,16 @@ class ClientUpdate(Timer):
                        'scene_current': scene_current
                    }
                    session.update_user_metadata(metadata)
-                elif current_view_corners != local_user_metadata['view_corners']:
-                    logger.info('update user metadata')
-                    local_user_metadata['view_corners'] = current_view_corners
-                    local_user_metadata['view_matrix'] = presence.get_view_matrix()
-                    session.update_user_metadata(local_user_metadata)
+                # Update client representation
+                # Update client current scene
                elif scene_current != local_user_metadata['scene_current']:
                    local_user_metadata['scene_current'] = scene_current
                    session.update_user_metadata(local_user_metadata)
+                elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']:
+                    local_user_metadata['view_corners'] = current_view_corners
+                    local_user_metadata['view_matrix'] = presence.get_view_matrix()
+                    session.update_user_metadata(local_user_metadata)
                # sync online users
                session_users = operators.client.online_users
                ui_users = bpy.context.window_manager.online_users
@ -297,19 +316,15 @@ class ClientUpdate(Timer):
                        new_key = ui_users.add()
                        new_key.name = user
                        new_key.username = user

-                        # TODO: event drivent 3d view refresh
-                        presence.refresh_3d_view()
                elif session.state['STATE'] == STATE_QUITTING:
-                    presence.refresh_3d_view()
+                    presence.refresh_sidebar_view()
                    self.handle_quit = True
                elif session.state['STATE'] == STATE_INITIAL and self.handle_quit:
                    self.handle_quit = False
-                    presence.refresh_3d_view()
+                    presence.refresh_sidebar_view()

                    operators.unregister_delayables()

                    presence.renderer.stop()
-                # # ui update
-                presence.refresh_3d_view()
+            elif session.state['STATE'] != STATE_INITIAL:
+                presence.refresh_sidebar_view()
@ -23,8 +23,6 @@ import subprocess
import sys
from pathlib import Path

-logger = logging.getLogger(__name__)
-logger.setLevel(logging.WARNING)

THIRD_PARTY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
DEFAULT_CACHE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "cache")
@ -49,8 +47,9 @@ def install_pip():


def install_package(name):
-    subprocess.run([str(PYTHON_PATH), "-m", "pip", "install",
-                    name], cwd=SUBPROCESS_DIR)
+    logging.debug(f"Using {PYTHON_PATH} for installation")
+    subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", name])


def check_dir(dir):
    if not os.path.exists(dir):
@ -68,3 +67,4 @@ def setup(dependencies, python_path):
    for module_name, package_name in dependencies:
        if not module_can_be_imported(module_name):
            install_package(package_name)
+            module_can_be_imported(package_name)
Submodule multi_user/libs/replication updated: 727bd6bbb1...f9892c2c8c
@ -39,8 +39,6 @@ from .libs.replication.replication.data import ReplicatedDataFactory
from .libs.replication.replication.exception import NonAuthorizedOperationError
from .libs.replication.replication.interface import Session

-logger = logging.getLogger(__name__)
-logger.setLevel(logging.WARNING)

client = None
delayables = []
@ -91,7 +89,7 @@ class SessionStartOperator(bpy.types.Operator):
        # init the factory with supported types
        for type in bl_types.types_to_register():
            type_module = getattr(bl_types, type)
-            type_impl_name = "Bl{}".format(type.split('_')[1].capitalize())
+            type_impl_name = f"Bl{type.split('_')[1].capitalize()}"
            type_module_class = getattr(type_module, type_impl_name)

            supported_bl_types.append(type_module_class.bl_id)
@ -135,7 +133,7 @@ class SessionStartOperator(bpy.types.Operator):
                )
            except Exception as e:
                self.report({'ERROR'}, repr(e))
-                logger.error(f"Error: {e}")
+                logging.error(f"Error: {e}")
            finally:
                runtime_settings.is_admin = True

@ -153,7 +151,7 @@ class SessionStartOperator(bpy.types.Operator):
                )
            except Exception as e:
                self.report({'ERROR'}, repr(e))
-                logger.error(f"Error: {e}")
+                logging.error(f"Error: {e}")
            finally:
                runtime_settings.is_admin = False

@ -177,7 +175,7 @@ class SessionStartOperator(bpy.types.Operator):

        self.report(
            {'INFO'},
-            "connexion on tcp://{}:{}".format(settings.ip, settings.port))
+            f"connecting to tcp://{settings.ip}:{settings.port}")
        return {"FINISHED"}


@ -467,8 +465,7 @@ class ApplyArmatureOperator(bpy.types.Operator):
                    try:
                        client.apply(node)
                    except Exception as e:
-                        logger.error(
-                            "fail to apply {}: {}".format(node_ref.uuid, e))
+                        logging.error("Dail to apply armature: {e}")

        return {'PASS_THROUGH'}

@ -510,19 +507,7 @@ def load_pre_handler(dummy):
        bpy.ops.session.stop()


-@persistent
-def sanitize_deps_graph(dummy):
-    """sanitize deps graph
-
-    Temporary solution to resolve each node pointers after a Undo.
-    A future solution should be to avoid storing dataclock reference...
-
-    """
-    global client
-
-    if client and client.state['STATE'] in [STATE_ACTIVE]:
-        for node_key in client.list():
-            client.get(node_key)._resolve()
-
-
@persistent
@ -532,40 +517,6 @@ def update_client_frame(scene):
            'frame_current': scene.frame_current
        })

-@persistent
-def depsgraph_evaluation(scene):
-    if client and client.state['STATE'] == STATE_ACTIVE:
-        context = bpy.context
-        blender_depsgraph = bpy.context.view_layer.depsgraph
-        dependency_updates = [u for u in blender_depsgraph.updates]
-        session_infos = utils.get_preferences()
-
-        # NOTE: maybe we don't need to check each update but only the first
-
-        for update in reversed(dependency_updates):
-            # Is the object tracked ?
-            if update.id.uuid:
-                # Retrieve local version
-                node = client.get(update.id.uuid)
-
-                # Check our right on this update:
-                #   - if its ours or ( under common and diff), launch the
-                #     update process
-                #   - if its to someone else, ignore the update (go deeper ?)
-                if node.owner == session_infos.username:
-                    # Avoid slow geometry update
-                    if 'EDIT' in context.mode:
-                        break
-                    logger.error("UPDATE: MODIFIFY {}".format(type(update.id)))
-                    # client.commit(node.uuid)
-                    # client.push(node.uuid)
-                else:
-                    # Distant update
-                    continue
-            # else:
-            #     # New items !
-            #     logger.error("UPDATE: ADD")C.obj


def register():
    from bpy.utils import register_class
@ -574,13 +525,9 @@ def register():

    bpy.app.handlers.load_pre.append(load_pre_handler)

-    bpy.app.handlers.undo_post.append(sanitize_deps_graph)
-    bpy.app.handlers.redo_post.append(sanitize_deps_graph)
-
    bpy.app.handlers.frame_change_pre.append(update_client_frame)

-    # bpy.app.handlers.depsgraph_update_post.append(depsgraph_evaluation)


def unregister():
    global client
@ -595,13 +542,9 @@ def unregister():

    bpy.app.handlers.load_pre.remove(load_pre_handler)

-    bpy.app.handlers.undo_post.remove(sanitize_deps_graph)
-    bpy.app.handlers.redo_post.remove(sanitize_deps_graph)
-
    bpy.app.handlers.frame_change_pre.remove(update_client_frame)

-    # bpy.app.handlers.depsgraph_update_post.remove(depsgraph_evaluation)


if __name__ == "__main__":
    register()
@ -23,7 +23,6 @@ import string
from . import utils, bl_types, environment, addon_updater_ops, presence, ui
from .libs.replication.replication.constants import RP_COMMON

-logger = logging.getLogger(__name__)


def randomColor():
@ -124,6 +123,18 @@ class SessionPrefs(bpy.types.AddonPreferences):
        ],
        default='CONFIG'
    )
+    # WIP
+    logging_level: bpy.props.EnumProperty(
+        name="Log level",
+        description="Log verbosity level",
+        items=[
+            ('ERROR', "error", "show only errors"),
+            ('WARNING', "warning", "only show warnings and errors"),
+            ('INFO', "info", "default level"),
+            ('DEBUG', "debug", "show all logs"),
+        ],
+        default='INFO'
+    )
    conf_session_identity_expanded: bpy.props.BoolProperty(
        name="Identity",
        description="Identity",
@ -287,7 +298,7 @@ class SessionPrefs(bpy.types.AddonPreferences):
            new_db = self.supported_datablocks.add()

            type_module = getattr(bl_types, type)
-            type_impl_name = "Bl{}".format(type.split('_')[1].capitalize())
+            type_impl_name = f"Bl{type.split('_')[1].capitalize()}"
            type_module_class = getattr(type_module, type_impl_name)

            new_db.name = type_impl_name
@ -398,7 +409,7 @@ def register():

    prefs = bpy.context.preferences.addons[__package__].preferences
    if len(prefs.supported_datablocks) == 0:
-        logger.info('Generating bl_types preferences')
+        logging.debug('Generating bl_types preferences')
        prefs.generate_supported_types()

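Note: the logging_level preference added above is marked WIP and is not yet connected to the logging policy configured in register(). A hypothetical way to apply it, shown only as a sketch (the 'multi_user' package name is an assumption):

# Sketch: map the preference value onto the root logger level.
import logging
import bpy

prefs = bpy.context.preferences.addons['multi_user'].preferences  # assumed addon key
logging.getLogger().setLevel(getattr(logging, prefs.logging_level, logging.INFO))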
@ -32,10 +32,12 @@ from . import utils

renderer = None

-logger = logging.getLogger(__name__)


def view3d_find():
+    """ Find the first 'VIEW_3D' windows found in areas
+
+    :return: tuple(Area, Region, RegionView3D)
+    """
    for area in bpy.data.window_managers[0].windows[0].screen.areas:
        if area.type == 'VIEW_3D':
            v3d = area.spaces[0]
@ -43,15 +45,22 @@ def view3d_find():
            for region in area.regions:
                if region.type == 'WINDOW':
                    return area, region, rv3d

    return None, None, None


def refresh_3d_view():
+    """ Refresh the viewport
+    """
    area, region, rv3d = view3d_find()
    if area and region and rv3d:
        area.tag_redraw()

+def refresh_sidebar_view():
+    """ Refresh the blender sidebar
+    """
+    area, region, rv3d = view3d_find()
+
+    area.regions[3].tag_redraw()
+
def get_target(region, rv3d, coord):
    target = [0, 0, 0]
@ -199,7 +208,7 @@ class DrawFactory(object):

    def flush_selection(self, user=None):
        key_to_remove = []
-        select_key = "{}_select".format(user) if user else "select"
+        select_key = f"{user}_select" if user else "select"
        for k in self.d3d_items.keys():

            if select_key in k:
@ -226,7 +235,7 @@ class DrawFactory(object):
            self.flush_selection(client_id)

            for select_ob in client_selection:
-                drawable_key = "{}_select_{}".format(client_id, select_ob)
+                drawable_key = f"{client_id}_select_{select_ob}"

                ob = utils.find_from_attr("uuid", select_ob, bpy.data.objects)
                if not ob:
@ -302,7 +311,7 @@ class DrawFactory(object):
            self.d2d_items[client_id] = (position[1], client_id, color)

        except Exception as e:
-            logger.error("Draw client exception {}".format(e))
+            logging.error(f"Draw client exception: {e}")

    def draw3d_callback(self):
        bgl.glLineWidth(1.5)
@ -316,7 +325,7 @@ class DrawFactory(object):
                shader.uniform_float("color", color)
                batch.draw(shader)
            except Exception:
-                logger.error("3D Exception")
+                logging.error("3D Exception")

    def draw2d_callback(self):
        for position, font, color in self.d2d_items.values():
@ -330,7 +339,7 @@ class DrawFactory(object):
                blf.draw(0, font)

            except Exception:
-                logger.error("2D EXCEPTION")
+                logging.error("2D EXCEPTION")


def register():
@ -48,10 +48,9 @@ def printProgressBar (iteration, total, prefix = '', suffix = '', decimals = 1,
    From here:
    https://gist.github.com/greenstick/b23e475d2bfdc3a82e34eaa1f6781ee4
    """
-    percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
    filledLength = int(length * iteration // total)
    bar = fill * filledLength + fill_empty * (length - filledLength)
-    return '{} |{}| {}/{}{}'.format(prefix, bar, iteration,total, suffix)
+    return f"{prefix} |{bar}| {iteration}/{total}{suffix}"

def get_state_str(state):
    state_str = 'UNKNOWN'
@ -422,7 +421,7 @@ def draw_property(context, parent, property_uuid, level=0):

        detail_item_box.label(text="",
                              icon=settings.supported_datablocks[item.str_type].icon)
-        detail_item_box.label(text="{} ".format(name))
+        detail_item_box.label(text=f"{name}")

        # Operations

@ -29,9 +29,6 @@ import mathutils

from . import environment, presence

-logger = logging.getLogger(__name__)
-logger.setLevel(logging.WARNING)


def find_from_attr(attr_name, attr_value, list):
    for item in list:
scripts/test_addon.py (new file)
@ -0,0 +1,25 @@
import sys
try:
    import blender_addon_tester as BAT
except Exception as e:
    print(e)
    sys.exit(1)

def main():
    if len(sys.argv) > 1:
        addon = sys.argv[1]
    else:
        addon = "multi_user"
    if len(sys.argv) > 2:
        blender_rev = sys.argv[2]
    else:
        blender_rev = "2.82"

    try:
        exit_val = BAT.test_blender_addon(addon_path=addon, blender_revision=blender_rev)
    except Exception as e:
        print(e)
        exit_val = 1
    sys.exit(exit_val)

main()
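Note: besides the CI job, the script above can be run locally; both positional arguments are optional and fall back to the defaults shown in the file ("multi_user" and "2.82"). A possible local invocation, mirroring what the test job does:

pip install blender-addon-tester
python scripts/test_addon.py multi_user 2.82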
tests/test_bl_types/conftest.py (new file)
@ -0,0 +1,25 @@
import os

import pytest

import bpy


@pytest.fixture
def clear_blend():
    """ Remove all datablocks of a blend
    """
    for type_name in dir(bpy.data):
        try:
            type_collection = getattr(bpy.data, type_name)
            for item in type_collection:
                type_collection.remove(item)
        except Exception:
            continue


@pytest.fixture
def load_blendfile(blendname):
    print(f"loading {blendname}")
    dir_path = os.path.dirname(os.path.realpath(__file__))
    bpy.ops.wm.open_mainfile(filepath=os.path.join(dir_path, blendname))
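Note: a test opts into these fixtures by naming them as arguments; load_blendfile additionally expects a blendname value, which the commented-out parametrize line in test_action.py below suggests supplying through parametrization. A sketch of that intended usage, assuming a test_action.blend file sits next to the tests (both the file and the wiring are assumptions, not part of this diff):

# Sketch: blendname is provided by the parametrize mark and consumed by load_blendfile.
import bpy
import pytest

@pytest.mark.parametrize('blendname', ['test_action.blend'])
def test_from_blendfile(clear_blend, load_blendfile, blendname):
    assert bpy.data.filepath.endswith(blendname)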
tests/test_bl_types/test_action.py (new file)
@ -0,0 +1,38 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_action import BlAction

INTERPOLATION = ['CONSTANT', 'LINEAR', 'BEZIER', 'SINE', 'QUAD', 'CUBIC', 'QUART', 'QUINT', 'EXPO', 'CIRC', 'BACK', 'BOUNCE', 'ELASTIC']

# @pytest.mark.parametrize('blendname', ['test_action.blend'])
def test_action(clear_blend):
    # Generate a random action
    datablock = bpy.data.actions.new("sdsad")
    fcurve_sample = datablock.fcurves.new('location')
    fcurve_sample.keyframe_points.add(100)
    datablock.id_root = 'MESH'

    for i, point in enumerate(fcurve_sample.keyframe_points):
        point.co[0] = i
        point.co[1] = random.randint(-10,10)
        point.interpolation = INTERPOLATION[random.randint(0, len(INTERPOLATION)-1)]

    bpy.ops.mesh.primitive_plane_add()
    bpy.data.objects[0].animation_data_create()
    bpy.data.objects[0].animation_data.action = datablock

    # Test
    implementation = BlAction()
    expected = implementation._dump(datablock)
    bpy.data.actions.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
tests/test_bl_types/test_armature.py (new file)
@ -0,0 +1,22 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_armature import BlArmature

def test_armature(clear_blend):
    bpy.ops.object.armature_add()
    datablock = bpy.data.armatures[0]

    implementation = BlArmature()
    expected = implementation._dump(datablock)
    bpy.data.armatures.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
tests/test_bl_types/test_camera.py (new file)
@ -0,0 +1,25 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
from multi_user.bl_types.bl_camera import BlCamera


@pytest.mark.parametrize('camera_type', ['PANO','PERSP','ORTHO'])
def test_camera(clear_blend, camera_type):
    bpy.ops.object.camera_add()

    datablock = bpy.data.cameras[0]
    datablock.type = camera_type

    camera_dumper = BlCamera()
    expected = camera_dumper._dump(datablock)
    bpy.data.cameras.remove(datablock)

    test = camera_dumper._construct(expected)
    camera_dumper._load(expected, test)
    result = camera_dumper._dump(test)

    assert not DeepDiff(expected, result)
tests/test_bl_types/test_collection.py (new file)
@ -0,0 +1,28 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_collection import BlCollection

def test_collection(clear_blend):
    # Generate a collection with childrens and a cube
    datablock = bpy.data.collections.new("root")
    datablock.children.link(bpy.data.collections.new("child"))
    datablock.children.link(bpy.data.collections.new("child2"))

    bpy.ops.mesh.primitive_cube_add()
    datablock.objects.link(bpy.data.objects[0])

    # Test
    implementation = BlCollection()
    expected = implementation._dump(datablock)
    bpy.data.collections.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
tests/test_bl_types/test_curve.py (new file)
@ -0,0 +1,29 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_curve import BlCurve

@pytest.mark.parametrize('curve_type', ['TEXT','BEZIER'])
def test_curve(clear_blend, curve_type):
    if curve_type == 'TEXT':
        bpy.ops.object.text_add(enter_editmode=False, align='WORLD', location=(0, 0, 0))
    elif curve_type == 'BEZIER':
        bpy.ops.curve.primitive_bezier_curve_add(enter_editmode=False, align='WORLD', location=(0, 0, 0))
    else: #TODO: NURBS support
        bpy.ops.surface.primitive_nurbs_surface_curve_add(radius=1, enter_editmode=False, align='WORLD', location=(0, 0, 0))

    datablock = bpy.data.curves[0]

    implementation = BlCurve()
    expected = implementation._dump(datablock)
    bpy.data.curves.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
tests/test_bl_types/test_gpencil.py (new file)
@ -0,0 +1,23 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
from multi_user.bl_types.bl_gpencil import BlGpencil


def test_gpencil(clear_blend):
    bpy.ops.object.gpencil_add(type='MONKEY')

    datablock = bpy.data.grease_pencils[0]

    implementation = BlGpencil()
    expected = implementation._dump(datablock)
    bpy.data.grease_pencils.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
tests/test_bl_types/test_image.py (new file)
@ -0,0 +1,21 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_image import BlImage

def test_image(clear_blend):
    datablock = bpy.data.images.new('asd',2000,2000)

    implementation = BlImage()
    expected = implementation._dump(datablock)
    bpy.data.images.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
tests/test_bl_types/test_lattice.py (new file)
@ -0,0 +1,23 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
from multi_user.bl_types.bl_lattice import BlLattice


def test_lattice(clear_blend):
    bpy.ops.object.add(type='LATTICE', enter_editmode=False, align='WORLD', location=(0, 0, 0))

    datablock = bpy.data.lattices[0]

    implementation = BlLattice()
    expected = implementation._dump(datablock)
    bpy.data.lattices.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
tests/test_bl_types/test_lightprobes.py (new file)
@ -0,0 +1,24 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
from multi_user.bl_types.bl_lightprobe import BlLightprobe


@pytest.mark.skipif(bpy.app.version[1] < 83, reason="requires blender 2.83 or higher")
@pytest.mark.parametrize('lightprobe_type', ['PLANAR','GRID','CUBEMAP'])
def test_lightprobes(clear_blend, lightprobe_type):
    bpy.ops.object.lightprobe_add(type=lightprobe_type)

    blender_light = bpy.data.lightprobes[0]
    lightprobe_dumper = BlLightprobe()
    expected = lightprobe_dumper._dump(blender_light)
    bpy.data.lightprobes.remove(blender_light)

    test = lightprobe_dumper._construct(expected)
    lightprobe_dumper._load(expected, test)
    result = lightprobe_dumper._dump(test)

    assert not DeepDiff(expected, result)
tests/test_bl_types/test_lights.py (new file)
@ -0,0 +1,23 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
from multi_user.bl_types.bl_light import BlLight


@pytest.mark.parametrize('light_type', ['SPOT','SUN','POINT','AREA'])
def test_light(clear_blend, light_type):
    bpy.ops.object.light_add(type=light_type)

    blender_light = bpy.data.lights[0]
    light_dumper = BlLight()
    expected = light_dumper._dump(blender_light)
    bpy.data.lights.remove(blender_light)

    test = light_dumper._construct(expected)
    light_dumper._load(expected, test)
    result = light_dumper._dump(test)

    assert not DeepDiff(expected, result)
tests/test_bl_types/test_material.py (new file)
@ -0,0 +1,28 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
from multi_user.bl_types.bl_material import BlMaterial


def test_material(clear_blend):
    nodes_types = [node.bl_rna.identifier for node in bpy.types.ShaderNode.__subclasses__()]

    datablock = bpy.data.materials.new("test")
    datablock.use_nodes = True
    bpy.data.materials.create_gpencil_data(datablock)

    for ntype in nodes_types:
        datablock.node_tree.nodes.new(ntype)

    implementation = BlMaterial()
    expected = implementation._dump(datablock)
    bpy.data.materials.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
tests/test_bl_types/test_mesh.py (new file)
@ -0,0 +1,28 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_mesh import BlMesh

@pytest.mark.parametrize('mesh_type', ['EMPTY','FILLED'])
def test_mesh(clear_blend, mesh_type):
    if mesh_type == 'FILLED':
        bpy.ops.mesh.primitive_monkey_add()
    elif mesh_type == 'EMPTY':
        bpy.data.meshes.new('empty_mesh')

    datablock = bpy.data.meshes[0]

    # Test
    implementation = BlMesh()
    expected = implementation._dump(datablock)
    bpy.data.meshes.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
tests/test_bl_types/test_metaball.py (new file)
@ -0,0 +1,23 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
from multi_user.bl_types.bl_metaball import BlMetaball


@pytest.mark.parametrize('metaballs_type', ['PLANE','CAPSULE','BALL','ELLIPSOID','CUBE'])
def test_metaball(clear_blend, metaballs_type):
    bpy.ops.object.metaball_add(type=metaballs_type)

    datablock = bpy.data.metaballs[0]
    dumper = BlMetaball()
    expected = dumper._dump(datablock)
    bpy.data.metaballs.remove(datablock)

    test = dumper._construct(expected)
    dumper._load(expected, test)
    result = dumper._dump(test)

    assert not DeepDiff(expected, result)
tests/test_bl_types/test_object.py (new file)
@ -0,0 +1,64 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_object import BlObject

# Removed 'BUILD' modifier because the seed doesn't seems to be
# correctly initialized (#TODO: report the bug)
MOFIFIERS_TYPES = [
    'DATA_TRANSFER', 'MESH_CACHE', 'MESH_SEQUENCE_CACHE',
    'NORMAL_EDIT', 'WEIGHTED_NORMAL', 'UV_PROJECT', 'UV_WARP',
    'VERTEX_WEIGHT_EDIT', 'VERTEX_WEIGHT_MIX',
    'VERTEX_WEIGHT_PROXIMITY', 'ARRAY', 'BEVEL', 'BOOLEAN',
    'DECIMATE', 'EDGE_SPLIT', 'MASK', 'MIRROR',
    'MULTIRES', 'REMESH', 'SCREW', 'SKIN', 'SOLIDIFY',
    'SUBSURF', 'TRIANGULATE',
    'WELD', 'WIREFRAME', 'ARMATURE', 'CAST', 'CURVE',
    'DISPLACE', 'HOOK', 'LAPLACIANDEFORM', 'LATTICE',
    'MESH_DEFORM', 'SHRINKWRAP', 'SIMPLE_DEFORM', 'SMOOTH',
    'CORRECTIVE_SMOOTH', 'LAPLACIANSMOOTH', 'SURFACE_DEFORM',
    'WARP', 'WAVE', 'CLOTH', 'COLLISION', 'DYNAMIC_PAINT',
    'EXPLODE', 'FLUID', 'OCEAN', 'PARTICLE_INSTANCE',
    'SOFT_BODY', 'SURFACE']

CONSTRAINTS_TYPES = [
    'CAMERA_SOLVER', 'FOLLOW_TRACK', 'OBJECT_SOLVER', 'COPY_LOCATION',
    'COPY_ROTATION', 'COPY_SCALE', 'COPY_TRANSFORMS', 'LIMIT_DISTANCE',
    'LIMIT_LOCATION', 'LIMIT_ROTATION', 'LIMIT_SCALE', 'MAINTAIN_VOLUME',
    'TRANSFORM', 'TRANSFORM_CACHE', 'CLAMP_TO', 'DAMPED_TRACK', 'IK',
    'LOCKED_TRACK', 'SPLINE_IK', 'STRETCH_TO', 'TRACK_TO', 'ACTION',
    'ARMATURE', 'CHILD_OF', 'FLOOR', 'FOLLOW_PATH', 'PIVOT', 'SHRINKWRAP']

def test_object(clear_blend):
    bpy.ops.mesh.primitive_cube_add(
        enter_editmode=False, align='WORLD', location=(0, 0, 0))

    datablock = bpy.data.objects[0]

    # Add modifiers
    for mod_type in MOFIFIERS_TYPES:
        datablock.modifiers.new(mod_type,mod_type)

    # Add constraints
    for const_type in CONSTRAINTS_TYPES:
        datablock.constraints.new(const_type)

    datablock.vertex_groups.new(name='vg')
    datablock.vertex_groups.new(name='vg1')
    datablock.shape_key_add(name='shape')
    datablock.shape_key_add(name='shape2')


    implementation = BlObject()
    expected = implementation._dump(datablock)
    bpy.data.objects.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
tests/test_bl_types/test_scene.py (new file)
@ -0,0 +1,22 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_scene import BlScene

def test_scene(clear_blend):
    datablock = bpy.data.scenes.new("toto")
    datablock.view_settings.use_curve_mapping = True
    # Test
    implementation = BlScene()
    expected = implementation._dump(datablock)
    bpy.data.scenes.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
tests/test_bl_types/test_speaker.py (new file)
@ -0,0 +1,22 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_speaker import BlSpeaker

def test_speaker(clear_blend):
    bpy.ops.object.speaker_add()
    datablock = bpy.data.speakers[0]

    implementation = BlSpeaker()
    expected = implementation._dump(datablock)
    bpy.data.speakers.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
tests/test_bl_types/test_world.py (new file)
@ -0,0 +1,22 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_world import BlWorld

def test_world(clear_blend):
    datablock = bpy.data.worlds.new('test')
    datablock.use_nodes = True

    implementation = BlWorld()
    expected = implementation._dump(datablock)
    bpy.data.worlds.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
tests/test_operators.py (new file)
@ -0,0 +1,20 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random


def test_start_session():
    result = bpy.ops.session.start()

    assert 'FINISHED' in result

def test_stop_session():
    result = bpy.ops.session.stop()

    assert 'FINISHED' in result