Compare commits
22 Commits: load-db-to ... travel

Commits (SHA1):
d78c42b02f
8cb40b2d60
57fdd492ef
e538752fbc
53eaaa2fcd
a7e9108bff
570909a7c4
736c3df7c4
8e606068f3
eb631e2d4b
70641435cc
552c649d34
d9d5a34653
12acd22660
826a59085e
5ee4988aca
cb85a1db4c
ffe419a46e
bed33ca6ba
56ea93508c
5f95eadc1d
40ad96b0af
CHANGELOG.md (31)
@@ -157,4 +157,33 @@ All notable changes to this project will be documented in this file.

- Empty and Light object selection highlights
- Material renaming
- Default material nodes input parameters
- blender 2.91 python api compatibility

## [0.3.0] - 2021-04-14

### Added

- Curve material support
- Cycle visibility settings
- Session save/load operator
- Add new scene support
- Physic initial support
- Geometry node initial support
- Blender 2.93 compatibility

### Changed

- Host documentation on Gitlab Page
- Event driven update (from the blender deps graph)

### Fixed

- Vertex group assignation
- Parent relation can't be removed
- Separate object
- Delete animation
- Sync missing holdout option for grease pencil material
- Sync missing `skin_vertices`
- Exception access violation during Undo/Redo
- Sync missing armature bone Roll
- Sync missing driver data_path
- Constraint replication
README.md (60)
@@ -29,35 +29,35 @@ See the [troubleshooting guide](https://slumber.gitlab.io/multi-user/getting_sta

Currently, not all data-blocks are supported for replication over the wire. The following list summarizes the status of each one.

-| Name | Status | Comment |
-| -------------- | :----: | :---: |
-| action | ✔️ | |
-| armature | ❗ | Not stable |
-| camera | ✔️ | |
-| collection | ✔️ | |
-| curve | ❗ | Nurbs surfaces not supported |
-| gpencil | ✔️ | [Airbrush not supported](https://gitlab.com/slumber/multi-user/-/issues/123) |
-| image | ✔️ | |
-| mesh | ✔️ | |
-| material | ✔️ | |
-| node_groups | ❗ | Material only |
-| geometry nodes | ✔️ | |
-| metaball | ✔️ | |
-| object | ✔️ | |
-| textures | ❗ | Supported for modifiers/materials only |
-| texts | ✔️ | |
-| scene | ✔️ | |
-| world | ✔️ | |
-| lightprobes | ✔️ | |
-| compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
-| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
-| nla | ❌ | |
-| volumes | ✔️ | |
-| particles | ❌ | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24) |
-| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
-| vse | ❗ | Mask and Clip not supported yet |
-| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
-| libraries | ❗ | Partial |
+| Name | Status | Comment |
+| -------------- | :----: | :---: |
+| action | ✔️ | |
+| armature | ❗ | Not stable |
+| camera | ✔️ | |
+| collection | ✔️ | |
+| curve | ❗ | Nurbs surfaces not supported |
+| gpencil | ✔️ | |
+| image | ✔️ | |
+| mesh | ✔️ | |
+| material | ✔️ | |
+| node_groups | ❗ | Material & Geometry only |
+| geometry nodes | ✔️ | |
+| metaball | ✔️ | |
+| object | ✔️ | |
+| textures | ❗ | Supported for modifiers/materials/geo nodes only |
+| texts | ✔️ | |
+| scene | ✔️ | |
+| world | ✔️ | |
+| lightprobes | ✔️ | |
+| compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
+| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
+| nla | ❌ | |
+| volumes | ✔️ | |
+| particles | ❗ | The cache isn't syncing. |
+| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
+| vse | ❗ | Mask and Clip not supported yet |
+| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
+| libraries | ❗ | Partial |
@@ -70,7 +70,7 @@ I'm working on it.

| Dependencies | Version | Needed |
| ------------ | :-----: | -----: |
| Replication  | latest  |    yes |
multi_user/addon_updater_ops.py

@@ -122,13 +122,13 @@ class addon_updater_install_popup(bpy.types.Operator):

    # if true, run clean install - ie remove all files before adding new
    # equivalent to deleting the addon and reinstalling, except the
    # updater folder/backup folder remains
-   clean_install = bpy.props.BoolProperty(
+   clean_install: bpy.props.BoolProperty(
        name="Clean install",
        description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
        default=False,
        options={'HIDDEN'}
    )
-   ignore_enum = bpy.props.EnumProperty(
+   ignore_enum: bpy.props.EnumProperty(
        name="Process update",
        description="Decide to install, ignore, or defer new addon update",
        items=[

@@ -264,7 +264,7 @@ class addon_updater_update_now(bpy.types.Operator):

    # if true, run clean install - ie remove all files before adding new
    # equivalent to deleting the addon and reinstalling, except the
    # updater folder/backup folder remains
-   clean_install = bpy.props.BoolProperty(
+   clean_install: bpy.props.BoolProperty(
        name="Clean install",
        description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
        default=False,

@@ -332,7 +332,7 @@ class addon_updater_update_target(bpy.types.Operator):

            i+=1
        return ret

-   target = bpy.props.EnumProperty(
+   target: bpy.props.EnumProperty(
        name="Target version to install",
        description="Select the version to install",
        items=target_version

@@ -341,7 +341,7 @@ class addon_updater_update_target(bpy.types.Operator):

    # if true, run clean install - ie remove all files before adding new
    # equivalent to deleting the addon and reinstalling, except the
    # updater folder/backup folder remains
-   clean_install = bpy.props.BoolProperty(
+   clean_install: bpy.props.BoolProperty(
        name="Clean install",
        description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
        default=False,

@@ -399,7 +399,7 @@ class addon_updater_install_manually(bpy.types.Operator):

    bl_description = "Proceed to manually install update"
    bl_options = {'REGISTER', 'INTERNAL'}

-   error = bpy.props.StringProperty(
+   error: bpy.props.StringProperty(
        name="Error Occurred",
        default="",
        options={'HIDDEN'}

@@ -461,7 +461,7 @@ class addon_updater_updated_successful(bpy.types.Operator):

    bl_description = "Update installation response"
    bl_options = {'REGISTER', 'INTERNAL', 'UNDO'}

-   error = bpy.props.StringProperty(
+   error: bpy.props.StringProperty(
        name="Error Occurred",
        default="",
        options={'HIDDEN'}
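The only change in these hunks is the switch from assignment (`clean_install = bpy.props...`) to annotation (`clean_install: bpy.props...`), which is the form Blender 2.80+ expects for operator properties; the assignment form is deprecated and warns in recent versions. A minimal sketch of the annotated style, on a hypothetical operator:

    import bpy

    class EXAMPLE_OT_clean_install(bpy.types.Operator):
        """Hypothetical operator illustrating annotation-style properties"""
        bl_idname = "example.clean_install"
        bl_label = "Clean install"

        # 2.80+ style: declared as a class annotation, not an assignment
        clean_install: bpy.props.BoolProperty(
            name="Clean install",
            description="Completely clear the addon's folder before installing the update",
            default=False,
        )

        def execute(self, context):
            self.report({'INFO'}, f"clean_install={self.clean_install}")
            return {'FINISHED'}

    bpy.utils.register_class(EXAMPLE_OT_clean_install)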
multi_user/bl_types/__init__.py

@@ -42,6 +42,7 @@ __all__ = [
    # 'bl_sequencer',
    'bl_node_group',
    'bl_texture',
+   "bl_particle",
] # Order here defines execution order

if bpy.app.version[1] >= 91:
multi_user/bl_types/bl_object.py

@@ -54,6 +54,60 @@ def get_node_group_inputs(node_group):
    return inputs
    # return [inpt.identifer for inpt in node_group.inputs if inpt.type not in IGNORED_SOCKETS]


def dump_physics(target: bpy.types.Object)->dict:
    """
    Dump all physics settings from a given object excluding modifier
    related physics settings (such as softbody, cloth, dynapaint and fluid)
    """
    dumper = Dumper()
    dumper.depth = 1
    physics_data = {}

    # Collisions (collision)
    if target.collision and target.collision.use:
        physics_data['collision'] = dumper.dump(target.collision)

    # Field (field)
    if target.field and target.field.type != "NONE":
        physics_data['field'] = dumper.dump(target.field)

    # Rigid Body (rigid_body)
    if target.rigid_body:
        physics_data['rigid_body'] = dumper.dump(target.rigid_body)

    # Rigid Body constraint (rigid_body_constraint)
    if target.rigid_body_constraint:
        physics_data['rigid_body_constraint'] = dumper.dump(target.rigid_body_constraint)

    return physics_data


def load_physics(dumped_settings: dict, target: bpy.types.Object):
    """ Load all physics settings from a given object excluding modifier
        related physics settings (such as softbody, cloth, dynapaint and fluid)
    """
    loader = Loader()

    if 'collision' in dumped_settings:
        loader.load(target.collision, dumped_settings['collision'])

    if 'field' in dumped_settings:
        loader.load(target.field, dumped_settings['field'])

    if 'rigid_body' in dumped_settings:
        if not target.rigid_body:
            bpy.ops.rigidbody.object_add({"object": target})
        loader.load(target.rigid_body, dumped_settings['rigid_body'])
    elif target.rigid_body:
        bpy.ops.rigidbody.object_remove({"object": target})

    if 'rigid_body_constraint' in dumped_settings:
        if not target.rigid_body_constraint:
            bpy.ops.rigidbody.constraint_add({"object": target})
        loader.load(target.rigid_body_constraint, dumped_settings['rigid_body_constraint'])
    elif target.rigid_body_constraint:
        bpy.ops.rigidbody.constraint_remove({"object": target})


def dump_modifier_geometry_node_inputs(modifier: bpy.types.Modifier) -> list:
    """ Dump geometry node modifier input properties
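A short usage sketch of the two new helpers, assuming they are imported from this module inside Blender and that both objects already exist (names and import path are illustrative):

    import bpy
    from multi_user.bl_types.bl_object import dump_physics, load_physics  # assumed import path

    source = bpy.data.objects['Cube']       # object whose physics settings we replicate
    replica = bpy.data.objects['Cube.001']  # object that should receive the same settings

    # Serialize collision, force field, rigid body and rigid body constraint
    # settings into a plain dict of dicts...
    physics_state = dump_physics(source)

    # ...then re-apply them; load_physics adds or removes the rigid body and
    # its constraint on the target so it matches the dumped state.
    load_physics(physics_state, replica)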
@@ -186,7 +240,7 @@ def find_geometry_nodes_dependencies(modifiers: bpy.types.bpy_prop_collection) -

            # parameter = mod.get(inpt.identifier)
            # if parameter and isinstance(parameter, bpy.types.ID):
            #     dependencies.append(parameter)
    logging.info(dependencies)

    return dependencies

@@ -317,9 +371,9 @@ class BlObject(BlDatablock):

        loader.load(target.display, data['display'])

        # Parenting
-       parent_id = data.get('parent_id')
+       parent_id = data.get('parent_uid')
        if parent_id:
-           parent = bpy.data.objects[parent_id]
+           parent = get_datablock_from_uuid(parent_id[0], bpy.data.objects[parent_id[1]])
            # Avoid reloading
            if target.parent != parent and parent is not None:
                target.parent = parent
@@ -381,6 +435,30 @@ class BlObject(BlDatablock):

                    load_modifier_geometry_node_inputs(
                        data['modifiers'][modifier.name], modifier)

            particles_modifiers = [
                mod for mod in target.modifiers if mod.type == 'PARTICLE_SYSTEM']

            for mod in particles_modifiers:
                default = mod.particle_system.settings
                dumped_particles = data['modifiers'][mod.name]['particle_system']
                loader.load(mod.particle_system, dumped_particles)

                settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
                if settings:
                    mod.particle_system.settings = settings
                    # Hack to remove the default generated particle settings
                    if not default.uuid:
                        bpy.data.particles.remove(default)

            phys_modifiers = [
                mod for mod in target.modifiers if mod.type in ['SOFT_BODY', 'CLOTH']]

            for mod in phys_modifiers:
                loader.load(mod.settings, data['modifiers'][mod.name]['settings'])

        # PHYSICS
        load_physics(data, target)

        transform = data.get('transforms', None)
        if transform:
            target.matrix_parent_inverse = mathutils.Matrix(

@@ -388,6 +466,7 @@ class BlObject(BlDatablock):

            target.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
            target.matrix_local = mathutils.Matrix(transform['matrix_local'])


    def _dump_implementation(self, data, instance=None):
        assert(instance)

@@ -450,7 +529,7 @@ class BlObject(BlDatablock):

        # PARENTING
        if instance.parent:
-           data['parent_id'] = instance.parent.name
+           data['parent_uid'] = (instance.parent.uuid, instance.parent.name)

        # MODIFIERS
        if hasattr(instance, 'modifiers'):
@@ -459,13 +538,29 @@ class BlObject(BlDatablock):

            if modifiers:
                dumper.include_filter = None
                dumper.depth = 1
                dumper.exclude_filter = ['is_active']
                for index, modifier in enumerate(modifiers):
-                   data["modifiers"][modifier.name] = dumper.dump(modifier)
+                   dumped_modifier = dumper.dump(modifier)
                    # hack to dump geometry nodes inputs
                    if modifier.type == 'NODES':
                        dumped_inputs = dump_modifier_geometry_node_inputs(
                            modifier)
-                       data["modifiers"][modifier.name]['inputs'] = dumped_inputs
+                       dumped_modifier['inputs'] = dumped_inputs

                    elif modifier.type == 'PARTICLE_SYSTEM':
                        dumper.exclude_filter = [
                            "is_edited",
                            "is_editable",
                            "is_global_hair"
                        ]
                        dumped_modifier['particle_system'] = dumper.dump(modifier.particle_system)
                        dumped_modifier['particle_system']['settings_uuid'] = modifier.particle_system.settings.uuid

                    elif modifier.type in ['SOFT_BODY', 'CLOTH']:
                        dumped_modifier['settings'] = dumper.dump(modifier.settings)

                    data["modifiers"][modifier.name] = dumped_modifier

        gp_modifiers = getattr(instance, 'grease_pencil_modifiers', None)

        if gp_modifiers:
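With this change each modifier is dumped into a local `dumped_modifier` dict first, the type-specific extras are attached to it, and only then is it stored under `data["modifiers"][modifier.name]`. For a particle-system modifier the stored entry ends up roughly shaped like the sketch below (the generic keys come from the Dumper at depth 1 and are illustrative; the `particle_system` block and `settings_uuid` follow the code above):

    data["modifiers"]["ParticleSystem"] = {
        # generic modifier attributes collected by Dumper(depth=1), illustrative
        "name": "ParticleSystem",
        "type": "PARTICLE_SYSTEM",
        "show_viewport": True,
        # extra payload attached for PARTICLE_SYSTEM modifiers
        "particle_system": {
            "seed": 0,
            # uuid of the ParticleSettings datablock, resolved again on load
            "settings_uuid": "illustrative-uuid",
        },
    }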
@@ -487,6 +582,7 @@ class BlObject(BlDatablock):

                        'location']
                    gp_mod_data['curve'] = curve_dumper.dump(modifier.curve)


        # CONSTRAINTS
        if hasattr(instance, 'constraints'):
            dumper.include_filter = None

@@ -583,6 +679,9 @@ class BlObject(BlDatablock):

            ]
            data['cycles_visibility'] = dumper.dump(instance.cycles_visibility)

        # PHYSICS
        data.update(dump_physics(instance))

        return data

    def _resolve_deps_implementation(self):

@@ -592,6 +691,10 @@ class BlObject(BlDatablock):

        if self.instance.data:
            deps.append(self.instance.data)

        # Particle systems
        for particle_slot in self.instance.particle_systems:
            deps.append(particle_slot.settings)

        if self.is_library:
            deps.append(self.instance.library)
multi_user/bl_types/bl_particle.py (90, new file)

@@ -0,0 +1,90 @@
import bpy
import mathutils

from . import dump_anything
from .bl_datablock import BlDatablock, get_datablock_from_uuid


def dump_textures_slots(texture_slots: bpy.types.bpy_prop_collection) -> list:
    """ Dump every texture slot collection as the form:
        [(index, slot_texture_uuid, slot_texture_name), (), ...]
    """
    dumped_slots = []
    for index, slot in enumerate(texture_slots):
        if slot and slot.texture:
            dumped_slots.append((index, slot.texture.uuid, slot.texture.name))

    return dumped_slots


def load_texture_slots(dumped_slots: list, target_slots: bpy.types.bpy_prop_collection):
    """ Load texture slots from a dumped list of
        (index, slot_texture_uuid, slot_texture_name) tuples
    """
    for index, slot in enumerate(target_slots):
        if slot:
            target_slots.clear(index)

    for index, slot_uuid, slot_name in dumped_slots:
        target_slots.create(index).texture = get_datablock_from_uuid(
            slot_uuid, slot_name
        )


IGNORED_ATTR = [
    "is_embedded_data",
    "is_evaluated",
    "is_fluid",
    "is_library_indirect",
    "users"
]


class BlParticle(BlDatablock):
    bl_id = "particles"
    bl_class = bpy.types.ParticleSettings
    bl_icon = "PARTICLES"
    bl_check_common = False
    bl_reload_parent = False

    def _construct(self, data):
        instance = bpy.data.particles.new(data["name"])
        instance.uuid = self.uuid
        return instance

    def _load_implementation(self, data, target):
        dump_anything.load(target, data)

        dump_anything.load(target.effector_weights, data["effector_weights"])

        # Force field
        force_field_1 = data.get("force_field_1", None)
        if force_field_1:
            dump_anything.load(target.force_field_1, force_field_1)

        force_field_2 = data.get("force_field_2", None)
        if force_field_2:
            dump_anything.load(target.force_field_2, force_field_2)

        # Texture slots
        load_texture_slots(data["texture_slots"], target.texture_slots)

    def _dump_implementation(self, data, instance=None):
        assert instance

        dumper = dump_anything.Dumper()
        dumper.depth = 1
        dumper.exclude_filter = IGNORED_ATTR
        data = dumper.dump(instance)

        # Particle effectors
        data["effector_weights"] = dumper.dump(instance.effector_weights)
        if instance.force_field_1:
            data["force_field_1"] = dumper.dump(instance.force_field_1)
        if instance.force_field_2:
            data["force_field_2"] = dumper.dump(instance.force_field_2)

        # Texture slots
        data["texture_slots"] = dump_textures_slots(instance.texture_slots)

        return data

    def _resolve_deps_implementation(self):
        return [t.texture for t in self.instance.texture_slots if t and t.texture]
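`dump_textures_slots` flattens the occupied texture slots into (index, texture_uuid, texture_name) triplets and `load_texture_slots` rebuilds the slots from them, resolving each texture by uuid with the name as fallback. A small round-trip sketch, assuming the module is importable as below and the particle settings names exist:

    import bpy
    from multi_user.bl_types.bl_particle import dump_textures_slots, load_texture_slots  # assumed import path

    settings = bpy.data.particles['ParticleSettings']        # illustrative name

    # Flatten occupied slots to (index, texture_uuid, texture_name) triplets,
    # e.g. [(0, 'illustrative-uuid', 'NoiseTexture')]
    dumped = dump_textures_slots(settings.texture_slots)

    # Recreate the slots on another ParticleSettings datablock: existing slots
    # are cleared, then each slot is created at its recorded index.
    other = bpy.data.particles['ParticleSettings.001']       # illustrative name
    load_texture_slots(dumped, other.texture_slots)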
multi_user/bl_types/dump_anything.py

@@ -610,6 +610,8 @@ class Loader:

            instance.write(bpy.data.fonts.get(dump))
        elif isinstance(rna_property_type, T.Sound):
            instance.write(bpy.data.sounds.get(dump))
+       # elif isinstance(rna_property_type, T.ParticleSettings):
+       #     instance.write(bpy.data.particles.get(dump))

    def _load_matrix(self, matrix, dump):
        matrix.write(mathutils.Matrix(dump))
Submodule multi_user/libs/replication deleted from 9a02e16d70
multi_user/operators.py

@@ -32,6 +32,7 @@ from operator import itemgetter

from pathlib import Path
from queue import Queue
from time import gmtime, strftime
+from numpy import interp

try:
    import _pickle as pickle

@@ -39,6 +40,7 @@ except ImportError:

    import pickle

import bpy
+import bmesh
import mathutils
from bpy.app.handlers import persistent
from bpy_extras.io_utils import ExportHelper, ImportHelper
@@ -56,6 +58,226 @@ background_execution_queue = Queue()

deleyables = []
stop_modal_executor = False

CLEARED_DATABLOCKS = ['actions', 'armatures', 'cache_files', 'cameras',
                      'collections', 'curves', 'fonts',
                      'grease_pencils', 'images', 'lattices', 'libraries',
                      'lightprobes', 'lights', 'linestyles', 'masks',
                      'materials', 'meshes', 'metaballs', 'movieclips',
                      'node_groups', 'objects', 'paint_curves', 'particles',
                      'scenes', 'shape_keys', 'sounds', 'speakers', 'texts',
                      'textures', 'volumes', 'worlds']

PERSISTENT_DATABLOCKS = ['LineStyle', 'Dots Stroke', 'replay_action']


def clean_scene(ignored_datablocks: list = None):
    """
    Delete all datablock of the scene except PERSISTENT_DATABLOCKS and ignored
    ones in ignored_datablocks.
    """
    PERSISTENT_DATABLOCKS.extend(ignored_datablocks)
    # Avoid to trigger a runtime error by keeping the last scene
    PERSISTENT_DATABLOCKS.append(bpy.data.scenes[0].name)

    for type_name in CLEARED_DATABLOCKS:
        type_collection = getattr(bpy.data, type_name)
        for datablock in type_collection:
            if datablock.name in PERSISTENT_DATABLOCKS:
                logging.debug(f"Skipping {datablock.name}")
                continue
            else:
                logging.debug(f"Removing {datablock.name}")
                type_collection.remove(datablock)

    # Clear sequencer
    bpy.context.scene.sequence_editor_clear()


def get_bb_coords_from_obj(object: bpy.types.Object, instance: bpy.types.Object = None) -> list:
    """ Generate bounding box in world coordinate from object bound box

    :param object: target object
    :type object: bpy.types.Object
    :param instance: optionnal instance
    :type instance: bpy.types.Object
    :return: list of 8 points [(x,y,z),...]
    """
    base = object.matrix_world

    if instance:
        scale = mathutils.Matrix.Diagonal(object.matrix_world.to_scale())
        base = instance.matrix_world @ scale.to_4x4()

    bbox_corners = [base @ mathutils.Vector(
        corner) for corner in object.bound_box]

    return [(point.x, point.y, point.z) for point in bbox_corners]


def bbox_from_obj(obj: bpy.types.Object, index: int = 1) -> list:
    """ Generate a bounding box for a given object by using its world matrix

    :param obj: target object
    :type obj: bpy.types.Object
    :param index: indice offset
    :type index: int
    :return: list of 8 points [(x,y,z),...], list of 12 link between these points [(1,2),...]
    """
    radius = 1.0  # Radius of the bounding box
    index = 8*index
    vertex_indices = (
        (0+index, 1+index), (0+index, 2+index), (1+index, 3+index), (2+index, 3+index),
        (4+index, 5+index), (4+index, 6+index), (5+index, 7+index), (6+index, 7+index),
        (0+index, 4+index), (1+index, 5+index), (2+index, 6+index), (3+index, 7+index))

    if obj.type == 'EMPTY':
        radius = obj.empty_display_size
    elif obj.type == 'LIGHT':
        radius = obj.data.shadow_soft_size
    elif obj.type == 'LIGHT_PROBE':
        radius = obj.data.influence_distance
    elif obj.type == 'CAMERA':
        radius = obj.data.display_size
    elif hasattr(obj, 'bound_box'):
        vertex_indices = (
            (0+index, 1+index), (1+index, 2+index),
            (2+index, 3+index), (0+index, 3+index),
            (4+index, 5+index), (5+index, 6+index),
            (6+index, 7+index), (4+index, 7+index),
            (0+index, 4+index), (1+index, 5+index),
            (2+index, 6+index), (3+index, 7+index))
        vertex_pos = get_bb_coords_from_obj(obj)
        return vertex_pos, vertex_indices

    coords = [
        (-radius, -radius, -radius), (+radius, -radius, -radius),
        (-radius, +radius, -radius), (+radius, +radius, -radius),
        (-radius, -radius, +radius), (+radius, -radius, +radius),
        (-radius, +radius, +radius), (+radius, +radius, +radius)]

    base = obj.matrix_world
    bbox_corners = [base @ mathutils.Vector(corner) for corner in coords]

    vertex_pos = [(point.x, point.y, point.z) for point in bbox_corners]

    return vertex_pos, vertex_indices


def draw_user(username, metadata, radius=0.01, intensity=10.0):
    """
    Generate a mesh representation of a given user frustum and
    sight of view.
    """
    view_corners = metadata.get('view_corners')
    color = metadata.get('color', (1,1,1,0))
    objects = metadata.get('selected_objects', None)
    scene = metadata.get('scene_current', bpy.context.scene.name)

    user_collection = bpy.data.collections.new(username)

    # User Color
    user_mat = bpy.data.materials.new(username)
    user_mat.use_nodes = True
    nodes = user_mat.node_tree.nodes
    nodes.remove(nodes['Principled BSDF'])
    emission_node = nodes.new('ShaderNodeEmission')
    emission_node.inputs['Color'].default_value = color
    emission_node.inputs['Strength'].default_value = intensity

    output_node = nodes['Material Output']
    user_mat.node_tree.links.new(
        emission_node.outputs['Emission'], output_node.inputs['Surface'])

    # Generate camera mesh
    camera_vertices = view_corners[:4]
    camera_vertices.append(view_corners[6])
    camera_mesh = bpy.data.meshes.new(f"{username}_camera")
    camera_obj = bpy.data.objects.new(f"{username}_camera", camera_mesh)
    frustum_bm = bmesh.new()
    frustum_bm.from_mesh(camera_mesh)

    for p in camera_vertices:
        frustum_bm.verts.new(p)
    frustum_bm.verts.ensure_lookup_table()

    frustum_bm.edges.new((frustum_bm.verts[0], frustum_bm.verts[2]))
    frustum_bm.edges.new((frustum_bm.verts[2], frustum_bm.verts[1]))
    frustum_bm.edges.new((frustum_bm.verts[1], frustum_bm.verts[3]))
    frustum_bm.edges.new((frustum_bm.verts[3], frustum_bm.verts[0]))

    frustum_bm.edges.new((frustum_bm.verts[0], frustum_bm.verts[4]))
    frustum_bm.edges.new((frustum_bm.verts[2], frustum_bm.verts[4]))
    frustum_bm.edges.new((frustum_bm.verts[1], frustum_bm.verts[4]))
    frustum_bm.edges.new((frustum_bm.verts[3], frustum_bm.verts[4]))
    frustum_bm.edges.ensure_lookup_table()

    frustum_bm.to_mesh(camera_mesh)
    frustum_bm.free()  # free and prevent further access

    camera_obj.modifiers.new("wireframe", "SKIN")
    camera_obj.data.skin_vertices[0].data[0].use_root = True
    for v in camera_mesh.skin_vertices[0].data:
        v.radius = [radius, radius]

    camera_mesh.materials.append(user_mat)
    user_collection.objects.link(camera_obj)

    # Generate sight mesh
    sight_mesh = bpy.data.meshes.new(f"{username}_sight")
    sight_obj = bpy.data.objects.new(f"{username}_sight", sight_mesh)
    sight_verts = view_corners[4:6]
    sight_bm = bmesh.new()
    sight_bm.from_mesh(sight_mesh)

    for p in sight_verts:
        sight_bm.verts.new(p)
    sight_bm.verts.ensure_lookup_table()

    sight_bm.edges.new((sight_bm.verts[0], sight_bm.verts[1]))
    sight_bm.edges.ensure_lookup_table()
    sight_bm.to_mesh(sight_mesh)
    sight_bm.free()

    sight_obj.modifiers.new("wireframe", "SKIN")
    sight_obj.data.skin_vertices[0].data[0].use_root = True
    for v in sight_mesh.skin_vertices[0].data:
        v.radius = [radius, radius]

    sight_mesh.materials.append(user_mat)
    user_collection.objects.link(sight_obj)

    # Draw selected objects
    if objects:
        for o in list(objects):
            instance = bl_types.bl_datablock.get_datablock_from_uuid(o, None)
            if instance:
                bbox_mesh = bpy.data.meshes.new(f"{instance.name}_bbox")
                bbox_obj = bpy.data.objects.new(
                    f"{instance.name}_bbox", bbox_mesh)
                bbox_verts, bbox_ind = bbox_from_obj(instance, index=0)
                bbox_bm = bmesh.new()
                bbox_bm.from_mesh(bbox_mesh)

                for p in bbox_verts:
                    bbox_bm.verts.new(p)
                bbox_bm.verts.ensure_lookup_table()

                for e in bbox_ind:
                    bbox_bm.edges.new(
                        (bbox_bm.verts[e[0]], bbox_bm.verts[e[1]]))

                bbox_bm.to_mesh(bbox_mesh)
                bbox_bm.free()
                bpy.data.collections[username].objects.link(bbox_obj)

                bbox_obj.modifiers.new("wireframe", "SKIN")
                bbox_obj.data.skin_vertices[0].data[0].use_root = True
                for v in bbox_mesh.skin_vertices[0].data:
                    v.radius = [radius, radius]

                bbox_mesh.materials.append(user_mat)

    bpy.data.scenes[scene].collection.children.link(user_collection)


def session_callback(name):
    """ Session callback wrapper
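`bbox_from_obj` only computes geometry: eight corner positions plus the twelve edge index pairs linking them; `draw_user` then feeds that into bmesh and a skin modifier. A minimal sketch of consuming its output the same way, assuming it runs where the function defined above is in scope and that an object named 'Cube' exists:

    import bpy
    import bmesh

    # Eight world-space corner positions and twelve (start, end) index pairs.
    verts, edges = bbox_from_obj(bpy.data.objects['Cube'], index=0)

    mesh = bpy.data.meshes.new("bbox_preview")
    bm = bmesh.new()
    for co in verts:
        bm.verts.new(co)
    bm.verts.ensure_lookup_table()
    for start, end in edges:
        bm.edges.new((bm.verts[start], bm.verts[end]))
    bm.to_mesh(mesh)
    bm.free()

    preview = bpy.data.objects.new("bbox_preview", mesh)
    bpy.context.scene.collection.objects.link(preview)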
@@ -811,6 +1033,26 @@ class SessionStopAutoSaveOperator(bpy.types.Operator):

        return {'FINISHED'}


class SessionRenderReplay(bpy.types.Operator):
    bl_idname = "session.render_replay"
    bl_label = "Render Replay"
    bl_description = "Render Replay"

    @classmethod
    def poll(cls, context):
        return context.window_manager.session.replay_files

    def execute(self, context):
        base_path = str(context.scene.render.filepath)
        for frame in range(0, context.scene.frame_end):
            logging.info(f"Rendering frame {frame} to {base_path}_{frame}.png")
            context.scene.frame_current = frame
            filename = Path(bpy.context.window_manager.session.replay_files[context.scene.active_replay_file].name)
            context.scene.render.filepath = f"{base_path}{frame}_{filename.stem}"
            bpy.ops.render.render(write_still=True)

        context.scene.render.filepath = base_path
        return {'FINISHED'}


class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
    bl_idname = "session.load"
@@ -827,9 +1069,42 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):

        maxlen=255,  # Max internal buffer length, longer would be clamped.
    )

    draw_users: bpy.props.BoolProperty(
        name="Load users",
        description="Draw users in the scene",
        default=False,
    )

    replay: bpy.props.BoolProperty(
        name="Replay mode",
        description="Enable replay functions",
        default=False,
    )

    user_skin_radius: bpy.props.FloatProperty(
        name="Wireframe radius",
        description="Wireframe radius",
        default=0.005,
    )
    user_color_intensity: bpy.props.FloatProperty(
        name="Shading intensity",
        description="Shading intensity",
        default=1.0,
    )

    files: bpy.props.CollectionProperty(
        name='File paths',
        type=bpy.types.OperatorFileListElement
    )

    def draw(self, context):
        pass

    def execute(self, context):
        from replication.graph import ReplicationGraph

        runtime_settings = context.window_manager.session

        # TODO: add filechecks

        try:
@@ -878,7 +1153,16 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):

            logging.info("Graph succefully loaded")

-           utils.clean_scene()
+           # Persitstent collection
+           ignored_datablocks = []
+
+           persistent_collection = bpy.data.collections.get("multiuser_timelapse")
+           if self.replay and \
+                   runtime_settings.replay_persistent_collection and \
+                   persistent_collection:
+               ignored_datablocks = ['multiuser_timelapse','multiuser_timelapse_cam','multiuser_timelapse_cam_obj','multiuser_timelapse_path','multiuser_timelapse_path_obj', 'multiuser_timelapse_pathAction']
+
+           clean_scene(ignored_datablocks=ignored_datablocks)

            # Step 1: Construct nodes
            for node in graph.list_ordered():
@@ -888,6 +1172,69 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):

            for node in graph.list_ordered():
                graph[node].apply()

            if len(self.files) > 1:
                runtime_settings.replay_files.clear()
                context.scene.active_replay_file = len(self.files)-1
                directory = Path(self.filepath).parent
                file_list = [f['name'] for f in self.files]
                file_list.sort()
                for f in file_list:
                    snap = runtime_settings.replay_files.add()
                    snap.name = str(Path(directory, f))
                    print(f)

            if runtime_settings.replay_mode == 'TIMELINE':
                replay_action = bpy.data.actions.get('replay_action', bpy.data.actions.new('replay_action'))

                bpy.context.scene.animation_data_create()
                bpy.context.scene.animation_data.action = replay_action
                if len(replay_action.fcurves) > 0 and replay_action.fcurves[0].data_path == 'active_replay_file':
                    replay_fcurve = replay_action.fcurves[0]
                else:
                    replay_fcurve = replay_action.fcurves.new('active_replay_file')

                for p in reversed(replay_fcurve.keyframe_points):
                    replay_fcurve.keyframe_points.remove(p, fast=True)

                duration = runtime_settings.replay_duration
                file_count = len(self.files)-1
                for index in range(0, file_count):
                    frame = interp(index, [0, file_count], [bpy.context.scene.frame_start, duration])
                    replay_fcurve.keyframe_points.insert(frame, index)

            if self.draw_users:
                f = gzip.open(self.filepath, "rb")
                db = pickle.load(f)

                users = db.get("users")

                for username, user_data in users.items():
                    metadata = user_data['metadata']

                    if metadata:
                        draw_user(username, metadata, radius=self.user_skin_radius, intensity=self.user_color_intensity)

            # Relink the persistent collection
            if self.replay and persistent_collection:
                logging.info(f"Relinking {persistent_collection.name}")
                bpy.context.scene.collection.children.link(persistent_collection)

            # Reasign scene action
            if self.replay and \
                    runtime_settings.replay_mode == 'TIMELINE' and \
                    not bpy.context.scene.animation_data:
                bpy.context.scene.animation_data_create()
                bpy.context.scene.animation_data.action = bpy.data.actions.get('replay_action')
                bpy.context.scene.frame_end = runtime_settings.replay_duration

            # Reasign the scene camera
            if self.replay and \
                    runtime_settings.replay_persistent_collection and \
                    runtime_settings.replay_camera:
                bpy.context.scene.camera = runtime_settings.replay_camera

            bpy.context.scene.eevee.use_bloom = False

        return {'FINISHED'}
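In TIMELINE mode the operator keyframes `scene.active_replay_file`, using `numpy.interp` to spread the snapshot indices evenly across the frame range. The mapping in isolation (numbers are illustrative):

    from numpy import interp

    frame_start = 1      # bpy.context.scene.frame_start
    duration = 250       # runtime_settings.replay_duration
    file_count = 59      # len(self.files) - 1

    for index in range(file_count):
        frame = interp(index, [0, file_count], [frame_start, duration])
        # the operator inserts a keyframe at this frame:
        # replay_fcurve.keyframe_points.insert(frame, index)
        print(f"snapshot {index} -> frame {frame:.1f}")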
@@ -917,6 +1264,7 @@ classes = (

    SessionLoadSaveOperator,
    SessionStopAutoSaveOperator,
    SessionPurgeOperator,
+   SessionRenderReplay
)


def update_external_dependencies():
@@ -967,6 +1315,16 @@ def load_pre_handler(dummy):

@persistent
def update_client_frame(scene):
    setting = bpy.context.window_manager.session
    if setting.replay_mode == 'TIMELINE' and \
            setting.replay_files and \
            scene.active_replay_file != setting.replay_frame_current:
        index = bpy.context.scene.active_replay_file
        bpy.ops.session.load(filepath=bpy.context.window_manager.session.replay_files[index].name,
                             draw_users=True,
                             replay=True)
        setting.replay_frame_current = index

    if session and session.state['STATE'] == STATE_ACTIVE:
        session.update_user_metadata({
            'frame_current': scene.frame_current
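`update_client_frame` is decorated with `@persistent` so it keeps running after a file load, and with this change it also reloads the snapshot matching the current frame when replaying from the timeline. A minimal sketch of the handler pattern; which handler list the addon registers it on is an assumption here:

    import bpy
    from bpy.app.handlers import persistent

    @persistent
    def on_frame_change(scene):
        # Runs on every frame change; @persistent keeps it registered across .blend loads.
        print("frame:", scene.frame_current)

    # Assumed registration point for illustration.
    bpy.app.handlers.frame_change_post.append(on_frame_change)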
multi_user/preferences.py

@@ -28,7 +28,7 @@ from . import bl_types, environment, addon_updater_ops, presence, ui

from .utils import get_preferences, get_expanded_icon
from replication.constants import RP_COMMON
from replication.interface import session

from numpy import interp
# From https://stackoverflow.com/a/106223
IP_REGEX = re.compile("^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$")
HOSTNAME_REGEX = re.compile("^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$")
@@ -93,6 +93,88 @@ def set_log_level(self, value):

def get_log_level(self):
    return logging.getLogger().level


def set_active_replay(self, value):
    files_count = len(bpy.context.window_manager.session.replay_files)

    if files_count == 0:
        return

    max_index = files_count-1

    if value > max_index:
        value = max_index

    if hasattr(self, 'active_replay_file'):
        self["active_replay_file"] = value
    else:
        self.active_replay_file = value

    if bpy.context.window_manager.session.replay_mode == 'MANUAL':
        bpy.ops.session.load(
            filepath=bpy.context.window_manager.session.replay_files[value].name,
            draw_users=True,
            replay=True)


def get_active_replay(self):
    return self.get('active_replay_file', 0)


def set_replay_persistent_collection(self, value):
    if hasattr(self, 'replay_persistent_collection'):
        self["replay_persistent_collection"] = value
    else:
        self.replay_persistent_collection = value

    collection = bpy.data.collections.get("multiuser_timelapse", None)

    if collection is None and value:
        collection = bpy.data.collections.new('multiuser_timelapse')
        cam = bpy.data.cameras.get('multiuser_timelapse_cam', bpy.data.cameras.new('multiuser_timelapse_cam'))
        cam_obj = bpy.data.objects.get('multiuser_timelapse_cam_obj', bpy.data.objects.new('multiuser_timelapse_cam_obj', cam))
        curve = bpy.data.curves.get('multiuser_timelapse_path', bpy.data.curves.new('multiuser_timelapse_path', 'CURVE'))
        curve_obj = bpy.data.objects.get('multiuser_timelapse_path_obj', bpy.data.objects.new('multiuser_timelapse_path_obj', curve))

        if cam_obj.name not in collection.objects:
            collection.objects.link(cam_obj)
        if curve_obj.name not in collection.objects:
            collection.objects.link(curve_obj)

        bpy.context.scene.collection.children.link(collection)
    elif collection and not value:
        for o in collection.objects:
            bpy.data.objects.remove(o)
        bpy.data.collections.remove(collection)


def get_replay_persistent_collection(self):
    return self.get('replay_persistent_collection', False)


def set_replay_duration(self, value):
    if hasattr(self, 'replay_duration'):
        self["replay_duration"] = value
    else:
        self.replay_duration = value

    # Update the animation fcurve
    replay_action = bpy.data.actions.get('replay_action')
    replay_fcurve = None

    for fcurve in replay_action.fcurves:
        if fcurve.data_path == 'active_replay_file':
            replay_fcurve = fcurve

    if replay_fcurve:
        for p in reversed(replay_fcurve.keyframe_points):
            replay_fcurve.keyframe_points.remove(p, fast=True)

        bpy.context.scene.frame_end = value
        files_count = len(bpy.context.window_manager.session.replay_files)-1
        for index in range(0, files_count):
            frame = interp(index, [0, files_count], [bpy.context.scene.frame_start, value])
            replay_fcurve.keyframe_points.insert(frame, index)


def get_replay_duration(self):
    return self.get('replay_duration', 10)


class ReplicatedDatablock(bpy.types.PropertyGroup):
    type_name: bpy.props.StringProperty()
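These replay properties all use the standard Blender pattern of a get/set pair backed by an ID property on the owner (`self["..."]`): writing the ID property in the setter avoids recursing into the property itself, and the getter supplies a default until the value is first written. A stripped-down sketch of the pattern with a hypothetical property name:

    import bpy

    def set_example(self, value):
        # Write to the underlying ID property; assigning self.example_value here would recurse.
        self["example_value"] = max(0, value)

    def get_example(self):
        # Read the ID property, with a default until it has been set once.
        return self.get("example_value", 0)

    bpy.types.Scene.example_value = bpy.props.IntProperty(
        name="example_value",
        get=get_example,
        set=set_example,
    )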
@@ -530,6 +612,37 @@ class SessionProps(bpy.types.PropertyGroup):

    is_host: bpy.props.BoolProperty(
        default=False
    )
    replay_files: bpy.props.CollectionProperty(
        name='File paths',
        type=bpy.types.OperatorFileListElement
    )
    replay_persistent_collection: bpy.props.BoolProperty(
        name="replay_persistent_collection",
        description='Enable a collection that persist accross frames loading',
        get=get_replay_persistent_collection,
        set=set_replay_persistent_collection,
    )
    replay_mode: bpy.props.EnumProperty(
        name='replay method',
        description='Replay in keyframe (timeline) or manually',
        items={
            ('TIMELINE', 'TIMELINE', 'Replay from the timeline.'),
            ('MANUAL', 'MANUAL', 'Replay manually, from the replay frame widget.')},
        default='TIMELINE')
    replay_duration: bpy.props.IntProperty(
        name='replay interval',
        default=250,
        min=10,
        set=set_replay_duration,
        get=get_replay_duration,
    )
    replay_frame_current: bpy.props.IntProperty(
        name='replay_frame_current',
    )
    replay_camera: bpy.props.PointerProperty(
        name='Replay camera',
        type=bpy.types.Object
    )


classes = (
@@ -552,9 +665,20 @@ def register():

    logging.debug('Generating bl_types preferences')
    prefs.generate_supported_types()

    bpy.types.Scene.active_replay_file = bpy.props.IntProperty(
        name="active_replay_file",
        default=0,
        min=0,
        description='Active snapshot',
        set=set_active_replay,
        get=get_active_replay,
        options={'ANIMATABLE'}
    )


def unregister():
    from bpy.utils import unregister_class

    for cls in reversed(classes):
        unregister_class(cls)

    del bpy.types.Scene.active_replay_file
multi_user/ui.py

@@ -615,6 +615,39 @@ class VIEW3D_PT_overlay_session(bpy.types.Panel):

        row.active = settings.presence_show_user
        row.prop(settings, "presence_show_far_user")


class SESSION_PT_replay(bpy.types.Panel):
    bl_idname = "MULTIUSER_REPLAY_PT_panel"
    bl_label = "Replay"
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
    bl_options = {'DEFAULT_CLOSED'}

    @classmethod
    def poll(cls, context):
        return context.window_manager.session.replay_files

    def draw_header(self, context):
        self.layout.label(text="", icon='RECOVER_LAST')

    def draw(self, context):
        layout = self.layout
        settings = context.window_manager.session
        row = layout.row()
        row.prop(settings, 'replay_mode', toggle=True, expand=True)
        row = layout.row()
        if settings.replay_mode == 'MANUAL':
            row.prop(bpy.context.scene, 'active_replay_file', text="Snapshot index")
        else:
            row.prop(settings, 'replay_duration', text="Replay Duration")
        row = layout.row()
        row.prop(settings, 'replay_persistent_collection', text="persistent collection", toggle=True, icon='OUTLINER_COLLECTION')

        if settings.replay_persistent_collection:
            row = layout.row()
            row.prop(settings, 'replay_camera', text="", icon='VIEW_CAMERA')


classes = (
    SESSION_UL_users,
    SESSION_PT_settings,

@@ -624,6 +657,7 @@ classes = (

    SESSION_PT_advanced_settings,
    SESSION_PT_user,
    SESSION_PT_repository,
+   SESSION_PT_replay,
    VIEW3D_PT_overlay_session,
)
multi_user/utils.py

@@ -100,18 +100,6 @@ def get_state_str(state):

    return state_str


-def clean_scene():
-    for type_name in dir(bpy.data):
-        try:
-            type_collection = getattr(bpy.data, type_name)
-            for item in type_collection:
-                type_collection.remove(item)
-        except:
-            continue
-
-    # Clear sequencer
-    bpy.context.scene.sequence_editor_clear()
-

def get_selected_objects(scene, active_view_layer):
    return [obj.uuid for obj in scene.objects if obj.select_get(view_layer=active_view_layer)]
@@ -13,7 +13,7 @@ def main():

    if len(sys.argv) > 2:
        blender_rev = sys.argv[2]
    else:
-       blender_rev = "2.91.0"
+       blender_rev = "2.92.0"

    try:
        exit_val = BAT.test_blender_addon(addon_path=addon, blender_revision=blender_rev)
@@ -7,7 +7,7 @@ import bpy

import random
from multi_user.bl_types.bl_object import BlObject

-# Removed 'BUILD' modifier because the seed doesn't seems to be
+# Removed 'BUILD', 'SOFT_BODY' modifier because the seed doesn't seems to be
# correctly initialized (#TODO: report the bug)
MOFIFIERS_TYPES = [
    'DATA_TRANSFER', 'MESH_CACHE', 'MESH_SEQUENCE_CACHE',

@@ -22,8 +22,7 @@ MOFIFIERS_TYPES = [

    'MESH_DEFORM', 'SHRINKWRAP', 'SIMPLE_DEFORM', 'SMOOTH',
    'CORRECTIVE_SMOOTH', 'LAPLACIANSMOOTH', 'SURFACE_DEFORM',
    'WARP', 'WAVE', 'CLOTH', 'COLLISION', 'DYNAMIC_PAINT',
-   'EXPLODE', 'FLUID', 'OCEAN', 'PARTICLE_INSTANCE',
-   'SOFT_BODY', 'SURFACE']
+   'EXPLODE', 'FLUID', 'OCEAN', 'PARTICLE_INSTANCE', 'SURFACE']

GP_MODIFIERS_TYPE = [
    'GP_ARRAY', 'GP_BUILD', 'GP_MIRROR', 'GP_MULTIPLY',

@@ -72,5 +71,5 @@ def test_object(clear_blend):

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    print(DeepDiff(expected, result))
    assert not DeepDiff(expected, result)