Compare commits

...

50 Commits

SHA1 Message Date
2ad93cf304 Merge branch 'develop' into remove-services 2021-04-30 16:53:02 +02:00
771d76a98b fix: missing shapekeys attr 2021-04-30 16:51:11 +02:00
1e83241494 feat: remove pull socket 2021-04-30 16:26:20 +02:00
1bcbff3ed4 Merge branch 'develop' into remove-services 2021-04-29 14:41:55 +02:00
9a45fe7125 fix: shapekey animation data 2021-04-29 14:41:11 +02:00
207901afdd Merge branch '184-uv-project-modifier-target-object-doesn-t-sync' into 'develop'
Resolve "UV project modifier target object doesn't sync"

See merge request slumber/multi-user!118
2021-04-29 09:11:47 +00:00
c6eb1ba22f fix: shapekey performances
Related to #187
2021-04-29 11:06:46 +02:00
ba4168d0fd Merge branch 'develop' into remove-services 2021-04-28 16:56:20 +02:00
00e7adf022 fix: Image Empty is not loading.
Related to #186
2021-04-28 10:01:04 +02:00
d9d8ca7ca0 revert: image source replication until a proper fix is done 2021-04-23 15:35:19 +02:00
e8cd271bd8 fix: re-enable gitlab-ci file 2021-04-23 11:48:01 +02:00
e71af6402c feat: increment addon version 2021-04-23 11:46:29 +02:00
dd1c6a4fc7 feat: enable back ci 2021-04-23 11:45:47 +02:00
7fe1ae83b1 feat: update replication version to the right one 2021-04-23 11:25:15 +02:00
a7ad9d30c3 Merge branch 'develop' into remove-services 2021-04-23 11:21:16 +02:00
14779be1ed feat: support video file as camera background images 2021-04-22 15:52:06 +02:00
a36c3740cc fix: load driver variable without id 2021-04-22 15:00:08 +02:00
d2108facab feat: fcurve modifiers support 2021-04-22 14:52:43 +02:00
e5651151d9 fix: having both animation and drivers on the same object 2021-04-22 14:00:26 +02:00
fb61b380b6 fix: uv_projector modifier
refactor: move modifier related code to dump_modifiers and load_modifier_custom_data
2021-04-22 11:05:34 +02:00
e538752fbc Merge branch 'master' of gitlab.com:slumber/multi-user into develop 2021-04-15 15:31:59 +02:00
53eaaa2fcd fix: auto-updater operator registration for blender 2.93 compatibility 2021-04-15 15:28:59 +02:00
a7e9108bff Merge branch 'develop' into 'master'
v0.3.0

See merge request slumber/multi-user!106
2021-04-14 14:32:24 +00:00
570909a7c4 fix: prevent field from being dumped if unused
fix: bl_object tests
2021-04-14 16:25:21 +02:00
736c3df7c4 feat: remove new particle systems
clean: remove logs
2021-04-14 15:50:53 +02:00
8e606068f3 fix: particle system duplication
feat: update Readme
2021-04-14 15:29:02 +02:00
eb631e2d4b feat: update changelog 0.3.0 release 2021-04-14 14:36:06 +02:00
70641435cc feat: initial rigid body supports 2021-04-14 12:25:16 +02:00
552c649d34 feat: physics forcefield and collision support 2021-04-14 11:49:34 +02:00
d9d5a34653 clean: remove libs 2021-04-14 09:56:07 +02:00
12acd22660 feat: ignore some attributes 2021-04-14 09:54:34 +02:00
826a59085e feat: particle texture slot support 2021-04-14 09:45:18 +02:00
5ee4988aca Merge branch '24-particle-support' into develop 2021-04-13 22:45:27 +02:00
cb85a1db4c feat: dual identification for object parents 2021-04-13 14:37:43 +02:00
3a02711baa feat: faster root management 2021-03-14 20:58:25 +01:00
c7e8002fed fix: apply api
clean: ipc port property
2021-03-14 18:32:04 +01:00
f4e7ec6be8 Merge branch 'develop' into 173-differential-revision-milestone-2-replication-refactoring 2021-03-14 17:46:23 +01:00
235db712fd fix: api 2021-03-11 15:45:48 +01:00
647ac46c01 feat: move apply to porcelain
feat: move data access to repository
feat: object_store layer to repository (with GraphObjectStore)
revert: missing network services
2021-03-09 14:07:59 +01:00
8e3c86561f refactor: move add to porcelain 2021-03-09 10:19:51 +01:00
dba19e831d Merge branch 'develop' into 173-differential-revision-milestone-2-replication-refactoring 2021-03-08 22:16:14 +01:00
93df5ca5fa fix: disconnect callback 2021-03-06 10:20:57 +01:00
b17104c67e fix: naming 2021-03-05 10:35:35 +01:00
875b9ce934 feat: temporary disable CI jobs for this branch because of breaking changes 2021-03-04 14:24:03 +01:00
2d638ef76f refactor: interface api changes 2021-03-04 14:22:54 +01:00
ffe419a46e Merge branch 'develop' into 'master'
v0.2.0

See merge request slumber/multi-user!73
2020-12-17 13:34:41 +00:00
bed33ca6ba Merge branch 'develop' into 'master'
v0.1.1

See merge request slumber/multi-user!54
2020-10-16 09:11:20 +00:00
56ea93508c Merge branch 'develop' into 24-particle-support 2020-04-03 18:23:29 +02:00
5f95eadc1d feat: test particle cache access 2020-03-11 18:37:43 +01:00
40ad96b0af feat: initial particle system support
Related to #24
2020-03-11 17:45:56 +01:00
25 changed files with 612 additions and 243 deletions

View File

@ -157,4 +157,33 @@ All notable changes to this project will be documented in this file.
- Empty and Light object selection highlights
- Material renaming
- Default material nodes input parameters
- blender 2.91 python api compatibility
- blender 2.91 python api compatibility
## [0.3.0] - 2021-04-14
### Added
- Curve material support
- Cycles visibility settings
- Session save/load operator
- Add new scene support
- Physics initial support
- Geometry node initial support
- Blender 2.93 compatibility
### Changed
- Host documentation on Gitlab Page
- Event driven update (from the blender deps graph)
### Fixed
- Vertex group assignation
- Parent relation can't be removed
- Separate object
- Delete animation
- Sync missing holdout option for grease pencil material
- Sync missing `skin_vertices`
- Exception access violation during Undo/Redo
- Sync missing armature bone Roll
- Sync missing driver data_path
- Constraint replication

View File

@ -29,35 +29,35 @@ See the [troubleshooting guide](https://slumber.gitlab.io/multi-user/getting_sta
Currently, not all data-blocks are supported for replication over the wire. The following list summarizes the status of each one.
| Name | Status | Comment |
| -------------- | :----: | :--------------------------------------------------------------------------: |
| action | ✔️ | |
| armature | | Not stable |
| camera | ✔️ | |
| collection | ✔️ | |
| curve | ❗ | Nurbs surfaces not supported |
| gpencil | ✔️ | [Airbrush not supported](https://gitlab.com/slumber/multi-user/-/issues/123) |
| image | ✔️ | |
| mesh | ✔️ | |
| material | ✔️ | |
| node_groups | ❗ | Material only |
| geometry nodes | ✔️ | |
| metaball | ✔️ | |
| object | ✔️ | |
| textures | ❗ | Supported for modifiers/materials only |
| texts | ✔️ | |
| scene | ✔️ | |
| world | ✔️ | |
| lightprobes | ✔️ | |
| compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
| nla | | |
| volumes | ✔️ | |
| particles | | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24) |
| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
| vse | ❗ | Mask and Clip not supported yet |
| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| libraries | ❗ | Partial |
| Name | Status | Comment |
| -------------- | :----: | :----------------------------------------------------------: |
| action | ✔️ | |
| armature | | Not stable |
| camera | ✔️ | |
| collection | ✔️ | |
| curve | ❗ | Nurbs surfaces not supported |
| gpencil | ✔️ | |
| image | ✔️ | |
| mesh | ✔️ | |
| material | ✔️ | |
| node_groups | ❗ | Material & Geometry only |
| geometry nodes | ✔️ | |
| metaball | ✔️ | |
| object | ✔️ | |
| textures | ❗ | Supported for modifiers/materials/geo nodes only |
| texts | ✔️ | |
| scene | ✔️ | |
| world | ✔️ | |
| lightprobes | ✔️ | |
| compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
| nla | | |
| volumes | ✔️ | |
| particles | | The cache isn't syncing. |
| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
| vse | ❗ | Mask and Clip not supported yet |
| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| libraries | ❗ | Partial |
@ -70,7 +70,7 @@ I'm working on it.
| Dependencies | Version | Needed |
| ------------ | :-----: | -----: |
| Replication | latest | yes |
| Replication | latest | yes |

View File

@ -374,15 +374,6 @@ Network
Advanced network settings
**IPC Port** is the port used for Inter Process Communication. This port is used
by the multi-user subprocesses to communicate with each other. If different instances
of multi-user are using the same IPC port, this will create conflicts!
.. note::
You only need to modify this setting if you need to launch multiple clients from the same
computer (or if you try to host and join from the same computer). To resolve this, you simply need to enter a different
**IPC port** for each blender instance.
**Timeout (in milliseconds)** is the maximum ping authorized before auto-disconnecting.
You should only increase it if you have a bad connection.

View File

@ -19,7 +19,7 @@
bl_info = {
"name": "Multi-User",
"author": "Swann Martinez",
"version": (0, 3, 0),
"version": (0, 4, 0),
"description": "Enable real-time collaborative workflow inside blender",
"blender": (2, 82, 0),
"location": "3D View > Sidebar > Multi-User tab",
@ -44,7 +44,7 @@ from . import environment
DEPENDENCIES = {
("replication", '0.1.26'),
("replication", '0.1.33'),
}

View File

@ -122,13 +122,13 @@ class addon_updater_install_popup(bpy.types.Operator):
# if true, run clean install - ie remove all files before adding new
# equivalent to deleting the addon and reinstalling, except the
# updater folder/backup folder remains
clean_install = bpy.props.BoolProperty(
clean_install: bpy.props.BoolProperty(
name="Clean install",
description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
default=False,
options={'HIDDEN'}
)
ignore_enum = bpy.props.EnumProperty(
ignore_enum: bpy.props.EnumProperty(
name="Process update",
description="Decide to install, ignore, or defer new addon update",
items=[
@ -264,7 +264,7 @@ class addon_updater_update_now(bpy.types.Operator):
# if true, run clean install - ie remove all files before adding new
# equivalent to deleting the addon and reinstalling, except the
# updater folder/backup folder remains
clean_install = bpy.props.BoolProperty(
clean_install: bpy.props.BoolProperty(
name="Clean install",
description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
default=False,
@ -332,7 +332,7 @@ class addon_updater_update_target(bpy.types.Operator):
i+=1
return ret
target = bpy.props.EnumProperty(
target: bpy.props.EnumProperty(
name="Target version to install",
description="Select the version to install",
items=target_version
@ -341,7 +341,7 @@ class addon_updater_update_target(bpy.types.Operator):
# if true, run clean install - ie remove all files before adding new
# equivalent to deleting the addon and reinstalling, except the
# updater folder/backup folder remains
clean_install = bpy.props.BoolProperty(
clean_install: bpy.props.BoolProperty(
name="Clean install",
description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
default=False,
@ -399,7 +399,7 @@ class addon_updater_install_manually(bpy.types.Operator):
bl_description = "Proceed to manually install update"
bl_options = {'REGISTER', 'INTERNAL'}
error = bpy.props.StringProperty(
error: bpy.props.StringProperty(
name="Error Occurred",
default="",
options={'HIDDEN'}
@ -461,7 +461,7 @@ class addon_updater_updated_successful(bpy.types.Operator):
bl_description = "Update installation response"
bl_options = {'REGISTER', 'INTERNAL', 'UNDO'}
error = bpy.props.StringProperty(
error: bpy.props.StringProperty(
name="Error Occurred",
default="",
options={'HIDDEN'}
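The `=` → `:` changes above switch the updater's properties from class assignments to class annotations, which Blender requires since 2.80 for `bpy.props` registration and which 2.93 enforces strictly (hence commit 53eaaa2fcd above). A minimal sketch of the pattern, using a hypothetical operator that is not part of this patch:

import bpy

class EXAMPLE_OT_annotation_demo(bpy.types.Operator):
    bl_idname = "example.annotation_demo"
    bl_label = "Annotation Demo"

    # pre-2.80 assignment style, removed by this patch:
    # clean_install = bpy.props.BoolProperty(default=False)

    # 2.80+ annotation style, added by this patch:
    clean_install: bpy.props.BoolProperty(
        name="Clean install",
        default=False,
        options={'HIDDEN'}
    )

    def execute(self, context):
        return {'FINISHED'}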

View File

@ -42,13 +42,14 @@ __all__ = [
# 'bl_sequencer',
'bl_node_group',
'bl_texture',
"bl_particle",
] # Order here defines execution order
if bpy.app.version[1] >= 91:
__all__.append('bl_volume')
from . import *
from replication.data import ReplicatedDataFactory
from replication.data import DataTranslationProtocol
def types_to_register():
return __all__

View File

@ -25,7 +25,7 @@ from enum import Enum
from .. import utils
from .dump_anything import (
Dumper, Loader, np_dump_collection, np_load_collection, remove_items_from_dict)
from .bl_datablock import BlDatablock
from .bl_datablock import BlDatablock, has_action, has_driver, dump_driver, load_driver
KEYFRAME = [
@ -61,7 +61,6 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
points = fcurve.keyframe_points
fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)
fcurve_data['keyframe_points'] = np_dump_collection(points, KEYFRAME)
else: # Legacy method
dumper = Dumper()
fcurve_data["keyframe_points"] = []
@ -71,6 +70,18 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
dumper.dump(k)
)
if fcurve.modifiers:
dumper = Dumper()
dumper.exclude_filter = [
'is_valid',
'active'
]
dumped_modifiers = []
for modifier in fcurve.modifiers:
dumped_modifiers.append(dumper.dump(modifier))
fcurve_data['modifiers'] = dumped_modifiers
return fcurve_data
@ -83,7 +94,7 @@ def load_fcurve(fcurve_data, fcurve):
:type fcurve: bpy.types.FCurve
"""
use_numpy = fcurve_data.get('use_numpy')
loader = Loader()
keyframe_points = fcurve.keyframe_points
# Remove all keyframe points
@ -128,6 +139,64 @@ def load_fcurve(fcurve_data, fcurve):
fcurve.update()
dumped_fcurve_modifiers = fcurve_data.get('modifiers', None)
if dumped_fcurve_modifiers:
# clear modifiers
for fmod in fcurve.modifiers:
fcurve.modifiers.remove(fmod)
# Load each modifier in order
for modifier_data in dumped_fcurve_modifiers:
modifier = fcurve.modifiers.new(modifier_data['type'])
loader.load(modifier, modifier_data)
elif fcurve.modifiers:
for fmod in fcurve.modifiers:
fcurve.modifiers.remove(fmod)
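With the two hunks above, `dump_fcurve` now serializes F-Modifiers and `load_fcurve` rebuilds them in order (or clears leftovers when the dump carries none). A hedged round-trip sketch, assuming these helpers are importable from `multi_user.bl_types.bl_action`:

import bpy
from multi_user.bl_types.bl_action import dump_fcurve, load_fcurve

action = bpy.data.actions.new("demo")
fcu = action.fcurves.new("location", index=0)
fcu.modifiers.new('NOISE')           # any F-Modifier type works

dumped = dump_fcurve(fcu)            # now carries a 'modifiers' list
load_fcurve(dumped, fcu)             # removes then re-creates the modifiers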
def dump_animation_data(datablock):
animation_data = {}
if has_action(datablock):
animation_data['action'] = datablock.animation_data.action.name
if has_driver(datablock):
animation_data['drivers'] = []
for driver in datablock.animation_data.drivers:
animation_data['drivers'].append(dump_driver(driver))
return animation_data
def load_animation_data(animation_data, datablock):
# Load animation data
if animation_data:
if datablock.animation_data is None:
datablock.animation_data_create()
for d in datablock.animation_data.drivers:
datablock.animation_data.drivers.remove(d)
if 'drivers' in animation_data:
for driver in animation_data['drivers']:
load_driver(datablock, driver)
if 'action' in animation_data:
datablock.animation_data.action = bpy.data.actions[animation_data['action']]
elif datablock.animation_data.action:
datablock.animation_data.action = None
# Remove existing animation data if there is nothing more to load
elif hasattr(datablock, 'animation_data') and datablock.animation_data:
datablock.animation_data_clear()
def resolve_animation_dependencies(datablock):
if has_action(datablock):
return [datablock.animation_data.action]
else:
return []
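`dump_animation_data`, `load_animation_data` and `resolve_animation_dependencies` factor the action/driver handling out of `BlDatablock._dump` so other types (the shape keys further down, for instance) can reuse it. A hedged usage sketch, assuming two objects of which only the first is animated:

import bpy
from multi_user.bl_types.bl_action import dump_animation_data, load_animation_data

src = bpy.data.objects["Cube"]       # hypothetical animated object
dst = bpy.data.objects["Cube.001"]   # hypothetical target object

dumped = dump_animation_data(src)    # {'action': name, 'drivers': [...]} or {}
load_animation_data(dumped, dst)     # re-links the action and rebuilds drivers,
                                     # or clears animation data on an empty dump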
class BlAction(BlDatablock):
bl_id = "actions"

View File

@ -56,6 +56,11 @@ class BlCamera(BlDatablock):
target_img.image = bpy.data.images[img_id]
loader.load(target_img, img_data)
img_user = img_data.get('image_user')
if img_user:
loader.load(target_img.image_user, img_user)
def _dump_implementation(self, data, instance=None):
assert(instance)
@ -101,10 +106,19 @@ class BlCamera(BlDatablock):
'scale',
'use_flip_x',
'use_flip_y',
'image'
'image_user',
'image',
'frame_duration',
'frame_start',
'frame_offset',
'use_cyclic',
'use_auto_refresh'
]
return dumper.dump(instance)
data = dumper.dump(instance)
for index, image in enumerate(instance.background_images):
if image.image_user:
data['background_images'][index]['image_user'] = dumper.dump(image.image_user)
return data
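Dumping `image_user` per background image is what makes video files usable as camera backgrounds (commit 14779be1ed above): the playback fields now survive the round trip. A sketch of the settings involved, assuming a camera with one background image already configured:

import bpy

cam = bpy.data.cameras["Camera"]     # hypothetical camera datablock
bg = cam.background_images[0]
# fields newly included in the dump above:
bg.image_user.frame_duration = 250
bg.image_user.frame_start = 1
bg.image_user.frame_offset = 0
bg.image_user.use_cyclic = True
bg.image_user.use_auto_refresh = True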
def _resolve_deps_implementation(self):
deps = []
for background in self.instance.background_images:

View File

@ -72,10 +72,10 @@ def load_driver(target_datablock, src_driver):
for src_target in src_var_data['targets']:
src_target_data = src_var_data['targets'][src_target]
new_var.targets[src_target].id = utils.resolve_from_id(
src_target_data['id'], src_target_data['id_type'])
loader.load(
new_var.targets[src_target], src_target_data)
src_id = src_target_data.get('id')
if src_id:
new_var.targets[src_target].id = utils.resolve_from_id(src_target_data['id'], src_target_data['id_type'])
loader.load(new_var.targets[src_target], src_target_data)
# Fcurve
new_fcurve = new_driver.keyframe_points
@ -161,19 +161,17 @@ class BlDatablock(ReplicatedDatablock):
def _dump(self, instance=None):
dumper = Dumper()
data = {}
animation_data = {}
# Dump animation data
if has_action(instance):
dumper = Dumper()
dumper.include_filter = ['action']
data['animation_data'] = dumper.dump(instance.animation_data)
animation_data['action'] = instance.animation_data.action.name
if has_driver(instance):
dumped_drivers = {'animation_data': {'drivers': []}}
animation_data['drivers'] = []
for driver in instance.animation_data.drivers:
dumped_drivers['animation_data']['drivers'].append(
dump_driver(driver))
animation_data['drivers'].append(dump_driver(driver))
data.update(dumped_drivers)
if animation_data:
data['animation_data'] = animation_data
if self.is_library:
data.update(dumper.dump(instance))
@ -200,6 +198,9 @@ class BlDatablock(ReplicatedDatablock):
if 'action' in data['animation_data']:
target.animation_data.action = bpy.data.actions[data['animation_data']['action']]
elif target.animation_data.action:
target.animation_data.action = None
# Remove existing animation data if there is nothing more to load
elif hasattr(target, 'animation_data') and target.animation_data:
target.animation_data_clear()

View File

@ -134,6 +134,8 @@ class BlFile(ReplicatedDatablock):
if self.preferences.clear_memory_filecache:
return False
else:
if not self.instance:
return False
memory_size = sys.getsizeof(self.data['file'])-33
disk_size = self.instance.stat().st_size
return memory_size != disk_size

View File

@ -68,7 +68,10 @@ class BlImage(BlDatablock):
target.source = 'FILE'
target.filepath_raw = get_filepath(data['filename'])
target.colorspace_settings.name = data["colorspace_settings"]["name"]
color_space_name = data["colorspace_settings"]["name"]
if color_space_name:
target.colorspace_settings.name = color_space_name
def _dump(self, instance=None):
assert(instance)
@ -83,6 +86,7 @@ class BlImage(BlDatablock):
dumper.depth = 2
dumper.include_filter = [
"name",
# 'source',
'size',
'height',
'alpha',

View File

@ -24,6 +24,7 @@ from replication.exception import ContextError
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from .bl_material import IGNORED_SOCKETS
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
from .dump_anything import (
Dumper,
Loader,
@ -37,6 +38,12 @@ SKIN_DATA = [
'use_root'
]
SHAPEKEY_BLOCK_ATTR = [
'mute',
'value',
'slider_min',
'slider_max',
]
if bpy.app.version[1] >= 93:
SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
else:
@ -54,6 +61,60 @@ def get_node_group_inputs(node_group):
return inputs
# return [inpt.identifer for inpt in node_group.inputs if inpt.type not in IGNORED_SOCKETS]
def dump_physics(target: bpy.types.Object)->dict:
"""
Dump all physics settings from a given object excluding modifier
related physics settings (such as softbody, cloth, dynapaint and fluid)
"""
dumper = Dumper()
dumper.depth = 1
physics_data = {}
# Collisions (collision)
if target.collision and target.collision.use:
physics_data['collision'] = dumper.dump(target.collision)
# Field (field)
if target.field and target.field.type != "NONE":
physics_data['field'] = dumper.dump(target.field)
# Rigid Body (rigid_body)
if target.rigid_body:
physics_data['rigid_body'] = dumper.dump(target.rigid_body)
# Rigid Body constraint (rigid_body_constraint)
if target.rigid_body_constraint:
physics_data['rigid_body_constraint'] = dumper.dump(target.rigid_body_constraint)
return physics_data
def load_physics(dumped_settings: dict, target: bpy.types.Object):
""" Load all physics settings from a given object excluding modifier
related physics settings (such as softbody, cloth, dynapaint and fluid)
"""
loader = Loader()
if 'collision' in dumped_settings:
loader.load(target.collision, dumped_settings['collision'])
if 'field' in dumped_settings:
loader.load(target.field, dumped_settings['field'])
if 'rigid_body' in dumped_settings:
if not target.rigid_body:
bpy.ops.rigidbody.object_add({"object": target})
loader.load(target.rigid_body, dumped_settings['rigid_body'])
elif target.rigid_body:
bpy.ops.rigidbody.object_remove({"object": target})
if 'rigid_body_constraint' in dumped_settings:
if not target.rigid_body_constraint:
bpy.ops.rigidbody.constraint_add({"object": target})
loader.load(target.rigid_body_constraint, dumped_settings['rigid_body_constraint'])
elif target.rigid_body_constraint:
bpy.ops.rigidbody.constraint_remove({"object": target})
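`dump_physics`/`load_physics` round-trip the non-modifier physics (collision, force field, rigid body and its constraint), going through the `rigidbody` operators to create or remove rigid body data since it cannot be assigned directly. A hedged sketch, assuming the helpers live in `multi_user.bl_types.bl_object`:

import bpy
from multi_user.bl_types.bl_object import dump_physics, load_physics

obj = bpy.data.objects["Cube"]       # hypothetical object with a rigid body
settings = dump_physics(obj)         # e.g. {'rigid_body': {...}, 'field': {...}}
load_physics(settings, obj)          # adds/removes rigid body state to match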
def dump_modifier_geometry_node_inputs(modifier: bpy.types.Modifier) -> list:
""" Dump geometry node modifier input properties
@ -186,7 +247,7 @@ def find_geometry_nodes_dependencies(modifiers: bpy.types.bpy_prop_collection) -
# parameter = mod.get(inpt.identifier)
# if parameter and isinstance(parameter, bpy.types.ID):
# dependencies.append(parameter)
logging.info(dependencies)
return dependencies
@ -234,7 +295,147 @@ def load_vertex_groups(dumped_vertex_groups: dict, target_object: bpy.types.Obje
for index, weight in vg['vertices']:
vertex_group.add([index], weight, 'REPLACE')
def dump_shape_keys(target_key: bpy.types.Key)->dict:
""" Dump the target shape_keys datablock to a dict using numpy
:param target_key: target key datablock
:type target_key: bpy.types.Key
:return: dict
"""
dumped_key_blocks = []
dumper = Dumper()
dumper.include_filter = [
'name',
'mute',
'value',
'slider_min',
'slider_max',
]
for key in target_key.key_blocks:
dumped_key_block = dumper.dump(key)
dumped_key_block['data'] = np_dump_collection(key.data, ['co'])
dumped_key_block['relative_key'] = key.relative_key.name
dumped_key_blocks.append(dumped_key_block)
return {
'reference_key': target_key.reference_key.name,
'use_relative': target_key.use_relative,
'key_blocks': dumped_key_blocks,
'animation_data': dump_animation_data(target_key)
}
def load_shape_keys(dumped_shape_keys: dict, target_object: bpy.types.Object):
""" Load the target shape_keys datablock to a dict using numpy
:param dumped_key: src key data
:type dumped_key: bpy.types.Key
:param target_object: object used to load the shapekeys data onto
:type target_object: bpy.types.Object
"""
loader = Loader()
# Remove existing ones
target_object.shape_key_clear()
# Create keys and load vertices coords
dumped_key_blocks = dumped_shape_keys.get('key_blocks')
for dumped_key_block in dumped_key_blocks:
key_block = target_object.shape_key_add(name=dumped_key_block['name'])
loader.load(key_block, dumped_key_block)
np_load_collection(dumped_key_block['data'], key_block.data, ['co'])
# Load relative key after all
for dumped_key_block in dumped_key_blocks:
relative_key_name = dumped_key_block.get('relative_key')
key_name = dumped_key_block.get('name')
target_keyblock = target_object.data.shape_keys.key_blocks[key_name]
relative_key = target_object.data.shape_keys.key_blocks[relative_key_name]
target_keyblock.relative_key = relative_key
# Shape keys animation data
anim_data = dumped_shape_keys.get('animation_data')
if anim_data:
load_animation_data(anim_data, target_object.data.shape_keys)
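`dump_shape_keys`/`load_shape_keys` replace the ad-hoc shape key code previously inlined in `BlObject` (see its removal further down), use numpy for the vertex coordinates, and carry the shape keys' own animation data. A hedged round-trip sketch for two mesh objects:

import bpy
from multi_user.bl_types.bl_object import dump_shape_keys, load_shape_keys

src = bpy.data.objects["Cube"]       # hypothetical mesh with shape keys
dst = bpy.data.objects["Cube.001"]   # hypothetical mesh target

if src.data.shape_keys:
    dumped = dump_shape_keys(src.data.shape_keys)
    load_shape_keys(dumped, dst)     # clears dst's keys, rebuilds key blocks,
                                     # then restores relative_key links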
def dump_modifiers(modifiers: bpy.types.bpy_prop_collection)->dict:
""" Dump all modifiers of a modifier collection into a dict
:param modifiers: modifiers
:type modifiers: bpy.types.bpy_prop_collection
:return: dict
"""
dumped_modifiers = {}
dumper = Dumper()
dumper.depth = 1
dumper.exclude_filter = ['is_active']
for index, modifier in enumerate(modifiers):
dumped_modifier = dumper.dump(modifier)
# hack to dump geometry nodes inputs
if modifier.type == 'NODES':
dumped_inputs = dump_modifier_geometry_node_inputs(
modifier)
dumped_modifier['inputs'] = dumped_inputs
elif modifier.type == 'PARTICLE_SYSTEM':
dumper.exclude_filter = [
"is_edited",
"is_editable",
"is_global_hair"
]
dumped_modifier['particle_system'] = dumper.dump(modifier.particle_system)
dumped_modifier['particle_system']['settings_uuid'] = modifier.particle_system.settings.uuid
elif modifier.type in ['SOFT_BODY', 'CLOTH']:
dumped_modifier['settings'] = dumper.dump(modifier.settings)
elif modifier.type == 'UV_PROJECT':
dumped_modifier['projectors'] = [p.object.name for p in modifier.projectors if p and p.object]
dumped_modifiers[modifier.name] = dumped_modifier
return dumped_modifiers
def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_prop_collection):
""" Load modifiers custom data not managed by the dump_anything loader
:param dumped_modifiers: modifiers to load
:type dumped_modifiers: dict
:param modifiers: target modifiers collection
:type modifiers: bpy.types.bpy_prop_collection
"""
loader = Loader()
for modifier in modifiers:
dumped_modifier = dumped_modifiers.get(modifier.name)
if modifier.type == 'NODES':
load_modifier_geometry_node_inputs(dumped_modifier, modifier)
elif modifier.type == 'PARTICLE_SYSTEM':
default = modifier.particle_system.settings
dumped_particles = dumped_modifier['particle_system']
loader.load(modifier.particle_system, dumped_particles)
settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
if settings:
modifier.particle_system.settings = settings
# Hack to remove the default generated particle settings
if not default.uuid:
bpy.data.particles.remove(default)
elif modifier.type in ['SOFT_BODY', 'CLOTH']:
loader.load(modifier.settings, dumped_modifier['settings'])
elif modifier.type == 'UV_PROJECT':
for projector_index, projector_object in enumerate(dumped_modifier['projectors']):
target_object = bpy.data.objects.get(projector_object)
if target_object:
modifier.projectors[projector_index].object = target_object
else:
logging.error("Could't load projector target object {projector_object}")
class BlObject(BlDatablock):
bl_id = "objects"
bl_class = bpy.types.Object
@ -258,13 +459,14 @@ class BlObject(BlDatablock):
object_name = data.get("name")
data_uuid = data.get("data_uuid")
data_id = data.get("data")
data_type = data.get("type")
object_data = get_datablock_from_uuid(
data_uuid,
find_data_from_name(data_id),
ignore=['images']) # TODO: use resolve_from_id
if object_data is None and data_uuid:
if data_type != 'EMPTY' and object_data is None:
raise Exception(f"Fail to load object {data['name']}({self.uuid})")
instance = bpy.data.objects.new(object_name, object_data)
@ -290,24 +492,9 @@ class BlObject(BlDatablock):
object_data = target.data
# SHAPE KEYS
if 'shape_keys' in data:
target.shape_key_clear()
# Create keys and load vertices coords
for key_block in data['shape_keys']['key_blocks']:
key_data = data['shape_keys']['key_blocks'][key_block]
target.shape_key_add(name=key_block)
loader.load(
target.data.shape_keys.key_blocks[key_block], key_data)
for vert in key_data['data']:
target.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
# Load relative key after all
for key_block in data['shape_keys']['key_blocks']:
reference = data['shape_keys']['key_blocks'][key_block]['relative_key']
target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference]
shape_keys = data.get('shape_keys')
if shape_keys:
load_shape_keys(shape_keys, target)
# Load transformation data
loader.load(target, data)
@ -317,9 +504,9 @@ class BlObject(BlDatablock):
loader.load(target.display, data['display'])
# Parenting
parent_id = data.get('parent_id')
parent_id = data.get('parent_uid')
if parent_id:
parent = bpy.data.objects[parent_id]
parent = get_datablock_from_uuid(parent_id[0], bpy.data.objects[parent_id[1]])
# Avoid reloading
if target.parent != parent and parent is not None:
target.parent = parent
@ -373,13 +560,11 @@ class BlObject(BlDatablock):
and 'cycles_visibility' in data:
loader.load(target.cycles_visibility, data['cycles_visibility'])
# TODO: handle geometry nodes input from dump_anything
if hasattr(target, 'modifiers'):
nodes_modifiers = [
mod for mod in target.modifiers if mod.type == 'NODES']
for modifier in nodes_modifiers:
load_modifier_geometry_node_inputs(
data['modifiers'][modifier.name], modifier)
load_modifiers_custom_data(data['modifiers'], target.modifiers)
# PHYSICS
load_physics(data, target)
transform = data.get('transforms', None)
if transform:
@ -388,6 +573,7 @@ class BlObject(BlDatablock):
target.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
target.matrix_local = mathutils.Matrix(transform['matrix_local'])
def _dump_implementation(self, data, instance=None):
assert(instance)
@ -450,22 +636,13 @@ class BlObject(BlDatablock):
# PARENTING
if instance.parent:
data['parent_id'] = instance.parent.name
data['parent_uid'] = (instance.parent.uuid, instance.parent.name)
# MODIFIERS
modifiers = getattr(instance, 'modifiers', None)
if hasattr(instance, 'modifiers'):
data["modifiers"] = {}
modifiers = getattr(instance, 'modifiers', None)
if modifiers:
dumper.include_filter = None
dumper.depth = 1
for index, modifier in enumerate(modifiers):
data["modifiers"][modifier.name] = dumper.dump(modifier)
# hack to dump geometry nodes inputs
if modifier.type == 'NODES':
dumped_inputs = dump_modifier_geometry_node_inputs(
modifier)
data["modifiers"][modifier.name]['inputs'] = dumped_inputs
data['modifiers'] = dump_modifiers(modifiers)
gp_modifiers = getattr(instance, 'grease_pencil_modifiers', None)
if gp_modifiers:
@ -487,6 +664,7 @@ class BlObject(BlDatablock):
'location']
gp_mod_data['curve'] = curve_dumper.dump(modifier.curve)
# CONSTRAINTS
if hasattr(instance, 'constraints'):
dumper.include_filter = None
@ -538,30 +716,7 @@ class BlObject(BlDatablock):
# SHAPE KEYS
object_data = instance.data
if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
dumper = Dumper()
dumper.depth = 2
dumper.include_filter = [
'reference_key',
'use_relative'
]
data['shape_keys'] = dumper.dump(object_data.shape_keys)
data['shape_keys']['reference_key'] = object_data.shape_keys.reference_key.name
key_blocks = {}
for key in object_data.shape_keys.key_blocks:
dumper.depth = 3
dumper.include_filter = [
'name',
'data',
'mute',
'value',
'slider_min',
'slider_max',
'data',
'co'
]
key_blocks[key.name] = dumper.dump(key)
key_blocks[key.name]['relative_key'] = key.relative_key.name
data['shape_keys']['key_blocks'] = key_blocks
data['shape_keys'] = dump_shape_keys(object_data.shape_keys)
# SKIN VERTICES
if hasattr(object_data, 'skin_vertices') and object_data.skin_vertices:
@ -583,6 +738,9 @@ class BlObject(BlDatablock):
]
data['cycles_visibility'] = dumper.dump(instance.cycles_visibility)
# PHYSICS
data.update(dump_physics(instance))
return data
def _resolve_deps_implementation(self):
@ -592,6 +750,10 @@ class BlObject(BlDatablock):
if self.instance.data:
deps.append(self.instance.data)
# Particle systems
for particle_slot in self.instance.particle_systems:
deps.append(particle_slot.settings)
if self.is_library:
deps.append(self.instance.library)
@ -606,4 +768,6 @@ class BlObject(BlDatablock):
deps.extend(find_textures_dependencies(self.instance.modifiers))
deps.extend(find_geometry_nodes_dependencies(self.instance.modifiers))
if hasattr(self.instance.data, 'shape_keys') and self.instance.data.shape_keys:
deps.extend(resolve_animation_dependencies(self.instance.data.shape_keys))
return deps

View File

@ -0,0 +1,90 @@
import bpy
import mathutils
from . import dump_anything
from .bl_datablock import BlDatablock, get_datablock_from_uuid
def dump_textures_slots(texture_slots: bpy.types.bpy_prop_collection) -> list:
""" Dump every texture slot collection as the form:
[(index, slot_texture_uuid, slot_texture_name), (), ...]
"""
dumped_slots = []
for index, slot in enumerate(texture_slots):
if slot and slot.texture:
dumped_slots.append((index, slot.texture.uuid, slot.texture.name))
return dumped_slots
def load_texture_slots(dumped_slots: list, target_slots: bpy.types.bpy_prop_collection):
"""
"""
for index, slot in enumerate(target_slots):
if slot:
target_slots.clear(index)
for index, slot_uuid, slot_name in dumped_slots:
target_slots.create(index).texture = get_datablock_from_uuid(
slot_uuid, slot_name
)
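Texture slots are dumped as `(index, uuid, name)` triples so the loader can resolve the replicated texture by uuid (name as fallback) instead of serializing the texture itself. A hedged sketch, assuming two particle settings and textures that have already been replicated:

import bpy
from multi_user.bl_types.bl_particle import dump_textures_slots, load_texture_slots

src = bpy.data.particles[0]          # hypothetical particle settings
dst = bpy.data.particles[1]

slots = dump_textures_slots(src.texture_slots)   # [(index, uuid, name), ...]
load_texture_slots(slots, dst.texture_slots)     # clears then re-creates slots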
IGNORED_ATTR = [
"is_embedded_data",
"is_evaluated",
"is_fluid",
"is_library_indirect",
"users"
]
class BlParticle(BlDatablock):
bl_id = "particles"
bl_class = bpy.types.ParticleSettings
bl_icon = "PARTICLES"
bl_check_common = False
bl_reload_parent = False
def _construct(self, data):
instance = bpy.data.particles.new(data["name"])
instance.uuid = self.uuid
return instance
def _load_implementation(self, data, target):
dump_anything.load(target, data)
dump_anything.load(target.effector_weights, data["effector_weights"])
# Force field
force_field_1 = data.get("force_field_1", None)
if force_field_1:
dump_anything.load(target.force_field_1, force_field_1)
force_field_2 = data.get("force_field_2", None)
if force_field_2:
dump_anything.load(target.force_field_2, force_field_2)
# Texture slots
load_texture_slots(data["texture_slots"], target.texture_slots)
def _dump_implementation(self, data, instance=None):
assert instance
dumper = dump_anything.Dumper()
dumper.depth = 1
dumper.exclude_filter = IGNORED_ATTR
data = dumper.dump(instance)
# Particle effectors
data["effector_weights"] = dumper.dump(instance.effector_weights)
if instance.force_field_1:
data["force_field_1"] = dumper.dump(instance.force_field_1)
if instance.force_field_2:
data["force_field_2"] = dumper.dump(instance.force_field_2)
# Texture slots
data["texture_slots"] = dump_textures_slots(instance.texture_slots)
return data
def _resolve_deps_implementation(self):
return [t.texture for t in self.instance.texture_slots if t and t.texture]

View File

@ -368,6 +368,8 @@ def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor
class BlScene(BlDatablock):
is_root = True
bl_id = "scenes"
bl_class = bpy.types.Scene
bl_check_common = True

View File

@ -610,6 +610,8 @@ class Loader:
instance.write(bpy.data.fonts.get(dump))
elif isinstance(rna_property_type, T.Sound):
instance.write(bpy.data.sounds.get(dump))
# elif isinstance(rna_property_type, T.ParticleSettings):
# instance.write(bpy.data.particles.get(dump))
def _load_matrix(self, matrix, dump):
matrix.write(mathutils.Matrix(dump))

Submodule multi_user/libs/replication updated: 9a02e16d70...001fbdc60d

View File

@ -32,6 +32,7 @@ from operator import itemgetter
from pathlib import Path
from queue import Queue
from time import gmtime, strftime
import traceback
try:
import _pickle as pickle
@ -44,9 +45,11 @@ from bpy.app.handlers import persistent
from bpy_extras.io_utils import ExportHelper, ImportHelper
from replication.constants import (COMMITED, FETCHED, RP_COMMON, STATE_ACTIVE,
STATE_INITIAL, STATE_SYNCING, UP)
from replication.data import ReplicatedDataFactory
from replication.exception import NonAuthorizedOperationError, ContextError
from replication.data import DataTranslationProtocol
from replication.exception import ContextError, NonAuthorizedOperationError
from replication.interface import session
from replication.porcelain import add, apply
from replication.repository import Repository
from . import bl_types, environment, timers, ui, utils
from .presence import SessionStatusWidget, renderer, view3d_find
@ -80,8 +83,8 @@ def initialize_session():
# Step 1: Construct nodes
logging.info("Constructing nodes")
for node in session._graph.list_ordered():
node_ref = session.get(uuid=node)
for node in session.repository.list_ordered():
node_ref = session.repository.get_node(node)
if node_ref is None:
logging.error(f"Can't construct node {node}")
elif node_ref.state == FETCHED:
@ -89,8 +92,8 @@ def initialize_session():
# Step 2: Load nodes
logging.info("Loading nodes")
for node in session._graph.list_ordered():
node_ref = session.get(uuid=node)
for node in session.repository.list_ordered():
node_ref = session.repository.get_node(node)
if node_ref is None:
logging.error(f"Can't load node {node}")
@ -186,7 +189,7 @@ class SessionStartOperator(bpy.types.Operator):
handler.setFormatter(formatter)
bpy_factory = ReplicatedDataFactory()
bpy_protocol = DataTranslationProtocol()
supported_bl_types = []
# init the factory with supported types
@ -205,7 +208,7 @@ class SessionStartOperator(bpy.types.Operator):
type_local_config = settings.supported_datablocks[type_impl_name]
bpy_factory.register_type(
bpy_protocol.register_type(
type_module_class.bl_class,
type_module_class,
check_common=type_module_class.bl_check_common)
@ -215,10 +218,7 @@ class SessionStartOperator(bpy.types.Operator):
else:
python_binary_path = bpy.app.binary_path_python
session.configure(
factory=bpy_factory,
python_path=python_binary_path,
external_update_handling=True)
repo = Repository(data_protocol=bpy_protocol)
# Host a session
if self.host:
@ -229,13 +229,14 @@ class SessionStartOperator(bpy.types.Operator):
runtime_settings.internet_ip = environment.get_ip()
try:
# Init repository
for scene in bpy.data.scenes:
session.add(scene)
add(repo, scene)
session.host(
repository= repo,
id=settings.username,
port=settings.port,
ipc_port=settings.ipc_port,
timeout=settings.connection_timeout,
password=admin_pass,
cache_directory=settings.cache_directory,
@ -245,7 +246,6 @@ class SessionStartOperator(bpy.types.Operator):
except Exception as e:
self.report({'ERROR'}, repr(e))
logging.error(f"Error: {e}")
import traceback
traceback.print_exc()
# Join a session
else:
@ -256,10 +256,10 @@ class SessionStartOperator(bpy.types.Operator):
try:
session.connect(
repository= repo,
id=settings.username,
address=settings.ip,
port=settings.port,
ipc_port=settings.ipc_port,
timeout=settings.connection_timeout,
password=admin_pass
)
@ -279,7 +279,9 @@ class SessionStartOperator(bpy.types.Operator):
session_user_sync = timers.SessionUserSync()
session_background_executor = timers.MainThreadExecutor(
execution_queue=background_execution_queue)
session_listen = timers.SessionListenTimer(timeout=0.001)
session_listen.register()
session_update.register()
session_user_sync.register()
session_background_executor.register()
@ -287,7 +289,7 @@ class SessionStartOperator(bpy.types.Operator):
deleyables.append(session_background_executor)
deleyables.append(session_update)
deleyables.append(session_user_sync)
deleyables.append(session_listen)
self.report(
@ -328,7 +330,7 @@ class SessionInitOperator(bpy.types.Operator):
utils.clean_scene()
for scene in bpy.data.scenes:
session.add(scene)
add(session.repository, scene)
session.init()
@ -350,7 +352,7 @@ class SessionStopOperator(bpy.types.Operator):
if session:
try:
session.disconnect()
session.disconnect(reason='user')
except Exception as e:
self.report({'ERROR'}, repr(e))
@ -599,17 +601,22 @@ class SessionApply(bpy.types.Operator):
def execute(self, context):
logging.debug(f"Running apply on {self.target}")
try:
node_ref = session.get(uuid=self.target)
session.apply(self.target,
force=True,
force_dependencies=self.reset_dependencies)
node_ref = session.repository.get_node(self.target)
apply(session.repository,
self.target,
force=True,
force_dependencies=self.reset_dependencies)
if node_ref.bl_reload_parent:
for parent in session._graph.find_parents(self.target):
for parent in session.repository.get_parents(self.target):
logging.debug(f"Refresh parent {parent}")
session.apply(parent, force=True)
apply(session.repository,
parent.uuid,
force=True)
except Exception as e:
self.report({'ERROR'}, repr(e))
return {"CANCELED"}
traceback.print_exc()
return {"CANCELLED"}
return {"FINISHED"}
@ -649,15 +656,15 @@ class ApplyArmatureOperator(bpy.types.Operator):
return {'CANCELLED'}
if event.type == 'TIMER':
if session and session.state['STATE'] == STATE_ACTIVE:
if session and session.state == STATE_ACTIVE:
nodes = session.list(filter=bl_types.bl_armature.BlArmature)
for node in nodes:
node_ref = session.get(uuid=node)
node_ref = session.repository.get_node(node)
if node_ref.state == FETCHED:
try:
session.apply(node)
apply(session.repository, node)
except Exception as e:
logging.error("Fail to apply armature: {e}")
@ -794,7 +801,7 @@ class SessionSaveBackupOperator(bpy.types.Operator, ExportHelper):
@classmethod
def poll(cls, context):
return session.state['STATE'] == STATE_ACTIVE
return session.state == STATE_ACTIVE
class SessionStopAutoSaveOperator(bpy.types.Operator):
bl_idname = "session.cancel_autosave"
@ -803,7 +810,7 @@ class SessionStopAutoSaveOperator(bpy.types.Operator):
@classmethod
def poll(cls, context):
return (session.state['STATE'] == STATE_ACTIVE and 'SessionBackupTimer' in registry)
return (session.state == STATE_ACTIVE and 'SessionBackupTimer' in registry)
def execute(self, context):
autosave_timer = registry.get('SessionBackupTimer')
@ -828,7 +835,7 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
)
def execute(self, context):
from replication.graph import ReplicationGraph
from replication.repository import Repository
# TODO: add filechecks
@ -848,7 +855,7 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
# init the factory with supported types
bpy_factory = ReplicatedDataFactory()
bpy_protocol = DataTranslationProtocol()
for type in bl_types.types_to_register():
type_module = getattr(bl_types, type)
name = [e.capitalize() for e in type.split('_')[1:]]
@ -856,16 +863,16 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
type_module_class = getattr(type_module, type_impl_name)
bpy_factory.register_type(
bpy_protocol.register_type(
type_module_class.bl_class,
type_module_class)
graph = ReplicationGraph()
graph = Repository()
for node, node_data in nodes:
node_type = node_data.get('str_type')
impl = bpy_factory.get_implementation_from_net(node_type)
impl = bpy_protocol.get_implementation_from_net(node_type)
if impl:
logging.info(f"Loading {node}")
@ -873,7 +880,7 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
uuid=node,
dependencies=node_data['dependencies'],
data=node_data['data'])
instance.store(graph)
graph.do_commit(instance)
instance.state = FETCHED
logging.info("Graph succefully loaded")
@ -922,7 +929,7 @@ classes = (
def update_external_dependencies():
nodes_ids = session.list(filter=bl_types.bl_file.BlFile)
for node_id in nodes_ids:
node = session.get(node_id)
node = session.repository.get_node(node_id)
if node and node.owner in [session.id, RP_COMMON] \
and node.has_changed():
session.commit(node_id)
@ -931,11 +938,11 @@ def update_external_dependencies():
def sanitize_deps_graph(remove_nodes: bool = False):
""" Cleanup the replication graph
"""
if session and session.state['STATE'] == STATE_ACTIVE:
if session and session.state == STATE_ACTIVE:
start = utils.current_milli_time()
rm_cpt = 0
for node_key in session.list():
node = session.get(node_key)
node = session.repository.get_node(node_key)
if node is None \
or (node.state == UP and not node.resolve(construct=False)):
if remove_nodes:
@ -956,18 +963,18 @@ def resolve_deps_graph(dummy):
A future solution should be to avoid storing datablock references...
"""
if session and session.state['STATE'] == STATE_ACTIVE:
if session and session.state == STATE_ACTIVE:
sanitize_deps_graph(remove_nodes=True)
@persistent
def load_pre_handler(dummy):
if session and session.state['STATE'] in [STATE_ACTIVE, STATE_SYNCING]:
if session and session.state in [STATE_ACTIVE, STATE_SYNCING]:
bpy.ops.session.stop()
@persistent
def update_client_frame(scene):
if session and session.state['STATE'] == STATE_ACTIVE:
if session and session.state == STATE_ACTIVE:
session.update_user_metadata({
'frame_current': scene.frame_current
})
@ -975,7 +982,7 @@ def update_client_frame(scene):
@persistent
def depsgraph_evaluation(scene):
if session and session.state['STATE'] == STATE_ACTIVE:
if session and session.state == STATE_ACTIVE:
context = bpy.context
blender_depsgraph = bpy.context.view_layer.depsgraph
dependency_updates = [u for u in blender_depsgraph.updates]
@ -988,7 +995,7 @@ def depsgraph_evaluation(scene):
# Is the object tracked ?
if update.id.uuid:
# Retrieve local version
node = session.get(uuid=update.id.uuid)
node = session.repository.get_node(update.id.uuid)
# Check our right on this update:
# - if its ours or ( under common and diff), launch the
@ -1012,11 +1019,11 @@ def depsgraph_evaluation(scene):
continue
# A new scene is created
elif isinstance(update.id, bpy.types.Scene):
ref = session.get(reference=update.id)
ref = session.repository.get_node_by_datablock(update.id)
if ref:
ref.resolve()
else:
scn_uuid = session.add(update.id)
scn_uuid = add(session.repository, update.id)
session.commit(scn_uuid)
session.push(scn_uuid, check_data=False)
def register():
@ -1034,7 +1041,7 @@ def register():
def unregister():
if session and session.state['STATE'] == STATE_ACTIVE:
if session and session.state == STATE_ACTIVE:
session.disconnect()
from bpy.utils import unregister_class
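Three mechanical migrations repeat through `operators.py`, `timers.py` and `ui.py` in this patch: `session.state` is now the state constant itself rather than a dict, node access goes through `session.repository.get_node` instead of `session.get`, and mutations go through the `replication.porcelain` helpers. A condensed, hedged before/after (illustrative only, guarded for an active session):

import bpy
from replication.constants import STATE_ACTIVE
from replication.interface import session
from replication.porcelain import add, apply

if session and session.state == STATE_ACTIVE:        # was: session.state['STATE']
    for uuid in session.list():
        node = session.repository.get_node(uuid)     # was: session.get(uuid=uuid)

    scn_uuid = add(session.repository, bpy.context.scene)  # was: session.add(...)
    apply(session.repository, scn_uuid, force=True)         # was: session.apply(...)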

View File

@ -66,14 +66,6 @@ def update_ip(self, context):
self['ip'] = "127.0.0.1"
def update_port(self, context):
max_port = self.port + 3
if self.ipc_port < max_port and \
self['ipc_port'] >= self.port:
logging.error(
"IPC Port in conflict with the port, assigning a random value")
self['ipc_port'] = random.randrange(self.port+4, 10000)
def update_directory(self, context):
@ -174,12 +166,6 @@ class SessionPrefs(bpy.types.AddonPreferences):
supported_datablocks: bpy.props.CollectionProperty(
type=ReplicatedDatablock,
)
ipc_port: bpy.props.IntProperty(
name="ipc_port",
description='internal ttl port(only useful for multiple local instances)',
default=random.randrange(5570, 70000),
update=update_port,
)
init_method: bpy.props.EnumProperty(
name='init_method',
description='Init repo',

View File

@ -30,7 +30,7 @@ import mathutils
from bpy_extras import view3d_utils
from gpu_extras.batch import batch_for_shader
from replication.constants import (STATE_ACTIVE, STATE_AUTH, STATE_CONFIG,
STATE_INITIAL, STATE_LAUNCHING_SERVICES,
STATE_INITIAL, CONNECTING,
STATE_LOBBY, STATE_QUITTING, STATE_SRV_SYNC,
STATE_SYNCING, STATE_WAITING)
from replication.interface import session
@ -399,7 +399,7 @@ class SessionStatusWidget(Widget):
text_scale = self.preferences.presence_hud_scale
ui_scale = bpy.context.preferences.view.ui_scale
color = [1, 1, 0, 1]
state = session.state.get('STATE')
state = session.state
state_str = f"{get_state_str(state)}"
if state == STATE_ACTIVE:

View File

@ -24,6 +24,7 @@ from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE,
STATE_SRV_SYNC, STATE_SYNCING, UP)
from replication.exception import NonAuthorizedOperationError, ContextError
from replication.interface import session
from replication.porcelain import apply, add
from . import operators, utils
from .presence import (UserFrustumWidget, UserNameWidget, UserSelectionWidget,
@ -71,7 +72,7 @@ class Timer(object):
except Exception as e:
logging.error(e)
self.unregister()
session.disconnect()
session.disconnect(reason=f"Error during timer {self.id} execution")
else:
if self.is_running:
return self._timeout
@ -100,25 +101,31 @@ class SessionBackupTimer(Timer):
def execute(self):
session.save(self._filepath)
class SessionListenTimer(Timer):
def execute(self):
session.listen()
class ApplyTimer(Timer):
def execute(self):
if session and session.state['STATE'] == STATE_ACTIVE:
if session and session.state == STATE_ACTIVE:
nodes = session.list()
for node in nodes:
node_ref = session.get(uuid=node)
node_ref = session.repository.get_node(node)
if node_ref.state == FETCHED:
try:
session.apply(node)
apply(session.repository, node)
except Exception as e:
logging.error(f"Fail to apply {node_ref.uuid}")
traceback.print_exc()
else:
if node_ref.bl_reload_parent:
for parent in session._graph.find_parents(node):
for parent in session.repository.get_parents(node):
logging.debug("Refresh parent {node}")
session.apply(parent, force=True)
apply(session.repository,
parent.uuid,
force=True)
class DynamicRightSelectTimer(Timer):
@ -131,7 +138,7 @@ class DynamicRightSelectTimer(Timer):
def execute(self):
settings = utils.get_preferences()
if session and session.state['STATE'] == STATE_ACTIVE:
if session and session.state == STATE_ACTIVE:
# Find user
if self._user is None:
self._user = session.online_users.get(settings.username)
@ -145,7 +152,7 @@ class DynamicRightSelectTimer(Timer):
# if an annotation exist and is tracked
if annotation_gp and annotation_gp.uuid:
registered_gp = session.get(uuid=annotation_gp.uuid)
registered_gp = session.repository.get_node(annotation_gp.uuid)
if is_annotating(bpy.context):
# try to get the right on it
if registered_gp.owner == RP_COMMON:
@ -159,7 +166,7 @@ class DynamicRightSelectTimer(Timer):
affect_dependencies=False)
if registered_gp.owner == settings.username:
gp_node = session.get(uuid=annotation_gp.uuid)
gp_node = session.repository.get_node(annotation_gp.uuid)
if gp_node.has_changed():
session.commit(gp_node.uuid)
session.push(gp_node.uuid, check_data=False)
@ -183,7 +190,7 @@ class DynamicRightSelectTimer(Timer):
# change old selection right to common
for obj in obj_common:
node = session.get(uuid=obj)
node = session.repository.get_node(obj)
if node and (node.owner == settings.username or node.owner == RP_COMMON):
recursive = True
@ -201,7 +208,7 @@ class DynamicRightSelectTimer(Timer):
# change new selection to our
for obj in obj_ours:
node = session.get(uuid=obj)
node = session.repository.get_node(obj)
if node and node.owner == RP_COMMON:
recursive = True
@ -234,7 +241,7 @@ class DynamicRightSelectTimer(Timer):
owned_keys = session.list(
filter_owner=settings.username)
for key in owned_keys:
node = session.get(uuid=key)
node = session.repository.get_node(key)
try:
session.change_owner(
key,
@ -263,7 +270,7 @@ class ClientUpdate(Timer):
settings = utils.get_preferences()
if session and renderer:
if session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]:
if session.state in [STATE_ACTIVE, STATE_LOBBY]:
local_user = session.online_users.get(
settings.username)

View File

@ -26,7 +26,7 @@ from replication.constants import (ADDED, ERROR, FETCHED,
STATE_INITIAL, STATE_SRV_SYNC,
STATE_WAITING, STATE_QUITTING,
STATE_LOBBY,
STATE_LAUNCHING_SERVICES)
CONNECTING)
from replication import __version__
from replication.interface import session
from .timers import registry
@ -71,9 +71,9 @@ class SESSION_PT_settings(bpy.types.Panel):
def draw_header(self, context):
layout = self.layout
if session and session.state['STATE'] != STATE_INITIAL:
if session and session.state != STATE_INITIAL:
cli_state = session.state
state = session.state.get('STATE')
state = session.state
connection_icon = "KEYTYPE_MOVING_HOLD_VEC"
if state == STATE_ACTIVE:
@ -81,7 +81,7 @@ class SESSION_PT_settings(bpy.types.Panel):
else:
connection_icon = 'PROP_CON'
layout.label(text=f"Session - {get_state_str(cli_state['STATE'])}", icon=connection_icon)
layout.label(text=f"Session - {get_state_str(cli_state)}", icon=connection_icon)
else:
layout.label(text=f"Session - v{__version__}",icon="PROP_OFF")
@ -94,13 +94,13 @@ class SESSION_PT_settings(bpy.types.Panel):
if hasattr(context.window_manager, 'session'):
# STATE INITIAL
if not session \
or (session and session.state['STATE'] == STATE_INITIAL):
or (session and session.state == STATE_INITIAL):
pass
else:
cli_state = session.state
progress = session.state_progress
row = layout.row()
current_state = cli_state['STATE']
current_state = session.state
info_msg = None
if current_state in [STATE_ACTIVE]:
@ -124,8 +124,8 @@ class SESSION_PT_settings(bpy.types.Panel):
if current_state in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]:
info_box = row.box()
info_box.row().label(text=printProgressBar(
cli_state['CURRENT'],
cli_state['TOTAL'],
progress['current'],
progress['total'],
length=16
))
@ -141,7 +141,7 @@ class SESSION_PT_settings_network(bpy.types.Panel):
@classmethod
def poll(cls, context):
return not session \
or (session and session.state['STATE'] == 0)
or (session and session.state == 0)
def draw_header(self, context):
self.layout.label(text="", icon='URL')
@ -199,7 +199,7 @@ class SESSION_PT_settings_user(bpy.types.Panel):
@classmethod
def poll(cls, context):
return not session \
or (session and session.state['STATE'] == 0)
or (session and session.state == 0)
def draw_header(self, context):
self.layout.label(text="", icon='USER')
@ -230,7 +230,7 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
@classmethod
def poll(cls, context):
return not session \
or (session and session.state['STATE'] == 0)
or (session and session.state == 0)
def draw_header(self, context):
self.layout.label(text="", icon='PREFERENCES')
@ -251,9 +251,6 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
emboss=False)
if settings.sidebar_advanced_net_expanded:
net_section_row = net_section.row()
net_section_row.label(text="IPC Port:")
net_section_row.prop(settings, "ipc_port", text="")
net_section_row = net_section.row()
net_section_row.label(text="Timeout (ms):")
net_section_row.prop(settings, "connection_timeout", text="")
@ -322,7 +319,7 @@ class SESSION_PT_user(bpy.types.Panel):
@classmethod
def poll(cls, context):
return session and session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]
return session and session.state in [STATE_ACTIVE, STATE_LOBBY]
def draw_header(self, context):
self.layout.label(text="", icon='USER')
@ -353,7 +350,7 @@ class SESSION_PT_user(bpy.types.Panel):
if active_user != 0 and active_user.username != settings.username:
row = layout.row()
user_operations = row.split()
if session.state['STATE'] == STATE_ACTIVE:
if session.state == STATE_ACTIVE:
user_operations.alert = context.window_manager.session.time_snap_running
user_operations.operator(
@ -411,7 +408,7 @@ class SESSION_PT_presence(bpy.types.Panel):
@classmethod
def poll(cls, context):
return not session \
or (session and session.state['STATE'] in [STATE_INITIAL, STATE_ACTIVE])
or (session and session.state in [STATE_INITIAL, STATE_ACTIVE])
def draw_header(self, context):
self.layout.prop(context.window_manager.session,
@ -441,7 +438,7 @@ class SESSION_PT_presence(bpy.types.Panel):
def draw_property(context, parent, property_uuid, level=0):
settings = get_preferences()
runtime_settings = context.window_manager.session
item = session.get(uuid=property_uuid)
item = session.repository.get_node(property_uuid)
area_msg = parent.row(align=True)
@ -519,8 +516,8 @@ class SESSION_PT_repository(bpy.types.Panel):
admin = usr['admin']
return hasattr(context.window_manager, 'session') and \
session and \
(session.state['STATE'] == STATE_ACTIVE or \
session.state['STATE'] == STATE_LOBBY and admin)
(session.state == STATE_ACTIVE or \
session.state == STATE_LOBBY and admin)
def draw_header(self, context):
self.layout.label(text="", icon='OUTLINER_OB_GROUP_INSTANCE')
@ -536,7 +533,7 @@ class SESSION_PT_repository(bpy.types.Panel):
row = layout.row()
if session.state['STATE'] == STATE_ACTIVE:
if session.state == STATE_ACTIVE:
if 'SessionBackupTimer' in registry:
row.alert = True
row.operator('session.cancel_autosave', icon="CANCEL")
@ -568,7 +565,7 @@ class SESSION_PT_repository(bpy.types.Panel):
filter_owner=settings.username) if runtime_settings.filter_owned else session.list()
client_keys = [key for key in key_to_filter
if session.get(uuid=key).str_type
if session.repository.get_node(key).str_type
in types_filter]
if client_keys:
@ -579,7 +576,7 @@ class SESSION_PT_repository(bpy.types.Panel):
else:
row.label(text="Empty")
elif session.state['STATE'] == STATE_LOBBY and usr and usr['admin']:
elif session.state == STATE_LOBBY and usr and usr['admin']:
row.operator("session.init", icon='TOOL_SETTINGS', text="Init")
else:
row.label(text="Waiting to start")

View File

@ -36,7 +36,7 @@ from replication.constants import (STATE_ACTIVE, STATE_AUTH,
STATE_INITIAL, STATE_SRV_SYNC,
STATE_WAITING, STATE_QUITTING,
STATE_LOBBY,
STATE_LAUNCHING_SERVICES)
CONNECTING)
def find_from_attr(attr_name, attr_value, list):
@ -92,7 +92,7 @@ def get_state_str(state):
state_str = 'OFFLINE'
elif state == STATE_QUITTING:
state_str = 'QUITTING'
elif state == STATE_LAUNCHING_SERVICES:
elif state == CONNECTING:
state_str = 'LAUNCHING SERVICES'
elif state == STATE_LOBBY:
state_str = 'LOBBY'

View File

@ -13,7 +13,7 @@ def main():
if len(sys.argv) > 2:
blender_rev = sys.argv[2]
else:
blender_rev = "2.91.0"
blender_rev = "2.92.0"
try:
exit_val = BAT.test_blender_addon(addon_path=addon, blender_revision=blender_rev)

View File

@ -8,6 +8,7 @@ import random
from multi_user.bl_types.bl_action import BlAction
INTERPOLATION = ['CONSTANT', 'LINEAR', 'BEZIER', 'SINE', 'QUAD', 'CUBIC', 'QUART', 'QUINT', 'EXPO', 'CIRC', 'BACK', 'BOUNCE', 'ELASTIC']
FMODIFIERS = ['GENERATOR', 'FNGENERATOR', 'ENVELOPE', 'CYCLES', 'NOISE', 'LIMITS', 'STEPPED']
# @pytest.mark.parametrize('blendname', ['test_action.blend'])
def test_action(clear_blend):
@ -22,6 +23,9 @@ def test_action(clear_blend):
point.co[1] = random.randint(-10,10)
point.interpolation = INTERPOLATION[random.randint(0, len(INTERPOLATION)-1)]
for mod_type in FMODIFIERS:
fcurve_sample.modifiers.new(mod_type)
bpy.ops.mesh.primitive_plane_add()
bpy.data.objects[0].animation_data_create()
bpy.data.objects[0].animation_data.action = datablock

View File

@ -7,7 +7,7 @@ import bpy
import random
from multi_user.bl_types.bl_object import BlObject
# Removed 'BUILD' modifier because the seed doesn't seem to be
# Removed 'BUILD', 'SOFT_BODY' modifiers because the seed doesn't seem to be
# correctly initialized (#TODO: report the bug)
MOFIFIERS_TYPES = [
'DATA_TRANSFER', 'MESH_CACHE', 'MESH_SEQUENCE_CACHE',
@ -22,8 +22,7 @@ MOFIFIERS_TYPES = [
'MESH_DEFORM', 'SHRINKWRAP', 'SIMPLE_DEFORM', 'SMOOTH',
'CORRECTIVE_SMOOTH', 'LAPLACIANSMOOTH', 'SURFACE_DEFORM',
'WARP', 'WAVE', 'CLOTH', 'COLLISION', 'DYNAMIC_PAINT',
'EXPLODE', 'FLUID', 'OCEAN', 'PARTICLE_INSTANCE',
'SOFT_BODY', 'SURFACE']
'EXPLODE', 'FLUID', 'OCEAN', 'PARTICLE_INSTANCE', 'SURFACE']
GP_MODIFIERS_TYPE = [
'GP_ARRAY', 'GP_BUILD', 'GP_MIRROR', 'GP_MULTIPLY',
@ -72,5 +71,5 @@ def test_object(clear_blend):
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
print(DeepDiff(expected, result))
assert not DeepDiff(expected, result)