Compare commits

..

3 Commits

SHA1       Message                            Date
5a2ec0c328 clean: logging                     2022-02-10 11:11:21 +01:00
cc5ceafea9 refactor: use np_load              2022-02-10 10:53:51 +01:00
b1464c9e14 feat: bevel custom profile support 2022-02-10 10:53:40 +01:00
40 changed files with 292 additions and 395 deletions

.gitignore vendored (1 changed line)
View File

@@ -15,4 +15,3 @@ _build
 # ignore generated zip generated from blender_addon_tester
 *.zip
 libs
-venv

View File

@@ -1,8 +1,13 @@
 stages:
+  - test
   - build
+  - deploy
   - doc

 include:
+  - local: .gitlab/ci/test.gitlab-ci.yml
   - local: .gitlab/ci/build.gitlab-ci.yml
+  - local: .gitlab/ci/deploy.gitlab-ci.yml
   - local: .gitlab/ci/doc.gitlab-ci.yml

View File

@@ -1,5 +1,6 @@
 build:
   stage: build
+  needs: ["test"]
   image: debian:stable-slim
   script:
     - rm -rf tests .git .gitignore script
@@ -7,3 +8,5 @@ build:
     name: multi_user
     paths:
       - multi_user
+  variables:
+    GIT_SUBMODULE_STRATEGY: recursive

View File

@@ -0,0 +1,21 @@
+deploy:
+  stage: deploy
+  needs: ["build"]
+  image: slumber/docker-python
+  variables:
+    DOCKER_DRIVER: overlay2
+    DOCKER_TLS_CERTDIR: "/certs"
+    GIT_SUBMODULE_STRATEGY: recursive
+  services:
+    - docker:19.03.12-dind
+  script:
+    - RP_VERSION="$(python scripts/get_replication_version.py)"
+    - VERSION="$(python scripts/get_addon_version.py)"
+    - echo "Building docker image with replication ${RP_VERSION}"
+    - docker build --build-arg replication_version=${RP_VERSION} --build-arg version={VERSION} -t registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} ./scripts/docker_server
+    - echo "Pushing to gitlab registry ${VERSION}"
+    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
+    - docker tag registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} registry.gitlab.com/slumber/multi-user/multi-user-server:${CI_COMMIT_REF_NAME}
+    - docker push registry.gitlab.com/slumber/multi-user/multi-user-server

View File

@@ -1,5 +1,6 @@
 pages:
   stage: doc
+  needs: ["deploy"]
   image: python
   script:
     - pip install -U sphinx sphinx_rtd_theme sphinx-material

View File

@@ -0,0 +1,7 @@
+test:
+  stage: test
+  image: slumber/blender-addon-testing:latest
+  script:
+    - python3 scripts/test_addon.py
+  variables:
+    GIT_SUBMODULE_STRATEGY: recursive

.gitmodules vendored (3 changed lines)
View File

@@ -0,0 +1,3 @@
+[submodule "multi_user/libs/replication"]
+	path = multi_user/libs/replication
+	url = https://gitlab.com/slumber/replication.git
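
Since replication is now vendored as a git submodule, a fresh clone no longer fetches it automatically; a minimal local equivalent of what GIT_SUBMODULE_STRATEGY: recursive does for the CI jobs above:

    git clone --recurse-submodules https://gitlab.com/slumber/multi-user.git
    # or, inside an existing checkout:
    git submodule update --init --recursive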

View File

@@ -217,57 +217,3 @@ All notable changes to this project will be documented in this file.
 - GPencil fill stroke
 - Sculpt and GPencil brushes deleted when joining a session (@Kysios)
 - Auto-updater doesn't work for master and develop builds
-
-## [0.5.0] - 2022-02-10
-
-### Added
-
-- New overall UI and UX (@Kysios)
-- Documentation overall update (@Kysios)
-- Server presets (@Kysios)
-- Server online status (@Kysios)
-- Draw connected user color in the user list
-- Private session (access protected with a password) (@Kysios)
-
-### Changed
-
-- Dependencies are now installed in the addon folder and correctly cleaned during the addon removal process
-
-### Fixed
-
-- Python 3.10 compatibility (@notfood)
-- Blender 3.x compatibility
-- Skin vertex radius synchronization (@kromar)
-- Sequencer audio strip synchronization
-- Crash with empty after a reconnection
-
-## [0.5.1] - 2022-02-10
-
-### Fixed
-
-- Auto updater breaks dependency auto installer
-- Auto updater update from tag
-
-## [0.5.2] - 2022-02-18
-
-### Fixed
-
-- Objects not selectable after user leaves session
-- Geometry nodes attribute toogle doesn't sync
-
-## [0.5.3] - 2022-03-11
-
-### Changed
-
-- Snapshots logs
-
-### Fixed
-
-- Server crashing during snapshots
-- Blender 3.1 numpy loading error during early connection process
-- Server docker arguments
-
-## [0.5.5] - 2022-06-12
-
-### Fixed
-
-- Numpy mesh serialization error

View File

@@ -16,12 +16,12 @@ import sys
 # -- Project information -----------------------------------------------------

-project = 'Multi-User 0.5.x Documentation'
+project = 'Multi-User 0.5.0 Documentation'
 copyright = '2020, Swann Martinez'
 author = 'Swann Martinez, Poochy, Fabian'

 # The full version, including alpha/beta/rc tags
-version_release = '0.5.5'
+version_release = '0.5.1-develop'

 # -- General configuration ---------------------------------------------------

View File

@@ -206,9 +206,9 @@ You can run the dedicated server on any platform by following these steps:
 .. code-block:: bash

-    python -m pip install replication
+    python -m pip install replication==0.1.13

-3. Launch the server with:
+4. Launch the server with:

 .. code-block:: bash
@@ -562,7 +562,7 @@ The default Docker image essentially runs the equivalent of:
 .. code-block:: bash

-    replication.server -pwd admin -p 5555 -t 5000 -l DEBUG -lf multiuser_server.log
+    replication.serve -pwd admin -p 5555 -t 5000 -l DEBUG -lf multiuser_server.log

 This means the server will be launched with 'admin' as the administrator password, run on ports 5555:5558, use a timeout of 5 seconds, verbose 'DEBUG' log level, and with log files written to 'multiuser_server.log'. See :ref:`cmd-line` for a description of optional parameters.

View File

@@ -19,9 +19,9 @@
 bl_info = {
     "name": "Multi-User",
     "author": "Swann Martinez",
-    "version": (0, 6, 0),
+    "version": (0, 4, 1),
     "description": "Enable real-time collaborative workflow inside blender",
-    "blender": (4, 0, 0),
+    "blender": (2, 82, 0),
     "location": "3D View > Sidebar > Multi-User tab",
     "warning": "Unstable addon, use it at your own risks",
     "category": "Collaboration",
@@ -43,8 +43,6 @@ from bpy.app.handlers import persistent
 from . import environment

-environment.preload_modules()
-
 module_error_msg = "Insufficient rights to install the multi-user \
     dependencies, aunch blender with administrator rights."

View File

@@ -1015,11 +1015,9 @@ class Singleton_updater(object):
         for path, dirs, files in os.walk(base):
             # prune ie skip updater folder
             dirs[:] = [d for d in dirs if os.path.join(path,d) not in [self._updater_path]]
-            for directory in dirs:
-                shutil.rmtree(os.path.join(path,directory))
             for file in files:
-                try:
-                    fl = os.path.join(path,file)
-                    os.remove(fl)
+                for ptrn in self.remove_pre_update_patterns:
+                    if fnmatch.filter([file],ptrn):
+                        try:
+                            fl = os.path.join(path,file)
+                            os.remove(fl)
@@ -1703,7 +1701,7 @@ class GitlabEngine(object):
     def parse_tags(self, response, updater):
         if response == None:
             return []
-        return [{"name": tag["name"], "zipball_url": f"https://gitlab.com/slumber/multi-user/-/jobs/artifacts/{tag['name']}/download?job=build"} for tag in response]
+        return [{"name": tag["name"], "zipball_url": self.get_zip_url(tag["commit"]["id"], updater)} for tag in response]

 # -----------------------------------------------------------------------------

View File

@@ -267,7 +267,7 @@ class addon_updater_update_now(bpy.types.Operator):
     clean_install: bpy.props.BoolProperty(
         name="Clean install",
         description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
-        default=True,
+        default=False,
         options={'HIDDEN'}
     )

View File

@@ -28,14 +28,9 @@ from replication.protocol import ReplicatedDatablock
 from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
 from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

-from bpy.types import (NodeSocketGeometry, NodeSocketShader,
-                       NodeSocketVirtual, NodeSocketCollection,
-                       NodeSocketObject, NodeSocketMaterial)

 NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
-IGNORED_SOCKETS = ['NodeSocketGeometry', 'NodeSocketShader', 'CUSTOM', 'NodeSocketVirtual']
-IGNORED_SOCKETS_TYPES = (NodeSocketGeometry, NodeSocketShader, NodeSocketVirtual)
-ID_NODE_SOCKETS = (NodeSocketObject, NodeSocketCollection, NodeSocketMaterial)
+IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM']

 def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
     """ Load a node into a node_tree from a dict
@@ -62,23 +57,17 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
     if node_tree_uuid:
         target_node.node_tree = get_datablock_from_uuid(node_tree_uuid, None)

-    if target_node.bl_idname == 'GeometryNodeRepeatOutput':
-        target_node.repeat_items.clear()
-        for sock_name, sock_type in node_data['repeat_items'].items():
-            target_node.repeat_items.new(sock_type, sock_name)
-
     inputs_data = node_data.get('inputs')
     if inputs_data:
-        inputs = [i for i in target_node.inputs if not isinstance(i, IGNORED_SOCKETS_TYPES)]
+        inputs = [i for i in target_node.inputs if i.type not in IGNORED_SOCKETS]
         for idx, inpt in enumerate(inputs):
             if idx < len(inputs_data) and hasattr(inpt, "default_value"):
                 loaded_input = inputs_data[idx]
                 try:
-                    if isinstance(inpt, ID_NODE_SOCKETS):
+                    if inpt.type in ['OBJECT', 'COLLECTION']:
                         inpt.default_value = get_datablock_from_uuid(loaded_input, None)
                     else:
                         inpt.default_value = loaded_input
-                        setattr(inpt, 'default_value', loaded_input)
                 except Exception as e:
                     logging.warning(f"Node {target_node.name} input {inpt.name} parameter not supported, skipping ({e})")
             else:
@@ -86,12 +75,12 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
     outputs_data = node_data.get('outputs')
     if outputs_data:
-        outputs = [o for o in target_node.outputs if not isinstance(o, IGNORED_SOCKETS_TYPES)]
+        outputs = [o for o in target_node.outputs if o.type not in IGNORED_SOCKETS]
         for idx, output in enumerate(outputs):
             if idx < len(outputs_data) and hasattr(output, "default_value"):
                 loaded_output = outputs_data[idx]
                 try:
-                    if isinstance(output, ID_NODE_SOCKETS):
+                    if output.type in ['OBJECT', 'COLLECTION']:
                         output.default_value = get_datablock_from_uuid(loaded_output, None)
                     else:
                         output.default_value = loaded_output
@@ -152,7 +141,7 @@ def dump_node(node: bpy.types.ShaderNode) -> dict:
     if hasattr(node, 'inputs'):
         dumped_node['inputs'] = []
-        inputs = [i for i in node.inputs if not isinstance(i, IGNORED_SOCKETS_TYPES)]
+        inputs = [i for i in node.inputs if i.type not in IGNORED_SOCKETS]
         for idx, inpt in enumerate(inputs):
             if hasattr(inpt, 'default_value'):
                 if isinstance(inpt.default_value, bpy.types.ID):
@@ -165,7 +154,7 @@ def dump_node(node: bpy.types.ShaderNode) -> dict:
     if hasattr(node, 'outputs'):
         dumped_node['outputs'] = []
         for idx, output in enumerate(node.outputs):
-            if not isinstance(output, IGNORED_SOCKETS_TYPES):
+            if output.type not in IGNORED_SOCKETS:
                 if hasattr(output, 'default_value'):
                     dumped_node['outputs'].append(
                         io_dumper.dump(output.default_value))
@@ -196,12 +185,6 @@ def dump_node(node: bpy.types.ShaderNode) -> dict:
         dumped_node['image_uuid'] = node.image.uuid
     if hasattr(node, 'node_tree') and getattr(node, 'node_tree'):
         dumped_node['node_tree_uuid'] = node.node_tree.uuid
-
-    if node.bl_idname == 'GeometryNodeRepeatInput':
-        dumped_node['paired_output'] = node.paired_output.name
-    if node.bl_idname == 'GeometryNodeRepeatOutput':
-        dumped_node['repeat_items'] = {item.name: item.socket_type for item in node.repeat_items}
-
     return dumped_node
@@ -216,8 +199,10 @@ def load_links(links_data, node_tree):
     """
     for link in links_data:
-        input_socket = node_tree.nodes[link['to_node']].inputs[int(link['to_socket'])]
-        output_socket = node_tree.nodes[link['from_node']].outputs[int(link['from_socket'])]
+        input_socket = node_tree.nodes[link['to_node']
+                                       ].inputs[int(link['to_socket'])]
+        output_socket = node_tree.nodes[link['from_node']].outputs[int(
+            link['from_socket'])]
         node_tree.links.new(input_socket, output_socket)
@@ -250,7 +235,7 @@ def dump_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict:
     """ Dump a shader node_tree to a dict including links and nodes

     :arg node_tree: dumped shader node tree
-    :type node_tree: bpy.types.ShaderNodeTree`
+    :type node_tree: bpy.types.ShaderNodeTree
     :return: dict
     """
     node_tree_data = {
@@ -260,8 +245,9 @@ def dump_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict:
         'type': type(node_tree).__name__
     }

-    sockets = [item for item in node_tree.interface.items_tree if item.item_type == 'SOCKET']
-    node_tree_data['interface'] = dump_node_tree_sockets(sockets)
+    for socket_id in ['inputs', 'outputs']:
+        socket_collection = getattr(node_tree, socket_id)
+        node_tree_data[socket_id] = dump_node_tree_sockets(socket_collection)

     return node_tree_data
@@ -277,21 +263,18 @@ def dump_node_tree_sockets(sockets: bpy.types.Collection) -> dict:
     """
     sockets_data = []
     for socket in sockets:
-        if not socket.socket_type:
-            logging.error(f"Socket {socket.name} has no type, skipping")
-            raise ValueError(f"Socket {socket.name} has no type, skipping")
-        sockets_data.append(
-            (
-                socket.name,
-                socket.socket_type,
-                socket.in_out
-            )
-        )
+        try:
+            socket_uuid = socket['uuid']
+        except Exception:
+            socket_uuid = str(uuid4())
+            socket['uuid'] = socket_uuid
+
+        sockets_data.append((socket.name, socket.bl_socket_idname, socket_uuid))

     return sockets_data

-def load_node_tree_sockets(interface: bpy.types.NodeTreeInterface,
+def load_node_tree_sockets(sockets: bpy.types.Collection,
                            sockets_data: dict):
     """ load sockets of a shader_node_tree
@@ -302,20 +285,20 @@ def load_node_tree_sockets(interface: bpy.types.NodeTreeInterface,
     :arg socket_data: dumped socket data
     :type socket_data: dict
     """
-    # Remove old sockets
-    interface.clear()
+    # Check for removed sockets
+    for socket in sockets:
+        if not [s for s in sockets_data if 'uuid' in socket and socket['uuid'] == s[2]]:
+            sockets.remove(socket)

     # Check for new sockets
-    for name, socket_type, in_out in sockets_data:
-        if not socket_type:
-            logging.error(f"Socket {name} has no type, skipping")
-            continue
-        socket = interface.new_socket(
-            name,
-            in_out=in_out,
-            socket_type=socket_type
-        )
+    for idx, socket_data in enumerate(sockets_data):
+        try:
+            checked_socket = sockets[idx]
+            if checked_socket.name != socket_data[0]:
+                checked_socket.name = socket_data[0]
+        except Exception:
+            s = sockets.new(socket_data[1], socket_data[0])
+            s['uuid'] = socket_data[2]

 def load_node_tree(node_tree_data: dict, target_node_tree: bpy.types.ShaderNodeTree) -> dict:
@@ -332,8 +315,13 @@ def load_node_tree(node_tree_data: dict, target_node_tree: bpy.types.ShaderNodeTree) -> dict:
     if not target_node_tree.is_property_readonly('name'):
         target_node_tree.name = node_tree_data['name']

-    if 'interface' in node_tree_data:
-        load_node_tree_sockets(target_node_tree.interface, node_tree_data['interface'])
+    if 'inputs' in node_tree_data:
+        socket_collection = getattr(target_node_tree, 'inputs')
+        load_node_tree_sockets(socket_collection, node_tree_data['inputs'])
+
+    if 'outputs' in node_tree_data:
+        socket_collection = getattr(target_node_tree, 'outputs')
+        load_node_tree_sockets(socket_collection, node_tree_data['outputs'])

     # Load nodes
     for node in node_tree_data["nodes"]:
@@ -347,15 +335,6 @@ def load_node_tree(node_tree_data: dict, target_node_tree: bpy.types.ShaderNodeTree) -> dict:
             target_node.parent = target_node_tree.nodes[node_data['parent']]
         else:
             target_node.parent = None
-
-    # Load geo node repeat zones
-    zone_input_to_pair = [node_data for node_data in node_tree_data["nodes"].values() if node_data['bl_idname'] == 'GeometryNodeRepeatInput']
-    for node_input_data in zone_input_to_pair:
-        zone_input = target_node_tree.nodes.get(node_input_data['name'])
-        zone_output = target_node_tree.nodes.get(node_input_data['paired_output'])
-        zone_input.pair_with_output(zone_output)
-
     # TODO: load only required nodes links
     # Load nodes links
     target_node_tree.links.clear()

View File

@@ -37,6 +37,8 @@ VERTICE = ['co']
 EDGE = [
     'vertices',
+    'crease',
+    'bevel_weight',
     'use_seam',
     'use_edge_sharp',
 ]
@@ -52,18 +54,6 @@ POLYGON = [
     'material_index',
 ]

-GENERIC_ATTRIBUTES =[
-    'crease_vert',
-    'crease_edge',
-    'bevel_weight_vert',
-    'bevel_weight_edge'
-]
-
-GENERIC_ATTRIBUTES_ENSURE = {
-    'crease_vert': 'vertex_crease_ensure',
-    'crease_edge': 'edge_crease_ensure'
-}

 class BlMesh(ReplicatedDatablock):
     use_delta = True
@@ -129,16 +119,6 @@
                 'color',
                 data["vertex_colors"][color_layer]['data'])

-        # Generic attibutes
-        for attribute_name, attribute_data_type, attribute_domain, attribute_data in data["attributes"]:
-            if attribute_name not in datablock.attributes:
-                datablock.attributes.new(
-                    attribute_name,
-                    attribute_data_type,
-                    attribute_domain
-                )
-            np_load_collection(attribute_data, datablock.attributes[attribute_name].data ,['value'])
-
         datablock.validate()
         datablock.update()
@@ -155,6 +135,7 @@
             'use_auto_smooth',
             'auto_smooth_angle',
             'use_customdata_edge_bevel',
+            'use_customdata_edge_crease'
         ]

         data = dumper.dump(mesh)
@@ -169,21 +150,6 @@
         data["egdes_count"] = len(mesh.edges)
         data["edges"] = np_dump_collection(mesh.edges, EDGE)

-        # ATTIBUTES
-        data["attributes"] = []
-        for attribute_name in GENERIC_ATTRIBUTES:
-            if attribute_name in datablock.attributes:
-                attribute_data = datablock.attributes.get(attribute_name)
-                dumped_attr_data = np_dump_collection(attribute_data.data, ['value'])
-                data["attributes"].append(
-                    (
-                        attribute_name,
-                        attribute_data.data_type,
-                        attribute_data.domain,
-                        dumped_attr_data
-                    )
-                )
-
         # POLYGONS
         data["poly_count"] = len(mesh.polygons)
         data["polygons"] = np_dump_collection(mesh.polygons, POLYGON)

View File

@@ -47,7 +47,10 @@ SHAPEKEY_BLOCK_ATTR = [
     'slider_max',
 ]

+CURVE_POINT = [
+    'location',
+    'handle_type_2',
+]

 if bpy.app.version >= (2,93,0):
     SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
 else:
@@ -58,17 +61,23 @@ else:
 def get_node_group_properties_identifiers(node_group):
     props_ids = []
-    for socket in node_group.interface.items_tree:
-        if socket.socket_type in IGNORED_SOCKETS:
+    # Inputs
+    for inpt in node_group.inputs:
+        if inpt.type in IGNORED_SOCKETS:
             continue
         else:
-            props_ids.append((socket.identifier, socket.socket_type))
-            props_ids.append((f"{socket.identifier}_attribute_name",'NodeSocketString'))
-            props_ids.append((f"{socket.identifier}_use_attribute", 'NodeSocketBool'))
+            props_ids.append((inpt.identifier, inpt.type))
+            if inpt.type in ['INT', 'VALUE', 'BOOLEAN', 'RGBA', 'VECTOR']:
+                props_ids.append((f"{inpt.identifier}_attribute_name",'STR'))
+                props_ids.append((f"{inpt.identifier}_use_attribute", 'BOOL'))
+
+    for outpt in node_group.outputs:
+        if outpt.type not in IGNORED_SOCKETS and outpt.type in ['INT', 'VALUE', 'BOOLEAN', 'RGBA', 'VECTOR']:
+            props_ids.append((f"{outpt.identifier}_attribute_name", 'STR'))

     return props_ids
+    # return [inpt.identifer for inpt in node_group.inputs if inpt.type not in IGNORED_SOCKETS]

 def dump_physics(target: bpy.types.Object)->dict:
@@ -113,21 +122,17 @@ def load_physics(dumped_settings: dict, target: bpy.types.Object):
     if 'rigid_body' in dumped_settings:
         if not target.rigid_body:
-            with bpy.context.temp_override(object=target):
-                bpy.ops.rigidbody.object_add()
+            bpy.ops.rigidbody.object_add({"object": target})
         loader.load(target.rigid_body, dumped_settings['rigid_body'])
     elif target.rigid_body:
-        with bpy.context.temp_override(object=target):
-            bpy.ops.rigidbody.object_remove()
+        bpy.ops.rigidbody.object_remove({"object": target})

     if 'rigid_body_constraint' in dumped_settings:
         if not target.rigid_body_constraint:
-            with bpy.context.temp_override(object=target):
-                bpy.ops.rigidbody.constraint_add()
+            bpy.ops.rigidbody.constraint_add({"object": target})
         loader.load(target.rigid_body_constraint, dumped_settings['rigid_body_constraint'])
     elif target.rigid_body_constraint:
-        with bpy.context.temp_override(object=target):
-            bpy.ops.rigidbody.constraint_remove()
+        bpy.ops.rigidbody.constraint_remove({"object": target})

 def dump_modifier_geometry_node_props(modifier: bpy.types.Modifier) -> list:
@@ -138,11 +143,11 @@ def dump_modifier_geometry_node_props(modifier: bpy.types.Modifier) -> list:
     """
     dumped_props = []

-    for prop_id, prop_type in get_node_group_properties_identifiers(modifier.node_group):
+    for prop_value, prop_type in get_node_group_properties_identifiers(modifier.node_group):
         try:
-            prop_value = modifier[prop_id]
+            prop_value = modifier[prop_value]
         except KeyError as e:
-            logging.error(f"fail to dump geomety node modifier property : {prop_id} ({e})")
+            logging.error(f"fail to dump geomety node modifier property : {prop_value} ({e})")
         else:
             dump = None
             if isinstance(prop_value, bpy.types.ID):
@@ -153,6 +158,7 @@ def dump_modifier_geometry_node_props(modifier: bpy.types.Modifier) -> list:
                 dump = prop_value.to_list()

             dumped_props.append((dump, prop_type))
+            # logging.info(prop_value)

     return dumped_props
@@ -169,12 +175,13 @@ def load_modifier_geometry_node_props(dumped_modifier: dict, target_modifier: bpy.types.Modifier):
     for input_index, inpt in enumerate(get_node_group_properties_identifiers(target_modifier.node_group)):
         dumped_value, dumped_type = dumped_modifier['props'][input_index]
         input_value = target_modifier[inpt[0]]
-        if dumped_type in ['NodeSocketInt', 'NodeSocketFloat', 'NodeSocketString', 'NodeSocketBool']:
+        if dumped_type in ['INT', 'VALUE', 'STR']:
+            logging.info(f"{inpt[0]}/{dumped_value}")
             target_modifier[inpt[0]] = dumped_value
-        elif dumped_type in ['NodeSocketColor', 'NodeSocketVector']:
+        elif dumped_type in ['RGBA', 'VECTOR']:
             for index in range(len(input_value)):
                 input_value[index] = dumped_value[index]
-        elif dumped_type in ['NodeSocketCollection', 'NodeSocketObject', 'NodeSocketImage', 'NodeSocketTexture', 'NodeSocketMaterial']:
+        elif dumped_type in ['COLLECTION', 'OBJECT', 'IMAGE', 'TEXTURE', 'MATERIAL']:
             target_modifier[inpt[0]] = get_datablock_from_uuid(dumped_value, None)
@@ -414,7 +421,8 @@ def dump_modifiers(modifiers: bpy.types.bpy_prop_collection)->dict:
             dumped_modifier['settings'] = dumper.dump(modifier.settings)
         elif modifier.type == 'UV_PROJECT':
             dumped_modifier['projectors'] =[p.object.name for p in modifier.projectors if p and p.object]
+        elif modifier.type == 'BEVEL' and modifier.profile_type == 'CUSTOM':
+            dumped_modifier['custom_profile'] = np_dump_collection(modifier.custom_profile.points, CURVE_POINT)

         dumped_modifiers.append(dumped_modifier)
     return dumped_modifiers
@@ -486,11 +494,31 @@ def load_modifiers(dumped_modifiers: list, modifiers: bpy.types.bpy_prop_collection):
                     loaded_modifier.projectors[projector_index].object = target_object
                 else:
                     logging.error("Could't load projector target object {projector_object}")
+        elif loaded_modifier.type == 'BEVEL':
+            src_cust_profile = dumped_modifier.get('custom_profile')
+            if src_cust_profile:
+                dst_points = loaded_modifier.custom_profile.points
+
+                # TODO: refactor to be diff-compatible
+                for p in dst_points:
+                    try:
+                        dst_points.remove(dst_points[0])
+                    except Exception:
+                        break
+
+                for i in range(len(src_cust_profile['handle_type_2'])-len(dst_points)):
+                    dst_points.add(0,0)
+
+                np_load_collection(src_cust_profile, dst_points, CURVE_POINT)
+                loaded_modifier.custom_profile.points.update()

 def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_prop_collection):
     """ Load modifiers custom data not managed by the dump_anything loader

     :param dumped_modifiers: modifiers to load
     :type dumped_modifiers: dict
     :param modifiers: target modifiers collection
@@ -575,6 +603,16 @@ class BlObject(ReplicatedDatablock):
         if 'pose' in data:
             if not datablock.pose:
                 raise Exception('No pose data yet (Fixed in a near futur)')
+
+            # Bone groups
+            for bg_name in data['pose']['bone_groups']:
+                bg_data = data['pose']['bone_groups'].get(bg_name)
+                bg_target = datablock.pose.bone_groups.get(bg_name)
+                if not bg_target:
+                    bg_target = datablock.pose.bone_groups.new(name=bg_name)
+
+                loader.load(bg_target, bg_data)
+                # datablock.pose.bone_groups.get

             # Bones
             for bone in data['pose']['bones']:
@@ -586,6 +624,9 @@
                 load_pose(target_bone, bone_data)

+                if 'bone_index' in bone_data.keys():
+                    target_bone.bone_group = datablock.pose.bone_group[bone_data['bone_group_index']]
+
         # TODO: find another way...
         if datablock.empty_display_type == "IMAGE":
             img_uuid = data.get('data_uuid')
@@ -725,6 +766,7 @@
                 bones[bone.name] = {}
                 dumper.depth = 1
                 rotation = 'rotation_quaternion' if bone.rotation_mode == 'QUATERNION' else 'rotation_euler'
+                group_index = 'bone_group_index' if bone.bone_group else None
                 dumper.include_filter = [
                     'rotation_mode',
                     'location',
@@ -732,6 +774,7 @@
                     'custom_shape',
                     'use_custom_shape_bone_size',
                     'custom_shape_scale',
+                    group_index,
                     rotation
                 ]
                 bones[bone.name] = dumper.dump(bone)
@@ -742,6 +785,17 @@
             data['pose'] = {'bones': bones}

+            # GROUPS
+            bone_groups = {}
+            for group in datablock.pose.bone_groups:
+                dumper.depth = 3
+                dumper.include_filter = [
+                    'name',
+                    'color_set'
+                ]
+                bone_groups[group.name] = dumper.dump(group)
+            data['pose']['bone_groups'] = bone_groups
+
         # VERTEx GROUP
         if len(datablock.vertex_groups) > 0:
             data['vertex_groups'] = dump_vertex_groups(datablock)

View File

@@ -26,8 +26,7 @@ import numpy as np
 BPY_TO_NUMPY_TYPES = {
     'FLOAT': np.float32,
     'INT': np.int32,
-    'BOOL': bool,
-    'BOOLEAN': bool}
+    'BOOL': np.bool}

 PRIMITIVE_TYPES = ['FLOAT', 'INT', 'BOOLEAN']
@@ -583,6 +582,7 @@ class Loader:
             dst_curve.points.new(pos[0], pos[1])
         curves.update()

+
     def _load_pointer(self, instance, dump):
         rna_property_type = instance.bl_rna_property.fixed_type
         if not rna_property_type:

View File

@@ -29,6 +29,15 @@ import bpy
 VERSION_EXPR = re.compile('\d+.\d+.\d+')

 DEFAULT_CACHE_DIR = os.path.join(
     os.path.dirname(os.path.abspath(__file__)), "cache")
+REPLICATION_DEPENDENCIES = {
+    "zmq",
+    "deepdiff"
+}
+LIBS = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
+REPLICATION = os.path.join(LIBS,"replication")
+PYTHON_PATH = None
+SUBPROCESS_DIR = None
+

 rtypes = []
@@ -41,20 +50,39 @@ def module_can_be_imported(name: str) -> bool:
         return False

-def install_pip(python_path):
+def install_pip():
     # pip can not necessarily be imported into Blender after this
-    subprocess.run([str(python_path), "-m", "ensurepip"])
+    subprocess.run([str(PYTHON_PATH), "-m", "ensurepip"])

-def preload_modules():
-    from . import wheels
-
-    wheels.load_wheel_global("ordered_set", "ordered_set")
-    wheels.load_wheel_global("deepdiff", "deepdiff")
-    wheels.load_wheel_global("replication", "replication")
-    wheels.load_wheel_global("zmq", "pyzmq", match_platform=True)
+def install_package(name: str, install_dir: str):
+    logging.info(f"installing {name} version...")
+    env = os.environ
+    if "PIP_REQUIRE_VIRTUALENV" in env:
+        # PIP_REQUIRE_VIRTUALENV is an env var to ensure pip cannot install packages outside a virtual env
+        # https://docs.python-guide.org/dev/pip-virtualenv/
+        # But since Blender's pip is outside of a virtual env, it can block our packages installation, so we unset the
+        # env var for the subprocess.
+        env = os.environ.copy()
+        del env["PIP_REQUIRE_VIRTUALENV"]
+    subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}", "-t", install_dir], env=env)
+
+    if name in sys.modules:
+        del sys.modules[name]
+
+def check_package_version(name: str, required_version: str):
+    logging.info(f"Checking {name} version...")
+    out = subprocess.run([str(PYTHON_PATH), "-m", "pip", "show", name], capture_output=True)
+
+    version = VERSION_EXPR.search(out.stdout.decode())
+    if version and version.group() == required_version:
+        logging.info(f"{name} is up to date")
+        return True
+    else:
+        logging.info(f"{name} need an update")
+        return False

 def get_ip():
     """
@@ -91,8 +119,32 @@ def remove_paths(paths: list):
         sys.path.remove(path)

+def install_modules(dependencies: list, python_path: str, install_dir: str):
+    global PYTHON_PATH, SUBPROCESS_DIR
+
+    PYTHON_PATH = Path(python_path)
+    SUBPROCESS_DIR = PYTHON_PATH.parent
+
+    if not module_can_be_imported("pip"):
+        install_pip()
+
+    for package_name in dependencies:
+        if not module_can_be_imported(package_name):
+            install_package(package_name, install_dir=install_dir)
+            module_can_be_imported(package_name)
+

 def register():
-    check_dir(DEFAULT_CACHE_DIR)
+    if bpy.app.version >= (2,91,0):
+        python_binary_path = sys.executable
+    else:
+        python_binary_path = bpy.app.binary_path_python
+
+    for module_name in list(sys.modules.keys()):
+        if 'replication' in module_name:
+            del sys.modules[module_name]
+
+    setup_paths([LIBS, REPLICATION])
+    install_modules(REPLICATION_DEPENDENCIES, python_binary_path, install_dir=LIBS)

 def unregister():
-    pass
+    remove_paths([REPLICATION, LIBS])

View File

@@ -81,9 +81,9 @@ def on_scene_update(scene):
     # NOTE: maybe we don't need to check each update but only the first

     for update in reversed(dependency_updates):
-        update_uuid = getattr(update.id.original, 'uuid', None)
+        update_uuid = getattr(update.id, 'uuid', None)
         if update_uuid:
-            node = session.repository.graph.get(update_uuid)
+            node = session.repository.graph.get(update.id.uuid)
             check_common = session.repository.rdp.get_implementation(update.id).bl_check_common

             if node and (node.owner == session.repository.username or check_common):

View File

@@ -16,20 +16,27 @@
 # ##### END GPL LICENSE BLOCK #####

+import asyncio
+import copy
 import gzip
 import logging
-from multi_user.preferences import ServerPreset
 import os
+import queue
+import random
+import shutil
+import string
 import sys
+import time
 import traceback
 from uuid import uuid4
 from datetime import datetime
+from operator import itemgetter
 from pathlib import Path
 from queue import Queue
+from time import gmtime, strftime

+from bpy.props import FloatProperty
+import bmesh

 try:
@@ -39,11 +46,15 @@ except ImportError:
 import bpy
 import mathutils
+from bpy.app.handlers import persistent
 from bpy_extras.io_utils import ExportHelper, ImportHelper
 from replication import porcelain
-from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE)
+from replication.constants import (COMMITED, FETCHED, RP_COMMON, STATE_ACTIVE,
+                                   STATE_INITIAL, STATE_SYNCING, UP)
+from replication.exception import ContextError, NonAuthorizedOperationError
 from replication.interface import session
+from replication.objects import Node
+from replication.protocol import DataTranslationProtocol
 from replication.repository import Repository

 from . import bl_types, environment, shared_data, timers, ui, utils
@@ -239,9 +250,6 @@ def on_connection_end(reason="none"):
     if on_scene_update in bpy.app.handlers.depsgraph_update_post:
         bpy.app.handlers.depsgraph_update_post.remove(on_scene_update)

-    renderer.clear_widgets()
-    renderer.add_widget("session_status", SessionStatusWidget())
-
     # Step 3: remove file handled
     logger = logging.getLogger()
     for handler in logger.handlers:
@@ -1028,7 +1036,7 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
     def poll(cls, context):
         return True

-class SESSION_PT_ImportUser(bpy.types.Panel):
+class SessionImportUser(bpy.types.Panel):
     bl_space_type = 'FILE_BROWSER'
     bl_region_type = 'TOOL_PROPS'
     bl_label = "Users"
@@ -1292,7 +1300,7 @@ classes = (
     SessionNotifyOperator,
     SessionSaveBackupOperator,
     SessionLoadSaveOperator,
-    SESSION_PT_ImportUser,
+    SessionImportUser,
     SessionStopAutoSaveOperator,
     SessionPurgeOperator,
     SessionPresetServerAdd,

View File

@@ -44,6 +44,13 @@ DEFAULT_PRESETS = {
         "admin_password": "admin",
         "server_password": ""
     },
+    "public session" : {
+        "server_name": "public session",
+        "ip": "51.75.71.183",
+        "port": 5555,
+        "admin_password": "",
+        "server_password": ""
+    },
 }

 def randomColor():
def randomColor(): def randomColor():

View File

@@ -67,10 +67,8 @@ def refresh_sidebar_view():
     """
     area, region, rv3d = view3d_find()

-    if area is not None :
-        for region in area.regions:
-            if region.type == "UI":
-                region.tag_redraw()
+    if area:
+        area.regions[3].tag_redraw()

 def project_to_viewport(region: bpy.types.Region, rv3d: bpy.types.RegionView3D, coords: list, distance: float = 1.0) -> list:
@@ -255,9 +253,10 @@ class Widget(object):
         return True

     def configure_bgl(self):
-        gpu.state.line_width_set(2.0)
-        gpu.state.depth_test_set("LESS")
-        gpu.state.blend_set("ALPHA")
+        bgl.glLineWidth(2.)
+        bgl.glEnable(bgl.GL_DEPTH_TEST)
+        bgl.glEnable(bgl.GL_BLEND)
+        bgl.glEnable(bgl.GL_LINE_SMOOTH)

     def draw(self):
@@ -301,8 +300,7 @@ class UserFrustumWidget(Widget):
     def draw(self):
         location = self.data.get('view_corners')
-        shader = gpu.shader.from_builtin('UNIFORM_COLOR')
-        # 'FLAT_COLOR', 'IMAGE', 'IMAGE_COLOR', 'SMOOTH_COLOR', 'UNIFORM_COLOR', 'POLYLINE_FLAT_COLOR', 'POLYLINE_SMOOTH_COLOR', 'POLYLINE_UNIFORM_COLOR'
+        shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
         positions = [tuple(coord) for coord in location]

         if len(positions) != 7:
@@ -374,7 +372,7 @@ class UserSelectionWidget(Widget):
         vertex_pos += bbox_pos
         vertex_ind += bbox_ind

-        shader = gpu.shader.from_builtin('UNIFORM_COLOR')
+        shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
         batch = batch_for_shader(
             shader,
             'LINES',
@@ -423,7 +421,7 @@ class UserNameWidget(Widget):
         if coords:
             blf.position(0, coords[0], coords[1]+10, 0)
-            blf.size(0, 16)
+            blf.size(0, 16, 72)
             blf.color(0, color[0], color[1], color[2], color[3])
             blf.draw(0, self.username)
@@ -479,7 +477,7 @@ class UserModeWidget(Widget):
         if origin_coord :
             blf.position(0, origin_coord[0]+8, origin_coord[1]-15, 0)
-            blf.size(0, 16)
+            blf.size(0, 16, 72)
             blf.color(0, color[0], color[1], color[2], color[3])
             blf.draw(0, mode_current)
@@ -513,7 +511,7 @@ class SessionStatusWidget(Widget):
         vpos = (self.preferences.presence_hud_vpos*bpy.context.area.height)/100

         blf.position(0, hpos, vpos, 0)
-        blf.size(0, int(text_scale*ui_scale))
+        blf.size(0, int(text_scale*ui_scale), 72)
         blf.color(0, color[0], color[1], color[2], color[3])
         blf.draw(0, state_str)

View File

@@ -32,7 +32,6 @@ from replication.constants import (ADDED, ERROR, FETCHED,
 from replication import __version__
 from replication.interface import session
 from .timers import registry
-from . import icons

 ICONS_PROP_STATES = ['TRIA_DOWN', # ADDED
                      'TRIA_UP', # COMMITED
@@ -110,6 +109,7 @@ class SESSION_PT_settings(bpy.types.Panel):
             layout = self.layout
             settings = get_preferences()

+            from multi_user import icons
             offline_icon = icons.icons_col["session_status_offline"]
             waiting_icon = icons.icons_col["session_status_waiting"]
             online_icon = icons.icons_col["session_status_online"]
@@ -531,7 +531,7 @@ def draw_property(context, parent, property_uuid, level=0):
     have_right_to_modify = (item.owner == settings.username or \
         item.owner == RP_COMMON) and item.state != ERROR

+    from multi_user import icons
     sync_status = icons.icons_col["repository_push"] #TODO: Link all icons to the right sync (push/merge/issue). For issue use "UNLINKED" for icon
     # sync_status = icons.icons_col["repository_merge"]
@@ -727,7 +727,7 @@ class SESSION_UL_network(bpy.types.UIList):
         else:
             split.label(text=server_name)

-        from . import icons
+        from multi_user import icons
         server_status = icons.icons_col["server_offline"]
         if item.is_online:
             server_status = icons.icons_col["server_online"]

View File

@@ -1,149 +0,0 @@
-"""External dependencies loader."""
-
-import contextlib
-import importlib
-from pathlib import Path
-import sys
-import logging
-import sysconfig
-from types import ModuleType
-from typing import Iterator, Iterable
-import zipfile
-
-_my_dir = Path(__file__).parent
-_log = logging.getLogger(__name__)
-_env_folder = Path(__file__).parent.joinpath("venv")
-
-def load_wheel(module_name: str, submodules: Iterable[str]) -> list[ModuleType]:
-    """Loads modules from a wheel file 'module_name*.whl'.
-
-    Loads `module_name`, and if submodules are given, loads
-    `module_name.submodule` for each of the submodules. This allows loading all
-    required modules from the same wheel in one session, ensuring that
-    inter-submodule references are correct.
-
-    Returns the loaded modules, so [module, submodule, submodule, ...].
-    """
-    fname_prefix = _fname_prefix_from_module_name(module_name)
-    wheel = _wheel_filename(fname_prefix)
-
-    loaded_modules: list[ModuleType] = []
-    to_load = [module_name] + [f"{module_name}.{submodule}" for submodule in submodules]
-
-    # Load the module from the wheel file. Keep a backup of sys.path so that it
-    # can be restored later. This should ensure that future import statements
-    # cannot find this wheel file, increasing the separation of dependencies of
-    # this add-on from other add-ons.
-    with _sys_path_mod_backup(wheel):
-        for modname in to_load:
-            try:
-                module = importlib.import_module(modname)
-            except ImportError as ex:
-                raise ImportError(
-                    "Unable to load %r from %s: %s" % (modname, wheel, ex)
-                ) from None
-
-            assert isinstance(module, ModuleType)
-            loaded_modules.append(module)
-            _log.info("Loaded %s from %s", modname, module.__file__)
-
-    assert len(loaded_modules) == len(
-        to_load
-    ), f"expecting to load {len(to_load)} modules, but only have {len(loaded_modules)}: {loaded_modules}"
-
-    return loaded_modules
-
-def load_wheel_global(module_name: str, fname_prefix: str = "", match_platform: bool = False) -> ModuleType:
-    """Loads a wheel from 'fname_prefix*.whl', unless the named module can be imported.
-
-    This allows us to use system-installed packages before falling back to the shipped wheels.
-    This is useful for development, less so for deployment.
-
-    If `fname_prefix` is the empty string, it will use the first package from `module_name`.
-    In other words, `module_name="pkg.subpkg"` will result in `fname_prefix="pkg"`.
-    """
-    if not fname_prefix:
-        fname_prefix = _fname_prefix_from_module_name(module_name)
-
-    try:
-        module = importlib.import_module(module_name)
-    except ImportError as ex:
-        _log.debug("Unable to import %s directly, will try wheel: %s", module_name, ex)
-    else:
-        _log.debug(
-            "Was able to load %s from %s, no need to load wheel %s",
-            module_name,
-            module.__file__,
-            fname_prefix,
-        )
-        return module
-
-    wheel = _wheel_filename(fname_prefix, match_platform=match_platform)
-    wheel_filepath = str(wheel)
-    wheel_archive = zipfile.ZipFile(wheel_filepath)
-    wheel_archive.extractall(_env_folder)
-
-    if str(_env_folder) not in sys.path:
-        sys.path.insert(0, str(_env_folder))
-
-    try:
-        module = importlib.import_module(module_name)
-    except ImportError as ex:
-        raise ImportError(
-            "Unable to load %r from %s: %s" % (module_name, wheel, ex)
-        ) from None
-
-    _log.debug("Globally loaded %s from %s", module_name, module.__file__)
-    return module
-
-@contextlib.contextmanager
-def _sys_path_mod_backup(wheel_file: Path) -> Iterator[None]:
-    """Temporarily inserts a wheel onto sys.path.
-
-    When the context exits, it restores sys.path and sys.modules, so that
-    anything that was imported within the context remains unimportable by other
-    modules.
-    """
-    old_syspath = sys.path[:]
-    old_sysmod = sys.modules.copy()
-
-    try:
-        sys.path.insert(0, str(wheel_file))
-        yield
-    finally:
-        # Restore without assigning a new list instance. That way references
-        # held by other code will stay valid.
-        sys.path[:] = old_syspath
-
-        sys.modules.clear()
-        sys.modules.update(old_sysmod)
-
-def _wheel_filename(fname_prefix: str, match_platform: bool = False) -> Path:
-    if match_platform:
-        platform_tag = sysconfig.get_platform().replace('-','_').replace('.','_')
-        path_pattern = f"{fname_prefix}*{platform_tag}.whl"
-    else:
-        path_pattern = f"{fname_prefix}*.whl"
-    wheels: list[Path] = list(_my_dir.glob(path_pattern))
-    if not wheels:
-        raise RuntimeError("Unable to find wheel at %r" % path_pattern)
-
-    # If there are multiple wheels that match, load the last-modified one.
-    # Alphabetical sorting isn't going to cut it since BAT 1.10 was released.
-    def modtime(filepath: Path) -> float:
-        return filepath.stat().st_mtime
-
-    wheels.sort(key=modtime)
-    return wheels[-1]
-
-def _fname_prefix_from_module_name(module_name: str) -> str:
-    return module_name.split(".", 1)[0]

View File

@@ -22,4 +22,4 @@ RUN pip install replication==$replication_version
 # Run the server with parameters
 ENTRYPOINT ["/bin/sh", "-c"]
-CMD ["replication.server -apwd ${password} -spwd '' -p ${port} -t ${timeout} -l ${log_level} -lf ${log_file}"]
+CMD ["replication.serve -apwd ${password} -spwd '' -p ${port} -t ${timeout} -l ${log_level} -lf ${log_file}"]
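
For reference, ${password}, ${port}, ${timeout}, ${log_level} and ${log_file} in the CMD above are container environment variables. A minimal sketch of starting the published server image, assuming the tag pushed by the deploy job above, the 5555:5558 port range mentioned in the docs, and that the image declares ENV defaults for any variables left unset:

    docker run -d \
      -p 5555-5558:5555-5558 \
      -e port=5555 \
      -e password=admin \
      registry.gitlab.com/slumber/multi-user/multi-user-server:latest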