Compare commits
41 Commits
| Author | SHA1 | Date |
| --- | --- | --- |
| | 7bc45f8b58 | |
| | 7e6fc3ef3a | |
| | acc74b9bf9 | |
| | 62904e8106 | |
| | b58871470c | |
| | 8935c94101 | |
| | 12355b6457 | |
| | 74ad4e5e1f | |
| | 2a88c3e5ac | |
| | 4c42a5be92 | |
| | 757ee7015a | |
| | 15d66579c6 | |
| | 4128a47b88 | |
| | 689a565c75 | |
| | c5f1bf1176 | |
| | 4dc6781c94 | |
| | 5311e55208 | |
| | 4cb64e5e77 | |
| | ff67b581b1 | |
| | f7bec3fc08 | |
| | 5e929db3ee | |
| | 629f2e1cdb | |
| | b8fed806ed | |
| | 8190846b59 | |
| | c228b6ad7f | |
| | 48651ce890 | |
| | 26847cf459 | |
| | bfa6991c00 | |
| | 0c60c86775 | |
| | 70b6f9bcfa | |
| | 8d176b55e4 | |
| | 4c0356e724 | |
| | 6b04d1d8d6 | |
| | edfcdd8867 | |
| | bdd6599614 | |
| | 6efd1321ce | |
| | 047bd47048 | |
| | d32cbb7b30 | |
| | adabce3822 | |
| | 62f52db5b2 | |
| | 745f45b682 | |
```diff
@@ -1,13 +1,7 @@
 stages:
   - test
   - build
   - deploy
   - doc

 include:
   - local: .gitlab/ci/test.gitlab-ci.yml
   - local: .gitlab/ci/build.gitlab-ci.yml
   - local: .gitlab/ci/deploy.gitlab-ci.yml
   - local: .gitlab/ci/doc.gitlab-ci.yml
```
```diff
@@ -1,6 +1,5 @@
 build:
   stage: build
-  needs: ["test"]
   image: debian:stable-slim
   script:
     - rm -rf tests .git .gitignore script
```
```diff
@@ -1,7 +0,0 @@
-test:
-  stage: test
-  image: slumber/blender-addon-testing:latest
-  script:
-    - python3 scripts/test_addon.py
-  variables:
-    GIT_SUBMODULE_STRATEGY: recursive
```
CHANGELOG.md (+31 lines)

```diff
@@ -240,3 +240,34 @@ All notable changes to this project will be documented in this file.
 - Skin vertex radius synchronization (@kromar)
 - Sequencer audio strip synchronization
 - Crash with empty after a reconnection
+
+## [0.5.1] - 2022-02-10
+
+### Fixed
+
+- Auto updater breaks dependency auto installer
+- Auto updater update from tag
+
+## [0.5.2] - 2022-02-18
+
+### Fixed
+
+- Objects not selectable after user leaves session
+- Geometry nodes attribute toogle doesn't sync
+
+## [0.5.3] - 2022-03-11
+
+### Changed
+
+- Snapshots logs
+### Fixed
+
+- Server crashing during snapshots
+- Blender 3.1 numpy loading error during early connection process
+- Server docker arguments
+
+## [0.5.5] - 2022-06-12
+
+### Fixed
+
+- Numpy mesh serialization error
```
```diff
@@ -16,12 +16,12 @@ import sys

 # -- Project information -----------------------------------------------------

-project = 'Multi-User 0.5.0 Documentation'
+project = 'Multi-User 0.5.x Documentation'
 copyright = '2020, Swann Martinez'
 author = 'Swann Martinez, Poochy, Fabian'

 # The full version, including alpha/beta/rc tags
-version_release = '0.5.1-develop'
+version_release = '0.5.5'


 # -- General configuration ---------------------------------------------------
```
```diff
@@ -19,7 +19,7 @@
 bl_info = {
     "name": "Multi-User",
     "author": "Swann Martinez",
-    "version": (0, 5, 0),
+    "version": (0, 6, 0),
     "description": "Enable real-time collaborative workflow inside blender",
     "blender": (2, 82, 0),
     "location": "3D View > Sidebar > Multi-User tab",
```
```diff
@@ -1015,16 +1015,18 @@ class Singleton_updater(object):
         for path, dirs, files in os.walk(base):
             # prune ie skip updater folder
             dirs[:] = [d for d in dirs if os.path.join(path,d) not in [self._updater_path]]
+
+            for directory in dirs:
+                shutil.rmtree(os.path.join(path,directory))

             for file in files:
-                for ptrn in self.remove_pre_update_patterns:
-                    if fnmatch.filter([file],ptrn):
-                        try:
-                            fl = os.path.join(path,file)
-                            os.remove(fl)
-                            if self._verbose: print("Pre-removed file "+file)
-                        except OSError:
-                            print("Failed to pre-remove "+file)
-                            self.print_trace()
+                try:
+                    fl = os.path.join(path,file)
+                    os.remove(fl)
+                    if self._verbose: print("Pre-removed file "+file)
+                except OSError:
+                    print("Failed to pre-remove "+file)
+                    self.print_trace()

         # Walk through the temp addon sub folder for replacements
         # this implements the overwrite rules, which apply after
```
```diff
@@ -1701,7 +1703,7 @@ class GitlabEngine(object):
     def parse_tags(self, response, updater):
         if response == None:
             return []
-        return [{"name": tag["name"], "zipball_url": self.get_zip_url(tag["commit"]["id"], updater)} for tag in response]
+        return [{"name": tag["name"], "zipball_url": f"https://gitlab.com/slumber/multi-user/-/jobs/artifacts/{tag['name']}/download?job=build"} for tag in response]


 # -----------------------------------------------------------------------------
```
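The updater now points release downloads at GitLab job artifacts instead of source zipballs. A minimal sketch of the URL scheme used above; `artifact_zip_url` and the example tag name are illustrative, not part of the codebase:

```python
def artifact_zip_url(tag_name: str, job: str = "build") -> str:
    # Same pattern as the f-string in parse_tags above: the zip produced by the
    # named CI job for the given tag/ref.
    return (
        "https://gitlab.com/slumber/multi-user/-/jobs/artifacts/"
        f"{tag_name}/download?job={job}"
    )

# e.g. artifact_zip_url("v0.6.0")  (hypothetical tag name)
# -> "https://gitlab.com/slumber/multi-user/-/jobs/artifacts/v0.6.0/download?job=build"
```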
```diff
@@ -267,7 +267,7 @@ class addon_updater_update_now(bpy.types.Operator):
     clean_install: bpy.props.BoolProperty(
         name="Clean install",
         description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
-        default=False,
+        default=True,
         options={'HIDDEN'}
     )
```
```diff
@@ -245,9 +245,8 @@ def dump_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict:
         'type': type(node_tree).__name__
     }

-    for socket_id in ['inputs', 'outputs']:
-        socket_collection = getattr(node_tree, socket_id)
-        node_tree_data[socket_id] = dump_node_tree_sockets(socket_collection)
+    sockets = [item for item in node_tree.interface.items_tree if item.item_type == 'SOCKET']
+    node_tree_data['interface'] = dump_node_tree_sockets(sockets)

     return node_tree_data
```
```diff
@@ -263,18 +262,18 @@ def dump_node_tree_sockets(sockets: bpy.types.Collection) -> dict:
     """
     sockets_data = []
     for socket in sockets:
-        try:
-            socket_uuid = socket['uuid']
-        except Exception:
-            socket_uuid = str(uuid4())
-            socket['uuid'] = socket_uuid
-
-        sockets_data.append((socket.name, socket.bl_socket_idname, socket_uuid))
+        sockets_data.append(
+            (
+                socket.name,
+                socket.socket_type,
+                socket.in_out
+            )
+        )

     return sockets_data


-def load_node_tree_sockets(sockets: bpy.types.Collection,
+def load_node_tree_sockets(interface: bpy.types.NodeTreeInterface,
                            sockets_data: dict):
     """ load sockets of a shader_node_tree
```
```diff
@@ -285,20 +284,17 @@ def load_node_tree_sockets(sockets: bpy.types.Collection,
     :arg socket_data: dumped socket data
     :type socket_data: dict
     """
-    # Check for removed sockets
-    for socket in sockets:
-        if not [s for s in sockets_data if 'uuid' in socket and socket['uuid'] == s[2]]:
-            sockets.remove(socket)
+    # Remove old sockets
+    interface.clear()

-    # Check for new sockets
-    for idx, socket_data in enumerate(sockets_data):
-        try:
-            checked_socket = sockets[idx]
-            if checked_socket.name != socket_data[0]:
-                checked_socket.name = socket_data[0]
-        except Exception:
-            s = sockets.new(socket_data[1], socket_data[0])
-            s['uuid'] = socket_data[2]
+    for name, socket_type, in_out in sockets_data:
+        socket = interface.new_socket(
+            name,
+            in_out=in_out,
+            socket_type=socket_type
+        )


 def load_node_tree(node_tree_data: dict, target_node_tree: bpy.types.ShaderNodeTree) -> dict:
```
```diff
@@ -315,13 +311,8 @@ def load_node_tree(node_tree_data: dict, target_node_tree: bpy.types.ShaderNodeT
     if not target_node_tree.is_property_readonly('name'):
         target_node_tree.name = node_tree_data['name']

-    if 'inputs' in node_tree_data:
-        socket_collection = getattr(target_node_tree, 'inputs')
-        load_node_tree_sockets(socket_collection, node_tree_data['inputs'])
-
-    if 'outputs' in node_tree_data:
-        socket_collection = getattr(target_node_tree, 'outputs')
-        load_node_tree_sockets(socket_collection, node_tree_data['outputs'])
+    if 'interface' in node_tree_data:
+        load_node_tree_sockets(target_node_tree.interface, node_tree_data['interface'])

     # Load nodes
     for node in node_tree_data["nodes"]:
```
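The hunks above move socket (de)serialization from the removed `node_tree.inputs` / `node_tree.outputs` collections to the Blender 4.x `node_tree.interface` API. A minimal standalone sketch of that API; `copy_group_interface` is a hypothetical helper, not code from the add-on:

```python
import bpy

def copy_group_interface(src: bpy.types.NodeTree, dst: bpy.types.NodeTree) -> None:
    """Recreate dst's group sockets from src using the 4.x interface API."""
    dst.interface.clear()  # drop all existing sockets (and panels)
    for item in src.interface.items_tree:
        if item.item_type != 'SOCKET':  # skip panels, keep only sockets
            continue
        dst.interface.new_socket(
            item.name,
            in_out=item.in_out,            # 'INPUT' or 'OUTPUT'
            socket_type=item.socket_type,  # e.g. 'NodeSocketFloat'
        )
```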
```diff
@@ -37,8 +37,8 @@ VERTICE = ['co']

 EDGE = [
     'vertices',
-    'crease',
-    'bevel_weight',
+    # 'crease',
+    # 'bevel_weight',
     'use_seam',
     'use_edge_sharp',
 ]
```
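Commenting out `crease` and `bevel_weight` reflects that these are no longer plain `MeshEdge` properties in newer Blender releases; they live in the generic attribute system instead. A hedged sketch of reading them that way, assuming the Blender 4.x attribute name `crease_edge` (the layer only exists once a crease has been set):

```python
import bpy

def edge_creases(mesh: bpy.types.Mesh) -> list[float]:
    # The attribute is absent until a crease has been painted, so fall back to zeros.
    attr = mesh.attributes.get("crease_edge")
    if attr is None:
        return [0.0] * len(mesh.edges)
    return [item.value for item in attr.data]
```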
```diff
@@ -172,7 +172,7 @@ def load_modifier_geometry_node_props(dumped_modifier: dict, target_modifier: bp
     for input_index, inpt in enumerate(get_node_group_properties_identifiers(target_modifier.node_group)):
         dumped_value, dumped_type = dumped_modifier['props'][input_index]
         input_value = target_modifier[inpt[0]]
-        if dumped_type in ['INT', 'VALUE', 'STR']:
+        if dumped_type in ['INT', 'VALUE', 'STR', 'BOOL']:
             logging.info(f"{inpt[0]}/{dumped_value}")
             target_modifier[inpt[0]] = dumped_value
         elif dumped_type in ['RGBA', 'VECTOR']:
```
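Booleans join the branch that writes the value straight onto the modifier because geometry-nodes inputs are stored as ID properties keyed by the socket identifier. A small illustrative helper (hypothetical, with an assumed identifier format):

```python
import bpy

def set_geometry_nodes_input(modifier: bpy.types.NodesModifier, identifier: str, value) -> None:
    # Ints, floats, strings and booleans can all be assigned the same way,
    # e.g. identifier "Socket_2" (Blender 4.x) or "Input_2" (3.x).
    modifier[identifier] = value
    modifier.id_data.update_tag()  # request re-evaluation of the owning object
```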
```diff
@@ -26,7 +26,8 @@ import numpy as np
 BPY_TO_NUMPY_TYPES = {
     'FLOAT': np.float32,
     'INT': np.int32,
-    'BOOL': np.bool}
+    'BOOL': bool,
+    'BOOLEAN': bool}

 PRIMITIVE_TYPES = ['FLOAT', 'INT', 'BOOLEAN']
```
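The `np.bool` alias was deprecated in NumPy 1.20 and removed in 1.24, which is why the mapping now uses the builtin `bool` and gains a `'BOOLEAN'` key matching Blender's attribute type name. A quick illustration:

```python
import numpy as np

flags = np.zeros(4, dtype=bool)       # portable across NumPy versions
# np.zeros(4, dtype=np.bool)          # AttributeError on NumPy >= 1.24
print(flags.dtype)                    # bool
```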
```diff
@@ -36,8 +36,6 @@ REPLICATION_DEPENDENCIES = {
 LIBS = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
 REPLICATION = os.path.join(LIBS,"replication")

-PYTHON_PATH = None
-SUBPROCESS_DIR = None

 rtypes = []
```
```diff
@@ -50,13 +48,13 @@ def module_can_be_imported(name: str) -> bool:
         return False


-def install_pip():
+def install_pip(python_path):
     # pip can not necessarily be imported into Blender after this
-    subprocess.run([str(PYTHON_PATH), "-m", "ensurepip"])
+    subprocess.run([str(python_path), "-m", "ensurepip"])


-def install_package(name: str, install_dir: str):
-    logging.info(f"installing {name} version...")
+def install_requirements(python_path:str, module_requirement: str, install_dir: str):
+    logging.info(f"Installing {module_requirement} dependencies in {install_dir}")
     env = os.environ
     if "PIP_REQUIRE_VIRTUALENV" in env:
         # PIP_REQUIRE_VIRTUALENV is an env var to ensure pip cannot install packages outside a virtual env
```
```diff
@@ -65,23 +63,7 @@ def install_package(name: str, install_dir: str):
         # env var for the subprocess.
         env = os.environ.copy()
         del env["PIP_REQUIRE_VIRTUALENV"]
-    subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}", "-t", install_dir], env=env)
-
-    if name in sys.modules:
-        del sys.modules[name]
-
-
-def check_package_version(name: str, required_version: str):
-    logging.info(f"Checking {name} version...")
-    out = subprocess.run([str(PYTHON_PATH), "-m", "pip", "show", name], capture_output=True)
-
-    version = VERSION_EXPR.search(out.stdout.decode())
-    if version and version.group() == required_version:
-        logging.info(f"{name} is up to date")
-        return True
-    else:
-        logging.info(f"{name} need an update")
-        return False
+    subprocess.run([str(python_path), "-m", "pip", "install", "-r", f"{install_dir}/{module_requirement}/requirements.txt", "-t", install_dir], env=env)


 def get_ip():
```
```diff
@@ -117,21 +99,7 @@ def remove_paths(paths: list):
         if path in sys.path:
             logging.debug(f"Removing {path} dir from the path.")
             sys.path.remove(path)


-def install_modules(dependencies: list, python_path: str, install_dir: str):
-    global PYTHON_PATH, SUBPROCESS_DIR
-
-    PYTHON_PATH = Path(python_path)
-    SUBPROCESS_DIR = PYTHON_PATH.parent
-
-    if not module_can_be_imported("pip"):
-        install_pip()
-
-    for package_name in dependencies:
-        if not module_can_be_imported(package_name):
-            install_package(package_name, install_dir=install_dir)
-            module_can_be_imported(package_name)
-

 def register():
     if bpy.app.version >= (2,91,0):
```
```diff
@@ -139,12 +107,21 @@ def register():
     else:
         python_binary_path = bpy.app.binary_path_python

+    python_path = Path(python_binary_path)
+
     for module_name in list(sys.modules.keys()):
         if 'replication' in module_name:
             del sys.modules[module_name]

     setup_paths([LIBS, REPLICATION])
-    install_modules(REPLICATION_DEPENDENCIES, python_binary_path, install_dir=LIBS)
+
+    if not module_can_be_imported("pip"):
+        install_pip(python_path)
+
+    deps_not_installed = [package_name for package_name in REPLICATION_DEPENDENCIES if not module_can_be_imported(package_name)]
+    if any(deps_not_installed):
+        install_requirements(python_path, module_requirement='replication', install_dir=LIBS)


 def unregister():
     remove_paths([REPLICATION, LIBS])
```
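The new registration flow bootstraps dependencies only when they cannot already be imported. The helper it relies on is only partially visible in this diff; a minimal sketch of what such a check can look like (an assumption, not the add-on's exact implementation):

```python
import importlib

def module_can_be_imported(name: str) -> bool:
    # Try a real import and report success; an ImportError means the
    # dependency still has to be installed into the libs folder.
    try:
        importlib.import_module(name)
        return True
    except ImportError:
        return False
```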
Submodule multi_user/libs/replication updated: 90b928c53c...3e9eb4f5c0
```diff
@@ -44,13 +44,6 @@ DEFAULT_PRESETS = {
         "admin_password": "admin",
         "server_password": ""
     },
-    "public session" : {
-        "server_name": "public session",
-        "ip": "51.75.71.183",
-        "port": 5555,
-        "admin_password": "",
-        "server_password": ""
-    },
 }

 def randomColor():
```
```diff
@@ -253,11 +253,9 @@ class Widget(object):
         return True

     def configure_bgl(self):
-        bgl.glLineWidth(2.)
-        bgl.glEnable(bgl.GL_DEPTH_TEST)
-        bgl.glEnable(bgl.GL_BLEND)
-        bgl.glEnable(bgl.GL_LINE_SMOOTH)
-
+        gpu.state.blend_set('ALPHA')
+        gpu.state.depth_test_set('LESS_EQUAL')
+        gpu.state.line_width_set(2.0)

     def draw(self):
         """How to draw the widget
```
```diff
@@ -300,7 +298,7 @@ class UserFrustumWidget(Widget):

     def draw(self):
         location = self.data.get('view_corners')
-        shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
+        shader = gpu.shader.from_builtin('UNIFORM_COLOR')
         positions = [tuple(coord) for coord in location]

         if len(positions) != 7:
```
```diff
@@ -372,7 +370,7 @@ class UserSelectionWidget(Widget):
         vertex_pos += bbox_pos
         vertex_ind += bbox_ind

-        shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
+        shader = gpu.shader.from_builtin('UNIFORM_COLOR')
         batch = batch_for_shader(
             shader,
             'LINES',
```
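These widget changes replace the legacy `bgl` state calls with the `gpu.state` API and switch the built-in shader name from `'3D_UNIFORM_COLOR'` to the unprefixed `'UNIFORM_COLOR'` used by newer Blender releases. A self-contained sketch of the drawing pattern the widgets now follow; the coordinates, indices and color are placeholders:

```python
import gpu
from gpu_extras.batch import batch_for_shader

def draw_lines(coords, indices, color=(1.0, 1.0, 0.0, 1.0)):
    """Typical draw-handler body using gpu.state instead of bgl."""
    gpu.state.blend_set('ALPHA')
    gpu.state.depth_test_set('LESS_EQUAL')
    gpu.state.line_width_set(2.0)

    shader = gpu.shader.from_builtin('UNIFORM_COLOR')  # was '3D_UNIFORM_COLOR'
    batch = batch_for_shader(shader, 'LINES', {"pos": coords}, indices=indices)
    shader.uniform_float("color", color)
    batch.draw(shader)
```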
```diff
@@ -421,7 +419,7 @@ class UserNameWidget(Widget):

         if coords:
             blf.position(0, coords[0], coords[1]+10, 0)
-            blf.size(0, 16, 72)
+            blf.size(0, 16)
             blf.color(0, color[0], color[1], color[2], color[3])
             blf.draw(0, self.username)
```
```diff
@@ -511,7 +509,7 @@ class SessionStatusWidget(Widget):
         vpos = (self.preferences.presence_hud_vpos*bpy.context.area.height)/100

         blf.position(0, hpos, vpos, 0)
-        blf.size(0, int(text_scale*ui_scale), 72)
+        blf.size(0, int(text_scale*ui_scale))
         blf.color(0, color[0], color[1], color[2], color[3])
         blf.draw(0, state_str)
```
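`blf.size()` drops the trailing dpi argument in recent Blender versions, which is all these two hunks change. A tiny sketch of the text-drawing calls as used by the HUD widgets; the position, size and text are placeholders:

```python
import blf

def draw_label(text: str, x: float, y: float, size: int = 16):
    font_id = 0                      # default interface font
    blf.position(font_id, x, y, 0)
    blf.size(font_id, size)          # no dpi argument anymore
    blf.color(font_id, 1.0, 1.0, 1.0, 1.0)
    blf.draw(font_id, text)
```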
```diff
@@ -22,4 +22,4 @@ RUN pip install replication==$replication_version

 # Run the server with parameters
 ENTRYPOINT ["/bin/sh", "-c"]
-CMD ["replication.serve -apwd ${password} -spwd '' -p ${port} -t ${timeout} -l ${log_level} -lf ${log_file}"]
+CMD ["replication.server -apwd ${password} -spwd '' -p ${port} -t ${timeout} -l ${log_level} -lf ${log_file}"]
```