diff --git a/.gitignore b/.gitignore
index 0cef51c..c721bc9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,4 +14,5 @@ _build
 
 # ignore generated zip generated from blender_addon_tester
 *.zip
-libs
\ No newline at end of file
+libs
+venv
\ No newline at end of file
diff --git a/.gitmodules b/.gitmodules
index 1bca499..e69de29 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,3 +0,0 @@
-[submodule "multi_user/libs/replication"]
-	path = multi_user/libs/replication
-	url = https://gitlab.com/slumber/replication.git
diff --git a/multi_user/__init__.py b/multi_user/__init__.py
index 861ad43..027d27f 100644
--- a/multi_user/__init__.py
+++ b/multi_user/__init__.py
@@ -43,6 +43,8 @@ from bpy.app.handlers import persistent
 
 from . import environment
 
+environment.preload_modules()
+
 module_error_msg = "Insufficient rights to install the multi-user \
     dependencies, aunch blender with administrator rights."
 
diff --git a/multi_user/bl_types/bl_material.py b/multi_user/bl_types/bl_material.py
index f016277..bb0f679 100644
--- a/multi_user/bl_types/bl_material.py
+++ b/multi_user/bl_types/bl_material.py
@@ -235,7 +235,7 @@ def dump_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict:
     """ Dump a shader node_tree to a dict including links and nodes
 
     :arg node_tree: dumped shader node tree
-    :type node_tree: bpy.types.ShaderNodeTree
+    :type node_tree: bpy.types.ShaderNodeTree`
     :return: dict
     """
     node_tree_data = {
@@ -246,8 +246,9 @@
     }
 
     for socket_id in ['inputs', 'outputs']:
-        socket_collection = getattr(node_tree, socket_id)
-        node_tree_data[socket_id] = dump_node_tree_sockets(socket_collection)
+        if hasattr(node_tree, socket_id):
+            socket_collection = getattr(node_tree, socket_id)
+            node_tree_data[socket_id] = dump_node_tree_sockets(socket_collection)
 
     return node_tree_data
 
diff --git a/multi_user/bl_types/bl_mesh.py b/multi_user/bl_types/bl_mesh.py
index 2b7e31a..2f7376b 100644
--- a/multi_user/bl_types/bl_mesh.py
+++ b/multi_user/bl_types/bl_mesh.py
@@ -37,8 +37,6 @@ VERTICE = ['co']
 
 EDGE = [
     'vertices',
-    'crease',
-    'bevel_weight',
     'use_seam',
     'use_edge_sharp',
 ]
@@ -150,6 +148,10 @@
         data["egdes_count"] = len(mesh.edges)
        data["edges"] = np_dump_collection(mesh.edges, EDGE)
 
+        # TODO 4.0: use bevel_weight_vertex, bevel_weight_edge, crease_edge, crease_vert
+        # https://developer.blender.org/docs/release_notes/4.0/python_api/
+        # ex: C.object.data.attributes['crease_edge'].data[1].value = 0.5
+
         # POLYGONS
         data["poly_count"] = len(mesh.polygons)
         data["polygons"] = np_dump_collection(mesh.polygons, POLYGON)
diff --git a/multi_user/environment.py b/multi_user/environment.py
index 34eb71d..c6b672f 100644
--- a/multi_user/environment.py
+++ b/multi_user/environment.py
@@ -29,13 +29,6 @@ import bpy
 VERSION_EXPR = re.compile('\d+.\d+.\d+')
 DEFAULT_CACHE_DIR = os.path.join(
     os.path.dirname(os.path.abspath(__file__)), "cache")
-REPLICATION_DEPENDENCIES = {
-    "zmq",
-    "deepdiff"
-}
-LIBS = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
-REPLICATION = os.path.join(LIBS,"replication")
-
 rtypes = []
 
 
@@ -53,17 +46,14 @@ def install_pip(python_path):
     subprocess.run([str(python_path), "-m", "ensurepip"])
 
 
-def install_requirements(python_path:str, module_requirement: str, install_dir: str):
-    logging.info(f"Installing {module_requirement} dependencies in {install_dir}")
-    env = os.environ
-    if "PIP_REQUIRE_VIRTUALENV" in env:
-        # PIP_REQUIRE_VIRTUALENV is an env var to ensure pip cannot install packages outside a virtual env
-        # https://docs.python-guide.org/dev/pip-virtualenv/
-        # But since Blender's pip is outside of a virtual env, it can block our packages installation, so we unset the
-        # env var for the subprocess.
-        env = os.environ.copy()
-        del env["PIP_REQUIRE_VIRTUALENV"]
-    subprocess.run([str(python_path), "-m", "pip", "install", "-r", f"{install_dir}/{module_requirement}/requirements.txt", "-t", install_dir], env=env)
+def preload_modules():
+    from . import wheels
+
+    wheels.load_wheel_global("ordered_set", "ordered_set")
+    wheels.load_wheel_global("deepdiff", "deepdiff")
+    wheels.load_wheel_global("replication", "replication")
+    wheels.load_wheel_global("zmq", "pyzmq")
+
 
 
 def get_ip():
@@ -102,26 +92,7 @@ def remove_paths(paths: list):
 
 
 def register():
-    if bpy.app.version >= (2,91,0):
-        python_binary_path = sys.executable
-    else:
-        python_binary_path = bpy.app.binary_path_python
-
-    python_path = Path(python_binary_path)
-
-    for module_name in list(sys.modules.keys()):
-        if 'replication' in module_name:
-            del sys.modules[module_name]
-
-    setup_paths([LIBS, REPLICATION])
-
-    if not module_can_be_imported("pip"):
-        install_pip(python_path)
-
-    deps_not_installed = [package_name for package_name in REPLICATION_DEPENDENCIES if not module_can_be_imported(package_name)]
-    if any(deps_not_installed):
-        install_requirements(python_path, module_requirement='replication', install_dir=LIBS)
-
+    check_dir(DEFAULT_CACHE_DIR)
 
 def unregister():
-    remove_paths([REPLICATION, LIBS])
\ No newline at end of file
+    pass
\ No newline at end of file
diff --git a/multi_user/libs/replication b/multi_user/libs/replication
deleted file mode 160000
index 3e9eb4f..0000000
--- a/multi_user/libs/replication
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 3e9eb4f5c052177c2fe1e16ff5d1f042456c30d0
diff --git a/multi_user/operators.py b/multi_user/operators.py
index 9dee28a..998e15f 100644
--- a/multi_user/operators.py
+++ b/multi_user/operators.py
@@ -16,27 +16,20 @@
 # ##### END GPL LICENSE BLOCK #####
 
 
-import asyncio
-import copy
+
 import gzip
 import logging
-from multi_user.preferences import ServerPreset
+
 import os
-import queue
-import random
-import shutil
-import string
 import sys
-import time
+
 import traceback
 from uuid import uuid4
 from datetime import datetime
-from operator import itemgetter
+
 from pathlib import Path
 from queue import Queue
-from time import gmtime, strftime
-from bpy.props import FloatProperty
 
 import bmesh
 
 try:
@@ -46,15 +39,11 @@ except ImportError:
 
 import bpy
 import mathutils
-from bpy.app.handlers import persistent
+
 from bpy_extras.io_utils import ExportHelper, ImportHelper
 from replication import porcelain
-from replication.constants import (COMMITED, FETCHED, RP_COMMON, STATE_ACTIVE,
-                                   STATE_INITIAL, STATE_SYNCING, UP)
-from replication.exception import ContextError, NonAuthorizedOperationError
+from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE)
 from replication.interface import session
-from replication.objects import Node
-from replication.protocol import DataTranslationProtocol
 from replication.repository import Repository
 
 from . import bl_types, environment, shared_data, timers, ui, utils
diff --git a/multi_user/presence.py b/multi_user/presence.py
index 2da5096..667ef2e 100644
--- a/multi_user/presence.py
+++ b/multi_user/presence.py
@@ -253,10 +253,9 @@ class Widget(object):
         return True
 
     def configure_bgl(self):
-        bgl.glLineWidth(2.)
-        bgl.glEnable(bgl.GL_DEPTH_TEST)
-        bgl.glEnable(bgl.GL_BLEND)
-        bgl.glEnable(bgl.GL_LINE_SMOOTH)
+        gpu.state.line_width_set(2.0)
+        gpu.state.depth_test_set("LESS")
+        gpu.state.blend_set("ALPHA")
 
 
     def draw(self):
@@ -300,7 +299,8 @@ def draw(self):
         location = self.data.get('view_corners')
 
 
-        shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
+        shader = gpu.shader.from_builtin('UNIFORM_COLOR')
+        # 'FLAT_COLOR', 'IMAGE', 'IMAGE_COLOR', 'SMOOTH_COLOR', 'UNIFORM_COLOR', 'POLYLINE_FLAT_COLOR', 'POLYLINE_SMOOTH_COLOR', 'POLYLINE_UNIFORM_COLOR'
         positions = [tuple(coord) for coord in location]
 
         if len(positions) != 7:
@@ -372,7 +372,7 @@ class UserSelectionWidget(Widget):
             vertex_pos += bbox_pos
             vertex_ind += bbox_ind
 
-        shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
+        shader = gpu.shader.from_builtin('UNIFORM_COLOR')
         batch = batch_for_shader(
             shader,
             'LINES',
@@ -421,7 +421,7 @@ class UserNameWidget(Widget):
 
         if coords:
             blf.position(0, coords[0], coords[1]+10, 0)
-            blf.size(0, 16, 72)
+            blf.size(0, 16)
             blf.color(0, color[0], color[1], color[2], color[3])
             blf.draw(0, self.username)
 
@@ -477,7 +477,7 @@ class UserModeWidget(Widget):
 
         if origin_coord :
             blf.position(0, origin_coord[0]+8, origin_coord[1]-15, 0)
-            blf.size(0, 16, 72)
+            blf.size(0, 16)
             blf.color(0, color[0], color[1], color[2], color[3])
             blf.draw(0, mode_current)
 
@@ -511,7 +511,7 @@ class SessionStatusWidget(Widget):
         vpos = (self.preferences.presence_hud_vpos*bpy.context.area.height)/100
 
         blf.position(0, hpos, vpos, 0)
-        blf.size(0, int(text_scale*ui_scale), 72)
+        blf.size(0, int(text_scale*ui_scale))
         blf.color(0, color[0], color[1], color[2], color[3])
         blf.draw(0, state_str)
 
diff --git a/multi_user/ui.py b/multi_user/ui.py
index 6161905..d333b94 100644
--- a/multi_user/ui.py
+++ b/multi_user/ui.py
@@ -32,6 +32,7 @@ from replication.constants import (ADDED, ERROR, FETCHED,
 from replication import __version__
 from replication.interface import session
 from .timers import registry
+from . import icons
 
 ICONS_PROP_STATES = ['TRIA_DOWN', # ADDED
                     'TRIA_UP',  # COMMITED
@@ -109,7 +110,6 @@ class SESSION_PT_settings(bpy.types.Panel):
         layout = self.layout
         settings = get_preferences()
 
-        from multi_user import icons
         offline_icon = icons.icons_col["session_status_offline"]
         waiting_icon = icons.icons_col["session_status_waiting"]
         online_icon = icons.icons_col["session_status_online"]
@@ -531,7 +531,7 @@ def draw_property(context, parent, property_uuid, level=0):
     have_right_to_modify = (item.owner == settings.username or \
         item.owner == RP_COMMON) and item.state != ERROR
 
-    from multi_user import icons
+
     sync_status = icons.icons_col["repository_push"]
     #TODO: Link all icons to the right sync (push/merge/issue). For issue use "UNLINKED" for icon
     # sync_status = icons.icons_col["repository_merge"]
@@ -727,7 +727,7 @@ class SESSION_UL_network(bpy.types.UIList):
             else:
                 split.label(text=server_name)
 
-            from multi_user import icons
+            from . import icons
             server_status = icons.icons_col["server_offline"]
             if item.is_online:
                 server_status = icons.icons_col["server_online"]
diff --git a/multi_user/wheels/__init__.py b/multi_user/wheels/__init__.py
new file mode 100644
index 0000000..2aa67c9
--- /dev/null
+++ b/multi_user/wheels/__init__.py
@@ -0,0 +1,142 @@
+"""External dependencies loader."""
+
+import contextlib
+import importlib
+from pathlib import Path
+import sys
+import logging
+from types import ModuleType
+from typing import Iterator, Iterable
+
+_my_dir = Path(__file__).parent
+_log = logging.getLogger(__name__)
+_env_folder = Path(__file__).parent.joinpath("venv")
+
+def load_wheel(module_name: str, submodules: Iterable[str]) -> list[ModuleType]:
+    """Loads modules from a wheel file 'module_name*.whl'.
+
+    Loads `module_name`, and if submodules are given, loads
+    `module_name.submodule` for each of the submodules. This allows loading all
+    required modules from the same wheel in one session, ensuring that
+    inter-submodule references are correct.
+
+    Returns the loaded modules, so [module, submodule, submodule, ...].
+    """
+
+    fname_prefix = _fname_prefix_from_module_name(module_name)
+    wheel = _wheel_filename(fname_prefix)
+
+    loaded_modules: list[ModuleType] = []
+    to_load = [module_name] + [f"{module_name}.{submodule}" for submodule in submodules]
+
+    # Load the module from the wheel file. Keep a backup of sys.path so that it
+    # can be restored later. This should ensure that future import statements
+    # cannot find this wheel file, increasing the separation of dependencies of
+    # this add-on from other add-ons.
+    with _sys_path_mod_backup(wheel):
+        for modname in to_load:
+            try:
+                module = importlib.import_module(modname)
+            except ImportError as ex:
+                raise ImportError(
+                    "Unable to load %r from %s: %s" % (modname, wheel, ex)
+                ) from None
+            assert isinstance(module, ModuleType)
+            loaded_modules.append(module)
+            _log.info("Loaded %s from %s", modname, module.__file__)
+
+    assert len(loaded_modules) == len(
+        to_load
+    ), f"expecting to load {len(to_load)} modules, but only have {len(loaded_modules)}: {loaded_modules}"
+    return loaded_modules
+
+
+def load_wheel_global(module_name: str, fname_prefix: str = "") -> ModuleType:
+    """Loads a wheel from 'fname_prefix*.whl', unless the named module can be imported.
+
+    This allows us to use system-installed packages before falling back to the shipped wheels.
+    This is useful for development, less so for deployment.
+
+    If `fname_prefix` is the empty string, it will use the first package from `module_name`.
+    In other words, `module_name="pkg.subpkg"` will result in `fname_prefix="pkg"`.
+    """
+
+    if not fname_prefix:
+        fname_prefix = _fname_prefix_from_module_name(module_name)
+
+    try:
+        module = importlib.import_module(module_name)
+    except ImportError as ex:
+        _log.debug("Unable to import %s directly, will try wheel: %s", module_name, ex)
+    else:
+        _log.debug(
+            "Was able to load %s from %s, no need to load wheel %s",
+            module_name,
+            module.__file__,
+            fname_prefix,
+        )
+        return module
+
+    wheel = _wheel_filename(fname_prefix)
+
+    wheel_filepath = str(wheel)
+    import zipfile
+
+    wheel_archive = zipfile.ZipFile(wheel_filepath)
+
+
+    wheel_archive.extractall(_env_folder)
+
+    if str(_env_folder) not in sys.path:
+        sys.path.insert(0, str(_env_folder))
+
+    try:
+        module = importlib.import_module(module_name)
+    except ImportError as ex:
+        raise ImportError(
+            "Unable to load %r from %s: %s" % (module_name, wheel, ex)
+        ) from None
+
+    _log.debug("Globally loaded %s from %s", module_name, module.__file__)
+    return module
+
+
+@contextlib.contextmanager
+def _sys_path_mod_backup(wheel_file: Path) -> Iterator[None]:
+    """Temporarily inserts a wheel onto sys.path.
+
+    When the context exits, it restores sys.path and sys.modules, so that
+    anything that was imported within the context remains unimportable by other
+    modules.
+    """
+    old_syspath = sys.path[:]
+    old_sysmod = sys.modules.copy()
+
+    try:
+        sys.path.insert(0, str(wheel_file))
+        yield
+    finally:
+        # Restore without assigning a new list instance. That way references
+        # held by other code will stay valid.
+        sys.path[:] = old_syspath
+        sys.modules.clear()
+        sys.modules.update(old_sysmod)
+
+
+def _wheel_filename(fname_prefix: str) -> Path:
+    path_pattern = "%s*.whl" % fname_prefix
+    wheels: list[Path] = list(_my_dir.glob(path_pattern))
+    if not wheels:
+        raise RuntimeError("Unable to find wheel at %r" % path_pattern)
+
+    # If there are multiple wheels that match, load the last-modified one.
+    # Alphabetical sorting isn't going to cut it since BAT 1.10 was released.
+    def modtime(filepath: Path) -> float:
+        return filepath.stat().st_mtime
+
+    wheels.sort(key=modtime)
+    return wheels[-1]
+
+
+def _fname_prefix_from_module_name(module_name: str) -> str:
+    return module_name.split(".", 1)[0]
diff --git a/multi_user/wheels/deepdiff-5.7.0-py3-none-any.whl b/multi_user/wheels/deepdiff-5.7.0-py3-none-any.whl
new file mode 100644
index 0000000..3981719
Binary files /dev/null and b/multi_user/wheels/deepdiff-5.7.0-py3-none-any.whl differ
diff --git a/multi_user/wheels/ordered_set-4.1.0-py3-none-any.whl b/multi_user/wheels/ordered_set-4.1.0-py3-none-any.whl
new file mode 100644
index 0000000..a486105
Binary files /dev/null and b/multi_user/wheels/ordered_set-4.1.0-py3-none-any.whl differ
diff --git a/multi_user/wheels/pyzmq-25.1.2-cp310-cp310-win_amd64.whl b/multi_user/wheels/pyzmq-25.1.2-cp310-cp310-win_amd64.whl
new file mode 100644
index 0000000..92dcc36
Binary files /dev/null and b/multi_user/wheels/pyzmq-25.1.2-cp310-cp310-win_amd64.whl differ
diff --git a/multi_user/wheels/replication-0.9.9-py3-none-any.whl b/multi_user/wheels/replication-0.9.9-py3-none-any.whl
new file mode 100644
index 0000000..2f3ec34
Binary files /dev/null and b/multi_user/wheels/replication-0.9.9-py3-none-any.whl differ