diff --git a/.gitignore b/.gitignore
index 9bdca26..bed8f4f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,6 +7,7 @@ __pycache__/
 cache
 config
 *.code-workspace
+multi_user_updater/
 
 # sphinx build folder
 _build
\ No newline at end of file
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100644
index 0000000..8960bc8
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,2 @@
+include:
+  - local: .gitlab/ci/build.gitlab-ci.yml
diff --git a/.gitlab/ci/build.gitlab-ci.yml b/.gitlab/ci/build.gitlab-ci.yml
new file mode 100644
index 0000000..000f8da
--- /dev/null
+++ b/.gitlab/ci/build.gitlab-ci.yml
@@ -0,0 +1,17 @@
+image: python:latest
+
+build:
+  script:
+    - git submodule init
+    - git submodule update
+    - cd multi_user/libs/replication
+    - rm -rf tests .git .gitignore
+
+  artifacts:
+    name: multi_user
+    paths:
+      - multi_user
+
+  only:
+    - master
+    - develop
diff --git a/.gitlab/issue_templates/Bug.md b/.gitlab/issue_templates/Bug.md
new file mode 100644
index 0000000..8d48115
--- /dev/null
+++ b/.gitlab/issue_templates/Bug.md
@@ -0,0 +1,46 @@
+
+
+### Summary
+
+(Summarize the bug encountered concisely)
+
+* Addon version: (your addon version)
+* Blender version: (your Blender version)
+* OS: (your OS: Windows/Linux/Mac)
+
+
+### Steps to reproduce
+
+(How one can reproduce the issue - this is very important)
+
+### Example Project [optional]
+(If possible, please create an example project that exhibits the problematic behavior, and link to it here in the bug report)
+
+
+### What is the current *bug* behavior?
+
+(What actually happens)
+
+
+### Relevant logs and/or screenshots
+
+(Paste any relevant logs - please use code blocks (```) to format console output,
+logs, and code, as it's tough to read otherwise.)
+
+
+### Possible fixes [optional]
+
+(If you can, link to the line of code that might be responsible for the problem)
+
+
+/label ~bug
+/cc @project-manager
diff --git a/README.md b/README.md
index 28ba30a..4c340da 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
 
 > Enable real-time collaborative workflow inside blender
 
-![demo](https://i.imgur.com/X0B7O1Q.gif)
+
 
 :warning: Under development, use it at your own risks. Currently tested on Windows platform. :warning:
 
@@ -25,22 +25,28 @@ See the [documentation](https://multi-user.readthedocs.io/en/latest/) for detail
 
 Currently, not all data-block are supported for replication over the wire. The following list summarizes the status for each ones.
 
-| Name | Status | Comment |
-| ----------- | :----------------: | :--------------------------------: |
-| action | :exclamation: | Not stable |
-| armature | :exclamation: | Not stable |
-| camera | :white_check_mark: | |
-| collection | :white_check_mark: | |
-| curve | :white_check_mark: | Nurbs surface don't load correctly |
-| gpencil | :white_check_mark: | |
-| image | :exclamation: | Not stable yet |
-| mesh | :white_check_mark: | |
-| material | :white_check_mark: | |
-| metaball | :white_check_mark: | |
-| object | :white_check_mark: | |
-| scene | :white_check_mark: | |
-| world | :white_check_mark: | |
-| lightprobes | :white_check_mark: | |
+| Name | Status | Comment |
+| ----------- | :----: | :-----------------------------------------------------------: |
+| action | ❗ | Not stable |
+| armature | ❗ | Not stable |
+| camera | ✔️ | |
+| collection | ✔️ | |
+| curve | ✔️ | NURBS surfaces don't load correctly |
+| gpencil | ✔️ | |
+| image | ❗ | Not stable yet |
+| mesh | ✔️ | |
+| material | ✔️ | |
+| metaball | ✔️ | |
+| object | ✔️ | |
+| scene | ✔️ | |
+| world | ✔️ | |
+| lightprobes | ✔️ | |
+| particles | ❌ | [Ongoing](https://gitlab.com/slumber/multi-user/-/issues/24) |
+| speakers | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/65) |
+| vse | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
+| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
+| libraries | ❗ | Partial |
+
 
 ### Performance issues
 
@@ -52,8 +58,6 @@ I'm working on it.
 
 | Dependencies | Version | Needed |
 | ------------ | :-----: | -----: |
 | ZeroMQ | latest | yes |
-| msgpack | latest | yes |
-| PyYAML | latest | yes |
 | JsonDiff | latest | yes |
 
@@ -65,4 +69,3 @@ See [contributing section](https://multi-user.readthedocs.io/en/latest/ways_to_c
 
 See [license](LICENSE)
 
-[![Documentation Status](https://readthedocs.org/projects/multi-user/badge/?version=latest)](https://multi-user.readthedocs.io/en/latest/?badge=latest)
diff --git a/docs/getting_started/img/quickstart_advanced.png b/docs/getting_started/img/quickstart_advanced.png
index 3cb0ad8..2416b2d 100644
Binary files a/docs/getting_started/img/quickstart_advanced.png and b/docs/getting_started/img/quickstart_advanced.png differ
diff --git a/docs/getting_started/img/quickstart_presence.png b/docs/getting_started/img/quickstart_presence.png
new file mode 100644
index 0000000..eca6b87
Binary files /dev/null and b/docs/getting_started/img/quickstart_presence.png differ
diff --git a/docs/getting_started/img/quickstart_users.png b/docs/getting_started/img/quickstart_users.png
index 473130c..0839e40 100644
Binary files a/docs/getting_started/img/quickstart_users.png and b/docs/getting_started/img/quickstart_users.png differ
diff --git a/docs/getting_started/install.rst b/docs/getting_started/install.rst
index 888b2db..bf9bd16 100644
--- a/docs/getting_started/install.rst
+++ b/docs/getting_started/install.rst
@@ -4,6 +4,6 @@ Installation
 
 *The process is the same for linux, mac and windows.*
 
-1. Download latest release `multi_user.zip `_.
+1. Download the latest `release `_ or `develop (unstable!) `_ build.
 2. Run blender as administrator (to allow python dependencies auto-installation).
-3. Install last_version.zip from your addon preferences.
\ No newline at end of file
+3. Install **multi-user.zip** from your addon preferences.
\ No newline at end of file
diff --git a/docs/getting_started/quickstart.rst b/docs/getting_started/quickstart.rst
index b7295bc..319960f 100644
--- a/docs/getting_started/quickstart.rst
+++ b/docs/getting_started/quickstart.rst
@@ -46,6 +46,8 @@ If 5555 is given in host settings, it will use 5555, 5556 (5555+1), 5557 (5555+2
 
 .. image:: img/quickstart_advanced.png
 
+**Synchronise render settings** (only host) enables replication of EEVEE and CYCLES render settings so that renders match between clients.
+
 **Right strategy** (only host) enable you to choose between a strict and a relaxed pattern:
 
 - **Strict**: Host is the king, by default the host own each properties, only him can grant modification rights.
@@ -76,6 +78,19 @@ By selecting a user in the list you'll have access to different **actions**:
 
 - The **camera button** allow you to snap on the user viewpoint.
 - The **time button** allow you to snap on the user time.
+- The **cross button** [**host only**] allows the admin to kick users.
+
+-------------------
+Presence show flags
+-------------------
+
+.. image:: img/quickstart_presence.png
+
+This panel allows you to tweak the user overlays in the viewport:
+
+- **Show selected objects**: display other users' current selection
+- **Show users**: display users' current viewpoints
+- **Show different scenes**: display users located in other scenes
 
 ---------------------
 Replicated properties
diff --git a/multi_user/__init__.py b/multi_user/__init__.py
index 1abd894..e328298 100644
--- a/multi_user/__init__.py
+++ b/multi_user/__init__.py
@@ -1,12 +1,31 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+# ##### END GPL LICENSE BLOCK #####
+
+
 bl_info = {
     "name": "Multi-User",
     "author": "Swann Martinez",
-    "version": (0, 0, 2),
+    "version": (0, 0, 3),
     "description": "Enable real-time collaborative workflow inside blender",
     "blender": (2, 80, 0),
     "location": "3D View > Sidebar > Multi-User tab",
     "warning": "Unstable addon, use it at your own risks",
     "category": "Collaboration",
+    "doc_url": "https://multi-user.readthedocs.io/en/develop/index.html",
     "wiki_url": "https://multi-user.readthedocs.io/en/develop/index.html",
     "tracker_url": "https://gitlab.com/slumber/multi-user/issues",
     "support": "COMMUNITY"
 }
@@ -21,15 +40,12 @@ import sys
 
 import bpy
 from bpy.app.handlers import persistent
 
-from . import environment, utils, presence, preferences
-from .libs.replication.replication.constants import RP_COMMON
+from . import environment, utils
 
 # TODO: remove dependency as soon as replication will be installed as a module
 
 DEPENDENCIES = {
     ("zmq","zmq"),
-    ("msgpack","msgpack"),
-    ("yaml","pyyaml"),
     ("jsondiff","jsondiff")
 }
 
@@ -37,89 +53,6 @@ DEPENDENCIES = {
 
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.WARNING)
 
-def client_list_callback(scene, context):
-    from .
import operators - - items = [(RP_COMMON, RP_COMMON, "")] - - username = utils.get_preferences().username - cli = operators.client - if cli: - client_ids = cli.online_users.keys() - for id in client_ids: - name_desc = id - if id == username: - name_desc += " (self)" - - items.append((id, name_desc, "")) - - return items - -class SessionUser(bpy.types.PropertyGroup): - """Session User - - Blender user information property - """ - username: bpy.props.StringProperty(name="username") - current_frame: bpy.props.IntProperty(name="current_frame") - - -class SessionProps(bpy.types.PropertyGroup): - is_admin: bpy.props.BoolProperty( - name="is_admin", - default=False - ) - session_mode: bpy.props.EnumProperty( - name='session_mode', - description='session mode', - items={ - ('HOST', 'hosting', 'host a session'), - ('CONNECT', 'connexion', 'connect to a session')}, - default='HOST') - clients: bpy.props.EnumProperty( - name="clients", - description="client enum", - items=client_list_callback) - enable_presence: bpy.props.BoolProperty( - name="Presence overlay", - description='Enable overlay drawing module', - default=True, - update=presence.update_presence - ) - presence_show_selected: bpy.props.BoolProperty( - name="Show selected objects", - description='Enable selection overlay ', - default=True, - update=presence.update_overlay_settings - ) - presence_show_user: bpy.props.BoolProperty( - name="Show users", - description='Enable user overlay ', - default=True, - update=presence.update_overlay_settings - ) - presence_show_far_user: bpy.props.BoolProperty( - name="Show different scenes", - description="Show user on different scenes", - default=False, - update=presence.update_overlay_settings - ) - filter_owned: bpy.props.BoolProperty( - name="filter_owned", - description='Show only owned datablocks', - default=True - ) - user_snap_running: bpy.props.BoolProperty( - default=False - ) - time_snap_running: bpy.props.BoolProperty( - default=False - ) - -classes = ( - SessionUser, - SessionProps, -) libs = os.path.dirname(os.path.abspath(__file__))+"\\libs\\replication\\replication" @@ -133,34 +66,36 @@ def register(): from . import operators from . import ui from . import preferences - - for cls in classes: - bpy.utils.register_class(cls) - - bpy.types.WindowManager.session = bpy.props.PointerProperty( - type=SessionProps) - bpy.types.ID.uuid = bpy.props.StringProperty(default="") - bpy.types.WindowManager.online_users = bpy.props.CollectionProperty( - type=SessionUser - ) - bpy.types.WindowManager.user_index = bpy.props.IntProperty() + from . import addon_updater_ops preferences.register() + addon_updater_ops.register(bl_info) presence.register() operators.register() ui.register() + bpy.types.WindowManager.session = bpy.props.PointerProperty( + type=preferences.SessionProps) + bpy.types.ID.uuid = bpy.props.StringProperty(default="") + bpy.types.WindowManager.online_users = bpy.props.CollectionProperty( + type=preferences.SessionUser + ) + bpy.types.WindowManager.user_index = bpy.props.IntProperty() + def unregister(): from . import presence from . import operators from . import ui from . import preferences + from . 
import addon_updater_ops presence.unregister() + addon_updater_ops.unregister() ui.unregister() operators.unregister() preferences.unregister() - del bpy.types.WindowManager.session - for cls in reversed(classes): - bpy.utils.unregister_class(cls) + del bpy.types.WindowManager.session + del bpy.types.ID.uuid + del bpy.types.WindowManager.online_users + del bpy.types.WindowManager.user_index diff --git a/multi_user/addon_updater.py b/multi_user/addon_updater.py new file mode 100644 index 0000000..6750358 --- /dev/null +++ b/multi_user/addon_updater.py @@ -0,0 +1,1671 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + + +""" +See documentation for usage +https://github.com/CGCookie/blender-addon-updater + +""" + +import errno +import ssl +import urllib.request +import urllib +import os +import json +import zipfile +import shutil +import threading +import fnmatch +from datetime import datetime, timedelta + +# blender imports, used in limited cases +import bpy +import addon_utils + +# ----------------------------------------------------------------------------- +# Define error messages/notices & hard coded globals +# ----------------------------------------------------------------------------- + +# currently not used +DEFAULT_TIMEOUT = 10 +DEFAULT_PER_PAGE = 30 + + +# ----------------------------------------------------------------------------- +# The main class +# ----------------------------------------------------------------------------- + +class Singleton_updater(object): + """ + This is the singleton class to reference a copy from, + it is the shared module level class + """ + def __init__(self): + + self._engine = GithubEngine() + self._user = None + self._repo = None + self._website = None + self._current_version = None + self._subfolder_path = None + self._tags = [] + self._tag_latest = None + self._tag_names = [] + self._latest_release = None + self._use_releases = False + self._include_branches = False + self._include_branch_list = ['master'] + self._include_branch_autocheck = False + self._manual_only = False + self._version_min_update = None + self._version_max_update = None + + # by default, backup current addon if new is being loaded + self._backup_current = True + self._backup_ignore_patterns = None + + # set patterns for what files to overwrite on update + self._overwrite_patterns = ["*.py","*.pyc"] + self._remove_pre_update_patterns = [] + + # by default, don't auto enable/disable the addon on update + # as it is slightly less stable/won't always fully reload module + self._auto_reload_post_update = False + + # settings relating to frequency and whether to enable auto background check + self._check_interval_enable = False + self._check_interval_months = 0 + self._check_interval_days = 7 + self._check_interval_hours = 0 + 
self._check_interval_minutes = 0 + + # runtime variables, initial conditions + self._verbose = False + self._fake_install = False + self._async_checking = False # only true when async daemon started + self._update_ready = None + self._update_link = None + self._update_version = None + self._source_zip = None + self._check_thread = None + self._select_link = None + self.skip_tag = None + + # get from module data + self._addon = __package__.lower() + self._addon_package = __package__ # must not change + self._updater_path = os.path.join(os.path.dirname(__file__), + self._addon+"_updater") + self._addon_root = os.path.dirname(__file__) + self._json = {} + self._error = None + self._error_msg = None + self._prefiltered_tag_count = 0 + + # UI code only, ie not used within this module but still useful + # properties to have + + # to verify a valid import, in place of placeholder import + self.showpopups = True # used in UI to show or not show update popups + self.invalidupdater = False + + # pre-assign basic select-link function + def select_link_function(self, tag): + return tag["zipball_url"] + + self._select_link = select_link_function + + + # ------------------------------------------------------------------------- + # Getters and setters + # ------------------------------------------------------------------------- + + + @property + def addon(self): + return self._addon + @addon.setter + def addon(self, value): + self._addon = str(value) + + @property + def api_url(self): + return self._engine.api_url + @api_url.setter + def api_url(self, value): + if self.check_is_url(value) == False: + raise ValueError("Not a valid URL: " + value) + self._engine.api_url = value + + @property + def async_checking(self): + return self._async_checking + + @property + def auto_reload_post_update(self): + return self._auto_reload_post_update + @auto_reload_post_update.setter + def auto_reload_post_update(self, value): + try: + self._auto_reload_post_update = bool(value) + except: + raise ValueError("Must be a boolean value") + + @property + def backup_current(self): + return self._backup_current + @backup_current.setter + def backup_current(self, value): + if value == None: + self._backup_current = False + return + else: + self._backup_current = value + + @property + def backup_ignore_patterns(self): + return self._backup_ignore_patterns + @backup_ignore_patterns.setter + def backup_ignore_patterns(self, value): + if value == None: + self._backup_ignore_patterns = None + return + elif type(value) != type(['list']): + raise ValueError("Backup pattern must be in list format") + else: + self._backup_ignore_patterns = value + + @property + def check_interval(self): + return (self._check_interval_enable, + self._check_interval_months, + self._check_interval_days, + self._check_interval_hours, + self._check_interval_minutes) + + @property + def current_version(self): + return self._current_version + @current_version.setter + def current_version(self, tuple_values): + if tuple_values==None: + self._current_version = None + return + elif type(tuple_values) is not tuple: + try: + tuple(tuple_values) + except: + raise ValueError( + "Not a tuple! current_version must be a tuple of integers") + for i in tuple_values: + if type(i) is not int: + raise ValueError( + "Not an integer! 
current_version must be a tuple of integers") + self._current_version = tuple(tuple_values) + + @property + def engine(self): + return self._engine.name + @engine.setter + def engine(self, value): + if value.lower()=="github": + self._engine = GithubEngine() + elif value.lower()=="gitlab": + self._engine = GitlabEngine() + elif value.lower()=="bitbucket": + self._engine = BitbucketEngine() + else: + raise ValueError("Invalid engine selection") + + @property + def error(self): + return self._error + + @property + def error_msg(self): + return self._error_msg + + @property + def fake_install(self): + return self._fake_install + @fake_install.setter + def fake_install(self, value): + if type(value) != type(False): + raise ValueError("fake_install must be a boolean value") + self._fake_install = bool(value) + + # not currently used + @property + def include_branch_autocheck(self): + return self._include_branch_autocheck + @include_branch_autocheck.setter + def include_branch_autocheck(self, value): + try: + self._include_branch_autocheck = bool(value) + except: + raise ValueError("include_branch_autocheck must be a boolean value") + + @property + def include_branch_list(self): + return self._include_branch_list + @include_branch_list.setter + def include_branch_list(self, value): + try: + if value == None: + self._include_branch_list = ['master'] + elif type(value) != type(['master']) or value==[]: + raise ValueError("include_branch_list should be a list of valid branches") + else: + self._include_branch_list = value + except: + raise ValueError("include_branch_list should be a list of valid branches") + + @property + def include_branches(self): + return self._include_branches + @include_branches.setter + def include_branches(self, value): + try: + self._include_branches = bool(value) + except: + raise ValueError("include_branches must be a boolean value") + + @property + def json(self): + if self._json == {}: + self.set_updater_json() + return self._json + + @property + def latest_release(self): + if self._latest_release == None: + return None + return self._latest_release + + @property + def manual_only(self): + return self._manual_only + @manual_only.setter + def manual_only(self, value): + try: + self._manual_only = bool(value) + except: + raise ValueError("manual_only must be a boolean value") + + @property + def overwrite_patterns(self): + return self._overwrite_patterns + @overwrite_patterns.setter + def overwrite_patterns(self, value): + if value == None: + self._overwrite_patterns = ["*.py","*.pyc"] + elif type(value) != type(['']): + raise ValueError("overwrite_patterns needs to be in a list format") + else: + self._overwrite_patterns = value + + @property + def private_token(self): + return self._engine.token + @private_token.setter + def private_token(self, value): + if value==None: + self._engine.token = None + else: + self._engine.token = str(value) + + @property + def remove_pre_update_patterns(self): + return self._remove_pre_update_patterns + @remove_pre_update_patterns.setter + def remove_pre_update_patterns(self, value): + if value == None: + self._remove_pre_update_patterns = [] + elif type(value) != type(['']): + raise ValueError("remove_pre_update_patterns needs to be in a list format") + else: + self._remove_pre_update_patterns = value + + @property + def repo(self): + return self._repo + @repo.setter + def repo(self, value): + try: + self._repo = str(value) + except: + raise ValueError("User must be a string") + + @property + def select_link(self): + return 
self._select_link + @select_link.setter + def select_link(self, value): + # ensure it is a function assignment, with signature: + # input self, tag; returns link name + if not hasattr(value, "__call__"): + raise ValueError("select_link must be a function") + self._select_link = value + + @property + def stage_path(self): + return self._updater_path + @stage_path.setter + def stage_path(self, value): + if value == None: + if self._verbose: print("Aborting assigning stage_path, it's null") + return + elif value != None and not os.path.exists(value): + try: + os.makedirs(value) + except: + if self._verbose: print("Error trying to staging path") + return + self._updater_path = value + + @property + def subfolder_path(self): + return self._subfolder_path + @subfolder_path.setter + def subfolder_path(self, value): + self._subfolder_path = value + + @property + def tags(self): + if self._tags == []: + return [] + tag_names = [] + for tag in self._tags: + tag_names.append(tag["name"]) + return tag_names + + @property + def tag_latest(self): + if self._tag_latest == None: + return None + return self._tag_latest["name"] + + @property + def update_link(self): + return self._update_link + + @property + def update_ready(self): + return self._update_ready + + @property + def update_version(self): + return self._update_version + + @property + def use_releases(self): + return self._use_releases + @use_releases.setter + def use_releases(self, value): + try: + self._use_releases = bool(value) + except: + raise ValueError("use_releases must be a boolean value") + + @property + def user(self): + return self._user + @user.setter + def user(self, value): + try: + self._user = str(value) + except: + raise ValueError("User must be a string value") + + @property + def verbose(self): + return self._verbose + @verbose.setter + def verbose(self, value): + try: + self._verbose = bool(value) + if self._verbose == True: + print(self._addon+" updater verbose is enabled") + except: + raise ValueError("Verbose must be a boolean value") + + @property + def version_max_update(self): + return self._version_max_update + @version_max_update.setter + def version_max_update(self, value): + if value == None: + self._version_max_update = None + return + if type(value) != type((1,2,3)): + raise ValueError("Version maximum must be a tuple") + for subvalue in value: + if type(subvalue) != int: + raise ValueError("Version elements must be integers") + self._version_max_update = value + + @property + def version_min_update(self): + return self._version_min_update + @version_min_update.setter + def version_min_update(self, value): + if value == None: + self._version_min_update = None + return + if type(value) != type((1,2,3)): + raise ValueError("Version minimum must be a tuple") + for subvalue in value: + if type(subvalue) != int: + raise ValueError("Version elements must be integers") + self._version_min_update = value + + @property + def website(self): + return self._website + @website.setter + def website(self, value): + if self.check_is_url(value) == False: + raise ValueError("Not a valid URL: " + value) + self._website = value + + + # ------------------------------------------------------------------------- + # Parameter validation related functions + # ------------------------------------------------------------------------- + + + def check_is_url(self, url): + if not ("http://" in url or "https://" in url): + return False + if "." 
not in url: + return False + return True + + def get_tag_names(self): + tag_names = [] + self.get_tags() + for tag in self._tags: + tag_names.append(tag["name"]) + return tag_names + + def set_check_interval(self,enable=False,months=0,days=14,hours=0,minutes=0): + # enabled = False, default initially will not check against frequency + # if enabled, default is then 2 weeks + + if type(enable) is not bool: + raise ValueError("Enable must be a boolean value") + if type(months) is not int: + raise ValueError("Months must be an integer value") + if type(days) is not int: + raise ValueError("Days must be an integer value") + if type(hours) is not int: + raise ValueError("Hours must be an integer value") + if type(minutes) is not int: + raise ValueError("Minutes must be an integer value") + + if enable==False: + self._check_interval_enable = False + else: + self._check_interval_enable = True + + self._check_interval_months = months + self._check_interval_days = days + self._check_interval_hours = hours + self._check_interval_minutes = minutes + + # declare how the class gets printed + + def __repr__(self): + return "".format(a=__file__) + + def __str__(self): + return "Updater, with user: {a}, repository: {b}, url: {c}".format( + a=self._user, + b=self._repo, c=self.form_repo_url()) + + + # ------------------------------------------------------------------------- + # API-related functions + # ------------------------------------------------------------------------- + + def form_repo_url(self): + return self._engine.form_repo_url(self) + + def form_tags_url(self): + return self._engine.form_tags_url(self) + + def form_branch_url(self, branch): + return self._engine.form_branch_url(branch, self) + + def get_tags(self): + request = self.form_tags_url() + if self._verbose: print("Getting tags from server") + + # get all tags, internet call + all_tags = self._engine.parse_tags(self.get_api(request), self) + if all_tags is not None: + self._prefiltered_tag_count = len(all_tags) + else: + self._prefiltered_tag_count = 0 + all_tags = [] + + # pre-process to skip tags + if self.skip_tag != None: + self._tags = [tg for tg in all_tags if self.skip_tag(self, tg)==False] + else: + self._tags = all_tags + + # get additional branches too, if needed, and place in front + # Does NO checking here whether branch is valid + if self._include_branches == True: + temp_branches = self._include_branch_list.copy() + temp_branches.reverse() + for branch in temp_branches: + request = self.form_branch_url(branch) + include = { + "name":branch.title(), + "zipball_url":request + } + self._tags = [include] + self._tags # append to front + + if self._tags == None: + # some error occurred + self._tag_latest = None + self._tags = [] + return + elif self._prefiltered_tag_count == 0 and self._include_branches == False: + self._tag_latest = None + if self._error == None: # if not None, could have had no internet + self._error = "No releases found" + self._error_msg = "No releases or tags found on this repository" + if self._verbose: print("No releases or tags found on this repository") + elif self._prefiltered_tag_count == 0 and self._include_branches == True: + if not self._error: self._tag_latest = self._tags[0] + if self._verbose: + branch = self._include_branch_list[0] + print("{} branch found, no releases".format(branch), self._tags[0]) + elif (len(self._tags)-len(self._include_branch_list)==0 and self._include_branches==True) \ + or (len(self._tags)==0 and self._include_branches==False) \ + and self._prefiltered_tag_count > 0: 
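+			# No tag survived the pre-filtering above (e.g. skip_tag / version range),
+			# even though the repository did return some: report that no compatible
+			# release is available.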
+ self._tag_latest = None + self._error = "No releases available" + self._error_msg = "No versions found within compatible version range" + if self._verbose: print("No versions found within compatible version range") + else: + if self._include_branches == False: + self._tag_latest = self._tags[0] + if self._verbose: print("Most recent tag found:",self._tags[0]['name']) + else: + # don't return branch if in list + n = len(self._include_branch_list) + self._tag_latest = self._tags[n] # guaranteed at least len()=n+1 + if self._verbose: print("Most recent tag found:",self._tags[n]['name']) + + + # all API calls to base url + def get_raw(self, url): + # print("Raw request:", url) + request = urllib.request.Request(url) + try: + context = ssl._create_unverified_context() + except: + # some blender packaged python versions don't have this, largely + # useful for local network setups otherwise minimal impact + context = None + + # setup private request headers if appropriate + if self._engine.token != None: + if self._engine.name == "gitlab": + request.add_header('PRIVATE-TOKEN',self._engine.token) + else: + if self._verbose: print("Tokens not setup for engine yet") + + # run the request + try: + if context: + result = urllib.request.urlopen(request, context=context) + else: + result = urllib.request.urlopen(request) + except urllib.error.HTTPError as e: + if str(e.code) == "403": + self._error = "HTTP error (access denied)" + self._error_msg = str(e.code) + " - server error response" + print(self._error, self._error_msg) + else: + self._error = "HTTP error" + self._error_msg = str(e.code) + print(self._error, self._error_msg) + self._update_ready = None + except urllib.error.URLError as e: + reason = str(e.reason) + if "TLSV1_ALERT" in reason or "SSL" in reason.upper(): + self._error = "Connection rejected, download manually" + self._error_msg = reason + print(self._error, self._error_msg) + else: + self._error = "URL error, check internet connection" + self._error_msg = reason + print(self._error, self._error_msg) + self._update_ready = None + return None + else: + result_string = result.read() + result.close() + return result_string.decode() + + + # result of all api calls, decoded into json format + def get_api(self, url): + # return the json version + get = None + get = self.get_raw(url) + if get != None: + try: + return json.JSONDecoder().decode(get) + except Exception as e: + self._error = "API response has invalid JSON format" + self._error_msg = str(e.reason) + self._update_ready = None + print(self._error, self._error_msg) + return None + else: + return None + + + # create a working directory and download the new files + def stage_repository(self, url): + + local = os.path.join(self._updater_path,"update_staging") + error = None + + # make/clear the staging folder + # ensure the folder is always "clean" + if self._verbose: print("Preparing staging folder for download:\n",local) + if os.path.isdir(local) == True: + try: + shutil.rmtree(local) + os.makedirs(local) + except: + error = "failed to remove existing staging directory" + else: + try: + os.makedirs(local) + except: + error = "failed to create staging directory" + + if error != None: + if self._verbose: print("Error: Aborting update, "+error) + self._error = "Update aborted, staging path error" + self._error_msg = "Error: {}".format(error) + return False + + if self._backup_current==True: + self.create_backup() + if self._verbose: print("Now retrieving the new source zip") + + self._source_zip = os.path.join(local,"source.zip") + + 
if self._verbose: print("Starting download update zip") + try: + request = urllib.request.Request(url) + context = ssl._create_unverified_context() + + # setup private token if appropriate + if self._engine.token != None: + if self._engine.name == "gitlab": + request.add_header('PRIVATE-TOKEN',self._engine.token) + else: + if self._verbose: print("Tokens not setup for selected engine yet") + self.urlretrieve(urllib.request.urlopen(request,context=context), self._source_zip) + # add additional checks on file size being non-zero + if self._verbose: print("Successfully downloaded update zip") + return True + except Exception as e: + self._error = "Error retrieving download, bad link?" + self._error_msg = "Error: {}".format(e) + if self._verbose: + print("Error retrieving download, bad link?") + print("Error: {}".format(e)) + return False + + + def create_backup(self): + if self._verbose: print("Backing up current addon folder") + local = os.path.join(self._updater_path,"backup") + tempdest = os.path.join(self._addon_root, + os.pardir, + self._addon+"_updater_backup_temp") + + if self._verbose: print("Backup destination path: ",local) + + if os.path.isdir(local): + try: + shutil.rmtree(local) + except: + if self._verbose:print("Failed to removed previous backup folder, contininuing") + + # remove the temp folder; shouldn't exist but could if previously interrupted + if os.path.isdir(tempdest): + try: + shutil.rmtree(tempdest) + except: + if self._verbose:print("Failed to remove existing temp folder, contininuing") + # make the full addon copy, which temporarily places outside the addon folder + if self._backup_ignore_patterns != None: + shutil.copytree( + self._addon_root,tempdest, + ignore=shutil.ignore_patterns(*self._backup_ignore_patterns)) + else: + shutil.copytree(self._addon_root,tempdest) + shutil.move(tempdest,local) + + # save the date for future ref + now = datetime.now() + self._json["backup_date"] = "{m}-{d}-{yr}".format( + m=now.strftime("%B"),d=now.day,yr=now.year) + self.save_updater_json() + + def restore_backup(self): + if self._verbose: print("Restoring backup") + + if self._verbose: print("Backing up current addon folder") + backuploc = os.path.join(self._updater_path,"backup") + tempdest = os.path.join(self._addon_root, + os.pardir, + self._addon+"_updater_backup_temp") + tempdest = os.path.abspath(tempdest) + + # make the copy + shutil.move(backuploc,tempdest) + shutil.rmtree(self._addon_root) + os.rename(tempdest,self._addon_root) + + self._json["backup_date"] = "" + self._json["just_restored"] = True + self._json["just_updated"] = True + self.save_updater_json() + + self.reload_addon() + + def unpack_staged_zip(self,clean=False): + """Unzip the downloaded file, and validate contents""" + if os.path.isfile(self._source_zip) == False: + if self._verbose: print("Error, update zip not found") + self._error = "Install failed" + self._error_msg = "Downloaded zip not found" + return -1 + + # clear the existing source folder in case previous files remain + outdir = os.path.join(self._updater_path, "source") + try: + shutil.rmtree(outdir) + if self._verbose: + print("Source folder cleared") + except: + pass + + # Create parent directories if needed, would not be relevant unless + # installing addon into another location or via an addon manager + try: + os.mkdir(outdir) + except Exception as err: + print("Error occurred while making extract dir:") + print(str(err)) + self._error = "Install failed" + self._error_msg = "Failed to make extract directory" + return -1 + + if not 
os.path.isdir(outdir): + print("Failed to create source directory") + self._error = "Install failed" + self._error_msg = "Failed to create extract directory" + return -1 + + if self._verbose: + print("Begin extracting source from zip:", self._source_zip) + zfile = zipfile.ZipFile(self._source_zip, "r") + + if not zfile: + if self._verbose: + print("Resulting file is not a zip, cannot extract") + self._error = "Install failed" + self._error_msg = "Resulting file is not a zip, cannot extract" + return -1 + + # Now extract directly from the first subfolder (not root) + # this avoids adding the first subfolder to the path length, + # which can be too long if the download has the SHA in the name + zsep = '/' #os.sep # might just always be / even on windows + for name in zfile.namelist(): + if zsep not in name: + continue + top_folder = name[:name.index(zsep)+1] + if name == top_folder + zsep: + continue # skip top level folder + subpath = name[name.index(zsep)+1:] + if name.endswith(zsep): + try: + os.mkdir(os.path.join(outdir, subpath)) + if self._verbose: + print("Extract - mkdir: ", os.path.join(outdir, subpath)) + except OSError as exc: + if exc.errno != errno.EEXIST: + self._error = "Install failed" + self._error_msg = "Could not create folder from zip" + return -1 + else: + with open(os.path.join(outdir, subpath), "wb") as outfile: + data = zfile.read(name) + outfile.write(data) + if self._verbose: + print("Extract - create:", os.path.join(outdir, subpath)) + + if self._verbose: + print("Extracted source") + + unpath = os.path.join(self._updater_path, "source") + if not os.path.isdir(unpath): + self._error = "Install failed" + self._error_msg = "Extracted path does not exist" + print("Extracted path does not exist: ", unpath) + return -1 + + if self._subfolder_path: + self._subfolder_path.replace('/', os.path.sep) + self._subfolder_path.replace('\\', os.path.sep) + + # either directly in root of zip/one subfolder, or use specified path + if os.path.isfile(os.path.join(unpath,"__init__.py")) == False: + dirlist = os.listdir(unpath) + if len(dirlist)>0: + if self._subfolder_path == "" or self._subfolder_path == None: + unpath = os.path.join(unpath, dirlist[0]) + else: + unpath = os.path.join(unpath, self._subfolder_path) + + # smarter check for additional sub folders for a single folder + # containing __init__.py + if os.path.isfile(os.path.join(unpath,"__init__.py")) == False: + if self._verbose: + print("not a valid addon found") + print("Paths:") + print(dirlist) + self._error = "Install failed" + self._error_msg = "No __init__ file found in new source" + return -1 + + # merge code with running addon directory, using blender default behavior + # plus any modifiers indicated by user (e.g. 
force remove/keep) + self.deepMergeDirectory(self._addon_root, unpath, clean) + + # Now save the json state + # Change to True, to trigger the handler on other side + # if allowing reloading within same blender instance + self._json["just_updated"] = True + self.save_updater_json() + self.reload_addon() + self._update_ready = False + return 0 + + + def deepMergeDirectory(self,base,merger,clean=False): + """Merge folder 'merger' into folder 'base' without deleting existing""" + if not os.path.exists(base): + if self._verbose: + print("Base path does not exist:", base) + return -1 + elif not os.path.exists(merger): + if self._verbose: + print("Merger path does not exist") + return -1 + + # paths to be aware of and not overwrite/remove/etc + staging_path = os.path.join(self._updater_path,"update_staging") + backup_path = os.path.join(self._updater_path,"backup") + + # If clean install is enabled, clear existing files ahead of time + # note: will not delete the update.json, update folder, staging, or staging + # but will delete all other folders/files in addon directory + error = None + if clean==True: + try: + # implement clearing of all folders/files, except the + # updater folder and updater json + # Careful, this deletes entire subdirectories recursively... + # make sure that base is not a high level shared folder, but + # is dedicated just to the addon itself + if self._verbose: print("clean=True, clearing addon folder to fresh install state") + + # remove root files and folders (except update folder) + files = [f for f in os.listdir(base) if os.path.isfile(os.path.join(base,f))] + folders = [f for f in os.listdir(base) if os.path.isdir(os.path.join(base,f))] + + for f in files: + os.remove(os.path.join(base,f)) + print("Clean removing file {}".format(os.path.join(base,f))) + for f in folders: + if os.path.join(base,f)==self._updater_path: continue + shutil.rmtree(os.path.join(base,f)) + print("Clean removing folder and contents {}".format(os.path.join(base,f))) + + except Exception as err: + error = "failed to create clean existing addon folder" + print(error, str(err)) + + # Walk through the base addon folder for rules on pre-removing + # but avoid removing/altering backup and updater file + for path, dirs, files in os.walk(base): + # prune ie skip updater folder + dirs[:] = [d for d in dirs if os.path.join(path,d) not in [self._updater_path]] + for file in files: + for ptrn in self.remove_pre_update_patterns: + if fnmatch.filter([file],ptrn): + try: + fl = os.path.join(path,file) + os.remove(fl) + if self._verbose: print("Pre-removed file "+file) + except OSError: + print("Failed to pre-remove "+file) + + # Walk through the temp addon sub folder for replacements + # this implements the overwrite rules, which apply after + # the above pre-removal rules. 
This also performs the + # actual file copying/replacements + for path, dirs, files in os.walk(merger): + # verify this structure works to prune updater sub folder overwriting + dirs[:] = [d for d in dirs if os.path.join(path,d) not in [self._updater_path]] + relPath = os.path.relpath(path, merger) + destPath = os.path.join(base, relPath) + if not os.path.exists(destPath): + os.makedirs(destPath) + for file in files: + # bring in additional logic around copying/replacing + # Blender default: overwrite .py's, don't overwrite the rest + destFile = os.path.join(destPath, file) + srcFile = os.path.join(path, file) + + # decide whether to replace if file already exists, and copy new over + if os.path.isfile(destFile): + # otherwise, check each file to see if matches an overwrite pattern + replaced=False + for ptrn in self._overwrite_patterns: + if fnmatch.filter([destFile],ptrn): + replaced=True + break + if replaced: + os.remove(destFile) + os.rename(srcFile, destFile) + if self._verbose: print("Overwrote file "+os.path.basename(destFile)) + else: + if self._verbose: print("Pattern not matched to "+os.path.basename(destFile)+", not overwritten") + else: + # file did not previously exist, simply move it over + os.rename(srcFile, destFile) + if self._verbose: print("New file "+os.path.basename(destFile)) + + # now remove the temp staging folder and downloaded zip + try: + shutil.rmtree(staging_path) + except: + error = "Error: Failed to remove existing staging directory, consider manually removing "+staging_path + if self._verbose: print(error) + + + def reload_addon(self): + # if post_update false, skip this function + # else, unload/reload addon & trigger popup + if self._auto_reload_post_update == False: + print("Restart blender to reload addon and complete update") + return + + if self._verbose: print("Reloading addon...") + addon_utils.modules(refresh=True) + bpy.utils.refresh_script_paths() + + # not allowed in restricted context, such as register module + # toggle to refresh + bpy.ops.wm.addon_disable(module=self._addon_package) + bpy.ops.wm.addon_refresh() + bpy.ops.wm.addon_enable(module=self._addon_package) + + + # ------------------------------------------------------------------------- + # Other non-api functions and setups + # ------------------------------------------------------------------------- + + def clear_state(self): + self._update_ready = None + self._update_link = None + self._update_version = None + self._source_zip = None + self._error = None + self._error_msg = None + + # custom urlretrieve implementation + def urlretrieve(self, urlfile, filepath): + chunk = 1024*8 + f = open(filepath, "wb") + while 1: + data = urlfile.read(chunk) + if not data: + #print("done.") + break + f.write(data) + #print("Read %s bytes"%len(data)) + f.close() + + + def version_tuple_from_text(self,text): + if text == None: return () + + # should go through string and remove all non-integers, + # and for any given break split into a different section + segments = [] + tmp = '' + for l in str(text): + if l.isdigit()==False: + if len(tmp)>0: + segments.append(int(tmp)) + tmp = '' + else: + tmp+=l + if len(tmp)>0: + segments.append(int(tmp)) + + if len(segments)==0: + if self._verbose: print("No version strings found text: ",text) + if self._include_branches == False: + return () + else: + return (text) + return tuple(segments) + + # called for running check in a background thread + def check_for_update_async(self, callback=None): + + if self._json != None and "update_ready" in self._json and 
self._json["version_text"]!={}: + if self._json["update_ready"] == True: + self._update_ready = True + self._update_link = self._json["version_text"]["link"] + self._update_version = str(self._json["version_text"]["version"]) + # cached update + callback(True) + return + + # do the check + if self._check_interval_enable == False: + return + elif self._async_checking == True: + if self._verbose: print("Skipping async check, already started") + return # already running the bg thread + elif self._update_ready == None: + self.start_async_check_update(False, callback) + + + def check_for_update_now(self, callback=None): + + self._error = None + self._error_msg = None + + if self._verbose: + print("Check update pressed, first getting current status") + if self._async_checking == True: + if self._verbose: print("Skipping async check, already started") + return # already running the bg thread + elif self._update_ready == None: + self.start_async_check_update(True, callback) + else: + self._update_ready = None + self.start_async_check_update(True, callback) + + + # this function is not async, will always return in sequential fashion + # but should have a parent which calls it in another thread + def check_for_update(self, now=False): + if self._verbose: print("Checking for update function") + + # clear the errors if any + self._error = None + self._error_msg = None + + # avoid running again in, just return past result if found + # but if force now check, then still do it + if self._update_ready != None and now == False: + return (self._update_ready,self._update_version,self._update_link) + + if self._current_version == None: + raise ValueError("current_version not yet defined") + if self._repo == None: + raise ValueError("repo not yet defined") + if self._user == None: + raise ValueError("username not yet defined") + + self.set_updater_json() # self._json + + if now == False and self.past_interval_timestamp()==False: + if self._verbose: + print("Aborting check for updated, check interval not reached") + return (False, None, None) + + # check if using tags or releases + # note that if called the first time, this will pull tags from online + if self._fake_install == True: + if self._verbose: + print("fake_install = True, setting fake version as ready") + self._update_ready = True + self._update_version = "(999,999,999)" + self._update_link = "http://127.0.0.1" + + return (self._update_ready, self._update_version, self._update_link) + + # primary internet call + self.get_tags() # sets self._tags and self._tag_latest + + self._json["last_check"] = str(datetime.now()) + self.save_updater_json() + + # can be () or ('master') in addition to branches, and version tag + new_version = self.version_tuple_from_text(self.tag_latest) + + if len(self._tags)==0: + self._update_ready = False + self._update_version = None + self._update_link = None + return (False, None, None) + if self._include_branches == False: + link = self.select_link(self, self._tags[0]) + else: + n = len(self._include_branch_list) + if len(self._tags)==n: + # effectively means no tags found on repo + # so provide the first one as default + link = self.select_link(self, self._tags[0]) + else: + link = self.select_link(self, self._tags[n]) + + if new_version == (): + self._update_ready = False + self._update_version = None + self._update_link = None + return (False, None, None) + elif str(new_version).lower() in self._include_branch_list: + # handle situation where master/whichever branch is included + # however, this code effectively is not 
triggered now + # as new_version will only be tag names, not branch names + if self._include_branch_autocheck == False: + # don't offer update as ready, + # but set the link for the default + # branch for installing + self._update_ready = False + self._update_version = new_version + self._update_link = link + self.save_updater_json() + return (True, new_version, link) + else: + raise ValueError("include_branch_autocheck: NOT YET DEVELOPED") + # bypass releases and look at timestamp of last update + # from a branch compared to now, see if commit values + # match or not. + + else: + # situation where branches not included + + if new_version > self._current_version: + + self._update_ready = True + self._update_version = new_version + self._update_link = link + self.save_updater_json() + return (True, new_version, link) + + # elif new_version != self._current_version: + # self._update_ready = False + # self._update_version = new_version + # self._update_link = link + # self.save_updater_json() + # return (True, new_version, link) + + # if no update, set ready to False from None + self._update_ready = False + self._update_version = None + self._update_link = None + return (False, None, None) + + + def set_tag(self, name): + """Assign the tag name and url to update to""" + tg = None + for tag in self._tags: + if name == tag["name"]: + tg = tag + break + if tg: + new_version = self.version_tuple_from_text(self.tag_latest) + self._update_version = new_version + self._update_link = self.select_link(self, tg) + elif self._include_branches and name in self._include_branch_list: + # scenario if reverting to a specific branch name instead of tag + tg = name + link = self.form_branch_url(tg) + self._update_version = name # this will break things + self._update_link = link + if not tg: + raise ValueError("Version tag not found: "+name) + + + def run_update(self,force=False,revert_tag=None,clean=False,callback=None): + """Runs an install, update, or reversion of an addon from online source + + Arguments: + force: Install assigned link, even if self.update_ready is False + revert_tag: Version to install, if none uses detected update link + clean: not used, but in future could use to totally refresh addon + callback: used to run function on update completion + """ + self._json["update_ready"] = False + self._json["ignore"] = False # clear ignore flag + self._json["version_text"] = {} + + if revert_tag != None: + self.set_tag(revert_tag) + self._update_ready = True + + # clear the errors if any + self._error = None + self._error_msg = None + + if self._verbose: print("Running update") + + if self._fake_install == True: + # change to True, to trigger the reload/"update installed" handler + if self._verbose: + print("fake_install=True") + print("Just reloading and running any handler triggers") + self._json["just_updated"] = True + self.save_updater_json() + if self._backup_current == True: + self.create_backup() + self.reload_addon() + self._update_ready = False + res = True # fake "success" zip download flag + + elif force==False: + if self._update_ready != True: + if self._verbose: + print("Update stopped, new version not ready") + if callback: + callback( + self._addon_package, + "Update stopped, new version not ready") + return "Update stopped, new version not ready" + elif self._update_link == None: + # this shouldn't happen if update is ready + if self._verbose: + print("Update stopped, update link unavailable") + if callback: + callback( + self._addon_package, + "Update stopped, update link 
unavailable") + return "Update stopped, update link unavailable" + + if self._verbose and revert_tag==None: + print("Staging update") + elif self._verbose: + print("Staging install") + + res = self.stage_repository(self._update_link) + if res !=True: + print("Error in staging repository: "+str(res)) + if callback != None: + callback(self._addon_package, self._error_msg) + return self._error_msg + res = self.unpack_staged_zip(clean) + if res<0: + if callback: + callback(self._addon_package, self._error_msg) + return res + + else: + if self._update_link == None: + if self._verbose: + print("Update stopped, could not get link") + return "Update stopped, could not get link" + if self._verbose: + print("Forcing update") + + res = self.stage_repository(self._update_link) + if res !=True: + print("Error in staging repository: "+str(res)) + if callback: + callback(self._addon_package, self._error_msg) + return self._error_msg + res = self.unpack_staged_zip(clean) + if res<0: + return res + # would need to compare against other versions held in tags + + # run the front-end's callback if provided + if callback: + callback(self._addon_package) + + # return something meaningful, 0 means it worked + return 0 + + + def past_interval_timestamp(self): + if self._check_interval_enable == False: + return True # ie this exact feature is disabled + + if "last_check" not in self._json or self._json["last_check"] == "": + return True + else: + now = datetime.now() + last_check = datetime.strptime(self._json["last_check"], + "%Y-%m-%d %H:%M:%S.%f") + next_check = last_check + offset = timedelta( + days=self._check_interval_days + 30*self._check_interval_months, + hours=self._check_interval_hours, + minutes=self._check_interval_minutes + ) + + delta = (now - offset) - last_check + if delta.total_seconds() > 0: + if self._verbose: + print("{} Updater: Time to check for updates!".format(self._addon)) + return True + else: + if self._verbose: + print("{} Updater: Determined it's not yet time to check for updates".format(self._addon)) + return False + + def get_json_path(self): + """Returns the full path to the JSON state file used by this updater. 
+ + Will also rename old file paths to addon-specific path if found + """ + json_path = os.path.join(self._updater_path, + "{}_updater_status.json".format(self._addon_package)) + old_json_path = os.path.join(self._updater_path, "updater_status.json") + + # rename old file if it exists + try: + os.rename(old_json_path, json_path) + except FileNotFoundError: + pass + except Exception as err: + print("Other OS error occurred while trying to rename old JSON") + print(err) + return json_path + + def set_updater_json(self): + """Load or initialize JSON dictionary data for updater state""" + if self._updater_path == None: + raise ValueError("updater_path is not defined") + elif os.path.isdir(self._updater_path) == False: + os.makedirs(self._updater_path) + + jpath = self.get_json_path() + if os.path.isfile(jpath): + with open(jpath) as data_file: + self._json = json.load(data_file) + if self._verbose: + print("{} Updater: Read in JSON settings from file".format( + self._addon)) + else: + # set data structure + self._json = { + "last_check":"", + "backup_date":"", + "update_ready":False, + "ignore":False, + "just_restored":False, + "just_updated":False, + "version_text":{} + } + self.save_updater_json() + + + def save_updater_json(self): + # first save the state + if self._update_ready == True: + if type(self._update_version) == type((0,0,0)): + self._json["update_ready"] = True + self._json["version_text"]["link"]=self._update_link + self._json["version_text"]["version"]=self._update_version + else: + self._json["update_ready"] = False + self._json["version_text"] = {} + else: + self._json["update_ready"] = False + self._json["version_text"] = {} + + jpath = self.get_json_path() + outf = open(jpath,'w') + data_out = json.dumps(self._json, indent=4) + outf.write(data_out) + outf.close() + if self._verbose: + print(self._addon+": Wrote out updater JSON settings to file, with the contents:") + print(self._json) + + def json_reset_postupdate(self): + self._json["just_updated"] = False + self._json["update_ready"] = False + self._json["version_text"] = {} + self.save_updater_json() + + def json_reset_restore(self): + self._json["just_restored"] = False + self._json["update_ready"] = False + self._json["version_text"] = {} + self.save_updater_json() + self._update_ready = None # reset so you could check update again + + def ignore_update(self): + self._json["ignore"] = True + self.save_updater_json() + + + # ------------------------------------------------------------------------- + # ASYNC stuff + # ------------------------------------------------------------------------- + + def start_async_check_update(self, now=False, callback=None): + """Start a background thread which will check for updates""" + if self._async_checking is True: + return + if self._verbose: + print("{} updater: Starting background checking thread".format( + self._addon)) + check_thread = threading.Thread(target=self.async_check_update, + args=(now,callback,)) + check_thread.daemon = True + self._check_thread = check_thread + check_thread.start() + + def async_check_update(self, now, callback=None): + """Perform update check, run as target of background thread""" + self._async_checking = True + if self._verbose: + print("{} BG thread: Checking for update now in background".format( + self._addon)) + + try: + self.check_for_update(now=now) + except Exception as exception: + print("Checking for update error:") + print(exception) + if not self._error: + self._update_ready = False + self._update_version = None + self._update_link = None 
+ self._error = "Error occurred" + self._error_msg = "Encountered an error while checking for updates" + + self._async_checking = False + self._check_thread = None + + if self._verbose: + print("{} BG thread: Finished checking for update, doing callback".format(self._addon)) + if callback: + callback(self._update_ready) + + def stop_async_check_update(self): + """Method to give impression of stopping check for update. + + Currently does nothing but allows user to retry/stop blocking UI from + hitting a refresh button. This does not actually stop the thread, as it + will complete after the connection timeout regardless. If the thread + does complete with a successful response, this will be still displayed + on next UI refresh (ie no update, or update available). + """ + if self._check_thread != None: + if self._verbose: print("Thread will end in normal course.") + # however, "There is no direct kill method on a thread object." + # better to let it run its course + #self._check_thread.stop() + self._async_checking = False + self._error = None + self._error_msg = None + + +# ----------------------------------------------------------------------------- +# Updater Engines +# ----------------------------------------------------------------------------- + + +class BitbucketEngine(object): + """Integration to Bitbucket API for git-formatted repositories""" + + def __init__(self): + self.api_url = 'https://api.bitbucket.org' + self.token = None + self.name = "bitbucket" + + def form_repo_url(self, updater): + return self.api_url+"/2.0/repositories/"+updater.user+"/"+updater.repo + + def form_tags_url(self, updater): + return self.form_repo_url(updater) + "/refs/tags?sort=-name" + + def form_branch_url(self, branch, updater): + return self.get_zip_url(branch, updater) + + def get_zip_url(self, name, updater): + return "https://bitbucket.org/{user}/{repo}/get/{name}.zip".format( + user=updater.user, + repo=updater.repo, + name=name) + + def parse_tags(self, response, updater): + if response == None: + return [] + return [{"name": tag["name"], "zipball_url": self.get_zip_url(tag["name"], updater)} for tag in response["values"]] + + +class GithubEngine(object): + """Integration to Github API""" + + def __init__(self): + self.api_url = 'https://api.github.com' + self.token = None + self.name = "github" + + def form_repo_url(self, updater): + return "{}{}{}{}{}".format(self.api_url,"/repos/",updater.user, + "/",updater.repo) + + def form_tags_url(self, updater): + if updater.use_releases: + return "{}{}".format(self.form_repo_url(updater),"/releases") + else: + return "{}{}".format(self.form_repo_url(updater),"/tags") + + def form_branch_list_url(self, updater): + return "{}{}".format(self.form_repo_url(updater),"/branches") + + def form_branch_url(self, branch, updater): + return "{}{}{}".format(self.form_repo_url(updater), + "/zipball/",branch) + + def parse_tags(self, response, updater): + if response == None: + return [] + return response + + +class GitlabEngine(object): + """Integration to GitLab API""" + + def __init__(self): + self.api_url = 'https://gitlab.com' + self.token = None + self.name = "gitlab" + + def form_repo_url(self, updater): + return "{}{}{}".format(self.api_url,"/api/v4/projects/",updater.repo) + + def form_tags_url(self, updater): + if updater.use_releases: + return "{}{}".format(self.form_repo_url(updater),"/releases") + else: + return "{}{}".format(self.form_repo_url(updater),"/repository/tags") + + def form_branch_list_url(self, updater): + # does not validate branch name. 
+ return "{}{}".format( + self.form_repo_url(updater), + "/repository/branches") + + def form_branch_url(self, branch, updater): + # Could clash with tag names and if it does, it will + # download TAG zip instead of branch zip to get + # direct path, would need. + return "{}{}{}".format( + self.form_repo_url(updater), + "/repository/archive.zip?sha=", + branch) + + def get_zip_url(self, sha, updater): + return "{base}/repository/archive.zip?sha={sha}".format( + base=self.form_repo_url(updater), + sha=sha) + + # def get_commit_zip(self, id, updater): + # return self.form_repo_url(updater)+"/repository/archive.zip?sha:"+id + + def parse_tags(self, response, updater): + if response == None: + return [] + # Return asset links from release + if updater.use_releases: + return [{"name": release["name"], "zipball_url": release["assets"]["links"][0]["url"]} for release in response] + else: + return [{"name": tag["name"], "zipball_url": self.get_zip_url(tag["commit"]["id"], updater)} for tag in response] + + + +# ----------------------------------------------------------------------------- +# The module-shared class instance, +# should be what's imported to other files +# ----------------------------------------------------------------------------- + +Updater = Singleton_updater() diff --git a/multi_user/addon_updater_ops.py b/multi_user/addon_updater_ops.py new file mode 100644 index 0000000..30c0e47 --- /dev/null +++ b/multi_user/addon_updater_ops.py @@ -0,0 +1,1454 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + +import os + +import bpy +from bpy.app.handlers import persistent + +# updater import, import safely +# Prevents popups for users with invalid python installs e.g. missing libraries +try: + from .addon_updater import Updater as updater +except Exception as e: + print("ERROR INITIALIZING UPDATER") + print(str(e)) + + class Singleton_updater_none(object): + def __init__(self): + self.addon = None + self.verbose = False + self.invalidupdater = True # used to distinguish bad install + self.error = None + self.error_msg = None + self.async_checking = None + + def clear_state(self): + self.addon = None + self.verbose = False + self.invalidupdater = True + self.error = None + self.error_msg = None + self.async_checking = None + + def run_update(self): pass + def check_for_update(self): pass + updater = Singleton_updater_none() + updater.error = "Error initializing updater module" + updater.error_msg = str(e) + +# Must declare this before classes are loaded +# otherwise the bl_idname's will not match and have errors. 
+# Must be all lowercase and no spaces +updater.addon = "multiuser" + + +# ----------------------------------------------------------------------------- +# Blender version utils +# ----------------------------------------------------------------------------- + + +def make_annotations(cls): + """Add annotation attribute to class fields to avoid Blender 2.8 warnings""" + if not hasattr(bpy.app, "version") or bpy.app.version < (2, 80): + return cls + bl_props = {k: v for k, v in cls.__dict__.items() if isinstance(v, tuple)} + if bl_props: + if '__annotations__' not in cls.__dict__: + setattr(cls, '__annotations__', {}) + annotations = cls.__dict__['__annotations__'] + for k, v in bl_props.items(): + annotations[k] = v + delattr(cls, k) + return cls + + +def layout_split(layout, factor=0.0, align=False): + """Intermediate method for pre and post blender 2.8 split UI function""" + if not hasattr(bpy.app, "version") or bpy.app.version < (2, 80): + return layout.split(percentage=factor, align=align) + return layout.split(factor=factor, align=align) + + +def get_user_preferences(context=None): + """Intermediate method for pre and post blender 2.8 grabbing preferences""" + if not context: + context = bpy.context + prefs = None + if hasattr(context, "user_preferences"): + prefs = context.user_preferences.addons.get(__package__, None) + elif hasattr(context, "preferences"): + prefs = context.preferences.addons.get(__package__, None) + if prefs: + return prefs.preferences + # To make the addon stable and non-exception prone, return None + # raise Exception("Could not fetch user preferences") + return None + + +# ----------------------------------------------------------------------------- +# Updater operators +# ----------------------------------------------------------------------------- + + +# simple popup for prompting checking for update & allow to install if available +class addon_updater_install_popup(bpy.types.Operator): + """Check and install update if available""" + bl_label = "Update {x} addon".format(x=updater.addon) + bl_idname = updater.addon+".updater_install_popup" + bl_description = "Popup menu to check and display current updates available" + bl_options = {'REGISTER', 'INTERNAL'} + + # if true, run clean install - ie remove all files before adding new + # equivalent to deleting the addon and reinstalling, except the + # updater folder/backup folder remains + clean_install = bpy.props.BoolProperty( + name="Clean install", + description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install", + default=False, + options={'HIDDEN'} + ) + ignore_enum = bpy.props.EnumProperty( + name="Process update", + description="Decide to install, ignore, or defer new addon update", + items=[ + ("install", "Update Now", "Install update now"), + ("ignore", "Ignore", "Ignore this update to prevent future popups"), + ("defer", "Defer", "Defer choice till next blender session") + ], + options={'HIDDEN'} + ) + + def check(self, context): + return True + + def invoke(self, context, event): + return context.window_manager.invoke_props_dialog(self) + + def draw(self, context): + layout = self.layout + if updater.invalidupdater == True: + layout.label(text="Updater module error") + return + elif updater.update_ready == True: + col = layout.column() + col.scale_y = 0.7 + col.label(text="Update {} ready!".format(str(updater.update_version)), + icon="LOOP_FORWARDS") + col.label( + text="Choose 'Update Now' & press OK to install, ", icon="BLANK1") + col.label(text="or 
click outside window to defer", icon="BLANK1") + row = col.row() + row.prop(self, "ignore_enum", expand=True) + col.split() + elif updater.update_ready == False: + col = layout.column() + col.scale_y = 0.7 + col.label(text="No updates available") + col.label(text="Press okay to dismiss dialog") + # add option to force install + else: + # case: updater.update_ready = None + # we have not yet checked for the update + layout.label(text="Check for update now?") + + # potentially in future, could have UI for 'check to select old version' + # to revert back to. + + def execute(self, context): + + # in case of error importing updater + if updater.invalidupdater == True: + return {'CANCELLED'} + + if updater.manual_only == True: + bpy.ops.wm.url_open(url=updater.website) + elif updater.update_ready == True: + + # action based on enum selection + if self.ignore_enum == 'defer': + return {'FINISHED'} + elif self.ignore_enum == 'ignore': + updater.ignore_update() + return {'FINISHED'} + # else: "install update now!" + + res = updater.run_update( + force=False, + callback=post_update_callback, + clean=self.clean_install) + # should return 0, if not something happened + if updater.verbose: + if res == 0: + print("Updater returned successful") + else: + print("Updater returned {}, error occurred".format(res)) + elif updater.update_ready == None: + _ = updater.check_for_update(now=True) + + # re-launch this dialog + atr = addon_updater_install_popup.bl_idname.split(".") + getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT') + else: + if updater.verbose: + print("Doing nothing, not ready for update") + return {'FINISHED'} + + +# User preference check-now operator +class addon_updater_check_now(bpy.types.Operator): + bl_label = "Check now for "+updater.addon+" update" + bl_idname = updater.addon+".updater_check_now" + bl_description = "Check now for an update to the {x} addon".format( + x=updater.addon) + bl_options = {'REGISTER', 'INTERNAL'} + + def execute(self, context): + if updater.invalidupdater == True: + return {'CANCELLED'} + + if updater.async_checking == True and updater.error == None: + # Check already happened + # Used here to just avoid constant applying settings below + # Ignoring if error, to prevent being stuck on the error screen + return {'CANCELLED'} + + # apply the UI settings + settings = get_user_preferences(context) + if not settings: + if updater.verbose: + print("Could not get {} preferences, update check skipped".format( + __package__)) + return {'CANCELLED'} + updater.set_check_interval(enable=settings.auto_check_update, + months=settings.updater_intrval_months, + days=settings.updater_intrval_days, + hours=settings.updater_intrval_hours, + minutes=settings.updater_intrval_minutes + ) # optional, if auto_check_update + + # input is an optional callback function + # this function should take a bool input, if true: update ready + # if false, no update ready + updater.check_for_update_now(ui_refresh) + + return {'FINISHED'} + + +class addon_updater_update_now(bpy.types.Operator): + bl_label = "Update "+updater.addon+" addon now" + bl_idname = updater.addon+".updater_update_now" + bl_description = "Update to the latest version of the {x} addon".format( + x=updater.addon) + bl_options = {'REGISTER', 'INTERNAL'} + + # if true, run clean install - ie remove all files before adding new + # equivalent to deleting the addon and reinstalling, except the + # updater folder/backup folder remains + clean_install = bpy.props.BoolProperty( + name="Clean install", + description="If 
enabled, completely clear the addon's folder before installing new update, creating a fresh install", + default=False, + options={'HIDDEN'} + ) + + def execute(self, context): + + # in case of error importing updater + if updater.invalidupdater == True: + return {'CANCELLED'} + + if updater.manual_only == True: + bpy.ops.wm.url_open(url=updater.website) + if updater.update_ready == True: + # if it fails, offer to open the website instead + try: + res = updater.run_update( + force=False, + callback=post_update_callback, + clean=self.clean_install) + + # should return 0, if not something happened + if updater.verbose: + if res == 0: + print("Updater returned successful") + else: + print("Updater returned "+str(res)+", error occurred") + except Exception as e: + updater._error = "Error trying to run update" + updater._error_msg = str(e) + atr = addon_updater_install_manually.bl_idname.split(".") + getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT') + elif updater.update_ready == None: + (update_ready, version, link) = updater.check_for_update(now=True) + # re-launch this dialog + atr = addon_updater_install_popup.bl_idname.split(".") + getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT') + + elif updater.update_ready == False: + self.report({'INFO'}, "Nothing to update") + else: + self.report( + {'ERROR'}, "Encountered problem while trying to update") + + return {'FINISHED'} + + +class addon_updater_update_target(bpy.types.Operator): + bl_label = updater.addon+" version target" + bl_idname = updater.addon+".updater_update_target" + bl_description = "Install a targeted version of the {x} addon".format( + x=updater.addon) + bl_options = {'REGISTER', 'INTERNAL'} + + def target_version(self, context): + # in case of error importing updater + if updater.invalidupdater == True: + ret = [] + + ret = [] + i = 0 + for tag in updater.tags: + ret.append((tag, tag, "Select to install "+tag)) + i += 1 + return ret + + target = bpy.props.EnumProperty( + name="Target version to install", + description="Select the version to install", + items=target_version + ) + + # if true, run clean install - ie remove all files before adding new + # equivalent to deleting the addon and reinstalling, except the + # updater folder/backup folder remains + clean_install = bpy.props.BoolProperty( + name="Clean install", + description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install", + default=False, + options={'HIDDEN'} + ) + + @classmethod + def poll(cls, context): + if updater.invalidupdater == True: + return False + return updater.update_ready != None and len(updater.tags) > 0 + + def invoke(self, context, event): + return context.window_manager.invoke_props_dialog(self) + + def draw(self, context): + layout = self.layout + if updater.invalidupdater == True: + layout.label(text="Updater error") + return + split = layout_split(layout, factor=0.66) + subcol = split.column() + subcol.label(text="Select install version") + subcol = split.column() + subcol.prop(self, "target", text="") + + def execute(self, context): + + # in case of error importing updater + if updater.invalidupdater == True: + return {'CANCELLED'} + + res = updater.run_update( + force=False, + revert_tag=self.target, + callback=post_update_callback, + clean=self.clean_install) + + # should return 0, if not something happened + if res == 0: + if updater.verbose: + print("Updater returned successful") + else: + if updater.verbose: + print("Updater returned "+str(res)+", error occurred") + 
return {'CANCELLED'} + + return {'FINISHED'} + + +class addon_updater_install_manually(bpy.types.Operator): + """As a fallback, direct the user to download the addon manually""" + bl_label = "Install update manually" + bl_idname = updater.addon+".updater_install_manually" + bl_description = "Proceed to manually install update" + bl_options = {'REGISTER', 'INTERNAL'} + + error = bpy.props.StringProperty( + name="Error Occurred", + default="", + options={'HIDDEN'} + ) + + def invoke(self, context, event): + return context.window_manager.invoke_popup(self) + + def draw(self, context): + layout = self.layout + + if updater.invalidupdater == True: + layout.label(text="Updater error") + return + + # use a "failed flag"? it shows this label if the case failed. + if self.error != "": + col = layout.column() + col.scale_y = 0.7 + col.label( + text="There was an issue trying to auto-install", icon="ERROR") + col.label( + text="Press the download button below and install", icon="BLANK1") + col.label(text="the zip file like a normal addon.", icon="BLANK1") + else: + col = layout.column() + col.scale_y = 0.7 + col.label(text="Install the addon manually") + col.label(text="Press the download button below and install") + col.label(text="the zip file like a normal addon.") + + # if check hasn't happened, i.e. accidentally called this menu + # allow to check here + + row = layout.row() + + if updater.update_link != None: + row.operator("wm.url_open", + text="Direct download").url = updater.update_link + else: + row.operator("wm.url_open", + text="(failed to retrieve direct download)") + row.enabled = False + + if updater.website != None: + row = layout.row() + row.operator("wm.url_open", text="Open website").url =\ + updater.website + else: + row = layout.row() + row.label(text="See source website to download the update") + + def execute(self, context): + + return {'FINISHED'} + + +class addon_updater_updated_successful(bpy.types.Operator): + """Addon in place, popup telling user it completed or what went wrong""" + bl_label = "Installation Report" + bl_idname = updater.addon+".updater_update_successful" + bl_description = "Update installation response" + bl_options = {'REGISTER', 'INTERNAL', 'UNDO'} + + error = bpy.props.StringProperty( + name="Error Occurred", + default="", + options={'HIDDEN'} + ) + + def invoke(self, context, event): + return context.window_manager.invoke_props_popup(self, event) + + def draw(self, context): + layout = self.layout + + if updater.invalidupdater == True: + layout.label(text="Updater error") + return + + saved = updater.json + if self.error != "": + col = layout.column() + col.scale_y = 0.7 + col.label(text="Error occurred, did not install", icon="ERROR") + if updater.error_msg: + msg = updater.error_msg + else: + msg = self.error + col.label(text=str(msg), icon="BLANK1") + rw = col.row() + rw.scale_y = 2 + rw.operator("wm.url_open", + text="Click for manual download.", + icon="BLANK1" + ).url = updater.website + # manual download button here + elif updater.auto_reload_post_update == False: + # tell user to restart blender + if "just_restored" in saved and saved["just_restored"] == True: + col = layout.column() + col.scale_y = 0.7 + col.label(text="Addon restored", icon="RECOVER_LAST") + col.label(text="Restart blender to reload.", icon="BLANK1") + updater.json_reset_restore() + else: + col = layout.column() + col.scale_y = 0.7 + col.label(text="Addon successfully installed", + icon="FILE_TICK") + col.label(text="Restart blender to reload.", icon="BLANK1") + + else: + # 
reload addon, but still recommend they restart blender + if "just_restored" in saved and saved["just_restored"] == True: + col = layout.column() + col.scale_y = 0.7 + col.label(text="Addon restored", icon="RECOVER_LAST") + col.label(text="Consider restarting blender to fully reload.", + icon="BLANK1") + updater.json_reset_restore() + else: + col = layout.column() + col.scale_y = 0.7 + col.label(text="Addon successfully installed", + icon="FILE_TICK") + col.label(text="Consider restarting blender to fully reload.", + icon="BLANK1") + + def execute(self, context): + return {'FINISHED'} + + +class addon_updater_restore_backup(bpy.types.Operator): + """Restore addon from backup""" + bl_label = "Restore backup" + bl_idname = updater.addon+".updater_restore_backup" + bl_description = "Restore addon from backup" + bl_options = {'REGISTER', 'INTERNAL'} + + @classmethod + def poll(cls, context): + try: + return os.path.isdir(os.path.join(updater.stage_path, "backup")) + except: + return False + + def execute(self, context): + # in case of error importing updater + if updater.invalidupdater == True: + return {'CANCELLED'} + updater.restore_backup() + return {'FINISHED'} + + +class addon_updater_ignore(bpy.types.Operator): + """Prevent future update notice popups""" + bl_label = "Ignore update" + bl_idname = updater.addon+".updater_ignore" + bl_description = "Ignore update to prevent future popups" + bl_options = {'REGISTER', 'INTERNAL'} + + @classmethod + def poll(cls, context): + if updater.invalidupdater == True: + return False + elif updater.update_ready == True: + return True + else: + return False + + def execute(self, context): + # in case of error importing updater + if updater.invalidupdater == True: + return {'CANCELLED'} + updater.ignore_update() + self.report({"INFO"}, "Open addon preferences for updater options") + return {'FINISHED'} + + +class addon_updater_end_background(bpy.types.Operator): + """Stop checking for update in the background""" + bl_label = "End background check" + bl_idname = updater.addon+".end_background_check" + bl_description = "Stop checking for update in the background" + bl_options = {'REGISTER', 'INTERNAL'} + + # @classmethod + # def poll(cls, context): + # if updater.async_checking == True: + # return True + # else: + # return False + + def execute(self, context): + # in case of error importing updater + if updater.invalidupdater == True: + return {'CANCELLED'} + updater.stop_async_check_update() + return {'FINISHED'} + + +# ----------------------------------------------------------------------------- +# Handler related, to create popups +# ----------------------------------------------------------------------------- + + +# global vars used to prevent duplicate popup handlers +ran_autocheck_install_popup = False +ran_update_sucess_popup = False + +# global var for preventing successive calls +ran_background_check = False + + +@persistent +def updater_run_success_popup_handler(scene): + global ran_update_sucess_popup + ran_update_sucess_popup = True + + # in case of error importing updater + if updater.invalidupdater == True: + return + + try: + bpy.app.handlers.scene_update_post.remove( + updater_run_success_popup_handler) + except: + pass + + atr = addon_updater_updated_successful.bl_idname.split(".") + getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT') + + +@persistent +def updater_run_install_popup_handler(scene): + global ran_autocheck_install_popup + ran_autocheck_install_popup = True + + # in case of error importing updater + if 
updater.invalidupdater == True: + return + + try: + bpy.app.handlers.scene_update_post.remove( + updater_run_install_popup_handler) + except: + pass + + if "ignore" in updater.json and updater.json["ignore"] == True: + return # don't do popup if ignore pressed + # elif type(updater.update_version) != type((0,0,0)): + # # likely was from master or another branch, shouldn't trigger popup + # updater.json_reset_restore() + # return + elif "version_text" in updater.json and "version" in updater.json["version_text"]: + version = updater.json["version_text"]["version"] + ver_tuple = updater.version_tuple_from_text(version) + + if ver_tuple < updater.current_version: + # user probably manually installed to get the up to date addon + # in here. Clear out the update flag using this function + if updater.verbose: + print("{} updater: appears user updated, clearing flag".format( + updater.addon)) + updater.json_reset_restore() + return + atr = addon_updater_install_popup.bl_idname.split(".") + getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT') + + +def background_update_callback(update_ready): + """Passed into the updater, background thread updater""" + global ran_autocheck_install_popup + + # in case of error importing updater + if updater.invalidupdater == True: + return + if updater.showpopups == False: + return + if update_ready != True: + return + if updater_run_install_popup_handler not in \ + bpy.app.handlers.scene_update_post and \ + ran_autocheck_install_popup == False: + bpy.app.handlers.scene_update_post.append( + updater_run_install_popup_handler) + ran_autocheck_install_popup = True + + +def post_update_callback(module_name, res=None): + """Callback for once the run_update function has completed + + Only makes sense to use this if "auto_reload_post_update" == False, + i.e. don't auto-restart the addon + + Arguments: + module_name: returns the module name from updater, but unused here + res: If an error occurred, this is the detail string + """ + + # in case of error importing updater + if updater.invalidupdater == True: + return + + if res == None: + # this is the same code as in conditional at the end of the register function + # ie if "auto_reload_post_update" == True, comment out this code + if updater.verbose: + print("{} updater: Running post update callback".format(updater.addon)) + # bpy.app.handlers.scene_update_post.append(updater_run_success_popup_handler) + + atr = addon_updater_updated_successful.bl_idname.split(".") + getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT') + global ran_update_sucess_popup + ran_update_sucess_popup = True + else: + # some kind of error occurred and it was unable to install, + # offer manual download instead + atr = addon_updater_updated_successful.bl_idname.split(".") + getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT', error=res) + return + + +def ui_refresh(update_status): + # find a way to just re-draw self? + # callback intended for trigger by async thread + for windowManager in bpy.data.window_managers: + for window in windowManager.windows: + for area in window.screen.areas: + area.tag_redraw() + + +def check_for_update_background(): + """Function for asynchronous background check. + + *Could* be called on register, but would be bad practice. 
+ """ + if updater.invalidupdater == True: + return + global ran_background_check + if ran_background_check == True: + # Global var ensures check only happens once + return + elif updater.update_ready != None or updater.async_checking == True: + # Check already happened + # Used here to just avoid constant applying settings below + return + + # apply the UI settings + settings = get_user_preferences(bpy.context) + if not settings: + return + updater.set_check_interval(enable=settings.auto_check_update, + months=settings.updater_intrval_months, + days=settings.updater_intrval_days, + hours=settings.updater_intrval_hours, + minutes=settings.updater_intrval_minutes + ) # optional, if auto_check_update + + # input is an optional callback function + # this function should take a bool input, if true: update ready + # if false, no update ready + if updater.verbose: + print("{} updater: Running background check for update".format( + updater.addon)) + updater.check_for_update_async(background_update_callback) + ran_background_check = True + + +def check_for_update_nonthreaded(self, context): + """Can be placed in front of other operators to launch when pressed""" + if updater.invalidupdater == True: + return + + # only check if it's ready, ie after the time interval specified + # should be the async wrapper call here + settings = get_user_preferences(bpy.context) + if not settings: + if updater.verbose: + print("Could not get {} preferences, update check skipped".format( + __package__)) + return + updater.set_check_interval(enable=settings.auto_check_update, + months=settings.updater_intrval_months, + days=settings.updater_intrval_days, + hours=settings.updater_intrval_hours, + minutes=settings.updater_intrval_minutes + ) # optional, if auto_check_update + + (update_ready, version, link) = updater.check_for_update(now=False) + if update_ready == True: + atr = addon_updater_install_popup.bl_idname.split(".") + getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT') + else: + if updater.verbose: + print("No update ready") + self.report({'INFO'}, "No update ready") + + +def showReloadPopup(): + """For use in register only, to show popup after re-enabling the addon + + Must be enabled by developer + """ + if updater.invalidupdater == True: + return + saved_state = updater.json + global ran_update_sucess_popup + + a = saved_state != None + b = "just_updated" in saved_state + c = saved_state["just_updated"] + + if a and b and c: + updater.json_reset_postupdate() # so this only runs once + + # no handlers in this case + if updater.auto_reload_post_update == False: + return + + if updater_run_success_popup_handler not in \ + bpy.app.handlers.scene_update_post \ + and ran_update_sucess_popup == False: + bpy.app.handlers.scene_update_post.append( + updater_run_success_popup_handler) + ran_update_sucess_popup = True + + +# ----------------------------------------------------------------------------- +# Example UI integrations +# ----------------------------------------------------------------------------- + + +def update_notice_box_ui(self, context): + """ Panel - Update Available for placement at end/beginning of panel + + After a check for update has occurred, this function will draw a box + saying an update is ready, and give a button for: update now, open website, + or ignore popup. 
Ideal to be placed at the end / beginning of a panel + """ + + if updater.invalidupdater == True: + return + + saved_state = updater.json + if updater.auto_reload_post_update == False: + if "just_updated" in saved_state and saved_state["just_updated"] == True: + layout = self.layout + box = layout.box() + col = box.column() + col.scale_y = 0.7 + col.label(text="Restart blender", icon="ERROR") + col.label(text="to complete update") + return + + # if user pressed ignore, don't draw the box + if "ignore" in updater.json and updater.json["ignore"] == True: + return + if updater.update_ready != True: + return + + layout = self.layout + box = layout.box() + col = box.column(align=True) + col.label(text="Update ready!", icon="ERROR") + col.separator() + row = col.row(align=True) + split = row.split(align=True) + colL = split.column(align=True) + colL.scale_y = 1.5 + colL.operator(addon_updater_ignore.bl_idname, icon="X", text="Ignore") + colR = split.column(align=True) + colR.scale_y = 1.5 + if updater.manual_only == False: + colR.operator(addon_updater_update_now.bl_idname, + text="Update", icon="LOOP_FORWARDS") + col.operator("wm.url_open", text="Open website").url = updater.website + #col.operator("wm.url_open",text="Direct download").url=updater.update_link + col.operator(addon_updater_install_manually.bl_idname, + text="Install manually") + else: + #col.operator("wm.url_open",text="Direct download").url=updater.update_link + col.operator("wm.url_open", text="Get it now").url = updater.website + + +def update_settings_ui(self, context, element=None): + """Preferences - for drawing with full width inside user preferences + + Create a function that can be run inside user preferences panel for prefs UI + Place inside UI draw using: addon_updater_ops.updaterSettingsUI(self, context) + or by: addon_updater_ops.updaterSettingsUI(context) + """ + + # element is a UI element, such as layout, a row, column, or box + if element == None: + element = self.layout + box = element.box() + + # in case of error importing updater + if updater.invalidupdater == True: + box.label(text="Error initializing updater code:") + box.label(text=updater.error_msg) + return + settings = get_user_preferences(context) + if not settings: + box.label(text="Error getting updater preferences", icon='ERROR') + return + + # auto-update settings + box.label(text="Updater Settings") + row = box.row() + + # special case to tell user to restart blender, if set that way + if updater.auto_reload_post_update == False: + saved_state = updater.json + if "just_updated" in saved_state and saved_state["just_updated"] == True: + row.label(text="Restart blender to complete update", icon="ERROR") + return + + split = layout_split(row, factor=0.3) + subcol = split.column() + subcol.prop(settings, "auto_check_update") + subcol = split.column() + + if settings.auto_check_update == False: + subcol.enabled = False + subrow = subcol.row() + subrow.label(text="Interval between checks") + subrow = subcol.row(align=True) + checkcol = subrow.column(align=True) + checkcol.prop(settings, "updater_intrval_months") + checkcol = subrow.column(align=True) + checkcol.prop(settings, "updater_intrval_days") + checkcol = subrow.column(align=True) + checkcol.prop(settings, "updater_intrval_hours") + checkcol = subrow.column(align=True) + checkcol.prop(settings, "updater_intrval_minutes") + + # checking / managing updates + row = box.row() + col = row.column() + if updater.error != None: + subcol = col.row(align=True) + subcol.scale_y = 1 + split = 
subcol.split(align=True) + split.scale_y = 2 + if "ssl" in updater.error_msg.lower(): + split.enabled = True + split.operator(addon_updater_install_manually.bl_idname, + text=updater.error) + else: + split.enabled = False + split.operator(addon_updater_check_now.bl_idname, + text=updater.error) + split = subcol.split(align=True) + split.scale_y = 2 + split.operator(addon_updater_check_now.bl_idname, + text="", icon="FILE_REFRESH") + + elif updater.update_ready == None and updater.async_checking == False: + col.scale_y = 2 + col.operator(addon_updater_check_now.bl_idname) + elif updater.update_ready == None: # async is running + subcol = col.row(align=True) + subcol.scale_y = 1 + split = subcol.split(align=True) + split.enabled = False + split.scale_y = 2 + split.operator(addon_updater_check_now.bl_idname, + text="Checking...") + split = subcol.split(align=True) + split.scale_y = 2 + split.operator(addon_updater_end_background.bl_idname, + text="", icon="X") + + elif updater.include_branches == True and \ + len(updater.tags) == len(updater.include_branch_list) and \ + updater.manual_only == False: + # no releases found, but still show the appropriate branch + subcol = col.row(align=True) + subcol.scale_y = 1 + split = subcol.split(align=True) + split.scale_y = 2 + split.operator(addon_updater_update_now.bl_idname, + text="Update directly to "+str(updater.include_branch_list[0])) + split = subcol.split(align=True) + split.scale_y = 2 + split.operator(addon_updater_check_now.bl_idname, + text="", icon="FILE_REFRESH") + + elif updater.update_ready == True and updater.manual_only == False: + subcol = col.row(align=True) + subcol.scale_y = 1 + split = subcol.split(align=True) + split.scale_y = 2 + split.operator(addon_updater_update_now.bl_idname, + text="Update now to "+str(updater.update_version)) + split = subcol.split(align=True) + split.scale_y = 2 + split.operator(addon_updater_check_now.bl_idname, + text="", icon="FILE_REFRESH") + + elif updater.update_ready == True and updater.manual_only == True: + col.scale_y = 2 + col.operator("wm.url_open", + text="Download "+str(updater.update_version)).url = updater.website + else: # i.e. 
that updater.update_ready == False + subcol = col.row(align=True) + subcol.scale_y = 1 + split = subcol.split(align=True) + split.enabled = False + split.scale_y = 2 + split.operator(addon_updater_check_now.bl_idname, + text="Addon is up to date") + split = subcol.split(align=True) + split.scale_y = 2 + split.operator(addon_updater_check_now.bl_idname, + text="", icon="FILE_REFRESH") + + if updater.manual_only == False: + col = row.column(align=True) + # col.operator(addon_updater_update_target.bl_idname, + if updater.include_branches == True and len(updater.include_branch_list) > 0: + branch = updater.include_branch_list[0] + col.operator(addon_updater_update_target.bl_idname, + text="Install latest {} / old version".format(branch)) + else: + col.operator(addon_updater_update_target.bl_idname, + text="Reinstall / install old version") + lastdate = "none found" + backuppath = os.path.join(updater.stage_path, "backup") + if "backup_date" in updater.json and os.path.isdir(backuppath): + if updater.json["backup_date"] == "": + lastdate = "Date not found" + else: + lastdate = updater.json["backup_date"] + backuptext = "Restore addon backup ({})".format(lastdate) + col.operator(addon_updater_restore_backup.bl_idname, text=backuptext) + + row = box.row() + row.scale_y = 0.7 + lastcheck = updater.json["last_check"] + if updater.error != None and updater.error_msg != None: + row.label(text=updater.error_msg) + elif lastcheck != "" and lastcheck != None: + lastcheck = lastcheck[0: lastcheck.index(".")] + row.label(text="Last update check: " + lastcheck) + else: + row.label(text="Last update check: Never") + + +def update_settings_ui_condensed(self, context, element=None): + """Preferences - Condensed drawing within preferences + + Alternate draw for user preferences or other places, does not draw a box + """ + + # element is a UI element, such as layout, a row, column, or box + if element == None: + element = self.layout + row = element.row() + + # in case of error importing updater + if updater.invalidupdater == True: + row.label(text="Error initializing updater code:") + row.label(text=updater.error_msg) + return + settings = get_user_preferences(context) + if not settings: + row.label(text="Error getting updater preferences", icon='ERROR') + return + + # special case to tell user to restart blender, if set that way + if updater.auto_reload_post_update == False: + saved_state = updater.json + if "just_updated" in saved_state and saved_state["just_updated"] == True: + row.label(text="Restart blender to complete update", icon="ERROR") + return + + col = row.column() + if updater.error != None: + subcol = col.row(align=True) + subcol.scale_y = 1 + split = subcol.split(align=True) + split.scale_y = 2 + if "ssl" in updater.error_msg.lower(): + split.enabled = True + split.operator(addon_updater_install_manually.bl_idname, + text=updater.error) + else: + split.enabled = False + split.operator(addon_updater_check_now.bl_idname, + text=updater.error) + split = subcol.split(align=True) + split.scale_y = 2 + split.operator(addon_updater_check_now.bl_idname, + text="", icon="FILE_REFRESH") + + elif updater.update_ready == None and updater.async_checking == False: + col.scale_y = 2 + col.operator(addon_updater_check_now.bl_idname) + elif updater.update_ready == None: # async is running + subcol = col.row(align=True) + subcol.scale_y = 1 + split = subcol.split(align=True) + split.enabled = False + split.scale_y = 2 + split.operator(addon_updater_check_now.bl_idname, + text="Checking...") + split = 
subcol.split(align=True) + split.scale_y = 2 + split.operator(addon_updater_end_background.bl_idname, + text="", icon="X") + + elif updater.include_branches == True and \ + len(updater.tags) == len(updater.include_branch_list) and \ + updater.manual_only == False: + # no releases found, but still show the appropriate branch + subcol = col.row(align=True) + subcol.scale_y = 1 + split = subcol.split(align=True) + split.scale_y = 2 + split.operator(addon_updater_update_now.bl_idname, + text="Update directly to "+str(updater.include_branch_list[0])) + split = subcol.split(align=True) + split.scale_y = 2 + split.operator(addon_updater_check_now.bl_idname, + text="", icon="FILE_REFRESH") + + elif updater.update_ready == True and updater.manual_only == False: + subcol = col.row(align=True) + subcol.scale_y = 1 + split = subcol.split(align=True) + split.scale_y = 2 + split.operator(addon_updater_update_now.bl_idname, + text="Update now to "+str(updater.update_version)) + split = subcol.split(align=True) + split.scale_y = 2 + split.operator(addon_updater_check_now.bl_idname, + text="", icon="FILE_REFRESH") + + elif updater.update_ready == True and updater.manual_only == True: + col.scale_y = 2 + col.operator("wm.url_open", + text="Download "+str(updater.update_version)).url = updater.website + else: # i.e. that updater.update_ready == False + subcol = col.row(align=True) + subcol.scale_y = 1 + split = subcol.split(align=True) + split.enabled = False + split.scale_y = 2 + split.operator(addon_updater_check_now.bl_idname, + text="Addon is up to date") + split = subcol.split(align=True) + split.scale_y = 2 + split.operator(addon_updater_check_now.bl_idname, + text="", icon="FILE_REFRESH") + + row = element.row() + row.prop(settings, "auto_check_update") + + row = element.row() + row.scale_y = 0.7 + lastcheck = updater.json["last_check"] + if updater.error != None and updater.error_msg != None: + row.label(text=updater.error_msg) + elif lastcheck != "" and lastcheck != None: + lastcheck = lastcheck[0: lastcheck.index(".")] + row.label(text="Last check: " + lastcheck) + else: + row.label(text="Last check: Never") + + +def skip_tag_function(self, tag): + """A global function for tag skipping + + A way to filter which tags are displayed, + e.g. to limit downgrading too far + input is a tag text, e.g. "v1.2.3" + output is True for skipping this tag number, + False if the tag is allowed (default for all) + Note: here, "self" is the acting updater shared class instance + """ + + # in case of error importing updater + if self.invalidupdater == True: + return False + + # ---- write any custom code here, return true to disallow version ---- # + # + # # Filter out e.g. if 'beta' is in name of release + # if 'beta' in tag.lower(): + # return True + # ---- write any custom code above, return true to disallow version --- # + + if self.include_branches == True: + for branch in self.include_branch_list: + if tag["name"].lower() == branch: + return False + + # function converting string to tuple, ignoring e.g. 
leading 'v' + tupled = self.version_tuple_from_text(tag["name"]) + if type(tupled) != type((1, 2, 3)): + return True + + # select the min tag version - change tuple accordingly + if self.version_min_update != None: + if tupled < self.version_min_update: + return True # skip if current version below this + + # select the max tag version + if self.version_max_update != None: + if tupled >= self.version_max_update: + return True # skip if current version at or above this + + # in all other cases, allow showing the tag for updating/reverting + return False + + +def select_link_function(self, tag): + """Only customize if trying to leverage "attachments" in *GitHub* releases + + A way to select from one or multiple attached downloadable files from the + server, instead of downloading the default release/tag source code + """ + + # -- Default, universal case (and is the only option for GitLab/Bitbucket) + link = tag["zipball_url"] + + # -- Example: select the first (or only) asset instead of source code -- + # if "assets" in tag and "browser_download_url" in tag["assets"][0]: + # link = tag["assets"][0]["browser_download_url"] + + # -- Example: select asset based on OS, where multiple builds exist -- + # # not tested/no error checking, modify to fit your own needs! + # # assume each release has three attached builds: + # # release_windows.zip, release_OSX.zip, release_linux.zip + # # This also would logically not be used with "branches" enabled + # if platform.system() == "Darwin": # ie OSX + # link = [asset for asset in tag["assets"] if 'OSX' in asset][0] + # elif platform.system() == "Windows": + # link = [asset for asset in tag["assets"] if 'windows' in asset][0] + # elif platform.system() == "Linux": + # link = [asset for asset in tag["assets"] if 'linux' in asset][0] + + return link + + +# ----------------------------------------------------------------------------- +# Register, should be run in the register module itself +# ----------------------------------------------------------------------------- + + +classes = ( + addon_updater_install_popup, + addon_updater_check_now, + addon_updater_update_now, + addon_updater_update_target, + addon_updater_install_manually, + addon_updater_updated_successful, + addon_updater_restore_backup, + addon_updater_ignore, + addon_updater_end_background +) + + +def register(bl_info): + """Registering the operators in this module""" + # safer failure in case of issue loading module + if updater.error: + print("Exiting updater registration, " + updater.error) + return + updater.clear_state() # clear internal vars, avoids reloading oddities + + # confirm your updater "engine" (Github is default if not specified) + updater.engine = "GitLab" + + # If using private repository, indicate the token here + # Must be set after assigning the engine. + # **WARNING** Depending on the engine, this token can act like a password!!
+ # Only provide a token if the project is *non-public*, see readme for + # other considerations and suggestions from a security standpoint + updater.private_token = None # "tokenstring" + + # choose your own username, must match website (not needed for GitLab) + updater.user = "slumber" + + # choose your own repository, must match git name + updater.repo = "10515801" + + # updater.addon = # define at top of module, MUST be done first + + # Website for manual addon download, optional but recommended to set + updater.website = "https://gitlab.com/slumber/multi-user/" + + # Addon subfolder path + # "sample/path/to/addon" + # default is "" or None, meaning root + updater.subfolder_path = "multi-user" + + # used to check/compare versions + updater.current_version = bl_info["version"] + + # Optional, to hard-set update frequency, use this here - however, + # this demo has this set via UI properties. + # updater.set_check_interval( + # enable=False,months=0,days=0,hours=0,minutes=2) + + # Optional, consider turning off for production or allow as an option + # This will print out additional debugging info to the console + updater.verbose = True # make False for production default + + # Optional, customize where the addon updater processing subfolder is, + # essentially a staging folder used by the updater on its own + # Needs to be within the same folder as the addon itself + # Need to supply a full, absolute path to folder + # updater.updater_path = # set path of updater folder, by default: + # /addons/{__package__}/{__package__}_updater + + # auto create a backup of the addon when installing other versions + updater.backup_current = True # True by default + + # Sample ignore patterns for when creating backup of current during update + updater.backup_ignore_patterns = ["__pycache__"] + # Alternate example patterns + # updater.backup_ignore_patterns = [".git", "__pycache__", "*.bat", ".gitignore", "*.exe"] + + # Patterns for files to actively overwrite if found in new update + # file and are also found in the currently installed addon. Note that + + # by default (ie if set to []), updates are installed in the same way as blender: + # .py files are replaced, but other file types (e.g. json, txt, blend) + # will NOT be overwritten if already present in current install. Thus + # if you want to automatically update resources/non py files, add them + # as a part of the pattern list below so they will always be overwritten by an + # update. If a pattern file is not found in new update, no action is taken + # This does NOT delete anything, only defines what is allowed to be overwritten + updater.overwrite_patterns = ["*.png", "*.jpg", "README.md", "LICENSE.txt"] + # updater.overwrite_patterns = [] + # other examples: + # ["*"] means ALL files/folders will be overwritten by update, was the behavior pre updater v1.0.4 + # [] or ["*.py","*.pyc"] matches default blender behavior, ie same effect if user installs update manually without deleting the existing addon first + # e.g.
if existing install and update both have a resource.blend file, the existing installed one will remain + # ["some.py"] means if some.py is found in addon update, it will overwrite any existing some.py in current addon install, if any + # ["*.json"] means all json files found in addon update will overwrite those of same name in current install + # ["*.png","README.md","LICENSE.txt"] means the readme, license, and all pngs will be overwritten by update + + # Patterns for files to actively remove prior to running update + # Useful if wanting to remove old code due to changes in filenames + # that otherwise would accumulate. Note: this runs after taking + # a backup (if enabled) but before placing in new update. If the same + # file name removed exists in the update, then it acts as if pattern + # is placed in the overwrite_patterns property. Note this is effectively + # ignored if clean=True in the run_update method + updater.remove_pre_update_patterns = ["*.py", "*.pyc"] + # Note setting ["*"] here is equivalent to always running updates with + # clean = True in the run_update method, ie the equivalent of a fresh, + # new install. This would also delete any resources or user-made/modified + # files. Setting ["__pycache__"] ensures the pycache folder is always removed + # The configuration of ["*.py","*.pyc"] is a safe option as this + # will ensure no old python files/caches remain in the event different addon + # versions have different filenames or structures + + # Allow branches like 'master' as an option to update to, regardless + # of release or version. + # Default behavior: releases will still be used for auto check (popup), + # but the user has the option from user preferences to directly + # update to the master branch or any other branches specified using + # the "install {branch}/older version" operator. + updater.include_branches = True + + # (GitHub/Gitlab only) This option allows the user to use releases over tags for data, + # which enables pulling down release logs/notes, as well as specifying installs from + # release-attached zips (instead of just the auto-packaged code generated with + # a release/tag). Setting has no impact on BitBucket repos + updater.use_releases = True + # note: Releases always have a tag, but a tag may not always be a release + # Therefore, setting True above will filter out any non-annotated tags + # note 2: Using this option will also display the release name instead of + # just the tag name, bear this in mind given the skip_tag_function filtering above + + # if using "include_branches", + # updater.include_branch_list defaults to ['master'] branch if set to None + # example targeting multiple other branches allowed to pull from + # updater.include_branch_list = ['master', 'dev'] # example with two branches + # None is the equivalent to setting ['master'] + updater.include_branch_list = None + + # Only allow manual install, thus prompting the user to open + # the addon's web page to download, specifically: updater.website + # Useful if only wanting to get notification of updates but not + # directly install.
+ updater.manual_only = False + + # Used for development only, "pretend" to install an update to test + # reloading conditions + updater.fake_install = False # Set to true to test callback/reloading + + # Show popups, ie if auto-check for update is enabled or a previous + # check for update in user preferences found a new version, show a popup + # (at most once per blender session, and it provides an option to ignore + # for future sessions); default behavior is set to True + updater.showpopups = True + # note: if set to false, there will still be an "update ready" box drawn + # using the `update_notice_box_ui` panel function. + + # Override with a custom function on what tags + # to skip showing for updater; see code for function above. + # Set the min and max versions allowed to install. + # Optional, default None + # min install (>=) will install this and higher + updater.version_min_update = (0, 0, 1) + # updater.version_min_update = None # if not wanting to define a min + + # max install (<) will install strictly anything lower + # updater.version_max_update = (9,9,9) + updater.version_max_update = None # set to None if not wanting to set max + + # Function defined above, customize as appropriate per repository + updater.skip_tag = skip_tag_function # min and max used in this function + + # Function defined above, customize as appropriate per repository; not required + updater.select_link = select_link_function + + # The register line items for all operators/panels + # If using bpy.utils.register_module(__name__) to register elsewhere + # in the addon, delete these lines (also from unregister) + for cls in classes: + # apply annotations to remove Blender 2.8 warnings, no effect on 2.7 + make_annotations(cls) + # comment out this line if using bpy.utils.register_module(__name__) + bpy.utils.register_class(cls) + + # special situation: we just updated the addon, show a popup + # to tell the user it worked + # should be enclosed in try/catch in case other issues arise + showReloadPopup() + + +def unregister(): + for cls in reversed(classes): + # comment out this line if using bpy.utils.unregister_module(__name__) + bpy.utils.unregister_class(cls) + + # clear global vars since they may persist if not restarting blender + updater.clear_state() # clear internal vars, avoids reloading oddities + + global ran_autocheck_install_popup + ran_autocheck_install_popup = False + + global ran_update_sucess_popup + ran_update_sucess_popup = False + + global ran_background_check + ran_background_check = False diff --git a/multi_user/bl_types/__init__.py b/multi_user/bl_types/__init__.py index 81f744a..e7cfdad 100644 --- a/multi_user/bl_types/__init__.py +++ b/multi_user/bl_types/__init__.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# +# ##### END GPL LICENSE BLOCK ##### + + __all__ = [ 'bl_object', 'bl_mesh', diff --git a/multi_user/bl_types/bl_action.py b/multi_user/bl_types/bl_action.py index 9d4c28f..1fc8138 100644 --- a/multi_user/bl_types/bl_action.py +++ b/multi_user/bl_types/bl_action.py @@ -1,11 +1,132 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils import copy +import numpy as np +from enum import Enum from .. import utils +from .dump_anything import ( + Dumper, Loader, np_dump_collection, np_load_collection, remove_items_from_dict) from .bl_datablock import BlDatablock -# WIP + +KEYFRAME = [ + 'amplitude', + 'co', + 'back', + 'handle_left', + 'handle_right', + 'easing', + 'handle_left_type', + 'handle_right_type', + 'type', + 'interpolation', +] + + +def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict: + """ Dump a single curve to a dict + + :arg fcurve: fcurve to dump + :type fcurve: bpy.types.FCurve + :arg use_numpy: use numpy to accelerate dump + :type use_numpy: bool + :return: dict + """ + fcurve_data = { + "data_path": fcurve.data_path, + "dumped_array_index": fcurve.array_index, + "use_numpy": use_numpy + } + + if use_numpy: + points = fcurve.keyframe_points + fcurve_data['keyframes_count'] = len(fcurve.keyframe_points) + fcurve_data['keyframe_points'] = np_dump_collection(points, KEYFRAME) + + else: # Legacy method + dumper = Dumper() + fcurve_data["keyframe_points"] = [] + + for k in fcurve.keyframe_points: + fcurve_data["keyframe_points"].append( + dumper.dump(k) + ) + + return fcurve_data + + +def load_fcurve(fcurve_data, fcurve): + """ Load a dumped fcurve + + :arg fcurve_data: a dumped fcurve + :type fcurve_data: dict + :arg fcurve: fcurve to load into + :type fcurve: bpy.types.FCurve + """ + use_numpy = fcurve_data.get('use_numpy') + + keyframe_points = fcurve.keyframe_points + + # Remove all keyframe points + for i in range(len(keyframe_points)): + keyframe_points.remove(keyframe_points[0], fast=True) + + if use_numpy: + keyframe_points.add(fcurve_data['keyframes_count']) + np_load_collection(fcurve_data["keyframe_points"], keyframe_points, KEYFRAME) + + else: + # paste dumped keyframes + for dumped_keyframe_point in fcurve_data["keyframe_points"]: + if dumped_keyframe_point['type'] == '': + dumped_keyframe_point['type'] = 'KEYFRAME' + + new_kf = keyframe_points.insert( + dumped_keyframe_point["co"][0], + dumped_keyframe_point["co"][1], + options={'FAST', 'REPLACE'} + ) + + keycache = copy.copy(dumped_keyframe_point) + keycache = remove_items_from_dict( + keycache, + ["co", "handle_left", "handle_right", 'type'] + ) + + loader = Loader() + loader.load(new_kf, keycache) + + new_kf.type = dumped_keyframe_point['type'] + new_kf.handle_left = [ + dumped_keyframe_point["handle_left"][0], + dumped_keyframe_point["handle_left"][1] + ] + new_kf.handle_right = [ + dumped_keyframe_point["handle_right"][0], +
dumped_keyframe_point["handle_right"][1] + ] + + fcurve.update() + class BlAction(BlDatablock): bl_id = "actions" @@ -14,86 +135,29 @@ class BlAction(BlDatablock): bl_delay_apply = 1 bl_automatic_push = True bl_icon = 'ACTION_TWEAK' - - def construct(self, data): + + def _construct(self, data): return bpy.data.actions.new(data["name"]) - def load(self, data, target): - begin_frame = 100000 - end_frame = -100000 - - for dumped_fcurve in data["fcurves"]: - begin_frame = min( - begin_frame, - min( - [begin_frame] + [dkp["co"][0] for dkp in dumped_fcurve["keyframe_points"]] - ) - ) - end_frame = max( - end_frame, - max( - [end_frame] + [dkp["co"][0] for dkp in dumped_fcurve["keyframe_points"]] - ) - ) - begin_frame = 0 - - loader = utils.dump_anything.Loader() + def _load(self, data, target): for dumped_fcurve in data["fcurves"]: dumped_data_path = dumped_fcurve["data_path"] dumped_array_index = dumped_fcurve["dumped_array_index"] # create fcurve if needed - fcurve = target.fcurves.find(dumped_data_path, index=dumped_array_index) + fcurve = target.fcurves.find( + dumped_data_path, index=dumped_array_index) if fcurve is None: - fcurve = target.fcurves.new(dumped_data_path, index=dumped_array_index) + fcurve = target.fcurves.new( + dumped_data_path, index=dumped_array_index) + load_fcurve(dumped_fcurve, fcurve) + target.id_root = data['id_root'] - # remove keyframes within dumped_action range - for keyframe in reversed(fcurve.keyframe_points): - if end_frame >= (keyframe.co[0] + begin_frame ) >= begin_frame: - fcurve.keyframe_points.remove(keyframe, fast=True) - - # paste dumped keyframes - for dumped_keyframe_point in dumped_fcurve["keyframe_points"]: - if dumped_keyframe_point['type'] == '': - dumped_keyframe_point['type'] = 'KEYFRAME' - - new_kf = fcurve.keyframe_points.insert( - dumped_keyframe_point["co"][0] - begin_frame, - dumped_keyframe_point["co"][1], - options={'FAST', 'REPLACE'} - ) - - keycache = copy.copy(dumped_keyframe_point) - keycache = utils.dump_anything.remove_items_from_dict( - keycache, - ["co", "handle_left", "handle_right",'type'] - ) - - loader.load( - new_kf, - keycache - ) - - new_kf.type = dumped_keyframe_point['type'] - new_kf.handle_left = [ - dumped_keyframe_point["handle_left"][0] - begin_frame, - dumped_keyframe_point["handle_left"][1] - ] - new_kf.handle_right = [ - dumped_keyframe_point["handle_right"][0] - begin_frame, - dumped_keyframe_point["handle_right"][1] - ] - - # clearing (needed for blender to update well) - if len(fcurve.keyframe_points) == 0: - target.fcurves.remove(fcurve) - target.id_root= data['id_root'] - - def dump(self, pointer=None): + def _dump(self, pointer=None): assert(pointer) - dumper = utils.dump_anything.Dumper() - dumper.exclude_filter =[ + dumper = Dumper() + dumper.exclude_filter = [ 'name_full', 'original', 'use_fake_user', @@ -106,27 +170,11 @@ class BlAction(BlDatablock): 'users' ] dumper.depth = 1 - data = dumper.dump(pointer) + data = dumper.dump(pointer) - data["fcurves"] = [] - dumper.depth = 2 + for fcurve in self.pointer.fcurves: - fc = { - "data_path": fcurve.data_path, - "dumped_array_index": fcurve.array_index, - "keyframe_points": [] - } - - for k in fcurve.keyframe_points: - fc["keyframe_points"].append( - dumper.dump(k) - ) - - data["fcurves"].append(fc) + data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True)) return data - - - - diff --git a/multi_user/bl_types/bl_armature.py b/multi_user/bl_types/bl_armature.py index 8aca65c..98bef98 100644 --- a/multi_user/bl_types/bl_armature.py +++ 
b/multi_user/bl_types/bl_armature.py @@ -1,12 +1,28 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils -from ..libs.overrider import Overrider -from .. import utils -from .. import presence, operators -from .bl_datablock import BlDatablock -# WIP +from .dump_anything import Loader, Dumper +from .. import presence, operators, utils +from .bl_datablock import BlDatablock class BlArmature(BlDatablock): @@ -17,10 +33,10 @@ class BlArmature(BlDatablock): bl_automatic_push = True bl_icon = 'ARMATURE_DATA' - def construct(self, data): + def _construct(self, data): return bpy.data.armatures.new(data["name"]) - def load_implementation(self, data, target): + def _load_implementation(self, data, target): # Load parent object parent_object = utils.find_from_attr( 'uuid', @@ -82,7 +98,8 @@ class BlArmature(BlDatablock): [bone]['parent']] new_bone.use_connect = bone_data['use_connect'] - utils.dump_anything.load(new_bone, bone_data) + loader = Loader() + loader.load(new_bone, bone_data) if bpy.context.mode != 'OBJECT': bpy.ops.object.mode_set(mode='OBJECT') @@ -92,10 +109,10 @@ class BlArmature(BlDatablock): if 'EDIT' in current_mode: bpy.ops.object.mode_set(mode='EDIT') - def dump_implementation(self, data, pointer=None): + def _dump_implementation(self, data, pointer=None): assert(pointer) - dumper = utils.dump_anything.Dumper() + dumper = Dumper() dumper.depth = 4 dumper.include_filter = [ 'bones', diff --git a/multi_user/bl_types/bl_camera.py b/multi_user/bl_types/bl_camera.py index 72700a2..178d578 100644 --- a/multi_user/bl_types/bl_camera.py +++ b/multi_user/bl_types/bl_camera.py @@ -1,7 +1,25 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils -from .. 
import utils +from .dump_anything import Loader, Dumper from .bl_datablock import BlDatablock @@ -13,22 +31,26 @@ class BlCamera(BlDatablock): bl_automatic_push = True bl_icon = 'CAMERA_DATA' - def load_implementation(self, data, target): - utils.dump_anything.load(target, data) + def _construct(self, data): + return bpy.data.cameras.new(data["name"]) + + + def _load_implementation(self, data, target): + loader = Loader() + loader.load(target, data) dof_settings = data.get('dof') # DOF settings if dof_settings: - utils.dump_anything.load(target.dof, dof_settings) + loader.load(target.dof, dof_settings) - def construct(self, data): - return bpy.data.cameras.new(data["name"]) - - def dump_implementation(self, data, pointer=None): + def _dump_implementation(self, data, pointer=None): assert(pointer) - dumper = utils.dump_anything.Dumper() + # TODO: background image support + + dumper = Dumper() dumper.depth = 2 dumper.include_filter = [ "name", @@ -49,6 +71,14 @@ class BlCamera(BlDatablock): 'aperture_blades', 'aperture_rotation', 'aperture_ratio', + 'display_size', + 'show_limits', + 'show_mist', + 'show_sensor', + 'show_name', + 'sensor_fit', + 'sensor_height', + 'sensor_width', ] return dumper.dump(pointer) diff --git a/multi_user/bl_types/bl_collection.py b/multi_user/bl_types/bl_collection.py index 38ac0b1..4a4d382 100644 --- a/multi_user/bl_types/bl_collection.py +++ b/multi_user/bl_types/bl_collection.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
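# [Illustrative aside, not part of the original changeset] A hedged sketch of
# the round trip BlCamera implements above: _dump_implementation produces a
# plain dict filtered by the Dumper include_filter, and _load_implementation
# feeds it back through a Loader, applying the nested depth-of-field block to
# target.dof separately. The literal values below are made up; 'dof' itself is
# not visible in this hunk's include_filter, but _load_implementation reads it
# from data.get('dof').
import bpy  # only available when running inside Blender

example_camera_data = {
    "name": "Camera",
    "sensor_fit": "AUTO",
    "sensor_width": 36.0,
    "sensor_height": 24.0,
    "display_size": 1.0,
    "dof": {"use_dof": True, "aperture_fstop": 2.8},
}

# Manual equivalent of what Loader().load(new_cam, example_camera_data) does:
new_cam = bpy.data.cameras.new(example_camera_data["name"])
new_cam.display_size = example_camera_data["display_size"]
new_cam.dof.use_dof = example_camera_data["dof"]["use_dof"]
new_cam.dof.aperture_fstop = example_camera_data["dof"]["aperture_fstop"]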
+# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils @@ -13,7 +31,7 @@ class BlCollection(BlDatablock): bl_delay_apply = 1 bl_automatic_push = True - def construct(self, data): + def _construct(self, data): if self.is_library: with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData): targetData.collections = [ @@ -28,9 +46,8 @@ class BlCollection(BlDatablock): instance.uuid = self.uuid return instance - def load(self, data, target): + def _load_implementation(self, data, target): # Load other meshes metadata - # dump_anything.load(target, data) target.name = data["name"] # link objects @@ -54,7 +71,7 @@ class BlCollection(BlDatablock): if collection.uuid not in data["children"]: target.children.unlink(collection) - def dump_implementation(self, data, pointer=None): + def _dump_implementation(self, data, pointer=None): assert(pointer) data = {} data['name'] = pointer.name @@ -77,7 +94,7 @@ class BlCollection(BlDatablock): return data - def resolve_deps_implementation(self): + def _resolve_deps_implementation(self): deps = [] for child in self.pointer.children: diff --git a/multi_user/bl_types/bl_curve.py b/multi_user/bl_types/bl_curve.py index 4a498e9..b2ec720 100644 --- a/multi_user/bl_types/bl_curve.py +++ b/multi_user/bl_types/bl_curve.py @@ -1,11 +1,51 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import bpy.types as T import mathutils +import logging from .. 
import utils from .bl_datablock import BlDatablock -from ..libs import dump_anything +from .dump_anything import (Dumper, Loader, + np_load_collection, + np_dump_collection) +logger = logging.getLogger(__name__) + +SPLINE_BEZIER_POINT = [ + # "handle_left_type", + # "handle_right_type", + "handle_left", + "co", + "handle_right", + "tilt", + "weight_softbody", + "radius", +] + +SPLINE_POINT = [ + "co", + "tilt", + "weight_softbody", + "radius", +] class BlCurve(BlDatablock): bl_id = "curves" @@ -15,52 +55,50 @@ class BlCurve(BlDatablock): bl_automatic_push = True bl_icon = 'CURVE_DATA' - def construct(self, data): + def _construct(self, data): return bpy.data.curves.new(data["name"], data["type"]) - def load_implementation(self, data, target): - dump_anything.load(target, data) + def _load_implementation(self, data, target): + loader = Loader() + loader.load(target, data) target.splines.clear() # load splines - for spline in data['splines']: - new_spline = target.splines.new(data['splines'][spline]['type']) - dump_anything.load(new_spline, data['splines'][spline]) + for spline in data['splines'].values(): + new_spline = target.splines.new(spline['type']) + # Load curve geometry data if new_spline.type == 'BEZIER': - for bezier_point_index in data['splines'][spline]["bezier_points"]: - if bezier_point_index != 0: - new_spline.bezier_points.add(1) - dump_anything.load( - new_spline.bezier_points[bezier_point_index], data['splines'][spline]["bezier_points"][bezier_point_index]) - + bezier_points = new_spline.bezier_points + bezier_points.add(spline['bezier_points_count']) + np_load_collection(spline['bezier_points'], bezier_points, SPLINE_BEZIER_POINT) + # Not really working for now... # See https://blender.stackexchange.com/questions/7020/create-nurbs-surface-with-python if new_spline.type == 'NURBS': - new_spline.points.add(len(data['splines'][spline]["points"])-1) - for point_index in data['splines'][spline]["points"]: - dump_anything.load( - new_spline.points[point_index], data['splines'][spline]["points"][point_index]) + logger.error("NURBS not supported.") + # new_spline.points.add(len(data['splines'][spline]["points"])-1) + # for point_index in data['splines'][spline]["points"]: + # loader.load( + # new_spline.points[point_index], data['splines'][spline]["points"][point_index]) - def dump_implementation(self, data, pointer=None): + loader.load(new_spline, spline) + def _dump_implementation(self, data, pointer=None): assert(pointer) - dumper = dump_anything.Dumper() + dumper = Dumper() data = dumper.dump(pointer) data['splines'] = {} - dumper = utils.dump_anything.Dumper() - dumper.depth = 3 - for index, spline in enumerate(pointer.splines): - spline_data = dump_anything.dump(spline) - spline_data['points'] = dumper.dump(spline.points) - spline_data['bezier_points'] = dumper.dump(spline.bezier_points) - spline_data['type'] = dumper.dump(spline.type) + dumper.depth = 2 + spline_data = dumper.dump(spline) + # spline_data['points'] = np_dump_collection(spline.points, SPLINE_POINT) + spline_data['bezier_points_count'] = len(spline.bezier_points)-1 + spline_data['bezier_points'] = np_dump_collection(spline.bezier_points, SPLINE_BEZIER_POINT) data['splines'][index] = spline_data - if isinstance(pointer, T.SurfaceCurve): data['type'] = 'SURFACE' elif isinstance(pointer, T.TextCurve): @@ -68,4 +106,3 @@ class BlCurve(BlDatablock): elif isinstance(pointer, T.Curve): data['type'] = 'CURVE' return data - diff --git a/multi_user/bl_types/bl_datablock.py b/multi_user/bl_types/bl_datablock.py index 
c18dfcd..381cefe 100644 --- a/multi_user/bl_types/bl_datablock.py +++ b/multi_user/bl_types/bl_datablock.py @@ -1,13 +1,48 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils from .. import utils +from .dump_anything import Loader, Dumper from ..libs.replication.replication.data import ReplicatedDatablock -from ..libs.replication.replication.constants import (UP, DIFF_BINARY,DIFF_JSON) -from ..libs import dump_anything +from ..libs.replication.replication.constants import (UP, DIFF_BINARY) + + +def has_action(target): + """ Check if the target datablock has actions + """ + return (hasattr(target, 'animation_data') + and target.animation_data + and target.animation_data.action) + + +def has_driver(target): + """ Check if the target datablock is driven + """ + return (hasattr(target, 'animation_data') + and target.animation_data + and target.animation_data.drivers) + def dump_driver(driver): - dumper = dump_anything.Dumper() + dumper = Dumper() dumper.depth = 6 data = dumper.dump(driver) @@ -15,6 +50,7 @@ def dump_driver(driver): def load_driver(target_datablock, src_driver): + loader = Loader() drivers = target_datablock.animation_data.drivers src_driver_data = src_driver['driver'] new_driver = drivers.new(src_driver['data_path']) @@ -22,7 +58,7 @@ def load_driver(target_datablock, src_driver): # Settings new_driver.driver.type = src_driver_data['type'] new_driver.driver.expression = src_driver_data['expression'] - dump_anything.load(new_driver, src_driver) + loader.load(new_driver, src_driver) # Variables for src_variable in src_driver_data['variables']: @@ -35,7 +71,7 @@ def load_driver(target_datablock, src_driver): src_target_data = src_var_data['targets'][src_target] new_var.targets[src_target].id = utils.resolve_from_id( src_target_data['id'], src_target_data['id_type']) - dump_anything.load( + loader.load( new_var.targets[src_target], src_target_data) # Fcurve @@ -47,8 +83,7 @@ def load_driver(target_datablock, src_driver): for index, src_point in enumerate(src_driver['keyframe_points']): new_point = new_fcurve[index] - dump_anything.load( - new_point, src_driver['keyframe_points'][src_point]) + loader.load(new_point, src_driver['keyframe_points'][src_point]) class BlDatablock(ReplicatedDatablock): @@ -61,7 +96,6 @@ class BlDatablock(ReplicatedDatablock): bl_automatic_push : boolean bl_icon : type icon (blender icon name) """ - bl_id = "scenes" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -77,23 +111,7 @@ class BlDatablock(ReplicatedDatablock): self.diff_method = DIFF_JSON - def library_apply(self): - """Apply stored data - """ - # UP in case we want to reset our pointer data - self.state = UP - - def bl_diff(self): - """Generic datablock diff""" - return self.pointer.name != self.data['name'] - - def diff_library(self): - return False - - def resolve_deps_library(self): - return 
[self.pointer.library] - - def resolve(self): + def _resolve(self): datablock_ref = None datablock_root = getattr(bpy.data, self.bl_id) datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root) @@ -108,15 +126,16 @@ class BlDatablock(ReplicatedDatablock): self.pointer = datablock_ref - def dump(self, pointer=None): + def _dump(self, pointer=None): + dumper = Dumper() data = {} # Dump animation data - if utils.has_action(pointer): - dumper = utils.dump_anything.Dumper() + if has_action(pointer): + dumper = Dumper() dumper.include_filter = ['action'] data['animation_data'] = dumper.dump(pointer.animation_data) - if utils.has_driver(pointer): + if has_driver(pointer): dumped_drivers = {'animation_data': {'drivers': []}} for driver in pointer.animation_data.drivers: dumped_drivers['animation_data']['drivers'].append( @@ -125,16 +144,16 @@ class BlDatablock(ReplicatedDatablock): data.update(dumped_drivers) if self.is_library: - data.update(dump_anything.dump(pointer)) + data.update(dumper.dump(pointer)) else: - data.update(self.dump_implementation(data, pointer=pointer)) + data.update(self._dump_implementation(data, pointer=pointer)) return data - def dump_implementation(self, data, target): + def _dump_implementation(self, data, target): raise NotImplementedError - def load(self, data, target): + def _load(self, data, target): # Load animation data if 'animation_data' in data.keys(): if target.animation_data is None: @@ -153,23 +172,23 @@ class BlDatablock(ReplicatedDatablock): if self.is_library: return else: - self.load_implementation(data, target) + self._load_implementation(data, target) - def load_implementation(self, data, target): + def _load_implementation(self, data, target): raise NotImplementedError def resolve_deps(self): dependencies = [] - if utils.has_action(self.pointer): + if has_action(self.pointer): dependencies.append(self.pointer.animation_data.action) if not self.is_library: - dependencies.extend(self.resolve_deps_implementation()) - print(dependencies) + dependencies.extend(self._resolve_deps_implementation()) + return dependencies - def resolve_deps_implementation(self): + def _resolve_deps_implementation(self): return [] def is_valid(self): diff --git a/multi_user/bl_types/bl_gpencil.py b/multi_user/bl_types/bl_gpencil.py index d593e6d..e3156ed 100644 --- a/multi_user/bl_types/bl_gpencil.py +++ b/multi_user/bl_types/bl_gpencil.py @@ -1,76 +1,278 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
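# [Illustrative aside, not part of the original changeset] The renaming above
# turns construct/load/dump into the template-method hooks _construct,
# _load_implementation, _dump_implementation and _resolve_deps_implementation,
# while BlDatablock._dump/_load keep the shared animation-data, driver and
# library handling. A minimal hypothetical type following that contract could
# look like the sketch below (a speaker type is used purely as an example and
# is not part of this changeset):
import bpy

from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock


class BlSpeakerExample(BlDatablock):
    bl_id = "speakers"
    bl_class = bpy.types.Speaker
    bl_delay_refresh = 1
    bl_delay_apply = 1
    bl_automatic_push = True
    bl_icon = 'SPEAKER'

    def _construct(self, data):
        return bpy.data.speakers.new(data["name"])

    def _load_implementation(self, data, target):
        loader = Loader()
        loader.load(target, data)

    def _dump_implementation(self, data, pointer=None):
        assert(pointer)
        dumper = Dumper()
        dumper.depth = 1
        dumper.include_filter = ['name', 'volume', 'pitch', 'muted']
        return dumper.dump(pointer)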
+# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils +import numpy as np -from ..libs import dump_anything +from .dump_anything import (Dumper, + Loader, + np_dump_collection, + np_load_collection) from .bl_datablock import BlDatablock +# GPencil data api is structured as it follow: +# GP-Object --> GP-Layers --> GP-Frames --> GP-Strokes --> GP-Stroke-Points -def load_gpencil_layer(target=None, data=None, create=False): +STROKE_POINT = [ + 'co', + 'pressure', + 'strength', + 'uv_factor', + 'uv_rotation' - dump_anything.load(target, data) - for k,v in target.frames.items(): - target.frames.remove(v) - - for frame in data["frames"]: - - tframe = target.frames.new(data["frames"][frame]['frame_number']) +] - for stroke in data["frames"][frame]["strokes"]: - try: - tstroke = tframe.strokes[stroke] - except: - tstroke = tframe.strokes.new() - dump_anything.load( - tstroke, data["frames"][frame]["strokes"][stroke]) +if bpy.app.version[1] >= 83: + STROKE_POINT.append('vertex_color') - for point in data["frames"][frame]["strokes"][stroke]["points"]: - p = data["frames"][frame]["strokes"][stroke]["points"][point] +def dump_stroke(stroke): + """ Dump a grease pencil stroke to a dict - tstroke.points.add(1) - tpoint = tstroke.points[len(tstroke.points)-1] + :param stroke: target grease pencil stroke + :type stroke: bpy.types.GPencilStroke + :return: dict + """ + + assert(stroke) + + dumper = Dumper() + dumper.include_filter = [ + "aspect", + "display_mode", + "draw_cyclic", + "end_cap_mode", + "hardeness", + "line_width", + "material_index", + "start_cap_mode", + "uv_rotation", + "uv_scale", + "uv_translation", + "vertex_color_fill", + ] + dumped_stroke = dumper.dump(stroke) + + # Stoke points + p_count = len(stroke.points) + dumped_stroke['p_count'] = p_count + dumped_stroke['points'] = np_dump_collection(stroke.points, STROKE_POINT) + + # TODO: uv_factor, uv_rotation + + return dumped_stroke + + +def load_stroke(stroke_data, stroke): + """ Load a grease pencil stroke from a dict + + :param stroke_data: dumped grease pencil stroke + :type stroke_data: dict + :param stroke: target grease pencil stroke + :type stroke: bpy.types.GPencilStroke + """ + assert(stroke and stroke_data) + + loader = Loader() + loader.load(stroke, stroke_data) + + stroke.points.add(stroke_data["p_count"]) + + np_load_collection(stroke_data['points'], stroke.points, STROKE_POINT) + +def dump_frame(frame): + """ Dump a grease pencil frame to a dict + + :param frame: target grease pencil stroke + :type frame: bpy.types.GPencilFrame + :return: dict + """ + + assert(frame) + + dumped_frame = dict() + dumped_frame['frame_number'] = frame.frame_number + dumped_frame['strokes'] = [] + + # TODO: took existing strokes in account + for stroke in frame.strokes: + dumped_frame['strokes'].append(dump_stroke(stroke)) + + return dumped_frame + + +def load_frame(frame_data, frame): + """ Load a grease pencil frame from a dict + + :param frame_data: source grease pencil frame + :type frame_data: dict + :param frame: target grease pencil stroke + :type frame: bpy.types.GPencilFrame + """ + + assert(frame and frame_data) + + # frame.frame_number = frame_data['frame_number'] + + # TODO: took existing stroke in account + + for stroke_data in frame_data['strokes']: + target_stroke = frame.strokes.new() + load_stroke(stroke_data, target_stroke) + + +def dump_layer(layer): + """ Dump a grease pencil layer + + :param layer: target grease pencil stroke + :type layer: bpy.types.GPencilFrame + """ + + assert(layer) + + dumper = Dumper() 
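# [Illustrative aside, not part of the original changeset] A hedged usage
# sketch of the stroke helpers defined above: dump_stroke flattens every
# attribute listed in STROKE_POINT into per-property arrays through
# np_dump_collection, and load_stroke re-creates the points before writing
# them back. The datablock name "Stroke" and the frame number are assumptions.
import bpy

gp = bpy.data.grease_pencils.get("Stroke")
if gp and gp.layers.active and gp.layers.active.active_frame:
    source_frame = gp.layers.active.active_frame
    dumped = [dump_stroke(stroke) for stroke in source_frame.strokes]

    # Rebuild the strokes on a fresh frame, as load_frame does above.
    target_frame = gp.layers.active.frames.new(10)
    for stroke_data in dumped:
        load_stroke(stroke_data, target_frame.strokes.new())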
+ + dumper.include_filter = [ + 'info', + 'opacity', + 'channel_color', + 'color', + 'thickness', + 'tint_color', + 'tint_factor', + 'vertex_paint_opacity', + 'line_change', + 'use_onion_skinning', + 'use_annotation_onion_skinning', + 'annotation_onion_before_range', + 'annotation_onion_after_range', + 'annotation_onion_before_color', + 'annotation_onion_after_color', + 'pass_index', + # 'viewlayer_render', + 'blend_mode', + 'hide', + 'annotation_hide', + 'lock', + # 'lock_frame', + # 'lock_material', + # 'use_mask_layer', + 'use_lights', + 'use_solo_mode', + 'select', + 'show_points', + 'show_in_front', + # 'parent', + # 'parent_type', + # 'parent_bone', + # 'matrix_inverse', + ] + dumped_layer = dumper.dump(layer) + + dumped_layer['frames'] = [] + + for frame in layer.frames: + dumped_layer['frames'].append(dump_frame(frame)) + + return dumped_layer + + +def load_layer(layer_data, layer): + """ Load a grease pencil layer from a dict + + :param layer_data: source grease pencil layer data + :type layer_data: dict + :param layer: target grease pencil stroke + :type layer: bpy.types.GPencilFrame + """ + # TODO: take existing data in account + loader = Loader() + loader.load(layer, layer_data) + + for frame_data in layer_data["frames"]: + target_frame = layer.frames.new(frame_data['frame_number']) + + load_frame(frame_data, target_frame) - dump_anything.load(tpoint, p) class BlGpencil(BlDatablock): bl_id = "grease_pencils" bl_class = bpy.types.GreasePencil - bl_delay_refresh = 5 - bl_delay_apply = 5 + bl_delay_refresh = 2 + bl_delay_apply = 1 bl_automatic_push = True bl_icon = 'GREASEPENCIL' - def construct(self, data): + def _construct(self, data): return bpy.data.grease_pencils.new(data["name"]) - def load_implementation(self, data, target): - for layer in target.layers: - target.layers.remove(layer) - - if "layers" in data.keys(): - for layer in data["layers"]: - if layer not in target.layers.keys(): - gp_layer = target.layers.new(data["layers"][layer]["info"]) - else: - gp_layer = target.layers[layer] - load_gpencil_layer( - target=gp_layer, data=data["layers"][layer], create=True) - - dump_anything.load(target, data) - + def _load_implementation(self, data, target): target.materials.clear() if "materials" in data.keys(): for mat in data['materials']: target.materials.append(bpy.data.materials[mat]) - def dump_implementation(self, data, pointer=None): + loader = Loader() + loader.load(target, data) + + # TODO: reuse existing layer + for layer in target.layers: + target.layers.remove(layer) + + if "layers" in data.keys(): + for layer in data["layers"]: + layer_data = data["layers"].get(layer) + + # if layer not in target.layers.keys(): + target_layer = target.layers.new(data["layers"][layer]["info"]) + # else: + # target_layer = target.layers[layer] + # target_layer.clear() + + load_layer(layer_data, target_layer) + + + + + + def _dump_implementation(self, data, pointer=None): assert(pointer) - data = dump_anything.dump(pointer, 2) - data['layers'] = dump_anything.dump(pointer.layers, 9) + dumper = Dumper() + dumper.depth = 2 + dumper.include_filter = [ + 'materials', + 'name', + 'zdepth_offset', + 'stroke_thickness_space', + 'pixel_factor', + 'stroke_depth_order' + ] + data = dumper.dump(pointer) + + data['layers'] = {} + + for layer in pointer.layers: + data['layers'][layer.info] = dump_layer(layer) return data - def resolve_deps_implementation(self): + def _resolve_deps_implementation(self): deps = [] for material in self.pointer.materials: diff --git a/multi_user/bl_types/bl_image.py 
b/multi_user/bl_types/bl_image.py index 8b965f1..4a9ce9c 100644 --- a/multi_user/bl_types/bl_image.py +++ b/multi_user/bl_types/bl_image.py @@ -1,8 +1,27 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils import os from .. import utils +from .dump_anything import Loader, Dumper from .bl_datablock import BlDatablock def dump_image(image): @@ -10,8 +29,10 @@ def dump_image(image): if image.source == "GENERATED": prefs = utils.get_preferences() img_name = "{}.png".format(image.name) - + + # Cache the image on the disk image.filepath_raw = os.path.join(prefs.cache_directory, img_name) + os.makedirs(prefs.cache_directory, exist_ok=True) image.file_format = "PNG" image.save() @@ -35,14 +56,14 @@ class BlImage(BlDatablock): bl_automatic_push = False bl_icon = 'IMAGE_DATA' - def construct(self, data): + def _construct(self, data): return bpy.data.images.new( name=data['name'], width=data['size'][0], height=data['size'][1] ) - def load(self, data, target): + def _load(self, data, target): image = target prefs = utils.get_preferences() @@ -59,11 +80,11 @@ class BlImage(BlDatablock): image.colorspace_settings.name = data["colorspace_settings"]["name"] - def dump(self, data, pointer=None): + def _dump(self, pointer=None): assert(pointer) data = {} data['pixels'] = dump_image(pointer) - dumper = utils.dump_anything.Dumper() + dumper = Dumper() dumper.depth = 2 dumper.include_filter = [ "name", diff --git a/multi_user/bl_types/bl_lattice.py b/multi_user/bl_types/bl_lattice.py index 5a7ac28..d816a2c 100644 --- a/multi_user/bl_types/bl_lattice.py +++ b/multi_user/bl_types/bl_lattice.py @@ -1,9 +1,29 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils -from .. 
import utils +from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection from .bl_datablock import BlDatablock +POINT = ['co', 'weight_softbody', 'co_deform'] + class BlLattice(BlDatablock): bl_id = "lattices" @@ -13,19 +33,20 @@ class BlLattice(BlDatablock): bl_automatic_push = True bl_icon = 'LATTICE_DATA' - def load_implementation(self, data, target): - utils.dump_anything.load(target, data) + def _load_implementation(self, data, target): + loader = Loader() + loader.load(target, data) - for point in data['points']: - utils.dump_anything.load(target.points[point], data["points"][point]) - def construct(self, data): + np_load_collection(data['points'], target.points, POINT) + + def _construct(self, data): return bpy.data.lattices.new(data["name"]) - def dump_implementation(self, data, pointer=None): + def _dump_implementation(self, data, pointer=None): assert(pointer) - dumper = utils.dump_anything.Dumper() - dumper.depth = 3 + dumper = Dumper() + dumper.depth = 1 dumper.include_filter = [ "name", 'type', @@ -35,17 +56,10 @@ class BlLattice(BlDatablock): 'interpolation_type_u', 'interpolation_type_v', 'interpolation_type_w', - 'use_outside', - 'points', - 'co', - 'weight_softbody', - 'co_deform' + 'use_outside' ] data = dumper.dump(pointer) + data['points'] = np_dump_collection(pointer.points, POINT) return data - - - - diff --git a/multi_user/bl_types/bl_library.py b/multi_user/bl_types/bl_library.py index 8e909ac..f5b1753 100644 --- a/multi_user/bl_types/bl_library.py +++ b/multi_user/bl_types/bl_library.py @@ -1,7 +1,25 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils -from ..libs import dump_anything +from .dump_anything import Loader, Dumper from .bl_datablock import BlDatablock @@ -13,15 +31,16 @@ class BlLibrary(BlDatablock): bl_automatic_push = True bl_icon = 'LIBRARY_DATA_DIRECT' - def construct(self, data): + def _construct(self, data): with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData): targetData = sourceData return sourceData - def load(self, data, target): + def _load(self, data, target): pass def dump(self, pointer=None): assert(pointer) - return dump_anything.dump(pointer, 1) + dumper = Dumper() + return dumper.dump(pointer) diff --git a/multi_user/bl_types/bl_light.py b/multi_user/bl_types/bl_light.py index 6dfdd80..3bccfe4 100644 --- a/multi_user/bl_types/bl_light.py +++ b/multi_user/bl_types/bl_light.py @@ -1,7 +1,25 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
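# [Illustrative aside, not part of the original changeset] np_dump_collection
# and np_load_collection live in dump_anything, which this diff does not show.
# A hedged sketch of the usual numpy pattern such helpers are expected to
# wrap, using two of the lattice POINT attributes; the real implementation may
# differ in how it packs and serialises the arrays.
import bpy
import numpy as np

lattice = bpy.data.lattices.new("example")
points = lattice.points

# Dump: foreach_get fills one flat float32 array per attribute
# ('co_deform' stores 3 floats per point, 'weight_softbody' stores 1).
co_deform = np.empty(len(points) * 3, dtype=np.float32)
points.foreach_get('co_deform', co_deform)

weights = np.empty(len(points), dtype=np.float32)
points.foreach_get('weight_softbody', weights)

# Load: foreach_set writes the flat arrays back onto an equally sized
# collection in a single call, which is what makes the numpy path fast.
points.foreach_set('co_deform', co_deform)
points.foreach_set('weight_softbody', weights)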
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils -from .. import utils +from .dump_anything import Loader, Dumper from .bl_datablock import BlDatablock @@ -13,15 +31,16 @@ class BlLight(BlDatablock): bl_automatic_push = True bl_icon = 'LIGHT_DATA' - def construct(self, data): + def _construct(self, data): return bpy.data.lights.new(data["name"], data["type"]) - def load(self, data, target): - utils.dump_anything.load(target, data) + def _load_implementation(self, data, target): + loader = Loader() + loader.load(target, data) - def dump_implementation(self, data, pointer=None): + def _dump_implementation(self, data, pointer=None): assert(pointer) - dumper = utils.dump_anything.Dumper() + dumper = Dumper() dumper.depth = 3 dumper.include_filter = [ "name", @@ -41,7 +60,8 @@ class BlLight(BlDatablock): "contact_shadow_distance", "contact_shadow_soft_size", "contact_shadow_bias", - "contact_shadow_thickness" + "contact_shadow_thickness", + "shape" ] data = dumper.dump(pointer) return data diff --git a/multi_user/bl_types/bl_lightprobe.py b/multi_user/bl_types/bl_lightprobe.py index bc8edfd..7da2b7e 100644 --- a/multi_user/bl_types/bl_lightprobe.py +++ b/multi_user/bl_types/bl_lightprobe.py @@ -1,8 +1,26 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils import logging -from .. import utils +from .dump_anything import Loader, Dumper from .bl_datablock import BlDatablock logger = logging.getLogger(__name__) @@ -15,23 +33,24 @@ class BlLightprobe(BlDatablock): bl_automatic_push = True bl_icon = 'LIGHTPROBE_GRID' - def load_implementation(self, data, target): - utils.dump_anything.load(target, data) - - def construct(self, data): + def _construct(self, data): type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type'] # See https://developer.blender.org/D6396 if bpy.app.version[1] >= 83: return bpy.data.lightprobes.new(data["name"], type) else: - logger.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396") + logger.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396") - def dump_implementation(self, data, pointer=None): + def _load_implementation(self, data, target): + loader = Loader() + loader.load(target, data) + + def _dump_implementation(self, data, pointer=None): assert(pointer) if bpy.app.version[1] < 83: logger.warning("Lightprobe replication only supported since 2.83. 
See https://developer.blender.org/D6396") - dumper = utils.dump_anything.Dumper() + dumper = Dumper() dumper.depth = 1 dumper.include_filter = [ "name", diff --git a/multi_user/bl_types/bl_material.py b/multi_user/bl_types/bl_material.py index fa33f40..f230ef1 100644 --- a/multi_user/bl_types/bl_material.py +++ b/multi_user/bl_types/bl_material.py @@ -1,118 +1,212 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils import logging from .. import utils -from ..libs import dump_anything +from .dump_anything import Loader, Dumper from .bl_datablock import BlDatablock logger = logging.getLogger(__name__) -def clean_color_ramp(target_ramp): - # clear existing - try: - for key in target_ramp.elements: - target_ramp.elements.remove(key) - except: - pass - -def load_mapping(target_apping, source_mapping): - # clear existing curves - for curve in target_apping.curves: - for point in curve.points: - try: - curve.remove(point) - except: - continue - - # Load curves - for curve in source_mapping['curves']: - for point in source_mapping['curves'][curve]['points']: - pos = source_mapping['curves'][curve]['points'][point]['location'] - target_apping.curves[curve].points.new(pos[0],pos[1]) +def load_node(node_data, node_tree): + """ Load a node into a node_tree from a dict -def load_node(target_node_tree, source): - target_node = target_node_tree.nodes.get(source["name"]) + :arg node_data: dumped node data + :type node_data: dict + :arg node_tree: target node_tree + :type node_tree: bpy.types.NodeTree + """ + loader = Loader() + target_node = node_tree.nodes.new(type=node_data["bl_idname"]) - if target_node is None: - node_type = source["bl_idname"] + loader.load(target_node, node_data) - target_node = target_node_tree.nodes.new(type=node_type) - - # Clean color ramp before loading it - if source['type'] == 'VALTORGB': - clean_color_ramp(target_node.color_ramp) - if source['type'] == 'CURVE_RGB': - load_mapping(target_node.mapping, source['mapping']) - dump_anything.load( - target_node, - source) - - if source['type'] == 'TEX_IMAGE': - target_node.image = bpy.data.images[source['image']] - for input in source["inputs"]: + for input in node_data["inputs"]: if hasattr(target_node.inputs[input], "default_value"): try: - target_node.inputs[input].default_value = source["inputs"][input]["default_value"] + target_node.inputs[input].default_value = node_data["inputs"][input]["default_value"] except: logger.error("{} not supported, skipping".format(input)) -def load_link(target_node_tree, source): - input_socket = target_node_tree.nodes[source['to_node'] - ['name']].inputs[source['to_socket']['name']] - output_socket = target_node_tree.nodes[source['from_node'] - ['name']].outputs[source['from_socket']['name']] - target_node_tree.links.new(input_socket, output_socket) +def load_links(links_data, node_tree): + """ Load node_tree links from a list + 
+ :arg links_data: dumped node links + :type links_data: list + :arg node_tree: node links collection + :type node_tree: bpy.types.NodeTree + """ + + for link in links_data: + input_socket = node_tree.nodes[link['to_node']].inputs[int(link['to_socket'])] + output_socket = node_tree.nodes[link['from_node']].outputs[int(link['from_socket'])] + + node_tree.links.new(input_socket, output_socket) + + +def dump_links(links): + """ Dump node_tree links collection to a list + + :arg links: node links collection + :type links: bpy.types.NodeLinks + :retrun: list + """ + + links_data = [] + + for link in links: + links_data.append({ + 'to_node':link.to_node.name, + 'to_socket':link.to_socket.path_from_id()[-2:-1], + 'from_node':link.from_node.name, + 'from_socket':link.from_socket.path_from_id()[-2:-1], + }) + + return links_data + + +def dump_node(node): + """ Dump a single node to a dict + + :arg node: target node + :type node: bpy.types.Node + :retrun: dict + """ + + node_dumper = Dumper() + node_dumper.depth = 1 + node_dumper.exclude_filter = [ + "dimensions", + "show_expanded", + "name_full", + "select", + "bl_height_min", + "bl_height_max", + "bl_height_default", + "bl_width_min", + "bl_width_max", + "type", + "bl_icon", + "bl_width_default", + "bl_static_type", + "show_tetxure", + "is_active_output", + "hide", + "show_options", + "show_preview", + "show_texture", + "outputs", + "width_hidden" + ] + + dumped_node = node_dumper.dump(node) + + if hasattr(node, 'inputs'): + dumped_node['inputs'] = {} + + for i in node.inputs: + input_dumper = Dumper() + input_dumper.depth = 2 + input_dumper.include_filter = ["default_value"] + + if hasattr(i, 'default_value'): + dumped_node['inputs'][i.name] = input_dumper.dump( + i) + if hasattr(node, 'color_ramp'): + ramp_dumper = Dumper() + ramp_dumper.depth = 4 + ramp_dumper.include_filter = [ + 'elements', + 'alpha', + 'color', + 'position' + ] + dumped_node['color_ramp'] = ramp_dumper.dump(node.color_ramp) + if hasattr(node, 'mapping'): + curve_dumper = Dumper() + curve_dumper.depth = 5 + curve_dumper.include_filter = [ + 'curves', + 'points', + 'location' + ] + dumped_node['mapping'] = curve_dumper.dump(node.mapping) + + return dumped_node class BlMaterial(BlDatablock): bl_id = "materials" bl_class = bpy.types.Material - bl_delay_refresh = 10 - bl_delay_apply = 10 + bl_delay_refresh = 1 + bl_delay_apply = 1 bl_automatic_push = True bl_icon = 'MATERIAL_DATA' - def construct(self, data): + def _construct(self, data): return bpy.data.materials.new(data["name"]) - def load_implementation(self, data, target): + def _load_implementation(self, data, target): + loader = Loader() target.name = data['name'] if data['is_grease_pencil']: if not target.is_grease_pencil: bpy.data.materials.create_gpencil_data(target) - dump_anything.load( + loader.load( target.grease_pencil, data['grease_pencil']) - utils.load_dict(data['grease_pencil'], target.grease_pencil) - elif data["use_nodes"]: + if data["use_nodes"]: if target.node_tree is None: target.use_nodes = True target.node_tree.nodes.clear() - dump_anything.load(target,data) + loader.load(target,data) # Load nodes for node in data["node_tree"]["nodes"]: - load_node(target.node_tree, data["node_tree"]["nodes"][node]) + load_node(data["node_tree"]["nodes"][node], target.node_tree) # Load nodes links target.node_tree.links.clear() - for link in data["node_tree"]["links"]: - load_link(target.node_tree, data["node_tree"]["links"][link]) + load_links(data["node_tree"]["links"], target.node_tree) - def dump_implementation(self, 
data, pointer=None): + def _dump_implementation(self, data, pointer=None): assert(pointer) - mat_dumper = dump_anything.Dumper() + mat_dumper = Dumper() mat_dumper.depth = 2 mat_dumper.exclude_filter = [ + "is_embed_data", + "is_evaluated", + "name_full", + "bl_description", + "bl_icon", + "bl_idname", + "bl_label", "preview", "original", "uuid", @@ -121,77 +215,46 @@ class BlMaterial(BlDatablock): "line_color", "view_center", ] - node_dumper = dump_anything.Dumper() - node_dumper.depth = 1 - node_dumper.exclude_filter = [ - "dimensions", - "show_expanded" - "select", - "bl_height_min", - "bl_height_max", - "bl_width_min", - "bl_width_max", - "bl_width_default", - "hide", - "show_options", - "show_tetxures", - "show_preview", - "outputs", - "width_hidden" - ] - input_dumper = dump_anything.Dumper() - input_dumper.depth = 2 - input_dumper.include_filter = ["default_value"] - links_dumper = dump_anything.Dumper() - links_dumper.depth = 3 - links_dumper.include_filter = [ - "name", - "to_node", - "from_node", - "from_socket", - "to_socket"] data = mat_dumper.dump(pointer) if pointer.use_nodes: nodes = {} - for node in pointer.node_tree.nodes: - nodes[node.name] = node_dumper.dump(node) - - if hasattr(node, 'inputs'): - nodes[node.name]['inputs'] = {} - - for i in node.inputs: - if hasattr(i, 'default_value'): - nodes[node.name]['inputs'][i.name] = input_dumper.dump( - i) - if hasattr(node, 'color_ramp'): - ramp_dumper = dump_anything.Dumper() - ramp_dumper.depth = 4 - ramp_dumper.include_filter = [ - 'elements', - 'alpha', - 'color', - 'position' - ] - nodes[node.name]['color_ramp'] = ramp_dumper.dump(node.color_ramp) - if hasattr(node, 'mapping'): - curve_dumper = dump_anything.Dumper() - curve_dumper.depth = 5 - curve_dumper.include_filter = [ - 'curves', - 'points', - 'location' - ] - nodes[node.name]['mapping'] = curve_dumper.dump(node.mapping) + nodes[node.name] = dump_node(node) data["node_tree"]['nodes'] = nodes - data["node_tree"]["links"] = links_dumper.dump(pointer.node_tree.links) + + data["node_tree"]["links"] = dump_links(pointer.node_tree.links) - elif pointer.is_grease_pencil: - data['grease_pencil'] = dump_anything.dump(pointer.grease_pencil, 3) + if pointer.is_grease_pencil: + gp_mat_dumper = Dumper() + gp_mat_dumper.depth = 3 + + gp_mat_dumper.include_filter = [ + 'show_stroke', + 'mode', + 'stroke_style', + 'color', + 'use_overlap_strokes', + 'show_fill', + 'fill_style', + 'fill_color', + 'pass_index', + 'alignment_mode', + # 'fill_image', + 'texture_opacity', + 'mix_factor', + 'texture_offset', + 'texture_angle', + 'texture_scale', + 'texture_clamp', + 'gradient_type', + 'mix_color', + 'flip' + ] + data['grease_pencil'] = gp_mat_dumper.dump(pointer.grease_pencil) return data - def resolve_deps_implementation(self): + def _resolve_deps_implementation(self): # TODO: resolve node group deps deps = [] diff --git a/multi_user/bl_types/bl_mesh.py b/multi_user/bl_types/bl_mesh.py index f502cae..bd8d618 100644 --- a/multi_user/bl_types/bl_mesh.py +++ b/multi_user/bl_types/bl_mesh.py @@ -1,163 +1,159 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import bmesh import mathutils +import logging +import numpy as np -from .. import utils +from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection from ..libs.replication.replication.constants import DIFF_BINARY from .bl_datablock import BlDatablock +logger = logging.getLogger(__name__) -def dump_mesh(mesh, data={}): - import bmesh +VERTICE = ['co'] - mesh_data = data - mesh_buffer = bmesh.new() - - # https://blog.michelanders.nl/2016/02/copying-vertices-to-numpy-arrays-in_4.html - mesh_buffer.from_mesh(mesh) - - uv_layer = mesh_buffer.loops.layers.uv.verify() - bevel_layer = mesh_buffer.verts.layers.bevel_weight.verify() - skin_layer = mesh_buffer.verts.layers.skin.verify() - - verts = {} - for vert in mesh_buffer.verts: - v = {} - v["co"] = list(vert.co) - - # vert metadata - v['bevel'] = vert[bevel_layer] - v['normal'] = list(vert.normal) - # v['skin'] = list(vert[skin_layer]) - - verts[str(vert.index)] = v - - mesh_data["verts"] = verts - - edges = {} - for edge in mesh_buffer.edges: - e = {} - e["verts"] = [edge.verts[0].index, edge.verts[1].index] - - # Edge metadata - e["smooth"] = edge.smooth - - edges[edge.index] = e - mesh_data["edges"] = edges - - faces = {} - for face in mesh_buffer.faces: - f = {} - fverts = [] - for vert in face.verts: - fverts.append(vert.index) - - f["verts"] = fverts - f["material_index"] = face.material_index - f["smooth"] = face.smooth - f["normal"] = list(face.normal) - f["index"] = face.index - - uvs = [] - # Face metadata - for loop in face.loops: - loop_uv = loop[uv_layer] - - uvs.append(list(loop_uv.uv)) - - f["uv"] = uvs - faces[face.index] = f - - mesh_data["faces"] = faces - - uv_layers = [] - for uv_layer in mesh.uv_layers: - uv_layers.append(uv_layer.name) - - mesh_data["uv_layers"] = uv_layers - # return mesh_data +EDGE = [ + 'vertices', + 'crease', + 'bevel_weight', +] +LOOP = [ + 'vertex_index', + 'normal', +] +POLYGON = [ + 'loop_total', + 'loop_start', + 'use_smooth', + 'material_index', +] class BlMesh(BlDatablock): bl_id = "meshes" bl_class = bpy.types.Mesh - bl_delay_refresh = 10 - bl_delay_apply = 10 + bl_delay_refresh = 2 + bl_delay_apply = 1 bl_automatic_push = True bl_icon = 'MESH_DATA' - def construct(self, data): + def _construct(self, data): instance = bpy.data.meshes.new(data["name"]) instance.uuid = self.uuid return instance - def load_implementation(self, data, target): + def _load_implementation(self, data, target): if not target or not target.is_editmode: - # 1 - LOAD MATERIAL SLOTS - # SLots - i = 0 + loader = Loader() + loader.load(target, data) + + # MATERIAL SLOTS + target.materials.clear() for m in data["material_list"]: target.materials.append(bpy.data.materials[m]) - # 2 - LOAD GEOMETRY - mesh_buffer = bmesh.new() + # CLEAR GEOMETRY + if target.vertices: + target.clear_geometry() - for i in data["verts"]: - v = mesh_buffer.verts.new(data["verts"][i]["co"]) - v.normal = data["verts"][i]["normal"] - mesh_buffer.verts.ensure_lookup_table() + target.vertices.add(data["vertex_count"]) + target.edges.add(data["egdes_count"]) + target.loops.add(data["loop_count"]) + target.polygons.add(data["poly_count"]) - for i in data["edges"]: - verts = mesh_buffer.verts - v1 = data["edges"][i]["verts"][0] - v2 = 
data["edges"][i]["verts"][1] - edge = mesh_buffer.edges.new([verts[v1], verts[v2]]) - edge.smooth = data["edges"][i]["smooth"] + # LOADING + np_load_collection(data['vertices'], target.vertices, VERTICE) + np_load_collection(data['edges'], target.edges, EDGE) + np_load_collection(data['loops'], target.loops, LOOP) + np_load_collection(data["polygons"],target.polygons, POLYGON) + + # UV Layers + for layer in data['uv_layers']: + if layer not in target.uv_layers: + target.uv_layers.new(name=layer) + + np_load_collection_primitives( + target.uv_layers[layer].data, + 'uv', + data["uv_layers"][layer]['data']) - mesh_buffer.edges.ensure_lookup_table() - for p in data["faces"]: - verts = [] - for v in data["faces"][p]["verts"]: - verts.append(mesh_buffer.verts[v]) + # Vertex color + for color_layer in data['vertex_colors']: + if color_layer not in target.vertex_colors: + target.vertex_colors.new(name=color_layer) - if len(verts) > 0: - f = mesh_buffer.faces.new(verts) + np_load_collection_primitives( + target.vertex_colors[color_layer].data, + 'color', + data["vertex_colors"][color_layer]['data']) - uv_layer = mesh_buffer.loops.layers.uv.verify() + target.validate() + target.update() - f.smooth = data["faces"][p]["smooth"] - f.normal = data["faces"][p]["normal"] - f.index = data["faces"][p]["index"] - f.material_index = data["faces"][p]['material_index'] - # UV loading - for i, loop in enumerate(f.loops): - loop_uv = loop[uv_layer] - loop_uv.uv = data["faces"][p]["uv"][i] - mesh_buffer.faces.ensure_lookup_table() - mesh_buffer.to_mesh(target) - - # 3 - LOAD METADATA - # uv's - utils.dump_anything.load(target.uv_layers, data['uv_layers']) - - bevel_layer = mesh_buffer.verts.layers.bevel_weight.verify() - skin_layer = mesh_buffer.verts.layers.skin.verify() - - utils.dump_anything.load(target, data) - - def dump_implementation(self, data, pointer=None): + def _dump_implementation(self, data, pointer=None): assert(pointer) - dumper = utils.dump_anything.Dumper() - dumper.depth = 2 + mesh = pointer + + dumper = Dumper() + dumper.depth = 1 dumper.include_filter = [ 'name', 'use_auto_smooth', - 'auto_smooth_angle' + 'auto_smooth_angle', + 'use_customdata_edge_bevel', + 'use_customdata_edge_crease' ] - data = dumper.dump(pointer) - dump_mesh(pointer, data) + + data = dumper.dump(mesh) + + # VERTICES + data["vertex_count"] = len(mesh.vertices) + data["vertices"] = np_dump_collection(mesh.vertices, VERTICE) + + # EDGES + data["egdes_count"] = len(mesh.edges) + data["edges"] = np_dump_collection(mesh.edges, EDGE) + + # POLYGONS + data["poly_count"] = len(mesh.polygons) + data["polygons"] = np_dump_collection(mesh.polygons, POLYGON) + + # LOOPS + data["loop_count"] = len(mesh.loops) + data["loops"] = np_dump_collection(mesh.loops, LOOP) + + # UV Layers + data['uv_layers'] = {} + for layer in mesh.uv_layers: + data['uv_layers'][layer.name] = {} + data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive(layer.data, 'uv') + + # Vertex color + data['vertex_colors'] = {} + for color_map in mesh.vertex_colors: + data['vertex_colors'][color_map.name] = {} + data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color') + # Fix material index m_list = [] for material in pointer.materials: @@ -168,7 +164,7 @@ class BlMesh(BlDatablock): return data - def resolve_deps_implementation(self): + def _resolve_deps_implementation(self): deps = [] for material in self.pointer.materials: diff --git a/multi_user/bl_types/bl_metaball.py b/multi_user/bl_types/bl_metaball.py index 
7dec312..22eeea5 100644 --- a/multi_user/bl_types/bl_metaball.py +++ b/multi_user/bl_types/bl_metaball.py @@ -1,10 +1,67 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils -from .. import utils +from .dump_anything import ( + Dumper, Loader, np_dump_collection_primitive, np_load_collection_primitives, + np_dump_collection, np_load_collection) + from .bl_datablock import BlDatablock +ELEMENT = [ + 'co', + 'hide', + 'radius', + 'rotation', + 'size_x', + 'size_y', + 'size_z', + 'stiffness', + 'type' +] + + +def dump_metaball_elements(elements): + """ Dump a metaball element + + :arg element: metaball element + :type bpy.types.MetaElement + :return: dict + """ + + dumped_elements = np_dump_collection(elements, ELEMENT) + + return dumped_elements + + +def load_metaball_elements(elements_data, elements): + """ Dump a metaball element + + :arg element: metaball element + :type bpy.types.MetaElement + :return: dict + """ + np_load_collection(elements_data, elements, ELEMENT) + + class BlMetaball(BlDatablock): bl_id = "metaballs" bl_class = bpy.types.MetaBall @@ -13,25 +70,33 @@ class BlMetaball(BlDatablock): bl_automatic_push = True bl_icon = 'META_BALL' - def construct(self, data): + def _construct(self, data): return bpy.data.metaballs.new(data["name"]) - def load(self, data, target): - utils.dump_anything.load(target, data) - - target.elements.clear() - for element in data["elements"]: - new_element = target.elements.new(type=data["elements"][element]['type']) - utils.dump_anything.load(new_element, data["elements"][element]) + def _load_implementation(self, data, target): + loader = Loader() + loader.load(target, data) - def dump_implementation(self, data, pointer=None): + target.elements.clear() + + for mtype in data["elements"]['type']: + new_element = target.elements.new() + + load_metaball_elements(data['elements'], target.elements) + + def _dump_implementation(self, data, pointer=None): assert(pointer) - dumper = utils.dump_anything.Dumper() - dumper.depth = 3 - dumper.exclude_filter = ["is_editmode"] + dumper = Dumper() + dumper.depth = 1 + dumper.exclude_filter = [ + "is_editmode", + "is_evaluated", + "is_embedded_data", + "is_library_indirect", + "name_full" + ] data = dumper.dump(pointer) + data['elements'] = dump_metaball_elements(pointer.elements) + return data - - - diff --git a/multi_user/bl_types/bl_object.py b/multi_user/bl_types/bl_object.py index b7f356f..1060849 100644 --- a/multi_user/bl_types/bl_object.py +++ b/multi_user/bl_types/bl_object.py @@ -1,33 +1,35 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
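# [Illustrative aside, not part of the original changeset] The metaball load
# path above adds one element per entry of data["elements"]['type'] before
# np_load_collection restores every property listed in ELEMENT. This assumes
# np_dump_collection returns a mapping of property name to per-element values,
# roughly like the made-up two-element example below; the exact array layout
# is an assumption.
example_elements = {
    'type':      ['BALL', 'CAPSULE'],
    'co':        [0.0, 0.0, 0.0,  1.0, 0.0, 0.0],  # 3 floats per element, flattened
    'radius':    [2.0, 1.5],
    'stiffness': [2.0, 2.0],
    # remaining ELEMENT keys ('hide', 'rotation', 'size_x', 'size_y', 'size_z')
    # follow the same per-element layout
}

# Creating len(example_elements['type']) elements first, as
# _load_implementation does, keeps the target collection the same size as the
# dumped arrays so the numpy load can write them back in one pass.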
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils import logging -from .. import utils +from .dump_anything import Loader, Dumper from .bl_datablock import BlDatablock logger = logging.getLogger(__name__) -def load_constraints(target, data): - for local_constraint in target.constraints: - if local_constraint.name not in data: - target.constraints.remove(local_constraint) - - for constraint in data: - target_constraint = target.constraints.get(constraint) - - if not target_constraint: - target_constraint = target.constraints.new( - data[constraint]['type']) - - utils.dump_anything.load( - target_constraint, data[constraint]) - - def load_pose(target_bone, data): target_bone.rotation_mode = data['rotation_mode'] - - utils.dump_anything.load(target_bone, data) + loader = Loader() + loader.load(target_bone, data) class BlObject(BlDatablock): @@ -38,7 +40,7 @@ class BlObject(BlDatablock): bl_automatic_push = True bl_icon = 'OBJECT_DATA' - def construct(self, data): + def _construct(self, data): pointer = None if self.is_library: @@ -50,7 +52,7 @@ class BlObject(BlDatablock): instance.uuid = self.uuid return instance - # Object specific constructor... + # TODO: refactoring if "data" not in data: pass elif data["data"] in bpy.data.meshes.keys(): @@ -85,39 +87,10 @@ class BlObject(BlDatablock): return instance - def load_implementation(self, data, target): + def _load_implementation(self, data, target): # Load transformation data - rot_mode = 'rotation_quaternion' if data['rotation_mode'] == 'QUATERNION' else 'rotation_euler' - target.rotation_mode = data['rotation_mode'] - target.location = data['location'] - setattr(target, rot_mode, data[rot_mode]) - target.scale = data['scale'] - - target.name = data["name"] - # Load modifiers - if hasattr(target, 'modifiers'): - # TODO: smarter selective update - target.modifiers.clear() - - for modifier in data['modifiers']: - target_modifier = target.modifiers.get(modifier) - if not target_modifier: - target_modifier = target.modifiers.new( - data['modifiers'][modifier]['name'], data['modifiers'][modifier]['type']) - - if target_modifier.type == 'PARTICLE_SYSTEM': - tmp_particle_system = target_modifier.particle_system.name - - utils.dump_anything.load( - target_modifier, data['modifiers'][modifier]) - - if target_modifier.type == 'PARTICLE_SYSTEM': - target.particle_systems[data['modifiers'][modifier]['name']].settings = bpy.data.particles[data['modifiers'][modifier]['particle_system']] - # bpy.data.particles.remove(tmp_particle_system) - # Load constraints - # Object - if hasattr(target, 'constraints') and 'constraints' in data: - load_constraints(target, data['constraints']) + loader = Loader() + loader.load(target, data) # Pose if 'pose' in data: @@ -131,7 +104,7 @@ class BlObject(BlDatablock): if not bg_target: bg_target = target.pose.bone_groups.new(name=bg_name) - utils.dump_anything.load(bg_target, bg_data) + loader.load(bg_target, bg_data) # target.pose.bone_groups.get # Bones @@ -140,28 +113,14 @@ class BlObject(BlDatablock): bone_data = data['pose']['bones'].get(bone) if 'constraints' in bone_data.keys(): - load_constraints( - target_bone, bone_data['constraints']) + 
loader.load(target_bone, bone_data['constraints']) + load_pose(target_bone, bone_data) if 'bone_index' in bone_data.keys(): target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']] - # Load relations - if 'children' in data.keys(): - for child in data['children']: - bpy.data.objects[child].parent = self.pointer - - # Load empty representation - target.empty_display_size = data['empty_display_size'] - target.empty_display_type = data['empty_display_type'] - - # Instancing - target.instance_type = data['instance_type'] - if data['instance_type'] == 'COLLECTION': - target.instance_collection = bpy.data.collections[data['instance_collection']] - # vertex groups if 'vertex_groups' in data: target.vertex_groups.clear() @@ -182,7 +141,7 @@ class BlObject(BlDatablock): key_data = data['shape_keys']['key_blocks'][key_block] target.shape_key_add(name=key_block) - utils.dump_anything.load( + loader.load( target.data.shape_keys.key_blocks[key_block], key_data) for vert in key_data['data']: target.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co'] @@ -193,10 +152,9 @@ class BlObject(BlDatablock): target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference] - - def dump_implementation(self, data, pointer=None): + def _dump_implementation(self, data, pointer=None): assert(pointer) - dumper = utils.dump_anything.Dumper() + dumper = Dumper() dumper.depth = 1 dumper.include_filter = [ "name", @@ -313,18 +271,18 @@ class BlObject(BlDatablock): data['vertex_groups'] = vg_data # SHAPE KEYS - pointer_data = pointer.data - if hasattr(pointer_data, 'shape_keys') and pointer_data.shape_keys: - dumper = utils.dump_anything.Dumper() + object_data = pointer.data + if hasattr(object_data, 'shape_keys') and object_data.shape_keys: + dumper = Dumper() dumper.depth = 2 dumper.include_filter = [ 'reference_key', 'use_relative' ] - data['shape_keys'] = dumper.dump(pointer_data.shape_keys) - data['shape_keys']['reference_key'] = pointer_data.shape_keys.reference_key.name + data['shape_keys'] = dumper.dump(object_data.shape_keys) + data['shape_keys']['reference_key'] = object_data.shape_keys.reference_key.name key_blocks = {} - for key in pointer_data.shape_keys.key_blocks: + for key in object_data.shape_keys.key_blocks: dumper.depth = 3 dumper.include_filter = [ 'name', @@ -345,7 +303,7 @@ class BlObject(BlDatablock): print(len(psys[0].particles)) return data - def resolve_deps_implementation(self): + def _resolve_deps_implementation(self): deps = [] # Avoid Empty case diff --git a/multi_user/bl_types/bl_scene.py b/multi_user/bl_types/bl_scene.py index bac83f8..3db21f9 100644 --- a/multi_user/bl_types/bl_scene.py +++ b/multi_user/bl_types/bl_scene.py @@ -1,9 +1,29 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils -from .. 
import utils +from .dump_anything import Loader, Dumper from .bl_datablock import BlDatablock +from ..utils import get_preferences + class BlScene(BlDatablock): bl_id = "scenes" bl_class = bpy.types.Scene @@ -12,15 +32,16 @@ class BlScene(BlDatablock): bl_automatic_push = True bl_icon = 'SCENE_DATA' - def construct(self, data): + def _construct(self, data): instance = bpy.data.scenes.new(data["name"]) instance.uuid = self.uuid return instance - def load(self, data, target): + def _load_implementation(self, data, target): target = self.pointer # Load other meshes metadata - utils.dump_anything.load(target, data) + loader = Loader() + loader.load(target, data) # Load master collection for object in data["collection"]["objects"]: @@ -49,23 +70,79 @@ class BlScene(BlDatablock): if 'grease_pencil' in data.keys(): target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']] - def dump_implementation(self, data, pointer=None): + if 'eevee' in data.keys(): + loader.load(target.eevee, data['eevee']) + + if 'cycles' in data.keys(): + loader.load(target.eevee, data['cycles']) + + if 'view_settings' in data.keys(): + loader.load(target.view_settings, data['view_settings']) + if target.view_settings.use_curve_mapping: + #TODO: change this ugly fix + target.view_settings.curve_mapping.white_level = data['view_settings']['curve_mapping']['white_level'] + target.view_settings.curve_mapping.black_level = data['view_settings']['curve_mapping']['black_level'] + target.view_settings.curve_mapping.update() + + def _dump_implementation(self, data, pointer=None): assert(pointer) data = {} - scene_dumper = utils.dump_anything.Dumper() + scene_dumper = Dumper() scene_dumper.depth = 1 - scene_dumper.include_filter = ['name','world', 'id', 'camera', 'grease_pencil'] + scene_dumper.include_filter = [ + 'name', + 'world', + 'id', + 'camera', + 'grease_pencil', + ] data = scene_dumper.dump(pointer) scene_dumper.depth = 3 + scene_dumper.include_filter = ['children','objects','name'] data['collection'] = scene_dumper.dump(pointer.collection) - + + scene_dumper.depth = 1 + scene_dumper.include_filter = None + + pref = get_preferences() + if pref.sync_flags.sync_render_settings: + scene_dumper.exclude_filter = [ + 'gi_cache_info', + 'feature_set', + 'debug_use_hair_bvh', + 'aa_samples', + 'blur_glossy', + 'glossy_bounces', + 'device', + 'max_bounces', + 'preview_aa_samples', + 'preview_samples', + 'sample_clamp_indirect', + 'samples', + 'volume_bounces' + ] + data['eevee'] = scene_dumper.dump(pointer.eevee) + data['cycles'] = scene_dumper.dump(pointer.cycles) + data['view_settings'] = scene_dumper.dump(pointer.view_settings) + data['view_settings']['curve_mapping'] = scene_dumper.dump(pointer.view_settings.curve_mapping) + + if pointer.view_settings.use_curve_mapping: + scene_dumper.depth = 5 + scene_dumper.include_filter = [ + 'curves', + 'points', + 'location' + ] + data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(pointer.view_settings.curve_mapping.curves) + + return data - def resolve_deps_implementation(self): + def _resolve_deps_implementation(self): deps = [] # child collections diff --git a/multi_user/bl_types/bl_speaker.py b/multi_user/bl_types/bl_speaker.py index d675ee2..3d419f9 100644 --- a/multi_user/bl_types/bl_speaker.py +++ b/multi_user/bl_types/bl_speaker.py @@ -1,7 +1,25 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software 
Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils -from .. import utils +from .dump_anything import Loader, Dumper from .bl_datablock import BlDatablock @@ -13,16 +31,17 @@ class BlSpeaker(BlDatablock): bl_automatic_push = True bl_icon = 'SPEAKER' - def load_implementation(self, data, target): - utils.dump_anything.load(target, data) + def _load_implementation(self, data, target): + loader = Loader() + loader.load(target, data) - def construct(self, data): + def _construct(self, data): return bpy.data.speakers.new(data["name"]) - def dump_implementation(self, data, pointer=None): + def _dump_implementation(self, data, pointer=None): assert(pointer) - dumper = utils.dump_anything.Dumper() + dumper = Dumper() dumper.depth = 1 dumper.include_filter = [ "muted", diff --git a/multi_user/bl_types/bl_world.py b/multi_user/bl_types/bl_world.py index 588ef64..7fffb53 100644 --- a/multi_user/bl_types/bl_world.py +++ b/multi_user/bl_types/bl_world.py @@ -1,23 +1,41 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils -from .. 
import utils +from .dump_anything import Loader, Dumper from .bl_datablock import BlDatablock -from .bl_material import load_link, load_node +from .bl_material import load_links, load_node, dump_node, dump_links class BlWorld(BlDatablock): bl_id = "worlds" bl_class = bpy.types.World - bl_delay_refresh = 4 - bl_delay_apply = 4 + bl_delay_refresh = 1 + bl_delay_apply = 1 bl_automatic_push = True bl_icon = 'WORLD_DATA' - def construct(self, data): + def _construct(self, data): return bpy.data.worlds.new(data["name"]) - def load(self, data, target): + def _load_implementation(self, data, target): if data["use_nodes"]: if target.node_tree is None: target.use_nodes = True @@ -25,18 +43,18 @@ class BlWorld(BlDatablock): target.node_tree.nodes.clear() for node in data["node_tree"]["nodes"]: - load_node(target.node_tree, data["node_tree"]["nodes"][node]) + load_node(data["node_tree"]["nodes"][node], target.node_tree) # Load nodes links target.node_tree.links.clear() - for link in data["node_tree"]["links"]: - load_link(target.node_tree, data["node_tree"]["links"][link]) + + load_links(data["node_tree"]["links"], target.node_tree) - def dump_implementation(self, data, pointer=None): + def _dump_implementation(self, data, pointer=None): assert(pointer) - world_dumper = utils.dump_anything.Dumper() + world_dumper = Dumper() world_dumper.depth = 2 world_dumper.exclude_filter = [ "preview", @@ -51,46 +69,17 @@ class BlWorld(BlDatablock): data = world_dumper.dump(pointer) if pointer.use_nodes: nodes = {} - dumper = utils.dump_anything.Dumper() - dumper.depth = 2 - dumper.exclude_filter = [ - "dimensions", - "select", - "bl_height_min", - "bl_height_max", - "bl_width_min", - "bl_width_max", - "bl_width_default", - "hide", - "show_options", - "show_tetxures", - "show_preview", - "outputs", - "preview", - "original", - "width_hidden", - - ] for node in pointer.node_tree.nodes: - nodes[node.name] = dumper.dump(node) + nodes[node.name] = dump_node(node) - if hasattr(node, 'inputs'): - nodes[node.name]['inputs'] = {} - - for i in node.inputs: - input_dumper = utils.dump_anything.Dumper() - input_dumper.depth = 2 - input_dumper.include_filter = ["default_value"] - if hasattr(i, 'default_value'): - nodes[node.name]['inputs'][i.name] = input_dumper.dump( - i) data["node_tree"]['nodes'] = nodes - utils.dump_datablock_attibutes( - pointer.node_tree, ["links"], 3, data['node_tree']) + + data["node_tree"]['links'] = dump_links(pointer.node_tree.links) + return data - def resolve_deps_implementation(self): + def _resolve_deps_implementation(self): deps = [] if self.pointer.use_nodes: @@ -101,6 +90,3 @@ class BlWorld(BlDatablock): deps.append(self.pointer.library) return deps - def is_valid(self): - return bpy.data.worlds.get(self.data['name']) - diff --git a/multi_user/libs/dump_anything.py b/multi_user/bl_types/dump_anything.py similarity index 57% rename from multi_user/libs/dump_anything.py rename to multi_user/bl_types/dump_anything.py index 9c4f3fc..8acd7b2 100644 --- a/multi_user/libs/dump_anything.py +++ b/multi_user/bl_types/dump_anything.py @@ -1,6 +1,198 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + +import logging + import bpy import bpy.types as T import mathutils +import numpy as np + +logger = logging.getLogger(__name__) + +BPY_TO_NUMPY_TYPES = { + 'FLOAT': np.float, + 'INT': np.int, + 'BOOL': np.bool} + +PRIMITIVE_TYPES = ['FLOAT', 'INT', 'BOOLEAN'] + +NP_COMPATIBLE_TYPES = ['FLOAT', 'INT', 'BOOLEAN', 'ENUM'] + + +def np_load_collection(dikt: dict, collection: bpy.types.CollectionProperty, attributes: list = None): + """ Dump a list of attributes from the sane collection + to the target dikt. + + Without attribute given, it try to load all entry from dikt. + + :arg dikt: target dict + :type dikt: dict + :arg collection: source collection + :type collection: bpy.types.CollectionProperty + :arg attributes: list of attributes name + :type attributes: list + """ + if attributes is None: + attributes = dikt.keys() + + for attr in attributes: + attr_type = collection[0].bl_rna.properties.get(attr).type + + if attr_type in PRIMITIVE_TYPES: + np_load_collection_primitives(collection, attr, dikt[attr]) + elif attr_type == 'ENUM': + np_load_collection_enum(collection, attr, dikt[attr]) + else: + logger.error(f"{attr} of type {attr_type} not supported.") + + +def np_dump_collection(collection: bpy.types.CollectionProperty, attributes: list = None) -> dict: + """ Dump a list of attributes from the sane collection + to the target dikt + + Without attributes given, it try to dump all properties + that matches NP_COMPATIBLE_TYPES. + + :arg collection: source collection + :type collection: bpy.types.CollectionProperty + :arg attributes: list of attributes name + :type attributes: list + :retrun: dict + """ + dumped_collection = {} + + if len(collection) == 0: + return dumped_collection + + # TODO: find a way without getting the first item + properties = collection[0].bl_rna.properties + + if attributes is None: + attributes = [p.identifier for p in properties if p.type in NP_COMPATIBLE_TYPES and not p.is_readonly] + + for attr in attributes: + attr_type = properties[attr].type + + if attr_type in PRIMITIVE_TYPES: + dumped_collection[attr] = np_dump_collection_primitive( + collection, attr) + elif attr_type == 'ENUM': + dumped_collection[attr] = np_dump_collection_enum(collection, attr) + else: + logger.error(f"{attr} of type {attr_type} not supported. Only {PRIMITIVE_TYPES} and ENUM supported. Skipping it.") + + return dumped_collection + + +def np_dump_collection_primitive(collection: bpy.types.CollectionProperty, attribute: str) -> str: + """ Dump a collection attribute as a sequence + + !!! 
warning + Only work with int, float and bool attributes + + :arg collection: target collection + :type collection: bpy.types.CollectionProperty + :arg attribute: target attribute + :type attribute: str + :return: numpy byte buffer + """ + + attr_infos = collection[0].bl_rna.properties.get(attribute) + + assert(attr_infos.type in ['FLOAT', 'INT', 'BOOLEAN']) + + size = sum(attr_infos.array_dimensions) if attr_infos.is_array else 1 + + dumped_sequence = np.zeros( + len(collection)*size, + dtype=BPY_TO_NUMPY_TYPES.get(attr_infos.type)) + + collection.foreach_get(attribute, dumped_sequence) + + return dumped_sequence.tobytes() + + +def np_dump_collection_enum(collection: bpy.types.CollectionProperty, attribute: str) -> list: + """ Dump a collection enum attribute to an index list + + :arg collection: target collection + :type collection: bpy.types.CollectionProperty + :arg attribute: target attribute + :type attribute: bpy.types.EnumProperty + :return: list of int + """ + attr_infos = collection[0].bl_rna.properties.get(attribute) + + assert(attr_infos.type == 'ENUM') + + enum_items = attr_infos.enum_items + return [enum_items[getattr(i, attribute)].value for i in collection] + + +def np_load_collection_enum(collection: bpy.types.CollectionProperty, attribute: str, sequence: list): + """ Load a collection enum attribute from a list sequence + + !!! warning + Only work with Enum + + :arg collection: target collection + :type collection: bpy.types.CollectionProperty + :arg attribute: target attribute + :type attribute: str + :arg sequence: enum data buffer + :type sequence: list + :return: numpy byte buffer + """ + + attr_infos = collection[0].bl_rna.properties.get(attribute) + + assert(attr_infos.type == 'ENUM') + + enum_items = attr_infos.enum_items + enum_idx = [i.value for i in enum_items] + + for index, item in enumerate(sequence): + setattr(collection[index], attribute, + enum_items[enum_idx.index(item)].identifier) + + +def np_load_collection_primitives(collection: bpy.types.CollectionProperty, attribute: str, sequence: str): + """ Load a collection attribute from a str bytes sequence + + !!! warning + Only work with int, float and bool attributes + + :arg collection: target collection + :type collection: bpy.types.CollectionProperty + :arg attribute: target attribute + :type attribute: str + :arg sequence: data buffer + :type sequence: str + :return: numpy byte buffer + """ + + attr_infos = collection[0].bl_rna.properties.get(attribute) + + assert(attr_infos.type in ['FLOAT', 'INT', 'BOOLEAN']) + + collection.foreach_set( + attribute, + np.frombuffer(sequence, dtype=BPY_TO_NUMPY_TYPES.get(attr_infos.type))) def remove_items_from_dict(d, keys, recursive=False): @@ -47,7 +239,7 @@ def _load_filter_type(t, use_bl_rna=True): if use_bl_rna and x.bl_rna_property: return isinstance(x.bl_rna_property, t) else: - isinstance(x.read(), t) + return isinstance(x.read(), t) return filter_function @@ -73,8 +265,10 @@ def _load_filter_default(default): class Dumper: + # TODO: support occlude readonly + # TODO: use foreach_set/get on collection compatible properties def __init__(self): - self.verbose = False + self.verbose = True self.depth = 1 self.keep_compounds_as_leaves = False self.accept_read_only = True @@ -83,7 +277,6 @@ class Dumper: self.type_subset = self.match_subset_all self.include_filter = [] self.exclude_filter = [] - # self._atomic_types = [] # TODO future option? 
def dump(self, any): return self._dump_any(any, 0) @@ -175,7 +368,8 @@ class Dumper: if (self.include_filter and p not in self.include_filter): return False getattr(default, p) - except AttributeError: + except AttributeError as err: + logger.debug(err) return False if p.startswith("__"): return False @@ -238,14 +432,12 @@ class BlenderAPIElement: def write(self, value): # take precaution if property is read-only - try: - if self.sub_element_name: - setattr(self.api_element, self.sub_element_name, value) - else: - self.api_element = value - except AttributeError as err: - if not self.occlude_read_only: - raise err + if self.sub_element_name and \ + not self.api_element.is_property_readonly(self.sub_element_name): + + setattr(self.api_element, self.sub_element_name, value) + else: + self.api_element = value def extend(self, element_name): return BlenderAPIElement(self.read(), element_name) @@ -262,7 +454,7 @@ class BlenderAPIElement: class Loader: def __init__(self): self.type_subset = self.match_subset_all - self.occlude_read_only = True + self.occlude_read_only = False self.order = ['*'] def load(self, dst_data, src_dumped_data): @@ -287,6 +479,7 @@ class Loader: for i in range(len(dump)): element.read()[i] = dump[i] except AttributeError as err: + logger.debug(err) if not self.occlude_read_only: raise err @@ -297,29 +490,82 @@ class Loader: CONSTRUCTOR_NEW = "new" CONSTRUCTOR_ADD = "add" + DESTRUCTOR_REMOVE = "remove" + DESTRUCTOR_CLEAR = "clear" + constructors = { T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]), - T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []) + T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []), + T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"]), + T.Constraint: (CONSTRUCTOR_NEW, ["type"]), + } + + destructors = { + T.ColorRampElement: DESTRUCTOR_REMOVE, + T.Modifier: DESTRUCTOR_CLEAR, + T.Constraint: CONSTRUCTOR_NEW, } element_type = element.bl_rna_property.fixed_type + constructor = constructors.get(type(element_type)) + if constructor is None: # collection type not supported return - for dumped_element in dump.values(): - try: - constructor_parameters = [dumped_element[name] - for name in constructor[1]] - except KeyError: - print("Collection load error, missing parameters.") - continue # TODO handle error - new_element = getattr(element.read(), constructor[0])( - *constructor_parameters) + + destructor = destructors.get(type(element_type)) + + # Try to clear existing + if destructor: + if destructor == DESTRUCTOR_REMOVE: + collection = element.read() + for i in range(len(collection)-1): + collection.remove(collection[0]) + else: + getattr(element.read(), DESTRUCTOR_CLEAR)() + + for dump_idx, dumped_element in enumerate(dump.values()): + if dump_idx == 0 and len(element.read()) > 0: + new_element = element.read()[0] + else: + try: + constructor_parameters = [dumped_element[name] + for name in constructor[1]] + except KeyError: + logger.debug("Collection load error, missing parameters.") + continue # TODO handle error + + new_element = getattr(element.read(), constructor[0])( + *constructor_parameters) self._load_any( BlenderAPIElement( new_element, occlude_read_only=self.occlude_read_only), dumped_element ) + def _load_curve_mapping(self, element, dump): + mapping = element.read() + curves = mapping.curves + + for curve_index, curve in dump['curves'].items(): + dst_curve = curves[curve_index] + + # cleanup existing curve + for idx in range(len(dst_curve.points), 0, -1): + try: + dst_curve.points.remove(dst_curve.points[0]) + except Exception: + break + + 
default_point_count = len(dst_curve.points) + + for point_idx, point in curve['points'].items(): + pos = point['location'] + + if point_idx < default_point_count: + dst_curve.points[int(point_idx)].location = pos + else: + dst_curve.points.new(pos[0], pos[1]) + def _load_pointer(self, pointer, dump): rna_property_type = pointer.bl_rna_property.fixed_type if not rna_property_type: @@ -336,6 +582,10 @@ class Loader: pointer.write(bpy.data.meshes.get(dump)) elif isinstance(rna_property_type, T.Material): pointer.write(bpy.data.materials.get(dump)) + elif isinstance(rna_property_type, T.Collection): + pointer.write(bpy.data.collections.get(dump)) + elif isinstance(rna_property_type, T.ParticleSettings): + pointer.write(bpy.data.particles.get(dump)) def _load_matrix(self, matrix, dump): matrix.write(mathutils.Matrix(dump)) @@ -365,11 +615,11 @@ class Loader: for k in self._ordered_keys(dump.keys()): v = dump[k] if not hasattr(default.read(), k): - continue # TODO error handling + logger.debug(f"Load default, skipping {default} : {k}") try: self._load_any(default.extend(k), v) - except: - pass + except Exception as err: + logger.debug(f"Cannot load {k}: {err}") @property def match_subset_all(self): @@ -380,8 +630,11 @@ class Loader: (_load_filter_type(mathutils.Matrix, use_bl_rna=False), self._load_matrix), # before float because bl_rna type of vector if FloatProperty (_load_filter_type(mathutils.Vector, use_bl_rna=False), self._load_vector), - (_load_filter_type(mathutils.Quaternion, use_bl_rna=False), self._load_quaternion), + (_load_filter_type(mathutils.Quaternion, + use_bl_rna=False), self._load_quaternion), (_load_filter_type(mathutils.Euler, use_bl_rna=False), self._load_euler), + (_load_filter_type(T.CurveMapping, use_bl_rna=False), + self._load_curve_mapping), (_load_filter_type(T.FloatProperty), self._load_identity), (_load_filter_type(T.StringProperty), self._load_identity), (_load_filter_type(T.EnumProperty), self._load_identity), diff --git a/multi_user/delayable.py b/multi_user/delayable.py index dcdf72f..78598d4 100644 --- a/multi_user/delayable.py +++ b/multi_user/delayable.py @@ -1,3 +1,20 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + import logging import bpy @@ -294,5 +311,5 @@ class ClientUpdate(Timer): presence.renderer.stop() # # ui update - elif session: + elif session.state['STATE'] != STATE_INITIAL: presence.refresh_3d_view() \ No newline at end of file diff --git a/multi_user/environment.py b/multi_user/environment.py index 077f451..716fcf8 100644 --- a/multi_user/environment.py +++ b/multi_user/environment.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import collections import logging import os diff --git a/multi_user/libs/overrider.py b/multi_user/libs/overrider.py deleted file mode 100644 index 964833d..0000000 --- a/multi_user/libs/overrider.py +++ /dev/null @@ -1,219 +0,0 @@ -""" -Context Manager allowing temporary override of attributes - -````python -import bpy -from overrider import Overrider - -with Overrider(name='bpy_', parent=bpy) as bpy_: - # set preview render settings - bpy_.context.scene.render.use_file_extension = False - bpy_.context.scene.render.resolution_x = 512 - bpy_.context.scene.render.resolution_y = 512 - bpy_.context.scene.render.use_file_extension = False - bpy_.context.scene.render.image_settings.file_format = "JPEG" - bpy_.context.scene.layers[10] = False - - frame_start = action.frame_range[0] - frame_end = action.frame_range[1] - if begin_frame is not None: - frame_start = begin_frame - if end_frame is not None: - frame_end = end_frame - - # render - window = bpy_.data.window_managers[0].windows[0] - screen = bpy_.data.window_managers[0].windows[0].screen - area = next(area for area in screen.areas if area.type == 'VIEW_3D') - space = next(space for space in area.spaces if space.type == 'VIEW_3D') - - space.viewport_shade = 'MATERIAL' - space.region_3d.view_perspective = 'CAMERA' - - override_context = { - "window": window._real_value_(), - "screen": screen._real_value_() - } - - if frame_start == frame_end: - bpy.context.scene.frame_set(int(frame_start)) - bpy_.context.scene.render.filepath = os.path.join(directory, "icon.jpg") - bpy.ops.render.opengl(override_context, write_still=True) - - else: - for icon_index, frame_number in enumerate(range(int(frame_start), int(frame_end) + 1)): - bpy.context.scene.frame_set(frame_number) - bpy.context.scene.render.filepath = os.path.join(directory, "icon", "{:04d}.jpg".format(icon_index)) - bpy.ops.render.opengl(override_context, write_still=True) -```` -""" -from collections import OrderedDict - - -class OverrideIter: - - def __init__(self, parent): - self.parent = parent - self.index = -1 - - def __next__(self): - self.index += 1 - try: - return self.parent[self.index] - except IndexError as e: - raise StopIteration - - -class OverrideBase: - - def __init__(self, context_manager, name=None, parent=None): - self._name__ = name - self._context_manager_ = context_manager - self._parent_ = parent - self._changed_attributes_ = OrderedDict() - self._changed_items_ = OrderedDict() - self._children_ = list() - self._original_value_ = self._real_value_() - - def __repr__(self): - return "<{}({})>".format(self.__class__.__name__, self._path_) - - @property - def _name_(self): - raise NotImplementedError() - - @property - def _path_(self): - if isinstance(self._parent_, OverrideBase): - return self._parent_._path_ + self._name_ - - return self._name_ - - def _real_value_(self): - raise NotImplementedError() - - def _restore_(self): - for attribute, original_value in reversed(self._changed_attributes_.items()): - setattr(self._real_value_(), attribute, original_value) - - for item, original_value in reversed(self._changed_items_.items()): - self._real_value_()[item] = 
original_value - - def __getattr__(self, attr): - new_attribute = OverrideAttribute(self._context_manager_, name=attr, parent=self) - self._children_.append(new_attribute) - return new_attribute - - def __getitem__(self, item): - new_item = OverrideItem(self._context_manager_, name=item, parent=self) - self._children_.append(new_item) - return new_item - - def __iter__(self): - return OverrideIter(self) - - def __setattr__(self, attr, value): - if attr in ( - '_name__', - '_context_manager_', - '_parent_', - '_children_', - '_original_value_', - '_changed_attributes_', - '_changed_items_' - ): - self.__dict__[attr] = value - return - - if attr not in self._changed_attributes_.keys(): - self._changed_attributes_[attr] = getattr(self._real_value_(), attr) - self._context_manager_.register_as_changed(self) - - setattr(self._real_value_(), attr, value) - - def __setitem__(self, item, value): - if item not in self._changed_items_.keys(): - self._changed_items_[item] = self._real_value_()[item] - self._context_manager_.register_as_changed(self) - - self._real_value_()[item] = value - - def __eq__(self, other): - return self._real_value_() == other - - def __gt__(self, other): - return self._real_value_() > other - - def __lt__(self, other): - return self._real_value_() < other - - def __ge__(self, other): - return self._real_value_() >= other - - def __le__(self, other): - return self._real_value_() <= other - - def __call__(self, *args, **kwargs): - # TODO : surround str value with quotes - arguments = list([str(arg) for arg in args]) + ['{}={}'.format(key, value) for key, value in kwargs.items()] - arguments = ', '.join(arguments) - raise RuntimeError('Overrider does not allow call to {}({})'.format(self._path_, arguments)) - - -class OverrideRoot(OverrideBase): - - @property - def _name_(self): - return self._name__ - - def _real_value_(self): - return self._parent_ - - -class OverrideAttribute(OverrideBase): - - @property - def _name_(self): - return '.{}'.format(self._name__) - - def _real_value_(self): - return getattr(self._parent_._real_value_(), self._name__) - - -class OverrideItem(OverrideBase): - - @property - def _name_(self): - if isinstance(self._name__, str): - return '["{}"]'.format(self._name__) - - return '[{}]'.format(self._name__) - - def _real_value_(self): - return self._parent_._real_value_()[self._name__] - - -class Overrider: - def __init__(self, name, parent): - self.name = name - self.parent = parent - self.override = None - self.registered_overrides = list() - - def __enter__(self): - self.override = OverrideRoot( - context_manager=self, - parent=self.parent, - name=self.name - ) - return self.override - - def __exit__(self, exc_type, exc_val, exc_tb): - self.restore() - - def register_as_changed(self, override): - self.registered_overrides.append(override) - - def restore(self): - for override in reversed(self.registered_overrides): - override._restore_() diff --git a/multi_user/libs/replication b/multi_user/libs/replication index 99bf948..d3601d5 160000 --- a/multi_user/libs/replication +++ b/multi_user/libs/replication @@ -1 +1 @@ -Subproject commit 99bf94874a07890dc747ec53519aa34443a95146 +Subproject commit d3601d508235b32a268371f062a5ccb6f03c8406 diff --git a/multi_user/operators.py b/multi_user/operators.py index 424418f..4c7560c 100644 --- a/multi_user/operators.py +++ b/multi_user/operators.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU 
General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import asyncio import logging import os @@ -181,6 +199,36 @@ class SessionStopOperator(bpy.types.Operator): return {"FINISHED"} +class SessionKickOperator(bpy.types.Operator): + bl_idname = "session.kick" + bl_label = "Kick" + bl_description = "Kick the user" + bl_options = {"REGISTER"} + + user: bpy.props.StringProperty() + + @classmethod + def poll(cls, context): + return True + + def execute(self, context): + global client, delayables, stop_modal_executor + assert(client) + + try: + client.kick(self.user) + except Exception as e: + self.report({'ERROR'}, repr(e)) + + return {"FINISHED"} + + def invoke(self, context, event): + return context.window_manager.invoke_props_dialog(self) + + + def draw(self, context): + row = self.layout + row.label(text=f" Do you really want to kick {self.user} ? " ) class SessionPropertyRemoveOperator(bpy.types.Operator): bl_idname = "session.remove_prop" @@ -446,6 +494,7 @@ classes = ( SessionApply, SessionCommit, ApplyArmatureOperator, + SessionKickOperator, ) @@ -470,7 +519,7 @@ def sanitize_deps_graph(dummy): if client and client.state['STATE'] in [STATE_ACTIVE]: for node_key in client.list(): - client.get(node_key).resolve() + client.get(node_key)._resolve() @persistent diff --git a/multi_user/preferences.py b/multi_user/preferences.py index ddf0edb..414c2ef 100644 --- a/multi_user/preferences.py +++ b/multi_user/preferences.py @@ -1,10 +1,44 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + +import random import logging import bpy +import string -from . import utils, bl_types, environment +from . 
import utils, bl_types, environment, addon_updater_ops, presence +from .libs.replication.replication.constants import RP_COMMON logger = logging.getLogger(__name__) +def randomColor(): + """Generate a random color """ + r = random.random() + v = random.random() + b = random.random() + return [r, v, b] + + +def random_string_digits(stringLength=6): + """Generate a random string of letters and digits """ + lettersAndDigits = string.ascii_letters + string.digits + return ''.join(random.choices(lettersAndDigits, k=stringLength)) + + class ReplicatedDatablock(bpy.types.PropertyGroup): type_name: bpy.props.StringProperty() bl_name: bpy.props.StringProperty() @@ -14,8 +48,16 @@ class ReplicatedDatablock(bpy.types.PropertyGroup): auto_push: bpy.props.BoolProperty(default=True) icon: bpy.props.StringProperty() + +class ReplicationFlags(bpy.types.PropertyGroup): + sync_render_settings: bpy.props.BoolProperty( + name="Synchronize render settings", + description="Synchronize render settings (eevee and cycles only)", + default=True) + + class SessionPrefs(bpy.types.AddonPreferences): - bl_idname = __package__ + bl_idname = __package__ ip: bpy.props.StringProperty( name="ip", @@ -23,29 +65,32 @@ class SessionPrefs(bpy.types.AddonPreferences): default="127.0.0.1") username: bpy.props.StringProperty( name="Username", - default="user_{}".format(utils.random_string_digits()) + default=f"user_{random_string_digits()}" ) client_color: bpy.props.FloatVectorProperty( name="client_instance_color", subtype='COLOR', - default=utils.randomColor()) + default=randomColor()) port: bpy.props.IntProperty( name="port", description='Distant host port', default=5555 - ) + ) + sync_flags: bpy.props.PointerProperty( + type=ReplicationFlags + ) supported_datablocks: bpy.props.CollectionProperty( type=ReplicatedDatablock, - ) + ) ipc_port: bpy.props.IntProperty( name="ipc_port", description='internal ttl port(only usefull for multiple local instances)', default=5561 - ) + ) start_empty: bpy.props.BoolProperty( name="start_empty", default=False - ) + ) right_strategy: bpy.props.EnumProperty( name='right_strategy', description='right strategy', @@ -58,16 +103,15 @@ class SessionPrefs(bpy.types.AddonPreferences): subtype="DIR_PATH", default=environment.DEFAULT_CACHE_DIR) # for UI - # category: bpy.props.EnumProperty( - # name="Category", - # description="Preferences Category", - # items=[ - # ('INFO', "Information", "Information about this add-on"), - # ('CONFIG', "Configuration", "Configuration about this add-on"), - # ('UPDATE', "Update", "Update this add-on"), - # ], - # default='INFO' - # ) + category: bpy.props.EnumProperty( + name="Category", + description="Preferences Category", + items=[ + ('CONFIG', "Configuration", "Configuration about this add-on"), + ('UPDATE', "Update", "Update this add-on"), + ], + default='CONFIG' + ) conf_session_identity_expanded: bpy.props.BoolProperty( name="Identity", description="Identity", @@ -94,82 +138,115 @@ class SessionPrefs(bpy.types.AddonPreferences): default=False ) + auto_check_update: bpy.props.BoolProperty( + name="Auto-check for Update", + description="If enabled, auto-check for updates using an interval", + default=False, + ) + updater_intrval_months: bpy.props.IntProperty( + name='Months', + description="Number of months between checking for updates", + default=0, + min=0 + ) + updater_intrval_days: bpy.props.IntProperty( + name='Days', + description="Number of days between checking for updates", + default=7, + min=0, + max=31 + ) + updater_intrval_hours: bpy.props.IntProperty( 
+ name='Hours', + description="Number of hours between checking for updates", + default=0, + min=0, + max=23 + ) + updater_intrval_minutes: bpy.props.IntProperty( + name='Minutes', + description="Number of minutes between checking for updates", + default=0, + min=0, + max=59 + ) def draw(self, context): layout = self.layout - # layout.row().prop(self, "category", expand=True) - - # if self.category == 'INFO': - # layout.separator() - # layout.label(text="Enable real-time collaborative workflow inside blender") - # if self.category == 'CONFIG': - grid = layout.column() + layout.row().prop(self, "category", expand=True) - # USER INFORMATIONS - box = grid.box() - box.prop( - self, "conf_session_identity_expanded", text="User informations", - icon='DISCLOSURE_TRI_DOWN' if self.conf_session_identity_expanded - else 'DISCLOSURE_TRI_RIGHT', emboss=False) - if self.conf_session_identity_expanded: - box.row().prop(self, "username", text="name") - box.row().prop(self, "client_color", text="color") - - # NETWORK SETTINGS - box = grid.box() - box.prop( - self, "conf_session_net_expanded", text="Netorking", - icon='DISCLOSURE_TRI_DOWN' if self.conf_session_net_expanded - else 'DISCLOSURE_TRI_RIGHT', emboss=False) - - if self.conf_session_net_expanded: - box.row().prop(self, "ip", text="Address") - row = box.row() - row.label(text="Port:") - row.prop(self, "port", text="Address") - row = box.row() - row.label(text="Start with an empty scene:") - row.prop(self, "start_empty", text="") - - table = box.box() - table.row().prop( - self, "conf_session_timing_expanded", text="Refresh rates", - icon='DISCLOSURE_TRI_DOWN' if self.conf_session_timing_expanded - else 'DISCLOSURE_TRI_RIGHT', emboss=False) + if self.category == 'CONFIG': + grid = layout.column() - if self.conf_session_timing_expanded: - line = table.row() - line.label(text=" ") - line.separator() - line.label(text="refresh (sec)") - line.label(text="apply (sec)") + # USER INFORMATIONS + box = grid.box() + box.prop( + self, "conf_session_identity_expanded", text="User informations", + icon='DISCLOSURE_TRI_DOWN' if self.conf_session_identity_expanded + else 'DISCLOSURE_TRI_RIGHT', emboss=False) + if self.conf_session_identity_expanded: + box.row().prop(self, "username", text="name") + box.row().prop(self, "client_color", text="color") - for item in self.supported_datablocks: - line = table.row(align=True) - line.label(text="", icon=item.icon) - line.prop(item, "bl_delay_refresh", text="") - line.prop(item, "bl_delay_apply", text="") - # HOST SETTINGS - box = grid.box() - box.prop( - self, "conf_session_hosting_expanded", text="Hosting", - icon='DISCLOSURE_TRI_DOWN' if self.conf_session_hosting_expanded - else 'DISCLOSURE_TRI_RIGHT', emboss=False) - if self.conf_session_hosting_expanded: - box.row().prop(self, "right_strategy", text="Right model") - row = box.row() - row.label(text="Start with an empty scene:") - row.prop(self, "start_empty", text="") - - # CACHE SETTINGS - box = grid.box() - box.prop( - self, "conf_session_cache_expanded", text="Cache", - icon='DISCLOSURE_TRI_DOWN' if self.conf_session_cache_expanded - else 'DISCLOSURE_TRI_RIGHT', emboss=False) - if self.conf_session_cache_expanded: - box.row().prop(self, "cache_directory", text="Cache directory") + # NETWORK SETTINGS + box = grid.box() + box.prop( + self, "conf_session_net_expanded", text="Netorking", + icon='DISCLOSURE_TRI_DOWN' if self.conf_session_net_expanded + else 'DISCLOSURE_TRI_RIGHT', emboss=False) + + if self.conf_session_net_expanded: + box.row().prop(self, "ip", 
text="Address") + row = box.row() + row.label(text="Port:") + row.prop(self, "port", text="Address") + row = box.row() + row.label(text="Start with an empty scene:") + row.prop(self, "start_empty", text="") + + table = box.box() + table.row().prop( + self, "conf_session_timing_expanded", text="Refresh rates", + icon='DISCLOSURE_TRI_DOWN' if self.conf_session_timing_expanded + else 'DISCLOSURE_TRI_RIGHT', emboss=False) + + if self.conf_session_timing_expanded: + line = table.row() + line.label(text=" ") + line.separator() + line.label(text="refresh (sec)") + line.label(text="apply (sec)") + + for item in self.supported_datablocks: + line = table.row(align=True) + line.label(text="", icon=item.icon) + line.prop(item, "bl_delay_refresh", text="") + line.prop(item, "bl_delay_apply", text="") + # HOST SETTINGS + box = grid.box() + box.prop( + self, "conf_session_hosting_expanded", text="Hosting", + icon='DISCLOSURE_TRI_DOWN' if self.conf_session_hosting_expanded + else 'DISCLOSURE_TRI_RIGHT', emboss=False) + if self.conf_session_hosting_expanded: + box.row().prop(self, "right_strategy", text="Right model") + row = box.row() + row.label(text="Start with an empty scene:") + row.prop(self, "start_empty", text="") + + # CACHE SETTINGS + box = grid.box() + box.prop( + self, "conf_session_cache_expanded", text="Cache", + icon='DISCLOSURE_TRI_DOWN' if self.conf_session_cache_expanded + else 'DISCLOSURE_TRI_RIGHT', emboss=False) + if self.conf_session_cache_expanded: + box.row().prop(self, "cache_directory", text="Cache directory") + + if self.category == 'UPDATE': + from . import addon_updater_ops + addon_updater_ops.update_settings_ui_condensed(self, context) def generate_supported_types(self): self.supported_datablocks.clear() @@ -181,19 +258,105 @@ class SessionPrefs(bpy.types.AddonPreferences): type_impl_name = "Bl{}".format(type.split('_')[1].capitalize()) type_module_class = getattr(type_module, type_impl_name) - new_db.name = type_impl_name + new_db.name = type_impl_name new_db.type_name = type_impl_name new_db.bl_delay_refresh = type_module_class.bl_delay_refresh - new_db.bl_delay_apply =type_module_class.bl_delay_apply + new_db.bl_delay_apply = type_module_class.bl_delay_apply new_db.use_as_filter = True new_db.icon = type_module_class.bl_icon - new_db.auto_push =type_module_class.bl_automatic_push - new_db.bl_name=type_module_class.bl_id + new_db.auto_push = type_module_class.bl_automatic_push + new_db.bl_name = type_module_class.bl_id + + +def client_list_callback(scene, context): + from . 
import operators + + items = [(RP_COMMON, RP_COMMON, "")] + + username = utils.get_preferences().username + cli = operators.client + if cli: + client_ids = cli.online_users.keys() + for id in client_ids: + name_desc = id + if id == username: + name_desc += " (self)" + + items.append((id, name_desc, "")) + + return items + + +class SessionUser(bpy.types.PropertyGroup): + """Session User + + Blender user information property + """ + username: bpy.props.StringProperty(name="username") + current_frame: bpy.props.IntProperty(name="current_frame") + + +class SessionProps(bpy.types.PropertyGroup): + is_admin: bpy.props.BoolProperty( + name="is_admin", + default=False + ) + session_mode: bpy.props.EnumProperty( + name='session_mode', + description='session mode', + items={ + ('HOST', 'hosting', 'host a session'), + ('CONNECT', 'connexion', 'connect to a session')}, + default='HOST') + clients: bpy.props.EnumProperty( + name="clients", + description="client enum", + items=client_list_callback) + enable_presence: bpy.props.BoolProperty( + name="Presence overlay", + description='Enable overlay drawing module', + default=True, + update=presence.update_presence + ) + presence_show_selected: bpy.props.BoolProperty( + name="Show selected objects", + description='Enable selection overlay ', + default=True, + update=presence.update_overlay_settings + ) + presence_show_user: bpy.props.BoolProperty( + name="Show users", + description='Enable user overlay ', + default=True, + update=presence.update_overlay_settings + ) + presence_show_far_user: bpy.props.BoolProperty( + name="Show different scenes", + description="Show user on different scenes", + default=False, + update=presence.update_overlay_settings + ) + filter_owned: bpy.props.BoolProperty( + name="filter_owned", + description='Show only owned datablocks', + default=True + ) + user_snap_running: bpy.props.BoolProperty( + default=False + ) + time_snap_running: bpy.props.BoolProperty( + default=False + ) classes = ( + SessionUser, + SessionProps, + ReplicationFlags, ReplicatedDatablock, SessionPrefs, ) + + def register(): from bpy.utils import register_class @@ -205,8 +368,9 @@ def register(): logger.info('Generating bl_types preferences') prefs.generate_supported_types() + def unregister(): from bpy.utils import unregister_class for cls in reversed(classes): - unregister_class(cls) \ No newline at end of file + unregister_class(cls) diff --git a/multi_user/presence.py b/multi_user/presence.py index 5494e19..4a3ee19 100644 --- a/multi_user/presence.py +++ b/multi_user/presence.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# +# ##### END GPL LICENSE BLOCK ##### + + import copy import logging import math @@ -117,10 +135,8 @@ def get_bb_coords_from_obj(object, parent=None): def get_view_matrix(): area, region, rv3d = view3d_find() - if area and region and rv3d: - matrix_dumper = utils.dump_anything.Dumper() - - return matrix_dumper.dump(rv3d.view_matrix) + if area and region and rv3d: + return [list(v) for v in rv3d.view_matrix] def update_presence(self, context): global renderer diff --git a/multi_user/ui.py b/multi_user/ui.py index 5e356f7..1c13367 100644 --- a/multi_user/ui.py +++ b/multi_user/ui.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy from . import operators, utils @@ -6,7 +24,8 @@ from .libs.replication.replication.constants import (ADDED, ERROR, FETCHED, STATE_ACTIVE, STATE_AUTH, STATE_CONFIG, STATE_SYNCING, STATE_INITIAL, STATE_SRV_SYNC, - STATE_WAITING, STATE_QUITTING) + STATE_WAITING, STATE_QUITTING, + STATE_LAUNCHING_SERVICES) ICONS_PROP_STATES = ['TRIA_DOWN', # ADDED 'TRIA_UP', # COMMITED @@ -35,7 +54,7 @@ def printProgressBar (iteration, total, prefix = '', suffix = '', decimals = 1, return '{} |{}| {}/{}{}'.format(prefix, bar, iteration,total, suffix) def get_state_str(state): - state_str = 'None' + state_str = 'UNKNOWN' if state == STATE_WAITING: state_str = 'WARMING UP DATA' elif state == STATE_SYNCING: @@ -52,6 +71,9 @@ def get_state_str(state): state_str = 'INIT' elif state == STATE_QUITTING: state_str = 'QUITTING SESSION' + elif state == STATE_LAUNCHING_SERVICES: + state_str = 'LAUNCHING SERVICES' + return state_str class SESSION_PT_settings(bpy.types.Panel): @@ -219,6 +241,9 @@ class SESSION_PT_settings_replication(bpy.types.Panel): # Right managment if runtime_settings.session_mode == 'HOST': + row = layout.row() + row.prop(settings.sync_flags,"sync_render_settings") + row = layout.row(align=True) row.label(text="Right strategy:") row.prop(settings,"right_strategy",text="") @@ -261,7 +286,7 @@ class SESSION_PT_user(bpy.types.Panel): selected_user = context.window_manager.user_index settings = utils.get_preferences() active_user = online_users[selected_user] if len(online_users)-1>=selected_user else 0 - + runtime_settings = context.window_manager.session # Create a simple row. 
row = layout.row() @@ -291,6 +316,12 @@ class SESSION_PT_user(bpy.types.Panel): text="", icon='TIME').target_client = active_user.username + if runtime_settings.session_mode == 'HOST': + user_operations.operator( + "session.kick", + text="", + icon='CANCEL').user = active_user.username + class SESSION_UL_users(bpy.types.UIList): def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index, flt_flag): diff --git a/multi_user/utils.py b/multi_user/utils.py index db96e92..d221dff 100644 --- a/multi_user/utils.py +++ b/multi_user/utils.py @@ -1,9 +1,26 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import json import logging import os -import random -import string import sys +import time from uuid import uuid4 from collections.abc import Iterable @@ -11,22 +28,10 @@ import bpy import mathutils from . import environment, presence -from .libs import dump_anything logger = logging.getLogger(__name__) logger.setLevel(logging.WARNING) -def has_action(target): - return (hasattr(target, 'animation_data') - and target.animation_data - and target.animation_data.action) - - -def has_driver(target): - return (hasattr(target, 'animation_data') - and target.animation_data - and target.animation_data.drivers) - def find_from_attr(attr_name, attr_value, list): for item in list: @@ -54,19 +59,6 @@ def get_datablock_users(datablock): return users -def random_string_digits(stringLength=6): - """Generate a random string of letters and digits """ - lettersAndDigits = string.ascii_letters + string.digits - return ''.join(random.choices(lettersAndDigits, k=stringLength)) - - -def randomColor(): - r = random.random() - v = random.random() - b = random.random() - return [r, v, b] - - def clean_scene(): for type_name in dir(bpy.data): try: @@ -77,76 +69,10 @@ def clean_scene(): continue -def revers(d): - l = [] - for i in d: - l.append(i) - - return l[::-1] - - -def get_armature_edition_context(armature): - - override = {} - # Set correct area - for area in bpy.data.window_managers[0].windows[0].screen.areas: - if area.type == 'VIEW_3D': - override = bpy.context.copy() - override['area'] = area - break - - # Set correct armature settings - override['window'] = bpy.data.window_managers[0].windows[0] - override['screen'] = bpy.data.window_managers[0].windows[0].screen - override['mode'] = 'EDIT_ARMATURE' - override['active_object'] = armature - override['selected_objects'] = [armature] - - for o in bpy.data.objects: - if o.data == armature: - override['edit_object'] = o - - break - - return override - - def get_selected_objects(scene, active_view_layer): return [obj.uuid for obj in scene.objects if obj.select_get(view_layer=active_view_layer)] -def load_dict(src_dict, target): - try: - for item in src_dict: - # attr = - setattr(target, item, src_dict[item]) - - except Exception as e: - logger.error(e) - pass - - -def 
dump_datablock_attibutes(datablock=None, attributes=[], depth=1, dickt=None): - if datablock: - dumper = dump_anything.Dumper() - dumper.type_subset = dumper.match_subset_all - dumper.depth = depth - - datablock_type = datablock.bl_rna.name - - data = {} - - if dickt: - data = dickt - for attr in attributes: - try: - data[attr] = dumper.dump(getattr(datablock, attr)) - except: - pass - - return data - - def resolve_from_id(id, optionnal_type=None): for category in dir(bpy.data): root = getattr(bpy.data, category) @@ -157,4 +83,7 @@ def resolve_from_id(id, optionnal_type=None): def get_preferences(): - return bpy.context.preferences.addons[__package__].preferences \ No newline at end of file + return bpy.context.preferences.addons[__package__].preferences + +def current_milli_time(): + return int(round(time.time() * 1000)) \ No newline at end of file
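
The numpy-based collection helpers introduced in multi_user/bl_types/dump_anything.py (np_dump_collection / np_load_collection) are the core of the new mesh and metaball serialization path above. A minimal round-trip sketch, assuming the add-on is installed and enabled so that the multi_user package is importable from within Blender; the mesh name is illustrative and not part of the patch:

import bpy
from multi_user.bl_types.dump_anything import np_dump_collection, np_load_collection

mesh = bpy.data.meshes['Cube']                      # any mesh datablock (hypothetical name)
dumped = np_dump_collection(mesh.vertices, ['co'])  # {'co': <bytes>}, filled via foreach_get
# ...transmit or store the byte buffers...
np_load_collection(dumped, mesh.vertices, ['co'])   # restores the coordinates via foreach_set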
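
Enum attributes go through a separate pair of helpers, since they are serialized as lists of enum item values rather than byte buffers. A sketch on metaball elements, whose 'type' attribute is listed in the ELEMENT constant of bl_metaball.py; the datablock name is again illustrative:

import bpy
from multi_user.bl_types.dump_anything import np_dump_collection_enum, np_load_collection_enum

elements = bpy.data.metaballs['Mball'].elements    # hypothetical metaball datablock
types = np_dump_collection_enum(elements, 'type')  # list of enum item values, e.g. [0, 2, ...]
np_load_collection_enum(elements, 'type', types)   # maps the values back to enum identifiers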
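
The reworked Dumper/Loader pair follows the same pattern in every _dump_implementation / _load_implementation of the patch: dump a filtered property subset into a dict, then load it back onto a target datablock. A sketch using speakers; the include_filter entries beyond "muted" and the datablock names are chosen for illustration only:

import bpy
from multi_user.bl_types.dump_anything import Dumper, Loader

source = bpy.data.speakers['Speaker']    # hypothetical source datablock
target = bpy.data.speakers.new('Copy')

dumper = Dumper()
dumper.depth = 1
dumper.include_filter = ['muted', 'volume', 'pitch']
data = dumper.dump(source)

Loader().load(target, data)              # write() now skips read-only properties instead of raising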