Merge branch '29-differential-revision' into feature/event_driven_updates
.gitignore (vendored, 4 added lines)
@@ -7,6 +7,10 @@ __pycache__/
 cache
 config
 *.code-workspace
+multi_user_updater/
 
 # sphinx build folder
 _build
+
+# ignore generated zip generated from blender_addon_tester
+*.zip
.gitlab-ci.yml (new file, 3 lines)
@@ -0,0 +1,3 @@
+include:
+    - local: .gitlab/ci/test.gitlab-ci.yml
+    - local: .gitlab/ci/build.gitlab-ci.yml
.gitlab/ci/build.gitlab-ci.yml (new file, 16 lines; YAML indentation restored)
@@ -0,0 +1,16 @@
+build:
+  image: python:latest
+  script:
+    - git submodule init
+    - git submodule update
+    - cd multi_user/libs/replication
+    - rm -rf tests .git .gitignore script
+
+  artifacts:
+    name: multi_user
+    paths:
+      - multi_user
+
+  only:
+    - master
+    - develop
.gitlab/ci/test.gitlab-ci.yml (new file, 12 lines; YAML indentation restored)
@@ -0,0 +1,12 @@
+test:
+  image: python:latest
+  script:
+    - git submodule init
+    - git submodule update
+    - apt update
+    # install blender to get all required dependencies
+    # TODO: install only dependencies
+    - apt install -f -y blender
+    - pip install blender-addon-tester
+    - python scripts/test_addon.py
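The test job calls scripts/test_addon.py, which is not part of this diff. Below is a minimal sketch of what such a runner could look like with the blender-addon-tester package; the entry point follows that package's documented usage, but the addon path and Blender revision are illustrative assumptions, not values taken from this commit.

# scripts/test_addon.py (hypothetical sketch, not the file from this commit)
import sys

try:
    import blender_addon_tester as bat
except ImportError:
    sys.exit("blender-addon-tester is required: pip install blender-addon-tester")

if __name__ == "__main__":
    # Install the addon into the requested Blender build and run its test suite.
    # "multi_user" and "2.82" are assumptions for illustration only.
    exit_code = bat.test_blender_addon(addon_path="multi_user", blender_rev="2.82")
    sys.exit(exit_code)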
.gitlab/issue_templates/Bug.md (new file, 46 lines)
@@ -0,0 +1,46 @@
+<!---
+Please read this!
+
+Before opening a new issue, make sure to search for keywords in the issues
+filtered by the "bug" label:
+
+- https://gitlab.com/slumber/multi-user/-/issues?scope=all&utf8=✓&label_name[]=bug
+
+and verify the issue you're about to submit isn't a duplicate.
+--->
+
+### Summary
+
+(Summarize the bug encountered concisely)
+
+* Addon version: (your addon version)
+* Blender version: (your blender version)
+* OS: (your os: windows/linux/mac)
+
+
+### Steps to reproduce
+
+(How one can reproduce the issue - this is very important)
+
+### Example Project [optional]
+
+(If possible, please create an example project that exhibits the problematic behavior, and link to it here in the bug report)
+
+
+### What is the current *bug* behavior?
+
+(What actually happens)
+
+
+### Relevant logs and/or screenshots
+
+(Paste any relevant logs - please use code blocks (```) to format console output,
+logs, and code as it's tough to read otherwise.)
+
+
+### Possible fixes [optional]
+
+(If you can, link to the line of code that might be responsible for the problem)
+
+
+/label ~bug
+/cc @project-manager
CHANGELOG.md (14 added lines)
@@ -36,3 +36,17 @@ All notable changes to this project will be documented in this file.
 - Use a basic BFS approach for replication graph pre-load.
 - Serialization is now based on marshal (2x performance improvements).
 - Let pip chose python dependencies install path.
+
+## [0.0.3] - Upcoming
+
+### Added
+
+- Auto updater support
+- Performances improvements on Meshes, Gpencils, Actions
+- Multi-scene workflow support
+- Render setting synchronisation
+- Kick command
+
+### Changed
+
+- Config is now stored in blender user preference
README.md (43 changed lines)
@@ -2,7 +2,7 @@
 
 > Enable real-time collaborative workflow inside blender
 
-![](https://i.imgur.com/X0B7O1Q.gif)
+<img src="https://i.imgur.com/X0B7O1Q.gif" width=600>
 
 
 :warning: Under development, use it at your own risks. Currently tested on Windows platform. :warning:
@@ -25,22 +25,28 @@ See the [documentation](https://multi-user.readthedocs.io/en/latest/) for detail
 
 Currently, not all data-block are supported for replication over the wire. The following list summarizes the status for each ones.
 
 | Name        | Status | Comment |
-| ----------- | :----------------: | :------------: |
-| action      | :exclamation: | Not stable |
-| armature    | :exclamation: | Not stable |
-| camera      | :white_check_mark: |  |
-| collection  | :white_check_mark: |  |
-| curve       | :white_check_mark: | Not tested |
-| gpencil     | :white_check_mark: |  |
-| image       | :exclamation: | Not stable yet |
-| mesh        | :white_check_mark: |  |
-| material    | :white_check_mark: |  |
-| metaball    | :white_check_mark: |  |
-| object      | :white_check_mark: |  |
-| scene       | :white_check_mark: |  |
-| world       | :white_check_mark: |  |
-| lightprobes | :white_check_mark: |  |
+| ----------- | :----: | :-----------------------------------------------------------: |
+| action      | ❗ | Not stable |
+| armature    | ❗ | Not stable |
+| camera      | ✔️ |  |
+| collection  | ✔️ |  |
+| curve       | ✔️ | Nurbs surface don't load correctly |
+| gpencil     | ✔️ |  |
+| image       | ❗ | Not stable yet |
+| mesh        | ✔️ |  |
+| material    | ✔️ |  |
+| metaball    | ✔️ |  |
+| object      | ✔️ |  |
+| scene       | ✔️ |  |
+| world       | ✔️ |  |
+| lightprobes | ✔️ |  |
+| particles   | ❌ | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24) |
+| speakers    | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/65) |
+| vse         | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
+| physics     | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
+| libraries   | ❗ | Partial |
+
 
 
 ### Performance issues
 
@@ -52,8 +58,6 @@ I'm working on it.
 | Dependencies | Version | Needed |
 | ------------ | :-----: | -----: |
 | ZeroMQ       | latest  | yes    |
-| msgpack      | latest  | yes    |
-| PyYAML       | latest  | yes    |
 | JsonDiff     | latest  | yes    |
 
 
@@ -65,4 +69,3 @@ See [contributing section](https://multi-user.readthedocs.io/en/latest/ways_to_c
 
 See [license](LICENSE)
 
-[](https://multi-user.readthedocs.io/en/latest/?badge=latest)
Binary file not shown (Before: 12 KiB, After: 9.6 KiB)
BIN docs/getting_started/img/quickstart_presence.png (new file) Binary file not shown (After: 5.8 KiB)
Binary file not shown (Before: 9.1 KiB, After: 12 KiB)
@@ -4,6 +4,6 @@ Installation
 
 *The process is the same for linux, mac and windows.*
 
-1. Download latest release `multi_user.zip <https://gitlab.com/slumber/multi-user/uploads/7ce1fd015f50f610e7deefda862d55b1/multi-user.zip>`_.
+1. Download latest `release <https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build>`_ or `develop (unstable !) <https://gitlab.com/slumber/multi-user/-/jobs/artifacts/develop/download?job=build>`_ build.
 2. Run blender as administrator (to allow python dependencies auto-installation).
-3. Install last_version.zip from your addon preferences.
+3. Install **multi-user.zip** from your addon preferences.
@@ -46,6 +46,8 @@ If 5555 is given in host settings, it will use 5555, 5556 (5555+1), 5557 (5555+2
 
 .. image:: img/quickstart_advanced.png
 
+**Synchronise render settings** (only host) enable replication of EEVEE and CYCLES render settings to match render between clients.
+
 **Right strategy** (only host) enable you to choose between a strict and a relaxed pattern:
 
 - **Strict**: Host is the king, by default the host own each properties, only him can grant modification rights.
@@ -76,6 +78,19 @@ By selecting a user in the list you'll have access to different **actions**:
 
 - The **camera button** allow you to snap on the user viewpoint.
 - The **time button** allow you to snap on the user time.
+- The **cross button** [**host only**] allow the admin to kick users
+
+-------------------
+Presence show flags
+-------------------
+
+.. image:: img/quickstart_presence.png
+
+This panel allow you to tweak users overlay in the viewport:
+
+- **Show selected objects**: display other users current selection
+- **Show users**: display users current viewpoint
+- **Show different scenes**: display users on other scenes
+
 ---------------------
 Replicated properties
 ---------------------
@@ -1,12 +1,31 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+
 bl_info = {
     "name": "Multi-User",
     "author": "Swann Martinez",
-    "version": (0, 0, 2),
+    "version": (0, 0, 3),
     "description": "Enable real-time collaborative workflow inside blender",
     "blender": (2, 80, 0),
     "location": "3D View > Sidebar > Multi-User tab",
     "warning": "Unstable addon, use it at your own risks",
     "category": "Collaboration",
+    "doc_url": "https://multi-user.readthedocs.io/en/develop/index.html",
     "wiki_url": "https://multi-user.readthedocs.io/en/develop/index.html",
     "tracker_url": "https://gitlab.com/slumber/multi-user/issues",
     "support": "COMMUNITY"
@@ -21,99 +40,20 @@ import sys
 import bpy
 from bpy.app.handlers import persistent
 
-from . import environment, utils, presence, preferences
-from .libs.replication.replication.constants import RP_COMMON
+from . import environment, utils
 
 
 # TODO: remove dependency as soon as replication will be installed as a module
 DEPENDENCIES = {
     ("zmq","zmq"),
-    ("msgpack","msgpack"),
-    ("yaml","pyyaml"),
-    ("jsondiff","jsondiff")
+    ("jsondiff","jsondiff"),
+    ("deepdiff", "deepdiff")
 }
 
 
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.WARNING)
 
-def client_list_callback(scene, context):
-    from . import operators
-
-    items = [(RP_COMMON, RP_COMMON, "")]
-
-    username = utils.get_preferences().username
-    cli = operators.client
-    if cli:
-        client_ids = cli.online_users.keys()
-        for id in client_ids:
-            name_desc = id
-            if id == username:
-                name_desc += " (self)"
-
-            items.append((id, name_desc, ""))
-
-    return items
-
-
-class SessionUser(bpy.types.PropertyGroup):
-    """Session User
-
-    Blender user information property
-    """
-    username: bpy.props.StringProperty(name="username")
-    current_frame: bpy.props.IntProperty(name="current_frame")
-
-
-class SessionProps(bpy.types.PropertyGroup):
-    is_admin: bpy.props.BoolProperty(
-        name="is_admin",
-        default=False
-    )
-    session_mode: bpy.props.EnumProperty(
-        name='session_mode',
-        description='session mode',
-        items={
-            ('HOST', 'hosting', 'host a session'),
-            ('CONNECT', 'connexion', 'connect to a session')},
-        default='HOST')
-    clients: bpy.props.EnumProperty(
-        name="clients",
-        description="client enum",
-        items=client_list_callback)
-    enable_presence: bpy.props.BoolProperty(
-        name="Presence overlay",
-        description='Enable overlay drawing module',
-        default=True,
-        update=presence.update_presence
-    )
-    presence_show_selected: bpy.props.BoolProperty(
-        name="Show selected objects",
-        description='Enable selection overlay ',
-        default=True,
-        update=presence.update_overlay_settings
-    )
-    presence_show_user: bpy.props.BoolProperty(
-        name="Show users",
-        description='Enable user overlay ',
-        default=True,
-        update=presence.update_overlay_settings
-    )
-    filter_owned: bpy.props.BoolProperty(
-        name="filter_owned",
-        description='Show only owned datablocks',
-        default=True
-    )
-    user_snap_running: bpy.props.BoolProperty(
-        default=False
-    )
-    time_snap_running: bpy.props.BoolProperty(
-        default=False
-    )
-
-
-classes = (
-    SessionUser,
-    SessionProps,
-)
-
 libs = os.path.dirname(os.path.abspath(__file__))+"\\libs\\replication\\replication"
@@ -127,34 +67,36 @@ def register():
     from . import operators
     from . import ui
     from . import preferences
-    for cls in classes:
-        bpy.utils.register_class(cls)
-
-    bpy.types.WindowManager.session = bpy.props.PointerProperty(
-        type=SessionProps)
-    bpy.types.ID.uuid = bpy.props.StringProperty(default="")
-    bpy.types.WindowManager.online_users = bpy.props.CollectionProperty(
-        type=SessionUser
-    )
-    bpy.types.WindowManager.user_index = bpy.props.IntProperty()
+    from . import addon_updater_ops
 
     preferences.register()
+    addon_updater_ops.register(bl_info)
     presence.register()
     operators.register()
     ui.register()
 
+    bpy.types.WindowManager.session = bpy.props.PointerProperty(
+        type=preferences.SessionProps)
+    bpy.types.ID.uuid = bpy.props.StringProperty(default="")
+    bpy.types.WindowManager.online_users = bpy.props.CollectionProperty(
+        type=preferences.SessionUser
+    )
+    bpy.types.WindowManager.user_index = bpy.props.IntProperty()
+
 
 def unregister():
     from . import presence
     from . import operators
     from . import ui
     from . import preferences
+    from . import addon_updater_ops
 
     presence.unregister()
+    addon_updater_ops.unregister()
     ui.unregister()
     operators.unregister()
     preferences.unregister()
-    del bpy.types.WindowManager.session
 
-    for cls in reversed(classes):
-        bpy.utils.unregister_class(cls)
+    del bpy.types.WindowManager.session
+    del bpy.types.ID.uuid
+    del bpy.types.WindowManager.online_users
+    del bpy.types.WindowManager.user_index
1671  multi_user/addon_updater.py (new file) File diff suppressed because it is too large
1454  multi_user/addon_updater_ops.py (new file) File diff suppressed because it is too large
@@ -1,3 +1,21 @@
+# ##### BEGIN GPL LICENSE BLOCK ##### (same 16-line GPL v3 header as added above) ##### END GPL LICENSE BLOCK #####
+
+
 __all__ = [
     'bl_object',
     'bl_mesh',
@@ -1,11 +1,132 @@
+# ##### BEGIN GPL LICENSE BLOCK ##### (same 16-line GPL v3 header as added above) ##### END GPL LICENSE BLOCK #####
+
+
 import bpy
 import mathutils
 import copy
+import numpy as np
+from enum import Enum
 
 from .. import utils
+from .dump_anything import (
+    Dumper, Loader, np_dump_collection, np_load_collection, remove_items_from_dict)
 from .bl_datablock import BlDatablock
 
-# WIP
+KEYFRAME = [
+    'amplitude',
+    'co',
+    'back',
+    'handle_left',
+    'handle_right',
+    'easing',
+    'handle_left_type',
+    'handle_right_type',
+    'type',
+    'interpolation',
+]
+
+
+def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
+    """ Dump a single fcurve to a dict
+
+    :arg fcurve: fcurve to dump
+    :type fcurve: bpy.types.FCurve
+    :arg use_numpy: use numpy to accelerate dump
+    :type use_numpy: bool
+    :return: dict
+    """
+    fcurve_data = {
+        "data_path": fcurve.data_path,
+        "dumped_array_index": fcurve.array_index,
+        "use_numpy": use_numpy
+    }
+
+    if use_numpy:
+        points = fcurve.keyframe_points
+        fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)
+        fcurve_data['keyframe_points'] = np_dump_collection(points, KEYFRAME)
+
+    else:  # Legacy method
+        dumper = Dumper()
+        fcurve_data["keyframe_points"] = []
+
+        for k in fcurve.keyframe_points:
+            fcurve_data["keyframe_points"].append(
+                dumper.dump(k)
+            )
+
+    return fcurve_data
+
+
+def load_fcurve(fcurve_data, fcurve):
+    """ Load a dumped fcurve
+
+    :arg fcurve_data: a dumped fcurve
+    :type fcurve_data: dict
+    :arg fcurve: fcurve to dump
+    :type fcurve: bpy.types.FCurve
+    """
+    use_numpy = fcurve_data.get('use_numpy')
+
+    keyframe_points = fcurve.keyframe_points
+
+    # Remove all keyframe points
+    for i in range(len(keyframe_points)):
+        keyframe_points.remove(keyframe_points[0], fast=True)
+
+    if use_numpy:
+        keyframe_points.add(fcurve_data['keyframes_count'])
+        np_load_collection(fcurve_data["keyframe_points"], keyframe_points, KEYFRAME)
+
+    else:
+        # paste dumped keyframes
+        for dumped_keyframe_point in fcurve_data["keyframe_points"]:
+            if dumped_keyframe_point['type'] == '':
+                dumped_keyframe_point['type'] = 'KEYFRAME'
+
+            new_kf = keyframe_points.insert(
+                dumped_keyframe_point["co"][0],
+                dumped_keyframe_point["co"][1],
+                options={'FAST', 'REPLACE'}
+            )
+
+            keycache = copy.copy(dumped_keyframe_point)
+            keycache = remove_items_from_dict(
+                keycache,
+                ["co", "handle_left", "handle_right", 'type']
+            )
+
+            loader = Loader()
+            loader.load(new_kf, keycache)
+
+            new_kf.type = dumped_keyframe_point['type']
+            new_kf.handle_left = [
+                dumped_keyframe_point["handle_left"][0],
+                dumped_keyframe_point["handle_left"][1]
+            ]
+            new_kf.handle_right = [
+                dumped_keyframe_point["handle_right"][0],
+                dumped_keyframe_point["handle_right"][1]
+            ]
+
+    fcurve.update()
+
+
 class BlAction(BlDatablock):
     bl_id = "actions"
@@ -15,85 +136,27 @@ class BlAction(BlDatablock):
     bl_automatic_push = True
     bl_icon = 'ACTION_TWEAK'
 
-    def construct(self, data):
+    def _construct(self, data):
         return bpy.data.actions.new(data["name"])
 
-    def load(self, data, target):
-        begin_frame = 100000
-        end_frame = -100000
-
-        for dumped_fcurve in data["fcurves"]:
-            begin_frame = min(
-                begin_frame,
-                min(
-                    [begin_frame] + [dkp["co"][0] for dkp in dumped_fcurve["keyframe_points"]]
-                )
-            )
-            end_frame = max(
-                end_frame,
-                max(
-                    [end_frame] + [dkp["co"][0] for dkp in dumped_fcurve["keyframe_points"]]
-                )
-            )
-        begin_frame = 0
-
-        loader = utils.dump_anything.Loader()
+    def _load_implementation(self, data, target):
         for dumped_fcurve in data["fcurves"]:
             dumped_data_path = dumped_fcurve["data_path"]
             dumped_array_index = dumped_fcurve["dumped_array_index"]
 
             # create fcurve if needed
-            fcurve = target.fcurves.find(dumped_data_path, index=dumped_array_index)
+            fcurve = target.fcurves.find(
+                dumped_data_path, index=dumped_array_index)
             if fcurve is None:
-                fcurve = target.fcurves.new(dumped_data_path, index=dumped_array_index)
-
-            # remove keyframes within dumped_action range
-            for keyframe in reversed(fcurve.keyframe_points):
-                if end_frame >= (keyframe.co[0] + begin_frame ) >= begin_frame:
-                    fcurve.keyframe_points.remove(keyframe, fast=True)
-
-            # paste dumped keyframes
-            for dumped_keyframe_point in dumped_fcurve["keyframe_points"]:
-                if dumped_keyframe_point['type'] == '':
-                    dumped_keyframe_point['type'] = 'KEYFRAME'
-
-                new_kf = fcurve.keyframe_points.insert(
-                    dumped_keyframe_point["co"][0] - begin_frame,
-                    dumped_keyframe_point["co"][1],
-                    options={'FAST', 'REPLACE'}
-                )
-
-                keycache = copy.copy(dumped_keyframe_point)
-                keycache = utils.dump_anything.remove_items_from_dict(
-                    keycache,
-                    ["co", "handle_left", "handle_right",'type']
-                )
-
-                loader.load(
-                    new_kf,
-                    keycache
-                )
-
-                new_kf.type = dumped_keyframe_point['type']
-                new_kf.handle_left = [
-                    dumped_keyframe_point["handle_left"][0] - begin_frame,
-                    dumped_keyframe_point["handle_left"][1]
-                ]
-                new_kf.handle_right = [
-                    dumped_keyframe_point["handle_right"][0] - begin_frame,
-                    dumped_keyframe_point["handle_right"][1]
-                ]
-
-            # clearing (needed for blender to update well)
-            if len(fcurve.keyframe_points) == 0:
-                target.fcurves.remove(fcurve)
-        target.id_root= data['id_root']
-
-    def dump(self, pointer=None):
-        assert(pointer)
-        dumper = utils.dump_anything.Dumper()
-        dumper.exclude_filter =[
+                fcurve = target.fcurves.new(
+                    dumped_data_path, index=dumped_array_index)
+
+            load_fcurve(dumped_fcurve, fcurve)
+        target.id_root = data['id_root']
+
+    def _dump_implementation(self, data, instance=None):
+        dumper = Dumper()
+        dumper.exclude_filter = [
             'name_full',
             'original',
             'use_fake_user',
@@ -106,28 +169,11 @@ class BlAction(BlDatablock):
             'users'
         ]
         dumper.depth = 1
-        data = dumper.dump(pointer)
+        data = dumper.dump(instance)
 
         data["fcurves"] = []
-        dumper.depth = 2
-        for fcurve in self.pointer.fcurves:
-            fc = {
-                "data_path": fcurve.data_path,
-                "dumped_array_index": fcurve.array_index,
-                "keyframe_points": []
-            }
-
-            for k in fcurve.keyframe_points:
-                fc["keyframe_points"].append(
-                    dumper.dump(k)
-                )
-
-            data["fcurves"].append(fc)
+        for fcurve in instance.fcurves:
+            data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True))
 
         return data
 
-    def is_valid(self):
-        return bpy.data.actions.get(self.data['name'])
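A short usage sketch for the dump_fcurve / load_fcurve helpers introduced above: copying the keyframes of one action into another from Blender's Python console. The action name and the import path of the module are assumptions for illustration; the helper calls follow the signatures shown in the diff.

# Usage sketch (hypothetical action name "WalkAction"; module path assumed)
import bpy
from multi_user.bl_types.bl_action import dump_fcurve, load_fcurve

source = bpy.data.actions["WalkAction"]
copy = bpy.data.actions.new("WalkAction_copy")

for fcurve in source.fcurves:
    dumped = dump_fcurve(fcurve, use_numpy=True)        # serialize keyframes into a plain dict
    new_curve = copy.fcurves.new(dumped["data_path"],
                                 index=dumped["dumped_array_index"])
    load_fcurve(dumped, new_curve)                       # rebuild keyframe points from the dict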
@@ -1,12 +1,28 @@
+# ##### BEGIN GPL LICENSE BLOCK ##### (same 16-line GPL v3 header as added above) ##### END GPL LICENSE BLOCK #####
+
+
 import bpy
 import mathutils
 
-from ..libs.overrider import Overrider
-from .. import utils
-from .. import presence, operators
-from .bl_datablock import BlDatablock
-
-# WIP
+from .dump_anything import Loader, Dumper
+from .. import presence, operators, utils
+from .bl_datablock import BlDatablock
 
 
 class BlArmature(BlDatablock):
@@ -17,10 +33,10 @@ class BlArmature(BlDatablock):
     bl_automatic_push = True
     bl_icon = 'ARMATURE_DATA'
 
-    def construct(self, data):
+    def _construct(self, data):
         return bpy.data.armatures.new(data["name"])
 
-    def load_implementation(self, data, target):
+    def _load_implementation(self, data, target):
         # Load parent object
         parent_object = utils.find_from_attr(
             'uuid',
@@ -30,7 +46,7 @@ class BlArmature(BlDatablock):
 
         if parent_object is None:
             parent_object = bpy.data.objects.new(
-                data['user_name'], self.pointer)
+                data['user_name'], target)
             parent_object.uuid = data['user']
 
         is_object_in_master = (
@@ -65,10 +81,10 @@ class BlArmature(BlDatablock):
             bpy.ops.object.mode_set(mode='EDIT')
 
         for bone in data['bones']:
-            if bone not in self.pointer.edit_bones:
-                new_bone = self.pointer.edit_bones.new(bone)
+            if bone not in target.edit_bones:
+                new_bone = target.edit_bones.new(bone)
             else:
-                new_bone = self.pointer.edit_bones[bone]
+                new_bone = target.edit_bones[bone]
 
             bone_data = data['bones'].get(bone)
 
@@ -78,11 +94,12 @@ class BlArmature(BlDatablock):
             new_bone.head_radius = bone_data['head_radius']
 
             if 'parent' in bone_data:
-                new_bone.parent = self.pointer.edit_bones[data['bones']
+                new_bone.parent = target.edit_bones[data['bones']
                                                     [bone]['parent']]
                 new_bone.use_connect = bone_data['use_connect']
 
-            utils.dump_anything.load(new_bone, bone_data)
+            loader = Loader()
+            loader.load(new_bone, bone_data)
 
         if bpy.context.mode != 'OBJECT':
             bpy.ops.object.mode_set(mode='OBJECT')
@@ -92,10 +109,10 @@ class BlArmature(BlDatablock):
         if 'EDIT' in current_mode:
             bpy.ops.object.mode_set(mode='EDIT')
 
-    def dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
 
-        dumper = utils.dump_anything.Dumper()
+        dumper = Dumper()
         dumper.depth = 4
         dumper.include_filter = [
             'bones',
@@ -109,13 +126,13 @@ class BlArmature(BlDatablock):
             'layers'
 
         ]
-        data = dumper.dump(pointer)
+        data = dumper.dump(instance)
 
-        for bone in pointer.bones:
+        for bone in instance.bones:
             if bone.parent:
                 data['bones'][bone.name]['parent'] = bone.parent.name
         # get the parent Object
-        object_users = utils.get_datablock_users(pointer)[0]
+        object_users = utils.get_datablock_users(instance)[0]
         data['user'] = object_users.uuid
         data['user_name'] = object_users.name
 
@@ -127,5 +144,4 @@ class BlArmature(BlDatablock):
             item.name for item in container_users if isinstance(item, bpy.types.Scene)]
         return data
 
-    def is_valid(self):
-        return bpy.data.armatures.get(self.data['name'])
@@ -1,7 +1,25 @@
+# ##### BEGIN GPL LICENSE BLOCK ##### (same 16-line GPL v3 header as added above) ##### END GPL LICENSE BLOCK #####
+
+
 import bpy
 import mathutils
 
-from .. import utils
+from .dump_anything import Loader, Dumper
 from .bl_datablock import BlDatablock
 
 
@@ -13,22 +31,26 @@ class BlCamera(BlDatablock):
     bl_automatic_push = True
     bl_icon = 'CAMERA_DATA'
 
-    def load(self, data, target):
-        utils.dump_anything.load(target, data)
+    def _construct(self, data):
+        return bpy.data.cameras.new(data["name"])
+
+    def _load_implementation(self, data, target):
+        loader = Loader()
+        loader.load(target, data)
 
         dof_settings = data.get('dof')
 
         # DOF settings
         if dof_settings:
-            utils.dump_anything.load(target.dof, dof_settings)
+            loader.load(target.dof, dof_settings)
 
-    def construct(self, data):
-        return bpy.data.cameras.new(data["name"])
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
 
-    def dump_implementation(self, data, pointer=None):
-        assert(pointer)
+        # TODO: background image support
 
-        dumper = utils.dump_anything.Dumper()
+        dumper = Dumper()
         dumper.depth = 2
         dumper.include_filter = [
             "name",
@@ -49,8 +71,15 @@ class BlCamera(BlDatablock):
             'aperture_blades',
             'aperture_rotation',
             'aperture_ratio',
+            'display_size',
+            'show_limits',
+            'show_mist',
+            'show_sensor',
+            'show_name',
+            'sensor_fit',
+            'sensor_height',
+            'sensor_width',
         ]
-        return dumper.dump(pointer)
-
-    def is_valid(self):
-        return bpy.data.cameras.get(self.data['name'])
+        return dumper.dump(instance)
@@ -1,3 +1,21 @@
+# ##### BEGIN GPL LICENSE BLOCK ##### (same 16-line GPL v3 header as added above) ##### END GPL LICENSE BLOCK #####
+
+
 import bpy
 import mathutils
 
@@ -13,80 +31,79 @@ class BlCollection(BlDatablock):
     bl_delay_apply = 1
     bl_automatic_push = True
 
-    def construct(self, data):
+    def _construct(self, data):
         if self.is_library:
             with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
                 targetData.collections = [
                     name for name in sourceData.collections if name == self.data['name']]
 
             instance = bpy.data.collections[self.data['name']]
-            instance.uuid = self.uuid
 
             return instance
 
         instance = bpy.data.collections.new(data["name"])
-        instance.uuid = self.uuid
         return instance
 
-    def load(self, data, target):
+    def _load_implementation(self, data, target):
         # Load other meshes metadata
-        # dump_anything.load(target, data)
         target.name = data["name"]
 
-        # link objects
+        # Objects
         for object in data["objects"]:
-            object_ref = utils.find_from_attr('uuid', object, bpy.data.objects)
-            if object_ref and object_ref.name not in target.objects.keys():
+            object_ref = bpy.data.objects.get(object)
+
+            if object_ref is None:
+                continue
+
+            if object not in target.objects.keys():
                 target.objects.link(object_ref)
 
         for object in target.objects:
-            if object.uuid not in data["objects"]:
+            if object.name not in data["objects"]:
                 target.objects.unlink(object)
 
         # Link childrens
         for collection in data["children"]:
-            collection_ref = utils.find_from_attr(
-                'uuid', collection, bpy.data.collections)
-            if collection_ref and collection_ref.name not in target.children.keys():
+            collection_ref = bpy.data.collections.get(collection)
+
+            if collection_ref is None:
+                continue
+            if collection_ref.name not in target.children.keys():
                 target.children.link(collection_ref)
 
         for collection in target.children:
-            if collection.uuid not in data["children"]:
+            if collection.name not in data["children"]:
                 target.children.unlink(collection)
 
-    def dump_implementation(self, data, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
         data = {}
-        data['name'] = pointer.name
+        data['name'] = instance.name
 
         # dump objects
         collection_objects = []
-        for object in pointer.objects:
+        for object in instance.objects:
             if object not in collection_objects:
-                collection_objects.append(object.uuid)
+                collection_objects.append(object.name)
 
         data['objects'] = collection_objects
 
         # dump children collections
         collection_children = []
-        for child in pointer.children:
+        for child in instance.children:
             if child not in collection_children:
-                collection_children.append(child.uuid)
+                collection_children.append(child.name)
 
         data['children'] = collection_children
 
         return data
 
-    def resolve_dependencies(self):
+    def _resolve_deps_implementation(self):
         deps = []
 
-        for child in self.pointer.children:
+        for child in self.instance.children:
             deps.append(child)
-        for object in self.pointer.objects:
+        for object in self.instance.objects:
             deps.append(object)
 
         return deps
 
-    def is_valid(self):
-        return bpy.data.collections.get(self.data['name'])
@@ -1,8 +1,51 @@
+# ##### BEGIN GPL LICENSE BLOCK ##### (same 16-line GPL v3 header as added above) ##### END GPL LICENSE BLOCK #####
+
+
 import bpy
+import bpy.types as T
 import mathutils
+import logging
 
 from .. import utils
 from .bl_datablock import BlDatablock
+from .dump_anything import (Dumper, Loader,
+                            np_load_collection,
+                            np_dump_collection)
+
+logger = logging.getLogger(__name__)
+
+SPLINE_BEZIER_POINT = [
+    # "handle_left_type",
+    # "handle_right_type",
+    "handle_left",
+    "co",
+    "handle_right",
+    "tilt",
+    "weight_softbody",
+    "radius",
+]
+
+SPLINE_POINT = [
+    "co",
+    "tilt",
+    "weight_softbody",
+    "radius",
+]
+
 
 class BlCurve(BlDatablock):
     bl_id = "curves"
@@ -12,52 +55,67 @@ class BlCurve(BlDatablock):
     bl_automatic_push = True
     bl_icon = 'CURVE_DATA'
 
-    def construct(self, data):
-        return bpy.data.curves.new(data["name"], 'CURVE')
+    def _construct(self, data):
+        return bpy.data.curves.new(data["name"], data["type"])
 
-    def load(self, data, target):
-        utils.dump_anything.load(target, data)
+    def _load_implementation(self, data, target):
+        loader = Loader()
+        loader.load(target, data)
+
         target.splines.clear()
         # load splines
-        for spline in data['splines']:
-            new_spline = target.splines.new(data['splines'][spline]['type'])
-            utils.dump_anything.load(new_spline, data['splines'][spline])
+        for spline in data['splines'].values():
+            new_spline = target.splines.new(spline['type'])
 
             # Load curve geometry data
-            for bezier_point_index in data['splines'][spline]["bezier_points"]:
-                if bezier_point_index != 0:
-                    new_spline.bezier_points.add(1)
-                utils.dump_anything.load(
-                    new_spline.bezier_points[bezier_point_index], data['splines'][spline]["bezier_points"][bezier_point_index])
-
-            for point_index in data['splines'][spline]["points"]:
-                new_spline.points.add(1)
-                utils.dump_anything.load(
-                    new_spline.points[point_index], data['splines'][spline]["points"][point_index])
+            if new_spline.type == 'BEZIER':
+                bezier_points = new_spline.bezier_points
+                bezier_points.add(spline['bezier_points_count'])
+                np_load_collection(spline['bezier_points'], bezier_points, SPLINE_BEZIER_POINT)
+
+            # Not really working for now...
+            # See https://blender.stackexchange.com/questions/7020/create-nurbs-surface-with-python
+            if new_spline.type == 'NURBS':
+                logger.error("NURBS not supported.")
+                # new_spline.points.add(len(data['splines'][spline]["points"])-1)
+                # for point_index in data['splines'][spline]["points"]:
+                #     loader.load(
+                #         new_spline.points[point_index], data['splines'][spline]["points"][point_index])
+
+            loader.load(new_spline, spline)
 
-    def dump_implementation(self, data, pointer=None):
-        assert(pointer)
-        data = utils.dump_datablock(pointer, 1)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
+        dumper = Dumper()
+        # Conflicting attributes
+        # TODO: remove them with the NURBS support
+        dumper.exclude_filter = [
+            'users',
+            'order_u',
+            'order_v',
+            'point_count_v',
+            'point_count_u',
+            'active_textbox'
+        ]
+        if instance.use_auto_texspace:
+            dumper.exclude_filter.extend([
+                'texspace_location',
+                'texspace_size'])
+        data = dumper.dump(instance)
         data['splines'] = {}
 
-        dumper = utils.dump_anything.Dumper()
-        dumper.depth = 3
-
-        for index,spline in enumerate(pointer.splines):
-            spline_data = {}
-            spline_data['points'] = dumper.dump(spline.points)
-            spline_data['bezier_points'] = dumper.dump(spline.bezier_points)
-            spline_data['type'] = dumper.dump(spline.type)
+        for index, spline in enumerate(instance.splines):
+            dumper.depth = 2
+            spline_data = dumper.dump(spline)
+            # spline_data['points'] = np_dump_collection(spline.points, SPLINE_POINT)
+            spline_data['bezier_points_count'] = len(spline.bezier_points)-1
+            spline_data['bezier_points'] = np_dump_collection(spline.bezier_points, SPLINE_BEZIER_POINT)
             data['splines'][index] = spline_data
 
-        if isinstance(pointer,'TextCurve'):
-            data['type'] = 'TEXT'
-        if isinstance(pointer,'SurfaceCurve'):
+        if isinstance(instance, T.SurfaceCurve):
             data['type'] = 'SURFACE'
-        if isinstance(pointer,'TextCurve'):
+        elif isinstance(instance, T.TextCurve):
+            data['type'] = 'FONT'
+        elif isinstance(instance, T.Curve):
             data['type'] = 'CURVE'
         return data
 
-    def is_valid(self):
-        return bpy.data.curves.get(self.data['name'])
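The np_dump_collection / np_load_collection helpers used for the spline bezier points (and for the fcurve keyframes earlier) come from the addon's dump_anything module, which is not shown in this diff. As a rough illustration of the technique they rely on, batched foreach_get / foreach_set reads and writes over flat numpy buffers instead of per-item Python loops, here is a hedged sketch. The helper names, signatures and flattening scheme are assumptions for illustration, not the actual implementation, and this sketch only handles numeric attributes (string or enum attributes would need a separate mapping).

# Sketch only: a plausible shape for numpy-based collection dump/load helpers.
import numpy as np

def np_dump_collection_sketch(collection, attributes):
    """Dump the listed numeric attributes of a bpy_prop_collection into flat lists."""
    dumped = {}
    for attr in attributes:
        # Infer the per-item width (e.g. 3 for 'co', 1 for 'tilt') from the first item.
        sample = getattr(collection[0], attr) if len(collection) else 0.0
        width = len(sample) if hasattr(sample, '__len__') else 1
        buffer = np.zeros(len(collection) * width, dtype=np.float64)
        collection.foreach_get(attr, buffer)   # batched read, avoids a Python loop
        dumped[attr] = buffer.tolist()
    return dumped

def np_load_collection_sketch(dumped, collection, attributes):
    """Write previously dumped numeric attributes back with a batched foreach_set."""
    for attr in attributes:
        collection.foreach_set(attr, np.array(dumped[attr], dtype=np.float64))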
@@ -1,13 +1,48 @@
+# ##### BEGIN GPL LICENSE BLOCK ##### (same 16-line GPL v3 header as added above) ##### END GPL LICENSE BLOCK #####
+
+
 import bpy
 import mathutils
 
 from .. import utils
+from .dump_anything import Loader, Dumper
 from ..libs.replication.replication.data import ReplicatedDatablock
-from ..libs.replication.replication.constants import UP
-from ..libs.replication.replication.constants import DIFF_BINARY
+from ..libs.replication.replication.constants import (UP, DIFF_BINARY)
+
+
+def has_action(target):
+    """ Check if the target datablock has actions
+    """
+    return (hasattr(target, 'animation_data')
+            and target.animation_data
+            and target.animation_data.action)
+
+
+def has_driver(target):
+    """ Check if the target datablock is driven
+    """
+    return (hasattr(target, 'animation_data')
+            and target.animation_data
+            and target.animation_data.drivers)
 
 
 def dump_driver(driver):
-    dumper = utils.dump_anything.Dumper()
+    dumper = Dumper()
     dumper.depth = 6
     data = dumper.dump(driver)
 
@@ -15,6 +50,7 @@ def dump_driver(driver):
 
 
 def load_driver(target_datablock, src_driver):
+    loader = Loader()
     drivers = target_datablock.animation_data.drivers
     src_driver_data = src_driver['driver']
     new_driver = drivers.new(src_driver['data_path'])
@@ -22,7 +58,7 @@ def load_driver(target_datablock, src_driver):
     # Settings
     new_driver.driver.type = src_driver_data['type']
     new_driver.driver.expression = src_driver_data['expression']
-    utils.dump_anything.load(new_driver, src_driver)
+    loader.load(new_driver, src_driver)
 
     # Variables
     for src_variable in src_driver_data['variables']:
@@ -35,7 +71,7 @@ def load_driver(target_datablock, src_driver):
             src_target_data = src_var_data['targets'][src_target]
             new_var.targets[src_target].id = utils.resolve_from_id(
                 src_target_data['id'], src_target_data['id_type'])
-            utils.dump_anything.load(
+            loader.load(
                 new_var.targets[src_target], src_target_data)
 
     # Fcurve
@@ -47,8 +83,7 @@ def load_driver(target_datablock, src_driver):
 
     for index, src_point in enumerate(src_driver['keyframe_points']):
         new_point = new_fcurve[index]
-        utils.dump_anything.load(
-            new_point, src_driver['keyframe_points'][src_point])
+        loader.load(new_point, src_driver['keyframe_points'][src_point])
 
 
 class BlDatablock(ReplicatedDatablock):
@@ -61,91 +96,64 @@ class BlDatablock(ReplicatedDatablock):
     bl_automatic_push : boolean
     bl_icon : type icon (blender icon name)
     """
-    bl_id = "scenes"
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        pointer = kwargs.get('pointer', None)
+        instance = kwargs.get('instance', None)
 
         # TODO: use is_library_indirect
-        self.is_library = (pointer and hasattr(pointer, 'library') and
-                           pointer.library) or \
+        self.is_library = (instance and hasattr(instance, 'library') and
+                           instance.library) or \
             (self.data and 'library' in self.data)
 
-        if self.is_library:
-            self.load = self.load_library
-            self.dump = self.dump_library
-            self.diff = self.diff_library
-            self.resolve_dependencies = self.resolve_dependencies_library
-
-        if self.pointer and hasattr(self.pointer, 'uuid'):
-            self.pointer.uuid = self.uuid
+        if instance and hasattr(instance, 'uuid'):
+            instance.uuid = self.uuid
 
         # self.diff_method = DIFF_BINARY
 
-    def library_apply(self):
-        """Apply stored data
-        """
-        # UP in case we want to reset our pointer data
-        self.state = UP
-
-    def bl_diff(self):
-        """Generic datablock diff"""
-        return self.pointer.name != self.data['name']
-
-    def construct_library(self, data):
-        return None
-
-    def load_library(self, data, target):
-        pass
-
-    def dump_library(self, pointer=None):
-        return utils.dump_datablock(pointer, 1)
-
-    def diff_library(self):
-        return False
-
-    def resolve_dependencies_library(self):
-        return [self.pointer.library]
+    @property
+    def instance(self):
 
     def resolve(self):
         datablock_ref = None
         datablock_root = getattr(bpy.data, self.bl_id)
         datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root)
 
         # In case of lost uuid (ex: undo), resolve by name and reassign it
-        # TODO: avoid reference storing
         if not datablock_ref:
-            datablock_ref = getattr(
-                bpy.data, self.bl_id).get(self.data['name'])
+            datablock_ref = datablock_root.get(self.data['name'])
 
             if datablock_ref:
                 setattr(datablock_ref, 'uuid', self.uuid)
 
-        self.pointer = datablock_ref
+        return datablock_ref
 
-    def dump(self, pointer=None):
+    def _dump(self, instance=None):
+        dumper = Dumper()
         data = {}
-        if utils.has_action(pointer):
-            dumper = utils.dump_anything.Dumper()
+        # Dump animation data
+        if has_action(instance):
+            dumper = Dumper()
             dumper.include_filter = ['action']
-            data['animation_data'] = dumper.dump(pointer.animation_data)
+            data['animation_data'] = dumper.dump(instance.animation_data)
 
-        if utils.has_driver(pointer):
+        if has_driver(instance):
             dumped_drivers = {'animation_data': {'drivers': []}}
-            for driver in pointer.animation_data.drivers:
+            for driver in instance.animation_data.drivers:
                 dumped_drivers['animation_data']['drivers'].append(
                     dump_driver(driver))
 
             data.update(dumped_drivers)
-        data.update(self.dump_implementation(data, pointer=pointer))
+
+        if self.is_library:
+            data.update(dumper.dump(instance))
+        else:
+            data.update(self._dump_implementation(data, instance=instance))
 
         return data
 
-    def dump_implementation(self, data, target):
+    def _dump_implementation(self, data, target):
         raise NotImplementedError
 
-    def load(self, data, target):
+    def _load(self, data, target):
         # Load animation data
|
||||||
if 'animation_data' in data.keys():
|
if 'animation_data' in data.keys():
|
||||||
if target.animation_data is None:
|
if target.animation_data is None:
|
||||||
@ -161,18 +169,27 @@ class BlDatablock(ReplicatedDatablock):
|
|||||||
if 'action' in data['animation_data']:
|
if 'action' in data['animation_data']:
|
||||||
target.animation_data.action = bpy.data.actions[data['animation_data']['action']]
|
target.animation_data.action = bpy.data.actions[data['animation_data']['action']]
|
||||||
|
|
||||||
self.load_implementation(data, target)
|
if self.is_library:
|
||||||
|
return
|
||||||
|
else:
|
||||||
|
self._load_implementation(data, target)
|
||||||
|
|
||||||
def load_implementation(self, data, target):
|
def _load_implementation(self, data, target):
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def resolve_dependencies(self):
|
def resolve_deps(self):
|
||||||
dependencies = []
|
dependencies = []
|
||||||
|
|
||||||
if utils.has_action(self.pointer):
|
if has_action(self.instance):
|
||||||
dependencies.append(self.pointer.animation_data.action)
|
dependencies.append(self.instance.animation_data.action)
|
||||||
|
|
||||||
|
if not self.is_library:
|
||||||
|
dependencies.extend(self._resolve_deps_implementation())
|
||||||
|
|
||||||
return dependencies
|
return dependencies
|
||||||
|
|
||||||
|
def _resolve_deps_implementation(self):
|
||||||
|
return []
|
||||||
|
|
||||||
def is_valid(self):
|
def is_valid(self):
|
||||||
raise NotImplementedError
|
return getattr(bpy.data, self.bl_id).get(self.data['name'])
|
||||||
|
@ -1,83 +1,282 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
import mathutils
|
import mathutils
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
from .. import utils
|
from .dump_anything import (Dumper,
|
||||||
|
Loader,
|
||||||
|
np_dump_collection,
|
||||||
|
np_load_collection)
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock
|
||||||
|
|
||||||
|
# GPencil data API is structured as follows:
|
||||||
|
# GP-Object --> GP-Layers --> GP-Frames --> GP-Strokes --> GP-Stroke-Points
|
||||||
|
|
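As a quick orientation for the hierarchy above, a grease pencil datablock can be walked like this (illustrative only; assumes at least one grease pencil datablock exists in the file):
import bpy

gp = bpy.data.grease_pencils[0]          # any existing grease pencil datablock
for layer in gp.layers:                  # GP-Layers
    for frame in layer.frames:           # GP-Frames
        for stroke in frame.strokes:     # GP-Strokes
            for point in stroke.points:  # GP-Stroke-Points
                print(point.co)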
||||||
def load_gpencil_layer(target=None, data=None, create=False):
|
STROKE_POINT = [
|
||||||
|
'co',
|
||||||
|
'pressure',
|
||||||
|
'strength',
|
||||||
|
'uv_factor',
|
||||||
|
'uv_rotation'
|
||||||
|
|
||||||
utils.dump_anything.load(target, data)
|
]
|
||||||
for k,v in target.frames.items():
|
|
||||||
target.frames.remove(v)
|
|
||||||
|
|
||||||
for frame in data["frames"]:
|
if bpy.app.version[1] >= 83:
|
||||||
|
STROKE_POINT.append('vertex_color')
|
||||||
|
|
||||||
tframe = target.frames.new(data["frames"][frame]['frame_number'])
|
def dump_stroke(stroke):
|
||||||
|
""" Dump a grease pencil stroke to a dict
|
||||||
|
|
||||||
# utils.dump_anything.load(tframe, data["frames"][frame])
|
:param stroke: target grease pencil stroke
|
||||||
for stroke in data["frames"][frame]["strokes"]:
|
:type stroke: bpy.types.GPencilStroke
|
||||||
try:
|
:return: dict
|
||||||
tstroke = tframe.strokes[stroke]
|
"""
|
||||||
except:
|
|
||||||
tstroke = tframe.strokes.new()
|
|
||||||
utils.dump_anything.load(
|
|
||||||
tstroke, data["frames"][frame]["strokes"][stroke])
|
|
||||||
|
|
||||||
for point in data["frames"][frame]["strokes"][stroke]["points"]:
|
assert(stroke)
|
||||||
p = data["frames"][frame]["strokes"][stroke]["points"][point]
|
|
||||||
|
|
||||||
tstroke.points.add(1)
|
dumper = Dumper()
|
||||||
tpoint = tstroke.points[len(tstroke.points)-1]
|
dumper.include_filter = [
|
||||||
|
"aspect",
|
||||||
|
"display_mode",
|
||||||
|
"draw_cyclic",
|
||||||
|
"end_cap_mode",
|
||||||
|
"hardeness",
|
||||||
|
"line_width",
|
||||||
|
"material_index",
|
||||||
|
"start_cap_mode",
|
||||||
|
"uv_rotation",
|
||||||
|
"uv_scale",
|
||||||
|
"uv_translation",
|
||||||
|
"vertex_color_fill",
|
||||||
|
]
|
||||||
|
dumped_stroke = dumper.dump(stroke)
|
||||||
|
|
||||||
|
# Stroke points
|
||||||
|
p_count = len(stroke.points)
|
||||||
|
dumped_stroke['p_count'] = p_count
|
||||||
|
dumped_stroke['points'] = np_dump_collection(stroke.points, STROKE_POINT)
|
||||||
|
|
||||||
|
# TODO: uv_factor, uv_rotation
|
||||||
|
|
||||||
|
return dumped_stroke
|
||||||
|
|
||||||
|
|
||||||
|
def load_stroke(stroke_data, stroke):
|
||||||
|
""" Load a grease pencil stroke from a dict
|
||||||
|
|
||||||
|
:param stroke_data: dumped grease pencil stroke
|
||||||
|
:type stroke_data: dict
|
||||||
|
:param stroke: target grease pencil stroke
|
||||||
|
:type stroke: bpy.types.GPencilStroke
|
||||||
|
"""
|
||||||
|
assert(stroke and stroke_data)
|
||||||
|
|
||||||
|
loader = Loader()
|
||||||
|
loader.load(stroke, stroke_data)
|
||||||
|
|
||||||
|
stroke.points.add(stroke_data["p_count"])
|
||||||
|
|
||||||
|
np_load_collection(stroke_data['points'], stroke.points, STROKE_POINT)
|
||||||
|
|
||||||
|
|
||||||
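The np_dump_collection / np_load_collection helpers used above are imported from dump_anything and are not shown in this diff. A minimal sketch of the assumed behaviour, built on Blender's bpy_prop_collection.foreach_get / foreach_set, could look like this (simplified; the real helpers presumably use numpy buffers with per-attribute dtypes, plain lists are used here because foreach_get/foreach_set accept any sequence):
# Hypothetical stand-ins for the imported helpers, shown for illustration only.
def sketch_np_dump_collection(collection, attributes):
    dumped = {}
    for attr in attributes:
        if len(collection) == 0:
            dumped[attr] = []
            continue
        sample = getattr(collection[0], attr)
        size = len(sample) if hasattr(sample, '__len__') else 1
        buffer = [0.0] * (len(collection) * size)
        collection.foreach_get(attr, buffer)       # flat read of the whole collection
        dumped[attr] = buffer
    return dumped


def sketch_np_load_collection(dumped, collection, attributes):
    for attr in attributes:
        collection.foreach_set(attr, dumped[attr])  # flat write back onto a pre-sized collection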
|
def dump_frame(frame):
|
||||||
|
""" Dump a grease pencil frame to a dict
|
||||||
|
|
||||||
|
:param frame: target grease pencil frame
|
||||||
|
:type frame: bpy.types.GPencilFrame
|
||||||
|
:return: dict
|
||||||
|
"""
|
||||||
|
|
||||||
|
assert(frame)
|
||||||
|
|
||||||
|
dumped_frame = dict()
|
||||||
|
dumped_frame['frame_number'] = frame.frame_number
|
||||||
|
dumped_frame['strokes'] = []
|
||||||
|
|
||||||
|
# TODO: take existing strokes into account
|
||||||
|
for stroke in frame.strokes:
|
||||||
|
dumped_frame['strokes'].append(dump_stroke(stroke))
|
||||||
|
|
||||||
|
return dumped_frame
|
||||||
|
|
||||||
|
|
||||||
|
def load_frame(frame_data, frame):
|
||||||
|
""" Load a grease pencil frame from a dict
|
||||||
|
|
||||||
|
:param frame_data: source grease pencil frame
|
||||||
|
:type frame_data: dict
|
||||||
|
:param frame: target grease pencil frame
|
||||||
|
:type frame: bpy.types.GPencilFrame
|
||||||
|
"""
|
||||||
|
|
||||||
|
assert(frame and frame_data)
|
||||||
|
|
||||||
|
# frame.frame_number = frame_data['frame_number']
|
||||||
|
|
||||||
|
# TODO: take existing strokes into account
|
||||||
|
|
||||||
|
for stroke_data in frame_data['strokes']:
|
||||||
|
target_stroke = frame.strokes.new()
|
||||||
|
load_stroke(stroke_data, target_stroke)
|
||||||
|
|
||||||
|
|
||||||
|
def dump_layer(layer):
|
||||||
|
""" Dump a grease pencil layer
|
||||||
|
|
||||||
|
:param layer: target grease pencil layer
|
||||||
|
:type layer: bpy.types.GPencilLayer
|
||||||
|
"""
|
||||||
|
|
||||||
|
assert(layer)
|
||||||
|
|
||||||
|
dumper = Dumper()
|
||||||
|
|
||||||
|
dumper.include_filter = [
|
||||||
|
'info',
|
||||||
|
'opacity',
|
||||||
|
'channel_color',
|
||||||
|
'color',
|
||||||
|
# 'thickness', #TODO: enabling only for annotation
|
||||||
|
'tint_color',
|
||||||
|
'tint_factor',
|
||||||
|
'vertex_paint_opacity',
|
||||||
|
'line_change',
|
||||||
|
'use_onion_skinning',
|
||||||
|
'use_annotation_onion_skinning',
|
||||||
|
'annotation_onion_before_range',
|
||||||
|
'annotation_onion_after_range',
|
||||||
|
'annotation_onion_before_color',
|
||||||
|
'annotation_onion_after_color',
|
||||||
|
'pass_index',
|
||||||
|
# 'viewlayer_render',
|
||||||
|
'blend_mode',
|
||||||
|
'hide',
|
||||||
|
'annotation_hide',
|
||||||
|
'lock',
|
||||||
|
# 'lock_frame',
|
||||||
|
# 'lock_material',
|
||||||
|
# 'use_mask_layer',
|
||||||
|
'use_lights',
|
||||||
|
'use_solo_mode',
|
||||||
|
'select',
|
||||||
|
'show_points',
|
||||||
|
'show_in_front',
|
||||||
|
# 'parent',
|
||||||
|
# 'parent_type',
|
||||||
|
# 'parent_bone',
|
||||||
|
# 'matrix_inverse',
|
||||||
|
]
|
||||||
|
dumped_layer = dumper.dump(layer)
|
||||||
|
|
||||||
|
dumped_layer['frames'] = []
|
||||||
|
|
||||||
|
for frame in layer.frames:
|
||||||
|
dumped_layer['frames'].append(dump_frame(frame))
|
||||||
|
|
||||||
|
return dumped_layer
|
||||||
|
|
||||||
|
|
||||||
|
def load_layer(layer_data, layer):
|
||||||
|
""" Load a grease pencil layer from a dict
|
||||||
|
|
||||||
|
:param layer_data: source grease pencil layer data
|
||||||
|
:type layer_data: dict
|
||||||
|
:param layer: target grease pencil layer
|
||||||
|
:type layer: bpy.types.GPencilLayer
|
||||||
|
"""
|
||||||
|
# TODO: take existing data into account
|
||||||
|
loader = Loader()
|
||||||
|
loader.load(layer, layer_data)
|
||||||
|
|
||||||
|
for frame_data in layer_data["frames"]:
|
||||||
|
target_frame = layer.frames.new(frame_data['frame_number'])
|
||||||
|
|
||||||
|
load_frame(frame_data, target_frame)
|
||||||
|
|
||||||
utils.dump_anything.load(tpoint, p)
|
|
||||||
|
|
||||||
|
|
||||||
class BlGpencil(BlDatablock):
|
class BlGpencil(BlDatablock):
|
||||||
bl_id = "grease_pencils"
|
bl_id = "grease_pencils"
|
||||||
bl_class = bpy.types.GreasePencil
|
bl_class = bpy.types.GreasePencil
|
||||||
bl_delay_refresh = 5
|
bl_delay_refresh = 2
|
||||||
bl_delay_apply = 5
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
bl_icon = 'GREASEPENCIL'
|
bl_icon = 'GREASEPENCIL'
|
||||||
|
|
||||||
def construct(self, data):
|
def _construct(self, data):
|
||||||
return bpy.data.grease_pencils.new(data["name"])
|
return bpy.data.grease_pencils.new(data["name"])
|
||||||
|
|
||||||
def load(self, data, target):
|
def _load_implementation(self, data, target):
|
||||||
for layer in target.layers:
|
|
||||||
target.layers.remove(layer)
|
|
||||||
|
|
||||||
if "layers" in data.keys():
|
|
||||||
for layer in data["layers"]:
|
|
||||||
if layer not in target.layers.keys():
|
|
||||||
gp_layer = target.layers.new(data["layers"][layer]["info"])
|
|
||||||
else:
|
|
||||||
gp_layer = target.layers[layer]
|
|
||||||
load_gpencil_layer(
|
|
||||||
target=gp_layer, data=data["layers"][layer], create=True)
|
|
||||||
|
|
||||||
utils.dump_anything.load(target, data)
|
|
||||||
|
|
||||||
target.materials.clear()
|
target.materials.clear()
|
||||||
if "materials" in data.keys():
|
if "materials" in data.keys():
|
||||||
for mat in data['materials']:
|
for mat in data['materials']:
|
||||||
target.materials.append(bpy.data.materials[mat])
|
target.materials.append(bpy.data.materials[mat])
|
||||||
|
|
||||||
def dump_implementation(self, data, pointer=None):
|
loader = Loader()
|
||||||
assert(pointer)
|
loader.load(target, data)
|
||||||
data = utils.dump_datablock(pointer, 2)
|
|
||||||
utils.dump_datablock_attibutes(
|
# TODO: reuse existing layer
|
||||||
pointer, ['layers'], 9, data)
|
for layer in target.layers:
|
||||||
|
target.layers.remove(layer)
|
||||||
|
|
||||||
|
if "layers" in data.keys():
|
||||||
|
for layer in data["layers"]:
|
||||||
|
layer_data = data["layers"].get(layer)
|
||||||
|
|
||||||
|
# if layer not in target.layers.keys():
|
||||||
|
target_layer = target.layers.new(data["layers"][layer]["info"])
|
||||||
|
# else:
|
||||||
|
# target_layer = target.layers[layer]
|
||||||
|
# target_layer.clear()
|
||||||
|
|
||||||
|
load_layer(layer_data, target_layer)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def _dump_implementation(self, data, instance=None):
|
||||||
|
assert(instance)
|
||||||
|
dumper = Dumper()
|
||||||
|
dumper.depth = 2
|
||||||
|
dumper.include_filter = [
|
||||||
|
'materials',
|
||||||
|
'name',
|
||||||
|
'zdepth_offset',
|
||||||
|
'stroke_thickness_space',
|
||||||
|
'pixel_factor',
|
||||||
|
'stroke_depth_order'
|
||||||
|
]
|
||||||
|
data = dumper.dump(instance)
|
||||||
|
|
||||||
|
data['layers'] = {}
|
||||||
|
|
||||||
|
for layer in instance.layers:
|
||||||
|
data['layers'][layer.info] = dump_layer(layer)
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def resolve_dependencies(self):
|
def _resolve_deps_implementation(self):
|
||||||
deps = []
|
deps = []
|
||||||
|
|
||||||
for material in self.pointer.materials:
|
for material in self.instance.materials:
|
||||||
deps.append(material)
|
deps.append(material)
|
||||||
|
|
||||||
return deps
|
return deps
|
||||||
|
|
||||||
def is_valid(self):
|
|
||||||
return bpy.data.grease_pencils.get(self.data['name'])
|
|
@ -1,8 +1,27 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
import mathutils
|
import mathutils
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from .. import utils
|
from .. import utils
|
||||||
|
from .dump_anything import Loader, Dumper
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock
|
||||||
|
|
||||||
def dump_image(image):
|
def dump_image(image):
|
||||||
@ -11,7 +30,9 @@ def dump_image(image):
|
|||||||
prefs = utils.get_preferences()
|
prefs = utils.get_preferences()
|
||||||
img_name = "{}.png".format(image.name)
|
img_name = "{}.png".format(image.name)
|
||||||
|
|
||||||
|
# Cache the image on the disk
|
||||||
image.filepath_raw = os.path.join(prefs.cache_directory, img_name)
|
image.filepath_raw = os.path.join(prefs.cache_directory, img_name)
|
||||||
|
os.makedirs(prefs.cache_directory, exist_ok=True)
|
||||||
image.file_format = "PNG"
|
image.file_format = "PNG"
|
||||||
image.save()
|
image.save()
|
||||||
|
|
||||||
@ -35,14 +56,14 @@ class BlImage(BlDatablock):
|
|||||||
bl_automatic_push = False
|
bl_automatic_push = False
|
||||||
bl_icon = 'IMAGE_DATA'
|
bl_icon = 'IMAGE_DATA'
|
||||||
|
|
||||||
def construct(self, data):
|
def _construct(self, data):
|
||||||
return bpy.data.images.new(
|
return bpy.data.images.new(
|
||||||
name=data['name'],
|
name=data['name'],
|
||||||
width=data['size'][0],
|
width=data['size'][0],
|
||||||
height=data['size'][1]
|
height=data['size'][1]
|
||||||
)
|
)
|
||||||
|
|
||||||
def load(self, data, target):
|
def _load(self, data, target):
|
||||||
image = target
|
image = target
|
||||||
prefs = utils.get_preferences()
|
prefs = utils.get_preferences()
|
||||||
|
|
||||||
@ -59,11 +80,11 @@ class BlImage(BlDatablock):
|
|||||||
image.colorspace_settings.name = data["colorspace_settings"]["name"]
|
image.colorspace_settings.name = data["colorspace_settings"]["name"]
|
||||||
|
|
||||||
|
|
||||||
def dump_implementation(self, data, pointer=None):
|
def _dump(self, instance=None):
|
||||||
assert(pointer)
|
assert(instance)
|
||||||
data = {}
|
data = {}
|
||||||
data['pixels'] = dump_image(pointer)
|
data['pixels'] = dump_image(instance)
|
||||||
dumper = utils.dump_anything.Dumper()
|
dumper = Dumper()
|
||||||
dumper.depth = 2
|
dumper.depth = 2
|
||||||
dumper.include_filter = [
|
dumper.include_filter = [
|
||||||
"name",
|
"name",
|
||||||
@ -74,12 +95,11 @@ class BlImage(BlDatablock):
|
|||||||
'filepath',
|
'filepath',
|
||||||
'source',
|
'source',
|
||||||
'colorspace_settings']
|
'colorspace_settings']
|
||||||
data.update(dumper.dump(pointer))
|
data.update(dumper.dump(instance))
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def diff(self):
|
def diff(self):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def is_valid(self):
|
|
||||||
return bpy.data.images.get(self.data['name'])
|
|
||||||
|
@ -1,9 +1,29 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
import mathutils
|
import mathutils
|
||||||
|
|
||||||
from .. import utils
|
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock
|
||||||
|
|
||||||
|
POINT = ['co', 'weight_softbody', 'co_deform']
|
||||||
|
|
||||||
|
|
||||||
class BlLattice(BlDatablock):
|
class BlLattice(BlDatablock):
|
||||||
bl_id = "lattices"
|
bl_id = "lattices"
|
||||||
@ -13,19 +33,20 @@ class BlLattice(BlDatablock):
|
|||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
bl_icon = 'LATTICE_DATA'
|
bl_icon = 'LATTICE_DATA'
|
||||||
|
|
||||||
def load(self, data, target):
|
def _construct(self, data):
|
||||||
utils.dump_anything.load(target, data)
|
|
||||||
|
|
||||||
for point in data['points']:
|
|
||||||
utils.dump_anything.load(target.points[point], data["points"][point])
|
|
||||||
def construct(self, data):
|
|
||||||
return bpy.data.lattices.new(data["name"])
|
return bpy.data.lattices.new(data["name"])
|
||||||
|
|
||||||
def dump(self, pointer=None):
|
def _load_implementation(self, data, target):
|
||||||
assert(pointer)
|
loader = Loader()
|
||||||
|
loader.load(target, data)
|
||||||
|
|
||||||
dumper = utils.dump_anything.Dumper()
|
np_load_collection(data['points'], target.points, POINT)
|
||||||
dumper.depth = 3
|
|
||||||
|
def _dump_implementation(self, data, instance=None):
|
||||||
|
assert(instance)
|
||||||
|
|
||||||
|
dumper = Dumper()
|
||||||
|
dumper.depth = 1
|
||||||
dumper.include_filter = [
|
dumper.include_filter = [
|
||||||
"name",
|
"name",
|
||||||
'type',
|
'type',
|
||||||
@ -35,18 +56,10 @@ class BlLattice(BlDatablock):
|
|||||||
'interpolation_type_u',
|
'interpolation_type_u',
|
||||||
'interpolation_type_v',
|
'interpolation_type_v',
|
||||||
'interpolation_type_w',
|
'interpolation_type_w',
|
||||||
'use_outside',
|
'use_outside'
|
||||||
'points',
|
|
||||||
'co',
|
|
||||||
'weight_softbody',
|
|
||||||
'co_deform'
|
|
||||||
]
|
]
|
||||||
data = dumper.dump(pointer)
|
data = dumper.dump(instance)
|
||||||
|
|
||||||
|
data['points'] = np_dump_collection(instance.points, POINT)
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def is_valid(self):
|
|
||||||
return bpy.data.lattices.get(self.data['name'])
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@ -1,7 +1,25 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
import mathutils
|
import mathutils
|
||||||
|
|
||||||
from .. import utils
|
from .dump_anything import Loader, Dumper
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock
|
||||||
|
|
||||||
|
|
||||||
@ -13,16 +31,16 @@ class BlLibrary(BlDatablock):
|
|||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
bl_icon = 'LIBRARY_DATA_DIRECT'
|
bl_icon = 'LIBRARY_DATA_DIRECT'
|
||||||
|
|
||||||
def construct(self, data):
|
def _construct(self, data):
|
||||||
with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData):
|
with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData):
|
||||||
targetData = sourceData
|
targetData = sourceData
|
||||||
return sourceData
|
return sourceData
|
||||||
def load(self, data, target):
|
def _load(self, data, target):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def dump(self, pointer=None):
|
def _dump(self, instance=None):
|
||||||
assert(pointer)
|
assert(instance)
|
||||||
return utils.dump_datablock(pointer, 1)
|
dumper = Dumper()
|
||||||
|
return dumper.dump(instance)
|
||||||
|
|
||||||
|
|
||||||
def is_valid(self):
|
|
||||||
return bpy.data.libraries.get(self.data['name'])
|
|
@ -1,7 +1,25 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
import mathutils
|
import mathutils
|
||||||
|
|
||||||
from .. import utils
|
from .dump_anything import Loader, Dumper
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock
|
||||||
|
|
||||||
|
|
||||||
@ -13,15 +31,16 @@ class BlLight(BlDatablock):
|
|||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
bl_icon = 'LIGHT_DATA'
|
bl_icon = 'LIGHT_DATA'
|
||||||
|
|
||||||
def construct(self, data):
|
def _construct(self, data):
|
||||||
return bpy.data.lights.new(data["name"], data["type"])
|
return bpy.data.lights.new(data["name"], data["type"])
|
||||||
|
|
||||||
def load(self, data, target):
|
def _load_implementation(self, data, target):
|
||||||
utils.dump_anything.load(target, data)
|
loader = Loader()
|
||||||
|
loader.load(target, data)
|
||||||
|
|
||||||
def dump_implementation(self, data, pointer=None):
|
def _dump_implementation(self, data, instance=None):
|
||||||
assert(pointer)
|
assert(instance)
|
||||||
dumper = utils.dump_anything.Dumper()
|
dumper = Dumper()
|
||||||
dumper.depth = 3
|
dumper.depth = 3
|
||||||
dumper.include_filter = [
|
dumper.include_filter = [
|
||||||
"name",
|
"name",
|
||||||
@ -41,11 +60,15 @@ class BlLight(BlDatablock):
|
|||||||
"contact_shadow_distance",
|
"contact_shadow_distance",
|
||||||
"contact_shadow_soft_size",
|
"contact_shadow_soft_size",
|
||||||
"contact_shadow_bias",
|
"contact_shadow_bias",
|
||||||
"contact_shadow_thickness"
|
"contact_shadow_thickness",
|
||||||
|
"shape",
|
||||||
|
"size_y",
|
||||||
|
"size",
|
||||||
|
"angle"
|
||||||
]
|
]
|
||||||
data = dumper.dump(pointer)
|
data = dumper.dump(instance)
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def is_valid(self):
|
|
||||||
return bpy.data.lights.get(self.data['name'])
|
|
||||||
|
|
||||||
|
@ -1,8 +1,26 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
import mathutils
|
import mathutils
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from .. import utils
|
from .dump_anything import Loader, Dumper
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@ -15,10 +33,7 @@ class BlLightprobe(BlDatablock):
|
|||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
bl_icon = 'LIGHTPROBE_GRID'
|
bl_icon = 'LIGHTPROBE_GRID'
|
||||||
|
|
||||||
def load(self, data, target):
|
def _construct(self, data):
|
||||||
utils.dump_anything.load(target, data)
|
|
||||||
|
|
||||||
def construct(self, data):
|
|
||||||
type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type']
|
type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type']
|
||||||
# See https://developer.blender.org/D6396
|
# See https://developer.blender.org/D6396
|
||||||
if bpy.app.version[1] >= 83:
|
if bpy.app.version[1] >= 83:
|
||||||
@ -26,15 +41,16 @@ class BlLightprobe(BlDatablock):
|
|||||||
else:
|
else:
|
||||||
logger.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
|
logger.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
|
||||||
|
|
||||||
|
def _load_implementation(self, data, target):
|
||||||
|
loader = Loader()
|
||||||
|
loader.load(target, data)
|
||||||
|
|
||||||
|
def _dump_implementation(self, data, instance=None):
|
||||||
|
assert(instance)
|
||||||
def dump(self, pointer=None):
|
|
||||||
assert(pointer)
|
|
||||||
if bpy.app.version[1] < 83:
|
if bpy.app.version[1] < 83:
|
||||||
logger.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
|
logger.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
|
||||||
|
|
||||||
dumper = utils.dump_anything.Dumper()
|
dumper = Dumper()
|
||||||
dumper.depth = 1
|
dumper.depth = 1
|
||||||
dumper.include_filter = [
|
dumper.include_filter = [
|
||||||
"name",
|
"name",
|
||||||
@ -57,7 +73,7 @@ class BlLightprobe(BlDatablock):
|
|||||||
'visibility_blur'
|
'visibility_blur'
|
||||||
]
|
]
|
||||||
|
|
||||||
return dumper.dump(pointer)
|
return dumper.dump(instance)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def is_valid(self):
|
|
||||||
return bpy.data.lattices.get(self.data['name'])
|
|
||||||
|
@ -1,117 +1,212 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
import mathutils
|
import mathutils
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from .. import utils
|
from .. import utils
|
||||||
|
from .dump_anything import Loader, Dumper
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
def clean_color_ramp(target_ramp):
|
|
||||||
# clear existing
|
|
||||||
try:
|
|
||||||
for key in target_ramp.elements:
|
|
||||||
target_ramp.elements.remove(key)
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
def load_mapping(target_apping, source_mapping):
|
def load_node(node_data, node_tree):
|
||||||
# clear existing curves
|
""" Load a node into a node_tree from a dict
|
||||||
for curve in target_apping.curves:
|
|
||||||
for point in curve.points:
|
|
||||||
try:
|
|
||||||
curve.remove(point)
|
|
||||||
except:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Load curves
|
:arg node_data: dumped node data
|
||||||
for curve in source_mapping['curves']:
|
:type node_data: dict
|
||||||
for point in source_mapping['curves'][curve]['points']:
|
:arg node_tree: target node_tree
|
||||||
pos = source_mapping['curves'][curve]['points'][point]['location']
|
:type node_tree: bpy.types.NodeTree
|
||||||
target_apping.curves[curve].points.new(pos[0],pos[1])
|
"""
|
||||||
|
loader = Loader()
|
||||||
|
target_node = node_tree.nodes.new(type=node_data["bl_idname"])
|
||||||
|
|
||||||
|
loader.load(target_node, node_data)
|
||||||
|
|
||||||
|
|
||||||
def load_node(target_node_tree, source):
|
|
||||||
target_node = target_node_tree.nodes.get(source["name"])
|
|
||||||
|
|
||||||
if target_node is None:
|
for input in node_data["inputs"]:
|
||||||
node_type = source["bl_idname"]
|
|
||||||
|
|
||||||
target_node = target_node_tree.nodes.new(type=node_type)
|
|
||||||
|
|
||||||
# Clean color ramp before loading it
|
|
||||||
if source['type'] == 'VALTORGB':
|
|
||||||
clean_color_ramp(target_node.color_ramp)
|
|
||||||
if source['type'] == 'CURVE_RGB':
|
|
||||||
load_mapping(target_node.mapping, source['mapping'])
|
|
||||||
utils.dump_anything.load(
|
|
||||||
target_node,
|
|
||||||
source)
|
|
||||||
|
|
||||||
if source['type'] == 'TEX_IMAGE':
|
|
||||||
target_node.image = bpy.data.images[source['image']]
|
|
||||||
|
|
||||||
|
|
||||||
for input in source["inputs"]:
|
|
||||||
if hasattr(target_node.inputs[input], "default_value"):
|
if hasattr(target_node.inputs[input], "default_value"):
|
||||||
try:
|
try:
|
||||||
target_node.inputs[input].default_value = source["inputs"][input]["default_value"]
|
target_node.inputs[input].default_value = node_data["inputs"][input]["default_value"]
|
||||||
except:
|
except:
|
||||||
logger.error("{} not supported, skipping".format(input))
|
logger.error("{} not supported, skipping".format(input))
|
||||||
|
|
||||||
def load_link(target_node_tree, source):
|
|
||||||
input_socket = target_node_tree.nodes[source['to_node']
|
|
||||||
['name']].inputs[source['to_socket']['name']]
|
|
||||||
output_socket = target_node_tree.nodes[source['from_node']
|
|
||||||
['name']].outputs[source['from_socket']['name']]
|
|
||||||
|
|
||||||
target_node_tree.links.new(input_socket, output_socket)
|
def load_links(links_data, node_tree):
|
||||||
|
""" Load node_tree links from a list
|
||||||
|
|
||||||
|
:arg links_data: dumped node links
|
||||||
|
:type links_data: list
|
||||||
|
:arg node_tree: target node tree
|
||||||
|
:type node_tree: bpy.types.NodeTree
|
||||||
|
"""
|
||||||
|
|
||||||
|
for link in links_data:
|
||||||
|
input_socket = node_tree.nodes[link['to_node']].inputs[int(link['to_socket'])]
|
||||||
|
output_socket = node_tree.nodes[link['from_node']].outputs[int(link['from_socket'])]
|
||||||
|
|
||||||
|
node_tree.links.new(input_socket, output_socket)
|
||||||
|
|
||||||
|
|
||||||
|
def dump_links(links):
|
||||||
|
""" Dump node_tree links collection to a list
|
||||||
|
|
||||||
|
:arg links: node links collection
|
||||||
|
:type links: bpy.types.NodeLinks
|
||||||
|
:return: list
|
||||||
|
"""
|
||||||
|
|
||||||
|
links_data = []
|
||||||
|
|
||||||
|
for link in links:
|
||||||
|
links_data.append({
|
||||||
|
'to_node':link.to_node.name,
|
||||||
|
'to_socket':link.to_socket.path_from_id()[-2:-1],
|
||||||
|
'from_node':link.from_node.name,
|
||||||
|
'from_socket':link.from_socket.path_from_id()[-2:-1],
|
||||||
|
})
|
||||||
|
|
||||||
|
return links_data
|
||||||
|
|
||||||
|
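dump_links above encodes each socket by slicing the last index character out of its RNA path. The snippet below illustrates the assumption behind path_from_id()[-2:-1]; the example path is made up, and note that the slice only captures a single digit, so sockets with index 10 or higher would be truncated:
# Illustrative RNA path as returned by socket.path_from_id() (hypothetical value).
path = 'nodes["Principled BSDF"].inputs[5]'
socket_index = path[-2:-1]   # character just before the closing ']' -> '5'
assert int(socket_index) == 5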
|
||||||
|
def dump_node(node):
|
||||||
|
""" Dump a single node to a dict
|
||||||
|
|
||||||
|
:arg node: target node
|
||||||
|
:type node: bpy.types.Node
|
||||||
|
:return: dict
|
||||||
|
"""
|
||||||
|
|
||||||
|
node_dumper = Dumper()
|
||||||
|
node_dumper.depth = 1
|
||||||
|
node_dumper.exclude_filter = [
|
||||||
|
"dimensions",
|
||||||
|
"show_expanded",
|
||||||
|
"name_full",
|
||||||
|
"select",
|
||||||
|
"bl_height_min",
|
||||||
|
"bl_height_max",
|
||||||
|
"bl_height_default",
|
||||||
|
"bl_width_min",
|
||||||
|
"bl_width_max",
|
||||||
|
"type",
|
||||||
|
"bl_icon",
|
||||||
|
"bl_width_default",
|
||||||
|
"bl_static_type",
|
||||||
|
"show_tetxure",
|
||||||
|
"is_active_output",
|
||||||
|
"hide",
|
||||||
|
"show_options",
|
||||||
|
"show_preview",
|
||||||
|
"show_texture",
|
||||||
|
"outputs",
|
||||||
|
"width_hidden"
|
||||||
|
]
|
||||||
|
|
||||||
|
dumped_node = node_dumper.dump(node)
|
||||||
|
|
||||||
|
if hasattr(node, 'inputs'):
|
||||||
|
dumped_node['inputs'] = {}
|
||||||
|
|
||||||
|
for i in node.inputs:
|
||||||
|
input_dumper = Dumper()
|
||||||
|
input_dumper.depth = 2
|
||||||
|
input_dumper.include_filter = ["default_value"]
|
||||||
|
|
||||||
|
if hasattr(i, 'default_value'):
|
||||||
|
dumped_node['inputs'][i.name] = input_dumper.dump(
|
||||||
|
i)
|
||||||
|
if hasattr(node, 'color_ramp'):
|
||||||
|
ramp_dumper = Dumper()
|
||||||
|
ramp_dumper.depth = 4
|
||||||
|
ramp_dumper.include_filter = [
|
||||||
|
'elements',
|
||||||
|
'alpha',
|
||||||
|
'color',
|
||||||
|
'position'
|
||||||
|
]
|
||||||
|
dumped_node['color_ramp'] = ramp_dumper.dump(node.color_ramp)
|
||||||
|
if hasattr(node, 'mapping'):
|
||||||
|
curve_dumper = Dumper()
|
||||||
|
curve_dumper.depth = 5
|
||||||
|
curve_dumper.include_filter = [
|
||||||
|
'curves',
|
||||||
|
'points',
|
||||||
|
'location'
|
||||||
|
]
|
||||||
|
dumped_node['mapping'] = curve_dumper.dump(node.mapping)
|
||||||
|
|
||||||
|
return dumped_node
|
||||||
|
|
||||||
|
|
||||||
class BlMaterial(BlDatablock):
|
class BlMaterial(BlDatablock):
|
||||||
bl_id = "materials"
|
bl_id = "materials"
|
||||||
bl_class = bpy.types.Material
|
bl_class = bpy.types.Material
|
||||||
bl_delay_refresh = 10
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 10
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
bl_icon = 'MATERIAL_DATA'
|
bl_icon = 'MATERIAL_DATA'
|
||||||
|
|
||||||
def construct(self, data):
|
def _construct(self, data):
|
||||||
return bpy.data.materials.new(data["name"])
|
return bpy.data.materials.new(data["name"])
|
||||||
|
|
||||||
def load_implementation(self, data, target):
|
def _load_implementation(self, data, target):
|
||||||
|
loader = Loader()
|
||||||
target.name = data['name']
|
target.name = data['name']
|
||||||
if data['is_grease_pencil']:
|
if data['is_grease_pencil']:
|
||||||
if not target.is_grease_pencil:
|
if not target.is_grease_pencil:
|
||||||
bpy.data.materials.create_gpencil_data(target)
|
bpy.data.materials.create_gpencil_data(target)
|
||||||
|
|
||||||
utils.dump_anything.load(
|
loader.load(
|
||||||
target.grease_pencil, data['grease_pencil'])
|
target.grease_pencil, data['grease_pencil'])
|
||||||
|
|
||||||
utils.load_dict(data['grease_pencil'], target.grease_pencil)
|
|
||||||
|
|
||||||
elif data["use_nodes"]:
|
if data["use_nodes"]:
|
||||||
if target.node_tree is None:
|
if target.node_tree is None:
|
||||||
target.use_nodes = True
|
target.use_nodes = True
|
||||||
|
|
||||||
target.node_tree.nodes.clear()
|
target.node_tree.nodes.clear()
|
||||||
|
|
||||||
utils.dump_anything.load(target,data)
|
loader.load(target,data)
|
||||||
|
|
||||||
# Load nodes
|
# Load nodes
|
||||||
for node in data["node_tree"]["nodes"]:
|
for node in data["node_tree"]["nodes"]:
|
||||||
load_node(target.node_tree, data["node_tree"]["nodes"][node])
|
load_node(data["node_tree"]["nodes"][node], target.node_tree)
|
||||||
|
|
||||||
# Load nodes links
|
# Load nodes links
|
||||||
target.node_tree.links.clear()
|
target.node_tree.links.clear()
|
||||||
|
|
||||||
for link in data["node_tree"]["links"]:
|
load_links(data["node_tree"]["links"], target.node_tree)
|
||||||
load_link(target.node_tree, data["node_tree"]["links"][link])
|
|
||||||
|
|
||||||
def dump_implementation(self, data, pointer=None):
|
def _dump_implementation(self, data, instance=None):
|
||||||
assert(pointer)
|
assert(instance)
|
||||||
mat_dumper = utils.dump_anything.Dumper()
|
mat_dumper = Dumper()
|
||||||
mat_dumper.depth = 2
|
mat_dumper.depth = 2
|
||||||
mat_dumper.exclude_filter = [
|
mat_dumper.exclude_filter = [
|
||||||
|
"is_embed_data",
|
||||||
|
"is_evaluated",
|
||||||
|
"name_full",
|
||||||
|
"bl_description",
|
||||||
|
"bl_icon",
|
||||||
|
"bl_idname",
|
||||||
|
"bl_label",
|
||||||
"preview",
|
"preview",
|
||||||
"original",
|
"original",
|
||||||
"uuid",
|
"uuid",
|
||||||
@ -120,89 +215,55 @@ class BlMaterial(BlDatablock):
|
|||||||
"line_color",
|
"line_color",
|
||||||
"view_center",
|
"view_center",
|
||||||
]
|
]
|
||||||
node_dumper = utils.dump_anything.Dumper()
|
data = mat_dumper.dump(instance)
|
||||||
node_dumper.depth = 1
|
|
||||||
node_dumper.exclude_filter = [
|
|
||||||
"dimensions",
|
|
||||||
"show_expanded"
|
|
||||||
"select",
|
|
||||||
"bl_height_min",
|
|
||||||
"bl_height_max",
|
|
||||||
"bl_width_min",
|
|
||||||
"bl_width_max",
|
|
||||||
"bl_width_default",
|
|
||||||
"hide",
|
|
||||||
"show_options",
|
|
||||||
"show_tetxures",
|
|
||||||
"show_preview",
|
|
||||||
"outputs",
|
|
||||||
"width_hidden"
|
|
||||||
]
|
|
||||||
input_dumper = utils.dump_anything.Dumper()
|
|
||||||
input_dumper.depth = 2
|
|
||||||
input_dumper.include_filter = ["default_value"]
|
|
||||||
links_dumper = utils.dump_anything.Dumper()
|
|
||||||
links_dumper.depth = 3
|
|
||||||
links_dumper.include_filter = [
|
|
||||||
"name",
|
|
||||||
"to_node",
|
|
||||||
"from_node",
|
|
||||||
"from_socket",
|
|
||||||
"to_socket"]
|
|
||||||
data = mat_dumper.dump(pointer)
|
|
||||||
|
|
||||||
if pointer.use_nodes:
|
if instance.use_nodes:
|
||||||
nodes = {}
|
nodes = {}
|
||||||
|
for node in instance.node_tree.nodes:
|
||||||
for node in pointer.node_tree.nodes:
|
nodes[node.name] = dump_node(node)
|
||||||
nodes[node.name] = node_dumper.dump(node)
|
|
||||||
|
|
||||||
if hasattr(node, 'inputs'):
|
|
||||||
nodes[node.name]['inputs'] = {}
|
|
||||||
|
|
||||||
for i in node.inputs:
|
|
||||||
if hasattr(i, 'default_value'):
|
|
||||||
nodes[node.name]['inputs'][i.name] = input_dumper.dump(
|
|
||||||
i)
|
|
||||||
if hasattr(node, 'color_ramp'):
|
|
||||||
ramp_dumper = utils.dump_anything.Dumper()
|
|
||||||
ramp_dumper.depth = 4
|
|
||||||
ramp_dumper.include_filter = [
|
|
||||||
'elements',
|
|
||||||
'alpha',
|
|
||||||
'color',
|
|
||||||
'position'
|
|
||||||
]
|
|
||||||
nodes[node.name]['color_ramp'] = ramp_dumper.dump(node.color_ramp)
|
|
||||||
if hasattr(node, 'mapping'):
|
|
||||||
curve_dumper = utils.dump_anything.Dumper()
|
|
||||||
curve_dumper.depth = 5
|
|
||||||
curve_dumper.include_filter = [
|
|
||||||
'curves',
|
|
||||||
'points',
|
|
||||||
'location'
|
|
||||||
]
|
|
||||||
nodes[node.name]['mapping'] = curve_dumper.dump(node.mapping)
|
|
||||||
data["node_tree"]['nodes'] = nodes
|
data["node_tree"]['nodes'] = nodes
|
||||||
data["node_tree"]["links"] = links_dumper.dump(pointer.node_tree.links)
|
|
||||||
|
|
||||||
elif pointer.is_grease_pencil:
|
data["node_tree"]["links"] = dump_links(instance.node_tree.links)
|
||||||
utils.dump_datablock_attibutes(pointer, ["grease_pencil"], 3, data)
|
|
||||||
|
if instance.is_grease_pencil:
|
||||||
|
gp_mat_dumper = Dumper()
|
||||||
|
gp_mat_dumper.depth = 3
|
||||||
|
|
||||||
|
gp_mat_dumper.include_filter = [
|
||||||
|
'show_stroke',
|
||||||
|
'mode',
|
||||||
|
'stroke_style',
|
||||||
|
'color',
|
||||||
|
'use_overlap_strokes',
|
||||||
|
'show_fill',
|
||||||
|
'fill_style',
|
||||||
|
'fill_color',
|
||||||
|
'pass_index',
|
||||||
|
'alignment_mode',
|
||||||
|
# 'fill_image',
|
||||||
|
'texture_opacity',
|
||||||
|
'mix_factor',
|
||||||
|
'texture_offset',
|
||||||
|
'texture_angle',
|
||||||
|
'texture_scale',
|
||||||
|
'texture_clamp',
|
||||||
|
'gradient_type',
|
||||||
|
'mix_color',
|
||||||
|
'flip'
|
||||||
|
]
|
||||||
|
data['grease_pencil'] = gp_mat_dumper.dump(instance.grease_pencil)
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def resolve_dependencies(self):
|
def _resolve_deps_implementation(self):
|
||||||
# TODO: resolve node group deps
|
# TODO: resolve node group deps
|
||||||
deps = []
|
deps = []
|
||||||
|
|
||||||
if self.pointer.use_nodes:
|
if self.instance.use_nodes:
|
||||||
for node in self.pointer.node_tree.nodes:
|
for node in self.instance.node_tree.nodes:
|
||||||
if node.type == 'TEX_IMAGE':
|
if node.type == 'TEX_IMAGE':
|
||||||
deps.append(node.image)
|
deps.append(node.image)
|
||||||
if self.is_library:
|
if self.is_library:
|
||||||
deps.append(self.pointer.library)
|
deps.append(self.instance.library)
|
||||||
|
|
||||||
return deps
|
return deps
|
||||||
|
|
||||||
def is_valid(self):
|
|
||||||
return bpy.data.materials.get(self.data['name'])
|
|
||||||
|
|
||||||
|
@ -1,166 +1,162 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
import bmesh
|
import bmesh
|
||||||
import mathutils
|
import mathutils
|
||||||
|
import logging
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
from .. import utils
|
from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
|
||||||
from ..libs.replication.replication.constants import DIFF_BINARY
|
from ..libs.replication.replication.constants import DIFF_BINARY
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
def dump_mesh(mesh, data={}):
|
VERTICE = ['co']
|
||||||
import bmesh
|
|
||||||
|
|
||||||
mesh_data = data
|
EDGE = [
|
||||||
mesh_buffer = bmesh.new()
|
'vertices',
|
||||||
|
'crease',
|
||||||
# https://blog.michelanders.nl/2016/02/copying-vertices-to-numpy-arrays-in_4.html
|
'bevel_weight',
|
||||||
mesh_buffer.from_mesh(mesh)
|
]
|
||||||
|
LOOP = [
|
||||||
uv_layer = mesh_buffer.loops.layers.uv.verify()
|
'vertex_index',
|
||||||
bevel_layer = mesh_buffer.verts.layers.bevel_weight.verify()
|
'normal',
|
||||||
skin_layer = mesh_buffer.verts.layers.skin.verify()
|
]
|
||||||
|
|
||||||
verts = {}
|
|
||||||
for vert in mesh_buffer.verts:
|
|
||||||
v = {}
|
|
||||||
v["co"] = list(vert.co)
|
|
||||||
|
|
||||||
# vert metadata
|
|
||||||
v['bevel'] = vert[bevel_layer]
|
|
||||||
v['normal'] = list(vert.normal)
|
|
||||||
# v['skin'] = list(vert[skin_layer])
|
|
||||||
|
|
||||||
verts[str(vert.index)] = v
|
|
||||||
|
|
||||||
mesh_data["verts"] = verts
|
|
||||||
|
|
||||||
edges = {}
|
|
||||||
for edge in mesh_buffer.edges:
|
|
||||||
e = {}
|
|
||||||
e["verts"] = [edge.verts[0].index, edge.verts[1].index]
|
|
||||||
|
|
||||||
# Edge metadata
|
|
||||||
e["smooth"] = edge.smooth
|
|
||||||
|
|
||||||
edges[edge.index] = e
|
|
||||||
mesh_data["edges"] = edges
|
|
||||||
|
|
||||||
faces = {}
|
|
||||||
for face in mesh_buffer.faces:
|
|
||||||
f = {}
|
|
||||||
fverts = []
|
|
||||||
for vert in face.verts:
|
|
||||||
fverts.append(vert.index)
|
|
||||||
|
|
||||||
f["verts"] = fverts
|
|
||||||
f["material_index"] = face.material_index
|
|
||||||
f["smooth"] = face.smooth
|
|
||||||
f["normal"] = list(face.normal)
|
|
||||||
f["index"] = face.index
|
|
||||||
|
|
||||||
uvs = []
|
|
||||||
# Face metadata
|
|
||||||
for loop in face.loops:
|
|
||||||
loop_uv = loop[uv_layer]
|
|
||||||
|
|
||||||
uvs.append(list(loop_uv.uv))
|
|
||||||
|
|
||||||
f["uv"] = uvs
|
|
||||||
faces[face.index] = f
|
|
||||||
|
|
||||||
mesh_data["faces"] = faces
|
|
||||||
|
|
||||||
uv_layers = []
|
|
||||||
for uv_layer in mesh.uv_layers:
|
|
||||||
uv_layers.append(uv_layer.name)
|
|
||||||
|
|
||||||
mesh_data["uv_layers"] = uv_layers
|
|
||||||
# return mesh_data
|
|
||||||
|
|
||||||
|
POLYGON = [
|
||||||
|
'loop_total',
|
||||||
|
'loop_start',
|
||||||
|
'use_smooth',
|
||||||
|
'material_index',
|
||||||
|
]
|
||||||
|
|
||||||
class BlMesh(BlDatablock):
|
class BlMesh(BlDatablock):
|
||||||
bl_id = "meshes"
|
bl_id = "meshes"
|
||||||
bl_class = bpy.types.Mesh
|
bl_class = bpy.types.Mesh
|
||||||
bl_delay_refresh = 10
|
bl_delay_refresh = 2
|
||||||
bl_delay_apply = 10
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
bl_icon = 'MESH_DATA'
|
bl_icon = 'MESH_DATA'
|
||||||
|
|
||||||
def construct(self, data):
|
def _construct(self, data):
|
||||||
instance = bpy.data.meshes.new(data["name"])
|
instance = bpy.data.meshes.new(data["name"])
|
||||||
instance.uuid = self.uuid
|
instance.uuid = self.uuid
|
||||||
return instance
|
return instance
|
||||||
|
|
||||||
def load_implementation(self, data, target):
|
def _load_implementation(self, data, target):
|
||||||
if not target or not target.is_editmode:
|
if not target or not target.is_editmode:
|
||||||
# 1 - LOAD MATERIAL SLOTS
|
loader = Loader()
|
||||||
# SLots
|
loader.load(target, data)
|
||||||
i = 0
|
|
||||||
|
# MATERIAL SLOTS
|
||||||
|
target.materials.clear()
|
||||||
|
|
||||||
for m in data["material_list"]:
|
for m in data["material_list"]:
|
||||||
target.materials.append(bpy.data.materials[m])
|
target.materials.append(bpy.data.materials[m])
|
||||||
|
|
||||||
# 2 - LOAD GEOMETRY
|
# CLEAR GEOMETRY
|
||||||
mesh_buffer = bmesh.new()
|
if target.vertices:
|
||||||
|
target.clear_geometry()
|
||||||
|
|
||||||
for i in data["verts"]:
|
target.vertices.add(data["vertex_count"])
|
||||||
v = mesh_buffer.verts.new(data["verts"][i]["co"])
|
target.edges.add(data["egdes_count"])
|
||||||
v.normal = data["verts"][i]["normal"]
|
target.loops.add(data["loop_count"])
|
||||||
mesh_buffer.verts.ensure_lookup_table()
|
target.polygons.add(data["poly_count"])
|
||||||
|
|
||||||
for i in data["edges"]:
|
# LOADING
|
||||||
verts = mesh_buffer.verts
|
np_load_collection(data['vertices'], target.vertices, VERTICE)
|
||||||
v1 = data["edges"][i]["verts"][0]
|
np_load_collection(data['edges'], target.edges, EDGE)
|
||||||
v2 = data["edges"][i]["verts"][1]
|
np_load_collection(data['loops'], target.loops, LOOP)
|
||||||
edge = mesh_buffer.edges.new([verts[v1], verts[v2]])
|
np_load_collection(data["polygons"],target.polygons, POLYGON)
|
||||||
edge.smooth = data["edges"][i]["smooth"]
|
|
||||||
|
|
||||||
mesh_buffer.edges.ensure_lookup_table()
|
# UV Layers
|
||||||
for p in data["faces"]:
|
for layer in data['uv_layers']:
|
||||||
verts = []
|
if layer not in target.uv_layers:
|
||||||
for v in data["faces"][p]["verts"]:
|
target.uv_layers.new(name=layer)
|
||||||
-            verts.append(mesh_buffer.verts[v])
-
-        if len(verts) > 0:
-            f = mesh_buffer.faces.new(verts)
-
-            uv_layer = mesh_buffer.loops.layers.uv.verify()
-
-            f.smooth = data["faces"][p]["smooth"]
-            f.normal = data["faces"][p]["normal"]
-            f.index = data["faces"][p]["index"]
-            f.material_index = data["faces"][p]['material_index']
-            # UV loading
-            for i, loop in enumerate(f.loops):
-                loop_uv = loop[uv_layer]
-                loop_uv.uv = data["faces"][p]["uv"][i]
-        mesh_buffer.faces.ensure_lookup_table()
-        mesh_buffer.to_mesh(target)
-
-        # 3 - LOAD METADATA
-        # uv's
-        utils.dump_anything.load(target.uv_layers, data['uv_layers'])
-
-        bevel_layer = mesh_buffer.verts.layers.bevel_weight.verify()
-        skin_layer = mesh_buffer.verts.layers.skin.verify()
-
-        utils.dump_anything.load(target, data)
+        if len(verts) > 0:
+            np_load_collection_primitives(
+                target.uv_layers[layer].data,
+                'uv',
+                data["uv_layers"][layer]['data'])
+
+        # Vertex color
+        for color_layer in data['vertex_colors']:
+            if color_layer not in target.vertex_colors:
+                target.vertex_colors.new(name=color_layer)
+
+            np_load_collection_primitives(
+                target.vertex_colors[color_layer].data,
+                'color',
+                data["vertex_colors"][color_layer]['data'])
+
+        target.validate()
+        target.update()

-    def dump_implementation(self, data, pointer=None):
-        assert(pointer)
-
-        dumper = utils.dump_anything.Dumper()
-        dumper.depth = 2
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
+
+        mesh = instance
+        dumper = Dumper()
+        dumper.depth = 1
         dumper.include_filter = [
             'name',
             'use_auto_smooth',
-            'auto_smooth_angle'
+            'auto_smooth_angle',
+            'use_customdata_edge_bevel',
+            'use_customdata_edge_crease'
         ]
-        data = dumper.dump(pointer)
-        dump_mesh(pointer, data)
+        data = dumper.dump(mesh)
+
+        # VERTICES
+        data["vertex_count"] = len(mesh.vertices)
+        data["vertices"] = np_dump_collection(mesh.vertices, VERTICE)
+
+        # EDGES
+        data["egdes_count"] = len(mesh.edges)
+        data["edges"] = np_dump_collection(mesh.edges, EDGE)
+
+        # POLYGONS
+        data["poly_count"] = len(mesh.polygons)
+        data["polygons"] = np_dump_collection(mesh.polygons, POLYGON)
+
+        # LOOPS
+        data["loop_count"] = len(mesh.loops)
+        data["loops"] = np_dump_collection(mesh.loops, LOOP)
+
+        # UV Layers
+        data['uv_layers'] = {}
+        for layer in mesh.uv_layers:
+            data['uv_layers'][layer.name] = {}
+            data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive(layer.data, 'uv')
+
+        # Vertex color
+        data['vertex_colors'] = {}
+        for color_map in mesh.vertex_colors:
+            data['vertex_colors'][color_map.name] = {}
+            data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')

         # Fix material index
         m_list = []
-        for material in pointer.materials:
+        for material in instance.materials:
             if material:
                 m_list.append(material.name)

@ -168,14 +164,11 @@ class BlMesh(BlDatablock):

         return data

-    def resolve_dependencies(self):
+    def _resolve_deps_implementation(self):
         deps = []

-        for material in self.pointer.materials:
+        for material in self.instance.materials:
             if material:
                 deps.append(material)

         return deps
-
-    def is_valid(self):
-        return bpy.data.meshes.get(self.data['name'])
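The rewritten mesh load/dump above leans on the numpy helpers from dump_anything: whole collections are read into flat buffers with foreach_get() and written back with foreach_set(), instead of looping over bmesh faces. A minimal round trip, to be run inside Blender, assuming the add-on is importable as multi_user and taking VERTICE to be just ['co'] for the sake of the sketch:

import bpy

# Assumption: 'multi_user' is importable and VERTICE holds at least the vertex coordinates.
from multi_user.bl_types.dump_anything import np_dump_collection, np_load_collection

VERTICE = ['co']

src = bpy.data.meshes['Cube']                 # any mesh with vertices
dst = bpy.data.meshes.new('Cube_replica')
dst.vertices.add(len(src.vertices))           # destination needs the same vertex count

dumped = np_dump_collection(src.vertices, VERTICE)   # {'co': <bytes>}
np_load_collection(dumped, dst.vertices, VERTICE)    # bulk foreach_set() of the buffer
dst.update()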
@ -1,10 +1,67 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+
 import bpy
 import mathutils

-from .. import utils
+from .dump_anything import (
+    Dumper, Loader, np_dump_collection_primitive, np_load_collection_primitives,
+    np_dump_collection, np_load_collection)
 from .bl_datablock import BlDatablock


+ELEMENT = [
+    'co',
+    'hide',
+    'radius',
+    'rotation',
+    'size_x',
+    'size_y',
+    'size_z',
+    'stiffness',
+    'type'
+]
+
+
+def dump_metaball_elements(elements):
+    """ Dump a metaball element
+
+    :arg element: metaball element
+    :type bpy.types.MetaElement
+    :return: dict
+    """
+
+    dumped_elements = np_dump_collection(elements, ELEMENT)
+
+    return dumped_elements
+
+
+def load_metaball_elements(elements_data, elements):
+    """ Dump a metaball element
+
+    :arg element: metaball element
+    :type bpy.types.MetaElement
+    :return: dict
+    """
+    np_load_collection(elements_data, elements, ELEMENT)
+
+
 class BlMetaball(BlDatablock):
     bl_id = "metaballs"
     bl_class = bpy.types.MetaBall
@ -13,25 +70,36 @@ class BlMetaball(BlDatablock):
     bl_automatic_push = True
     bl_icon = 'META_BALL'

-    def construct(self, data):
+    def _construct(self, data):
         return bpy.data.metaballs.new(data["name"])

-    def load(self, data, target):
-        utils.dump_anything.load(target, data)
+    def _load_implementation(self, data, target):
+        loader = Loader()
+        loader.load(target, data)

         target.elements.clear()
-        for element in data["elements"]:
-            new_element = target.elements.new(type=data["elements"][element]['type'])
-            utils.dump_anything.load(new_element, data["elements"][element])
-
-    def dump_implementation(self, data, pointer=None):
-        assert(pointer)
-        dumper = utils.dump_anything.Dumper()
-        dumper.depth = 3
-        dumper.exclude_filter = ["is_editmode"]
+
+        for mtype in data["elements"]['type']:
+            new_element = target.elements.new()
+
+        load_metaball_elements(data['elements'], target.elements)
+
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
+        dumper = Dumper()
+        dumper.depth = 1
+        dumper.include_filter = [
+            'name',
+            'resolution',
+            'render_resolution',
+            'threshold',
+            'update_method',
+            'use_auto_texspace',
+            'texspace_location',
+            'texspace_size'
+        ]

-        data = dumper.dump(pointer)
+        data = dumper.dump(instance)
+        data['elements'] = dump_metaball_elements(instance.elements)
+
         return data
-
-    def is_valid(self):
-        return bpy.data.metaballs.get(self.data['name'])
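dump_metaball_elements()/load_metaball_elements() wrap the same numpy helpers for metaball elements: the element count is recreated from the dumped 'type' list before the bulk load, exactly as _load_implementation does above. A small sketch, assuming the module lives at multi_user/bl_types/bl_metaball.py like the other bl_types modules:

import bpy

# Assumption: module path multi_user/bl_types/bl_metaball.py.
from multi_user.bl_types.bl_metaball import dump_metaball_elements, load_metaball_elements

src = bpy.data.metaballs.new('mball_src')
src.elements.new(type='BALL')
src.elements[0].radius = 3.0

dumped = dump_metaball_elements(src.elements)        # dict keyed by the ELEMENT attributes

dst = bpy.data.metaballs.new('mball_dst')
for _ in dumped['type']:                             # recreate one element per dumped entry
    dst.elements.new()
load_metaball_elements(dumped, dst.elements)         # bulk-load co, radius, stiffness, ...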
@ -1,33 +1,35 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
import mathutils
|
import mathutils
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from .. import utils
|
from .dump_anything import Loader, Dumper
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def load_constraints(target, data):
|
|
||||||
for local_constraint in target.constraints:
|
|
||||||
if local_constraint.name not in data:
|
|
||||||
target.constraints.remove(local_constraint)
|
|
||||||
|
|
||||||
for constraint in data:
|
|
||||||
target_constraint = target.constraints.get(constraint)
|
|
||||||
|
|
||||||
if not target_constraint:
|
|
||||||
target_constraint = target.constraints.new(
|
|
||||||
data[constraint]['type'])
|
|
||||||
|
|
||||||
utils.dump_anything.load(
|
|
||||||
target_constraint, data[constraint])
|
|
||||||
|
|
||||||
|
|
||||||
def load_pose(target_bone, data):
|
def load_pose(target_bone, data):
|
||||||
target_bone.rotation_mode = data['rotation_mode']
|
target_bone.rotation_mode = data['rotation_mode']
|
||||||
|
loader = Loader()
|
||||||
utils.dump_anything.load(target_bone, data)
|
loader.load(target_bone, data)
|
||||||
|
|
||||||
|
|
||||||
class BlObject(BlDatablock):
|
class BlObject(BlDatablock):
|
||||||
@ -38,8 +40,8 @@ class BlObject(BlDatablock):
|
|||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
bl_icon = 'OBJECT_DATA'
|
bl_icon = 'OBJECT_DATA'
|
||||||
|
|
||||||
def construct(self, data):
|
def _construct(self, data):
|
||||||
pointer = None
|
instance = None
|
||||||
|
|
||||||
if self.is_library:
|
if self.is_library:
|
||||||
with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
|
with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
|
||||||
@ -50,69 +52,45 @@ class BlObject(BlDatablock):
|
|||||||
instance.uuid = self.uuid
|
instance.uuid = self.uuid
|
||||||
return instance
|
return instance
|
||||||
|
|
||||||
# Object specific constructor...
|
# TODO: refactoring
|
||||||
if "data" not in data:
|
if "data" not in data:
|
||||||
pass
|
pass
|
||||||
elif data["data"] in bpy.data.meshes.keys():
|
elif data["data"] in bpy.data.meshes.keys():
|
||||||
pointer = bpy.data.meshes[data["data"]]
|
instance = bpy.data.meshes[data["data"]]
|
||||||
elif data["data"] in bpy.data.lights.keys():
|
elif data["data"] in bpy.data.lights.keys():
|
||||||
pointer = bpy.data.lights[data["data"]]
|
instance = bpy.data.lights[data["data"]]
|
||||||
elif data["data"] in bpy.data.cameras.keys():
|
elif data["data"] in bpy.data.cameras.keys():
|
||||||
pointer = bpy.data.cameras[data["data"]]
|
instance = bpy.data.cameras[data["data"]]
|
||||||
elif data["data"] in bpy.data.curves.keys():
|
elif data["data"] in bpy.data.curves.keys():
|
||||||
pointer = bpy.data.curves[data["data"]]
|
instance = bpy.data.curves[data["data"]]
|
||||||
elif data["data"] in bpy.data.metaballs.keys():
|
elif data["data"] in bpy.data.metaballs.keys():
|
||||||
pointer = bpy.data.metaballs[data["data"]]
|
instance = bpy.data.metaballs[data["data"]]
|
||||||
elif data["data"] in bpy.data.armatures.keys():
|
elif data["data"] in bpy.data.armatures.keys():
|
||||||
pointer = bpy.data.armatures[data["data"]]
|
instance = bpy.data.armatures[data["data"]]
|
||||||
elif data["data"] in bpy.data.grease_pencils.keys():
|
elif data["data"] in bpy.data.grease_pencils.keys():
|
||||||
pointer = bpy.data.grease_pencils[data["data"]]
|
instance = bpy.data.grease_pencils[data["data"]]
|
||||||
elif data["data"] in bpy.data.curves.keys():
|
elif data["data"] in bpy.data.curves.keys():
|
||||||
pointer = bpy.data.curves[data["data"]]
|
instance = bpy.data.curves[data["data"]]
|
||||||
elif data["data"] in bpy.data.lattices.keys():
|
elif data["data"] in bpy.data.lattices.keys():
|
||||||
pointer = bpy.data.lattices[data["data"]]
|
instance = bpy.data.lattices[data["data"]]
|
||||||
elif data["data"] in bpy.data.speakers.keys():
|
elif data["data"] in bpy.data.speakers.keys():
|
||||||
pointer = bpy.data.speakers[data["data"]]
|
instance = bpy.data.speakers[data["data"]]
|
||||||
elif data["data"] in bpy.data.lightprobes.keys():
|
elif data["data"] in bpy.data.lightprobes.keys():
|
||||||
# Only supported since 2.83
|
# Only supported since 2.83
|
||||||
if bpy.app.version[1] >= 83:
|
if bpy.app.version[1] >= 83:
|
||||||
pointer = bpy.data.lightprobes[data["data"]]
|
instance = bpy.data.lightprobes[data["data"]]
|
||||||
else:
|
else:
|
||||||
logger.warning(
|
logger.warning(
|
||||||
"Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
|
"Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
|
||||||
instance = bpy.data.objects.new(data["name"], pointer)
|
instance = bpy.data.objects.new(data["name"], instance)
|
||||||
instance.uuid = self.uuid
|
instance.uuid = self.uuid
|
||||||
|
|
||||||
return instance
|
return instance
|
||||||
|
|
||||||
def load_implementation(self, data, target):
|
def _load_implementation(self, data, target):
|
||||||
# Load transformation data
|
# Load transformation data
|
||||||
rot_mode = 'rotation_quaternion' if data['rotation_mode'] == 'QUATERNION' else 'rotation_euler'
|
loader = Loader()
|
||||||
target.rotation_mode = data['rotation_mode']
|
loader.load(target, data)
|
||||||
target.location = data['location']
|
|
||||||
setattr(target, rot_mode, data[rot_mode])
|
|
||||||
target.scale = data['scale']
|
|
||||||
|
|
||||||
target.name = data["name"]
|
|
||||||
# Load modifiers
|
|
||||||
if hasattr(target, 'modifiers'):
|
|
||||||
# TODO: smarter selective update
|
|
||||||
target.modifiers.clear()
|
|
||||||
|
|
||||||
for modifier in data['modifiers']:
|
|
||||||
target_modifier = target.modifiers.get(modifier)
|
|
||||||
|
|
||||||
if not target_modifier:
|
|
||||||
target_modifier = target.modifiers.new(
|
|
||||||
data['modifiers'][modifier]['name'], data['modifiers'][modifier]['type'])
|
|
||||||
|
|
||||||
utils.dump_anything.load(
|
|
||||||
target_modifier, data['modifiers'][modifier])
|
|
||||||
|
|
||||||
# Load constraints
|
|
||||||
# Object
|
|
||||||
if hasattr(target, 'constraints') and 'constraints' in data:
|
|
||||||
load_constraints(target, data['constraints'])
|
|
||||||
|
|
||||||
# Pose
|
# Pose
|
||||||
if 'pose' in data:
|
if 'pose' in data:
|
||||||
@ -126,7 +104,7 @@ class BlObject(BlDatablock):
|
|||||||
if not bg_target:
|
if not bg_target:
|
||||||
bg_target = target.pose.bone_groups.new(name=bg_name)
|
bg_target = target.pose.bone_groups.new(name=bg_name)
|
||||||
|
|
||||||
utils.dump_anything.load(bg_target, bg_data)
|
loader.load(bg_target, bg_data)
|
||||||
# target.pose.bone_groups.get
|
# target.pose.bone_groups.get
|
||||||
|
|
||||||
# Bones
|
# Bones
|
||||||
@ -135,28 +113,14 @@ class BlObject(BlDatablock):
|
|||||||
bone_data = data['pose']['bones'].get(bone)
|
bone_data = data['pose']['bones'].get(bone)
|
||||||
|
|
||||||
if 'constraints' in bone_data.keys():
|
if 'constraints' in bone_data.keys():
|
||||||
load_constraints(
|
loader.load(target_bone, bone_data['constraints'])
|
||||||
target_bone, bone_data['constraints'])
|
|
||||||
|
|
||||||
load_pose(target_bone, bone_data)
|
load_pose(target_bone, bone_data)
|
||||||
|
|
||||||
if 'bone_index' in bone_data.keys():
|
if 'bone_index' in bone_data.keys():
|
||||||
target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']]
|
target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']]
|
||||||
|
|
||||||
# Load relations
|
|
||||||
if 'children' in data.keys():
|
|
||||||
for child in data['children']:
|
|
||||||
bpy.data.objects[child].parent = self.pointer
|
|
||||||
|
|
||||||
# Load empty representation
|
|
||||||
target.empty_display_size = data['empty_display_size']
|
|
||||||
target.empty_display_type = data['empty_display_type']
|
|
||||||
|
|
||||||
# Instancing
|
|
||||||
target.instance_type = data['instance_type']
|
|
||||||
if data['instance_type'] == 'COLLECTION':
|
|
||||||
target.instance_collection = bpy.data.collections[data['instance_collection']]
|
|
||||||
|
|
||||||
# vertex groups
|
# vertex groups
|
||||||
if 'vertex_groups' in data:
|
if 'vertex_groups' in data:
|
||||||
target.vertex_groups.clear()
|
target.vertex_groups.clear()
|
||||||
@ -177,7 +141,7 @@ class BlObject(BlDatablock):
|
|||||||
key_data = data['shape_keys']['key_blocks'][key_block]
|
key_data = data['shape_keys']['key_blocks'][key_block]
|
||||||
target.shape_key_add(name=key_block)
|
target.shape_key_add(name=key_block)
|
||||||
|
|
||||||
utils.dump_anything.load(
|
loader.load(
|
||||||
target.data.shape_keys.key_blocks[key_block], key_data)
|
target.data.shape_keys.key_blocks[key_block], key_data)
|
||||||
for vert in key_data['data']:
|
for vert in key_data['data']:
|
||||||
target.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
|
target.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
|
||||||
@ -188,9 +152,9 @@ class BlObject(BlDatablock):
|
|||||||
|
|
||||||
target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference]
|
target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference]
|
||||||
|
|
||||||
def dump_implementation(self, data, pointer=None):
|
def _dump_implementation(self, data, instance=None):
|
||||||
assert(pointer)
|
assert(instance)
|
||||||
dumper = utils.dump_anything.Dumper()
|
dumper = Dumper()
|
||||||
dumper.depth = 1
|
dumper.depth = 1
|
||||||
dumper.include_filter = [
|
dumper.include_filter = [
|
||||||
"name",
|
"name",
|
||||||
@ -205,34 +169,33 @@ class BlObject(BlDatablock):
|
|||||||
"instance_type",
|
"instance_type",
|
||||||
"location",
|
"location",
|
||||||
"scale",
|
"scale",
|
||||||
'rotation_quaternion' if pointer.rotation_mode == 'QUATERNION' else 'rotation_euler',
|
'rotation_quaternion' if instance.rotation_mode == 'QUATERNION' else 'rotation_euler',
|
||||||
]
|
]
|
||||||
|
|
||||||
data = dumper.dump(pointer)
|
data = dumper.dump(instance)
|
||||||
|
|
||||||
if self.is_library:
|
if self.is_library:
|
||||||
return data
|
return data
|
||||||
|
|
||||||
# MODIFIERS
|
# MODIFIERS
|
||||||
if hasattr(pointer, 'modifiers'):
|
if hasattr(instance, 'modifiers'):
|
||||||
dumper.include_filter = None
|
dumper.include_filter = None
|
||||||
dumper.depth = 2
|
dumper.depth = 2
|
||||||
data["modifiers"] = {}
|
data["modifiers"] = {}
|
||||||
for index, modifier in enumerate(pointer.modifiers):
|
for index, modifier in enumerate(instance.modifiers):
|
||||||
data["modifiers"][modifier.name] = dumper.dump(modifier)
|
data["modifiers"][modifier.name] = dumper.dump(modifier)
|
||||||
data["modifiers"][modifier.name]['m_index'] = index
|
|
||||||
|
|
||||||
# CONSTRAINTS
|
# CONSTRAINTS
|
||||||
# OBJECT
|
# OBJECT
|
||||||
if hasattr(pointer, 'constraints'):
|
if hasattr(instance, 'constraints'):
|
||||||
dumper.depth = 3
|
dumper.depth = 3
|
||||||
data["constraints"] = dumper.dump(pointer.constraints)
|
data["constraints"] = dumper.dump(instance.constraints)
|
||||||
|
|
||||||
# POSE
|
# POSE
|
||||||
if hasattr(pointer, 'pose') and pointer.pose:
|
if hasattr(instance, 'pose') and instance.pose:
|
||||||
# BONES
|
# BONES
|
||||||
bones = {}
|
bones = {}
|
||||||
for bone in pointer.pose.bones:
|
for bone in instance.pose.bones:
|
||||||
bones[bone.name] = {}
|
bones[bone.name] = {}
|
||||||
dumper.depth = 1
|
dumper.depth = 1
|
||||||
rotation = 'rotation_quaternion' if bone.rotation_mode == 'QUATERNION' else 'rotation_euler'
|
rotation = 'rotation_quaternion' if bone.rotation_mode == 'QUATERNION' else 'rotation_euler'
|
||||||
@ -257,7 +220,7 @@ class BlObject(BlDatablock):
|
|||||||
|
|
||||||
# GROUPS
|
# GROUPS
|
||||||
bone_groups = {}
|
bone_groups = {}
|
||||||
for group in pointer.pose.bone_groups:
|
for group in instance.pose.bone_groups:
|
||||||
dumper.depth = 3
|
dumper.depth = 3
|
||||||
dumper.include_filter = [
|
dumper.include_filter = [
|
||||||
'name',
|
'name',
|
||||||
@ -267,24 +230,24 @@ class BlObject(BlDatablock):
|
|||||||
data['pose']['bone_groups'] = bone_groups
|
data['pose']['bone_groups'] = bone_groups
|
||||||
|
|
||||||
# CHILDS
|
# CHILDS
|
||||||
if len(pointer.children) > 0:
|
if len(instance.children) > 0:
|
||||||
childs = []
|
childs = []
|
||||||
for child in pointer.children:
|
for child in instance.children:
|
||||||
childs.append(child.name)
|
childs.append(child.name)
|
||||||
|
|
||||||
data["children"] = childs
|
data["children"] = childs
|
||||||
|
|
||||||
# VERTEx GROUP
|
# VERTEx GROUP
|
||||||
if len(pointer.vertex_groups) > 0:
|
if len(instance.vertex_groups) > 0:
|
||||||
vg_data = []
|
vg_data = []
|
||||||
for vg in pointer.vertex_groups:
|
for vg in instance.vertex_groups:
|
||||||
vg_idx = vg.index
|
vg_idx = vg.index
|
||||||
dumped_vg = {}
|
dumped_vg = {}
|
||||||
dumped_vg['name'] = vg.name
|
dumped_vg['name'] = vg.name
|
||||||
|
|
||||||
vertices = []
|
vertices = []
|
||||||
|
|
||||||
for v in pointer.data.vertices:
|
for v in instance.data.vertices:
|
||||||
for vg in v.groups:
|
for vg in v.groups:
|
||||||
if vg.group == vg_idx:
|
if vg.group == vg_idx:
|
||||||
vertices.append({
|
vertices.append({
|
||||||
@ -299,18 +262,18 @@ class BlObject(BlDatablock):
|
|||||||
data['vertex_groups'] = vg_data
|
data['vertex_groups'] = vg_data
|
||||||
|
|
||||||
# SHAPE KEYS
|
# SHAPE KEYS
|
||||||
pointer_data = pointer.data
|
object_data = instance.data
|
||||||
if hasattr(pointer_data, 'shape_keys') and pointer_data.shape_keys:
|
if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
|
||||||
dumper = utils.dump_anything.Dumper()
|
dumper = Dumper()
|
||||||
dumper.depth = 2
|
dumper.depth = 2
|
||||||
dumper.include_filter = [
|
dumper.include_filter = [
|
||||||
'reference_key',
|
'reference_key',
|
||||||
'use_relative'
|
'use_relative'
|
||||||
]
|
]
|
||||||
data['shape_keys'] = dumper.dump(pointer_data.shape_keys)
|
data['shape_keys'] = dumper.dump(object_data.shape_keys)
|
||||||
data['shape_keys']['reference_key'] = pointer_data.shape_keys.reference_key.name
|
data['shape_keys']['reference_key'] = object_data.shape_keys.reference_key.name
|
||||||
key_blocks = {}
|
key_blocks = {}
|
||||||
for key in pointer_data.shape_keys.key_blocks:
|
for key in object_data.shape_keys.key_blocks:
|
||||||
dumper.depth = 3
|
dumper.depth = 3
|
||||||
dumper.include_filter = [
|
dumper.include_filter = [
|
||||||
'name',
|
'name',
|
||||||
@ -328,23 +291,21 @@ class BlObject(BlDatablock):
|
|||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def resolve_dependencies(self):
|
def _resolve_deps_implementation(self):
|
||||||
deps = super().resolve_dependencies()
|
deps = []
|
||||||
|
|
||||||
# Avoid Empty case
|
# Avoid Empty case
|
||||||
if self.pointer.data:
|
if self.instance.data:
|
||||||
deps.append(self.pointer.data)
|
deps.append(self.instance.data)
|
||||||
if len(self.pointer.children) > 0:
|
if len(self.instance.children) > 0:
|
||||||
deps.extend(list(self.pointer.children))
|
deps.extend(list(self.instance.children))
|
||||||
|
|
||||||
if self.is_library:
|
if self.is_library:
|
||||||
deps.append(self.pointer.library)
|
deps.append(self.instance.library)
|
||||||
|
|
||||||
if self.pointer.instance_type == 'COLLECTION':
|
if self.instance.instance_type == 'COLLECTION':
|
||||||
# TODO: uuid based
|
# TODO: uuid based
|
||||||
deps.append(self.pointer.instance_collection)
|
deps.append(self.instance.instance_collection)
|
||||||
|
|
||||||
return deps
|
return deps
|
||||||
|
|
||||||
def is_valid(self):
|
|
||||||
return bpy.data.objects.get(self.data['name'])
|
|
||||||
|
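The object datablock above now dumps only an explicit include_filter (picking rotation_quaternion or rotation_euler depending on rotation_mode) and restores everything with a single Loader().load() call instead of assigning each attribute by hand. A hedged sketch of that transform round trip, not the add-on's exact filter list:

import bpy
from multi_user.bl_types.dump_anything import Dumper, Loader

obj = bpy.data.objects['Cube']            # assumes the default Cube object exists

dumper = Dumper()
dumper.depth = 1
dumper.include_filter = [
    'rotation_mode',
    'location',
    'scale',
    'rotation_quaternion' if obj.rotation_mode == 'QUATERNION' else 'rotation_euler',
]
data = dumper.dump(obj)      # plain dict: {'location': [...], 'scale': [...], ...}

# On the receiving side a single Loader call restores the transform.
Loader().load(obj, data)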
@ -1,9 +1,29 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
import mathutils
|
import mathutils
|
||||||
|
|
||||||
from .. import utils
|
from .dump_anything import Loader, Dumper
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock
|
||||||
|
|
||||||
|
from ..utils import get_preferences
|
||||||
|
|
||||||
class BlScene(BlDatablock):
|
class BlScene(BlDatablock):
|
||||||
bl_id = "scenes"
|
bl_id = "scenes"
|
||||||
bl_class = bpy.types.Scene
|
bl_class = bpy.types.Scene
|
||||||
@ -12,15 +32,14 @@ class BlScene(BlDatablock):
|
|||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
bl_icon = 'SCENE_DATA'
|
bl_icon = 'SCENE_DATA'
|
||||||
|
|
||||||
def construct(self, data):
|
def _construct(self, data):
|
||||||
instance = bpy.data.scenes.new(data["name"])
|
instance = bpy.data.scenes.new(data["name"])
|
||||||
instance.uuid = self.uuid
|
|
||||||
return instance
|
return instance
|
||||||
|
|
||||||
def load(self, data, target):
|
def _load_implementation(self, data, target):
|
||||||
target = self.pointer
|
|
||||||
# Load other meshes metadata
|
# Load other meshes metadata
|
||||||
utils.dump_anything.load(target, data)
|
loader = Loader()
|
||||||
|
loader.load(target, data)
|
||||||
|
|
||||||
# Load master collection
|
# Load master collection
|
||||||
for object in data["collection"]["objects"]:
|
for object in data["collection"]["objects"]:
|
||||||
@ -49,42 +68,95 @@ class BlScene(BlDatablock):
|
|||||||
if 'grease_pencil' in data.keys():
|
if 'grease_pencil' in data.keys():
|
||||||
target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
|
target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
|
||||||
|
|
||||||
def dump_implementation(self, data, pointer=None):
|
if 'eevee' in data.keys():
|
||||||
assert(pointer)
|
loader.load(target.eevee, data['eevee'])
|
||||||
|
|
||||||
|
if 'cycles' in data.keys():
|
||||||
|
loader.load(target.eevee, data['cycles'])
|
||||||
|
|
||||||
|
if 'view_settings' in data.keys():
|
||||||
|
loader.load(target.view_settings, data['view_settings'])
|
||||||
|
if target.view_settings.use_curve_mapping:
|
||||||
|
#TODO: change this ugly fix
|
||||||
|
target.view_settings.curve_mapping.white_level = data['view_settings']['curve_mapping']['white_level']
|
||||||
|
target.view_settings.curve_mapping.black_level = data['view_settings']['curve_mapping']['black_level']
|
||||||
|
target.view_settings.curve_mapping.update()
|
||||||
|
|
||||||
|
def _dump_implementation(self, data, instance=None):
|
||||||
|
assert(instance)
|
||||||
data = {}
|
data = {}
|
||||||
|
|
||||||
scene_dumper = utils.dump_anything.Dumper()
|
scene_dumper = Dumper()
|
||||||
scene_dumper.depth = 1
|
scene_dumper.depth = 1
|
||||||
scene_dumper.include_filter = ['name','world', 'id', 'camera', 'grease_pencil']
|
scene_dumper.include_filter = [
|
||||||
data = scene_dumper.dump(pointer)
|
'name',
|
||||||
|
'world',
|
||||||
|
'id',
|
||||||
|
'camera',
|
||||||
|
'grease_pencil',
|
||||||
|
]
|
||||||
|
data = scene_dumper.dump(instance)
|
||||||
|
|
||||||
scene_dumper.depth = 3
|
scene_dumper.depth = 3
|
||||||
|
|
||||||
scene_dumper.include_filter = ['children','objects','name']
|
scene_dumper.include_filter = ['children','objects','name']
|
||||||
data['collection'] = scene_dumper.dump(pointer.collection)
|
data['collection'] = scene_dumper.dump(instance.collection)
|
||||||
|
|
||||||
|
scene_dumper.depth = 1
|
||||||
|
scene_dumper.include_filter = None
|
||||||
|
|
||||||
|
pref = get_preferences()
|
||||||
|
|
||||||
|
if pref.sync_flags.sync_render_settings:
|
||||||
|
scene_dumper.exclude_filter = [
|
||||||
|
'gi_cache_info',
|
||||||
|
'feature_set',
|
||||||
|
'debug_use_hair_bvh',
|
||||||
|
'aa_samples',
|
||||||
|
'blur_glossy',
|
||||||
|
'glossy_bounces',
|
||||||
|
'device',
|
||||||
|
'max_bounces',
|
||||||
|
'preview_aa_samples',
|
||||||
|
'preview_samples',
|
||||||
|
'sample_clamp_indirect',
|
||||||
|
'samples',
|
||||||
|
'volume_bounces'
|
||||||
|
]
|
||||||
|
data['eevee'] = scene_dumper.dump(instance.eevee)
|
||||||
|
data['cycles'] = scene_dumper.dump(instance.cycles)
|
||||||
|
data['view_settings'] = scene_dumper.dump(instance.view_settings)
|
||||||
|
data['view_settings']['curve_mapping'] = scene_dumper.dump(instance.view_settings.curve_mapping)
|
||||||
|
|
||||||
|
if instance.view_settings.use_curve_mapping:
|
||||||
|
scene_dumper.depth = 5
|
||||||
|
scene_dumper.include_filter = [
|
||||||
|
'curves',
|
||||||
|
'points',
|
||||||
|
'location'
|
||||||
|
]
|
||||||
|
data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(instance.view_settings.curve_mapping.curves)
|
||||||
|
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def resolve_dependencies(self):
|
def _resolve_deps_implementation(self):
|
||||||
deps = []
|
deps = []
|
||||||
|
|
||||||
# child collections
|
# child collections
|
||||||
for child in self.pointer.collection.children:
|
for child in self.instance.collection.children:
|
||||||
deps.append(child)
|
deps.append(child)
|
||||||
|
|
||||||
# childs objects
|
# childs objects
|
||||||
for object in self.pointer.objects:
|
for object in self.instance.objects:
|
||||||
deps.append(object)
|
deps.append(object)
|
||||||
|
|
||||||
# world
|
# world
|
||||||
if self.pointer.world:
|
if self.instance.world:
|
||||||
deps.append(self.pointer.world)
|
deps.append(self.instance.world)
|
||||||
|
|
||||||
# annotations
|
# annotations
|
||||||
if self.pointer.grease_pencil:
|
if self.instance.grease_pencil:
|
||||||
deps.append(self.pointer.grease_pencil)
|
deps.append(self.instance.grease_pencil)
|
||||||
|
|
||||||
return deps
|
return deps
|
||||||
|
|
||||||
def is_valid(self):
|
|
||||||
return bpy.data.scenes.get(self.data['name'])
|
|
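The scene changes above (still shown in raw two-column form) make render-settings replication opt-in: when the sync_render_settings flag from the add-on preferences is set, eevee, cycles and view_settings are dumped with an exclude_filter and re-applied through Loader on the receiving side. A rough sketch of the same gating, assuming get_preferences() exposes the sync_flags group used in the diff:

import bpy
from multi_user.bl_types.dump_anything import Dumper, Loader
from multi_user.utils import get_preferences   # assumption: same helper the diff imports

scene = bpy.context.scene
data = {}

if get_preferences().sync_flags.sync_render_settings:
    dumper = Dumper()
    dumper.depth = 1
    dumper.exclude_filter = ['gi_cache_info']          # trimmed version of the diff's filter
    data['eevee'] = dumper.dump(scene.eevee)
    data['view_settings'] = dumper.dump(scene.view_settings)

# receiving side
loader = Loader()
if 'eevee' in data:
    loader.load(scene.eevee, data['eevee'])
if 'view_settings' in data:
    loader.load(scene.view_settings, data['view_settings'])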
@ -1,7 +1,25 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+
 import bpy
 import mathutils

-from .. import utils
+from .dump_anything import Loader, Dumper
 from .bl_datablock import BlDatablock


@ -13,16 +31,17 @@ class BlSpeaker(BlDatablock):
     bl_automatic_push = True
     bl_icon = 'SPEAKER'

-    def load(self, data, target):
-        utils.dump_anything.load(target, data)
+    def _load_implementation(self, data, target):
+        loader = Loader()
+        loader.load(target, data)

-    def construct(self, data):
+    def _construct(self, data):
         return bpy.data.speakers.new(data["name"])

-    def dump(self, pointer=None):
-        assert(pointer)
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)

-        dumper = utils.dump_anything.Dumper()
+        dumper = Dumper()
         dumper.depth = 1
         dumper.include_filter = [
             "muted",
@ -39,8 +58,8 @@ class BlSpeaker(BlDatablock):
             'cone_volume_outer'
         ]

-        return dumper.dump(pointer)
+        return dumper.dump(instance)
-
-    def is_valid(self):
-        return bpy.data.lattices.get(self.data['name'])
@ -1,23 +1,41 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+
 import bpy
 import mathutils

-from .. import utils
+from .dump_anything import Loader, Dumper
 from .bl_datablock import BlDatablock
-from .bl_material import load_link, load_node
+from .bl_material import load_links, load_node, dump_node, dump_links


 class BlWorld(BlDatablock):
     bl_id = "worlds"
     bl_class = bpy.types.World
-    bl_delay_refresh = 4
-    bl_delay_apply = 4
+    bl_delay_refresh = 1
+    bl_delay_apply = 1
     bl_automatic_push = True
     bl_icon = 'WORLD_DATA'

-    def construct(self, data):
+    def _construct(self, data):
         return bpy.data.worlds.new(data["name"])

-    def load(self, data, target):
+    def _load_implementation(self, data, target):
         if data["use_nodes"]:
             if target.node_tree is None:
                 target.use_nodes = True
@ -25,18 +43,18 @@ class BlWorld(BlDatablock):
             target.node_tree.nodes.clear()

             for node in data["node_tree"]["nodes"]:
-                load_node(target.node_tree, data["node_tree"]["nodes"][node])
+                load_node(data["node_tree"]["nodes"][node], target.node_tree)

             # Load nodes links
             target.node_tree.links.clear()

-            for link in data["node_tree"]["links"]:
-                load_link(target.node_tree, data["node_tree"]["links"][link])
-
-    def dump_implementation(self, data, pointer=None):
-        assert(pointer)
-
-        world_dumper = utils.dump_anything.Dumper()
+            load_links(data["node_tree"]["links"], target.node_tree)
+
+    def _dump_implementation(self, data, instance=None):
+        assert(instance)
+
+        world_dumper = Dumper()
         world_dumper.depth = 2
         world_dumper.exclude_filter = [
             "preview",
@ -48,59 +66,27 @@ class BlWorld(BlDatablock):
             "users",
             "view_center"
         ]
-        data = world_dumper.dump(pointer)
-        if pointer.use_nodes:
+        data = world_dumper.dump(instance)
+        if instance.use_nodes:
             nodes = {}
-            dumper = utils.dump_anything.Dumper()
-            dumper.depth = 2
-            dumper.exclude_filter = [
-                "dimensions",
-                "select",
-                "bl_height_min",
-                "bl_height_max",
-                "bl_width_min",
-                "bl_width_max",
-                "bl_width_default",
-                "hide",
-                "show_options",
-                "show_tetxures",
-                "show_preview",
-                "outputs",
-                "preview",
-                "original",
-                "width_hidden",
-
-            ]
-
-            for node in pointer.node_tree.nodes:
-                nodes[node.name] = dumper.dump(node)
-
-                if hasattr(node, 'inputs'):
-                    nodes[node.name]['inputs'] = {}
-
-                    for i in node.inputs:
-                        input_dumper = utils.dump_anything.Dumper()
-                        input_dumper.depth = 2
-                        input_dumper.include_filter = ["default_value"]
-                        if hasattr(i, 'default_value'):
-                            nodes[node.name]['inputs'][i.name] = input_dumper.dump(
-                                i)
+
+            for node in instance.node_tree.nodes:
+                nodes[node.name] = dump_node(node)
+
             data["node_tree"]['nodes'] = nodes
-            utils.dump_datablock_attibutes(
-                pointer.node_tree, ["links"], 3, data['node_tree'])
+            data["node_tree"]['links'] = dump_links(instance.node_tree.links)

         return data

-    def resolve_dependencies(self):
+    def _resolve_deps_implementation(self):
         deps = []

-        if self.pointer.use_nodes:
-            for node in self.pointer.node_tree.nodes:
+        if self.instance.use_nodes:
+            for node in self.instance.node_tree.nodes:
                 if node.type == 'TEX_IMAGE':
                     deps.append(node.image)
         if self.is_library:
-            deps.append(self.pointer.library)
+            deps.append(self.instance.library)
         return deps
-
-    def is_valid(self):
-        return bpy.data.worlds.get(self.data['name'])
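World node trees are now serialised through bl_material's dump_node()/dump_links() and rebuilt with load_node()/load_links(). The signatures below are inferred from the call sites in this diff, so treat the sketch as an assumption rather than the module's documented API:

import bpy

# Assumption: signatures inferred from the call sites above
# (dump_node(node), dump_links(links), load_node(node_data, node_tree),
#  load_links(links_data, node_tree)).
from multi_user.bl_types.bl_material import dump_node, dump_links, load_node, load_links

world = bpy.data.worlds['World']
world.use_nodes = True

dumped = {
    'nodes': {node.name: dump_node(node) for node in world.node_tree.nodes},
    'links': dump_links(world.node_tree.links),
}

world.node_tree.nodes.clear()
for node_data in dumped['nodes'].values():
    load_node(node_data, world.node_tree)

world.node_tree.links.clear()
load_links(dumped['links'], world.node_tree)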
multi_user/bl_types/dump_anything.py (new file, 666 lines)
@ -0,0 +1,666 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
import bpy.types as T
|
||||||
|
import mathutils
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
BPY_TO_NUMPY_TYPES = {
|
||||||
|
'FLOAT': np.float,
|
||||||
|
'INT': np.int,
|
||||||
|
'BOOL': np.bool}
|
||||||
|
|
||||||
|
PRIMITIVE_TYPES = ['FLOAT', 'INT', 'BOOLEAN']
|
||||||
|
|
||||||
|
NP_COMPATIBLE_TYPES = ['FLOAT', 'INT', 'BOOLEAN', 'ENUM']
|
||||||
|
|
||||||
|
|
||||||
|
def np_load_collection(dikt: dict, collection: bpy.types.CollectionProperty, attributes: list = None):
|
||||||
|
""" Dump a list of attributes from the sane collection
|
||||||
|
to the target dikt.
|
||||||
|
|
||||||
|
Without attribute given, it try to load all entry from dikt.
|
||||||
|
|
||||||
|
:arg dikt: target dict
|
||||||
|
:type dikt: dict
|
||||||
|
:arg collection: source collection
|
||||||
|
:type collection: bpy.types.CollectionProperty
|
||||||
|
:arg attributes: list of attributes name
|
||||||
|
:type attributes: list
|
||||||
|
"""
|
||||||
|
if not dikt or len(collection) == 0:
|
||||||
|
logger.warning(f'Skipping collection')
|
||||||
|
return
|
||||||
|
|
||||||
|
if attributes is None:
|
||||||
|
attributes = dikt.keys()
|
||||||
|
|
||||||
|
for attr in attributes:
|
||||||
|
attr_type = collection[0].bl_rna.properties.get(attr).type
|
||||||
|
|
||||||
|
if attr_type in PRIMITIVE_TYPES:
|
||||||
|
np_load_collection_primitives(collection, attr, dikt[attr])
|
||||||
|
elif attr_type == 'ENUM':
|
||||||
|
np_load_collection_enum(collection, attr, dikt[attr])
|
||||||
|
else:
|
||||||
|
logger.error(f"{attr} of type {attr_type} not supported.")
|
||||||
|
|
||||||
|
|
||||||
|
def np_dump_collection(collection: bpy.types.CollectionProperty, attributes: list = None) -> dict:
|
||||||
|
""" Dump a list of attributes from the sane collection
|
||||||
|
to the target dikt
|
||||||
|
|
||||||
|
Without attributes given, it try to dump all properties
|
||||||
|
that matches NP_COMPATIBLE_TYPES.
|
||||||
|
|
||||||
|
:arg collection: source collection
|
||||||
|
:type collection: bpy.types.CollectionProperty
|
||||||
|
:arg attributes: list of attributes name
|
||||||
|
:type attributes: list
|
||||||
|
:retrun: dict
|
||||||
|
"""
|
||||||
|
dumped_collection = {}
|
||||||
|
|
||||||
|
if len(collection) == 0:
|
||||||
|
return dumped_collection
|
||||||
|
|
||||||
|
# TODO: find a way without getting the first item
|
||||||
|
properties = collection[0].bl_rna.properties
|
||||||
|
|
||||||
|
if attributes is None:
|
||||||
|
attributes = [p.identifier for p in properties if p.type in NP_COMPATIBLE_TYPES and not p.is_readonly]
|
||||||
|
|
||||||
|
for attr in attributes:
|
||||||
|
attr_type = properties[attr].type
|
||||||
|
|
||||||
|
if attr_type in PRIMITIVE_TYPES:
|
||||||
|
dumped_collection[attr] = np_dump_collection_primitive(
|
||||||
|
collection, attr)
|
||||||
|
elif attr_type == 'ENUM':
|
||||||
|
dumped_collection[attr] = np_dump_collection_enum(collection, attr)
|
||||||
|
else:
|
||||||
|
logger.error(f"{attr} of type {attr_type} not supported. Only {PRIMITIVE_TYPES} and ENUM supported. Skipping it.")
|
||||||
|
|
||||||
|
return dumped_collection
|
||||||
|
|
||||||
|
|
||||||
|
def np_dump_collection_primitive(collection: bpy.types.CollectionProperty, attribute: str) -> str:
|
||||||
|
""" Dump a collection attribute as a sequence
|
||||||
|
|
||||||
|
!!! warning
|
||||||
|
Only work with int, float and bool attributes
|
||||||
|
|
||||||
|
:arg collection: target collection
|
||||||
|
:type collection: bpy.types.CollectionProperty
|
||||||
|
:arg attribute: target attribute
|
||||||
|
:type attribute: str
|
||||||
|
:return: numpy byte buffer
|
||||||
|
"""
|
||||||
|
if len(collection) == 0:
|
||||||
|
logger.warning(f'Skipping empty {attribute} attribute')
|
||||||
|
return {}
|
||||||
|
|
||||||
|
attr_infos = collection[0].bl_rna.properties.get(attribute)
|
||||||
|
|
||||||
|
assert(attr_infos.type in ['FLOAT', 'INT', 'BOOLEAN'])
|
||||||
|
|
||||||
|
|
||||||
|
size = sum(attr_infos.array_dimensions) if attr_infos.is_array else 1
|
||||||
|
|
||||||
|
dumped_sequence = np.zeros(
|
||||||
|
len(collection)*size,
|
||||||
|
dtype=BPY_TO_NUMPY_TYPES.get(attr_infos.type))
|
||||||
|
|
||||||
|
collection.foreach_get(attribute, dumped_sequence)
|
||||||
|
|
||||||
|
return dumped_sequence.tobytes()
|
||||||
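np_dump_collection_primitive() returns the raw bytes of a single foreach_get() pass, and np_load_collection_primitives() feeds the same buffer back through foreach_set(). A quick round trip to sanity-check that path, assuming it runs inside Blender with the default Cube mesh present:

import bpy
from multi_user.bl_types.dump_anything import (np_dump_collection_primitive,
                                               np_load_collection_primitives)

mesh = bpy.data.meshes['Cube']

raw = np_dump_collection_primitive(mesh.vertices, 'co')    # bytes: len(vertices) * 3 floats
np_load_collection_primitives(mesh.vertices, 'co', raw)    # writes the same values back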
|
|
||||||
|
|
||||||
|
def np_dump_collection_enum(collection: bpy.types.CollectionProperty, attribute: str) -> list:
|
||||||
|
""" Dump a collection enum attribute to an index list
|
||||||
|
|
||||||
|
:arg collection: target collection
|
||||||
|
:type collection: bpy.types.CollectionProperty
|
||||||
|
:arg attribute: target attribute
|
||||||
|
:type attribute: bpy.types.EnumProperty
|
||||||
|
:return: list of int
|
||||||
|
"""
|
||||||
|
attr_infos = collection[0].bl_rna.properties.get(attribute)
|
||||||
|
|
||||||
|
assert(attr_infos.type == 'ENUM')
|
||||||
|
|
||||||
|
enum_items = attr_infos.enum_items
|
||||||
|
return [enum_items[getattr(i, attribute)].value for i in collection]
|
||||||
|
|
||||||
|
|
||||||
|
def np_load_collection_enum(collection: bpy.types.CollectionProperty, attribute: str, sequence: list):
|
||||||
|
""" Load a collection enum attribute from a list sequence
|
||||||
|
|
||||||
|
!!! warning
|
||||||
|
Only work with Enum
|
||||||
|
|
||||||
|
:arg collection: target collection
|
||||||
|
:type collection: bpy.types.CollectionProperty
|
||||||
|
:arg attribute: target attribute
|
||||||
|
:type attribute: str
|
||||||
|
:arg sequence: enum data buffer
|
||||||
|
:type sequence: list
|
||||||
|
:return: numpy byte buffer
|
||||||
|
"""
|
||||||
|
|
||||||
|
attr_infos = collection[0].bl_rna.properties.get(attribute)
|
||||||
|
|
||||||
|
assert(attr_infos.type == 'ENUM')
|
||||||
|
|
||||||
|
enum_items = attr_infos.enum_items
|
||||||
|
enum_idx = [i.value for i in enum_items]
|
||||||
|
|
||||||
|
for index, item in enumerate(sequence):
|
||||||
|
setattr(collection[index], attribute,
|
||||||
|
enum_items[enum_idx.index(item)].identifier)
|
||||||
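Enum attributes cannot go through the byte buffer, so they are dumped as a list of enum item values and mapped back to their identifiers on load. A short sketch with metaball element types (the exact integer values depend on the enum definition):

import bpy
from multi_user.bl_types.dump_anything import (np_dump_collection_enum,
                                               np_load_collection_enum)

mball = bpy.data.metaballs.new('mball_enum_demo')
mball.elements.new(type='BALL')
mball.elements.new(type='CAPSULE')

values = np_dump_collection_enum(mball.elements, 'type')    # list of enum item values
np_load_collection_enum(mball.elements, 'type', values)     # back to 'BALL' / 'CAPSULE'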
|
|
||||||
|
|
||||||
|
def np_load_collection_primitives(collection: bpy.types.CollectionProperty, attribute: str, sequence: str):
|
||||||
|
""" Load a collection attribute from a str bytes sequence
|
||||||
|
|
||||||
|
!!! warning
|
||||||
|
Only work with int, float and bool attributes
|
||||||
|
|
||||||
|
:arg collection: target collection
|
||||||
|
:type collection: bpy.types.CollectionProperty
|
||||||
|
:arg attribute: target attribute
|
||||||
|
:type attribute: str
|
||||||
|
:arg sequence: data buffer
|
||||||
|
:type sequence: strr
|
||||||
|
"""
|
||||||
|
if len(collection) == 0 or not sequence:
|
||||||
|
logger.warning(f"Skipping loadin {attribute}")
|
||||||
|
return
|
||||||
|
|
||||||
|
attr_infos = collection[0].bl_rna.properties.get(attribute)
|
||||||
|
|
||||||
|
assert(attr_infos.type in ['FLOAT', 'INT', 'BOOLEAN'])
|
||||||
|
|
||||||
|
collection.foreach_set(
|
||||||
|
attribute,
|
||||||
|
np.frombuffer(sequence, dtype=BPY_TO_NUMPY_TYPES.get(attr_infos.type)))
|
||||||
|
|
||||||
|
|
||||||
|
def remove_items_from_dict(d, keys, recursive=False):
|
||||||
|
copy = dict(d)
|
||||||
|
for k in keys:
|
||||||
|
copy.pop(k, None)
|
||||||
|
if recursive:
|
||||||
|
for k in [k for k in copy.keys() if isinstance(copy[k], dict)]:
|
||||||
|
copy[k] = remove_items_from_dict(copy[k], keys, recursive)
|
||||||
|
return copy
|
||||||
|
|
||||||
|
|
||||||
|
def _is_dictionnary(v):
|
||||||
|
return hasattr(v, "items") and callable(v.items)
|
||||||
|
|
||||||
|
|
||||||
|
def _dump_filter_type(t):
|
||||||
|
return lambda x: isinstance(x, t)
|
||||||
|
|
||||||
|
|
||||||
|
def _dump_filter_type_by_name(t_name):
|
||||||
|
return lambda x: t_name == x.__class__.__name__
|
||||||
|
|
||||||
|
|
||||||
|
def _dump_filter_array(array):
|
||||||
|
# only primitive type array
|
||||||
|
if not isinstance(array, T.bpy_prop_array):
|
||||||
|
return False
|
||||||
|
if len(array) > 0 and type(array[0]) not in [bool, float, int]:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def _dump_filter_default(default):
|
||||||
|
if default is None:
|
||||||
|
return False
|
||||||
|
if type(default) is list:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def _load_filter_type(t, use_bl_rna=True):
|
||||||
|
def filter_function(x):
|
||||||
|
if use_bl_rna and x.bl_rna_property:
|
||||||
|
return isinstance(x.bl_rna_property, t)
|
||||||
|
else:
|
||||||
|
return isinstance(x.read(), t)
|
||||||
|
return filter_function
|
||||||
|
|
||||||
|
|
||||||
|
def _load_filter_array(array):
|
||||||
|
# only primitive type array
|
||||||
|
if not isinstance(array.read(), T.bpy_prop_array):
|
||||||
|
return False
|
||||||
|
if len(array.read()) > 0 and type(array.read()[0]) not in [bool, float, int]:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def _load_filter_color(color):
|
||||||
|
return color.__class__.__name__ == 'Color'
|
||||||
|
|
||||||
|
|
||||||
|
def _load_filter_default(default):
|
||||||
|
if default.read() is None:
|
||||||
|
return False
|
||||||
|
if type(default.read()) is list:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
class Dumper:
|
||||||
|
# TODO: support occlude readonly
|
||||||
|
# TODO: use foreach_set/get on collection compatible properties
|
||||||
|
def __init__(self):
|
||||||
|
self.verbose = True
|
||||||
|
self.depth = 1
|
||||||
|
self.keep_compounds_as_leaves = False
|
||||||
|
self.accept_read_only = True
|
||||||
|
self._build_inline_dump_functions()
|
||||||
|
self._build_match_elements()
|
||||||
|
self.type_subset = self.match_subset_all
|
||||||
|
self.include_filter = []
|
||||||
|
self.exclude_filter = []
|
||||||
|
|
||||||
|
def dump(self, any):
|
||||||
|
return self._dump_any(any, 0)
|
||||||
|
|
||||||
|
def _dump_any(self, any, depth):
|
||||||
|
for filter_function, dump_function in self.type_subset:
|
||||||
|
if filter_function(any):
|
||||||
|
return dump_function[not (depth >= self.depth)](any, depth + 1)
|
||||||
|
|
||||||
|
def _build_inline_dump_functions(self):
|
||||||
|
self._dump_identity = (lambda x, depth: x, lambda x, depth: x)
|
||||||
|
self._dump_ref = (lambda x, depth: x.name, self._dump_object_as_branch)
|
||||||
|
self._dump_ID = (lambda x, depth: x.name, self._dump_default_as_branch)
|
||||||
|
self._dump_collection = (
|
||||||
|
self._dump_default_as_leaf, self._dump_collection_as_branch)
|
||||||
|
self._dump_array = (self._dump_default_as_leaf,
|
||||||
|
self._dump_array_as_branch)
|
||||||
|
self._dump_matrix = (self._dump_matrix_as_leaf,
|
||||||
|
self._dump_matrix_as_leaf)
|
||||||
|
self._dump_vector = (self._dump_vector_as_leaf,
|
||||||
|
self._dump_vector_as_leaf)
|
||||||
|
self._dump_quaternion = (
|
||||||
|
self._dump_quaternion_as_leaf, self._dump_quaternion_as_leaf)
|
||||||
|
self._dump_default = (self._dump_default_as_leaf,
|
||||||
|
self._dump_default_as_branch)
|
||||||
|
self._dump_color = (self._dump_color_as_leaf, self._dump_color_as_leaf)
|
||||||
|
|
||||||
|
def _build_match_elements(self):
|
||||||
|
self._match_type_bool = (_dump_filter_type(bool), self._dump_identity)
|
||||||
|
self._match_type_int = (_dump_filter_type(int), self._dump_identity)
|
||||||
|
self._match_type_float = (
|
||||||
|
_dump_filter_type(float), self._dump_identity)
|
||||||
|
self._match_type_string = (_dump_filter_type(str), self._dump_identity)
|
||||||
|
self._match_type_ref = (_dump_filter_type(T.Object), self._dump_ref)
|
||||||
|
self._match_type_ID = (_dump_filter_type(T.ID), self._dump_ID)
|
||||||
|
self._match_type_bpy_prop_collection = (
|
||||||
|
_dump_filter_type(T.bpy_prop_collection), self._dump_collection)
|
||||||
|
self._match_type_array = (_dump_filter_array, self._dump_array)
|
||||||
|
self._match_type_matrix = (_dump_filter_type(
|
||||||
|
mathutils.Matrix), self._dump_matrix)
|
||||||
|
self._match_type_vector = (_dump_filter_type(
|
||||||
|
mathutils.Vector), self._dump_vector)
|
||||||
|
self._match_type_quaternion = (_dump_filter_type(
|
||||||
|
mathutils.Quaternion), self._dump_quaternion)
|
||||||
|
self._match_type_euler = (_dump_filter_type(
|
||||||
|
mathutils.Euler), self._dump_quaternion)
|
||||||
|
self._match_type_color = (
|
||||||
|
_dump_filter_type_by_name("Color"), self._dump_color)
|
||||||
|
self._match_default = (_dump_filter_default, self._dump_default)
|
||||||
|
|
||||||
|
def _dump_collection_as_branch(self, collection, depth):
|
||||||
|
dump = {}
|
||||||
|
for i in collection.items():
|
||||||
|
dv = self._dump_any(i[1], depth)
|
||||||
|
if not (dv is None):
|
||||||
|
dump[i[0]] = dv
|
||||||
|
return dump
|
||||||
|
|
||||||
|
def _dump_default_as_leaf(self, default, depth):
|
||||||
|
if self.keep_compounds_as_leaves:
|
||||||
|
return str(type(default))
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _dump_array_as_branch(self, array, depth):
|
||||||
|
return [i for i in array]
|
||||||
|
|
||||||
|
def _dump_matrix_as_leaf(self, matrix, depth):
|
||||||
|
return [list(v) for v in matrix]
|
||||||
|
|
||||||
|
def _dump_vector_as_leaf(self, vector, depth):
|
||||||
|
return list(vector)
|
||||||
|
|
||||||
|
def _dump_quaternion_as_leaf(self, quaternion, depth):
|
||||||
|
return list(quaternion)
|
||||||
|
|
||||||
|
def _dump_color_as_leaf(self, color, depth):
|
||||||
|
return list(color)
|
||||||
|
|
||||||
|
def _dump_object_as_branch(self, default, depth):
|
||||||
|
if depth == 1:
|
||||||
|
return self._dump_default_as_branch(default, depth)
|
||||||
|
else:
|
||||||
|
return default.name
|
||||||
|
|
||||||
|
def _dump_default_as_branch(self, default, depth):
|
||||||
|
def is_valid_property(p):
|
||||||
|
try:
|
||||||
|
if (self.include_filter and p not in self.include_filter):
|
||||||
|
return False
|
||||||
|
getattr(default, p)
|
||||||
|
except AttributeError as err:
|
||||||
|
logger.debug(err)
|
||||||
|
return False
|
||||||
|
if p.startswith("__"):
|
||||||
|
return False
|
||||||
|
if callable(getattr(default, p)):
|
||||||
|
return False
|
||||||
|
if p in ["bl_rna", "rna_type"]:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
all_property_names = [p for p in dir(default) if is_valid_property(
|
||||||
|
p) and p != '' and p not in self.exclude_filter]
|
||||||
|
dump = {}
|
||||||
|
for p in all_property_names:
|
||||||
|
if (self.exclude_filter and p in self.exclude_filter) or\
|
||||||
|
(self.include_filter and p not in self.include_filter):
|
||||||
|
return False
|
||||||
|
dp = self._dump_any(getattr(default, p), depth)
|
||||||
|
if not (dp is None):
|
||||||
|
dump[p] = dp
|
||||||
|
return dump
|
||||||
|
|
||||||
|
@property
|
||||||
|
def match_subset_all(self):
|
||||||
|
return [
|
||||||
|
self._match_type_bool,
|
||||||
|
self._match_type_int,
|
||||||
|
self._match_type_float,
|
||||||
|
self._match_type_string,
|
||||||
|
self._match_type_ref,
|
||||||
|
self._match_type_ID,
|
||||||
|
self._match_type_bpy_prop_collection,
|
||||||
|
self._match_type_array,
|
||||||
|
self._match_type_matrix,
|
||||||
|
self._match_type_vector,
|
||||||
|
self._match_type_quaternion,
|
||||||
|
self._match_type_euler,
|
||||||
|
self._match_type_color,
|
||||||
|
self._match_default
|
||||||
|
]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def match_subset_primitives(self):
|
||||||
|
return [
|
||||||
|
self._match_type_bool,
|
||||||
|
self._match_type_int,
|
||||||
|
self._match_type_float,
|
||||||
|
self._match_type_string,
|
||||||
|
self._match_default
|
||||||
|
]
|
||||||
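Dumper and Loader are meant to be used as a pair: Dumper flattens a Blender struct into a plain dict of primitives (bounded by depth, include_filter and exclude_filter), and Loader walks that dict and writes every writable property back. A minimal sketch on a camera datablock, assuming the add-on is importable as multi_user:

import bpy
from multi_user.bl_types.dump_anything import Dumper, Loader

cam = bpy.data.cameras.new('dump_demo')
cam.lens = 85.0

dumper = Dumper()
dumper.depth = 1
dumper.include_filter = ['lens', 'clip_start', 'clip_end']
data = dumper.dump(cam)          # {'lens': 85.0, 'clip_start': ..., 'clip_end': ...}

replica = bpy.data.cameras.new('load_demo')
loader = Loader()
loader.load(replica, data)       # every writable property found in the dict is applied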
|
|
||||||
|
|
||||||
|
class BlenderAPIElement:
|
||||||
|
def __init__(self, api_element, sub_element_name="", occlude_read_only=True):
|
||||||
|
self.api_element = api_element
|
||||||
|
self.sub_element_name = sub_element_name
|
||||||
|
self.occlude_read_only = occlude_read_only
|
||||||
|
|
||||||
|
def read(self):
|
||||||
|
return getattr(self.api_element, self.sub_element_name) if self.sub_element_name else self.api_element
|
||||||
|
|
||||||
|
def write(self, value):
|
||||||
|
# take precaution if property is read-only
|
||||||
|
if self.sub_element_name and \
|
||||||
|
not self.api_element.is_property_readonly(self.sub_element_name):
|
||||||
|
|
||||||
|
setattr(self.api_element, self.sub_element_name, value)
|
||||||
|
else:
|
||||||
|
self.api_element = value
|
||||||
|
|
||||||
|
def extend(self, element_name):
|
||||||
|
return BlenderAPIElement(self.read(), element_name)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def bl_rna_property(self):
|
||||||
|
if not hasattr(self.api_element, "bl_rna"):
|
||||||
|
return False
|
||||||
|
if not self.sub_element_name:
|
||||||
|
return False
|
||||||
|
return self.api_element.bl_rna.properties[self.sub_element_name]
|
||||||
|
|
||||||
|
|
||||||
|
class Loader:
|
||||||
|
def __init__(self):
|
||||||
|
self.type_subset = self.match_subset_all
|
||||||
|
self.occlude_read_only = False
|
||||||
|
self.order = ['*']
|
||||||
|
|
||||||
|
def load(self, dst_data, src_dumped_data):
|
||||||
|
self._load_any(
|
||||||
|
BlenderAPIElement(
|
||||||
|
dst_data, occlude_read_only=self.occlude_read_only),
|
||||||
|
src_dumped_data
|
||||||
|
)
|
||||||
|
|
||||||
|
def _load_any(self, any, dump):
|
||||||
|
for filter_function, load_function in self.type_subset:
|
||||||
|
if filter_function(any):
|
||||||
|
load_function(any, dump)
|
||||||
|
return
|
||||||
|
|
||||||
|
def _load_identity(self, element, dump):
|
||||||
|
element.write(dump)
|
||||||
|
|
||||||
|
def _load_array(self, element, dump):
|
||||||
|
# supports only primitive types currently
|
||||||
|
try:
|
||||||
|
for i in range(len(dump)):
|
||||||
|
element.read()[i] = dump[i]
|
||||||
|
except AttributeError as err:
|
||||||
|
logger.debug(err)
|
||||||
|
if not self.occlude_read_only:
|
||||||
|
raise err
|
||||||
|
|
||||||
|
def _load_collection(self, element, dump):
|
||||||
|
if not element.bl_rna_property:
|
||||||
|
return
|
||||||
|
# local enum
|
||||||
|
CONSTRUCTOR_NEW = "new"
|
||||||
|
CONSTRUCTOR_ADD = "add"
|
||||||
|
|
||||||
|
DESTRUCTOR_REMOVE = "remove"
|
||||||
|
DESTRUCTOR_CLEAR = "clear"
|
||||||
|
|
||||||
|
_constructors = {
|
||||||
|
T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]),
|
||||||
|
T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []),
|
||||||
|
T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"]),
|
||||||
|
T.Constraint: (CONSTRUCTOR_NEW, ["type"]),
|
||||||
|
}
|
||||||
|
|
||||||
|
destructors = {
|
||||||
|
T.ColorRampElement: DESTRUCTOR_REMOVE,
|
||||||
|
T.Modifier: DESTRUCTOR_CLEAR,
|
||||||
|
T.Constraint: CONSTRUCTOR_NEW,
|
||||||
|
}
|
||||||
|
element_type = element.bl_rna_property.fixed_type
|
||||||
|
|
||||||
|
_constructor = _constructors.get(type(element_type))
|
||||||
|
|
||||||
|
if _constructor is None: # collection type not supported
|
||||||
|
return
|
||||||
|
|
||||||
|
destructor = destructors.get(type(element_type))
|
||||||
|
|
||||||
|
# Try to clear existing
|
||||||
|
if destructor:
|
||||||
|
if destructor == DESTRUCTOR_REMOVE:
|
||||||
|
collection = element.read()
|
||||||
|
for i in range(len(collection)-1):
|
||||||
|
collection.remove(collection[0])
|
||||||
|
else:
|
||||||
|
getattr(element.read(), DESTRUCTOR_CLEAR)()
|
||||||
|
|
||||||
|
for dump_idx, dumped_element in enumerate(dump.values()):
|
||||||
|
if dump_idx == 0 and len(element.read()) > 0:
|
||||||
|
new_element = element.read()[0]
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
_constructor_parameters = [dumped_element[name]
|
||||||
|
for name in _constructor[1]]
|
||||||
|
except KeyError:
|
||||||
|
logger.debug("Collection load error, missing parameters.")
|
||||||
|
continue # TODO handle error
|
||||||
|
|
||||||
|
new_element = getattr(element.read(), _constructor[0])(
|
||||||
|
*_constructor_parameters)
|
||||||
|
self._load_any(
|
||||||
|
BlenderAPIElement(
|
||||||
|
new_element, occlude_read_only=self.occlude_read_only),
|
||||||
|
dumped_element
|
||||||
|
)
|
||||||
|
|
||||||
|
def _load_curve_mapping(self, element, dump):
|
||||||
|
mapping = element.read()
|
||||||
|
curves = mapping.curves
|
||||||
|
|
||||||
|
for curve_index, curve in dump['curves'].items():
|
||||||
|
dst_curve = curves[curve_index]
|
||||||
|
|
||||||
|
# cleanup existing curve
|
||||||
|
for idx in range(len(dst_curve.points), 0, -1):
|
||||||
|
try:
|
||||||
|
dst_curve.points.remove(dst_curve.points[0])
|
||||||
|
except Exception:
|
||||||
|
break
|
||||||
|
|
||||||
|
default_point_count = len(dst_curve.points)
|
||||||
|
|
||||||
|
for point_idx, point in curve['points'].items():
|
||||||
|
pos = point['location']
|
||||||
|
|
||||||
|
if point_idx < default_point_count:
|
||||||
|
dst_curve.points[int(point_idx)].location = pos
|
||||||
|
else:
|
||||||
|
dst_curve.points.new(pos[0], pos[1])
|
||||||
|
|
||||||
|
def _load_pointer(self, instance, dump):
|
||||||
|
rna_property_type = instance.bl_rna_property.fixed_type
|
||||||
|
if not rna_property_type:
|
||||||
|
return
|
||||||
|
if isinstance(rna_property_type, T.Image):
|
||||||
|
instance.write(bpy.data.images.get(dump))
|
||||||
|
elif isinstance(rna_property_type, T.Texture):
|
||||||
|
instance.write(bpy.data.textures.get(dump))
|
||||||
|
elif isinstance(rna_property_type, T.ColorRamp):
|
||||||
|
self._load_default(instance, dump)
|
||||||
|
elif isinstance(rna_property_type, T.Object):
|
||||||
|
instance.write(bpy.data.objects.get(dump))
|
||||||
|
elif isinstance(rna_property_type, T.Mesh):
|
||||||
|
instance.write(bpy.data.meshes.get(dump))
|
||||||
|
elif isinstance(rna_property_type, T.Material):
|
||||||
|
instance.write(bpy.data.materials.get(dump))
|
||||||
|
elif isinstance(rna_property_type, T.Collection):
|
||||||
|
instance.write(bpy.data.collections.get(dump))
|
||||||
|
|
||||||
|
def _load_matrix(self, matrix, dump):
|
||||||
|
matrix.write(mathutils.Matrix(dump))
|
||||||
|
|
||||||
|
def _load_vector(self, vector, dump):
|
||||||
|
vector.write(mathutils.Vector(dump))
|
||||||
|
|
||||||
|
def _load_quaternion(self, quaternion, dump):
|
||||||
|
quaternion.write(mathutils.Quaternion(dump))
|
||||||
|
|
||||||
|
def _load_euler(self, euler, dump):
|
||||||
|
euler.write(mathutils.Euler(dump))
|
||||||
|
|
||||||
|
def _ordered_keys(self, keys):
|
||||||
|
ordered_keys = []
|
||||||
|
for order_element in self.order:
|
||||||
|
if order_element == '*':
|
||||||
|
ordered_keys += [k for k in keys if k not in self.order]
|
||||||
|
else:
|
||||||
|
if order_element in keys:
|
||||||
|
ordered_keys.append(order_element)
|
||||||
|
return ordered_keys
|
||||||
|
|
||||||
|
def _load_default(self, default, dump):
|
||||||
|
if not _is_dictionnary(dump):
|
||||||
|
return # TODO error handling
|
||||||
|
for k in self._ordered_keys(dump.keys()):
|
||||||
|
v = dump[k]
|
||||||
|
if not hasattr(default.read(), k):
|
||||||
|
logger.debug(f"Load default, skipping {default} : {k}")
|
||||||
|
try:
|
||||||
|
self._load_any(default.extend(k), v)
|
||||||
|
except Exception as err:
|
||||||
|
logger.debug(f"Cannot load {k}: {err}")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def match_subset_all(self):
|
||||||
|
return [
|
||||||
|
(_load_filter_type(T.BoolProperty), self._load_identity),
|
||||||
|
(_load_filter_type(T.IntProperty), self._load_identity),
|
||||||
|
# before float because the bl_rna type of a matrix is FloatProperty
|
||||||
|
(_load_filter_type(mathutils.Matrix, use_bl_rna=False), self._load_matrix),
|
||||||
|
# before float because the bl_rna type of a vector is FloatProperty
|
||||||
|
(_load_filter_type(mathutils.Vector, use_bl_rna=False), self._load_vector),
|
||||||
|
(_load_filter_type(mathutils.Quaternion,
|
||||||
|
use_bl_rna=False), self._load_quaternion),
|
||||||
|
(_load_filter_type(mathutils.Euler, use_bl_rna=False), self._load_euler),
|
||||||
|
(_load_filter_type(T.CurveMapping, use_bl_rna=False),
|
||||||
|
self._load_curve_mapping),
|
||||||
|
(_load_filter_type(T.FloatProperty), self._load_identity),
|
||||||
|
(_load_filter_type(T.StringProperty), self._load_identity),
|
||||||
|
(_load_filter_type(T.EnumProperty), self._load_identity),
|
||||||
|
(_load_filter_type(T.PointerProperty), self._load_pointer),
|
||||||
|
(_load_filter_array, self._load_array),
|
||||||
|
(_load_filter_type(T.CollectionProperty), self._load_collection),
|
||||||
|
(_load_filter_default, self._load_default),
|
||||||
|
(_load_filter_color, self._load_identity),
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
# Utility functions
|
||||||
|
def dump(any, depth=1):
|
||||||
|
dumper = Dumper()
|
||||||
|
dumper.depth = depth
|
||||||
|
return dumper.dump(any)
|
||||||
|
|
||||||
|
|
||||||
|
def load(dst, src):
|
||||||
|
loader = Loader()
|
||||||
|
loader.load(dst, src)
|
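For orientation, a minimal usage sketch of the dump/load helpers above; the import path and the datablock names are assumptions for the example, not part of this module:

```python
# Sketch only: copy one light's properties onto another through an
# intermediate dict, assuming two Light datablocks with these names exist.
import bpy
from .dump_anything import dump, load  # assumed module path inside the add-on

src = bpy.data.lights["Light.src"]   # hypothetical source datablock
dst = bpy.data.lights["Light.dst"]   # hypothetical target datablock

snapshot = dump(src, depth=1)        # nested dict of primitive values
load(dst, snapshot)                  # write those values back onto dst
```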
@ -1,3 +1,20 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
@ -196,27 +213,37 @@ class DrawClient(Draw):
|
|||||||
def execute(self):
|
def execute(self):
|
||||||
session = getattr(operators, 'client', None)
|
session = getattr(operators, 'client', None)
|
||||||
renderer = getattr(presence, 'renderer', None)
|
renderer = getattr(presence, 'renderer', None)
|
||||||
|
prefs = utils.get_preferences()
|
||||||
|
|
||||||
if session and renderer and session.state['STATE'] == STATE_ACTIVE:
|
if session and renderer and session.state['STATE'] == STATE_ACTIVE:
|
||||||
settings = bpy.context.window_manager.session
|
settings = bpy.context.window_manager.session
|
||||||
users = session.online_users
|
users = session.online_users
|
||||||
|
|
||||||
|
# Update users
|
||||||
for user in users.values():
|
for user in users.values():
|
||||||
metadata = user.get('metadata')
|
metadata = user.get('metadata')
|
||||||
|
color = metadata.get('color')
|
||||||
if 'color' in metadata:
|
scene_current = metadata.get('scene_current')
|
||||||
|
user_showable = scene_current == bpy.context.scene.name or settings.presence_show_far_user
|
||||||
|
if color and scene_current and user_showable:
|
||||||
if settings.presence_show_selected and 'selected_objects' in metadata.keys():
|
if settings.presence_show_selected and 'selected_objects' in metadata.keys():
|
||||||
renderer.draw_client_selection(
|
renderer.draw_client_selection(
|
||||||
user['id'], metadata['color'], metadata['selected_objects'])
|
user['id'], color, metadata['selected_objects'])
|
||||||
if settings.presence_show_user and 'view_corners' in metadata:
|
if settings.presence_show_user and 'view_corners' in metadata:
|
||||||
renderer.draw_client_camera(
|
renderer.draw_client_camera(
|
||||||
user['id'], metadata['view_corners'], metadata['color'])
|
user['id'], metadata['view_corners'], color)
|
||||||
|
if not user_showable:
|
||||||
|
# TODO: remove this once the user event-driven update is ready
|
||||||
|
renderer.flush_selection()
|
||||||
|
renderer.flush_users()
|
||||||
|
|
||||||
|
|
||||||
class ClientUpdate(Timer):
|
class ClientUpdate(Timer):
|
||||||
def __init__(self, timout=.5):
|
def __init__(self, timout=.016):
|
||||||
super().__init__(timout)
|
super().__init__(timout)
|
||||||
self.handle_quit = False
|
self.handle_quit = False
|
||||||
|
self.users_metadata = {}
|
||||||
|
|
||||||
def execute(self):
|
def execute(self):
|
||||||
settings = utils.get_preferences()
|
settings = utils.get_preferences()
|
||||||
@ -228,31 +255,53 @@ class ClientUpdate(Timer):
|
|||||||
if session.state['STATE'] == 0:
|
if session.state['STATE'] == 0:
|
||||||
bpy.ops.session.stop()
|
bpy.ops.session.stop()
|
||||||
|
|
||||||
local_user = operators.client.online_users.get(
|
local_user = operators.client.online_users.get(settings.username)
|
||||||
settings.username)
|
|
||||||
if not local_user:
|
if not local_user:
|
||||||
return
|
return
|
||||||
|
else:
|
||||||
|
for username, user_data in operators.client.online_users.items():
|
||||||
|
if username != settings.username:
|
||||||
|
cached_user_data = self.users_metadata.get(username)
|
||||||
|
new_user_data = operators.client.online_users[username]['metadata']
|
||||||
|
|
||||||
|
if cached_user_data is None:
|
||||||
|
self.users_metadata[username] = user_data['metadata']
|
||||||
|
elif 'view_matrix' in cached_user_data and 'view_matrix' in new_user_data and cached_user_data['view_matrix'] != new_user_data['view_matrix']:
|
||||||
|
presence.refresh_3d_view()
|
||||||
|
self.users_metadata[username] = user_data['metadata']
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
self.users_metadata[username] = user_data['metadata']
|
||||||
|
|
||||||
local_user_metadata = local_user.get('metadata')
|
local_user_metadata = local_user.get('metadata')
|
||||||
|
scene_current = bpy.context.scene.name
|
||||||
|
local_user = session.online_users.get(settings.username)
|
||||||
current_view_corners = presence.get_view_corners()
|
current_view_corners = presence.get_view_corners()
|
||||||
|
|
||||||
|
# Init client metadata
|
||||||
if not local_user_metadata or 'color' not in local_user_metadata.keys():
|
if not local_user_metadata or 'color' not in local_user_metadata.keys():
|
||||||
metadata = {
|
metadata = {
|
||||||
'view_corners': current_view_corners,
|
'view_corners': presence.get_view_matrix(),
|
||||||
'view_matrix': presence.get_view_matrix(),
|
'view_matrix': presence.get_view_matrix(),
|
||||||
'color': (settings.client_color.r,
|
'color': (settings.client_color.r,
|
||||||
settings.client_color.g,
|
settings.client_color.g,
|
||||||
settings.client_color.b,
|
settings.client_color.b,
|
||||||
1),
|
1),
|
||||||
'frame_current':bpy.context.scene.frame_current
|
'frame_current':bpy.context.scene.frame_current,
|
||||||
|
'scene_current': scene_current
|
||||||
}
|
}
|
||||||
session.update_user_metadata(metadata)
|
session.update_user_metadata(metadata)
|
||||||
elif current_view_corners != local_user_metadata['view_corners']:
|
|
||||||
logger.info('update user metadata')
|
# Update client representation
|
||||||
|
# Update client current scene
|
||||||
|
elif scene_current != local_user_metadata['scene_current']:
|
||||||
|
local_user_metadata['scene_current'] = scene_current
|
||||||
|
session.update_user_metadata(local_user_metadata)
|
||||||
|
elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']:
|
||||||
local_user_metadata['view_corners'] = current_view_corners
|
local_user_metadata['view_corners'] = current_view_corners
|
||||||
local_user_metadata['view_matrix'] = presence.get_view_matrix()
|
local_user_metadata['view_matrix'] = presence.get_view_matrix()
|
||||||
session.update_user_metadata(local_user_metadata)
|
session.update_user_metadata(local_user_metadata)
|
||||||
|
|
||||||
# sync online users
|
# sync online users
|
||||||
session_users = operators.client.online_users
|
session_users = operators.client.online_users
|
||||||
ui_users = bpy.context.window_manager.online_users
|
ui_users = bpy.context.window_manager.online_users
|
||||||
@ -260,11 +309,8 @@ class ClientUpdate(Timer):
|
|||||||
for index, user in enumerate(ui_users):
|
for index, user in enumerate(ui_users):
|
||||||
if user.username not in session_users.keys():
|
if user.username not in session_users.keys():
|
||||||
ui_users.remove(index)
|
ui_users.remove(index)
|
||||||
|
|
||||||
renderer.flush_selection()
|
renderer.flush_selection()
|
||||||
renderer.flush_users()
|
renderer.flush_users()
|
||||||
|
|
||||||
|
|
||||||
break
|
break
|
||||||
|
|
||||||
for user in session_users:
|
for user in session_users:
|
||||||
@ -274,7 +320,9 @@ class ClientUpdate(Timer):
|
|||||||
new_key.username = user
|
new_key.username = user
|
||||||
|
|
||||||
# TODO: event-driven 3d view refresh
|
||||||
presence.refresh_3d_view()
|
|
||||||
|
|
||||||
|
|
||||||
elif session.state['STATE'] == STATE_QUITTING:
|
elif session.state['STATE'] == STATE_QUITTING:
|
||||||
presence.refresh_3d_view()
|
presence.refresh_3d_view()
|
||||||
self.handle_quit = True
|
self.handle_quit = True
|
||||||
@ -286,5 +334,5 @@ class ClientUpdate(Timer):
|
|||||||
|
|
||||||
presence.renderer.stop()
|
presence.renderer.stop()
|
||||||
# # ui update
|
# # ui update
|
||||||
elif session:
|
elif session.state['STATE'] != STATE_INITIAL:
|
||||||
presence.refresh_3d_view()
|
presence.refresh_3d_view()
|
@ -1,3 +1,21 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
import collections
|
import collections
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
@ -1,397 +0,0 @@
|
|||||||
import bpy
|
|
||||||
import bpy.types as T
|
|
||||||
import mathutils
|
|
||||||
|
|
||||||
|
|
||||||
def remove_items_from_dict(d, keys, recursive=False):
|
|
||||||
copy = dict(d)
|
|
||||||
for k in keys:
|
|
||||||
copy.pop(k, None)
|
|
||||||
if recursive:
|
|
||||||
for k in [k for k in copy.keys() if isinstance(copy[k], dict)]:
|
|
||||||
copy[k] = remove_items_from_dict(copy[k], keys, recursive)
|
|
||||||
return copy
|
|
||||||
|
|
||||||
|
|
||||||
def _is_dictionnary(v):
|
|
||||||
return hasattr(v, "items") and callable(v.items)
|
|
||||||
|
|
||||||
|
|
||||||
def _dump_filter_type(t):
|
|
||||||
return lambda x: isinstance(x, t)
|
|
||||||
|
|
||||||
def _dump_filter_type_by_name(t_name):
|
|
||||||
return lambda x: t_name == x.__class__.__name__
|
|
||||||
|
|
||||||
def _dump_filter_array(array):
|
|
||||||
# only primitive type array
|
|
||||||
if not isinstance(array, T.bpy_prop_array):
|
|
||||||
return False
|
|
||||||
if len(array) > 0 and type(array[0]) not in [bool, float, int]:
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def _dump_filter_default(default):
|
|
||||||
if default is None:
|
|
||||||
return False
|
|
||||||
if type(default) is list:
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def _load_filter_type(t, use_bl_rna=True):
|
|
||||||
def filter_function(x):
|
|
||||||
if use_bl_rna and x.bl_rna_property:
|
|
||||||
return isinstance(x.bl_rna_property, t)
|
|
||||||
else:
|
|
||||||
isinstance(x.read(), t)
|
|
||||||
return filter_function
|
|
||||||
|
|
||||||
|
|
||||||
def _load_filter_array(array):
|
|
||||||
# only primitive type array
|
|
||||||
if not isinstance(array.read(), T.bpy_prop_array):
|
|
||||||
return False
|
|
||||||
if len(array.read()) > 0 and type(array.read()[0]) not in [bool, float, int]:
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
def _load_filter_color(color):
|
|
||||||
return color.__class__.__name__ == 'Color'
|
|
||||||
|
|
||||||
def _load_filter_default(default):
|
|
||||||
if default.read() is None:
|
|
||||||
return False
|
|
||||||
if type(default.read()) is list:
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
class Dumper:
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.verbose = False
|
|
||||||
self.depth = 1
|
|
||||||
self.keep_compounds_as_leaves = False
|
|
||||||
self.accept_read_only = True
|
|
||||||
self._build_inline_dump_functions()
|
|
||||||
self._build_match_elements()
|
|
||||||
self.type_subset = self.match_subset_all
|
|
||||||
self.include_filter = []
|
|
||||||
self.exclude_filter = []
|
|
||||||
# self._atomic_types = [] # TODO future option?
|
|
||||||
|
|
||||||
def dump(self, any):
|
|
||||||
return self._dump_any(any, 0)
|
|
||||||
|
|
||||||
def _dump_any(self, any, depth):
|
|
||||||
for filter_function, dump_function in self.type_subset:
|
|
||||||
if filter_function(any):
|
|
||||||
return dump_function[not (depth >= self.depth)](any, depth + 1)
|
|
||||||
|
|
||||||
def _build_inline_dump_functions(self):
|
|
||||||
self._dump_identity = (lambda x, depth: x, lambda x, depth: x)
|
|
||||||
self._dump_ref = (lambda x, depth: x.name, self._dump_object_as_branch)
|
|
||||||
self._dump_ID = (lambda x, depth: x.name, self._dump_default_as_branch)
|
|
||||||
self._dump_collection = (self._dump_default_as_leaf, self._dump_collection_as_branch)
|
|
||||||
self._dump_array = (self._dump_default_as_leaf, self._dump_array_as_branch)
|
|
||||||
self._dump_matrix = (self._dump_matrix_as_leaf, self._dump_matrix_as_leaf)
|
|
||||||
self._dump_vector = (self._dump_vector_as_leaf, self._dump_vector_as_leaf)
|
|
||||||
self._dump_quaternion = (self._dump_quaternion_as_leaf, self._dump_quaternion_as_leaf)
|
|
||||||
self._dump_default = (self._dump_default_as_leaf, self._dump_default_as_branch)
|
|
||||||
self._dump_color = (self._dump_color_as_leaf, self._dump_color_as_leaf)
|
|
||||||
|
|
||||||
def _build_match_elements(self):
|
|
||||||
self._match_type_bool = (_dump_filter_type(bool), self._dump_identity)
|
|
||||||
self._match_type_int = (_dump_filter_type(int), self._dump_identity)
|
|
||||||
self._match_type_float = (_dump_filter_type(float), self._dump_identity)
|
|
||||||
self._match_type_string = (_dump_filter_type(str), self._dump_identity)
|
|
||||||
self._match_type_ref = (_dump_filter_type(T.Object), self._dump_ref)
|
|
||||||
self._match_type_ID = (_dump_filter_type(T.ID), self._dump_ID)
|
|
||||||
self._match_type_bpy_prop_collection = (_dump_filter_type(T.bpy_prop_collection), self._dump_collection)
|
|
||||||
self._match_type_array = (_dump_filter_array, self._dump_array)
|
|
||||||
self._match_type_matrix = (_dump_filter_type(mathutils.Matrix), self._dump_matrix)
|
|
||||||
self._match_type_vector = (_dump_filter_type(mathutils.Vector), self._dump_vector)
|
|
||||||
self._match_type_quaternion = (_dump_filter_type(mathutils.Quaternion), self._dump_quaternion)
|
|
||||||
self._match_type_euler = (_dump_filter_type(mathutils.Euler), self._dump_quaternion)
|
|
||||||
self._match_type_color = (_dump_filter_type_by_name("Color"), self._dump_color)
|
|
||||||
self._match_default = (_dump_filter_default, self._dump_default)
|
|
||||||
|
|
||||||
def _dump_collection_as_branch(self, collection, depth):
|
|
||||||
dump = {}
|
|
||||||
for i in collection.items():
|
|
||||||
dv = self._dump_any(i[1], depth)
|
|
||||||
if not (dv is None):
|
|
||||||
dump[i[0]] = dv
|
|
||||||
return dump
|
|
||||||
|
|
||||||
def _dump_default_as_leaf(self, default, depth):
|
|
||||||
if self.keep_compounds_as_leaves:
|
|
||||||
return str(type(default))
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
def _dump_array_as_branch(self, array, depth):
|
|
||||||
return [i for i in array]
|
|
||||||
|
|
||||||
def _dump_matrix_as_leaf(self, matrix, depth):
|
|
||||||
return [list(v) for v in matrix]
|
|
||||||
|
|
||||||
def _dump_vector_as_leaf(self, vector, depth):
|
|
||||||
return list(vector)
|
|
||||||
|
|
||||||
def _dump_quaternion_as_leaf(self, quaternion, depth):
|
|
||||||
return list(quaternion)
|
|
||||||
|
|
||||||
def _dump_color_as_leaf(self, color, depth):
|
|
||||||
return list(color)
|
|
||||||
|
|
||||||
def _dump_object_as_branch(self, default, depth):
|
|
||||||
if depth == 1:
|
|
||||||
return self._dump_default_as_branch(default, depth)
|
|
||||||
else:
|
|
||||||
return default.name
|
|
||||||
|
|
||||||
def _dump_default_as_branch(self, default, depth):
|
|
||||||
def is_valid_property(p):
|
|
||||||
try:
|
|
||||||
if (self.include_filter and p not in self.include_filter):
|
|
||||||
return False
|
|
||||||
getattr(default, p)
|
|
||||||
except AttributeError:
|
|
||||||
return False
|
|
||||||
if p.startswith("__"):
|
|
||||||
return False
|
|
||||||
if callable(getattr(default, p)):
|
|
||||||
return False
|
|
||||||
if p in ["bl_rna", "rna_type"]:
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
all_property_names = [p for p in dir(default) if is_valid_property(p) and p != '' and p not in self.exclude_filter]
|
|
||||||
dump = {}
|
|
||||||
for p in all_property_names:
|
|
||||||
if (self.exclude_filter and p in self.exclude_filter) or\
|
|
||||||
(self.include_filter and p not in self.include_filter):
|
|
||||||
return False
|
|
||||||
dp = self._dump_any(getattr(default, p), depth)
|
|
||||||
if not (dp is None):
|
|
||||||
dump[p] = dp
|
|
||||||
return dump
|
|
||||||
|
|
||||||
@property
|
|
||||||
def match_subset_all(self):
|
|
||||||
return [
|
|
||||||
self._match_type_bool,
|
|
||||||
self._match_type_int,
|
|
||||||
self._match_type_float,
|
|
||||||
self._match_type_string,
|
|
||||||
self._match_type_ref,
|
|
||||||
self._match_type_ID,
|
|
||||||
self._match_type_bpy_prop_collection,
|
|
||||||
self._match_type_array,
|
|
||||||
self._match_type_matrix,
|
|
||||||
self._match_type_vector,
|
|
||||||
self._match_type_quaternion,
|
|
||||||
self._match_type_euler,
|
|
||||||
self._match_type_color,
|
|
||||||
self._match_default
|
|
||||||
]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def match_subset_primitives(self):
|
|
||||||
return [
|
|
||||||
self._match_type_bool,
|
|
||||||
self._match_type_int,
|
|
||||||
self._match_type_float,
|
|
||||||
self._match_type_string,
|
|
||||||
self._match_default
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class BlenderAPIElement:
|
|
||||||
def __init__(self, api_element, sub_element_name="", occlude_read_only=True):
|
|
||||||
self.api_element = api_element
|
|
||||||
self.sub_element_name = sub_element_name
|
|
||||||
self.occlude_read_only = occlude_read_only
|
|
||||||
|
|
||||||
def read(self):
|
|
||||||
return getattr(self.api_element, self.sub_element_name) if self.sub_element_name else self.api_element
|
|
||||||
|
|
||||||
def write(self, value):
|
|
||||||
# take precaution if property is read-only
|
|
||||||
try:
|
|
||||||
if self.sub_element_name:
|
|
||||||
setattr(self.api_element, self.sub_element_name, value)
|
|
||||||
else:
|
|
||||||
self.api_element = value
|
|
||||||
except AttributeError as err:
|
|
||||||
if not self.occlude_read_only:
|
|
||||||
raise err
|
|
||||||
|
|
||||||
def extend(self, element_name):
|
|
||||||
return BlenderAPIElement(self.read(), element_name)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def bl_rna_property(self):
|
|
||||||
if not hasattr(self.api_element, "bl_rna"):
|
|
||||||
return False
|
|
||||||
if not self.sub_element_name:
|
|
||||||
return False
|
|
||||||
return self.api_element.bl_rna.properties[self.sub_element_name]
|
|
||||||
|
|
||||||
|
|
||||||
class Loader:
|
|
||||||
def __init__(self):
|
|
||||||
self.type_subset = self.match_subset_all
|
|
||||||
self.occlude_read_only = True
|
|
||||||
self.order = ['*']
|
|
||||||
|
|
||||||
def load(self, dst_data, src_dumped_data):
|
|
||||||
self._load_any(
|
|
||||||
BlenderAPIElement(dst_data, occlude_read_only=self.occlude_read_only),
|
|
||||||
src_dumped_data
|
|
||||||
)
|
|
||||||
|
|
||||||
def _load_any(self, any, dump):
|
|
||||||
for filter_function, load_function in self.type_subset:
|
|
||||||
if filter_function(any):
|
|
||||||
load_function(any, dump)
|
|
||||||
return
|
|
||||||
|
|
||||||
|
|
||||||
def _load_identity(self, element, dump):
|
|
||||||
element.write(dump)
|
|
||||||
|
|
||||||
def _load_array(self, element, dump):
|
|
||||||
# supports only primitive types currently
|
|
||||||
try:
|
|
||||||
for i in range(len(dump)):
|
|
||||||
element.read()[i] = dump[i]
|
|
||||||
except AttributeError as err:
|
|
||||||
if not self.occlude_read_only:
|
|
||||||
raise err
|
|
||||||
|
|
||||||
def _load_collection(self, element, dump):
|
|
||||||
if not element.bl_rna_property:
|
|
||||||
return
|
|
||||||
# local enum
|
|
||||||
CONSTRUCTOR_NEW = "new"
|
|
||||||
CONSTRUCTOR_ADD = "add"
|
|
||||||
|
|
||||||
constructors = {
|
|
||||||
T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]),
|
|
||||||
T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, [])
|
|
||||||
}
|
|
||||||
element_type = element.bl_rna_property.fixed_type
|
|
||||||
constructor = constructors.get(type(element_type))
|
|
||||||
if constructor is None: # collection type not supported
|
|
||||||
return
|
|
||||||
for dumped_element in dump.values():
|
|
||||||
try:
|
|
||||||
constructor_parameters = [dumped_element[name] for name in constructor[1]]
|
|
||||||
except KeyError:
|
|
||||||
print("Collection load error, missing parameters.")
|
|
||||||
continue # TODO handle error
|
|
||||||
new_element = getattr(element.read(), constructor[0])(*constructor_parameters)
|
|
||||||
self._load_any(
|
|
||||||
BlenderAPIElement(new_element, occlude_read_only=self.occlude_read_only),
|
|
||||||
dumped_element
|
|
||||||
)
|
|
||||||
|
|
||||||
def _load_pointer(self, pointer, dump):
|
|
||||||
rna_property_type = pointer.bl_rna_property.fixed_type
|
|
||||||
if not rna_property_type:
|
|
||||||
return
|
|
||||||
if isinstance(rna_property_type, T.Image):
|
|
||||||
pointer.write(bpy.data.images.get(dump))
|
|
||||||
elif isinstance(rna_property_type, T.Texture):
|
|
||||||
pointer.write(bpy.data.textures.get(dump))
|
|
||||||
elif isinstance(rna_property_type, T.ColorRamp):
|
|
||||||
self._load_default(pointer, dump)
|
|
||||||
elif isinstance(rna_property_type, T.Object):
|
|
||||||
pointer.write(bpy.data.objects.get(dump))
|
|
||||||
elif isinstance(rna_property_type, T.Mesh):
|
|
||||||
pointer.write(bpy.data.meshes.get(dump))
|
|
||||||
elif isinstance(rna_property_type, T.Material):
|
|
||||||
pointer.write(bpy.data.materials.get(dump))
|
|
||||||
|
|
||||||
def _load_matrix(self, matrix, dump):
|
|
||||||
matrix.write(mathutils.Matrix(dump))
|
|
||||||
|
|
||||||
def _load_vector(self, vector, dump):
|
|
||||||
vector.write(mathutils.Vector(dump))
|
|
||||||
|
|
||||||
def _load_quaternion(self, quaternion, dump):
|
|
||||||
quaternion.write(mathutils.Quaternion(dump))
|
|
||||||
|
|
||||||
def _load_euler(self, euler, dump):
|
|
||||||
euler.write(mathutils.Euler(dump))
|
|
||||||
|
|
||||||
def _ordered_keys(self, keys):
|
|
||||||
ordered_keys = []
|
|
||||||
for order_element in self.order:
|
|
||||||
if order_element == '*':
|
|
||||||
ordered_keys += [k for k in keys if not k in self.order]
|
|
||||||
else:
|
|
||||||
if order_element in keys:
|
|
||||||
ordered_keys.append(order_element)
|
|
||||||
return ordered_keys
|
|
||||||
|
|
||||||
def _load_default(self, default, dump):
|
|
||||||
if not _is_dictionnary(dump):
|
|
||||||
return # TODO error handling
|
|
||||||
for k in self._ordered_keys(dump.keys()):
|
|
||||||
v = dump[k]
|
|
||||||
if not hasattr(default.read(), k):
|
|
||||||
continue # TODO error handling
|
|
||||||
try:
|
|
||||||
self._load_any(default.extend(k), v)
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
@property
|
|
||||||
def match_subset_all(self):
|
|
||||||
return [
|
|
||||||
(_load_filter_type(T.BoolProperty), self._load_identity),
|
|
||||||
(_load_filter_type(T.IntProperty), self._load_identity),
|
|
||||||
(_load_filter_type(mathutils.Matrix, use_bl_rna=False), self._load_matrix), # before float because bl_rna type of matrix if FloatProperty
|
|
||||||
(_load_filter_type(mathutils.Vector, use_bl_rna=False), self._load_vector), # before float because bl_rna type of vector if FloatProperty
|
|
||||||
(_load_filter_type(mathutils.Quaternion, use_bl_rna=False), self._load_quaternion),
|
|
||||||
(_load_filter_type(mathutils.Euler, use_bl_rna=False), self._load_euler),
|
|
||||||
(_load_filter_type(T.FloatProperty), self._load_identity),
|
|
||||||
(_load_filter_type(T.StringProperty), self._load_identity),
|
|
||||||
(_load_filter_type(T.EnumProperty), self._load_identity),
|
|
||||||
(_load_filter_type(T.PointerProperty), self._load_pointer),
|
|
||||||
(_load_filter_array, self._load_array),
|
|
||||||
(_load_filter_type(T.CollectionProperty), self._load_collection),
|
|
||||||
(_load_filter_default, self._load_default),
|
|
||||||
(_load_filter_color, self._load_identity),
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
# Utility functions
|
|
||||||
|
|
||||||
|
|
||||||
def dump(any, depth=1):
|
|
||||||
dumper = Dumper()
|
|
||||||
dumper.depath = depth
|
|
||||||
return dumper.dump(any)
|
|
||||||
|
|
||||||
def dump_datablock(datablock, depth):
|
|
||||||
if datablock:
|
|
||||||
dumper = Dumper()
|
|
||||||
dumper.type_subset = dumper.match_subset_all
|
|
||||||
dumper.depth = depth
|
|
||||||
|
|
||||||
datablock_type = datablock.bl_rna.name
|
|
||||||
key = "{}/{}".format(datablock_type, datablock.name)
|
|
||||||
data = dumper.dump(datablock)
|
|
||||||
|
|
||||||
return data
|
|
||||||
|
|
||||||
def load(dst, src):
|
|
||||||
loader = Loader()
|
|
||||||
# loader.match_subset_all = loader.match_subset_all
|
|
||||||
loader.load(dst, src)
|
|
@ -1,219 +0,0 @@
|
|||||||
"""
|
|
||||||
Context Manager allowing temporary override of attributes
|
|
||||||
|
|
||||||
````python
|
|
||||||
import bpy
|
|
||||||
from overrider import Overrider
|
|
||||||
|
|
||||||
with Overrider(name='bpy_', parent=bpy) as bpy_:
|
|
||||||
# set preview render settings
|
|
||||||
bpy_.context.scene.render.use_file_extension = False
|
|
||||||
bpy_.context.scene.render.resolution_x = 512
|
|
||||||
bpy_.context.scene.render.resolution_y = 512
|
|
||||||
bpy_.context.scene.render.use_file_extension = False
|
|
||||||
bpy_.context.scene.render.image_settings.file_format = "JPEG"
|
|
||||||
bpy_.context.scene.layers[10] = False
|
|
||||||
|
|
||||||
frame_start = action.frame_range[0]
|
|
||||||
frame_end = action.frame_range[1]
|
|
||||||
if begin_frame is not None:
|
|
||||||
frame_start = begin_frame
|
|
||||||
if end_frame is not None:
|
|
||||||
frame_end = end_frame
|
|
||||||
|
|
||||||
# render
|
|
||||||
window = bpy_.data.window_managers[0].windows[0]
|
|
||||||
screen = bpy_.data.window_managers[0].windows[0].screen
|
|
||||||
area = next(area for area in screen.areas if area.type == 'VIEW_3D')
|
|
||||||
space = next(space for space in area.spaces if space.type == 'VIEW_3D')
|
|
||||||
|
|
||||||
space.viewport_shade = 'MATERIAL'
|
|
||||||
space.region_3d.view_perspective = 'CAMERA'
|
|
||||||
|
|
||||||
override_context = {
|
|
||||||
"window": window._real_value_(),
|
|
||||||
"screen": screen._real_value_()
|
|
||||||
}
|
|
||||||
|
|
||||||
if frame_start == frame_end:
|
|
||||||
bpy.context.scene.frame_set(int(frame_start))
|
|
||||||
bpy_.context.scene.render.filepath = os.path.join(directory, "icon.jpg")
|
|
||||||
bpy.ops.render.opengl(override_context, write_still=True)
|
|
||||||
|
|
||||||
else:
|
|
||||||
for icon_index, frame_number in enumerate(range(int(frame_start), int(frame_end) + 1)):
|
|
||||||
bpy.context.scene.frame_set(frame_number)
|
|
||||||
bpy.context.scene.render.filepath = os.path.join(directory, "icon", "{:04d}.jpg".format(icon_index))
|
|
||||||
bpy.ops.render.opengl(override_context, write_still=True)
|
|
||||||
````
|
|
||||||
"""
|
|
||||||
from collections import OrderedDict
|
|
||||||
|
|
||||||
|
|
||||||
class OverrideIter:
|
|
||||||
|
|
||||||
def __init__(self, parent):
|
|
||||||
self.parent = parent
|
|
||||||
self.index = -1
|
|
||||||
|
|
||||||
def __next__(self):
|
|
||||||
self.index += 1
|
|
||||||
try:
|
|
||||||
return self.parent[self.index]
|
|
||||||
except IndexError as e:
|
|
||||||
raise StopIteration
|
|
||||||
|
|
||||||
|
|
||||||
class OverrideBase:
|
|
||||||
|
|
||||||
def __init__(self, context_manager, name=None, parent=None):
|
|
||||||
self._name__ = name
|
|
||||||
self._context_manager_ = context_manager
|
|
||||||
self._parent_ = parent
|
|
||||||
self._changed_attributes_ = OrderedDict()
|
|
||||||
self._changed_items_ = OrderedDict()
|
|
||||||
self._children_ = list()
|
|
||||||
self._original_value_ = self._real_value_()
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "<{}({})>".format(self.__class__.__name__, self._path_)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def _name_(self):
|
|
||||||
raise NotImplementedError()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def _path_(self):
|
|
||||||
if isinstance(self._parent_, OverrideBase):
|
|
||||||
return self._parent_._path_ + self._name_
|
|
||||||
|
|
||||||
return self._name_
|
|
||||||
|
|
||||||
def _real_value_(self):
|
|
||||||
raise NotImplementedError()
|
|
||||||
|
|
||||||
def _restore_(self):
|
|
||||||
for attribute, original_value in reversed(self._changed_attributes_.items()):
|
|
||||||
setattr(self._real_value_(), attribute, original_value)
|
|
||||||
|
|
||||||
for item, original_value in reversed(self._changed_items_.items()):
|
|
||||||
self._real_value_()[item] = original_value
|
|
||||||
|
|
||||||
def __getattr__(self, attr):
|
|
||||||
new_attribute = OverrideAttribute(self._context_manager_, name=attr, parent=self)
|
|
||||||
self._children_.append(new_attribute)
|
|
||||||
return new_attribute
|
|
||||||
|
|
||||||
def __getitem__(self, item):
|
|
||||||
new_item = OverrideItem(self._context_manager_, name=item, parent=self)
|
|
||||||
self._children_.append(new_item)
|
|
||||||
return new_item
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
return OverrideIter(self)
|
|
||||||
|
|
||||||
def __setattr__(self, attr, value):
|
|
||||||
if attr in (
|
|
||||||
'_name__',
|
|
||||||
'_context_manager_',
|
|
||||||
'_parent_',
|
|
||||||
'_children_',
|
|
||||||
'_original_value_',
|
|
||||||
'_changed_attributes_',
|
|
||||||
'_changed_items_'
|
|
||||||
):
|
|
||||||
self.__dict__[attr] = value
|
|
||||||
return
|
|
||||||
|
|
||||||
if attr not in self._changed_attributes_.keys():
|
|
||||||
self._changed_attributes_[attr] = getattr(self._real_value_(), attr)
|
|
||||||
self._context_manager_.register_as_changed(self)
|
|
||||||
|
|
||||||
setattr(self._real_value_(), attr, value)
|
|
||||||
|
|
||||||
def __setitem__(self, item, value):
|
|
||||||
if item not in self._changed_items_.keys():
|
|
||||||
self._changed_items_[item] = self._real_value_()[item]
|
|
||||||
self._context_manager_.register_as_changed(self)
|
|
||||||
|
|
||||||
self._real_value_()[item] = value
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
return self._real_value_() == other
|
|
||||||
|
|
||||||
def __gt__(self, other):
|
|
||||||
return self._real_value_() > other
|
|
||||||
|
|
||||||
def __lt__(self, other):
|
|
||||||
return self._real_value_() < other
|
|
||||||
|
|
||||||
def __ge__(self, other):
|
|
||||||
return self._real_value_() >= other
|
|
||||||
|
|
||||||
def __le__(self, other):
|
|
||||||
return self._real_value_() <= other
|
|
||||||
|
|
||||||
def __call__(self, *args, **kwargs):
|
|
||||||
# TODO : surround str value with quotes
|
|
||||||
arguments = list([str(arg) for arg in args]) + ['{}={}'.format(key, value) for key, value in kwargs.items()]
|
|
||||||
arguments = ', '.join(arguments)
|
|
||||||
raise RuntimeError('Overrider does not allow call to {}({})'.format(self._path_, arguments))
|
|
||||||
|
|
||||||
|
|
||||||
class OverrideRoot(OverrideBase):
|
|
||||||
|
|
||||||
@property
|
|
||||||
def _name_(self):
|
|
||||||
return self._name__
|
|
||||||
|
|
||||||
def _real_value_(self):
|
|
||||||
return self._parent_
|
|
||||||
|
|
||||||
|
|
||||||
class OverrideAttribute(OverrideBase):
|
|
||||||
|
|
||||||
@property
|
|
||||||
def _name_(self):
|
|
||||||
return '.{}'.format(self._name__)
|
|
||||||
|
|
||||||
def _real_value_(self):
|
|
||||||
return getattr(self._parent_._real_value_(), self._name__)
|
|
||||||
|
|
||||||
|
|
||||||
class OverrideItem(OverrideBase):
|
|
||||||
|
|
||||||
@property
|
|
||||||
def _name_(self):
|
|
||||||
if isinstance(self._name__, str):
|
|
||||||
return '["{}"]'.format(self._name__)
|
|
||||||
|
|
||||||
return '[{}]'.format(self._name__)
|
|
||||||
|
|
||||||
def _real_value_(self):
|
|
||||||
return self._parent_._real_value_()[self._name__]
|
|
||||||
|
|
||||||
|
|
||||||
class Overrider:
|
|
||||||
def __init__(self, name, parent):
|
|
||||||
self.name = name
|
|
||||||
self.parent = parent
|
|
||||||
self.override = None
|
|
||||||
self.registered_overrides = list()
|
|
||||||
|
|
||||||
def __enter__(self):
|
|
||||||
self.override = OverrideRoot(
|
|
||||||
context_manager=self,
|
|
||||||
parent=self.parent,
|
|
||||||
name=self.name
|
|
||||||
)
|
|
||||||
return self.override
|
|
||||||
|
|
||||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
|
||||||
self.restore()
|
|
||||||
|
|
||||||
def register_as_changed(self, override):
|
|
||||||
self.registered_overrides.append(override)
|
|
||||||
|
|
||||||
def restore(self):
|
|
||||||
for override in reversed(self.registered_overrides):
|
|
||||||
override._restore_()
|
|
Submodule multi_user/libs/replication updated: 5b2425c44d...4d2f575b24
@ -1,3 +1,21 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
@ -16,7 +34,7 @@ from bpy.app.handlers import persistent
|
|||||||
from . import bl_types, delayable, environment, presence, ui, utils
|
from . import bl_types, delayable, environment, presence, ui, utils
|
||||||
from .libs.replication.replication.constants import (FETCHED, STATE_ACTIVE,
|
from .libs.replication.replication.constants import (FETCHED, STATE_ACTIVE,
|
||||||
STATE_INITIAL,
|
STATE_INITIAL,
|
||||||
STATE_SYNCING)
|
STATE_SYNCING, UP)
|
||||||
from .libs.replication.replication.data import ReplicatedDataFactory
|
from .libs.replication.replication.data import ReplicatedDataFactory
|
||||||
from .libs.replication.replication.exception import NonAuthorizedOperationError
|
from .libs.replication.replication.exception import NonAuthorizedOperationError
|
||||||
from .libs.replication.replication.interface import Session
|
from .libs.replication.replication.interface import Session
|
||||||
@ -109,7 +127,9 @@ class SessionStartOperator(bpy.types.Operator):
|
|||||||
id=settings.username,
|
id=settings.username,
|
||||||
address=settings.ip,
|
address=settings.ip,
|
||||||
port=settings.port,
|
port=settings.port,
|
||||||
ipc_port=settings.ipc_port)
|
ipc_port=settings.ipc_port,
|
||||||
|
timeout=settings.connection_timeout
|
||||||
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.report({'ERROR'}, repr(e))
|
self.report({'ERROR'}, repr(e))
|
||||||
logger.error(f"Error: {e}")
|
logger.error(f"Error: {e}")
|
||||||
@ -125,7 +145,8 @@ class SessionStartOperator(bpy.types.Operator):
|
|||||||
id=settings.username,
|
id=settings.username,
|
||||||
address=settings.ip,
|
address=settings.ip,
|
||||||
port=settings.port,
|
port=settings.port,
|
||||||
ipc_port=settings.ipc_port
|
ipc_port=settings.ipc_port,
|
||||||
|
timeout=settings.connection_timeout
|
||||||
)
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.report({'ERROR'}, repr(e))
|
self.report({'ERROR'}, repr(e))
|
||||||
@ -178,6 +199,36 @@ class SessionStopOperator(bpy.types.Operator):
|
|||||||
|
|
||||||
return {"FINISHED"}
|
return {"FINISHED"}
|
||||||
|
|
||||||
|
class SessionKickOperator(bpy.types.Operator):
|
||||||
|
bl_idname = "session.kick"
|
||||||
|
bl_label = "Kick"
|
||||||
|
bl_description = "Kick the user"
|
||||||
|
bl_options = {"REGISTER"}
|
||||||
|
|
||||||
|
user: bpy.props.StringProperty()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
return True
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
global client, delayables, stop_modal_executor
|
||||||
|
assert(client)
|
||||||
|
|
||||||
|
try:
|
||||||
|
client.kick(self.user)
|
||||||
|
except Exception as e:
|
||||||
|
self.report({'ERROR'}, repr(e))
|
||||||
|
|
||||||
|
return {"FINISHED"}
|
||||||
|
|
||||||
|
def invoke(self, context, event):
|
||||||
|
return context.window_manager.invoke_props_dialog(self)
|
||||||
|
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
row = self.layout
|
||||||
|
row.label(text=f" Do you really want to kick {self.user} ? " )
|
||||||
|
|
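Since the operator registers the bl_idname "session.kick", it should be reachable from scripts roughly as below; the username is purely illustrative and a hosted session is assumed:

```python
import bpy

# bl_idname "session.kick" is exposed as bpy.ops.session.kick;
# "guest_user" stands in for a connected user's name.
bpy.ops.session.kick(user="guest_user")
```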
||||||
class SessionPropertyRemoveOperator(bpy.types.Operator):
|
class SessionPropertyRemoveOperator(bpy.types.Operator):
|
||||||
bl_idname = "session.remove_prop"
|
bl_idname = "session.remove_prop"
|
||||||
@ -284,6 +335,10 @@ class SessionSnapUserOperator(bpy.types.Operator):
|
|||||||
target_ref = client.online_users.get(self.target_client)
|
target_ref = client.online_users.get(self.target_client)
|
||||||
|
|
||||||
if target_ref:
|
if target_ref:
|
||||||
|
target_scene = target_ref['metadata']['scene_current']
|
||||||
|
if target_scene != context.scene.name:
|
||||||
|
bpy.context.window.scene = bpy.data.scenes[target_scene]
|
||||||
|
|
||||||
rv3d.view_matrix = mathutils.Matrix(
|
rv3d.view_matrix = mathutils.Matrix(
|
||||||
target_ref['metadata']['view_matrix'])
|
target_ref['metadata']['view_matrix'])
|
||||||
else:
|
else:
|
||||||
@ -439,6 +494,7 @@ classes = (
|
|||||||
SessionApply,
|
SessionApply,
|
||||||
SessionCommit,
|
SessionCommit,
|
||||||
ApplyArmatureOperator,
|
ApplyArmatureOperator,
|
||||||
|
SessionKickOperator,
|
||||||
|
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -451,19 +507,7 @@ def load_pre_handler(dummy):
|
|||||||
bpy.ops.session.stop()
|
bpy.ops.session.stop()
|
||||||
|
|
||||||
|
|
||||||
@persistent
|
|
||||||
def sanitize_deps_graph(dummy):
|
|
||||||
"""sanitize deps graph
|
|
||||||
|
|
||||||
Temporary solution to resolve each node pointers after a Undo.
|
|
||||||
A future solution should be to avoid storing dataclock reference...
|
|
||||||
|
|
||||||
"""
|
|
||||||
global client
|
|
||||||
|
|
||||||
if client and client.state['STATE'] in [STATE_ACTIVE]:
|
|
||||||
for node_key in client.list():
|
|
||||||
client.get(node_key).resolve()
|
|
||||||
|
|
||||||
|
|
||||||
@persistent
|
@persistent
|
||||||
@ -493,12 +537,11 @@ def depsgraph_evaluation(scene):
|
|||||||
# - if its ours or ( under common and diff), launch the
|
# - if its ours or ( under common and diff), launch the
|
||||||
# update process
|
# update process
|
||||||
# - if its to someone else, ignore the update (go deeper ?)
|
# - if its to someone else, ignore the update (go deeper ?)
|
||||||
if node.owner in [client.id, 'COMMON']:
|
if node.owner in [client.id, 'COMMON'] and node.state == UP:
|
||||||
# Avoid slow geometry update
|
# Avoid slow geometry update
|
||||||
if 'EDIT' in context.mode:
|
if 'EDIT' in context.mode:
|
||||||
break
|
break
|
||||||
|
|
||||||
logger.error(node.data['name'])
|
|
||||||
client.stash(node.uuid)
|
client.stash(node.uuid)
|
||||||
else:
|
else:
|
||||||
# Distant update
|
# Distant update
|
||||||
@ -515,8 +558,6 @@ def register():
|
|||||||
|
|
||||||
bpy.app.handlers.load_pre.append(load_pre_handler)
|
bpy.app.handlers.load_pre.append(load_pre_handler)
|
||||||
|
|
||||||
bpy.app.handlers.undo_post.append(sanitize_deps_graph)
|
|
||||||
bpy.app.handlers.redo_post.append(sanitize_deps_graph)
|
|
||||||
|
|
||||||
bpy.app.handlers.frame_change_pre.append(update_client_frame)
|
bpy.app.handlers.frame_change_pre.append(update_client_frame)
|
||||||
|
|
||||||
@ -536,8 +577,6 @@ def unregister():
|
|||||||
|
|
||||||
bpy.app.handlers.load_pre.remove(load_pre_handler)
|
bpy.app.handlers.load_pre.remove(load_pre_handler)
|
||||||
|
|
||||||
bpy.app.handlers.undo_post.remove(sanitize_deps_graph)
|
|
||||||
bpy.app.handlers.redo_post.remove(sanitize_deps_graph)
|
|
||||||
|
|
||||||
bpy.app.handlers.frame_change_pre.remove(update_client_frame)
|
bpy.app.handlers.frame_change_pre.remove(update_client_frame)
|
||||||
|
|
||||||
|
@ -1,10 +1,44 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
import random
|
||||||
import logging
|
import logging
|
||||||
import bpy
|
import bpy
|
||||||
|
import string
|
||||||
|
|
||||||
from . import utils, bl_types, environment
|
from . import utils, bl_types, environment, addon_updater_ops, presence
|
||||||
|
from .libs.replication.replication.constants import RP_COMMON
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
def randomColor():
|
||||||
|
"""Generate a random color """
|
||||||
|
r = random.random()
|
||||||
|
v = random.random()
|
||||||
|
b = random.random()
|
||||||
|
return [r, v, b]
|
||||||
|
|
||||||
|
|
||||||
|
def random_string_digits(stringLength=6):
|
||||||
|
"""Generate a random string of letters and digits """
|
||||||
|
lettersAndDigits = string.ascii_letters + string.digits
|
||||||
|
return ''.join(random.choices(lettersAndDigits, k=stringLength))
|
||||||
|
|
||||||
|
|
||||||
class ReplicatedDatablock(bpy.types.PropertyGroup):
|
class ReplicatedDatablock(bpy.types.PropertyGroup):
|
||||||
type_name: bpy.props.StringProperty()
|
type_name: bpy.props.StringProperty()
|
||||||
bl_name: bpy.props.StringProperty()
|
bl_name: bpy.props.StringProperty()
|
||||||
@ -14,8 +48,16 @@ class ReplicatedDatablock(bpy.types.PropertyGroup):
|
|||||||
auto_push: bpy.props.BoolProperty(default=True)
|
auto_push: bpy.props.BoolProperty(default=True)
|
||||||
icon: bpy.props.StringProperty()
|
icon: bpy.props.StringProperty()
|
||||||
|
|
||||||
|
|
||||||
|
class ReplicationFlags(bpy.types.PropertyGroup):
|
||||||
|
sync_render_settings: bpy.props.BoolProperty(
|
||||||
|
name="Synchronize render settings",
|
||||||
|
description="Synchronize render settings (eevee and cycles only)",
|
||||||
|
default=True)
|
||||||
|
|
||||||
|
|
||||||
class SessionPrefs(bpy.types.AddonPreferences):
|
class SessionPrefs(bpy.types.AddonPreferences):
|
||||||
bl_idname = __package__
|
bl_idname = __package__
|
||||||
|
|
||||||
ip: bpy.props.StringProperty(
|
ip: bpy.props.StringProperty(
|
||||||
name="ip",
|
name="ip",
|
||||||
@ -23,29 +65,32 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
|||||||
default="127.0.0.1")
|
default="127.0.0.1")
|
||||||
username: bpy.props.StringProperty(
|
username: bpy.props.StringProperty(
|
||||||
name="Username",
|
name="Username",
|
||||||
default="user_{}".format(utils.random_string_digits())
|
default=f"user_{random_string_digits()}"
|
||||||
)
|
)
|
||||||
client_color: bpy.props.FloatVectorProperty(
|
client_color: bpy.props.FloatVectorProperty(
|
||||||
name="client_instance_color",
|
name="client_instance_color",
|
||||||
subtype='COLOR',
|
subtype='COLOR',
|
||||||
default=utils.randomColor())
|
default=randomColor())
|
||||||
port: bpy.props.IntProperty(
|
port: bpy.props.IntProperty(
|
||||||
name="port",
|
name="port",
|
||||||
description='Distant host port',
|
description='Distant host port',
|
||||||
default=5555
|
default=5555
|
||||||
)
|
)
|
||||||
|
sync_flags: bpy.props.PointerProperty(
|
||||||
|
type=ReplicationFlags
|
||||||
|
)
|
||||||
supported_datablocks: bpy.props.CollectionProperty(
|
supported_datablocks: bpy.props.CollectionProperty(
|
||||||
type=ReplicatedDatablock,
|
type=ReplicatedDatablock,
|
||||||
)
|
)
|
||||||
ipc_port: bpy.props.IntProperty(
|
ipc_port: bpy.props.IntProperty(
|
||||||
name="ipc_port",
|
name="ipc_port",
|
||||||
description='internal ttl port (only useful for multiple local instances)',
|
||||||
default=5561
|
default=5561
|
||||||
)
|
)
|
||||||
start_empty: bpy.props.BoolProperty(
|
start_empty: bpy.props.BoolProperty(
|
||||||
name="start_empty",
|
name="start_empty",
|
||||||
default=False
|
default=False
|
||||||
)
|
)
|
||||||
right_strategy: bpy.props.EnumProperty(
|
right_strategy: bpy.props.EnumProperty(
|
||||||
name='right_strategy',
|
name='right_strategy',
|
||||||
description='right strategy',
|
description='right strategy',
|
||||||
@ -57,17 +102,21 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
|||||||
name="cache directory",
|
name="cache directory",
|
||||||
subtype="DIR_PATH",
|
subtype="DIR_PATH",
|
||||||
default=environment.DEFAULT_CACHE_DIR)
|
default=environment.DEFAULT_CACHE_DIR)
|
||||||
|
connection_timeout: bpy.props.IntProperty(
|
||||||
|
name='connection timeout',
|
||||||
|
description='connection timeout before disconnection',
|
||||||
|
default=1000
|
||||||
|
)
|
||||||
# for UI
|
# for UI
|
||||||
# category: bpy.props.EnumProperty(
|
category: bpy.props.EnumProperty(
|
||||||
# name="Category",
|
name="Category",
|
||||||
# description="Preferences Category",
|
description="Preferences Category",
|
||||||
# items=[
|
items=[
|
||||||
# ('INFO', "Information", "Information about this add-on"),
|
('CONFIG', "Configuration", "Configuration about this add-on"),
|
||||||
# ('CONFIG', "Configuration", "Configuration about this add-on"),
|
('UPDATE', "Update", "Update this add-on"),
|
||||||
# ('UPDATE', "Update", "Update this add-on"),
|
],
|
||||||
# ],
|
default='CONFIG'
|
||||||
# default='INFO'
|
)
|
||||||
# )
|
|
||||||
conf_session_identity_expanded: bpy.props.BoolProperty(
|
conf_session_identity_expanded: bpy.props.BoolProperty(
|
||||||
name="Identity",
|
name="Identity",
|
||||||
description="Identity",
|
description="Identity",
|
||||||
@ -94,82 +143,115 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
|||||||
default=False
|
default=False
|
||||||
)
|
)
|
||||||
|
|
||||||
|
auto_check_update: bpy.props.BoolProperty(
|
||||||
|
name="Auto-check for Update",
|
||||||
|
description="If enabled, auto-check for updates using an interval",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
updater_intrval_months: bpy.props.IntProperty(
|
||||||
|
name='Months',
|
||||||
|
description="Number of months between checking for updates",
|
||||||
|
default=0,
|
||||||
|
min=0
|
||||||
|
)
|
||||||
|
updater_intrval_days: bpy.props.IntProperty(
|
||||||
|
name='Days',
|
||||||
|
description="Number of days between checking for updates",
|
||||||
|
default=7,
|
||||||
|
min=0,
|
||||||
|
max=31
|
||||||
|
)
|
||||||
|
updater_intrval_hours: bpy.props.IntProperty(
|
||||||
|
name='Hours',
|
||||||
|
description="Number of hours between checking for updates",
|
||||||
|
default=0,
|
||||||
|
min=0,
|
||||||
|
max=23
|
||||||
|
)
|
||||||
|
updater_intrval_minutes: bpy.props.IntProperty(
|
||||||
|
name='Minutes',
|
||||||
|
description="Number of minutes between checking for updates",
|
||||||
|
default=0,
|
||||||
|
min=0,
|
||||||
|
max=59
|
||||||
|
)
|
||||||
|
|
||||||
def draw(self, context):
|
def draw(self, context):
|
||||||
layout = self.layout
|
layout = self.layout
|
||||||
|
|
||||||
# layout.row().prop(self, "category", expand=True)
|
layout.row().prop(self, "category", expand=True)
|
||||||
|
|
||||||
# if self.category == 'INFO':
|
if self.category == 'CONFIG':
|
||||||
# layout.separator()
|
grid = layout.column()
|
||||||
# layout.label(text="Enable real-time collaborative workflow inside blender")
|
|
||||||
# if self.category == 'CONFIG':
|
|
||||||
grid = layout.column()
|
|
||||||
|
|
||||||
# USER INFORMATIONS
|
# USER INFORMATIONS
|
||||||
box = grid.box()
|
box = grid.box()
|
||||||
box.prop(
|
box.prop(
|
||||||
self, "conf_session_identity_expanded", text="User informations",
|
self, "conf_session_identity_expanded", text="User informations",
|
||||||
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_identity_expanded
|
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_identity_expanded
|
||||||
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
|
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
|
||||||
if self.conf_session_identity_expanded:
|
if self.conf_session_identity_expanded:
|
||||||
box.row().prop(self, "username", text="name")
|
box.row().prop(self, "username", text="name")
|
||||||
box.row().prop(self, "client_color", text="color")
|
box.row().prop(self, "client_color", text="color")
|
||||||
|
|
||||||
# NETWORK SETTINGS
|
# NETWORK SETTINGS
|
||||||
box = grid.box()
|
box = grid.box()
|
||||||
box.prop(
|
box.prop(
|
||||||
self, "conf_session_net_expanded", text="Netorking",
|
self, "conf_session_net_expanded", text="Netorking",
|
||||||
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_net_expanded
|
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_net_expanded
|
||||||
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
|
|
||||||
|
|
||||||
if self.conf_session_net_expanded:
|
|
||||||
box.row().prop(self, "ip", text="Address")
|
|
||||||
row = box.row()
|
|
||||||
row.label(text="Port:")
|
|
||||||
row.prop(self, "port", text="Address")
|
|
||||||
row = box.row()
|
|
||||||
row.label(text="Start with an empty scene:")
|
|
||||||
row.prop(self, "start_empty", text="")
|
|
||||||
|
|
||||||
table = box.box()
|
|
||||||
table.row().prop(
|
|
||||||
self, "conf_session_timing_expanded", text="Refresh rates",
|
|
||||||
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_timing_expanded
|
|
||||||
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
|
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
|
||||||
|
|
||||||
if self.conf_session_timing_expanded:
|
if self.conf_session_net_expanded:
|
||||||
line = table.row()
|
box.row().prop(self, "ip", text="Address")
|
||||||
line.label(text=" ")
|
row = box.row()
|
||||||
line.separator()
|
row.label(text="Port:")
|
||||||
line.label(text="refresh (sec)")
|
row.prop(self, "port", text="Address")
|
||||||
line.label(text="apply (sec)")
|
row = box.row()
|
||||||
|
row.label(text="Start with an empty scene:")
|
||||||
|
row.prop(self, "start_empty", text="")
|
||||||
|
|
||||||
for item in self.supported_datablocks:
|
table = box.box()
|
||||||
line = table.row(align=True)
|
table.row().prop(
|
||||||
line.label(text="", icon=item.icon)
|
self, "conf_session_timing_expanded", text="Refresh rates",
|
||||||
line.prop(item, "bl_delay_refresh", text="")
|
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_timing_expanded
|
||||||
line.prop(item, "bl_delay_apply", text="")
|
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
|
||||||
# HOST SETTINGS
|
|
||||||
box = grid.box()
|
|
||||||
box.prop(
|
|
||||||
self, "conf_session_hosting_expanded", text="Hosting",
|
|
||||||
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_hosting_expanded
|
|
||||||
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
|
|
||||||
if self.conf_session_hosting_expanded:
|
|
||||||
box.row().prop(self, "right_strategy", text="Right model")
|
|
||||||
row = box.row()
|
|
||||||
row.label(text="Start with an empty scene:")
|
|
||||||
row.prop(self, "start_empty", text="")
|
|
||||||
|
|
||||||
# CACHE SETTINGS
|
if self.conf_session_timing_expanded:
|
||||||
box = grid.box()
|
line = table.row()
|
||||||
box.prop(
|
line.label(text=" ")
|
||||||
self, "conf_session_cache_expanded", text="Cache",
|
line.separator()
|
||||||
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_cache_expanded
|
line.label(text="refresh (sec)")
|
||||||
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
|
line.label(text="apply (sec)")
|
||||||
if self.conf_session_cache_expanded:
|
|
||||||
box.row().prop(self, "cache_directory", text="Cache directory")
|
for item in self.supported_datablocks:
|
||||||
|
line = table.row(align=True)
|
||||||
|
line.label(text="", icon=item.icon)
|
||||||
|
line.prop(item, "bl_delay_refresh", text="")
|
||||||
|
line.prop(item, "bl_delay_apply", text="")
|
||||||
|
# HOST SETTINGS
|
||||||
|
box = grid.box()
|
||||||
|
box.prop(
|
||||||
|
self, "conf_session_hosting_expanded", text="Hosting",
|
||||||
|
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_hosting_expanded
|
||||||
|
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
|
||||||
|
if self.conf_session_hosting_expanded:
|
||||||
|
box.row().prop(self, "right_strategy", text="Right model")
|
||||||
|
row = box.row()
|
||||||
|
row.label(text="Start with an empty scene:")
|
||||||
|
row.prop(self, "start_empty", text="")
|
||||||
|
|
||||||
|
# CACHE SETTINGS
|
||||||
|
box = grid.box()
|
||||||
|
box.prop(
|
||||||
|
self, "conf_session_cache_expanded", text="Cache",
|
||||||
|
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_cache_expanded
|
||||||
|
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
|
||||||
|
if self.conf_session_cache_expanded:
|
||||||
|
box.row().prop(self, "cache_directory", text="Cache directory")
|
||||||
|
|
||||||
|
if self.category == 'UPDATE':
|
||||||
|
from . import addon_updater_ops
|
||||||
|
addon_updater_ops.update_settings_ui_condensed(self, context)
|
||||||
|
|
||||||
def generate_supported_types(self):
|
def generate_supported_types(self):
|
||||||
self.supported_datablocks.clear()
|
self.supported_datablocks.clear()
|
||||||
@@ -184,16 +266,102 @@ class SessionPrefs(bpy.types.AddonPreferences):
            new_db.name = type_impl_name
            new_db.type_name = type_impl_name
            new_db.bl_delay_refresh = type_module_class.bl_delay_refresh
-           new_db.bl_delay_apply =type_module_class.bl_delay_apply
+           new_db.bl_delay_apply = type_module_class.bl_delay_apply
            new_db.use_as_filter = True
            new_db.icon = type_module_class.bl_icon
-           new_db.auto_push =type_module_class.bl_automatic_push
+           new_db.auto_push = type_module_class.bl_automatic_push
-           new_db.bl_name=type_module_class.bl_id
+           new_db.bl_name = type_module_class.bl_id


+def client_list_callback(scene, context):
+    from . import operators
+
+    items = [(RP_COMMON, RP_COMMON, "")]
+
+    username = utils.get_preferences().username
+    cli = operators.client
+    if cli:
+        client_ids = cli.online_users.keys()
+        for id in client_ids:
+            name_desc = id
+            if id == username:
+                name_desc += " (self)"
+
+            items.append((id, name_desc, ""))
+
+    return items
+
+
+class SessionUser(bpy.types.PropertyGroup):
+    """Session User
+
+    Blender user information property
+    """
+    username: bpy.props.StringProperty(name="username")
+    current_frame: bpy.props.IntProperty(name="current_frame")
+
+
+class SessionProps(bpy.types.PropertyGroup):
+    is_admin: bpy.props.BoolProperty(
+        name="is_admin",
+        default=False
+    )
+    session_mode: bpy.props.EnumProperty(
+        name='session_mode',
+        description='session mode',
+        items={
+            ('HOST', 'hosting', 'host a session'),
+            ('CONNECT', 'connexion', 'connect to a session')},
+        default='HOST')
+    clients: bpy.props.EnumProperty(
+        name="clients",
+        description="client enum",
+        items=client_list_callback)
+    enable_presence: bpy.props.BoolProperty(
+        name="Presence overlay",
+        description='Enable overlay drawing module',
+        default=True,
+        update=presence.update_presence
+    )
+    presence_show_selected: bpy.props.BoolProperty(
+        name="Show selected objects",
+        description='Enable selection overlay ',
+        default=True,
+        update=presence.update_overlay_settings
+    )
+    presence_show_user: bpy.props.BoolProperty(
+        name="Show users",
+        description='Enable user overlay ',
+        default=True,
+        update=presence.update_overlay_settings
+    )
+    presence_show_far_user: bpy.props.BoolProperty(
+        name="Show different scenes",
+        description="Show user on different scenes",
+        default=False,
+        update=presence.update_overlay_settings
+    )
+    filter_owned: bpy.props.BoolProperty(
+        name="filter_owned",
+        description='Show only owned datablocks',
+        default=True
+    )
+    user_snap_running: bpy.props.BoolProperty(
+        default=False
+    )
+    time_snap_running: bpy.props.BoolProperty(
+        default=False
+    )

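Since the UI code later reads these runtime settings through `context.window_manager.session`, a panel can expose the dynamic `clients` enum directly. A minimal sketch (not part of this commit), assuming `SessionProps` is bound to `bpy.types.WindowManager.session` at register time; the panel name is hypothetical:

import bpy

class SESSION_PT_example(bpy.types.Panel):
    bl_idname = "MULTIUSER_EXAMPLE_PT_panel"   # hypothetical panel, for illustration only
    bl_label = "Online users"
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'

    def draw(self, context):
        runtime_settings = context.window_manager.session
        layout = self.layout
        # items for "clients" come from client_list_callback above
        layout.prop(runtime_settings, "clients", text="user")
        layout.prop(runtime_settings, "enable_presence")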
classes = (
+   SessionUser,
+   SessionProps,
+   ReplicationFlags,
    ReplicatedDatablock,
    SessionPrefs,
)


def register():
    from bpy.utils import register_class
@@ -205,6 +373,7 @@ def register():
    logger.info('Generating bl_types preferences')
    prefs.generate_supported_types()


def unregister():
    from bpy.utils import unregister_class
@@ -1,3 +1,21 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+
import copy
import logging
import math
@@ -118,9 +136,7 @@ def get_view_matrix():
    area, region, rv3d = view3d_find()

    if area and region and rv3d:
-       matrix_dumper = utils.dump_anything.Dumper()
-
-       return matrix_dumper.dump(rv3d.view_matrix)
+       return [list(v) for v in rv3d.view_matrix]


def update_presence(self, context):
    global renderer
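The change above replaces the generic dumper with a plain nested list, which keeps the dumped view matrix easy to serialize. A hedged sketch of the reverse step (not taken from this commit): rebuilding a Blender matrix from the dumped rows.

import mathutils

# Sketch only: rebuild a matrix from the [list(v) for v in rv3d.view_matrix] dump.
def matrix_from_dump(rows):
    return mathutils.Matrix(rows)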
@@ -219,6 +235,10 @@ class DrawFactory(object):
        if ob.type == 'EMPTY':
            # TODO: Child case
            # Collection instance case
+           indices = (
+               (0, 1), (1, 2), (2, 3), (0, 3),
+               (4, 5), (5, 6), (6, 7), (4, 7),
+               (0, 4), (1, 5), (2, 6), (3, 7))
            if ob.instance_collection:
                for obj in ob.instance_collection.objects:
                    if obj.type == 'MESH':
@@ -1,3 +1,21 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+
import bpy

from . import operators, utils
@@ -6,7 +24,8 @@ from .libs.replication.replication.constants import (ADDED, ERROR, FETCHED,
                                                      STATE_ACTIVE, STATE_AUTH,
                                                      STATE_CONFIG, STATE_SYNCING,
                                                      STATE_INITIAL, STATE_SRV_SYNC,
-                                                     STATE_WAITING, STATE_QUITTING)
+                                                     STATE_WAITING, STATE_QUITTING,
+                                                     STATE_LAUNCHING_SERVICES)

ICONS_PROP_STATES = ['TRIA_DOWN', # ADDED
                     'TRIA_UP', # COMMITED
@@ -35,7 +54,7 @@ def printProgressBar (iteration, total, prefix = '', suffix = '', decimals = 1,
    return '{} |{}| {}/{}{}'.format(prefix, bar, iteration,total, suffix)

def get_state_str(state):
-   state_str = 'None'
+   state_str = 'UNKNOWN'
    if state == STATE_WAITING:
        state_str = 'WARMING UP DATA'
    elif state == STATE_SYNCING:
@@ -52,6 +71,9 @@ def get_state_str(state):
        state_str = 'INIT'
    elif state == STATE_QUITTING:
        state_str = 'QUITTING SESSION'
+   elif state == STATE_LAUNCHING_SERVICES:
+       state_str = 'LAUNCHING SERVICES'

    return state_str


class SESSION_PT_settings(bpy.types.Panel):
@@ -157,6 +179,9 @@ class SESSION_PT_settings_network(bpy.types.Panel):
            row = box.row()
            row.label(text="IPC Port:")
            row.prop(settings, "ipc_port", text="")
+           row = box.row()
+           row.label(text="Timeout (ms):")
+           row.prop(settings, "connection_timeout", text="")

        if runtime_settings.session_mode == 'HOST':
            row = box.row()
@@ -219,6 +244,9 @@ class SESSION_PT_settings_replication(bpy.types.Panel):

        # Right managment
        if runtime_settings.session_mode == 'HOST':
+           row = layout.row()
+           row.prop(settings.sync_flags,"sync_render_settings")

            row = layout.row(align=True)
            row.label(text="Right strategy:")
            row.prop(settings,"right_strategy",text="")
@@ -261,13 +289,15 @@ class SESSION_PT_user(bpy.types.Panel):
        selected_user = context.window_manager.user_index
        settings = utils.get_preferences()
        active_user = online_users[selected_user] if len(online_users)-1>=selected_user else 0
+       runtime_settings = context.window_manager.session

        # Create a simple row.
        row = layout.row()
        box = row.box()
-       split = box.split(factor=0.5)
+       split = box.split(factor=0.3)
        split.label(text="user")
+       split = split.split(factor=0.5)
+       split.label(text="localisation")
        split.label(text="frame")
        split.label(text="ping")

@@ -289,6 +319,12 @@ class SESSION_PT_user(bpy.types.Panel):
            text="",
            icon='TIME').target_client = active_user.username

+       if runtime_settings.session_mode == 'HOST':
+           user_operations.operator(
+               "session.kick",
+               text="",
+               icon='CANCEL').user = active_user.username


class SESSION_UL_users(bpy.types.UIList):
    def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index, flt_flag):
@@ -297,6 +333,7 @@ class SESSION_UL_users(bpy.types.UIList):
        is_local_user = item.username == settings.username
        ping = '-'
        frame_current = '-'
+       scene_current = '-'
        if session:
            user = session.online_users.get(item.username)
            if user:
@@ -304,9 +341,11 @@ class SESSION_UL_users(bpy.types.UIList):
                metadata = user.get('metadata')
                if metadata and 'frame_current' in metadata:
                    frame_current = str(metadata['frame_current'])
+                   scene_current = metadata['scene_current']
-       split = layout.split(factor=0.5)
+       split = layout.split(factor=0.3)
        split.label(text=item.username)
+       split = split.split(factor=0.5)
+       split.label(text=scene_current)
        split.label(text=frame_current)
        split.label(text=ping)

@@ -336,7 +375,10 @@ class SESSION_PT_presence(bpy.types.Panel):
        col = layout.column()
        col.prop(settings,"presence_show_selected")
        col.prop(settings,"presence_show_user")
-       row = layout.row()
+       row = layout.column()
+       row.active = settings.presence_show_user
+       row.prop(settings,"presence_show_far_user")


class SESSION_PT_services(bpy.types.Panel):
    bl_idname = "MULTIUSER_SERVICE_PT_panel"
@@ -1,9 +1,26 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+
import json
import logging
import os
-import random
-import string
import sys
+import time
from uuid import uuid4
from collections.abc import Iterable
@@ -11,22 +28,10 @@ import bpy
import mathutils

from . import environment, presence
-from .libs import dump_anything

logger = logging.getLogger(__name__)
logger.setLevel(logging.WARNING)

-def has_action(target):
-    return (hasattr(target, 'animation_data')
-            and target.animation_data
-            and target.animation_data.action)
-
-
-def has_driver(target):
-    return (hasattr(target, 'animation_data')
-            and target.animation_data
-            and target.animation_data.drivers)
-
-
def find_from_attr(attr_name, attr_value, list):
    for item in list:
@@ -54,19 +59,6 @@ def get_datablock_users(datablock):
    return users


-def random_string_digits(stringLength=6):
-    """Generate a random string of letters and digits """
-    lettersAndDigits = string.ascii_letters + string.digits
-    return ''.join(random.choices(lettersAndDigits, k=stringLength))
-
-
-def randomColor():
-    r = random.random()
-    v = random.random()
-    b = random.random()
-    return [r, v, b]
-
-
def clean_scene():
    for type_name in dir(bpy.data):
        try:
@@ -77,89 +69,10 @@ def clean_scene():
            continue


-def revers(d):
-    l = []
-    for i in d:
-        l.append(i)
-
-    return l[::-1]
-
-
-def get_armature_edition_context(armature):
-
-    override = {}
-    # Set correct area
-    for area in bpy.data.window_managers[0].windows[0].screen.areas:
-        if area.type == 'VIEW_3D':
-            override = bpy.context.copy()
-            override['area'] = area
-            break
-
-    # Set correct armature settings
-    override['window'] = bpy.data.window_managers[0].windows[0]
-    override['screen'] = bpy.data.window_managers[0].windows[0].screen
-    override['mode'] = 'EDIT_ARMATURE'
-    override['active_object'] = armature
-    override['selected_objects'] = [armature]
-
-    for o in bpy.data.objects:
-        if o.data == armature:
-            override['edit_object'] = o
-
-            break
-
-    return override
-
-
def get_selected_objects(scene, active_view_layer):
    return [obj.uuid for obj in scene.objects if obj.select_get(view_layer=active_view_layer)]


-def load_dict(src_dict, target):
-    try:
-        for item in src_dict:
-            # attr =
-            setattr(target, item, src_dict[item])
-
-    except Exception as e:
-        logger.error(e)
-        pass
-
-
-def dump_datablock(datablock, depth):
-    if datablock:
-        dumper = dump_anything.Dumper()
-        dumper.type_subset = dumper.match_subset_all
-        dumper.depth = depth
-
-        datablock_type = datablock.bl_rna.name
-        key = "{}/{}".format(datablock_type, datablock.name)
-        data = dumper.dump(datablock)
-
-        return data
-
-
-def dump_datablock_attibutes(datablock=None, attributes=[], depth=1, dickt=None):
-    if datablock:
-        dumper = dump_anything.Dumper()
-        dumper.type_subset = dumper.match_subset_all
-        dumper.depth = depth
-
-        datablock_type = datablock.bl_rna.name
-
-        data = {}
-
-        if dickt:
-            data = dickt
-        for attr in attributes:
-            try:
-                data[attr] = dumper.dump(getattr(datablock, attr))
-            except:
-                pass
-
-        return data
-
-
def resolve_from_id(id, optionnal_type=None):
    for category in dir(bpy.data):
        root = getattr(bpy.data, category)
@@ -171,3 +84,6 @@

def get_preferences():
    return bpy.context.preferences.addons[__package__].preferences
+
+def current_milli_time():
+    return int(round(time.time() * 1000))
25
scripts/test_addon.py
Normal file
@@ -0,0 +1,25 @@
import sys
try:
    import blender_addon_tester as BAT
except Exception as e:
    print(e)
    sys.exit(1)

def main():
    if len(sys.argv) > 1:
        addon = sys.argv[1]
    else:
        addon = "multi_user"
    if len(sys.argv) > 2:
        blender_rev = sys.argv[2]
    else:
        blender_rev = "2.82"

    try:
        exit_val = BAT.test_blender_addon(addon_path=addon, blender_revision=blender_rev)
    except Exception as e:
        print(e)
        exit_val = 1
    sys.exit(exit_val)

main()
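Since main() only reads two optional positional arguments (addon path, then Blender revision), the script can also be run locally. The call below is an assumed local invocation, equivalent to `python scripts/test_addon.py multi_user 2.82`; the defaults shown come from the script itself.

# Sketch only: invoking the test entry point programmatically instead of from a shell.
import subprocess
import sys

exit_code = subprocess.call([sys.executable, "scripts/test_addon.py", "multi_user", "2.82"])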
25
tests/test_bl_types/conftest.py
Normal file
@@ -0,0 +1,25 @@
import os

import pytest

import bpy


@pytest.fixture
def clear_blend():
    """ Remove all datablocks of a blend
    """
    for type_name in dir(bpy.data):
        try:
            type_collection = getattr(bpy.data, type_name)
            for item in type_collection:
                type_collection.remove(item)
        except Exception:
            continue


@pytest.fixture
def load_blendfile(blendname):
    print(f"loading {blendname}")
    dir_path = os.path.dirname(os.path.realpath(__file__))
    bpy.ops.wm.open_mainfile(filepath=os.path.join(dir_path, blendname))
38
tests/test_bl_types/test_action.py
Normal file
@@ -0,0 +1,38 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_action import BlAction

INTERPOLATION = ['CONSTANT', 'LINEAR', 'BEZIER', 'SINE', 'QUAD', 'CUBIC', 'QUART', 'QUINT', 'EXPO', 'CIRC', 'BACK', 'BOUNCE', 'ELASTIC']

# @pytest.mark.parametrize('blendname', ['test_action.blend'])
def test_action(clear_blend):
    # Generate a random action
    datablock = bpy.data.actions.new("sdsad")
    fcurve_sample = datablock.fcurves.new('location')
    fcurve_sample.keyframe_points.add(100)
    datablock.id_root = 'MESH'

    for i, point in enumerate(fcurve_sample.keyframe_points):
        point.co[0] = i
        point.co[1] = random.randint(-10,10)
        point.interpolation = INTERPOLATION[random.randint(0, len(INTERPOLATION)-1)]

    bpy.ops.mesh.primitive_plane_add()
    bpy.data.objects[0].animation_data_create()
    bpy.data.objects[0].animation_data.action = datablock

    # Test
    implementation = BlAction()
    expected = implementation._dump(datablock)
    bpy.data.actions.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
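Every type test in this commit repeats the same dump, construct, load, dump cycle and compares the two dumps with DeepDiff. A possible shared helper (not part of this commit) is sketched below, under the assumption that each bl_types implementation exposes the `_dump`, `_construct` and `_load` methods used above.

from deepdiff import DeepDiff

# Hypothetical helper: one round-trip assertion shared by the bl_types tests.
def assert_roundtrip(implementation, datablock, remove=None):
    expected = implementation._dump(datablock)
    if remove is not None:
        remove(datablock)          # e.g. bpy.data.actions.remove
    rebuilt = implementation._construct(expected)
    implementation._load(expected, rebuilt)
    assert not DeepDiff(expected, implementation._dump(rebuilt))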
22
tests/test_bl_types/test_armature.py
Normal file
@@ -0,0 +1,22 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_armature import BlArmature

def test_armature(clear_blend):
    bpy.ops.object.armature_add()
    datablock = bpy.data.armatures[0]

    implementation = BlArmature()
    expected = implementation._dump(datablock)
    bpy.data.armatures.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
25
tests/test_bl_types/test_camera.py
Normal file
@@ -0,0 +1,25 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
from multi_user.bl_types.bl_camera import BlCamera


@pytest.mark.parametrize('camera_type', ['PANO','PERSP','ORTHO'])
def test_camera(clear_blend, camera_type):
    bpy.ops.object.camera_add()

    datablock = bpy.data.cameras[0]
    datablock.type = camera_type

    camera_dumper = BlCamera()
    expected = camera_dumper._dump(datablock)
    bpy.data.cameras.remove(datablock)

    test = camera_dumper._construct(expected)
    camera_dumper._load(expected, test)
    result = camera_dumper._dump(test)

    assert not DeepDiff(expected, result)
28
tests/test_bl_types/test_collection.py
Normal file
@@ -0,0 +1,28 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_collection import BlCollection

def test_collection(clear_blend):
    # Generate a collection with childrens and a cube
    datablock = bpy.data.collections.new("root")
    datablock.children.link(bpy.data.collections.new("child"))
    datablock.children.link(bpy.data.collections.new("child2"))

    bpy.ops.mesh.primitive_cube_add()
    datablock.objects.link(bpy.data.objects[0])

    # Test
    implementation = BlCollection()
    expected = implementation._dump(datablock)
    bpy.data.collections.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
29
tests/test_bl_types/test_curve.py
Normal file
@@ -0,0 +1,29 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_curve import BlCurve

@pytest.mark.parametrize('curve_type', ['TEXT','BEZIER'])
def test_curve(clear_blend, curve_type):
    if curve_type == 'TEXT':
        bpy.ops.object.text_add(enter_editmode=False, align='WORLD', location=(0, 0, 0))
    elif curve_type == 'BEZIER':
        bpy.ops.curve.primitive_bezier_curve_add(enter_editmode=False, align='WORLD', location=(0, 0, 0))
    else: #TODO: NURBS support
        bpy.ops.surface.primitive_nurbs_surface_curve_add(radius=1, enter_editmode=False, align='WORLD', location=(0, 0, 0))

    datablock = bpy.data.curves[0]

    implementation = BlCurve()
    expected = implementation._dump(datablock)
    bpy.data.curves.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
23
tests/test_bl_types/test_gpencil.py
Normal file
@@ -0,0 +1,23 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
from multi_user.bl_types.bl_gpencil import BlGpencil


def test_gpencil(clear_blend):
    bpy.ops.object.gpencil_add(type='MONKEY')

    datablock = bpy.data.grease_pencils[0]

    implementation = BlGpencil()
    expected = implementation._dump(datablock)
    bpy.data.grease_pencils.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
21
tests/test_bl_types/test_image.py
Normal file
@@ -0,0 +1,21 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_image import BlImage

def test_image(clear_blend):
    datablock = bpy.data.images.new('asd',2000,2000)

    implementation = BlImage()
    expected = implementation._dump(datablock)
    bpy.data.images.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
23
tests/test_bl_types/test_lattice.py
Normal file
@@ -0,0 +1,23 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
from multi_user.bl_types.bl_lattice import BlLattice


def test_lattice(clear_blend):
    bpy.ops.object.add(type='LATTICE', enter_editmode=False, align='WORLD', location=(0, 0, 0))

    datablock = bpy.data.lattices[0]

    implementation = BlLattice()
    expected = implementation._dump(datablock)
    bpy.data.lattices.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
24
tests/test_bl_types/test_lightprobes.py
Normal file
@@ -0,0 +1,24 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
from multi_user.bl_types.bl_lightprobe import BlLightprobe


@pytest.mark.skipif(bpy.app.version[1] < 83, reason="requires blender 2.83 or higher")
@pytest.mark.parametrize('lightprobe_type', ['PLANAR','GRID','CUBEMAP'])
def test_lightprobes(clear_blend, lightprobe_type):
    bpy.ops.object.lightprobe_add(type=lightprobe_type)

    blender_light = bpy.data.lightprobes[0]
    lightprobe_dumper = BlLightprobe()
    expected = lightprobe_dumper._dump(blender_light)
    bpy.data.lightprobes.remove(blender_light)

    test = lightprobe_dumper._construct(expected)
    lightprobe_dumper._load(expected, test)
    result = lightprobe_dumper._dump(test)

    assert not DeepDiff(expected, result)
23
tests/test_bl_types/test_lights.py
Normal file
@@ -0,0 +1,23 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
from multi_user.bl_types.bl_light import BlLight


@pytest.mark.parametrize('light_type', ['SPOT','SUN','POINT','AREA'])
def test_light(clear_blend, light_type):
    bpy.ops.object.light_add(type=light_type)

    blender_light = bpy.data.lights[0]
    light_dumper = BlLight()
    expected = light_dumper._dump(blender_light)
    bpy.data.lights.remove(blender_light)

    test = light_dumper._construct(expected)
    light_dumper._load(expected, test)
    result = light_dumper._dump(test)

    assert not DeepDiff(expected, result)
28
tests/test_bl_types/test_material.py
Normal file
@@ -0,0 +1,28 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
from multi_user.bl_types.bl_material import BlMaterial


def test_material(clear_blend):
    nodes_types = [node.bl_rna.identifier for node in bpy.types.ShaderNode.__subclasses__()]

    datablock = bpy.data.materials.new("test")
    datablock.use_nodes = True
    bpy.data.materials.create_gpencil_data(datablock)

    for ntype in nodes_types:
        datablock.node_tree.nodes.new(ntype)

    implementation = BlMaterial()
    expected = implementation._dump(datablock)
    bpy.data.materials.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
28
tests/test_bl_types/test_mesh.py
Normal file
@@ -0,0 +1,28 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_mesh import BlMesh

@pytest.mark.parametrize('mesh_type', ['EMPTY','FILLED'])
def test_mesh(clear_blend, mesh_type):
    if mesh_type == 'FILLED':
        bpy.ops.mesh.primitive_monkey_add()
    elif mesh_type == 'EMPTY':
        bpy.data.meshes.new('empty_mesh')

    datablock = bpy.data.meshes[0]

    # Test
    implementation = BlMesh()
    expected = implementation._dump(datablock)
    bpy.data.meshes.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
23
tests/test_bl_types/test_metaball.py
Normal file
@@ -0,0 +1,23 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
from multi_user.bl_types.bl_metaball import BlMetaball


@pytest.mark.parametrize('metaballs_type', ['PLANE','CAPSULE','BALL','ELLIPSOID','CUBE'])
def test_metaball(clear_blend, metaballs_type):
    bpy.ops.object.metaball_add(type=metaballs_type)

    datablock = bpy.data.metaballs[0]
    dumper = BlMetaball()
    expected = dumper._dump(datablock)
    bpy.data.metaballs.remove(datablock)

    test = dumper._construct(expected)
    dumper._load(expected, test)
    result = dumper._dump(test)

    assert not DeepDiff(expected, result)
64
tests/test_bl_types/test_object.py
Normal file
@@ -0,0 +1,64 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_object import BlObject

# Removed 'BUILD' modifier because the seed doesn't seems to be
# correctly initialized (#TODO: report the bug)
MOFIFIERS_TYPES = [
    'DATA_TRANSFER', 'MESH_CACHE', 'MESH_SEQUENCE_CACHE',
    'NORMAL_EDIT', 'WEIGHTED_NORMAL', 'UV_PROJECT', 'UV_WARP',
    'VERTEX_WEIGHT_EDIT', 'VERTEX_WEIGHT_MIX',
    'VERTEX_WEIGHT_PROXIMITY', 'ARRAY', 'BEVEL', 'BOOLEAN',
    'DECIMATE', 'EDGE_SPLIT', 'MASK', 'MIRROR',
    'MULTIRES', 'REMESH', 'SCREW', 'SKIN', 'SOLIDIFY',
    'SUBSURF', 'TRIANGULATE',
    'WELD', 'WIREFRAME', 'ARMATURE', 'CAST', 'CURVE',
    'DISPLACE', 'HOOK', 'LAPLACIANDEFORM', 'LATTICE',
    'MESH_DEFORM', 'SHRINKWRAP', 'SIMPLE_DEFORM', 'SMOOTH',
    'CORRECTIVE_SMOOTH', 'LAPLACIANSMOOTH', 'SURFACE_DEFORM',
    'WARP', 'WAVE', 'CLOTH', 'COLLISION', 'DYNAMIC_PAINT',
    'EXPLODE', 'FLUID', 'OCEAN', 'PARTICLE_INSTANCE',
    'SOFT_BODY', 'SURFACE']

CONSTRAINTS_TYPES = [
    'CAMERA_SOLVER', 'FOLLOW_TRACK', 'OBJECT_SOLVER', 'COPY_LOCATION',
    'COPY_ROTATION', 'COPY_SCALE', 'COPY_TRANSFORMS', 'LIMIT_DISTANCE',
    'LIMIT_LOCATION', 'LIMIT_ROTATION', 'LIMIT_SCALE', 'MAINTAIN_VOLUME',
    'TRANSFORM', 'TRANSFORM_CACHE', 'CLAMP_TO', 'DAMPED_TRACK', 'IK',
    'LOCKED_TRACK', 'SPLINE_IK', 'STRETCH_TO', 'TRACK_TO', 'ACTION',
    'ARMATURE', 'CHILD_OF', 'FLOOR', 'FOLLOW_PATH', 'PIVOT', 'SHRINKWRAP']

def test_object(clear_blend):
    bpy.ops.mesh.primitive_cube_add(
        enter_editmode=False, align='WORLD', location=(0, 0, 0))

    datablock = bpy.data.objects[0]

    # Add modifiers
    for mod_type in MOFIFIERS_TYPES:
        datablock.modifiers.new(mod_type,mod_type)

    # Add constraints
    for const_type in CONSTRAINTS_TYPES:
        datablock.constraints.new(const_type)

    datablock.vertex_groups.new(name='vg')
    datablock.vertex_groups.new(name='vg1')
    datablock.shape_key_add(name='shape')
    datablock.shape_key_add(name='shape2')


    implementation = BlObject()
    expected = implementation._dump(datablock)
    bpy.data.objects.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
22
tests/test_bl_types/test_scene.py
Normal file
@@ -0,0 +1,22 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_scene import BlScene

def test_scene(clear_blend):
    datablock = bpy.data.scenes.new("toto")

    # Test
    implementation = BlScene()
    expected = implementation._dump(datablock)
    bpy.data.scenes.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
22
tests/test_bl_types/test_speaker.py
Normal file
@@ -0,0 +1,22 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_speaker import BlSpeaker

def test_speaker(clear_blend):
    bpy.ops.object.speaker_add()
    datablock = bpy.data.speakers[0]

    implementation = BlSpeaker()
    expected = implementation._dump(datablock)
    bpy.data.speakers.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
22
tests/test_bl_types/test_world.py
Normal file
@@ -0,0 +1,22 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random
from multi_user.bl_types.bl_world import BlWorld

def test_world(clear_blend):
    datablock = bpy.data.worlds.new('test')
    datablock.use_nodes = True

    implementation = BlWorld()
    expected = implementation._dump(datablock)
    bpy.data.worlds.remove(datablock)

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)

    assert not DeepDiff(expected, result)
20
tests/test_operators.py
Normal file
@@ -0,0 +1,20 @@
import os

import pytest
from deepdiff import DeepDiff

import bpy
import random


def test_start_session():
    result = bpy.ops.session.start()

    assert 'FINISHED' in result

def test_stop_session():
    result = bpy.ops.session.stop()

    assert 'FINISHED' in result
Block a user