Compare commits

...

175 Commits

Author SHA1 Message Date
ca64797641 Merge branch 'develop' into 218-new-ui-ux-implementation 2021-07-26 17:51:01 +02:00
a49d9ee437 feat: server ping timeout preferences 2021-07-26 17:42:13 +02:00
4c1cd6b8f8 fix: review 2021-07-26 17:30:56 +02:00
d6cda709a6 fix: replication conflict 2021-07-26 15:46:29 +02:00
4bc0feb3a5 fix: ReferenceError in update_external dependency by removing orphan nodes. 2021-07-23 19:35:56 +02:00
59aab7159a fix: remove logging.info 2021-07-23 17:10:10 +02:00
0a798bb21b feat: clean files+add repository sync icons 2021-07-23 12:51:16 +02:00
beaafce4fa Merge branch 'develop' into 218-new-ui-ux-implementation 2021-07-22 11:01:59 +02:00
6f77337832 feat: request (with bug) 2021-07-22 10:55:18 +02:00
07252d62df feat: fonctional UI, no server pswd/ping 2021-07-22 09:38:01 +02:00
ac615cd134 feat: first+server list UI, ping/pswd unfonctional 2021-07-21 11:12:17 +02:00
a4f9f6e051 fix: replication dependencies conflicts 2021-07-20 16:19:53 +02:00
10de88cdc9 fix: old replication installation conflicts 2021-07-20 16:06:24 +02:00
e4fa34c984 fix: addon version number 2021-07-20 15:37:11 +02:00
0dd685d009 doc: add missing presence flags 2021-07-20 15:11:38 +02:00
3e8c30c0ab fix: supported datablocks in readme 2021-07-20 14:59:30 +02:00
21cc3cd917 fix: update readme to reflect changes 2021-07-20 14:57:52 +02:00
81e620ee3d fix: documentations capture for 0.4.0 2021-07-20 14:50:33 +02:00
fb9bd108bd feat: update changelog to reflect v0.4.0 version 2021-07-20 14:19:33 +02:00
4846fbb589 fix: server list working (no ping/lock/pop-up) 2021-07-19 16:03:12 +02:00
cab6625399 Merge branch '219-lock-annotation-doesn-t-sync' into 'develop'
Resolve "Lock annotation doesn't sync"

See merge request slumber/multi-user!143
2021-07-14 10:41:32 +00:00
1b81251a11 fix: annotation lock 2021-07-14 12:38:30 +02:00
cf44e547a2 fix: presence_text_distance rename 2021-07-13 17:15:34 +02:00
0269363c63 fix: overlay UI 2021-07-13 17:14:32 +02:00
4ffca17c54 fix: edit>prefs UI/UX 2021-07-13 16:40:26 +02:00
77bf269fb5 Merge branch '221-optimize-user-selection-draw-code' into 'develop'
Resolve "Optimize user selection draw code"

See merge request slumber/multi-user!142
2021-07-13 14:37:09 +00:00
1e675132d4 fix: collection instances index offset 2021-07-13 16:33:46 +02:00
781287c390 refactor: use one drawcall for all selection bbox 2021-07-13 15:45:08 +02:00
fc91b252f4 feat: edit>preferences + serverpassword ui 2021-07-12 18:01:35 +02:00
41c7c569ca fix: link session status icons to session header 2021-07-12 14:55:41 +02:00
a82d263f05 feat: add "icons" folder + init 2021-07-12 14:53:18 +02:00
d4476baa1b Merge branch '220-batch-right-selection-update' into 'develop'
Resolve "Batch right selection update"

See merge request slumber/multi-user!141
2021-07-12 10:20:23 +00:00
467e98906e feat: Batch right selection update
Related to https://gitlab.com/slumber/multi-user/-/issues/220
2021-07-12 12:06:45 +02:00
64a25f94a3 fix: gpencil material loading error
Now loading gpencil materials from uuid
2021-07-09 16:59:59 +02:00
e6996316be Merge branch '215-annotations-doesn-t-sync-correctly' into 'develop'
Resolve "Annotations doesn't sync correctly"

See merge request slumber/multi-user!138
2021-07-07 08:18:49 +00:00
cf4cd94096 refactor: remove gpencil dump stroke legacy
Related to #166 and #215
2021-07-07 10:15:23 +02:00
e9ab633aac fix: annotations updates
Related to #215
2021-07-06 16:06:14 +02:00
297639e80f fix: crash on changing workspace change 2021-07-06 15:39:19 +02:00
f0cc63b6f0 Merge branch '214-animated-object-transform-not-correctly-sync' into 'develop'
Resolve "Animated object transform not correctly sync"

See merge request slumber/multi-user!137
2021-07-06 12:32:39 +00:00
d433e8f241 fix: transform offset for object animated with a curve constraint
Related to #214
2021-07-06 14:29:20 +02:00
963a551a1e Merge branch '206-draw-active-mode-in-the-object-presence-overlay-2' into 'develop'
Draw active mode in the object presence overlay

See merge request slumber/multi-user!131
2021-07-01 12:57:01 +00:00
d01a434fb7 fix: Review 2021-07-01 14:53:14 +02:00
3a5a5fc633 fix : draw active mode UI side pannel 2021-07-01 11:58:52 +02:00
8926ab44e1 Merge branch '201-improved-image-support' into 'develop'
Resolve "Improved image support"

See merge request slumber/multi-user!136
2021-07-01 09:55:47 +00:00
a8f96581c5 fix: new mode display 2021-06-30 15:34:03 +02:00
440a4cc1cd feat: add mode visibily 2021-06-29 17:10:59 +02:00
a207c51973 fix: image renamin support
fix: sync Color Space Settings

related to #201
2021-06-29 15:59:26 +02:00
e706c8e0bf Merge branch '209-adding-a-scene-create-node-duplicates' into 'develop'
Resolve "Adding a scene create node duplicates"

See merge request slumber/multi-user!135
2021-06-28 08:30:22 +00:00
e590e896da fix: scene duplicates by using data instead of the update id
Related to #209
2021-06-28 10:27:04 +02:00
4140b62a8e Merge branch '119-add-timeline-marker-sync' into 'develop'
Resolve "Add timeline marker sync"

See merge request slumber/multi-user!133
2021-06-24 15:52:12 +00:00
6d9c9c4532 fix: timeline marker selection
feat: basic test
2021-06-24 17:45:34 +02:00
e9e1911840 Merge branch '208-late-update-logging-error' into 'develop'
Resolve "Late update logging error"

See merge request slumber/multi-user!134
2021-06-24 15:28:56 +00:00
ab350ca7bc fix: late update logging error
Related to #208
2021-06-24 17:24:08 +02:00
0a8f0b5f88 feat: add mode overlay 2021-06-24 16:01:14 +02:00
2238a15c11 feat: initial markers support 2021-06-24 15:51:01 +02:00
de73f022e6 merge 2021-06-24 14:52:07 +02:00
f517205647 fix: doc authors 2021-06-24 14:51:00 +02:00
f33c3d8481 fix: doc version 2021-06-24 14:50:12 +02:00
71c69000ec Merge branch '207-repository-panel-filtering-is-boken' into 'develop'
Resolve "Repository panel filtering is boken"

See merge request slumber/multi-user!132
2021-06-24 12:49:06 +00:00
de1e684b3c fix: name filtering 2021-06-24 14:35:59 +02:00
d87730cffb Merge branch '197-user-selection-bounding-box-glitches-for-non-mesh-objects' into 'develop'
User selection bounding box glitches for non-mesh objects

See merge request slumber/multi-user!129
2021-06-23 16:02:50 +00:00
3f005b86ab fix : add enumerate / remove nb_object 2021-06-23 17:45:01 +02:00
5098e5135d fix: bbox work for non-mesh objects+ins.collection 2021-06-23 17:00:05 +02:00
37cfed489c Merge branch '204-animation-doesn-t-sync-for-gpencil-materials' into 'develop'
Resolve "Animation doesn't sync for materials"

See merge request slumber/multi-user!128
2021-06-22 12:10:23 +00:00
9003abcd18 feat: notes for furtur improvements 2021-06-22 14:06:19 +02:00
a199e0df00 feat: apply bl_apply_child member to force dependencies reloading
fix: node_tree animation dependencies
2021-06-22 11:36:51 +02:00
3774419b7e fix: force push is now pushing the whole node data instead of delta 2021-06-22 10:41:36 +02:00
3e552cb406 feat: gpencil materials animation support 2021-06-22 10:39:40 +02:00
9f381b44c8 fix: material animation support 2021-06-21 18:58:16 +02:00
ad795caed5 fix: only apply repository heads on connection 2021-06-21 18:38:43 +02:00
504dd77405 fix: scene cleaning 2021-06-21 17:10:05 +02:00
82022c9e4d clean: only log ignored update in debug logging level 2021-06-18 15:45:51 +02:00
d81b4dc014 feat: enable delta back for all datablocks execpt gpencil, files and images 2021-06-18 15:30:39 +02:00
63affa079f Merge branch '199-filter-correctly-distant-updates-in-the-depsgraph-handler' into 'develop'
Resolve "Filter correctly distant updates in the depsgraph handler"

See merge request slumber/multi-user!126
2021-06-18 13:12:15 +00:00
fcf5a12dd0 fix: log verbosity level 2021-06-18 15:03:14 +02:00
b0529e4444 refactor: move handlers to hendlers.py 2021-06-18 14:59:56 +02:00
bdfd89c085 feat: temporary store applied update to ignore them. 2021-06-18 14:34:11 +02:00
ff1630f9cc Merge branch '194-smooth-brush-size-reset' into 'develop'
Resolve "Brush deleted on join"

See merge request slumber/multi-user!124
2021-06-16 12:30:31 +00:00
5830fe1abb fix: add items_to_remove 2021-06-16 14:28:26 +02:00
c609f72080 fix: All brushes 2021-06-16 12:29:56 +02:00
a28a6f91bd feat: move testing to blender 2.93 2021-06-15 16:27:49 +02:00
a996f39d3b Merge branch '195-auto-updater-install-a-broken-version-of-the-addon' into 'develop'
Resolve "Auto updater install a broken version of the addon"

See merge request slumber/multi-user!123
2021-06-15 12:54:49 +00:00
7790a16034 fix: download the build artifact instead of the repository default zip
Related to #195
2021-06-15 14:51:37 +02:00
836fdd02b8 Merge branch '192-parent-type-isn-t-synced' into 'develop'
Resolve "Parent type isn't synced"

See merge request slumber/multi-user!122
2021-06-15 09:22:13 +00:00
7cb3482353 fix: parent type and parent bone 2021-06-15 11:20:31 +02:00
041022056c Merge branch 'develop' of gitlab.com:slumber/multi-user into develop 2021-06-14 17:32:50 +02:00
05f3eb1445 fix: update readme 2021-06-14 17:32:05 +02:00
17193bde3a fix: doc server .png names 2021-06-14 14:29:45 +00:00
a14b4313f5 feat: update to develop 2021-06-14 16:12:47 +02:00
b203d9dffd Merge branch '188-intgrate-replication-as-a-submodule' into develop 2021-06-14 16:10:15 +02:00
f64db2155e Merge branch '49-connection-preset-system' into 'develop'
Connection-preset-system

See merge request slumber/multi-user!121
2021-06-14 13:50:58 +00:00
e07ebdeff5 fix: remove ui overwrite class 2021-06-14 15:46:57 +02:00
3d6453f7a2 feat: doc 2021-06-14 15:17:30 +02:00
7421511079 fix: override operator 2021-06-14 15:17:07 +02:00
bc24525cec fix: new UI/UX 2021-06-11 16:57:02 +02:00
699cf578e2 feat: prevent updates in sclupt mode 2021-06-11 16:42:23 +02:00
e9b4afb440 refactor: enable partial delta based replication 2021-06-11 15:28:37 +02:00
0c6491590e fix: admin password root 2021-06-11 12:18:51 +02:00
b87e733ddc fix: name conflict + responsive enum 2021-06-11 12:13:23 +02:00
cb0962b484 feat: server preset working with bad ui/ux 2021-06-10 15:39:12 +02:00
1fc25412ac fix: constraint differential update support 2021-06-10 15:21:25 +02:00
b5405553dc refactor: install replication dependencies in libs 2021-06-09 18:16:43 +02:00
a1b6fb0533 feat: server preset 2021-06-08 17:03:43 +02:00
b6a8a2ec01 Revert "doc: comment ui draw()"
This reverts commit f7c4f5d1fe.
2021-06-08 15:02:53 +02:00
3e41b18af1 Merge branch '49-connection-preset-system' of https://gitlab.com/slumber/multi-user into 49-connection-preset-system 2021-06-08 15:00:50 +02:00
f7c4f5d1fe doc: comment ui draw() 2021-06-08 14:58:57 +02:00
c616054878 tour du python blender 2021-06-07 17:06:41 +02:00
5c08493774 fix 'GraphObjectStore' object has no attribute 'object_store' 2021-06-04 18:30:54 +02:00
af8a138b4f fix: modifier order 2021-06-04 17:17:30 +02:00
6d9216f14a refactor: cleanup repository 2021-06-04 16:07:02 +02:00
fc4fb088bb refactor: repository api clean 2021-06-04 14:02:09 +02:00
98553ba00c refactor: remove get_nodes 2021-06-04 12:13:53 +02:00
1e15a12b10 refactor: remove list 2021-06-04 12:07:54 +02:00
569543650f feat: skip external updates 2021-06-03 15:43:47 +02:00
07358802f7 refactor: fix scene item removal 2021-06-03 15:03:09 +02:00
a059fafe12 feat: add mutate to scene delta 2021-06-03 11:43:24 +02:00
297f68ccfe refactor: only apply node when it is necessary (skip for host) 2021-06-03 11:41:25 +02:00
c9c70d1e08 refactor: stamp datablock during apply 2021-06-03 11:20:54 +02:00
a34f58ef3f fix: cherrypick TCP idle bug 2021-06-02 23:10:13 +02:00
e7b7f38991 fix: change rights 2021-06-02 17:49:22 +02:00
392e0aaaa3 refactor: remove missing parameter 2021-06-02 15:45:11 +02:00
4c774d5d53 refactor: move update user metadata to porcelain 2021-06-02 12:59:53 +02:00
4c4cf8a970 refactor: move rm to porcelain 2021-06-02 11:47:41 +02:00
211d0848c2 fix: replication version 2021-06-02 11:39:37 +02:00
c9665c4719 refactor: move unlock/lock/kick to porcelain 2021-06-02 11:31:23 +02:00
431fe0d840 refactor: move lock/unock to porcelain 2021-06-02 10:22:37 +02:00
df7ca66ad8 fix: repo dumps api 2021-06-02 09:35:55 +02:00
c2d2db78e6 refactor: temporary remove name resolution 2021-06-01 15:47:05 +02:00
ad89a4e389 fix: disable mutable delta for scene 2021-06-01 14:53:17 +02:00
6ca6d4443d refactor: move load/dumps to repository 2021-05-31 11:39:54 +02:00
81c9b5fc06 fix: animation loading 2021-05-21 23:02:42 +02:00
9fddfe084c fix: annotation 2021-05-21 17:29:22 +02:00
ca40523393 fix: apply and resolve 2021-05-21 17:14:28 +02:00
76e28ced21 refactor: remove legacy data 2021-05-21 15:40:45 +02:00
55c6002b28 feat: update version 2021-05-20 17:22:00 +02:00
8d5c8aded3 refacor: code formating 2021-05-20 09:57:44 +02:00
8ebba80b97 refactor: add diff back 2021-05-19 17:44:42 +02:00
50d6c6b3c8 fix: filter 2021-05-19 15:59:36 +02:00
f0b03c50f2 refactor: fix tests 2021-05-19 15:12:11 +02:00
28e83a38e6 refactor: add back armature lightprobes, sound and speaker 2021-05-19 15:05:54 +02:00
2e261cd66b refactor: add particle and lattive back 2021-05-19 14:40:13 +02:00
3f6e4f7333 refactor: add texts back 2021-05-19 14:23:56 +02:00
49fadf084a refactor: add gpencil back 2021-05-19 13:56:42 +02:00
e2e0dc31c1 refactor: add volume and world support 2021-05-19 13:42:34 +02:00
389bbd97d5 refactor: add image and file back 2021-05-19 13:31:57 +02:00
19602691d3 feat: texture 2021-05-19 11:43:01 +02:00
2e2ff5d4bf refactor: add material nodegroup back 2021-05-19 11:25:56 +02:00
fef6559ce0 refactor: add light and camera support back 2021-05-19 10:52:04 +02:00
5f669fd49a refactor: add camera back 2021-05-19 09:55:07 +02:00
330ff08fd3 refactor: add collection back 2021-05-19 09:47:01 +02:00
f3be8f9623 feat: bring back icons 2021-05-19 09:37:50 +02:00
ffb70ab74c refactor: protocol refactoring part 1 (mesh, object, action, scene) 2021-05-18 23:14:09 +02:00
26140eefb2 refactor: clear replicated datablock init states 2021-05-18 18:23:28 +02:00
cdf0433e8a refactor: move fetch to repository 2021-05-18 17:17:10 +02:00
acd70f73bf refactor: add remote
refactor: move push to porcelain
2021-05-18 16:54:07 +02:00
36c3a9ab0b refactor: remove sanitize 2021-05-18 11:01:55 +02:00
cfb1afdd72 Revert "feat: node sanitize on collection and scene update"
This reverts commit fb1c985f31.
2021-05-18 11:00:05 +02:00
4eeb80350e fix: layer info missing 2021-05-18 10:54:13 +02:00
fb1c985f31 feat: node sanitize on collection and scene update 2021-05-17 17:35:34 +02:00
689c2473d6 fix: commit 2021-05-17 17:18:17 +02:00
41620fce90 fix: commit 2021-05-17 17:04:43 +02:00
249bcf827b fix: collection instance bounding box selection 2021-05-17 16:03:01 +02:00
d47eab4f26 refactor: move commit to porcelain 2021-05-17 11:12:18 +02:00
f011089d82 refactor: removed apply from replicated datablock 2021-05-17 10:52:28 +02:00
acc58a1c9f fix: tcp keepalive IDLE time 2021-05-16 22:26:53 +02:00
24d850de9f refactor: get metadata updates optimization back 2021-05-11 11:41:43 +02:00
b045911a59 refactor: get diff back for testing 2021-05-10 12:04:45 +02:00
a67be76422 feat: delta commit 2021-05-09 17:42:56 +02:00
32033c743c feat: update repllication version 2021-05-07 17:10:23 +02:00
5da8650611 fix: get replication version 2021-05-07 16:56:00 +02:00
aec5096f87 feat: update submodule url 2021-05-07 16:12:04 +02:00
fba39b9980 fix: ci with submodules 2021-05-07 15:47:53 +02:00
6af3e4b777 refactor: add threaded data handling back on server side 2021-05-04 16:25:36 +02:00
58d639e9d8 feat: add replication as a submoduke 2021-05-04 14:56:50 +02:00
0efe5d5a10 Merge branch 'remove-services' into 'develop'
refactor: differential revision Stage 1

See merge request slumber/multi-user!119
2021-05-04 12:24:05 +00:00
88 changed files with 3087 additions and 1963 deletions

.gitignore vendored
View File

@ -13,4 +13,5 @@ multi_user_updater/
_build
# ignore generated zip generated from blender_addon_tester
*.zip
*.zip
libs

View File

@ -8,3 +8,5 @@ build:
name: multi_user
paths:
- multi_user
variables:
GIT_SUBMODULE_STRATEGY: recursive

View File

@ -5,6 +5,7 @@ deploy:
variables:
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: "/certs"
GIT_SUBMODULE_STRATEGY: recursive
services:
- docker:19.03.12-dind

View File

@ -3,3 +3,5 @@ test:
image: slumber/blender-addon-testing:latest
script:
- python3 scripts/test_addon.py
variables:
GIT_SUBMODULE_STRATEGY: recursive

.gitmodules vendored
View File

@ -0,0 +1,3 @@
[submodule "multi_user/libs/replication"]
path = multi_user/libs/replication
url = https://gitlab.com/slumber/replication.git

View File

@ -186,4 +186,34 @@ All notable changes to this project will be documented in this file.
- Exception access violation during Undo/Redo
- Sync missing armature bone Roll
- Sync missing driver data_path
- Constraint replication
- Constraint replication
## [0.4.0] - 2021-07-20
### Added
- Connection preset system (@Kysios)
- Display connected users active mode (users pannel and viewport) (@Kysios)
- Delta-based replication
- Sync timeline marker
- Sync images settings (@Kysios)
- Sync parent relation type (@Kysios)
- Sync uv project modifier
- Sync FCurves modifiers
### Changed
- User selection optimizations (draw and sync) (@Kysios)
- Improved shapekey syncing performances
- Improved gpencil syncing performances
- Integrate replication as a submodule
- The dependencies are now installed in a folder(blender addon folder) that no longer requires administrative rights
- Presence overlay UI optimization (@Kysios)
### Fixed
- User selection bounding box glitches for non-mesh objects (@Kysios)
- Transforms replication for animated objects
- GPencil fill stroke
- Sculpt and GPencil brushes deleted when joining a session (@Kysios)
- Auto-updater doesn't work for master and develop builds

View File

@ -11,9 +11,8 @@ This tool aims to allow multiple users to work on the same scene over the networ
## Quick installation
1. Download latest release [multi_user.zip](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build).
2. Run blender as administrator (dependencies installation).
3. Install last_version.zip from your addon preferences.
1. Download [latest build](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/develop/download?job=build) or [stable build](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build).
2. Install last_version.zip from your addon preferences.
[Dependencies](#dependencies) will be automatically added to your blender python during installation.
@ -29,35 +28,35 @@ See the [troubleshooting guide](https://slumber.gitlab.io/multi-user/getting_sta
Currently, not all data-block are supported for replication over the wire. The following list summarizes the status for each ones.
| Name | Status | Comment |
| -------------- | :----: | :----------------------------------------------------------: |
| action | ✔️ | |
| armature | | Not stable |
| camera | ✔️ | |
| collection | ✔️ | |
| curve | | Nurbs surfaces not supported |
| gpencil | ✔️ | |
| image | ✔️ | |
| mesh | ✔️ | |
| material | ✔️ | |
| node_groups | | Material & Geometry only |
| geometry nodes | ✔️ | |
| metaball | ✔️ | |
| object | ✔️ | |
| textures | | Supported for modifiers/materials/geo nodes only |
| texts | ✔️ | |
| scene | ✔️ | |
| world | ✔️ | |
| lightprobes | ✔️ | |
| compositing | | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
| texts | | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
| nla | | |
| volumes | ✔️ | |
| particles | ❗ | The cache isn't syncing. |
| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
| vse | | Mask and Clip not supported yet |
| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| libraries | | Partial |
| Name | Status | Comment |
| -------------- | :----: | :---------------------------------------------------------------------: |
| action | ✔️ | |
| camera | ✔️ | |
| collection | ✔️ | |
| gpencil | ✔️ | |
| image | ✔️ | |
| mesh | ✔️ | |
| material | ✔️ | |
| node_groups | ✔️ | Material & Geometry only |
| geometry nodes | ✔️ | |
| metaball | ✔️ | |
| object | ✔️ | |
| texts | ✔️ | |
| scene | ✔️ | |
| world | ✔️ | |
| volumes | ✔️ | |
| lightprobes | ✔️ | |
| physics | ✔️ | |
| textures | ✔️ | |
| curve | | Nurbs surfaces not supported |
| armature | | Only for Mesh. [Planned for GPencil](https://gitlab.com/slumber/multi-user/-/issues/161). Not stable yet |
| particles | | The cache isn't syncing. |
| speakers | | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
| vse | ❗ | Mask and Clip not supported yet |
| libraries | ❌ | |
| nla | | |
| texts | ❌ | [Planned for v0.5.0](https://gitlab.com/slumber/multi-user/-/issues/81) |
| compositing | | [Planned for v0.5.0](https://gitlab.com/slumber/multi-user/-/issues/46) |

View File

@ -19,10 +19,10 @@ import sys
project = 'multi-user'
copyright = '2020, Swann Martinez'
author = 'Swann Martinez, with contributions from Poochy'
author = 'Swann Martinez, Poochy, Fabian'
# The full version, including alpha/beta/rc tags
release = '0.2.0'
release = '0.5.0-develop'
# -- General configuration ---------------------------------------------------

11 binary image files changed (6 existing images updated, 5 new images added); file names and previews are not shown.

View File

@ -108,36 +108,69 @@ Before starting make sure that you have access to the session IP address and por
1. Fill in your user information
--------------------------------
Follow the user-info_ section for this step.
Joining a server
=======================
----------------
2. Network setup
----------------
--------------
Network setup
--------------
In the network panel, select **JOIN**.
The **join sub-panel** (see image below) allows you to configure your client to join a
collaborative session which is already hosted.
.. figure:: img/quickstart_join.png
:align: center
:alt: Connect menu
.. figure:: img/server_preset_image_normal_server.png
:align: center
:width: 200px
Connection panel
Connection pannel
Fill in the fields with your information:
- **IP**: the host's IP address.
- **Port**: the host's port number.
- **Connect as admin**: connect yourself with **admin rights** (see :ref:`admin` ) to the session.
.. Maybe something more explicit here
.. note::
Additional configuration settings can be found in the :ref:`advanced` section.
Once you've configured every field, hit the button **CONNECT** to join the session !
When the :ref:`session-status` is **ONLINE** you are online and ready to start co-creating.
.. note::
If you want to have **administrator rights** (see :ref:`admin` ) on the server, just enter the password created by the host in the **Connect as admin** section
.. figure:: img/server_preset_image_admin.png
:align: center
:width: 200px
Admin password
---------------
Server presets
---------------
You can save your server presets in a preset list below the 'JOIN' and 'HOST' buttons. This allows you to quickly access and manage your servers.
To add a server, first enter the ip address and the port (plus the password if needed), then click on the + icon to add a name to your preset. To remove a server from the list, select it and click on the - icon.
.. figure:: img/server_preset_exemple.gif
:align: center
:width: 200px
.. warning:: Be careful, if you don't rename your new preset, or if it has the same name as an existing preset, the old preset will be overwritten.
.. figure:: img/server_preset_image_report.png
:align: center
:width: 200px
.. note::
Two presets are already present when the addon is launched:
- The 'localhost' preset, to host and join a local session quickly
- The 'public session' preset, to join the public sessions of the multi-user server (official discord to participate : https://discord.gg/aBPvGws)
.. note::
Additional configuration settings can be found in the :ref:`advanced` section.
.. note::
When starting a **dedicated server**, the session status screen will take you to the **LOBBY**, awaiting an admin to start the session.
@ -182,8 +215,10 @@ One of the most vital tools is the **Online user panel**. It lists all connected
users' information including your own:
* **Role** : if a user is an admin or a regular user.
* **Location**: Where the user is actually working.
* **Username** : Name of the user.
* **Mode** : User's active editing mode (edit_mesh, paint,etc.).
* **Frame**: When (on which frame) the user is working.
* **Location**: Where the user is actually working.
* **Ping**: user's connection delay in milliseconds
.. figure:: img/quickstart_users.png
@ -240,6 +275,7 @@ it draw users' related information in your viewport such as:
* Username
* User point of view
* User active mode
* User selection
.. figure:: img/quickstart_presence.png

View File

@ -76,7 +76,7 @@ Hit 'Create a network'(see image below) and go to the network settings.
:align: center
:width: 450px
Network page
Admin password
Now that the network is created, let's configure it.

View File

@ -43,13 +43,10 @@ from bpy.app.handlers import persistent
from . import environment
DEPENDENCIES = {
("replication", '0.1.33'),
}
module_error_msg = "Insufficient rights to install the multi-user \
dependencies, aunch blender with administrator rights."
def register():
# Setup logging policy
logging.basicConfig(
@ -58,16 +55,13 @@ def register():
level=logging.INFO)
try:
if bpy.app.version[1] >= 91:
python_binary_path = sys.executable
else:
python_binary_path = bpy.app.binary_path_python
environment.setup(DEPENDENCIES, python_binary_path)
environment.register()
from . import presence
from . import operators
from . import handlers
from . import ui
from . import icons
from . import preferences
from . import addon_updater_ops
@ -75,7 +69,9 @@ def register():
addon_updater_ops.register(bl_info)
presence.register()
operators.register()
handlers.register()
ui.register()
icons.register()
except ModuleNotFoundError as e:
raise Exception(module_error_msg)
logging.error(module_error_msg)
@ -89,21 +85,28 @@ def register():
type=preferences.SessionUser
)
bpy.types.WindowManager.user_index = bpy.props.IntProperty()
bpy.types.WindowManager.server_index = bpy.props.IntProperty()
bpy.types.TOPBAR_MT_file_import.append(operators.menu_func_import)
bpy.types.TOPBAR_MT_file_export.append(operators.menu_func_export)
def unregister():
from . import presence
from . import operators
from . import handlers
from . import ui
from . import icons
from . import preferences
from . import addon_updater_ops
bpy.types.TOPBAR_MT_file_import.remove(operators.menu_func_import)
bpy.types.TOPBAR_MT_file_export.remove(operators.menu_func_export)
presence.unregister()
addon_updater_ops.unregister()
ui.unregister()
icons.unregister()
handlers.unregister()
operators.unregister()
preferences.unregister()
@ -111,3 +114,6 @@ def unregister():
del bpy.types.ID.uuid
del bpy.types.WindowManager.online_users
del bpy.types.WindowManager.user_index
del bpy.types.WindowManager.server_index
environment.unregister()
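
With replication now vendored inside the addon (see the new libs entry in .gitignore and the replication submodule), register() no longer installs the replication package into Blender's Python at runtime (the old environment.setup(DEPENDENCIES, ...) call), which is what previously required administrator rights. The new environment.setup()/register() bodies are not part of this diff, so the following is purely a sketch of how such a bundled layout is commonly wired up; the paths are assumptions, not taken from this changeset:

    import sys
    from pathlib import Path

    # Hypothetical sketch only: expose the vendored package before importing it.
    _LIBS = Path(__file__).parent / "libs" / "replication"
    if str(_LIBS) not in sys.path:
        sys.path.insert(0, str(_LIBS))

    from replication.protocol import ReplicatedDatablock  # now resolvable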

View File

@ -1688,10 +1688,7 @@ class GitlabEngine(object):
# Could clash with tag names and if it does, it will
# download TAG zip instead of branch zip to get
# direct path, would need.
return "{}{}{}".format(
self.form_repo_url(updater),
"/repository/archive.zip?sha=",
branch)
return f"https://gitlab.com/slumber/multi-user/-/jobs/artifacts/{branch}/download?job=build"
def get_zip_url(self, sha, updater):
return "{base}/repository/archive.zip?sha={sha}".format(

View File

@ -28,7 +28,6 @@ __all__ = [
'bl_light',
'bl_scene',
'bl_material',
'bl_library',
'bl_armature',
'bl_action',
'bl_world',
@ -39,7 +38,6 @@ __all__ = [
'bl_font',
'bl_sound',
'bl_file',
# 'bl_sequencer',
'bl_node_group',
'bl_texture',
"bl_particle",
@ -49,8 +47,18 @@ if bpy.app.version[1] >= 91:
__all__.append('bl_volume')
from . import *
from replication.data import DataTranslationProtocol
def types_to_register():
return __all__
from replication.protocol import DataTranslationProtocol
def get_data_translation_protocol()-> DataTranslationProtocol:
""" Return a data translation protocol from implemented bpy types
"""
bpy_protocol = DataTranslationProtocol()
for module_name in __all__:
impl = globals().get(module_name)
if impl and hasattr(impl, "_type") and hasattr(impl, "_type"):
bpy_protocol.register_implementation(impl._type, impl._class)
return bpy_protocol
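
get_data_translation_protocol() replaces the old types_to_register() list: each bl_* module is now expected to expose a module-level _type (a bpy type, or a list of types) and _class (its ReplicatedDatablock implementation), and the pair is registered on a DataTranslationProtocol. A minimal sketch of that contract, using a deliberately simplified, hypothetical module rather than one of the real implementations in this changeset:

    # bl_example.py, illustration only (not the addon's actual bl_text module)
    import bpy
    from replication.protocol import ReplicatedDatablock

    class BlExample(ReplicatedDatablock):
        bl_id = "texts"               # bpy.data collection holding this type
        bl_class = bpy.types.Text
        bl_icon = 'TEXT'
        bl_check_common = False
        bl_reload_parent = False

        @staticmethod
        def construct(data: dict) -> object:
            return bpy.data.texts.new(data["name"])

        @staticmethod
        def dump(datablock: object) -> dict:
            return {"name": datablock.name, "body": datablock.as_string()}

        @staticmethod
        def load(data: dict, datablock: object):
            datablock.clear()
            datablock.write(data.get("body", ""))

        @staticmethod
        def resolve(data: dict) -> object:
            return bpy.data.texts.get(data["name"])

        @staticmethod
        def resolve_deps(datablock: object) -> [object]:
            return []

    # Picked up by get_data_translation_protocol() via register_implementation()
    _type = bpy.types.Text
    _class = BlExample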

View File

@ -25,8 +25,8 @@ from enum import Enum
from .. import utils
from .dump_anything import (
Dumper, Loader, np_dump_collection, np_load_collection, remove_items_from_dict)
from .bl_datablock import BlDatablock, has_action, has_driver, dump_driver, load_driver
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
KEYFRAME = [
'amplitude',
@ -41,6 +41,66 @@ KEYFRAME = [
'interpolation',
]
def has_action(datablock):
""" Check if the datablock datablock has actions
"""
return (hasattr(datablock, 'animation_data')
and datablock.animation_data
and datablock.animation_data.action)
def has_driver(datablock):
""" Check if the datablock datablock is driven
"""
return (hasattr(datablock, 'animation_data')
and datablock.animation_data
and datablock.animation_data.drivers)
def dump_driver(driver):
dumper = Dumper()
dumper.depth = 6
data = dumper.dump(driver)
return data
def load_driver(target_datablock, src_driver):
loader = Loader()
drivers = target_datablock.animation_data.drivers
src_driver_data = src_driver['driver']
new_driver = drivers.new(src_driver['data_path'], index=src_driver['array_index'])
# Settings
new_driver.driver.type = src_driver_data['type']
new_driver.driver.expression = src_driver_data['expression']
loader.load(new_driver, src_driver)
# Variables
for src_variable in src_driver_data['variables']:
src_var_data = src_driver_data['variables'][src_variable]
new_var = new_driver.driver.variables.new()
new_var.name = src_var_data['name']
new_var.type = src_var_data['type']
for src_target in src_var_data['targets']:
src_target_data = src_var_data['targets'][src_target]
src_id = src_target_data.get('id')
if src_id:
new_var.targets[src_target].id = utils.resolve_from_id(src_target_data['id'], src_target_data['id_type'])
loader.load(new_var.targets[src_target], src_target_data)
# Fcurve
new_fcurve = new_driver.keyframe_points
for p in reversed(new_fcurve):
new_fcurve.remove(p, fast=True)
new_fcurve.add(len(src_driver['keyframe_points']))
for index, src_point in enumerate(src_driver['keyframe_points']):
new_point = new_fcurve[index]
loader.load(new_point, src_driver['keyframe_points'][src_point])
def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
""" Dump a sigle curve to a dict
@ -159,7 +219,7 @@ def load_fcurve(fcurve_data, fcurve):
def dump_animation_data(datablock):
animation_data = {}
if has_action(datablock):
animation_data['action'] = datablock.animation_data.action.name
animation_data['action'] = datablock.animation_data.action.uuid
if has_driver(datablock):
animation_data['drivers'] = []
for driver in datablock.animation_data.drivers:
@ -181,8 +241,10 @@ def load_animation_data(animation_data, datablock):
for driver in animation_data['drivers']:
load_driver(datablock, driver)
if 'action' in animation_data:
datablock.animation_data.action = bpy.data.actions[animation_data['action']]
action = animation_data.get('action')
if action:
action = resolve_datablock_from_uuid(action, bpy.data.actions)
datablock.animation_data.action = action
elif datablock.animation_data.action:
datablock.animation_data.action = None
@ -198,26 +260,30 @@ def resolve_animation_dependencies(datablock):
return []
class BlAction(BlDatablock):
class BlAction(ReplicatedDatablock):
use_delta = True
bl_id = "actions"
bl_class = bpy.types.Action
bl_check_common = False
bl_icon = 'ACTION_TWEAK'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.actions.new(data["name"])
def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
for dumped_fcurve in data["fcurves"]:
dumped_data_path = dumped_fcurve["data_path"]
dumped_array_index = dumped_fcurve["dumped_array_index"]
# create fcurve if needed
fcurve = target.fcurves.find(
fcurve = datablock.fcurves.find(
dumped_data_path, index=dumped_array_index)
if fcurve is None:
fcurve = target.fcurves.new(
fcurve = datablock.fcurves.new(
dumped_data_path, index=dumped_array_index)
load_fcurve(dumped_fcurve, fcurve)
@ -225,9 +291,10 @@ class BlAction(BlDatablock):
id_root = data.get('id_root')
if id_root:
target.id_root = id_root
datablock.id_root = id_root
def _dump_implementation(self, data, instance=None):
@staticmethod
def dump(datablock: object) -> dict:
dumper = Dumper()
dumper.exclude_filter = [
'name_full',
@ -242,11 +309,23 @@ class BlAction(BlDatablock):
'users'
]
dumper.depth = 1
data = dumper.dump(instance)
data = dumper.dump(datablock)
data["fcurves"] = []
for fcurve in instance.fcurves:
for fcurve in datablock.fcurves:
data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True))
return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.actions)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
return []
_type = bpy.types.Action
_class = BlAction
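
Because the protocol is now a set of static methods operating on plain dicts, an implementation can be exercised directly. A rough round trip for an Action, to be run inside Blender; the import path is an assumption about the addon layout, not something shown in this diff:

    import bpy
    from multi_user.bl_types.bl_action import BlAction  # assumed module path

    src = bpy.data.actions.new("demo_action")
    fc = src.fcurves.new("location", index=0)
    fc.keyframe_points.insert(1, 0.0)
    fc.keyframe_points.insert(10, 1.0)

    data = BlAction.dump(src)        # plain dict, F-Curves included
    copy = BlAction.construct(data)  # fresh datablock built from the dict
    BlAction.load(data, copy)        # F-Curves re-created on the copy
    assert len(copy.fcurves) == len(src.fcurves)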

View File

@ -22,8 +22,9 @@ import mathutils
from .dump_anything import Loader, Dumper
from .. import presence, operators, utils
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
def get_roll(bone: bpy.types.Bone) -> float:
""" Compute the actuall roll of a pose bone
@ -35,17 +36,21 @@ def get_roll(bone: bpy.types.Bone) -> float:
return bone.AxisRollFromMatrix(bone.matrix_local.to_3x3())[1]
class BlArmature(BlDatablock):
class BlArmature(ReplicatedDatablock):
use_delta = True
bl_id = "armatures"
bl_class = bpy.types.Armature
bl_check_common = False
bl_icon = 'ARMATURE_DATA'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.armatures.new(data["name"])
def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
# Load parent object
parent_object = utils.find_from_attr(
'uuid',
@ -55,7 +60,7 @@ class BlArmature(BlDatablock):
if parent_object is None:
parent_object = bpy.data.objects.new(
data['user_name'], target)
data['user_name'], datablock)
parent_object.uuid = data['user']
is_object_in_master = (
@ -90,10 +95,10 @@ class BlArmature(BlDatablock):
bpy.ops.object.mode_set(mode='EDIT')
for bone in data['bones']:
if bone not in target.edit_bones:
new_bone = target.edit_bones.new(bone)
if bone not in datablock.edit_bones:
new_bone = datablock.edit_bones.new(bone)
else:
new_bone = target.edit_bones[bone]
new_bone = datablock.edit_bones[bone]
bone_data = data['bones'].get(bone)
@ -104,7 +109,7 @@ class BlArmature(BlDatablock):
new_bone.roll = bone_data['roll']
if 'parent' in bone_data:
new_bone.parent = target.edit_bones[data['bones']
new_bone.parent = datablock.edit_bones[data['bones']
[bone]['parent']]
new_bone.use_connect = bone_data['use_connect']
@ -119,9 +124,10 @@ class BlArmature(BlDatablock):
if 'EDIT' in current_mode:
bpy.ops.object.mode_set(mode='EDIT')
def _dump_implementation(self, data, instance=None):
assert(instance)
load_animation_data(data.get('animation_data'), datablock)
@staticmethod
def dump(datablock: object) -> dict:
dumper = Dumper()
dumper.depth = 4
dumper.include_filter = [
@ -135,14 +141,14 @@ class BlArmature(BlDatablock):
'name',
'layers',
]
data = dumper.dump(instance)
data = dumper.dump(datablock)
for bone in instance.bones:
for bone in datablock.bones:
if bone.parent:
data['bones'][bone.name]['parent'] = bone.parent.name
# get the parent Object
# TODO: Use id_data instead
object_users = utils.get_datablock_users(instance)[0]
object_users = utils.get_datablock_users(datablock)[0]
data['user'] = object_users.uuid
data['user_name'] = object_users.name
@ -153,7 +159,25 @@ class BlArmature(BlDatablock):
data['user_scene'] = [
item.name for item in container_users if isinstance(item, bpy.types.Scene)]
for bone in instance.bones:
for bone in datablock.bones:
data['bones'][bone.name]['roll'] = get_roll(bone)
data['animation_data'] = dump_animation_data(datablock)
return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
name = data.get('name')
datablock = resolve_datablock_from_uuid(uuid, bpy.data.armatures)
if datablock is None:
datablock = bpy.data.armatures.get(name)
return datablock
@staticmethod
def resolve_deps(datablock: object) -> [object]:
return resolve_animation_dependencies(datablock)
_type = bpy.types.Armature
_class = BlArmature

View File

@ -20,39 +20,48 @@ import bpy
import mathutils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlCamera(BlDatablock):
class BlCamera(ReplicatedDatablock):
use_delta = True
bl_id = "cameras"
bl_class = bpy.types.Camera
bl_check_common = False
bl_icon = 'CAMERA_DATA'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.cameras.new(data["name"])
def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)
dof_settings = data.get('dof')
load_animation_data(data.get('animation_data'), datablock)
# DOF settings
if dof_settings:
loader.load(target.dof, dof_settings)
loader.load(datablock.dof, dof_settings)
background_images = data.get('background_images')
target.background_images.clear()
datablock.background_images.clear()
# TODO: Use image uuid
if background_images:
for img_name, img_data in background_images.items():
img_id = img_data.get('image')
if img_id:
target_img = target.background_images.new()
target_img = datablock.background_images.new()
target_img.image = bpy.data.images[img_id]
loader.load(target_img, img_data)
@ -61,11 +70,8 @@ class BlCamera(BlDatablock):
loader.load(target_img.image_user, img_user)
def _dump_implementation(self, data, instance=None):
assert(instance)
# TODO: background image support
@staticmethod
def dump(datablock: object) -> dict:
dumper = Dumper()
dumper.depth = 3
dumper.include_filter = [
@ -114,15 +120,29 @@ class BlCamera(BlDatablock):
'use_cyclic',
'use_auto_refresh'
]
data = dumper.dump(instance)
for index, image in enumerate(instance.background_images):
data = dumper.dump(datablock)
data['animation_data'] = dump_animation_data(datablock)
for index, image in enumerate(datablock.background_images):
if image.image_user:
data['background_images'][index]['image_user'] = dumper.dump(image.image_user)
return data
def _resolve_deps_implementation(self):
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.cameras)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
for background in self.instance.background_images:
for background in datablock.background_images:
if background.image:
deps.append(background.image)
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = bpy.types.Camera
_class = BlCamera

View File

@ -19,10 +19,12 @@
import bpy
import mathutils
from .. import utils
from .bl_datablock import BlDatablock
from .dump_anything import Loader, Dumper
from deepdiff import DeepDiff, Delta
from .. import utils
from replication.protocol import ReplicatedDatablock
from .dump_anything import Loader, Dumper
from .bl_datablock import resolve_datablock_from_uuid
def dump_collection_children(collection):
collection_children = []
@ -81,58 +83,82 @@ def resolve_collection_dependencies(collection):
return deps
class BlCollection(BlDatablock):
class BlCollection(ReplicatedDatablock):
bl_id = "collections"
bl_icon = 'FILE_FOLDER'
bl_class = bpy.types.Collection
bl_check_common = True
bl_reload_parent = False
def _construct(self, data):
if self.is_library:
with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
targetData.collections = [
name for name in sourceData.collections if name == self.data['name']]
instance = bpy.data.collections[self.data['name']]
return instance
use_delta = True
@staticmethod
def construct(data: dict) -> object:
instance = bpy.data.collections.new(data["name"])
return instance
def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)
# Objects
load_collection_objects(data['objects'], target)
load_collection_objects(data['objects'], datablock)
# Link childrens
load_collection_childrens(data['children'], target)
load_collection_childrens(data['children'], datablock)
# FIXME: Find a better way after the replication big refacotoring
# Keep other user from deleting collection object by flushing their history
utils.flush_history()
def _dump_implementation(self, data, instance=None):
assert(instance)
@staticmethod
def dump(datablock: object) -> dict:
dumper = Dumper()
dumper.depth = 1
dumper.include_filter = [
"name",
"instance_offset"
]
data = dumper.dump(instance)
data = dumper.dump(datablock)
# dump objects
data['objects'] = dump_collection_objects(instance)
data['objects'] = dump_collection_objects(datablock)
# dump children collections
data['children'] = dump_collection_children(instance)
data['children'] = dump_collection_children(datablock)
return data
def _resolve_deps_implementation(self):
return resolve_collection_dependencies(self.instance)
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.collections)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
return resolve_collection_dependencies(datablock)
@staticmethod
def compute_delta(last_data: dict, current_data: dict) -> Delta:
diff_params = {
'ignore_order': True,
'report_repetition': True
}
delta_params = {
# 'mutate': True
}
return Delta(
DeepDiff(last_data,
current_data,
cache_size=5000,
**diff_params),
**delta_params)
_type = bpy.types.Collection
_class = BlCollection
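
compute_delta() is where the delta-based replication listed in the changelog becomes concrete: instead of pushing the whole dumped dict, the node ships a deepdiff Delta computed between the last committed state and the current one (here with ignore_order, report_repetition and a DeepDiff cache_size). A minimal illustration of the round trip, with simplified parameters and made-up data:

    from deepdiff import DeepDiff, Delta

    last_data    = {"name": "Collection", "objects": ["Cube"]}
    current_data = {"name": "Collection", "objects": ["Cube", "Light"]}

    delta = Delta(DeepDiff(last_data, current_data))

    # Applying the delta to the old state reproduces the new one, so only
    # the (usually much smaller) delta needs to travel over the wire.
    assert last_data + delta == current_data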

View File

@ -21,13 +21,15 @@ import bpy.types as T
import mathutils
import logging
from .. import utils
from .bl_datablock import BlDatablock
from ..utils import get_preferences
from replication.protocol import ReplicatedDatablock
from .dump_anything import (Dumper, Loader,
np_load_collection,
np_dump_collection)
from .bl_datablock import get_datablock_from_uuid
from .bl_material import dump_materials_slots, load_materials_slots
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
SPLINE_BEZIER_POINT = [
# "handle_left_type",
@ -134,25 +136,31 @@ SPLINE_METADATA = [
]
class BlCurve(BlDatablock):
class BlCurve(ReplicatedDatablock):
use_delta = True
bl_id = "curves"
bl_class = bpy.types.Curve
bl_check_common = False
bl_icon = 'CURVE_DATA'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.curves.new(data["name"], data["type"])
def _load_implementation(self, data, target):
loader = Loader()
loader.load(target, data)
@staticmethod
def load(data: dict, datablock: object):
load_animation_data(data.get('animation_data'), datablock)
target.splines.clear()
loader = Loader()
loader.load(datablock, data)
datablock.splines.clear()
# load splines
for spline in data['splines'].values():
new_spline = target.splines.new(spline['type'])
new_spline = datablock.splines.new(spline['type'])
# Load curve geometry data
if new_spline.type == 'BEZIER':
@ -173,15 +181,14 @@ class BlCurve(BlDatablock):
# MATERIAL SLOTS
src_materials = data.get('materials', None)
if src_materials:
load_materials_slots(src_materials, target.materials)
load_materials_slots(src_materials, datablock.materials)
def _dump_implementation(self, data, instance=None):
assert(instance)
@staticmethod
def dump(datablock: object) -> dict:
dumper = Dumper()
# Conflicting attributes
# TODO: remove them with the NURBS support
dumper.include_filter = CURVE_METADATA
dumper.exclude_filter = [
'users',
'order_u',
@ -190,14 +197,16 @@ class BlCurve(BlDatablock):
'point_count_u',
'active_textbox'
]
if instance.use_auto_texspace:
if datablock.use_auto_texspace:
dumper.exclude_filter.extend([
'texspace_location',
'texspace_size'])
data = dumper.dump(instance)
data = dumper.dump(datablock)
data['animation_data'] = dump_animation_data(datablock)
data['splines'] = {}
for index, spline in enumerate(instance.splines):
for index, spline in enumerate(datablock.splines):
dumper.depth = 2
dumper.include_filter = SPLINE_METADATA
spline_data = dumper.dump(spline)
@ -211,21 +220,27 @@ class BlCurve(BlDatablock):
spline.bezier_points, SPLINE_BEZIER_POINT)
data['splines'][index] = spline_data
if isinstance(instance, T.SurfaceCurve):
if isinstance(datablock, T.SurfaceCurve):
data['type'] = 'SURFACE'
elif isinstance(instance, T.TextCurve):
elif isinstance(datablock, T.TextCurve):
data['type'] = 'FONT'
elif isinstance(instance, T.Curve):
elif isinstance(datablock, T.Curve):
data['type'] = 'CURVE'
data['materials'] = dump_materials_slots(instance.materials)
data['materials'] = dump_materials_slots(datablock.materials)
return data
def _resolve_deps_implementation(self):
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.curves)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
# TODO: resolve material
deps = []
curve = self.instance
curve = datablock
if isinstance(curve, T.TextCurve):
deps.extend([
@ -234,15 +249,19 @@ class BlCurve(BlDatablock):
curve.font_bold_italic,
curve.font_italic])
for material in self.instance.materials:
for material in datablock.materials:
if material:
deps.append(material)
deps.extend(resolve_animation_dependencies(datablock))
return deps
def diff(self):
if 'EDIT' in bpy.context.mode \
and not self.preferences.sync_flags.sync_during_editmode:
return False
else:
return super().diff()
@staticmethod
def needs_update(datablock: object, data: dict) -> bool:
return 'EDIT' not in bpy.context.mode \
or get_preferences().sync_flags.sync_during_editmode
_type = [bpy.types.Curve, bpy.types.TextCurve]
_class = BlCurve

View File

@ -22,73 +22,11 @@ from collections.abc import Iterable
import bpy
import mathutils
from replication.constants import DIFF_BINARY, DIFF_JSON, UP
from replication.data import ReplicatedDatablock
from replication.protocol import ReplicatedDatablock
from .. import utils
from .dump_anything import Dumper, Loader
def has_action(target):
""" Check if the target datablock has actions
"""
return (hasattr(target, 'animation_data')
and target.animation_data
and target.animation_data.action)
def has_driver(target):
""" Check if the target datablock is driven
"""
return (hasattr(target, 'animation_data')
and target.animation_data
and target.animation_data.drivers)
def dump_driver(driver):
dumper = Dumper()
dumper.depth = 6
data = dumper.dump(driver)
return data
def load_driver(target_datablock, src_driver):
loader = Loader()
drivers = target_datablock.animation_data.drivers
src_driver_data = src_driver['driver']
new_driver = drivers.new(src_driver['data_path'], index=src_driver['array_index'])
# Settings
new_driver.driver.type = src_driver_data['type']
new_driver.driver.expression = src_driver_data['expression']
loader.load(new_driver, src_driver)
# Variables
for src_variable in src_driver_data['variables']:
src_var_data = src_driver_data['variables'][src_variable]
new_var = new_driver.driver.variables.new()
new_var.name = src_var_data['name']
new_var.type = src_var_data['type']
for src_target in src_var_data['targets']:
src_target_data = src_var_data['targets'][src_target]
src_id = src_target_data.get('id')
if src_id:
new_var.targets[src_target].id = utils.resolve_from_id(src_target_data['id'], src_target_data['id_type'])
loader.load(new_var.targets[src_target], src_target_data)
# Fcurve
new_fcurve = new_driver.keyframe_points
for p in reversed(new_fcurve):
new_fcurve.remove(p, fast=True)
new_fcurve.add(len(src_driver['keyframe_points']))
for index, src_point in enumerate(src_driver['keyframe_points']):
new_point = new_fcurve[index]
loader.load(new_point, src_driver['keyframe_points'][src_point])
def get_datablock_from_uuid(uuid, default, ignore=[]):
if not uuid:
return default
@ -100,133 +38,8 @@ def get_datablock_from_uuid(uuid, default, ignore=[]):
return item
return default
class BlDatablock(ReplicatedDatablock):
"""BlDatablock
bl_id : blender internal storage identifier
bl_class : blender internal type
bl_icon : type icon (blender icon name)
bl_check_common: enable check even in common rights
bl_reload_parent: reload parent
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None)
self.preferences = utils.get_preferences()
# TODO: use is_library_indirect
self.is_library = (instance and hasattr(instance, 'library') and
instance.library) or \
(hasattr(self,'data') and self.data and 'library' in self.data)
if instance and hasattr(instance, 'uuid'):
instance.uuid = self.uuid
def resolve(self, construct = True):
datablock_root = getattr(bpy.data, self.bl_id)
datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root)
if not datablock_ref:
try:
datablock_ref = datablock_root[self.data['name']]
except Exception:
pass
if construct and not datablock_ref:
name = self.data.get('name')
logging.debug(f"Constructing {name}")
datablock_ref = self._construct(data=self.data)
if datablock_ref is not None:
setattr(datablock_ref, 'uuid', self.uuid)
self.instance = datablock_ref
return True
else:
return False
def remove_instance(self):
"""
Remove instance from blender data
"""
assert(self.instance)
datablock_root = getattr(bpy.data, self.bl_id)
datablock_root.remove(self.instance)
def _dump(self, instance=None):
dumper = Dumper()
data = {}
animation_data = {}
# Dump animation data
if has_action(instance):
animation_data['action'] = instance.animation_data.action.name
if has_driver(instance):
animation_data['drivers'] = []
for driver in instance.animation_data.drivers:
animation_data['drivers'].append(dump_driver(driver))
if animation_data:
data['animation_data'] = animation_data
if self.is_library:
data.update(dumper.dump(instance))
else:
data.update(self._dump_implementation(data, instance=instance))
return data
def _dump_implementation(self, data, target):
raise NotImplementedError
def _load(self, data, target):
# Load animation data
if 'animation_data' in data.keys():
if target.animation_data is None:
target.animation_data_create()
for d in target.animation_data.drivers:
target.animation_data.drivers.remove(d)
if 'drivers' in data['animation_data']:
for driver in data['animation_data']['drivers']:
load_driver(target, driver)
if 'action' in data['animation_data']:
target.animation_data.action = bpy.data.actions[data['animation_data']['action']]
elif target.animation_data.action:
target.animation_data.action = None
# Remove existing animation data if there is not more to load
elif hasattr(target, 'animation_data') and target.animation_data:
target.animation_data_clear()
if self.is_library:
return
else:
self._load_implementation(data, target)
def _load_implementation(self, data, target):
raise NotImplementedError
def resolve_deps(self):
dependencies = []
if has_action(self.instance):
dependencies.append(self.instance.animation_data.action)
if not self.is_library:
dependencies.extend(self._resolve_deps_implementation())
logging.debug(f"{self.instance} dependencies: {dependencies}")
return dependencies
def _resolve_deps_implementation(self):
return []
def is_valid(self):
return getattr(bpy.data, self.bl_id).get(self.data['name'])
def resolve_datablock_from_uuid(uuid, bpy_collection):
for item in bpy_collection:
if getattr(item, 'uuid', None) == uuid:
return item
return None

View File

@ -19,14 +19,15 @@
import logging
import os
import sys
from pathlib import Path
from pathlib import Path, WindowsPath, PosixPath
import bpy
import mathutils
from replication.constants import DIFF_BINARY, UP
from replication.data import ReplicatedDatablock
from replication.protocol import ReplicatedDatablock
from .. import utils
from ..utils import get_preferences
from .dump_anything import Dumper, Loader
@ -58,33 +59,16 @@ class BlFile(ReplicatedDatablock):
bl_icon = 'FILE'
bl_reload_parent = True
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.instance = kwargs.get('instance', None)
if self.instance and not self.instance.exists():
raise FileNotFoundError(str(self.instance))
self.preferences = utils.get_preferences()
@staticmethod
def construct(data: dict) -> object:
return Path(get_filepath(data['name']))
def resolve(self, construct = True):
self.instance = Path(get_filepath(self.data['name']))
file_exists = self.instance.exists()
if not file_exists:
logging.debug("File don't exist, loading it.")
self._load(self.data, self.instance)
return file_exists
@staticmethod
def resolve(data: dict) -> object:
return Path(get_filepath(data['name']))
def push(self, socket, identity=None, check_data=False):
super().push(socket, identity=None, check_data=False)
if self.preferences.clear_memory_filecache:
del self.data['file']
def _dump(self, instance=None):
@staticmethod
def dump(datablock: object) -> dict:
"""
Read the file and return a dict as:
{
@ -96,46 +80,62 @@ class BlFile(ReplicatedDatablock):
logging.info(f"Extracting file metadata")
data = {
'name': self.instance.name,
'name': datablock.name,
}
logging.info(
f"Reading {self.instance.name} content: {self.instance.stat().st_size} bytes")
logging.info(f"Reading {datablock.name} content: {datablock.stat().st_size} bytes")
try:
file = open(self.instance, "rb")
file = open(datablock, "rb")
data['file'] = file.read()
file.close()
except IOError:
logging.warning(f"{self.instance} doesn't exist, skipping")
logging.warning(f"{datablock} doesn't exist, skipping")
else:
file.close()
return data
def _load(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
"""
Writing the file
"""
try:
file = open(target, "wb")
file = open(datablock, "wb")
file.write(data['file'])
if self.preferences.clear_memory_filecache:
del self.data['file']
if get_preferences().clear_memory_filecache:
del data['file']
except IOError:
logging.warning(f"{target} doesn't exist, skipping")
logging.warning(f"{datablock} doesn't exist, skipping")
else:
file.close()
def diff(self):
if self.preferences.clear_memory_filecache:
@staticmethod
def resolve_deps(datablock: object) -> [object]:
return []
@staticmethod
def needs_update(datablock: object, data:dict)-> bool:
if get_preferences().clear_memory_filecache:
return False
else:
if not self.instance:
if not datablock:
return None
if not data:
return True
memory_size = sys.getsizeof(data['file'])-33
disk_size = datablock.stat().st_size
if memory_size != disk_size:
return True
else:
return False
memory_size = sys.getsizeof(self.data['file'])-33
disk_size = self.instance.stat().st_size
return memory_size != disk_size
_type = [WindowsPath, PosixPath]
_class = BlFile
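
A note on the magic number in needs_update(): sys.getsizeof() on a bytes object counts the CPython object header in addition to the payload, and on a typical 64-bit build that header is 33 bytes, so subtracting 33 leaves roughly the raw payload length to compare against the on-disk st_size. That reading is an inference from the code rather than something stated in this diff; the underlying assumption can be checked quickly:

    import sys

    # Assumes a 64-bit CPython build, where an empty bytes object is 33 bytes.
    assert sys.getsizeof(b"") == 33
    assert sys.getsizeof(b"abcd") - 33 == len(b"abcd")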

View File

@ -22,19 +22,20 @@ from pathlib import Path
import bpy
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_file import get_filepath, ensure_unpacked
from .dump_anything import Dumper, Loader
from .bl_datablock import resolve_datablock_from_uuid
class BlFont(BlDatablock):
class BlFont(ReplicatedDatablock):
bl_id = "fonts"
bl_class = bpy.types.VectorFont
bl_check_common = False
bl_icon = 'FILE_FONT'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
filename = data.get('filename')
if filename == '<builtin>':
@ -42,31 +43,43 @@ class BlFont(BlDatablock):
else:
return bpy.data.fonts.load(get_filepath(filename))
def _load(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
pass
def _dump(self, instance=None):
if instance.filepath == '<builtin>':
@staticmethod
def dump(datablock: object) -> dict:
if datablock.filepath == '<builtin>':
filename = '<builtin>'
else:
filename = Path(instance.filepath).name
filename = Path(datablock.filepath).name
if not filename:
raise FileExistsError(instance.filepath)
raise FileExistsError(datablock.filepath)
return {
'filename': filename,
'name': instance.name
'name': datablock.name
}
def diff(self):
return False
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.fonts)
def _resolve_deps_implementation(self):
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
if self.instance.filepath and self.instance.filepath != '<builtin>':
ensure_unpacked(self.instance)
if datablock.filepath and datablock.filepath != '<builtin>':
ensure_unpacked(datablock)
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
deps.append(Path(bpy.path.abspath(datablock.filepath)))
return deps
@staticmethod
def needs_update(datablock: object, data:dict)-> bool:
return False
_type = bpy.types.VectorFont
_class = BlFont

View File

@ -24,10 +24,12 @@ from .dump_anything import (Dumper,
Loader,
np_dump_collection,
np_load_collection)
from .bl_datablock import BlDatablock
# GPencil data api is structured as follows (see the traversal sketch after the BlGpencil class below):
# GP-Object --> GP-Layers --> GP-Frames --> GP-Strokes --> GP-Stroke-Points
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
from ..utils import get_preferences
from ..timers import is_annotating
from .bl_material import load_materials_slots, dump_materials_slots
STROKE_POINT = [
'co',
@ -64,36 +66,9 @@ def dump_stroke(stroke):
:param stroke: target grease pencil stroke
:type stroke: bpy.types.GPencilStroke
:return: dict
:return: (p_count, p_data)
"""
assert(stroke)
dumper = Dumper()
dumper.include_filter = [
"aspect",
"display_mode",
"draw_cyclic",
"end_cap_mode",
"hardeness",
"line_width",
"material_index",
"start_cap_mode",
"uv_rotation",
"uv_scale",
"uv_translation",
"vertex_color_fill",
]
dumped_stroke = dumper.dump(stroke)
# Stroke points
p_count = len(stroke.points)
dumped_stroke['p_count'] = p_count
dumped_stroke['points'] = np_dump_collection(stroke.points, STROKE_POINT)
# TODO: uv_factor, uv_rotation
return dumped_stroke
return (len(stroke.points), np_dump_collection(stroke.points, STROKE_POINT))
def load_stroke(stroke_data, stroke):
@ -106,12 +81,13 @@ def load_stroke(stroke_data, stroke):
"""
assert(stroke and stroke_data)
stroke.points.add(stroke_data["p_count"])
np_load_collection(stroke_data['points'], stroke.points, STROKE_POINT)
stroke.points.add(stroke_data[0])
np_load_collection(stroke_data[1], stroke.points, STROKE_POINT)
# HACK: Temporary fix to trigger a BKE_gpencil_stroke_geometry_update to
# fix fill issues
stroke.uv_scale = stroke_data["uv_scale"]
stroke.uv_scale = 1.0
def dump_frame(frame):
""" Dump a grease pencil frame to a dict
@ -145,12 +121,15 @@ def load_frame(frame_data, frame):
assert(frame and frame_data)
# Load stroke points
for stroke_data in frame_data['strokes_points']:
target_stroke = frame.strokes.new()
load_stroke(stroke_data, target_stroke)
# Load stroke metadata
np_load_collection(frame_data['strokes'], frame.strokes, STROKE)
def dump_layer(layer):
""" Dump a grease pencil layer
@ -167,7 +146,6 @@ def dump_layer(layer):
'opacity',
'channel_color',
'color',
# 'thickness', #TODO: enabling only for annotation
'tint_color',
'tint_factor',
'vertex_paint_opacity',
@ -184,7 +162,7 @@ def dump_layer(layer):
'hide',
'annotation_hide',
'lock',
# 'lock_frame',
'lock_frame',
# 'lock_material',
# 'use_mask_layer',
'use_lights',
@ -192,12 +170,13 @@ def dump_layer(layer):
'select',
'show_points',
'show_in_front',
# 'thickness'
# 'parent',
# 'parent_type',
# 'parent_bone',
# 'matrix_inverse',
]
if layer.id_data.is_annotation:
if layer.thickness != 0:
dumper.include_filter.append('thickness')
dumped_layer = dumper.dump(layer)
@ -228,87 +207,99 @@ def load_layer(layer_data, layer):
load_frame(frame_data, target_frame)
class BlGpencil(BlDatablock):
def layer_changed(datablock: object, data: dict) -> bool:
if datablock.layers.active and \
datablock.layers.active.info != data["active_layers"]:
return True
else:
return False
def frame_changed(data: dict) -> bool:
return bpy.context.scene.frame_current != data["eval_frame"]
class BlGpencil(ReplicatedDatablock):
bl_id = "grease_pencils"
bl_class = bpy.types.GreasePencil
bl_check_common = False
bl_icon = 'GREASEPENCIL'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.grease_pencils.new(data["name"])
def _load_implementation(self, data, target):
target.materials.clear()
if "materials" in data.keys():
for mat in data['materials']:
target.materials.append(bpy.data.materials[mat])
@staticmethod
def load(data: dict, datablock: object):
# MATERIAL SLOTS
src_materials = data.get('materials', None)
if src_materials:
load_materials_slots(src_materials, datablock.materials)
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)
# TODO: reuse existing layer
for layer in target.layers:
target.layers.remove(layer)
for layer in datablock.layers:
datablock.layers.remove(layer)
if "layers" in data.keys():
for layer in data["layers"]:
layer_data = data["layers"].get(layer)
# if layer not in target.layers.keys():
target_layer = target.layers.new(data["layers"][layer]["info"])
# if layer not in datablock.layers.keys():
target_layer = datablock.layers.new(data["layers"][layer]["info"])
# else:
# target_layer = target.layers[layer]
# target_layer.clear()
load_layer(layer_data, target_layer)
target.layers.update()
datablock.layers.update()
def _dump_implementation(self, data, instance=None):
assert(instance)
@staticmethod
def dump(datablock: object) -> dict:
dumper = Dumper()
dumper.depth = 2
dumper.include_filter = [
'materials',
'name',
'zdepth_offset',
'stroke_thickness_space',
'pixel_factor',
'stroke_depth_order'
]
data = dumper.dump(instance)
data = dumper.dump(datablock)
data['materials'] = dump_materials_slots(datablock.materials)
data['layers'] = {}
for layer in instance.layers:
for layer in datablock.layers:
data['layers'][layer.info] = dump_layer(layer)
data["active_layers"] = instance.layers.active.info
data["active_layers"] = datablock.layers.active.info if datablock.layers.active else "None"
data["eval_frame"] = bpy.context.scene.frame_current
return data
def _resolve_deps_implementation(self):
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.grease_pencils)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
for material in self.instance.materials:
for material in datablock.materials:
deps.append(material)
return deps
def layer_changed(self):
return self.instance.layers.active.info != self.data["active_layers"]
@staticmethod
def needs_update(datablock: object, data: dict) -> bool:
return bpy.context.mode == 'OBJECT' \
or layer_changed(datablock, data) \
or frame_changed(data) \
or get_preferences().sync_flags.sync_during_editmode \
or is_annotating(bpy.context)
def frame_changed(self):
return bpy.context.scene.frame_current != self.data["eval_frame"]
def diff(self):
if self.layer_changed() \
or self.frame_changed() \
or bpy.context.mode == 'OBJECT' \
or self.preferences.sync_flags.sync_during_editmode:
return super().diff()
else:
return False
_type = bpy.types.GreasePencil
_class = BlGpencil
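Since the dump/load code above walks the whole grease pencil hierarchy, here is a small illustrative traversal of that structure (not part of the diff), matching the GP-Object --> Layers --> Frames --> Strokes --> Points comment near the top of this file; count_gpencil_points is a hypothetical helper:
import bpy
def count_gpencil_points(gp: bpy.types.GreasePencil) -> int:
    # GP-Object --> GP-Layers --> GP-Frames --> GP-Strokes --> GP-Stroke-Points
    total = 0
    for layer in gp.layers:
        for frame in layer.frames:
            for stroke in frame.strokes:
                total += len(stroke.points)
    return total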

View File

@ -24,9 +24,12 @@ import bpy
import mathutils
from .. import utils
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .dump_anything import Dumper, Loader
from .bl_file import get_filepath, ensure_unpacked
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
format_to_ext = {
'BMP': 'bmp',
@ -48,35 +51,37 @@ format_to_ext = {
}
class BlImage(BlDatablock):
class BlImage(ReplicatedDatablock):
bl_id = "images"
bl_class = bpy.types.Image
bl_check_common = False
bl_icon = 'IMAGE_DATA'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.images.new(
name=data['name'],
width=data['size'][0],
height=data['size'][1]
)
def _load(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
loader = Loader()
loader.load(data, target)
loader.load(datablock, data)
target.source = 'FILE'
target.filepath_raw = get_filepath(data['filename'])
color_space_name = data["colorspace_settings"]["name"]
# datablock.name = data.get('name')
datablock.source = 'FILE'
datablock.filepath_raw = get_filepath(data['filename'])
color_space_name = data.get("colorspace")
if color_space_name:
target.colorspace_settings.name = color_space_name
datablock.colorspace_settings.name = color_space_name
def _dump(self, instance=None):
assert(instance)
filename = Path(instance.filepath).name
@staticmethod
def dump(datablock: object) -> dict:
filename = Path(datablock.filepath).name
data = {
"filename": filename
@ -88,40 +93,45 @@ class BlImage(BlDatablock):
"name",
# 'source',
'size',
'height',
'alpha',
'float_buffer',
'alpha_mode',
'colorspace_settings']
data.update(dumper.dump(instance))
'alpha_mode']
data.update(dumper.dump(datablock))
data['colorspace'] = datablock.colorspace_settings.name
return data
def diff(self):
if self.instance.is_dirty:
self.instance.save()
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.images)
if self.instance and (self.instance.name != self.data['name']):
return True
else:
return False
def _resolve_deps_implementation(self):
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
if self.instance.packed_file:
filename = Path(bpy.path.abspath(self.instance.filepath)).name
self.instance.filepath_raw = get_filepath(filename)
self.instance.save()
if datablock.packed_file:
filename = Path(bpy.path.abspath(datablock.filepath)).name
datablock.filepath_raw = get_filepath(filename)
datablock.save()
# An image can't be unpacked to the modified path
# TODO: make a bug report
self.instance.unpack(method="REMOVE")
datablock.unpack(method="REMOVE")
elif self.instance.source == "GENERATED":
filename = f"{self.instance.name}.png"
self.instance.filepath = get_filepath(filename)
self.instance.save()
elif datablock.source == "GENERATED":
filename = f"{datablock.name}.png"
datablock.filepath = get_filepath(filename)
datablock.save()
if self.instance.filepath:
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
if datablock.filepath:
deps.append(Path(bpy.path.abspath(datablock.filepath)))
return deps
@staticmethod
def needs_update(datablock: object, data:dict)-> bool:
if datablock.is_dirty:
datablock.save()
return True
_type = bpy.types.Image
_class = BlImage

View File

@ -20,33 +20,41 @@ import bpy
import mathutils
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from replication.exception import ContextError
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
POINT = ['co', 'weight_softbody', 'co_deform']
class BlLattice(BlDatablock):
class BlLattice(ReplicatedDatablock):
use_delta = True
bl_id = "lattices"
bl_class = bpy.types.Lattice
bl_check_common = False
bl_icon = 'LATTICE_DATA'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.lattices.new(data["name"])
def _load_implementation(self, data, target):
if target.is_editmode:
@staticmethod
def load(data: dict, datablock: object):
load_animation_data(data.get('animation_data'), datablock)
if datablock.is_editmode:
raise ContextError("lattice is in edit mode")
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)
np_load_collection(data['points'], target.points, POINT)
np_load_collection(data['points'], datablock.points, POINT)
def _dump_implementation(self, data, instance=None):
if instance.is_editmode:
@staticmethod
def dump(datablock: object) -> dict:
if datablock.is_editmode:
raise ContextError("lattice is in edit mode")
dumper = Dumper()
@ -62,9 +70,20 @@ class BlLattice(BlDatablock):
'interpolation_type_w',
'use_outside'
]
data = dumper.dump(instance)
data['points'] = np_dump_collection(instance.points, POINT)
data = dumper.dump(datablock)
data['points'] = np_dump_collection(datablock.points, POINT)
data['animation_data'] = dump_animation_data(datablock)
return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.lattices)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
return resolve_animation_dependencies(datablock)
_type = bpy.types.Lattice
_class = BlLattice

View File

@ -20,25 +20,34 @@ import bpy
import mathutils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlLight(BlDatablock):
class BlLight(ReplicatedDatablock):
use_delta = True
bl_id = "lights"
bl_class = bpy.types.Light
bl_check_common = False
bl_icon = 'LIGHT_DATA'
bl_reload_parent = False
def _construct(self, data):
return bpy.data.lights.new(data["name"], data["type"])
@staticmethod
def construct(data: dict) -> object:
instance = bpy.data.lights.new(data["name"], data["type"])
instance.uuid = data.get("uuid")
return instance
def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)
load_animation_data(data.get('animation_data'), datablock)
def _dump_implementation(self, data, instance=None):
assert(instance)
@staticmethod
def dump(datablock: object) -> dict:
dumper = Dumper()
dumper.depth = 3
dumper.include_filter = [
@ -67,9 +76,23 @@ class BlLight(BlDatablock):
'spot_size',
'spot_blend'
]
data = dumper.dump(instance)
data = dumper.dump(datablock)
data['animation_data'] = dump_animation_data(datablock)
return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.lights)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = [bpy.types.SpotLight, bpy.types.PointLight, bpy.types.AreaLight, bpy.types.SunLight]
_class = BlLight

View File

@ -21,17 +21,20 @@ import mathutils
import logging
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
class BlLightprobe(ReplicatedDatablock):
use_delta = True
class BlLightprobe(BlDatablock):
bl_id = "lightprobes"
bl_class = bpy.types.LightProbe
bl_check_common = False
bl_icon = 'LIGHTPROBE_GRID'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type']
# See https://developer.blender.org/D6396
if bpy.app.version[1] >= 83:
@ -39,12 +42,13 @@ class BlLightprobe(BlDatablock):
else:
logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)
def _dump_implementation(self, data, instance=None):
assert(instance)
@staticmethod
def dump(datablock: object) -> dict:
if bpy.app.version[1] < 83:
logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
@ -71,7 +75,16 @@ class BlLightprobe(BlDatablock):
'visibility_blur'
]
return dumper.dump(instance)
return dumper.dump(datablock)
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.lightprobes)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
return []
_type = bpy.types.LightProbe
_class = BlLightprobe

View File

@ -24,7 +24,10 @@ import re
from uuid import uuid4
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM']
@ -45,7 +48,11 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
node_tree_uuid = node_data.get('node_tree_uuid', None)
if image_uuid and not target_node.image:
target_node.image = get_datablock_from_uuid(image_uuid, None)
image = resolve_datablock_from_uuid(image_uuid, bpy.data.images)
if image is None:
logging.error(f"Fail to find material image from uuid {image_uuid}")
else:
target_node.image = image
if node_tree_uuid:
target_node.node_tree = get_datablock_from_uuid(node_tree_uuid, None)
@ -117,8 +124,7 @@ def dump_node(node: bpy.types.ShaderNode) -> dict:
"show_preview",
"show_texture",
"outputs",
"width_hidden",
"image"
"width_hidden"
]
dumped_node = node_dumper.dump(node)
@ -381,44 +387,50 @@ def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_
for mat_uuid, mat_name in src_materials:
mat_ref = None
if mat_uuid is not None:
if mat_uuid:
mat_ref = get_datablock_from_uuid(mat_uuid, None)
else:
mat_ref = bpy.data.materials[mat_name]
dst_materials.append(mat_ref)
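load_materials_slots() above consumes a list of (uuid, name) pairs; the matching dump_materials_slots() is defined in this module but outside the hunk, so here is only a hedged sketch of what such a dump presumably returns:
def dump_materials_slots_sketch(materials):
    # one (uuid, name) pair per non-empty slot: the uuid is the preferred lookup key,
    # the name is kept as a fallback for materials that never received a uuid
    return [(getattr(mat, 'uuid', None), mat.name) for mat in materials if mat]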
class BlMaterial(BlDatablock):
class BlMaterial(ReplicatedDatablock):
use_delta = True
bl_id = "materials"
bl_class = bpy.types.Material
bl_check_common = False
bl_icon = 'MATERIAL_DATA'
bl_reload_parent = False
bl_reload_child = True
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.materials.new(data["name"])
def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
loader = Loader()
is_grease_pencil = data.get('is_grease_pencil')
use_nodes = data.get('use_nodes')
loader.load(target, data)
loader.load(datablock, data)
if is_grease_pencil:
if not target.is_grease_pencil:
bpy.data.materials.create_gpencil_data(target)
loader.load(target.grease_pencil, data['grease_pencil'])
if not datablock.is_grease_pencil:
bpy.data.materials.create_gpencil_data(datablock)
loader.load(datablock.grease_pencil, data['grease_pencil'])
elif use_nodes:
if target.node_tree is None:
target.use_nodes = True
if datablock.node_tree is None:
datablock.use_nodes = True
load_node_tree(data['node_tree'], target.node_tree)
load_node_tree(data['node_tree'], datablock.node_tree)
load_animation_data(data.get('nodes_animation_data'), datablock.node_tree)
load_animation_data(data.get('animation_data'), datablock)
def _dump_implementation(self, data, instance=None):
assert(instance)
@staticmethod
def dump(datablock: object) -> dict:
mat_dumper = Dumper()
mat_dumper.depth = 2
mat_dumper.include_filter = [
@ -444,9 +456,9 @@ class BlMaterial(BlDatablock):
'line_priority',
'is_grease_pencil'
]
data = mat_dumper.dump(instance)
data = mat_dumper.dump(datablock)
if instance.is_grease_pencil:
if datablock.is_grease_pencil:
gp_mat_dumper = Dumper()
gp_mat_dumper.depth = 3
@ -480,19 +492,30 @@ class BlMaterial(BlDatablock):
'use_overlap_strokes',
'use_fill_holdout',
]
data['grease_pencil'] = gp_mat_dumper.dump(instance.grease_pencil)
elif instance.use_nodes:
data['node_tree'] = dump_node_tree(instance.node_tree)
data['grease_pencil'] = gp_mat_dumper.dump(datablock.grease_pencil)
elif datablock.use_nodes:
data['node_tree'] = dump_node_tree(datablock.node_tree)
data['nodes_animation_data'] = dump_animation_data(datablock.node_tree)
data['animation_data'] = dump_animation_data(datablock)
return data
def _resolve_deps_implementation(self):
# TODO: resolve node group deps
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.materials)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
if self.instance.use_nodes:
deps.extend(get_node_tree_dependencies(self.instance.node_tree))
if self.is_library:
deps.append(self.instance.library)
if datablock.use_nodes:
deps.extend(get_node_tree_dependencies(datablock.node_tree))
deps.extend(resolve_animation_dependencies(datablock.node_tree))
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = bpy.types.Material
_class = BlMaterial

View File

@ -25,8 +25,13 @@ import numpy as np
from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
from replication.constants import DIFF_BINARY
from replication.exception import ContextError
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid
from .bl_material import dump_materials_slots, load_materials_slots
from ..utils import get_preferences
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
VERTICE = ['co']
@ -49,76 +54,79 @@ POLYGON = [
'material_index',
]
class BlMesh(BlDatablock):
class BlMesh(ReplicatedDatablock):
use_delta = True
bl_id = "meshes"
bl_class = bpy.types.Mesh
bl_check_common = False
bl_icon = 'MESH_DATA'
bl_reload_parent = True
def _construct(self, data):
instance = bpy.data.meshes.new(data["name"])
instance.uuid = self.uuid
return instance
@staticmethod
def construct(data: dict) -> object:
return bpy.data.meshes.new(data.get("name"))
def _load_implementation(self, data, target):
if not target or target.is_editmode:
@staticmethod
def load(data: dict, datablock: object):
if not datablock or datablock.is_editmode:
raise ContextError
else:
load_animation_data(data.get('animation_data'), datablock)
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)
# MATERIAL SLOTS
src_materials = data.get('materials', None)
if src_materials:
load_materials_slots(src_materials, target.materials)
load_materials_slots(src_materials, datablock.materials)
# CLEAR GEOMETRY
if target.vertices:
target.clear_geometry()
if datablock.vertices:
datablock.clear_geometry()
target.vertices.add(data["vertex_count"])
target.edges.add(data["egdes_count"])
target.loops.add(data["loop_count"])
target.polygons.add(data["poly_count"])
datablock.vertices.add(data["vertex_count"])
datablock.edges.add(data["egdes_count"])
datablock.loops.add(data["loop_count"])
datablock.polygons.add(data["poly_count"])
# LOADING
np_load_collection(data['vertices'], target.vertices, VERTICE)
np_load_collection(data['edges'], target.edges, EDGE)
np_load_collection(data['loops'], target.loops, LOOP)
np_load_collection(data["polygons"],target.polygons, POLYGON)
np_load_collection(data['vertices'], datablock.vertices, VERTICE)
np_load_collection(data['edges'], datablock.edges, EDGE)
np_load_collection(data['loops'], datablock.loops, LOOP)
np_load_collection(data["polygons"],datablock.polygons, POLYGON)
# UV Layers
if 'uv_layers' in data.keys():
for layer in data['uv_layers']:
if layer not in target.uv_layers:
target.uv_layers.new(name=layer)
if layer not in datablock.uv_layers:
datablock.uv_layers.new(name=layer)
np_load_collection_primitives(
target.uv_layers[layer].data,
datablock.uv_layers[layer].data,
'uv',
data["uv_layers"][layer]['data'])
# Vertex color
if 'vertex_colors' in data.keys():
for color_layer in data['vertex_colors']:
if color_layer not in target.vertex_colors:
target.vertex_colors.new(name=color_layer)
if color_layer not in datablock.vertex_colors:
datablock.vertex_colors.new(name=color_layer)
np_load_collection_primitives(
target.vertex_colors[color_layer].data,
datablock.vertex_colors[color_layer].data,
'color',
data["vertex_colors"][color_layer]['data'])
target.validate()
target.update()
datablock.validate()
datablock.update()
def _dump_implementation(self, data, instance=None):
assert(instance)
if (instance.is_editmode or bpy.context.mode == "SCULPT") and not self.preferences.sync_flags.sync_during_editmode:
@staticmethod
def dump(datablock: object) -> dict:
if (datablock.is_editmode or bpy.context.mode == "SCULPT") and not get_preferences().sync_flags.sync_during_editmode:
raise ContextError("Mesh is in edit mode")
mesh = instance
mesh = datablock
dumper = Dumper()
dumper.depth = 1
@ -132,6 +140,8 @@ class BlMesh(BlDatablock):
data = dumper.dump(mesh)
data['animation_data'] = dump_animation_data(datablock)
# VERTICES
data["vertex_count"] = len(mesh.vertices)
data["vertices"] = np_dump_collection(mesh.vertices, VERTICE)
@ -163,21 +173,30 @@ class BlMesh(BlDatablock):
data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')
# Materials
data['materials'] = dump_materials_slots(instance.materials)
data['materials'] = dump_materials_slots(datablock.materials)
return data
def _resolve_deps_implementation(self):
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
for material in self.instance.materials:
for material in datablock.materials:
if material:
deps.append(material)
deps.extend(resolve_animation_dependencies(datablock))
return deps
def diff(self):
if 'EDIT' in bpy.context.mode \
and not self.preferences.sync_flags.sync_during_editmode:
return False
else:
return super().diff()
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.meshes)
@staticmethod
def needs_update(datablock: object, data: dict) -> bool:
return ('EDIT' not in bpy.context.mode and bpy.context.mode != 'SCULPT') \
or get_preferences().sync_flags.sync_during_editmode
_type = bpy.types.Mesh
_class = BlMesh
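BlMesh relies on the np_dump_collection / np_load_collection helpers to move geometry as flat numpy buffers. A minimal sketch of that round trip for the 'co' attribute used in VERTICE above; illustrative only, copy_vertex_positions is a hypothetical helper:
import bpy
from .dump_anything import np_dump_collection, np_load_collection  # same helpers as above
def copy_vertex_positions(src: bpy.types.Mesh, dst: bpy.types.Mesh):
    # dump vertex coordinates into a flat buffer, then write them into the target mesh
    dumped = np_dump_collection(src.vertices, ['co'])
    dst.clear_geometry()
    dst.vertices.add(len(src.vertices))
    np_load_collection(dumped, dst.vertices, ['co'])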

View File

@ -23,7 +23,9 @@ from .dump_anything import (
Dumper, Loader, np_dump_collection_primitive, np_load_collection_primitives,
np_dump_collection, np_load_collection)
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
ELEMENT = [
@ -62,29 +64,35 @@ def load_metaball_elements(elements_data, elements):
np_load_collection(elements_data, elements, ELEMENT)
class BlMetaball(BlDatablock):
class BlMetaball(ReplicatedDatablock):
use_delta = True
bl_id = "metaballs"
bl_class = bpy.types.MetaBall
bl_check_common = False
bl_icon = 'META_BALL'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.metaballs.new(data["name"])
def _load_implementation(self, data, target):
loader = Loader()
loader.load(target, data)
@staticmethod
def load(data: dict, datablock: object):
load_animation_data(data.get('animation_data'), datablock)
target.elements.clear()
loader = Loader()
loader.load(datablock, data)
datablock.elements.clear()
for mtype in data["elements"]['type']:
new_element = target.elements.new()
new_element = datablock.elements.new()
load_metaball_elements(data['elements'], target.elements)
load_metaball_elements(data['elements'], datablock.elements)
def _dump_implementation(self, data, instance=None):
assert(instance)
@staticmethod
def dump(datablock: object) -> dict:
dumper = Dumper()
dumper.depth = 1
dumper.include_filter = [
@ -98,7 +106,24 @@ class BlMetaball(BlDatablock):
'texspace_size'
]
data = dumper.dump(instance)
data['elements'] = dump_metaball_elements(instance.elements)
data = dumper.dump(datablock)
data['animation_data'] = dump_animation_data(datablock)
data['elements'] = dump_metaball_elements(datablock.elements)
return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.metaballs)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = bpy.types.MetaBall
_class = BlMetaball

View File

@ -20,26 +20,45 @@ import bpy
import mathutils
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_material import (dump_node_tree,
load_node_tree,
get_node_tree_dependencies)
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlNodeGroup(ReplicatedDatablock):
use_delta = True
class BlNodeGroup(BlDatablock):
bl_id = "node_groups"
bl_class = bpy.types.NodeTree
bl_check_common = False
bl_icon = 'NODETREE'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.node_groups.new(data["name"], data["type"])
def _load_implementation(self, data, target):
load_node_tree(data, target)
@staticmethod
def load(data: dict, datablock: object):
load_node_tree(data, datablock)
def _dump_implementation(self, data, instance=None):
return dump_node_tree(instance)
@staticmethod
def dump(datablock: object) -> dict:
return dump_node_tree(datablock)
def _resolve_deps_implementation(self):
return get_node_tree_dependencies(self.instance)
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.node_groups)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
deps.extend(get_node_tree_dependencies(datablock))
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = [bpy.types.ShaderNodeTree, bpy.types.GeometryNodeTree]
_class = BlNodeGroup

View File

@ -22,8 +22,10 @@ import bpy
import mathutils
from replication.exception import ContextError
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_material import IGNORED_SOCKETS
from ..utils import get_preferences
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
from .dump_anything import (
Dumper,
@ -44,6 +46,8 @@ SHAPEKEY_BLOCK_ATTR = [
'slider_min',
'slider_max',
]
if bpy.app.version[1] >= 93:
SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
else:
@ -51,6 +55,7 @@ else:
logging.warning("Geometry node Float parameter not supported in \
blender 2.92.")
def get_node_group_inputs(node_group):
inputs = []
for inpt in node_group.inputs:
@ -89,6 +94,7 @@ def dump_physics(target: bpy.types.Object)->dict:
return physics_data
def load_physics(dumped_settings: dict, target: bpy.types.Object):
""" Load all physics settings from a given object excluding modifier
related physics settings (such as softbody, cloth, dynapaint and fluid)
@ -114,7 +120,8 @@ def load_physics(dumped_settings: dict, target: bpy.types.Object):
loader.load(target.rigid_body_constraint, dumped_settings['rigid_body_constraint'])
elif target.rigid_body_constraint:
bpy.ops.rigidbody.constraint_remove({"object": target})
def dump_modifier_geometry_node_inputs(modifier: bpy.types.Modifier) -> list:
""" Dump geometry node modifier input properties
@ -295,6 +302,7 @@ def load_vertex_groups(dumped_vertex_groups: dict, target_object: bpy.types.Obje
for index, weight in vg['vertices']:
vertex_group.add([index], weight, 'REPLACE')
def dump_shape_keys(target_key: bpy.types.Key)->dict:
""" Dump the target shape_keys datablock to a dict using numpy
@ -370,12 +378,12 @@ def dump_modifiers(modifiers: bpy.types.bpy_prop_collection)->dict:
:type modifiers: bpy.types.bpy_prop_collection
:return: list
"""
dumped_modifiers = {}
dumped_modifiers = []
dumper = Dumper()
dumper.depth = 1
dumper.exclude_filter = ['is_active']
for index, modifier in enumerate(modifiers):
for modifier in modifiers:
dumped_modifier = dumper.dump(modifier)
# hack to dump geometry nodes inputs
if modifier.type == 'NODES':
@ -397,9 +405,78 @@ def dump_modifiers(modifiers: bpy.types.bpy_prop_collection)->dict:
elif modifier.type == 'UV_PROJECT':
dumped_modifier['projectors'] =[p.object.name for p in modifier.projectors if p and p.object]
dumped_modifiers[modifier.name] = dumped_modifier
dumped_modifiers.append(dumped_modifier)
return dumped_modifiers
def dump_constraints(constraints: bpy.types.bpy_prop_collection)->list:
"""Dump all constraints to a list
:param constraints: constraints
:type constraints: bpy.types.bpy_prop_collection
:return: list
"""
dumper = Dumper()
dumper.depth = 2
dumper.include_filter = None
dumped_constraints = []
for constraint in constraints:
dumped_constraints.append(dumper.dump(constraint))
return dumped_constraints
def load_constraints(dumped_constraints: list, constraints: bpy.types.bpy_prop_collection):
""" Load dumped constraints
:param dumped_constraints: list of constraints to load
:type dumped_constraints: list
:param constraints: constraints
:type constraints: bpy.types.bpy_prop_collection
"""
loader = Loader()
constraints.clear()
for dumped_constraint in dumped_constraints:
constraint_type = dumped_constraint.get('type')
new_constraint = constraints.new(constraint_type)
loader.load(new_constraint, dumped_constraint)
def load_modifiers(dumped_modifiers: list, modifiers: bpy.types.bpy_prop_collection):
""" Dump all modifiers of a modifier collection into a dict
:param dumped_modifiers: list of modifiers to load
:type dumped_modifiers: list
:param modifiers: modifiers
:type modifiers: bpy.types.bpy_prop_collection
"""
loader = Loader()
modifiers.clear()
for dumped_modifier in dumped_modifiers:
name = dumped_modifier.get('name')
mtype = dumped_modifier.get('type')
loaded_modifier = modifiers.new(name, mtype)
loader.load(loaded_modifier, dumped_modifier)
if loaded_modifier.type == 'NODES':
load_modifier_geometry_node_inputs(dumped_modifier, loaded_modifier)
elif loaded_modifier.type == 'PARTICLE_SYSTEM':
default = loaded_modifier.particle_system.settings
dumped_particles = dumped_modifier['particle_system']
loader.load(loaded_modifier.particle_system, dumped_particles)
settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
if settings:
loaded_modifier.particle_system.settings = settings
# Hack to remove the default generated particle settings
if not default.uuid:
bpy.data.particles.remove(default)
elif loaded_modifier.type in ['SOFT_BODY', 'CLOTH']:
loader.load(loaded_modifier.settings, dumped_modifier['settings'])
elif loaded_modifier.type == 'UV_PROJECT':
for projector_index, projector_object in enumerate(dumped_modifier['projectors']):
target_object = bpy.data.objects.get(projector_object)
if target_object:
loaded_modifier.projectors[projector_index].object = target_object
else:
logging.error("Could't load projector target object {projector_object}")
def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_prop_collection):
""" Load modifiers custom data not managed by the dump_anything loader
@ -413,48 +490,21 @@ def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_
for modifier in modifiers:
dumped_modifier = dumped_modifiers.get(modifier.name)
if modifier.type == 'NODES':
load_modifier_geometry_node_inputs(dumped_modifier, modifier)
elif modifier.type == 'PARTICLE_SYSTEM':
default = modifier.particle_system.settings
dumped_particles = dumped_modifier['particle_system']
loader.load(modifier.particle_system, dumped_particles)
settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
if settings:
modifier.particle_system.settings = settings
# Hack to remove the default generated particle settings
if not default.uuid:
bpy.data.particles.remove(default)
elif modifier.type in ['SOFT_BODY', 'CLOTH']:
loader.load(modifier.settings, dumped_modifier['settings'])
elif modifier.type == 'UV_PROJECT':
for projector_index, projector_object in enumerate(dumped_modifier['projectors']):
target_object = bpy.data.objects.get(projector_object)
if target_object:
modifier.projectors[projector_index].object = target_object
else:
logging.error("Could't load projector target object {projector_object}")
class BlObject(BlDatablock):
class BlObject(ReplicatedDatablock):
use_delta = True
bl_id = "objects"
bl_class = bpy.types.Object
bl_check_common = False
bl_icon = 'OBJECT_DATA'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
instance = None
if self.is_library:
with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
targetData.objects = [
name for name in sourceData.objects if name == self.data['name']]
instance = bpy.data.objects[self.data['name']]
instance.uuid = self.uuid
return instance
# TODO: refactoring
object_name = data.get("name")
data_uuid = data.get("data_uuid")
@ -467,70 +517,68 @@ class BlObject(BlDatablock):
ignore=['images']) # TODO: use resolve_from_id
if data_type != 'EMPTY' and object_data is None:
raise Exception(f"Fail to load object {data['name']}({self.uuid})")
raise Exception(f"Fail to load object {data['name']})")
instance = bpy.data.objects.new(object_name, object_data)
instance.uuid = self.uuid
return bpy.data.objects.new(object_name, object_data)
return instance
def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
loader = Loader()
load_animation_data(data.get('animation_data'), datablock)
data_uuid = data.get("data_uuid")
data_id = data.get("data")
if target.data and (target.data.name != data_id):
target.data = get_datablock_from_uuid(
if datablock.data and (datablock.data.name != data_id):
datablock.data = get_datablock_from_uuid(
data_uuid, find_data_from_name(data_id), ignore=['images'])
# vertex groups
vertex_groups = data.get('vertex_groups', None)
if vertex_groups:
load_vertex_groups(vertex_groups, target)
load_vertex_groups(vertex_groups, datablock)
object_data = target.data
object_data = datablock.data
# SHAPE KEYS
shape_keys = data.get('shape_keys')
if shape_keys:
load_shape_keys(shape_keys, target)
load_shape_keys(shape_keys, datablock)
# Load transformation data
loader.load(target, data)
loader.load(datablock, data)
# Object display fields
if 'display' in data:
loader.load(target.display, data['display'])
loader.load(datablock.display, data['display'])
# Parenting
parent_id = data.get('parent_uid')
if parent_id:
parent = get_datablock_from_uuid(parent_id[0], bpy.data.objects[parent_id[1]])
# Avoid reloading
if target.parent != parent and parent is not None:
target.parent = parent
elif target.parent:
target.parent = None
if datablock.parent != parent and parent is not None:
datablock.parent = parent
elif datablock.parent:
datablock.parent = None
# Pose
if 'pose' in data:
if not target.pose:
if not datablock.pose:
raise Exception('No pose data yet (fixed in the near future)')
# Bone groups
for bg_name in data['pose']['bone_groups']:
bg_data = data['pose']['bone_groups'].get(bg_name)
bg_target = target.pose.bone_groups.get(bg_name)
bg_target = datablock.pose.bone_groups.get(bg_name)
if not bg_target:
bg_target = target.pose.bone_groups.new(name=bg_name)
bg_target = datablock.pose.bone_groups.new(name=bg_name)
loader.load(bg_target, bg_data)
# target.pose.bone_groups.get
# datablock.pose.bone_groups.get
# Bones
for bone in data['pose']['bones']:
target_bone = target.pose.bones.get(bone)
target_bone = datablock.pose.bones.get(bone)
bone_data = data['pose']['bones'].get(bone)
if 'constraints' in bone_data.keys():
@ -539,13 +587,13 @@ class BlObject(BlDatablock):
load_pose(target_bone, bone_data)
if 'bone_index' in bone_data.keys():
target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']]
target_bone.bone_group = datablock.pose.bone_groups[bone_data['bone_group_index']]
# TODO: find another way...
if target.empty_display_type == "IMAGE":
if datablock.empty_display_type == "IMAGE":
img_uuid = data.get('data_uuid')
if target.data is None and img_uuid:
target.data = get_datablock_from_uuid(img_uuid, None)
if datablock.data is None and img_uuid:
datablock.data = get_datablock_from_uuid(img_uuid, None)
if hasattr(object_data, 'skin_vertices') \
and object_data.skin_vertices\
@ -556,30 +604,31 @@ class BlObject(BlDatablock):
skin_data.data,
SKIN_DATA)
if hasattr(target, 'cycles_visibility') \
if hasattr(datablock, 'cycles_visibility') \
and 'cycles_visibility' in data:
loader.load(target.cycles_visibility, data['cycles_visibility'])
loader.load(datablock.cycles_visibility, data['cycles_visibility'])
if hasattr(target, 'modifiers'):
load_modifiers_custom_data(data['modifiers'], target.modifiers)
if hasattr(datablock, 'modifiers'):
load_modifiers(data['modifiers'], datablock.modifiers)
constraints = data.get('constraints')
if constraints:
load_constraints(constraints, datablock.constraints)
# PHYSICS
load_physics(data, target)
load_physics(data, datablock)
transform = data.get('transforms', None)
if transform:
target.matrix_parent_inverse = mathutils.Matrix(
transform['matrix_parent_inverse'])
target.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
target.matrix_local = mathutils.Matrix(transform['matrix_local'])
datablock.matrix_parent_inverse = mathutils.Matrix(transform['matrix_parent_inverse'])
datablock.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
def _dump_implementation(self, data, instance=None):
assert(instance)
if _is_editmode(instance):
if self.preferences.sync_flags.sync_during_editmode:
instance.update_from_editmode()
@staticmethod
def dump(datablock: object) -> dict:
if _is_editmode(datablock):
if get_preferences().sync_flags.sync_during_editmode:
datablock.update_from_editmode()
else:
raise ContextError("Object is in edit-mode.")
@ -615,35 +664,37 @@ class BlObject(BlDatablock):
'show_all_edges',
'show_texture_space',
'show_in_front',
'type'
'type',
'parent_type',
'parent_bone',
'track_axis',
'up_axis',
]
data = dumper.dump(instance)
data = dumper.dump(datablock)
data['animation_data'] = dump_animation_data(datablock)
dumper.include_filter = [
'matrix_parent_inverse',
'matrix_local',
'matrix_basis']
data['transforms'] = dumper.dump(instance)
data['transforms'] = dumper.dump(datablock)
dumper.include_filter = [
'show_shadows',
]
data['display'] = dumper.dump(instance.display)
data['display'] = dumper.dump(datablock.display)
data['data_uuid'] = getattr(instance.data, 'uuid', None)
if self.is_library:
return data
data['data_uuid'] = getattr(datablock.data, 'uuid', None)
# PARENTING
if instance.parent:
data['parent_uid'] = (instance.parent.uuid, instance.parent.name)
if datablock.parent:
data['parent_uid'] = (datablock.parent.uuid, datablock.parent.name)
# MODIFIERS
modifiers = getattr(instance, 'modifiers', None)
if hasattr(instance, 'modifiers'):
modifiers = getattr(datablock, 'modifiers', None)
if hasattr(datablock, 'modifiers'):
data['modifiers'] = dump_modifiers(modifiers)
gp_modifiers = getattr(instance, 'grease_pencil_modifiers', None)
gp_modifiers = getattr(datablock, 'grease_pencil_modifiers', None)
if gp_modifiers:
dumper.include_filter = None
@ -666,16 +717,14 @@ class BlObject(BlDatablock):
# CONSTRAINTS
if hasattr(instance, 'constraints'):
dumper.include_filter = None
dumper.depth = 3
data["constraints"] = dumper.dump(instance.constraints)
if hasattr(datablock, 'constraints'):
data["constraints"] = dump_constraints(datablock.constraints)
# POSE
if hasattr(instance, 'pose') and instance.pose:
if hasattr(datablock, 'pose') and datablock.pose:
# BONES
bones = {}
for bone in instance.pose.bones:
for bone in datablock.pose.bones:
bones[bone.name] = {}
dumper.depth = 1
rotation = 'rotation_quaternion' if bone.rotation_mode == 'QUATERNION' else 'rotation_euler'
@ -700,7 +749,7 @@ class BlObject(BlDatablock):
# GROUPS
bone_groups = {}
for group in instance.pose.bone_groups:
for group in datablock.pose.bone_groups:
dumper.depth = 3
dumper.include_filter = [
'name',
@ -710,11 +759,11 @@ class BlObject(BlDatablock):
data['pose']['bone_groups'] = bone_groups
# VERTEX GROUPS
if len(instance.vertex_groups) > 0:
data['vertex_groups'] = dump_vertex_groups(instance)
if len(datablock.vertex_groups) > 0:
data['vertex_groups'] = dump_vertex_groups(datablock)
# SHAPE KEYS
object_data = instance.data
object_data = datablock.data
if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
data['shape_keys'] = dump_shape_keys(object_data.shape_keys)
@ -727,7 +776,7 @@ class BlObject(BlDatablock):
data['skin_vertices'] = skin_vertices
# CYCLES SETTINGS
if hasattr(instance, 'cycles_visibility'):
if hasattr(datablock, 'cycles_visibility'):
dumper.include_filter = [
'camera',
'diffuse',
@ -736,38 +785,48 @@ class BlObject(BlDatablock):
'scatter',
'shadow',
]
data['cycles_visibility'] = dumper.dump(instance.cycles_visibility)
data['cycles_visibility'] = dumper.dump(datablock.cycles_visibility)
# PHYSICS
data.update(dump_physics(instance))
data.update(dump_physics(datablock))
return data
def _resolve_deps_implementation(self):
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
# Avoid Empty case
if self.instance.data:
deps.append(self.instance.data)
if datablock.data:
deps.append(datablock.data)
# Particle systems
for particle_slot in self.instance.particle_systems:
for particle_slot in datablock.particle_systems:
deps.append(particle_slot.settings)
if self.is_library:
deps.append(self.instance.library)
if datablock.parent:
deps.append(datablock.parent)
if self.instance.parent:
deps.append(self.instance.parent)
if self.instance.instance_type == 'COLLECTION':
if datablock.instance_type == 'COLLECTION':
# TODO: uuid based
deps.append(self.instance.instance_collection)
deps.append(datablock.instance_collection)
if self.instance.modifiers:
deps.extend(find_textures_dependencies(self.instance.modifiers))
deps.extend(find_geometry_nodes_dependencies(self.instance.modifiers))
if datablock.modifiers:
deps.extend(find_textures_dependencies(datablock.modifiers))
deps.extend(find_geometry_nodes_dependencies(datablock.modifiers))
if hasattr(datablock.data, 'shape_keys') and datablock.data.shape_keys:
deps.extend(resolve_animation_dependencies(datablock.data.shape_keys))
deps.extend(resolve_animation_dependencies(datablock))
if hasattr(self.instance.data, 'shape_keys') and self.instance.data.shape_keys:
deps.extend(resolve_animation_dependencies(self.instance.data.shape_keys))
return deps
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.objects)
_type = bpy.types.Object
_class = BlObject

View File

@ -2,7 +2,10 @@ import bpy
import mathutils
from . import dump_anything
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
def dump_textures_slots(texture_slots: bpy.types.bpy_prop_collection) -> list:
@ -37,54 +40,67 @@ IGNORED_ATTR = [
"users"
]
class BlParticle(BlDatablock):
class BlParticle(ReplicatedDatablock):
use_delta = True
bl_id = "particles"
bl_class = bpy.types.ParticleSettings
bl_icon = "PARTICLES"
bl_check_common = False
bl_reload_parent = False
def _construct(self, data):
instance = bpy.data.particles.new(data["name"])
instance.uuid = self.uuid
return instance
@staticmethod
def construct(data: dict) -> object:
return bpy.data.particles.new(data["name"])
def _load_implementation(self, data, target):
dump_anything.load(target, data)
@staticmethod
def load(data: dict, datablock: object):
load_animation_data(data.get('animation_data'), datablock)
dump_anything.load(datablock, data)
dump_anything.load(target.effector_weights, data["effector_weights"])
dump_anything.load(datablock.effector_weights, data["effector_weights"])
# Force field
force_field_1 = data.get("force_field_1", None)
if force_field_1:
dump_anything.load(target.force_field_1, force_field_1)
dump_anything.load(datablock.force_field_1, force_field_1)
force_field_2 = data.get("force_field_2", None)
if force_field_2:
dump_anything.load(target.force_field_2, force_field_2)
dump_anything.load(datablock.force_field_2, force_field_2)
# Texture slots
load_texture_slots(data["texture_slots"], target.texture_slots)
def _dump_implementation(self, data, instance=None):
assert instance
load_texture_slots(data["texture_slots"], datablock.texture_slots)
@staticmethod
def dump(datablock: object) -> dict:
dumper = dump_anything.Dumper()
dumper.depth = 1
dumper.exclude_filter = IGNORED_ATTR
data = dumper.dump(instance)
data = dumper.dump(datablock)
# Particle effectors
data["effector_weights"] = dumper.dump(instance.effector_weights)
if instance.force_field_1:
data["force_field_1"] = dumper.dump(instance.force_field_1)
if instance.force_field_2:
data["force_field_2"] = dumper.dump(instance.force_field_2)
data["effector_weights"] = dumper.dump(datablock.effector_weights)
if datablock.force_field_1:
data["force_field_1"] = dumper.dump(datablock.force_field_1)
if datablock.force_field_2:
data["force_field_2"] = dumper.dump(datablock.force_field_2)
# Texture slots
data["texture_slots"] = dump_textures_slots(instance.texture_slots)
data["texture_slots"] = dump_textures_slots(datablock.texture_slots)
data['animation_data'] = dump_animation_data(datablock)
return data
def _resolve_deps_implementation(self):
return [t.texture for t in self.instance.texture_slots if t and t.texture]
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.particles)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = [t.texture for t in datablock.texture_slots if t and t.texture]
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = bpy.types.ParticleSettings
_class = BlParticle

View File

@ -18,17 +18,21 @@
import logging
from pathlib import Path
from uuid import uuid4
import bpy
import mathutils
from deepdiff import DeepDiff
from deepdiff import DeepDiff, Delta
from replication.constants import DIFF_JSON, MODIFIED
from replication.protocol import ReplicatedDatablock
from ..utils import flush_history
from ..utils import flush_history, get_preferences
from .bl_action import (dump_animation_data, load_animation_data,
resolve_animation_dependencies)
from .bl_collection import (dump_collection_children, dump_collection_objects,
load_collection_childrens, load_collection_objects,
resolve_collection_dependencies)
from .bl_datablock import BlDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_file import get_filepath
from .dump_anything import Dumper, Loader
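Delta is imported here from deepdiff; as background (not specific to this addon), the diff/patch pattern that delta-based updates presumably build on looks like this:
from deepdiff import DeepDiff, Delta
old_dump = {'frame_start': 1, 'frame_end': 250}
new_dump = {'frame_start': 1, 'frame_end': 300}
diff = DeepDiff(old_dump, new_dump)   # structural diff between two dumped states
delta = Delta(diff)                   # serializable patch derived from the diff
assert old_dump + delta == new_dump   # applying the delta reproduces the new state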
@ -286,12 +290,10 @@ def dump_sequence(sequence: bpy.types.Sequence) -> dict:
dumper.depth = 1
data = dumper.dump(sequence)
# TODO: Support multiple images
if sequence.type == 'IMAGE':
data['filenames'] = [e.filename for e in sequence.elements]
# Effect strip inputs
input_count = getattr(sequence, 'input_count', None)
if input_count:
@ -302,7 +304,8 @@ def dump_sequence(sequence: bpy.types.Sequence) -> dict:
return data
def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor):
def load_sequence(sequence_data: dict,
sequence_editor: bpy.types.SequenceEditor):
""" Load sequence from dumped data
:arg sequence_data: sequence to dump
@ -321,54 +324,56 @@ def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor
if strip_type == 'SCENE':
strip_scene = bpy.data.scenes.get(sequence_data.get('scene'))
sequence = sequence_editor.sequences.new_scene(strip_name,
strip_scene,
strip_channel,
strip_frame_start)
strip_scene,
strip_channel,
strip_frame_start)
elif strip_type == 'MOVIE':
filepath = get_filepath(Path(sequence_data['filepath']).name)
sequence = sequence_editor.sequences.new_movie(strip_name,
filepath,
strip_channel,
strip_frame_start)
filepath,
strip_channel,
strip_frame_start)
elif strip_type == 'SOUND':
filepath = bpy.data.sounds[sequence_data['sound']].filepath
sequence = sequence_editor.sequences.new_sound(strip_name,
filepath,
strip_channel,
strip_frame_start)
filepath,
strip_channel,
strip_frame_start)
elif strip_type == 'IMAGE':
images_name = sequence_data.get('filenames')
filepath = get_filepath(images_name[0])
sequence = sequence_editor.sequences.new_image(strip_name,
filepath,
strip_channel,
strip_frame_start)
filepath,
strip_channel,
strip_frame_start)
# load other images
if len(images_name)>1:
for img_idx in range(1,len(images_name)):
if len(images_name) > 1:
for img_idx in range(1, len(images_name)):
sequence.elements.append((images_name[img_idx]))
else:
seq = {}
for i in range(sequence_data['input_count']):
seq[f"seq{i+1}"] = sequence_editor.sequences_all.get(sequence_data.get(f"input_{i+1}", None))
seq[f"seq{i+1}"] = sequence_editor.sequences_all.get(
sequence_data.get(f"input_{i+1}", None))
sequence = sequence_editor.sequences.new_effect(name=strip_name,
type=strip_type,
channel=strip_channel,
frame_start=strip_frame_start,
frame_end=sequence_data['frame_final_end'],
**seq)
type=strip_type,
channel=strip_channel,
frame_start=strip_frame_start,
frame_end=sequence_data['frame_final_end'],
**seq)
loader = Loader()
# TODO: Support filepath updates
loader.exclure_filter = ['filepath', 'sound', 'filenames','fps']
loader.exclure_filter = ['filepath', 'sound', 'filenames', 'fps']
loader.load(sequence, sequence_data)
sequence.select = False
class BlScene(BlDatablock):
class BlScene(ReplicatedDatablock):
is_root = True
use_delta = True
bl_id = "scenes"
bl_class = bpy.types.Scene
@ -376,76 +381,88 @@ class BlScene(BlDatablock):
bl_icon = 'SCENE_DATA'
bl_reload_parent = False
def _construct(self, data):
instance = bpy.data.scenes.new(data["name"])
instance.uuid = self.uuid
@staticmethod
def construct(data: dict) -> object:
return bpy.data.scenes.new(data["name"])
return instance
@staticmethod
def load(data: dict, datablock: object):
load_animation_data(data.get('animation_data'), datablock)
def _load_implementation(self, data, target):
# Load other meshes metadata
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)
# Load master collection
load_collection_objects(
data['collection']['objects'], target.collection)
data['collection']['objects'], datablock.collection)
load_collection_childrens(
data['collection']['children'], target.collection)
data['collection']['children'], datablock.collection)
if 'world' in data.keys():
target.world = bpy.data.worlds[data['world']]
datablock.world = bpy.data.worlds[data['world']]
# Annotation
if 'grease_pencil' in data.keys():
target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
gpencil_uid = data.get('grease_pencil')
if gpencil_uid:
datablock.grease_pencil = resolve_datablock_from_uuid(gpencil_uid, bpy.data.grease_pencils)
if self.preferences.sync_flags.sync_render_settings:
if get_preferences().sync_flags.sync_render_settings:
if 'eevee' in data.keys():
loader.load(target.eevee, data['eevee'])
loader.load(datablock.eevee, data['eevee'])
if 'cycles' in data.keys():
loader.load(target.cycles, data['cycles'])
loader.load(datablock.cycles, data['cycles'])
if 'render' in data.keys():
loader.load(target.render, data['render'])
loader.load(datablock.render, data['render'])
if 'view_settings' in data.keys():
loader.load(target.view_settings, data['view_settings'])
if target.view_settings.use_curve_mapping and \
'curve_mapping' in data['view_settings']:
view_settings = data.get('view_settings')
if view_settings:
loader.load(datablock.view_settings, view_settings)
if datablock.view_settings.use_curve_mapping and \
'curve_mapping' in view_settings:
# TODO: change this ugly fix
target.view_settings.curve_mapping.white_level = data[
'view_settings']['curve_mapping']['white_level']
target.view_settings.curve_mapping.black_level = data[
'view_settings']['curve_mapping']['black_level']
target.view_settings.curve_mapping.update()
datablock.view_settings.curve_mapping.white_level = view_settings['curve_mapping']['white_level']
datablock.view_settings.curve_mapping.black_level = view_settings['curve_mapping']['black_level']
datablock.view_settings.curve_mapping.update()
# Sequencer
sequences = data.get('sequences')
if sequences:
# Create sequencer data
target.sequence_editor_create()
vse = target.sequence_editor
datablock.sequence_editor_create()
vse = datablock.sequence_editor
# Clear removed sequences
for seq in vse.sequences_all:
if seq.name not in sequences:
vse.sequences.remove(seq)
# Load existing sequences
for seq_name, seq_data in sequences.items():
for seq_data in sequences.values():
load_sequence(seq_data, vse)
# If the sequence is no longer used, clear it
elif target.sequence_editor and not sequences:
target.sequence_editor_clear()
elif datablock.sequence_editor and not sequences:
datablock.sequence_editor_clear()
# Timeline markers
markers = data.get('timeline_markers')
if markers:
datablock.timeline_markers.clear()
for name, frame, camera in markers:
marker = datablock.timeline_markers.new(name, frame=frame)
if camera:
marker.camera = resolve_datablock_from_uuid(camera, bpy.data.objects)
marker.select = False
# FIXME: Find a better way after the replication big refactoring
# Keep other users from deleting collection objects by flushing their history
flush_history()
def _dump_implementation(self, data, instance=None):
assert(instance)
@staticmethod
def dump(datablock: object) -> dict:
data = {}
data['animation_data'] = dump_animation_data(datablock)
# Metadata
scene_dumper = Dumper()
@ -454,45 +471,44 @@ class BlScene(BlDatablock):
'name',
'world',
'id',
'grease_pencil',
'frame_start',
'frame_end',
'frame_step',
]
if self.preferences.sync_flags.sync_active_camera:
if get_preferences().sync_flags.sync_active_camera:
scene_dumper.include_filter.append('camera')
data.update(scene_dumper.dump(instance))
data.update(scene_dumper.dump(datablock))
# Master collection
data['collection'] = {}
data['collection']['children'] = dump_collection_children(
instance.collection)
datablock.collection)
data['collection']['objects'] = dump_collection_objects(
instance.collection)
datablock.collection)
scene_dumper.depth = 1
scene_dumper.include_filter = None
# Render settings
if self.preferences.sync_flags.sync_render_settings:
if get_preferences().sync_flags.sync_render_settings:
scene_dumper.include_filter = RENDER_SETTINGS
data['render'] = scene_dumper.dump(instance.render)
data['render'] = scene_dumper.dump(datablock.render)
if instance.render.engine == 'BLENDER_EEVEE':
if datablock.render.engine == 'BLENDER_EEVEE':
scene_dumper.include_filter = EVEE_SETTINGS
data['eevee'] = scene_dumper.dump(instance.eevee)
elif instance.render.engine == 'CYCLES':
data['eevee'] = scene_dumper.dump(datablock.eevee)
elif datablock.render.engine == 'CYCLES':
scene_dumper.include_filter = CYCLES_SETTINGS
data['cycles'] = scene_dumper.dump(instance.cycles)
data['cycles'] = scene_dumper.dump(datablock.cycles)
scene_dumper.include_filter = VIEW_SETTINGS
data['view_settings'] = scene_dumper.dump(instance.view_settings)
data['view_settings'] = scene_dumper.dump(datablock.view_settings)
if instance.view_settings.use_curve_mapping:
if datablock.view_settings.use_curve_mapping:
data['view_settings']['curve_mapping'] = scene_dumper.dump(
instance.view_settings.curve_mapping)
datablock.view_settings.curve_mapping)
scene_dumper.depth = 5
scene_dumper.include_filter = [
'curves',
@ -500,35 +516,44 @@ class BlScene(BlDatablock):
'location',
]
data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(
instance.view_settings.curve_mapping.curves)
datablock.view_settings.curve_mapping.curves)
# Sequence
vse = instance.sequence_editor
vse = datablock.sequence_editor
if vse:
dumped_sequences = {}
for seq in vse.sequences_all:
dumped_sequences[seq.name] = dump_sequence(seq)
data['sequences'] = dumped_sequences
# Timeline markers
if datablock.timeline_markers:
data['timeline_markers'] = [(m.name, m.frame, getattr(m.camera, 'uuid', None)) for m in datablock.timeline_markers]
if datablock.grease_pencil:
data['grease_pencil'] = datablock.grease_pencil.uuid
return data
def _resolve_deps_implementation(self):
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
# Master Collection
deps.extend(resolve_collection_dependencies(self.instance.collection))
deps.extend(resolve_collection_dependencies(datablock.collection))
# world
if self.instance.world:
deps.append(self.instance.world)
if datablock.world:
deps.append(datablock.world)
# annotations
if self.instance.grease_pencil:
deps.append(self.instance.grease_pencil)
if datablock.grease_pencil:
deps.append(datablock.grease_pencil)
deps.extend(resolve_animation_dependencies(datablock))
# Sequences
vse = self.instance.sequence_editor
vse = datablock.sequence_editor
if vse:
for sequence in vse.sequences_all:
if sequence.type == 'MOVIE' and sequence.filepath:
@ -543,16 +568,45 @@ class BlScene(BlDatablock):
return deps
def diff(self):
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
name = data.get('name')
datablock = resolve_datablock_from_uuid(uuid, bpy.data.scenes)
if datablock is None:
datablock = bpy.data.scenes.get(name)
return datablock
@staticmethod
def compute_delta(last_data: dict, current_data: dict) -> Delta:
exclude_path = []
if not self.preferences.sync_flags.sync_render_settings:
if not get_preferences().sync_flags.sync_render_settings:
exclude_path.append("root['eevee']")
exclude_path.append("root['cycles']")
exclude_path.append("root['view_settings']")
exclude_path.append("root['render']")
if not self.preferences.sync_flags.sync_active_camera:
if not get_preferences().sync_flags.sync_active_camera:
exclude_path.append("root['camera']")
return DeepDiff(self.data, self._dump(instance=self.instance), exclude_paths=exclude_path)
diff_params = {
'exclude_paths': exclude_path,
'ignore_order': True,
'report_repetition': True
}
delta_params = {
# 'mutate': True
}
return Delta(
DeepDiff(last_data,
current_data,
cache_size=5000,
**diff_params),
**delta_params)
_type = bpy.types.Scene
_class = BlScene
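For context, a minimal sketch (not part of this diff) of what the exclude_paths / Delta combination in compute_delta() does, using the deepdiff package the add-on already depends on; the two dictionaries are invented for illustration:
from deepdiff import DeepDiff, Delta
last_data = {'name': 'Scene', 'frame_start': 1, 'eevee': {'taa_samples': 16}}
current_data = {'name': 'Scene', 'frame_start': 10, 'eevee': {'taa_samples': 64}}
# With render-settings sync disabled, 'eevee' changes are pruned from the diff,
# just as compute_delta() does through its exclude_path list.
diff = DeepDiff(last_data, current_data,
exclude_paths=["root['eevee']"],
ignore_order=True,
report_repetition=True)
patched = last_data + Delta(diff)  # Delta objects apply with '+'
assert patched['frame_start'] == 10  # synced field updated
assert patched['eevee']['taa_samples'] == 16  # excluded field left untouched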

View File

@ -23,45 +23,59 @@ from pathlib import Path
import bpy
from .bl_file import get_filepath, ensure_unpacked
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .dump_anything import Dumper, Loader
from .bl_datablock import resolve_datablock_from_uuid
class BlSound(BlDatablock):
class BlSound(ReplicatedDatablock):
bl_id = "sounds"
bl_class = bpy.types.Sound
bl_check_common = False
bl_icon = 'SOUND'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
filename = data.get('filename')
return bpy.data.sounds.load(get_filepath(filename))
def _load(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)
def diff(self):
return False
def _dump(self, instance=None):
filename = Path(instance.filepath).name
@staticmethod
def dump(datablock: object) -> dict:
filename = Path(datablock.filepath).name
if not filename:
raise FileExistsError(instance.filepath)
raise FileExistsError(datablock.filepath)
return {
'filename': filename,
'name': instance.name
'name': datablock.name
}
def _resolve_deps_implementation(self):
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
if self.instance.filepath and self.instance.filepath != '<builtin>':
ensure_unpacked(self.instance)
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
if datablock.filepath and datablock.filepath != '<builtin>':
ensure_unpacked(datablock)
deps.append(Path(bpy.path.abspath(datablock.filepath)))
return deps
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.sounds)
@staticmethod
def needs_update(datablock: object, data:dict)-> bool:
return False
_type = bpy.types.Sound
_class = BlSound

View File

@ -20,26 +20,31 @@ import bpy
import mathutils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlSpeaker(ReplicatedDatablock):
use_delta = True
class BlSpeaker(BlDatablock):
bl_id = "speakers"
bl_class = bpy.types.Speaker
bl_check_common = False
bl_icon = 'SPEAKER'
bl_reload_parent = False
def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)
load_animation_data(data.get('animation_data'), datablock)
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.speakers.new(data["name"])
def _dump_implementation(self, data, instance=None):
assert(instance)
@staticmethod
def dump(datablock: object) -> dict:
dumper = Dumper()
dumper.depth = 1
dumper.include_filter = [
@ -58,17 +63,27 @@ class BlSpeaker(BlDatablock):
'cone_volume_outer'
]
return dumper.dump(instance)
data = dumper.dump(datablock)
data['animation_data'] = dump_animation_data(datablock)
return data
def _resolve_deps_implementation(self):
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.speakers)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
# TODO: resolve material
deps = []
sound = self.instance.sound
sound = datablock.sound
if sound:
deps.append(sound)
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = bpy.types.Speaker
_class = BlSpeaker

View File

@ -20,25 +20,32 @@ import bpy
import mathutils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
import bpy.types as T
class BlTexture(ReplicatedDatablock):
use_delta = True
class BlTexture(BlDatablock):
bl_id = "textures"
bl_class = bpy.types.Texture
bl_check_common = False
bl_icon = 'TEXTURE'
bl_reload_parent = False
def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)
load_animation_data(data.get('animation_data'), datablock)
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.textures.new(data["name"], data["type"])
def _dump_implementation(self, data, instance=None):
assert(instance)
@staticmethod
def dump(datablock: object) -> dict:
dumper = Dumper()
dumper.depth = 1
@ -52,24 +59,39 @@ class BlTexture(BlDatablock):
'name_full'
]
data = dumper.dump(instance)
color_ramp = getattr(instance, 'color_ramp', None)
data = dumper.dump(datablock)
color_ramp = getattr(datablock, 'color_ramp', None)
if color_ramp:
dumper.depth = 4
data['color_ramp'] = dumper.dump(color_ramp)
data['animation_data'] = dump_animation_data(datablock)
return data
def _resolve_deps_implementation(self):
# TODO: resolve material
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.textures)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
image = getattr(self.instance,"image", None)
image = getattr(datablock,"image", None)
if image:
deps.append(image)
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = [T.WoodTexture, T.VoronoiTexture,
T.StucciTexture, T.NoiseTexture,
T.MusgraveTexture, T.MarbleTexture,
T.MagicTexture, T.ImageTexture,
T.DistortedNoiseTexture, T.CloudsTexture,
T.BlendTexture]
_class = BlTexture

View File

@ -21,32 +21,26 @@ import mathutils
from pathlib import Path
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_material import dump_materials_slots, load_materials_slots
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlVolume(ReplicatedDatablock):
use_delta = True
class BlVolume(BlDatablock):
bl_id = "volumes"
bl_class = bpy.types.Volume
bl_check_common = False
bl_icon = 'VOLUME_DATA'
bl_reload_parent = False
def _load_implementation(self, data, target):
loader = Loader()
loader.load(target, data)
loader.load(target.display, data['display'])
# MATERIAL SLOTS
src_materials = data.get('materials', None)
if src_materials:
load_materials_slots(src_materials, target.materials)
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.volumes.new(data["name"])
def _dump_implementation(self, data, instance=None):
assert(instance)
@staticmethod
def dump(datablock: object) -> dict:
dumper = Dumper()
dumper.depth = 1
dumper.exclude_filter = [
@ -60,27 +54,48 @@ class BlVolume(BlDatablock):
'use_fake_user'
]
data = dumper.dump(instance)
data = dumper.dump(datablock)
data['display'] = dumper.dump(instance.display)
data['display'] = dumper.dump(datablock.display)
# Fix material index
data['materials'] = dump_materials_slots(instance.materials)
data['materials'] = dump_materials_slots(datablock.materials)
data['animation_data'] = dump_animation_data(datablock)
return data
def _resolve_deps_implementation(self):
@staticmethod
def load(data: dict, datablock: object):
load_animation_data(data.get('animation_data'), datablock)
loader = Loader()
loader.load(datablock, data)
loader.load(datablock.display, data['display'])
# MATERIAL SLOTS
src_materials = data.get('materials', None)
if src_materials:
load_materials_slots(src_materials, datablock.materials)
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.volumes)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
# TODO: resolve material
deps = []
external_vdb = Path(bpy.path.abspath(self.instance.filepath))
external_vdb = Path(bpy.path.abspath(datablock.filepath))
if external_vdb.exists() and not external_vdb.is_dir():
deps.append(external_vdb)
for material in self.instance.materials:
for material in datablock.materials:
if material:
deps.append(material)
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = bpy.types.Volume
_class = BlVolume

View File

@ -20,35 +20,42 @@ import bpy
import mathutils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_material import (load_node_tree,
dump_node_tree,
get_node_tree_dependencies)
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlWorld(ReplicatedDatablock):
use_delta = True
class BlWorld(BlDatablock):
bl_id = "worlds"
bl_class = bpy.types.World
bl_check_common = True
bl_icon = 'WORLD_DATA'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.worlds.new(data["name"])
def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
load_animation_data(data.get('animation_data'), datablock)
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)
if data["use_nodes"]:
if target.node_tree is None:
target.use_nodes = True
if datablock.node_tree is None:
datablock.use_nodes = True
load_node_tree(data['node_tree'], target.node_tree)
def _dump_implementation(self, data, instance=None):
assert(instance)
load_node_tree(data['node_tree'], datablock.node_tree)
@staticmethod
def dump(datablock: object) -> dict:
world_dumper = Dumper()
world_dumper.depth = 1
world_dumper.include_filter = [
@ -56,17 +63,27 @@ class BlWorld(BlDatablock):
"name",
"color"
]
data = world_dumper.dump(instance)
if instance.use_nodes:
data['node_tree'] = dump_node_tree(instance.node_tree)
data = world_dumper.dump(datablock)
if datablock.use_nodes:
data['node_tree'] = dump_node_tree(datablock.node_tree)
data['animation_data'] = dump_animation_data(datablock)
return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.worlds)
def _resolve_deps_implementation(self):
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
if self.instance.use_nodes:
deps.extend(get_node_tree_dependencies(self.instance.node_tree))
if self.is_library:
deps.append(self.instance.library)
if datablock.use_nodes:
deps.extend(get_node_tree_dependencies(datablock.node_tree))
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = bpy.types.World
_class = BlWorld

View File

@ -507,16 +507,12 @@ class Loader:
_constructors = {
T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]),
T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []),
T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"]),
T.GpencilModifier: (CONSTRUCTOR_NEW, ["name", "type"]),
T.Constraint: (CONSTRUCTOR_NEW, ["type"]),
}
destructors = {
T.ColorRampElement: DESTRUCTOR_REMOVE,
T.Modifier: DESTRUCTOR_CLEAR,
T.GpencilModifier: DESTRUCTOR_CLEAR,
T.Constraint: DESTRUCTOR_REMOVE,
}
element_type = element.bl_rna_property.fixed_type

View File

@ -24,20 +24,25 @@ import sys
from pathlib import Path
import socket
import re
import bpy
VERSION_EXPR = re.compile('\d+.\d+.\d+')
THIRD_PARTY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
DEFAULT_CACHE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "cache")
REPLICATION_DEPENDENCIES = {
"zmq",
"deepdiff"
}
LIBS = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
REPLICATION = os.path.join(LIBS,"replication")
PYTHON_PATH = None
SUBPROCESS_DIR = None
rtypes = []
def module_can_be_imported(name):
def module_can_be_imported(name: str) -> bool:
try:
__import__(name)
return True
@ -50,7 +55,7 @@ def install_pip():
subprocess.run([str(PYTHON_PATH), "-m", "ensurepip"])
def install_package(name, version):
def install_package(name: str, install_dir: str):
logging.info(f"Installing {name}...")
env = os.environ
if "PIP_REQUIRE_VIRTUALENV" in env:
@ -60,12 +65,13 @@ def install_package(name, version):
# env var for the subprocess.
env = os.environ.copy()
del env["PIP_REQUIRE_VIRTUALENV"]
subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}=={version}"], env=env)
subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}", "-t", install_dir], env=env)
if name in sys.modules:
del sys.modules[name]
def check_package_version(name, required_version):
def check_package_version(name: str, required_version: str):
logging.info(f"Checking {name} version...")
out = subprocess.run([str(PYTHON_PATH), "-m", "pip", "show", name], capture_output=True)
@ -77,6 +83,7 @@ def check_package_version(name, required_version):
logging.info(f"{name} needs an update")
return False
def get_ip():
"""
Retrieve the main network interface IP.
@ -94,7 +101,25 @@ def check_dir(dir):
os.makedirs(dir)
def setup(dependencies, python_path):
def setup_paths(paths: list):
""" Add missing path to sys.path
"""
for path in paths:
if path not in sys.path:
logging.debug(f"Adding {path} dir to the path.")
sys.path.insert(0, path)
def remove_paths(paths: list):
""" Remove list of path from sys.path
"""
for path in paths:
if path in sys.path:
logging.debug(f"Removing {path} dir from the path.")
sys.path.remove(path)
def install_modules(dependencies: list, python_path: str, install_dir: str):
global PYTHON_PATH, SUBPROCESS_DIR
PYTHON_PATH = Path(python_path)
@ -103,9 +128,23 @@ def setup(dependencies, python_path):
if not module_can_be_imported("pip"):
install_pip()
for package_name, package_version in dependencies:
for package_name in dependencies:
if not module_can_be_imported(package_name):
install_package(package_name, package_version)
install_package(package_name, install_dir=install_dir)
module_can_be_imported(package_name)
elif not check_package_version(package_name, package_version):
install_package(package_name, package_version)
def register():
if bpy.app.version[1] >= 91:
python_binary_path = sys.executable
else:
python_binary_path = bpy.app.binary_path_python
for module_name in list(sys.modules.keys()):
if 'replication' in module_name:
del sys.modules[module_name]
setup_paths([LIBS, REPLICATION])
install_modules(REPLICATION_DEPENDENCIES, python_binary_path, install_dir=LIBS)
def unregister():
remove_paths([REPLICATION, LIBS])
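As a rough usage sketch (not part of this diff; paths assumed relative to the add-on folder, mirroring register() above), the helpers combine like this:
import os
import sys
LIBS = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
# Make the vendored packages importable ahead of any system-wide copies...
setup_paths([LIBS, os.path.join(LIBS, "replication")])
# ...then pip-install the runtime dependencies straight into that folder.
install_modules(["zmq", "deepdiff"], sys.executable, install_dir=LIBS)
assert module_can_be_imported("deepdiff")
Installing into a local "libs" folder and inserting it at the front of sys.path means the bundled replication package wins over any copy already present in Blender's Python.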

multi_user/handlers.py (new file, 155 lines)
View File

@ -0,0 +1,155 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import logging
import bpy
from bpy.app.handlers import persistent
from replication import porcelain
from replication.constants import RP_COMMON, STATE_ACTIVE, STATE_SYNCING, UP
from replication.exception import ContextError, NonAuthorizedOperationError
from replication.interface import session
from . import shared_data, utils
def sanitize_deps_graph(remove_nodes: bool = False):
""" Cleanup the replication graph
"""
if session and session.state == STATE_ACTIVE:
start = utils.current_milli_time()
rm_cpt = 0
for node in session.repository.graph.values():
node.instance = session.repository.rdp.resolve(node.data)
if node is None \
or (node.state == UP and not node.instance):
if remove_nodes:
try:
porcelain.rm(session.repository,
node.uuid,
remove_dependencies=False)
logging.info(f"Removing {node.uuid}")
rm_cpt += 1
except NonAuthorizedOperationError:
continue
logging.info(f"Sanitize took { utils.current_milli_time()-start} ms, removed {rm_cpt} nodes")
def update_external_dependencies():
"""Force evaluation of external dependencies (files such as images)
"""
external_types = ['WindowsPath', 'PosixPath', 'Image']
nodes_ids = [n.uuid for n in session.repository.graph.values() if n.data['type_id'] in external_types]
for node_id in nodes_ids:
node = session.repository.graph.get(node_id)
if node and node.owner in [session.repository.username, RP_COMMON]:
porcelain.commit(session.repository, node_id)
porcelain.push(session.repository, 'origin', node_id)
@persistent
def on_scene_update(scene):
"""Forward blender depsgraph update to replication
"""
if session and session.state == STATE_ACTIVE:
context = bpy.context
blender_depsgraph = bpy.context.view_layer.depsgraph
dependency_updates = [u for u in blender_depsgraph.updates]
settings = utils.get_preferences()
incoming_updates = shared_data.session.applied_updates
distant_update = [getattr(u.id, 'uuid', None) for u in dependency_updates if getattr(u.id, 'uuid', None) in incoming_updates]
if distant_update:
for u in distant_update:
shared_data.session.applied_updates.remove(u)
logging.debug(f"Ignoring distant update of {dependency_updates[0].id.name}")
return
# NOTE: maybe we don't need to check each update but only the first
for update in reversed(dependency_updates):
update_uuid = getattr(update.id, 'uuid', None)
if update_uuid:
node = session.repository.graph.get(update.id.uuid)
check_common = session.repository.rdp.get_implementation(update.id).bl_check_common
if node and (node.owner == session.repository.username or check_common):
logging.debug(f"Evaluate {update.id.name}")
if node.state == UP:
try:
porcelain.commit(session.repository, node.uuid)
porcelain.push(session.repository,
'origin', node.uuid)
except ReferenceError:
logging.debug(f"Reference error {node.uuid}")
except ContextError as e:
logging.debug(e)
except Exception as e:
logging.error(e)
else:
continue
elif isinstance(update.id, bpy.types.Scene):
scene = bpy.data.scenes.get(update.id.name)
scn_uuid = porcelain.add(session.repository, scene)
porcelain.commit(session.repository, scn_uuid)
porcelain.push(session.repository, 'origin', scn_uuid)
scene_graph_changed = [u for u in reversed(dependency_updates) if getattr(u.id, 'uuid', None) and isinstance(u.id,(bpy.types.Scene,bpy.types.Collection))]
if scene_graph_changed:
porcelain.purge_orphan_nodes(session.repository)
update_external_dependencies()
@persistent
def resolve_deps_graph(dummy):
"""Resolve deps graph
Temporary solution to resolve each node's pointers after an Undo.
A future solution should be to avoid storing datablock references...
"""
if session and session.state == STATE_ACTIVE:
sanitize_deps_graph(remove_nodes=True)
@persistent
def load_pre_handler(dummy):
if session and session.state in [STATE_ACTIVE, STATE_SYNCING]:
bpy.ops.session.stop()
@persistent
def update_client_frame(scene):
if session and session.state == STATE_ACTIVE:
porcelain.update_user_metadata(session.repository, {
'frame_current': scene.frame_current
})
def register():
bpy.app.handlers.undo_post.append(resolve_deps_graph)
bpy.app.handlers.redo_post.append(resolve_deps_graph)
bpy.app.handlers.load_pre.append(load_pre_handler)
bpy.app.handlers.frame_change_pre.append(update_client_frame)
def unregister():
bpy.app.handlers.undo_post.remove(resolve_deps_graph)
bpy.app.handlers.redo_post.remove(resolve_deps_graph)
bpy.app.handlers.load_pre.remove(load_pre_handler)
bpy.app.handlers.frame_change_pre.remove(update_client_frame)

View File

@ -15,31 +15,31 @@
#
# ##### END GPL LICENSE BLOCK #####
import bpy
import mathutils
import os
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from pathlib import Path
import bpy.utils.previews
def register():
global icons_col
pcoll = bpy.utils.previews.new()
icons_dir = os.path.join(os.path.dirname(__file__), ".")
for png in Path(icons_dir).rglob("*.png"):
pcoll.load(png.stem, str(png), "IMAGE")
icons_col = pcoll
class BlLibrary(BlDatablock):
bl_id = "libraries"
bl_class = bpy.types.Library
bl_check_common = False
bl_icon = 'LIBRARY_DATA_DIRECT'
bl_reload_parent = False
def unregister():
def _construct(self, data):
with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData):
targetData = sourceData
return sourceData
def _load(self, data, target):
global icons_col
try:
bpy.utils.previews.remove(icons_col)
except Exception:
pass
def _dump(self, instance=None):
assert(instance)
dumper = Dumper()
return dumper.dump(instance)
icons_col = None

Binary files not shown: seven icon PNGs added (sizes from 4.2 KiB to 13 KiB).

File diff suppressed because it is too large.

View File

@ -17,6 +17,7 @@
import random
import logging
from uuid import uuid4
import bpy
import string
import re
@ -25,7 +26,7 @@ import os
from pathlib import Path
from . import bl_types, environment, addon_updater_ops, presence, ui
from .utils import get_preferences, get_expanded_icon
from .utils import get_preferences, get_expanded_icon, get_folder_size
from replication.constants import RP_COMMON
from replication.interface import session
@ -33,6 +34,25 @@ from replication.interface import session
IP_REGEX = re.compile("^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$")
HOSTNAME_REGEX = re.compile("^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$")
# SERVER PRESETS AT LAUNCH
DEFAULT_PRESETS = {
"localhost" : {
"server_name": "localhost",
"ip": "localhost",
"port": 5555,
"use_admin_password": True,
"admin_password": "admin",
"server_password": ""
},
"public session" : {
"server_name": "public session",
"ip": "51.75.71.183",
"port": 5555,
"admin_password": "",
"server_password": ""
},
}
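# Illustrative only (not part of the diff): an additional launch preset would
# follow the same key layout; generate_default_presets() further down copies
# these keys one-to-one into ServerPreset items. For example:
# DEFAULT_PRESETS["studio"] = {
#     "server_name": "studio",
#     "ip": "192.168.0.42",
#     "port": 5555,
#     "use_server_password": True,
#     "server_password": "change-me",
#     "use_admin_password": False,
#     "admin_password": ""
# }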
def randomColor():
"""Generate a random color """
r = random.random()
@ -66,8 +86,6 @@ def update_ip(self, context):
self['ip'] = "127.0.0.1"
def update_directory(self, context):
new_dir = Path(self.cache_directory)
if new_dir.exists() and any(Path(self.cache_directory).iterdir()):
@ -93,6 +111,16 @@ class ReplicatedDatablock(bpy.types.PropertyGroup):
auto_push: bpy.props.BoolProperty(default=True)
icon: bpy.props.StringProperty()
class ServerPreset(bpy.types.PropertyGroup):
server_name: bpy.props.StringProperty(default="")
ip: bpy.props.StringProperty(default="127.0.0.1", update=update_ip)
port: bpy.props.IntProperty(default=5555)
use_server_password: bpy.props.BoolProperty(default=False)
server_password: bpy.props.StringProperty(default="", subtype = "PASSWORD")
use_admin_password: bpy.props.BoolProperty(default=False)
admin_password: bpy.props.StringProperty(default="", subtype = "PASSWORD")
is_online: bpy.props.BoolProperty(default=False)
is_private: bpy.props.BoolProperty(default=False)
def set_sync_render_settings(self, value):
self['sync_render_settings'] = value
@ -142,24 +170,60 @@ class ReplicationFlags(bpy.types.PropertyGroup):
class SessionPrefs(bpy.types.AddonPreferences):
bl_idname = __package__
ip: bpy.props.StringProperty(
name="ip",
description='Distant host ip',
default="127.0.0.1",
update=update_ip)
# User settings
username: bpy.props.StringProperty(
name="Username",
default=f"user_{random_string_digits()}"
)
client_color: bpy.props.FloatVectorProperty(
name="client_instance_color",
description='User color',
subtype='COLOR',
default=randomColor()
)
# Current server settings
server_name: bpy.props.StringProperty(
name="server_name",
description="Custom name of the server",
default='localhost',
)
server_index: bpy.props.IntProperty(
name="server_index",
description="index of the server",
)
# User host session settings
host_port: bpy.props.IntProperty(
name="host_port",
description='Distant host port',
default=5555
)
host_use_server_password: bpy.props.BoolProperty(
name="use_server_password",
description='Use session password',
default=False
)
host_server_password: bpy.props.StringProperty(
name="server_password",
description='Session password',
subtype='PASSWORD'
)
host_use_admin_password: bpy.props.BoolProperty(
name="use_admin_password",
description='Use admin password',
default=True
)
host_admin_password: bpy.props.StringProperty(
name="admin_password",
description='Admin password',
subtype='PASSWORD',
default='admin'
)
# Other
is_first_launch: bpy.props.BoolProperty(
name="is_first_launch",
description="First time launching the addon",
default=True
)
sync_flags: bpy.props.PointerProperty(
type=ReplicationFlags
)
@ -183,6 +247,11 @@ class SessionPrefs(bpy.types.AddonPreferences):
description='connection timeout before disconnection',
default=5000
)
ping_timeout: bpy.props.IntProperty(
name='ping timeout',
description='Timeout used when checking whether servers are online',
default=500
)
# Replication update settings
depsgraph_update_rate: bpy.props.FloatProperty(
name='depsgraph update rate (s)',
@ -194,11 +263,12 @@ class SessionPrefs(bpy.types.AddonPreferences):
description="Remove filecache from memory",
default=False
)
# for UI
# For UI
category: bpy.props.EnumProperty(
name="Category",
description="Preferences Category",
items=[
('PREF', "Preferences", "Preferences of this add-on"),
('CONFIG', "Configuration", "Configuration of this add-on"),
('UPDATE', "Update", "Update this add-on"),
],
@ -242,31 +312,58 @@ class SessionPrefs(bpy.types.AddonPreferences):
step=1,
subtype='PERCENTAGE',
)
presence_text_distance: bpy.props.FloatProperty(
name="Distance text visibility",
description="Adjust the distance visibility of the user's mode/name",
min=0.1,
max=10000,
default=100,
)
conf_session_identity_expanded: bpy.props.BoolProperty(
name="Identity",
description="Identity",
default=True
default=False
)
conf_session_net_expanded: bpy.props.BoolProperty(
name="Net",
description="net",
default=True
default=False
)
conf_session_hosting_expanded: bpy.props.BoolProperty(
name="Rights",
description="Rights",
default=False
)
conf_session_rep_expanded: bpy.props.BoolProperty(
name="Replication",
description="Replication",
default=False
)
conf_session_cache_expanded: bpy.props.BoolProperty(
name="Cache",
description="cache",
default=False
)
conf_session_log_expanded: bpy.props.BoolProperty(
name="conf_session_log_expanded",
description="conf_session_log_expanded",
default=False
)
conf_session_ui_expanded: bpy.props.BoolProperty(
name="Interface",
description="Interface",
default=False
)
sidebar_repository_shown: bpy.props.BoolProperty(
name="sidebar_repository_shown",
description="sidebar_repository_shown",
default=False
)
sidebar_advanced_shown: bpy.props.BoolProperty(
name="sidebar_advanced_shown",
description="sidebar_advanced_shown",
default=False
)
sidebar_advanced_rep_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_rep_expanded",
description="sidebar_advanced_rep_expanded",
@ -277,6 +374,11 @@ class SessionPrefs(bpy.types.AddonPreferences):
description="sidebar_advanced_log_expanded",
default=False
)
sidebar_advanced_hosting_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_hosting_expanded",
description="sidebar_advanced_hosting_expanded",
default=False
)
sidebar_advanced_net_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_net_expanded",
description="sidebar_advanced_net_expanded",
@ -321,6 +423,19 @@ class SessionPrefs(bpy.types.AddonPreferences):
max=59
)
# Server preset
def server_list_callback(scene, context):
settings = get_preferences()
enum = []
for i in settings.server_preset:
enum.append((i.name, i.name, ""))
return enum
server_preset: bpy.props.CollectionProperty(
name="server preset",
type=ServerPreset,
)
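# The EnumProperty fed by server_list_callback is outside this hunk; it is
# presumably wired up along these lines (property name here is hypothetical):
# server_preset_interface: bpy.props.EnumProperty(
#     name="Server presets",
#     description="Select a server preset",
#     items=server_list_callback)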
# Custom panel
panel_category: bpy.props.StringProperty(
description="Choose a name for the category of the panel",
@ -329,38 +444,28 @@ class SessionPrefs(bpy.types.AddonPreferences):
def draw(self, context):
layout = self.layout
layout.row().prop(self, "category", expand=True)
if self.category == 'PREF':
grid = layout.column()
box = grid.box()
row = box.row()
# USER SETTINGS
split = row.split(factor=0.7, align=True)
split.prop(self, "username", text="User")
split.prop(self, "client_color", text="")
row = box.row()
row.label(text="Hide settings:")
row = box.row()
row.prop(self, "sidebar_advanced_shown", text="Hide “Advanced” settings in side panel (Not in session)")
row = box.row()
row.prop(self, "sidebar_repository_shown", text="Hide “Repository” settings in side panel (In session)")
if self.category == 'CONFIG':
grid = layout.column()
# USER INFORMATION
box = grid.box()
box.prop(
self, "conf_session_identity_expanded", text="User information",
icon=get_expanded_icon(self.conf_session_identity_expanded),
emboss=False)
if self.conf_session_identity_expanded:
box.row().prop(self, "username", text="name")
box.row().prop(self, "client_color", text="color")
# NETWORK SETTINGS
box = grid.box()
box.prop(
self, "conf_session_net_expanded", text="Networking",
icon=get_expanded_icon(self.conf_session_net_expanded),
emboss=False)
if self.conf_session_net_expanded:
box.row().prop(self, "ip", text="Address")
row = box.row()
row.label(text="Port:")
row.prop(self, "port", text="")
row = box.row()
row.label(text="Init the session from:")
row.prop(self, "init_method", text="")
# HOST SETTINGS
box = grid.box()
box.prop(
@ -368,9 +473,57 @@ class SessionPrefs(bpy.types.AddonPreferences):
icon=get_expanded_icon(self.conf_session_hosting_expanded),
emboss=False)
if self.conf_session_hosting_expanded:
row = box.row()
row.prop(self, "host_port", text="Port: ")
row = box.row()
row.label(text="Init the session from:")
row.prop(self, "init_method", text="")
row = box.row()
col = row.column()
col.prop(self, "host_use_server_password", text="Server password:")
col = row.column()
col.enabled = True if self.host_use_server_password else False
col.prop(self, "host_server_password", text="")
row = box.row()
col = row.column()
col.prop(self, "host_use_admin_password", text="Admin password:")
col = row.column()
col.enabled = True if self.host_use_admin_password else False
col.prop(self, "host_admin_password", text="")
# NETWORKING
box = grid.box()
box.prop(
self, "conf_session_net_expanded", text="Network",
icon=get_expanded_icon(self.conf_session_net_expanded),
emboss=False)
if self.conf_session_net_expanded:
row = box.row()
row.label(text="Timeout (ms):")
row.prop(self, "connection_timeout", text="")
row = box.row()
row.label(text="Server ping (ms):")
row.prop(self, "ping_timeout", text="")
# REPLICATION
box = grid.box()
box.prop(
self, "conf_session_rep_expanded", text="Replication",
icon=get_expanded_icon(self.conf_session_rep_expanded),
emboss=False)
if self.conf_session_rep_expanded:
row = box.row()
row.prop(self.sync_flags, "sync_render_settings")
row = box.row()
row.prop(self.sync_flags, "sync_active_camera")
row = box.row()
row.prop(self.sync_flags, "sync_during_editmode")
row = box.row()
if self.sync_flags.sync_during_editmode:
warning = row.box()
warning.label(text="Don't use this with heavy meshes!", icon='ERROR')
row = box.row()
row.prop(self, "depsgraph_update_rate", text="Apply delay")
# CACHE SETTINGS
box = grid.box()
@ -381,24 +534,18 @@ class SessionPrefs(bpy.types.AddonPreferences):
if self.conf_session_cache_expanded:
box.row().prop(self, "cache_directory", text="Cache directory")
box.row().prop(self, "clear_memory_filecache", text="Clear memory filecache")
# INTERFACE SETTINGS
box.row().operator('session.clear_cache', text=f"Clear cache ({get_folder_size(self.cache_directory)})")
# LOGGING
box = grid.box()
box.prop(
self, "conf_session_ui_expanded", text="Interface",
icon=get_expanded_icon(self.conf_session_ui_expanded),
self, "conf_session_log_expanded", text="Logging",
icon=get_expanded_icon(self.conf_session_log_expanded),
emboss=False)
if self.conf_session_ui_expanded:
box.row().prop(self, "panel_category", text="Panel category", expand=True)
if self.conf_session_log_expanded:
row = box.row()
row.label(text="Session widget:")
col = box.column(align=True)
col.prop(self, "presence_hud_scale", expand=True)
col.prop(self, "presence_hud_hpos", expand=True)
col.prop(self, "presence_hud_vpos", expand=True)
row.label(text="Log level:")
row.prop(self, 'logging_level', text="")
if self.category == 'UPDATE':
from . import addon_updater_ops
@ -407,18 +554,43 @@ class SessionPrefs(bpy.types.AddonPreferences):
def generate_supported_types(self):
self.supported_datablocks.clear()
for type in bl_types.types_to_register():
bpy_protocol = bl_types.get_data_translation_protocol()
# init the factory with supported types
for dcc_type_id, impl in bpy_protocol.implementations.items():
new_db = self.supported_datablocks.add()
type_module = getattr(bl_types, type)
name = [e.capitalize() for e in type.split('_')[1:]]
type_impl_name = 'Bl'+''.join(name)
type_module_class = getattr(type_module, type_impl_name)
new_db.name = type_impl_name
new_db.type_name = type_impl_name
new_db.name = dcc_type_id
new_db.type_name = dcc_type_id
new_db.use_as_filter = True
new_db.icon = type_module_class.bl_icon
new_db.bl_name = type_module_class.bl_id
new_db.icon = impl.bl_icon
new_db.bl_name = impl.bl_id
# Get a server preset through its name
def get_server_preset(self, name):
existing_preset = None
for server_preset in self.server_preset :
if server_preset.server_name == name :
existing_preset = server_preset
return existing_preset
# Custom at launch server preset
def generate_default_presets(self):
for preset_name, preset_data in DEFAULT_PRESETS.items():
existing_preset = self.get_server_preset(preset_name)
if existing_preset :
continue
new_server = self.server_preset.add()
new_server.name = str(uuid4())
new_server.server_name = preset_data.get('server_name')
new_server.ip = preset_data.get('ip')
new_server.port = preset_data.get('port')
new_server.use_server_password = preset_data.get('use_server_password',False)
new_server.server_password = preset_data.get('server_password',None)
new_server.use_admin_password = preset_data.get('use_admin_password',False)
new_server.admin_password = preset_data.get('admin_password',None)
def client_list_callback(scene, context):
@ -476,6 +648,11 @@ class SessionProps(bpy.types.PropertyGroup):
description='Enable user overlay ',
default=True,
)
presence_show_mode: bpy.props.BoolProperty(
name="Show users current mode",
description='Enable user mode overlay ',
default=False,
)
presence_show_far_user: bpy.props.BoolProperty(
name="Show users on different scenes",
description="Show user on different scenes",
@ -491,22 +668,16 @@ class SessionProps(bpy.types.PropertyGroup):
description='Show only owned datablocks',
default=True
)
filter_name: bpy.props.StringProperty(
name="filter_name",
default="",
description='Node name filter',
)
admin: bpy.props.BoolProperty(
name="admin",
description='Connect as admin',
default=False
)
password: bpy.props.StringProperty(
name="password",
default=random_string_digits(),
description='Session password',
subtype='PASSWORD'
)
internet_ip: bpy.props.StringProperty(
name="internet ip",
default="no found",
description='Internet interface ip',
)
user_snap_running: bpy.props.BoolProperty(
default=False
)
@ -523,6 +694,7 @@ classes = (
SessionProps,
ReplicationFlags,
ReplicatedDatablock,
ServerPreset,
SessionPrefs,
)
@ -537,6 +709,10 @@ def register():
if len(prefs.supported_datablocks) == 0:
logging.debug('Generating bl_types preferences')
prefs.generate_supported_types()
# at launch server presets
prefs.generate_default_presets()
def unregister():

View File

@ -94,15 +94,41 @@ def project_to_viewport(region: bpy.types.Region, rv3d: bpy.types.RegionView3D,
return [target.x, target.y, target.z]
def bbox_from_obj(obj: bpy.types.Object, radius: float) -> list:
def bbox_from_obj(obj: bpy.types.Object, index: int = 1) -> list:
""" Generate a bounding box for a given object by using its world matrix
:param obj: target object
:type obj: bpy.types.Object
:param radius: bounding box radius
:type radius: float
:return: list of 8 points [(x,y,z),...]
:param index: index offset
:type index: int
:return: list of 8 points [(x,y,z),...], list of 12 links between these points [(1,2),...]
"""
radius = 1.0 # Radius of the bounding box
index = 8*index
vertex_indices = (
(0+index, 1+index), (0+index, 2+index), (1+index, 3+index), (2+index, 3+index),
(4+index, 5+index), (4+index, 6+index), (5+index, 7+index), (6+index, 7+index),
(0+index, 4+index), (1+index, 5+index), (2+index, 6+index), (3+index, 7+index))
if obj.type == 'EMPTY':
radius = obj.empty_display_size
elif obj.type == 'LIGHT':
radius = obj.data.shadow_soft_size
elif obj.type == 'LIGHT_PROBE':
radius = obj.data.influence_distance
elif obj.type == 'CAMERA':
radius = obj.data.display_size
elif hasattr(obj, 'bound_box'):
vertex_indices = (
(0+index, 1+index), (1+index, 2+index),
(2+index, 3+index), (0+index, 3+index),
(4+index, 5+index), (5+index, 6+index),
(6+index, 7+index), (4+index, 7+index),
(0+index, 4+index), (1+index, 5+index),
(2+index, 6+index), (3+index, 7+index))
vertex_pos = get_bb_coords_from_obj(obj)
return vertex_pos, vertex_indices
coords = [
(-radius, -radius, -radius), (+radius, -radius, -radius),
(-radius, +radius, -radius), (+radius, +radius, -radius),
@ -112,9 +138,32 @@ def bbox_from_obj(obj: bpy.types.Object, radius: float) -> list:
base = obj.matrix_world
bbox_corners = [base @ mathutils.Vector(corner) for corner in coords]
return [(point.x, point.y, point.z)
for point in bbox_corners]
vertex_pos = [(point.x, point.y, point.z) for point in bbox_corners]
return vertex_pos, vertex_indices
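# Sketch (hypothetical objects, not part of the diff) of why the index offset
# matters: several boxes can be concatenated and drawn in a single batch
# because each box's indices start 8 entries further along.
# pos_a, ind_a = bbox_from_obj(obj_a, index=0)  # indices 0..7
# pos_b, ind_b = bbox_from_obj(obj_b, index=1)  # indices 8..15
# batch_for_shader(shader, 'LINES', {"pos": pos_a + pos_b}, indices=ind_a + ind_b)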
def bbox_from_instance_collection(ic: bpy.types.Object, index: int = 0) -> list:
""" Generate a bounding box for a given instance collection by using its objects
:param ic: target instance collection
:type ic: bpy.types.Object
:param index: index offset
:type index: int
:return: list of 8*objs points [(x,y,z),...], tuple of 12*objs links between these points [(1,2),...]
"""
vertex_pos = []
vertex_indices = ()
for obj_index, obj in enumerate(ic.instance_collection.objects):
vertex_pos_temp, vertex_indices_temp = bbox_from_obj(obj, index=index+obj_index)
vertex_pos += vertex_pos_temp
vertex_indices += vertex_indices_temp
bbox_corners = [ic.matrix_world @ mathutils.Vector(vertex) for vertex in vertex_pos]
vertex_pos = [(point.x, point.y, point.z) for point in bbox_corners]
return vertex_pos, vertex_indices
def generate_user_camera() -> list:
""" Generate a basic camera representation of the user's point of view
@ -175,7 +224,7 @@ def get_bb_coords_from_obj(object: bpy.types.Object, instance: bpy.types.Object
bbox_corners = [base @ mathutils.Vector(
corner) for corner in object.bound_box]
return [(point.x, point.y, point.z) for point in bbox_corners]
@ -203,6 +252,13 @@ class Widget(object):
"""
return True
def configure_bgl(self):
bgl.glLineWidth(2.)
bgl.glEnable(bgl.GL_DEPTH_TEST)
bgl.glEnable(bgl.GL_BLEND)
bgl.glEnable(bgl.GL_LINE_SMOOTH)
def draw(self):
"""How to draw the widget
"""
@ -256,11 +312,6 @@ class UserFrustumWidget(Widget):
{"pos": positions},
indices=self.indices)
bgl.glLineWidth(2.)
bgl.glEnable(bgl.GL_DEPTH_TEST)
bgl.glEnable(bgl.GL_BLEND)
bgl.glEnable(bgl.GL_LINE_SMOOTH)
shader.bind()
shader.uniform_float("color", self.data.get('color'))
batch.draw(shader)
@ -272,6 +323,8 @@ class UserSelectionWidget(Widget):
username):
self.username = username
self.settings = bpy.context.window_manager.session
self.current_selection_ids = []
self.current_selected_objects = []
@property
def data(self):
@ -281,6 +334,15 @@ class UserSelectionWidget(Widget):
else:
return None
@property
def selected_objects(self):
user_selection = self.data.get('selected_objects')
if self.current_selection_ids != user_selection:
self.current_selected_objects = [find_from_attr("uuid", uid, bpy.data.objects) for uid in user_selection]
self.current_selection_ids = user_selection
return self.current_selected_objects
def poll(self):
if self.data is None:
return False
@ -295,48 +357,31 @@ class UserSelectionWidget(Widget):
self.settings.enable_presence
def draw(self):
user_selection = self.data.get('selected_objects')
for select_ob in user_selection:
ob = find_from_attr("uuid", select_ob, bpy.data.objects)
if not ob:
return
vertex_pos = []
vertex_ind = []
collection_offset = 0
for obj_index, obj in enumerate(self.selected_objects):
if obj is None:
continue
obj_index+=collection_offset
if hasattr(obj, 'instance_collection') and obj.instance_collection:
bbox_pos, bbox_ind = bbox_from_instance_collection(obj, index=obj_index)
collection_offset+=len(obj.instance_collection.objects)-1
else :
bbox_pos, bbox_ind = bbox_from_obj(obj, index=obj_index)
vertex_pos += bbox_pos
vertex_ind += bbox_ind
vertex_pos = bbox_from_obj(ob, 1.0)
vertex_indices = ((0, 1), (0, 2), (1, 3), (2, 3),
(4, 5), (4, 6), (5, 7), (6, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
if ob.instance_collection:
for obj in ob.instance_collection.objects:
if obj.type == 'MESH' and hasattr(obj, 'bound_box'):
vertex_pos = get_bb_coords_from_obj(obj, instance=ob)
break
elif ob.type == 'EMPTY':
vertex_pos = bbox_from_obj(ob, ob.empty_display_size)
elif ob.type == 'LIGHT':
vertex_pos = bbox_from_obj(ob, ob.data.shadow_soft_size)
elif ob.type == 'LIGHT_PROBE':
vertex_pos = bbox_from_obj(ob, ob.data.influence_distance)
elif ob.type == 'CAMERA':
vertex_pos = bbox_from_obj(ob, ob.data.display_size)
elif hasattr(ob, 'bound_box'):
vertex_indices = (
(0, 1), (1, 2), (2, 3), (0, 3),
(4, 5), (5, 6), (6, 7), (4, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
vertex_pos = get_bb_coords_from_obj(ob)
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
batch = batch_for_shader(
shader,
'LINES',
{"pos": vertex_pos},
indices=vertex_indices)
shader.bind()
shader.uniform_float("color", self.data.get('color'))
batch.draw(shader)
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
batch = batch_for_shader(
shader,
'LINES',
{"pos": vertex_pos},
indices=vertex_ind)
shader.bind()
shader.uniform_float("color", self.data.get('color'))
batch.draw(shader)
class UserNameWidget(Widget):
draw_type = 'POST_PIXEL'
@ -380,6 +425,62 @@ class UserNameWidget(Widget):
blf.color(0, color[0], color[1], color[2], color[3])
blf.draw(0, self.username)
class UserModeWidget(Widget):
draw_type = 'POST_PIXEL'
def __init__(
self,
username):
self.username = username
self.settings = bpy.context.window_manager.session
self.preferences = get_preferences()
@property
def data(self):
user = session.online_users.get(self.username)
if user:
return user.get('metadata')
else:
return None
def poll(self):
if self.data is None:
return False
scene_current = self.data.get('scene_current')
mode_current = self.data.get('mode_current')
user_selection = self.data.get('selected_objects')
return (scene_current == bpy.context.scene.name or
mode_current == bpy.context.mode or
self.settings.presence_show_far_user) and \
user_selection and \
self.settings.presence_show_mode and \
self.settings.enable_presence
def draw(self):
user_selection = self.data.get('selected_objects')
area, region, rv3d = view3d_find()
viewport_coord = project_to_viewport(region, rv3d, (0, 0))
obj = find_from_attr("uuid", user_selection[0], bpy.data.objects)
if not obj:
return
mode_current = self.data.get('mode_current')
color = self.data.get('color')
origin_coord = project_to_screen(obj.location)
distance_viewport_object = math.sqrt((viewport_coord[0]-obj.location[0])**2+(viewport_coord[1]-obj.location[1])**2+(viewport_coord[2]-obj.location[2])**2)
if distance_viewport_object > self.preferences.presence_mode_distance :
return
if origin_coord :
blf.position(0, origin_coord[0]+8, origin_coord[1]-15, 0)
blf.size(0, 16, 72)
blf.color(0, color[0], color[1], color[2], color[3])
blf.draw(0, mode_current)
class SessionStatusWidget(Widget):
draw_type = 'POST_PIXEL'
@ -462,6 +563,7 @@ class DrawFactory(object):
try:
for widget in self.widgets.values():
if widget.draw_type == 'POST_VIEW' and widget.poll():
widget.configure_bgl()
widget.draw()
except Exception as e:
logging.error(
@ -471,6 +573,7 @@ class DrawFactory(object):
try:
for widget in self.widgets.values():
if widget.draw_type == 'POST_PIXEL' and widget.poll():
widget.configure_bgl()
widget.draw()
except Exception as e:
logging.error(
@ -483,6 +586,7 @@ this.renderer = DrawFactory()
def register():
this.renderer.register_handlers()
this.renderer.add_widget("session_status", SessionStatusWidget())

multi_user/shared_data.py (new file, 48 lines)
View File

@ -0,0 +1,48 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
from replication.constants import STATE_INITIAL
class SessionData():
""" A structure to share easily the current session data across the addon
modules.
This object will completely replace the singleton lying in the replication
interface module.
"""
def __init__(self):
self.repository = None # The current repository
self.remote = None # The active remote
self.server = None
self.applied_updates = []
@property
def state(self):
if self.remote is None:
return STATE_INITIAL
else:
return self.remote.connection_status
def clear(self):
self.remote = None
self.repository = None
self.server = None
self.applied_updates = []
session = SessionData()
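A minimal sketch (assuming the package imports as multi_user, as the paths above suggest) of how this shared state keeps freshly applied updates from being echoed back to the server, mirroring the depsgraph handler and ApplyTimer elsewhere in this diff:
from multi_user import shared_data
incoming_uuid = "some-node-uuid"  # uuid of a datablock an ApplyTimer just applied
# The apply side records what came from the network...
shared_data.session.applied_updates.append(incoming_uuid)
# ...and the depsgraph handler later drops the matching local update instead of re-pushing it.
if incoming_uuid in shared_data.session.applied_updates:
shared_data.session.applied_updates.remove(incoming_uuid)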

View File

@ -24,13 +24,15 @@ from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE,
STATE_SRV_SYNC, STATE_SYNCING, UP)
from replication.exception import NonAuthorizedOperationError, ContextError
from replication.interface import session
from replication.porcelain import apply, add
from replication import porcelain
from . import operators, utils
from .presence import (UserFrustumWidget, UserNameWidget, UserSelectionWidget,
from .presence import (UserFrustumWidget, UserNameWidget, UserModeWidget, UserSelectionWidget,
generate_user_camera, get_view_matrix, refresh_3d_view,
refresh_sidebar_view, renderer)
from . import shared_data
this = sys.modules[__name__]
# Registered timers
@ -39,7 +41,8 @@ this.registry = dict()
def is_annotating(context: bpy.types.Context):
""" Check if the annotate mode is enabled
"""
return bpy.context.workspace.tools.from_space_view3d_mode('OBJECT', create=False).idname == 'builtin.annotate'
active_tool = bpy.context.workspace.tools.from_space_view3d_mode('OBJECT', create=False)
return (active_tool and active_tool.idname == 'builtin.annotate')
class Timer(object):
@ -72,6 +75,7 @@ class Timer(object):
except Exception as e:
logging.error(e)
self.unregister()
traceback.print_exc()
session.disconnect(reason=f"Error during timer {self.id} execution")
else:
if self.is_running:
@ -88,7 +92,7 @@ class Timer(object):
if bpy.app.timers.is_registered(self.main):
logging.info(f"Unregistering {self.id}")
bpy.app.timers.unregister(self.main)
del this.registry[self.id]
self.is_running = False
@ -99,7 +103,7 @@ class SessionBackupTimer(Timer):
def execute(self):
session.save(self._filepath)
session.repository.dumps(self._filepath)
class SessionListenTimer(Timer):
def execute(self):
@ -108,32 +112,76 @@ class SessionListenTimer(Timer):
class ApplyTimer(Timer):
def execute(self):
if session and session.state == STATE_ACTIVE:
nodes = session.list()
for node in nodes:
node_ref = session.repository.get_node(node)
for node in session.repository.graph.keys():
node_ref = session.repository.graph.get(node)
if node_ref.state == FETCHED:
try:
apply(session.repository, node)
shared_data.session.applied_updates.append(node)
porcelain.apply(session.repository, node)
except Exception as e:
logging.error(f"Fail to apply {node_ref.uuid}")
traceback.print_exc()
else:
if node_ref.bl_reload_parent:
for parent in session.repository.get_parents(node):
impl = session.repository.rdp.get_implementation(node_ref.instance)
if impl.bl_reload_parent:
for parent in session.repository.graph.get_parents(node):
logging.debug("Refresh parent {node}")
apply(session.repository,
porcelain.apply(session.repository,
parent.uuid,
force=True)
if hasattr(impl, 'bl_reload_child') and impl.bl_reload_child:
for dep in node_ref.dependencies:
porcelain.apply(session.repository,
dep,
force=True)
class AnnotationUpdates(Timer):
def __init__(self, timeout=1):
self._annotating = False
self._settings = utils.get_preferences()
super().__init__(timeout)
def execute(self):
if session and session.state == STATE_ACTIVE:
ctx = bpy.context
annotation_gp = ctx.scene.grease_pencil
if annotation_gp and not annotation_gp.uuid:
ctx.scene.update_tag()
# if an annotation exists and is tracked
if annotation_gp and annotation_gp.uuid:
registered_gp = session.repository.graph.get(annotation_gp.uuid)
if is_annotating(bpy.context):
# try to get the right on it
if registered_gp.owner == RP_COMMON:
self._annotating = True
logging.debug(
"Getting the right on the annotation GP")
porcelain.lock(session.repository,
[registered_gp.uuid],
ignore_warnings=True,
affect_dependencies=False)
if registered_gp.owner == self._settings.username:
porcelain.commit(session.repository, annotation_gp.uuid)
porcelain.push(session.repository, 'origin', annotation_gp.uuid)
elif self._annotating:
porcelain.unlock(session.repository,
[registered_gp.uuid],
ignore_warnings=True,
affect_dependencies=False)
self._annotating = False
class DynamicRightSelectTimer(Timer):
def __init__(self, timeout=.1):
super().__init__(timeout)
self._last_selection = []
self._last_selection = set()
self._user = None
self._annotating = False
def execute(self):
settings = utils.get_preferences()
@ -144,88 +192,46 @@ class DynamicRightSelectTimer(Timer):
self._user = session.online_users.get(settings.username)
if self._user:
ctx = bpy.context
annotation_gp = ctx.scene.grease_pencil
if annotation_gp and not annotation_gp.uuid:
ctx.scene.update_tag()
# if an annotation exists and is tracked
if annotation_gp and annotation_gp.uuid:
registered_gp = session.repository.get_node(annotation_gp.uuid)
if is_annotating(bpy.context):
# try to get the right on it
if registered_gp.owner == RP_COMMON:
self._annotating = True
logging.debug(
"Getting the right on the annotation GP")
session.change_owner(
registered_gp.uuid,
settings.username,
ignore_warnings=True,
affect_dependencies=False)
if registered_gp.owner == settings.username:
gp_node = session.repository.get_node(annotation_gp.uuid)
if gp_node.has_changed():
session.commit(gp_node.uuid)
session.push(gp_node.uuid, check_data=False)
elif self._annotating:
session.change_owner(
registered_gp.uuid,
RP_COMMON,
ignore_warnings=True,
affect_dependencies=False)
current_selection = utils.get_selected_objects(
current_selection = set(utils.get_selected_objects(
bpy.context.scene,
bpy.data.window_managers['WinMan'].windows[0].view_layer
)
))
if current_selection != self._last_selection:
obj_common = [
o for o in self._last_selection if o not in current_selection]
obj_ours = [
o for o in current_selection if o not in self._last_selection]
to_lock = list(current_selection.difference(self._last_selection))
to_release = list(self._last_selection.difference(current_selection))
instances_to_lock = list()
# change old selection right to common
for obj in obj_common:
node = session.repository.get_node(obj)
for node_id in list(to_lock):  # iterate over a copy, the list is mutated below
node = session.repository.graph.get(node_id)
instance_mode = (node.data or {}).get('instance_type')
if instance_mode == 'COLLECTION':
to_lock.remove(node_id)
instances_to_lock.append(node_id)
if instances_to_lock:
try:
porcelain.lock(session.repository,
instances_to_lock,
ignore_warnings=True,
affect_dependencies=False)
except NonAuthorizedOperationError as e:
logging.warning(e)
if node and (node.owner == settings.username or node.owner == RP_COMMON):
recursive = True
if node.data and 'instance_type' in node.data.keys():
recursive = node.data['instance_type'] != 'COLLECTION'
try:
session.change_owner(
node.uuid,
RP_COMMON,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(
f"Not authorized to change {node} owner")
# change new selection to our
for obj in obj_ours:
node = session.repository.get_node(obj)
if node and node.owner == RP_COMMON:
recursive = True
if node.data and 'instance_type' in node.data.keys():
recursive = node.data['instance_type'] != 'COLLECTION'
try:
session.change_owner(
node.uuid,
settings.username,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(
f"Not authorized to change {node} owner")
else:
return
if to_release:
try:
porcelain.unlock(session.repository,
to_release,
ignore_warnings=True,
affect_dependencies=True)
except NonAuthorizedOperationError as e:
logging.warning(e)
if to_lock:
try:
porcelain.lock(session.repository,
to_lock,
ignore_warnings=True,
affect_dependencies=True)
except NonAuthorizedOperationError as e:
logging.warning(e)
self._last_selection = current_selection
@ -233,31 +239,29 @@ class DynamicRightSelectTimer(Timer):
'selected_objects': current_selection
}
session.update_user_metadata(user_metadata)
porcelain.update_user_metadata(session.repository, user_metadata)
logging.debug("Update selection")
# Fix deselection until right management refactoring (with Roles concepts)
if len(current_selection) == 0:
owned_keys = session.list(
filter_owner=settings.username)
for key in owned_keys:
node = session.repository.get_node(key)
owned_keys = [k for k, v in session.repository.graph.items() if v.owner==settings.username]
if owned_keys:
try:
session.change_owner(
key,
RP_COMMON,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(
f"Not authorized to change {key} owner")
porcelain.unlock(session.repository,
owned_keys,
ignore_warnings=True,
affect_dependencies=True)
except NonAuthorizedOperationError as e:
logging.warning(e)
# Objects selectability
for obj in bpy.data.objects:
object_uuid = getattr(obj, 'uuid', None)
if object_uuid:
is_selectable = not session.is_readonly(object_uuid)
is_selectable = not session.repository.is_node_readonly(object_uuid)
if obj.hide_select != is_selectable:
obj.hide_select = is_selectable
shared_data.session.applied_updates.append(object_uuid)
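With the annotation handling moved out, the right transfer on selection change boils down to a set difference plus two batched porcelain calls. A minimal sketch under the same lock/unlock signatures used above (view_layer and last_selection stand in for the timer's own state):

# Sketch only: batched ownership handoff on selection change (names from this diff).
current = set(utils.get_selected_objects(bpy.context.scene, view_layer))
to_lock = list(current - last_selection)       # newly selected -> take the right
to_release = list(last_selection - current)    # deselected -> hand the right back
if to_release:
    porcelain.unlock(session.repository, to_release,
                     ignore_warnings=True, affect_dependencies=True)
if to_lock:
    porcelain.lock(session.repository, to_lock,
                   ignore_warnings=True, affect_dependencies=True)
last_selection = current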
class ClientUpdate(Timer):
@ -307,20 +311,24 @@ class ClientUpdate(Timer):
settings.client_color.b,
1),
'frame_current': bpy.context.scene.frame_current,
'scene_current': scene_current
'scene_current': scene_current,
'mode_current': bpy.context.mode
}
session.update_user_metadata(metadata)
porcelain.update_user_metadata(session.repository, metadata)
# Update client representation
# Update client current scene
elif scene_current != local_user_metadata['scene_current']:
local_user_metadata['scene_current'] = scene_current
session.update_user_metadata(local_user_metadata)
porcelain.update_user_metadata(session.repository, local_user_metadata)
elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']:
local_user_metadata['view_corners'] = current_view_corners
local_user_metadata['view_matrix'] = get_view_matrix(
)
session.update_user_metadata(local_user_metadata)
porcelain.update_user_metadata(session.repository, local_user_metadata)
elif bpy.context.mode != local_user_metadata['mode_current']:
local_user_metadata['mode_current'] = bpy.context.mode
porcelain.update_user_metadata(session.repository, local_user_metadata)
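For reference, a sketch of the per-user metadata the ClientUpdate timer now keeps in sync through porcelain.update_user_metadata; only keys visible in this hunk are listed (the colour tuple's key name is cropped out of the diff, so it is omitted here):

# Sketch only: metadata payload pushed on a full refresh.
metadata = {
    'frame_current': bpy.context.scene.frame_current,
    'scene_current': scene_current,
    'mode_current': bpy.context.mode,   # new: drives the mode column and UserModeWidget
}
porcelain.update_user_metadata(session.repository, metadata)
# On later ticks only the stale key is refreshed, e.g.:
local_user_metadata['mode_current'] = bpy.context.mode
porcelain.update_user_metadata(session.repository, local_user_metadata)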
class SessionStatusUpdate(Timer):
@ -348,6 +356,7 @@ class SessionUserSync(Timer):
renderer.remove_widget(f"{user.username}_cam")
renderer.remove_widget(f"{user.username}_select")
renderer.remove_widget(f"{user.username}_name")
renderer.remove_widget(f"{user.username}_mode")
ui_users.remove(index)
break
@ -363,6 +372,8 @@ class SessionUserSync(Timer):
f"{user}_select", UserSelectionWidget(user))
renderer.add_widget(
f"{user}_name", UserNameWidget(user))
renderer.add_widget(
f"{user}_mode", UserModeWidget(user))
class MainThreadExecutor(Timer):

View File

@ -16,7 +16,9 @@
# ##### END GPL LICENSE BLOCK #####
from logging import log
import bpy
import bpy.utils.previews
from .utils import get_preferences, get_expanded_icon, get_folder_size, get_state_str
from replication.constants import (ADDED, ERROR, FETCHED,
@ -71,152 +73,124 @@ class SESSION_PT_settings(bpy.types.Panel):
def draw_header(self, context):
layout = self.layout
settings = get_preferences()
from multi_user import icons
offline_icon = icons.icons_col["session_status_offline"]
waiting_icon = icons.icons_col["session_status_waiting"]
online_icon = icons.icons_col["session_status_online"]
if session and session.state != STATE_INITIAL:
cli_state = session.state
state = session.state
connection_icon = "KEYTYPE_MOVING_HOLD_VEC"
connection_icon = offline_icon
if state == STATE_ACTIVE:
connection_icon = 'PROP_ON'
connection_icon = online_icon
else:
connection_icon = 'PROP_CON'
connection_icon = waiting_icon
layout.label(text=f"Session - {get_state_str(cli_state)}", icon=connection_icon)
layout.label(text=f"{str(settings.server_name)} - {get_state_str(cli_state)}", icon_value=connection_icon.icon_id)
else:
layout.label(text=f"Session - v{__version__}",icon="PROP_OFF")
layout.label(text=f"Multi-user - v{__version__}", icon="ANTIALIASED")
def draw(self, context):
layout = self.layout
row = layout.row()
runtime_settings = context.window_manager.session
settings = get_preferences()
if hasattr(context.window_manager, 'session'):
# STATE INITIAL
if not session \
or (session and session.state == STATE_INITIAL):
pass
else:
progress = session.state_progress
row = layout.row()
if settings.is_first_launch:
# USER SETTINGS
row = layout.row()
row.label(text="1. Enter your username and color:")
row = layout.row()
split = row.split(factor=0.7, align=True)
split.prop(settings, "username", text="")
split.prop(settings, "client_color", text="")
current_state = session.state
info_msg = None
# DOC
row = layout.row()
row.label(text="2. New here ? See the doc:")
row = layout.row()
row.operator("doc.get", text="Documentation", icon="HELP")
# START
row = layout.row()
row.label(text="3: Start the Multi-user:")
row = layout.row()
row.scale_y = 2
row.operator("firstlaunch.verify", text="Continue")
if not settings.is_first_launch:
if hasattr(context.window_manager, 'session'):
# STATE INITIAL
if not session \
or (session and session.state == STATE_INITIAL):
layout = self.layout
settings = get_preferences()
server_preset = settings.server_preset
selected_server = context.window_manager.server_index if context.window_manager.server_index <= len(server_preset) - 1 else 0
active_server_name = server_preset[selected_server].name if len(server_preset) >= 1 else ""
is_server_selected = bool(active_server_name)
if current_state in [STATE_ACTIVE]:
row = row.grid_flow(row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
row.prop(settings.sync_flags, "sync_render_settings",text="",icon_only=True, icon='SCENE')
row.prop(settings.sync_flags, "sync_during_editmode", text="",icon_only=True, icon='EDITMODE_HLT')
row.prop(settings.sync_flags, "sync_active_camera", text="",icon_only=True, icon='OBJECT_DATAMODE')
# SERVER LIST
row = layout.row()
box = row.box()
box.scale_y = 0.7
split = box.split(factor=0.7)
split.label(text="Server")
split.label(text="Online")
row= layout.row()
col = row.column(align=True)
col.operator("session.get_info", icon="FILE_REFRESH", text="")
if current_state in [STATE_ACTIVE] and runtime_settings.is_host:
info_msg = f"LAN: {runtime_settings.internet_ip}"
if current_state == STATE_LOBBY:
info_msg = "Waiting for the session to start."
row = layout.row()
col = row.column(align=True)
col.template_list("SESSION_UL_network", "", settings, "server_preset", context.window_manager, "server_index")
col.separator()
connectOp = col.row()
connectOp.operator("session.host", text="Host")
connectopcol = connectOp.column()
connectopcol.enabled = is_server_selected
connectopcol.operator("session.connect", text="Connect")
if info_msg:
info_box = row.box()
info_box.row().label(text=info_msg,icon='INFO')
col = row.column(align=True)
col.operator("session.preset_server_add", icon="ADD", text="") # TODO : add conditions (need a name, etc..)
row_visible = col.row(align=True)
col_visible = row_visible.column(align=True)
col_visible.enabled = is_server_selected
col_visible.operator("session.preset_server_remove", icon="REMOVE", text="").target_server_name = active_server_name
col_visible.separator()
col_visible.operator("session.preset_server_edit", icon="GREASEPENCIL", text="").target_server_name = active_server_name
# Progress bar
if current_state in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]:
info_box = row.box()
info_box.row().label(text=printProgressBar(
progress['current'],
progress['total'],
length=16
))
else:
exitbutton = layout.row()
exitbutton.scale_y = 1.5
exitbutton.operator("session.stop", icon='QUIT', text="Disconnect")
layout.row().operator("session.stop", icon='QUIT', text="Exit")
progress = session.state_progress
current_state = session.state
info_msg = None
if current_state == STATE_LOBBY:
row= layout.row()
info_msg = "Waiting for the session to start."
class SESSION_PT_settings_network(bpy.types.Panel):
bl_idname = "MULTIUSER_SETTINGS_NETWORK_PT_panel"
bl_label = "Network"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
if info_msg:
info_box = row.box()
info_box.row().label(text=info_msg,icon='INFO')
@classmethod
def poll(cls, context):
return not session \
or (session and session.state == 0)
def draw_header(self, context):
self.layout.label(text="", icon='URL')
def draw(self, context):
layout = self.layout
runtime_settings = context.window_manager.session
settings = get_preferences()
# USER SETTINGS
row = layout.row()
row.prop(runtime_settings, "session_mode", expand=True)
row = layout.row()
box = row.box()
if runtime_settings.session_mode == 'HOST':
row = box.row()
row.label(text="Port:")
row.prop(settings, "port", text="")
row = box.row()
row.label(text="Start from:")
row.prop(settings, "init_method", text="")
row = box.row()
row.label(text="Admin password:")
row.prop(runtime_settings, "password", text="")
row = box.row()
row.operator("session.start", text="HOST").host = True
else:
row = box.row()
row.prop(settings, "ip", text="IP")
row = box.row()
row.label(text="Port:")
row.prop(settings, "port", text="")
row = box.row()
row.prop(runtime_settings, "admin", text='Connect as admin', icon='DISCLOSURE_TRI_DOWN' if runtime_settings.admin
else 'DISCLOSURE_TRI_RIGHT')
if runtime_settings.admin:
row = box.row()
row.label(text="Password:")
row.prop(runtime_settings, "password", text="")
row = box.row()
row.operator("session.start", text="CONNECT").host = False
class SESSION_PT_settings_user(bpy.types.Panel):
bl_idname = "MULTIUSER_SETTINGS_USER_PT_panel"
bl_label = "User info"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
@classmethod
def poll(cls, context):
return not session \
or (session and session.state == 0)
def draw_header(self, context):
self.layout.label(text="", icon='USER')
def draw(self, context):
layout = self.layout
runtime_settings = context.window_manager.session
settings = get_preferences()
row = layout.row()
# USER SETTINGS
row.prop(settings, "username", text="name")
row = layout.row()
row.prop(settings, "client_color", text="color")
row = layout.row()
# PROGRESS BAR
if current_state in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]:
row= layout.row()
row.label(text=f"Status: {get_state_str(current_state)}")
row= layout.row()
info_box = row.box()
info_box.label(text=printProgressBar(
progress['current'],
progress['total'],
length=16
))
class SESSION_PT_advanced_settings(bpy.types.Panel):
@ -229,19 +203,45 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
@classmethod
def poll(cls, context):
settings = get_preferences()
return not session \
or (session and session.state == 0)
or (session and session.state == 0) \
and not settings.sidebar_advanced_shown \
and not settings.is_first_launch
def draw_header(self, context):
self.layout.label(text="", icon='PREFERENCES')
def draw(self, context):
layout = self.layout
runtime_settings = context.window_manager.session
settings = get_preferences()
#ADVANCED HOST
host_selection = layout.row().box()
host_selection.prop(
settings, "sidebar_advanced_hosting_expanded", text="Hosting",
icon=get_expanded_icon(settings.sidebar_advanced_hosting_expanded),
emboss=False)
if settings.sidebar_advanced_hosting_expanded:
host_selection_row = host_selection.row()
host_selection_row.prop(settings, "host_port", text="Port:")
host_selection_row = host_selection.row()
host_selection_row.label(text="Init the session from:")
host_selection_row.prop(settings, "init_method", text="")
host_selection_row = host_selection.row()
host_selection_col = host_selection_row.column()
host_selection_col.prop(settings, "host_use_server_password", text="Server password:")
host_selection_col = host_selection_row.column()
host_selection_col.enabled = settings.host_use_server_password
host_selection_col.prop(settings, "host_server_password", text="")
host_selection_row = host_selection.row()
host_selection_col = host_selection_row.column()
host_selection_col.prop(settings, "host_use_admin_password", text="Admin password:")
host_selection_col = host_selection_row.column()
host_selection_col.enabled = settings.host_use_admin_password
host_selection_col.prop(settings, "host_admin_password", text="")
#ADVANCED NET
net_section = layout.row().box()
net_section.prop(
settings,
@ -249,12 +249,15 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
text="Network",
icon=get_expanded_icon(settings.sidebar_advanced_net_expanded),
emboss=False)
if settings.sidebar_advanced_net_expanded:
net_section_row = net_section.row()
net_section_row.label(text="Timeout (ms):")
net_section_row.prop(settings, "connection_timeout", text="")
net_section_row = net_section.row()
net_section_row.label(text="Server ping (ms):")
net_section_row.prop(settings, "ping_timeout", text="")
#ADVANCED REPLICATION
replication_section = layout.row().box()
replication_section.prop(
settings,
@ -262,16 +265,12 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
text="Replication",
icon=get_expanded_icon(settings.sidebar_advanced_rep_expanded),
emboss=False)
if settings.sidebar_advanced_rep_expanded:
replication_section_row = replication_section.row()
replication_section_row = replication_section.row()
replication_section_row.prop(settings.sync_flags, "sync_render_settings")
replication_section_row = replication_section.row()
replication_section_row.prop(settings.sync_flags, "sync_active_camera")
replication_section_row = replication_section.row()
replication_section_row.prop(settings.sync_flags, "sync_during_editmode")
replication_section_row = replication_section.row()
if settings.sync_flags.sync_during_editmode:
@ -280,7 +279,7 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
replication_section_row = replication_section.row()
replication_section_row.prop(settings, "depsgraph_update_rate", text="Apply delay")
#ADVANCED CACHE
cache_section = layout.row().box()
cache_section.prop(
settings,
@ -298,6 +297,8 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
cache_section_row.prop(settings, "clear_memory_filecache", text="")
cache_section_row = cache_section.row()
cache_section_row.operator('session.clear_cache', text=f"Clear cache ({get_folder_size(settings.cache_directory)})")
#ADVANCED LOG
log_section = layout.row().box()
log_section.prop(
settings,
@ -305,11 +306,11 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
text="Logging",
icon=get_expanded_icon(settings.sidebar_advanced_log_expanded),
emboss=False)
if settings.sidebar_advanced_log_expanded:
log_section_row = log_section.row()
log_section_row.label(text="Log level:")
log_section_row.prop(settings, 'logging_level', text="")
class SESSION_PT_user(bpy.types.Panel):
bl_idname = "MULTIUSER_USER_PT_panel"
bl_label = "Online users"
@ -319,7 +320,8 @@ class SESSION_PT_user(bpy.types.Panel):
@classmethod
def poll(cls, context):
return session and session.state in [STATE_ACTIVE, STATE_LOBBY]
return session \
and session.state in [STATE_ACTIVE, STATE_LOBBY]
def draw_header(self, context):
self.layout.label(text="", icon='USER')
@ -331,22 +333,23 @@ class SESSION_PT_user(bpy.types.Panel):
settings = get_preferences()
active_user = online_users[selected_user] if len(
online_users)-1 >= selected_user else 0
runtime_settings = context.window_manager.session
# Create a simple row.
#USER LIST
row = layout.row()
box = row.box()
split = box.split(factor=0.35)
split.label(text="user")
split = split.split(factor=0.5)
split.label(text="location")
split = split.split(factor=0.3)
split.label(text="mode")
split.label(text="frame")
split.label(text="location")
split.label(text="ping")
row = layout.row()
layout.template_list("SESSION_UL_users", "", context.window_manager,
"online_users", context.window_manager, "user_index")
#OPERATOR ON USER
if active_user != 0 and active_user.username != settings.username:
row = layout.row()
user_operations = row.split()
@ -378,6 +381,8 @@ class SESSION_UL_users(bpy.types.UIList):
ping = '-'
frame_current = '-'
scene_current = '-'
mode_current = '-'
mode_icon = 'BLANK1'
status_icon = 'BLANK1'
if session:
user = session.online_users.get(item.username)
@ -387,59 +392,51 @@ class SESSION_UL_users(bpy.types.UIList):
if metadata and 'frame_current' in metadata:
frame_current = str(metadata.get('frame_current','-'))
scene_current = metadata.get('scene_current','-')
mode_current = metadata.get('mode_current','-')
if mode_current == "OBJECT" :
mode_icon = "OBJECT_DATAMODE"
elif mode_current == "EDIT_MESH" :
mode_icon = "EDITMODE_HLT"
elif mode_current == 'EDIT_CURVE':
mode_icon = "CURVE_DATA"
elif mode_current == 'EDIT_SURFACE':
mode_icon = "SURFACE_DATA"
elif mode_current == 'EDIT_TEXT':
mode_icon = "FILE_FONT"
elif mode_current == 'EDIT_ARMATURE':
mode_icon = "ARMATURE_DATA"
elif mode_current == 'EDIT_METABALL':
mode_icon = "META_BALL"
elif mode_current == 'EDIT_LATTICE':
mode_icon = "LATTICE_DATA"
elif mode_current == 'POSE':
mode_icon = "POSE_HLT"
elif mode_current == 'SCULPT':
mode_icon = "SCULPTMODE_HLT"
elif mode_current == 'PAINT_WEIGHT':
mode_icon = "WPAINT_HLT"
elif mode_current == 'PAINT_VERTEX':
mode_icon = "VPAINT_HLT"
elif mode_current == 'PAINT_TEXTURE':
mode_icon = "TPAINT_HLT"
elif mode_current == 'PARTICLE':
mode_icon = "PARTICLES"
elif mode_current in ('PAINT_GPENCIL', 'EDIT_GPENCIL', 'SCULPT_GPENCIL', 'WEIGHT_GPENCIL', 'VERTEX_GPENCIL'):
mode_icon = "GREASEPENCIL"
if user['admin']:
status_icon = 'FAKE_USER_ON'
split = layout.split(factor=0.35)
split.label(text=item.username, icon=status_icon)
split = split.split(factor=0.5)
split.label(text=scene_current)
split = split.split(factor=0.3)
split.label(icon=mode_icon)
split.label(text=frame_current)
split.label(text=scene_current)
split.label(text=ping)
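As an editorial aside, the elif chain above could be collapsed into a lookup table; a sketch with the same icon assignments (a suggested alternative, not part of the change itself):

# Sketch only: dict-based alternative to the mode icon chain above.
MODE_ICONS = {
    'OBJECT': 'OBJECT_DATAMODE', 'EDIT_MESH': 'EDITMODE_HLT',
    'EDIT_CURVE': 'CURVE_DATA', 'EDIT_SURFACE': 'SURFACE_DATA',
    'EDIT_TEXT': 'FILE_FONT', 'EDIT_ARMATURE': 'ARMATURE_DATA',
    'EDIT_METABALL': 'META_BALL', 'EDIT_LATTICE': 'LATTICE_DATA',
    'POSE': 'POSE_HLT', 'SCULPT': 'SCULPTMODE_HLT',
    'PAINT_WEIGHT': 'WPAINT_HLT', 'PAINT_VERTEX': 'VPAINT_HLT',
    'PAINT_TEXTURE': 'TPAINT_HLT', 'PARTICLE': 'PARTICLES',
}
GPENCIL_MODES = {'PAINT_GPENCIL', 'EDIT_GPENCIL', 'SCULPT_GPENCIL',
                 'WEIGHT_GPENCIL', 'VERTEX_GPENCIL'}
mode_icon = 'GREASEPENCIL' if mode_current in GPENCIL_MODES \
    else MODE_ICONS.get(mode_current, 'BLANK1')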
class SESSION_PT_presence(bpy.types.Panel):
bl_idname = "MULTIUSER_MODULE_PT_panel"
bl_label = "Presence overlay"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
return not session \
or (session and session.state in [STATE_INITIAL, STATE_ACTIVE])
def draw_header(self, context):
self.layout.prop(context.window_manager.session,
"enable_presence", text="",icon='OVERLAY')
def draw(self, context):
layout = self.layout
settings = context.window_manager.session
pref = get_preferences()
layout.active = settings.enable_presence
col = layout.column()
col.prop(settings, "presence_show_session_status")
row = col.column()
row.active = settings.presence_show_session_status
row.prop(pref, "presence_hud_scale", expand=True)
row = col.column(align=True)
row.active = settings.presence_show_session_status
row.prop(pref, "presence_hud_hpos", expand=True)
row.prop(pref, "presence_hud_vpos", expand=True)
col.prop(settings, "presence_show_selected")
col.prop(settings, "presence_show_user")
row = layout.column()
row.active = settings.presence_show_user
row.prop(settings, "presence_show_far_user")
def draw_property(context, parent, property_uuid, level=0):
settings = get_preferences()
runtime_settings = context.window_manager.session
item = session.repository.get_node(property_uuid)
item = session.repository.graph.get(property_uuid)
type_id = item.data.get('type_id')
area_msg = parent.row(align=True)
if item.state == ERROR:
@ -450,23 +447,25 @@ def draw_property(context, parent, property_uuid, level=0):
line = area_msg.box()
name = item.data['name'] if item.data else item.uuid
icon = settings.supported_datablocks[type_id].icon if type_id else 'ERROR'
detail_item_box = line.row(align=True)
detail_item_box.label(text="",
icon=settings.supported_datablocks[item.str_type].icon)
detail_item_box.label(text="", icon=icon)
detail_item_box.label(text=f"{name}")
# Operations
have_right_to_modify = (item.owner == settings.username or \
item.owner == RP_COMMON) and item.state != ERROR
from multi_user import icons
sync_status = icons.icons_col["repository_push"] # TODO: link each entry to the matching sync state (push/merge/issue); use "UNLINKED" as the issue icon
# sync_status = icons.icons_col["repository_merge"]
if have_right_to_modify:
detail_item_box.operator(
"session.commit",
text="",
icon='TRIA_UP').target = item.uuid
icon_value=sync_status.icon_id).target = item.uuid
detail_item_box.separator()
if item.state in [FETCHED, UP]:
@ -498,12 +497,40 @@ def draw_property(context, parent, property_uuid, level=0):
else:
detail_item_box.label(text="", icon="DECORATE_LOCKED")
class SESSION_PT_sync(bpy.types.Panel):
bl_idname = "MULTIUSER_SYNC_PT_panel"
bl_label = "Synchronize"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
return session \
and session.state in [STATE_ACTIVE]
def draw_header(self, context):
self.layout.label(text="", icon='UV_SYNC_SELECT')
def draw(self, context):
layout = self.layout
settings = get_preferences()
row= layout.row()
row = row.grid_flow(row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
row.prop(settings.sync_flags, "sync_render_settings",text="",icon_only=True, icon='SCENE')
row.prop(settings.sync_flags, "sync_during_editmode", text="",icon_only=True, icon='EDITMODE_HLT')
row.prop(settings.sync_flags, "sync_active_camera", text="",icon_only=True, icon='VIEW_CAMERA')
class SESSION_PT_repository(bpy.types.Panel):
bl_idname = "MULTIUSER_PROPERTIES_PT_panel"
bl_label = "Repository"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
@ -517,7 +544,8 @@ class SESSION_PT_repository(bpy.types.Panel):
return hasattr(context.window_manager, 'session') and \
session and \
(session.state == STATE_ACTIVE or \
session.state == STATE_LOBBY and admin)
session.state == STATE_LOBBY and admin) and \
not settings.sidebar_repository_shown
def draw_header(self, context):
self.layout.label(text="", icon='OUTLINER_OB_GROUP_INSTANCE')
@ -531,54 +559,42 @@ class SESSION_PT_repository(bpy.types.Panel):
usr = session.online_users.get(settings.username)
row = layout.row()
if session.state == STATE_ACTIVE:
if 'SessionBackupTimer' in registry:
row = layout.row()
row.alert = True
row.operator('session.cancel_autosave', icon="CANCEL")
row.alert = False
else:
row.operator('session.save', icon="FILE_TICK")
# else:
# row.operator('session.save', icon="FILE_TICK")
flow = layout.grid_flow(
row_major=True,
columns=0,
even_columns=True,
even_rows=False,
align=True)
for item in settings.supported_datablocks:
col = flow.column(align=True)
col.prop(item, "use_as_filter", text="", icon=item.icon)
row = layout.row(align=True)
row.prop(runtime_settings, "filter_owned", text="Show only owned")
row = layout.row(align=True)
box = layout.box()
row = box.row()
row.prop(runtime_settings, "filter_owned", text="Only show owned data blocks", icon_only=True, icon="DECORATE_UNLOCKED")
row = box.row()
row.prop(runtime_settings, "filter_name", text="Filter")
row = box.row()
# Properties
types_filter = [t.type_name for t in settings.supported_datablocks
if t.use_as_filter]
owned_nodes = [k for k, v in session.repository.graph.items() if v.owner==settings.username]
key_to_filter = session.list(
filter_owner=settings.username) if runtime_settings.filter_owned else session.list()
filtered_node = owned_nodes if runtime_settings.filter_owned else list(session.repository.graph.keys())
client_keys = [key for key in key_to_filter
if session.repository.get_node(key).str_type
in types_filter]
if runtime_settings.filter_name:
filtered_node = [n for n in filtered_node if runtime_settings.filter_name.lower() in (session.repository.graph.get(n).data or {}).get('name', '').lower()]
if client_keys:
if filtered_node:
col = layout.column(align=True)
for key in client_keys:
for key in filtered_node:
draw_property(context, col, key)
else:
row.label(text="Empty")
layout.row().label(text="Empty")
elif session.state == STATE_LOBBY and usr and usr['admin']:
row = layout.row()
row.operator("session.init", icon='TOOL_SETTINGS', text="Init")
else:
row = layout.row()
row.label(text="Waiting to start")
class VIEW3D_PT_overlay_session(bpy.types.Panel):
@ -594,37 +610,70 @@ class VIEW3D_PT_overlay_session(bpy.types.Panel):
def draw(self, context):
layout = self.layout
view = context.space_data
overlay = view.overlay
display_all = overlay.show_overlays
col = layout.column()
row = col.row(align=True)
settings = context.window_manager.session
pref = get_preferences()
layout.active = settings.enable_presence
col = layout.column()
col.prop(settings, "presence_show_session_status")
col.prop(settings, "presence_show_selected")
col.prop(settings, "presence_show_user")
row = layout.row()
row.prop(settings, "presence_show_selected",text="Selected Objects")
row = layout.column()
row.active = settings.presence_show_user
row.prop(settings, "presence_show_far_user")
row = layout.row(align=True)
row.prop(settings, "presence_show_user", text="Users camera")
row.prop(settings, "presence_show_mode", text="Users mode")
col = layout.column()
if settings.presence_show_mode or settings.presence_show_user:
row = col.column()
row.prop(pref, "presence_text_distance", expand=True)
row = col.column()
row.prop(settings, "presence_show_far_user", text="Users on different scenes")
col.prop(settings, "presence_show_session_status")
if settings.presence_show_session_status :
split = layout.split()
text_pos = split.column(align=True)
text_pos.active = settings.presence_show_session_status
text_pos.prop(pref, "presence_hud_hpos", expand=True)
text_pos.prop(pref, "presence_hud_vpos", expand=True)
text_scale = split.column()
text_scale.active = settings.presence_show_session_status
text_scale.prop(pref, "presence_hud_scale", expand=True)
class SESSION_UL_network(bpy.types.UIList):
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index, flt_flag):
settings = get_preferences()
server_name = '-'
server_status = 'BLANK1'
server_private = 'BLANK1'
server_name = item.server_name
split = layout.split(factor=0.7)
if item.is_private:
server_private = 'LOCKED'
split.label(text=server_name, icon=server_private)
else:
split.label(text=server_name)
from multi_user import icons
server_status = icons.icons_col["server_offline"]
if item.is_online:
server_status = icons.icons_col["server_online"]
split.label(icon_value=server_status.icon_id)
classes = (
SESSION_UL_users,
SESSION_UL_network,
SESSION_PT_settings,
SESSION_PT_settings_user,
SESSION_PT_settings_network,
SESSION_PT_presence,
SESSION_PT_advanced_settings,
SESSION_PT_user,
SESSION_PT_sync,
SESSION_PT_repository,
VIEW3D_PT_overlay_session,
)
register, unregister = bpy.utils.register_classes_factory(classes)
if __name__ == "__main__":

View File

@ -38,6 +38,14 @@ from replication.constants import (STATE_ACTIVE, STATE_AUTH,
STATE_LOBBY,
CONNECTING)
CLEARED_DATABLOCKS = ['actions', 'armatures', 'cache_files', 'cameras',
'collections', 'curves', 'filepath', 'fonts',
'grease_pencils', 'images', 'lattices', 'libraries',
'lightprobes', 'lights', 'linestyles', 'masks',
'materials', 'meshes', 'metaballs', 'movieclips',
'node_groups', 'objects', 'paint_curves', 'particles',
'scenes', 'shape_keys', 'sounds', 'speakers', 'texts',
'textures', 'volumes', 'worlds']
def find_from_attr(attr_name, attr_value, list):
for item in list:
@ -101,17 +109,25 @@ def get_state_str(state):
def clean_scene():
for type_name in dir(bpy.data):
try:
type_collection = getattr(bpy.data, type_name)
for item in type_collection:
sub_collection_to_avoid = [
bpy.data.linestyles.get('LineStyle'),
bpy.data.materials.get('Dots Stroke')
]
for type_name in CLEARED_DATABLOCKS:
type_collection = getattr(bpy.data, type_name)
items_to_remove = [i for i in type_collection if i not in sub_collection_to_avoid]
for item in items_to_remove:
try:
type_collection.remove(item)
except:
continue
logging.info(item.name)
except:
continue
# Clear sequencer
bpy.context.scene.sequence_editor_clear()
def get_selected_objects(scene, active_view_layer):
return [obj.uuid for obj in scene.objects if obj.select_get(view_layer=active_view_layer)]
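One caveat worth noting: not every name in CLEARED_DATABLOCKS resolves to a removable collection ('filepath', for instance, is a plain string attribute on bpy.data), so a small defensive filter keeps clean_scene from looping over non-collection attributes. A sketch, assuming the Blender 2.9x Python API:

# Sketch only: keep just the whitelist entries that are real removable ID collections.
import bpy
clearable = [name for name in CLEARED_DATABLOCKS
             if hasattr(getattr(bpy.data, name, None), 'remove')]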

View File

@ -1,4 +1,4 @@
import re
init_py = open("multi_user/__init__.py").read()
init_py = open("multi_user/libs/replication/replication/__init__.py").read()
print(re.search("\d+\.\d+\.\d+\w\d+|\d+\.\d+\.\d+", init_py).group(0))

View File

@ -13,7 +13,7 @@ def main():
if len(sys.argv) > 2:
blender_rev = sys.argv[2]
else:
blender_rev = "2.92.0"
blender_rev = "2.93.0"
try:
exit_val = BAT.test_blender_addon(addon_path=addon, blender_revision=blender_rev)

View File

@ -32,11 +32,11 @@ def test_action(clear_blend):
# Test
implementation = BlAction()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.actions.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)
assert not DeepDiff(expected, result)
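Every datablock test below follows the same round trip through the public dump/construct/load API (the underscore-prefixed methods are gone). A sketch of the shared pattern; the helper name is mine, not part of the test suite:

# Sketch only: the dump -> construct -> load -> dump round trip used by the tests below.
from deepdiff import DeepDiff

def roundtrip(implementation, datablock, collection):
    expected = implementation.dump(datablock)
    collection.remove(datablock)          # e.g. bpy.data.actions
    rebuilt = implementation.construct(expected)
    implementation.load(expected, rebuilt)
    assert not DeepDiff(expected, implementation.dump(rebuilt))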

View File

@ -12,11 +12,11 @@ def test_armature(clear_blend):
datablock = bpy.data.armatures[0]
implementation = BlArmature()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.armatures.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)
assert not DeepDiff(expected, result)

View File

@ -15,11 +15,11 @@ def test_camera(clear_blend, camera_type):
datablock.type = camera_type
camera_dumper = BlCamera()
expected = camera_dumper._dump(datablock)
expected = camera_dumper.dump(datablock)
bpy.data.cameras.remove(datablock)
test = camera_dumper._construct(expected)
camera_dumper._load(expected, test)
result = camera_dumper._dump(test)
test = camera_dumper.construct(expected)
camera_dumper.load(expected, test)
result = camera_dumper.dump(test)
assert not DeepDiff(expected, result)

View File

@ -23,11 +23,11 @@ def test_collection(clear_blend):
# Test
implementation = BlCollection()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.collections.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)
assert not DeepDiff(expected, result)

View File

@ -19,11 +19,11 @@ def test_curve(clear_blend, curve_type):
datablock = bpy.data.curves[0]
implementation = BlCurve()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.curves.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)
assert not DeepDiff(expected, result)

View File

@ -13,11 +13,11 @@ def test_gpencil(clear_blend):
datablock = bpy.data.grease_pencils[0]
implementation = BlGpencil()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.grease_pencils.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)
assert not DeepDiff(expected, result)

View File

@ -13,11 +13,11 @@ def test_lattice(clear_blend):
datablock = bpy.data.lattices[0]
implementation = BlLattice()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.lattices.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)
assert not DeepDiff(expected, result)

View File

@ -14,11 +14,11 @@ def test_lightprobes(clear_blend, lightprobe_type):
blender_light = bpy.data.lightprobes[0]
lightprobe_dumper = BlLightprobe()
expected = lightprobe_dumper._dump(blender_light)
expected = lightprobe_dumper.dump(blender_light)
bpy.data.lightprobes.remove(blender_light)
test = lightprobe_dumper._construct(expected)
lightprobe_dumper._load(expected, test)
result = lightprobe_dumper._dump(test)
test = lightprobe_dumper.construct(expected)
lightprobe_dumper.load(expected, test)
result = lightprobe_dumper.dump(test)
assert not DeepDiff(expected, result)

View File

@ -13,11 +13,11 @@ def test_light(clear_blend, light_type):
blender_light = bpy.data.lights[0]
light_dumper = BlLight()
expected = light_dumper._dump(blender_light)
expected = light_dumper.dump(blender_light)
bpy.data.lights.remove(blender_light)
test = light_dumper._construct(expected)
light_dumper._load(expected, test)
result = light_dumper._dump(test)
test = light_dumper.construct(expected)
light_dumper.load(expected, test)
result = light_dumper.dump(test)
assert not DeepDiff(expected, result)

View File

@ -17,12 +17,12 @@ def test_material_nodes(clear_blend):
datablock.node_tree.nodes.new(ntype)
implementation = BlMaterial()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.materials.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)
assert not DeepDiff(expected, result)
@ -32,11 +32,11 @@ def test_material_gpencil(clear_blend):
bpy.data.materials.create_gpencil_data(datablock)
implementation = BlMaterial()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.materials.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)
assert not DeepDiff(expected, result)

View File

@ -18,11 +18,11 @@ def test_mesh(clear_blend, mesh_type):
# Test
implementation = BlMesh()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.meshes.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)
assert not DeepDiff(expected, result)

View File

@ -13,11 +13,11 @@ def test_metaball(clear_blend, metaballs_type):
datablock = bpy.data.metaballs[0]
dumper = BlMetaball()
expected = dumper._dump(datablock)
expected = dumper.dump(datablock)
bpy.data.metaballs.remove(datablock)
test = dumper._construct(expected)
dumper._load(expected, test)
result = dumper._dump(test)
test = dumper.construct(expected)
dumper.load(expected, test)
result = dumper.dump(test)
assert not DeepDiff(expected, result)

View File

@ -65,11 +65,11 @@ def test_object(clear_blend):
datablock.shape_key_add(name='shape2')
implementation = BlObject()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.objects.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)
print(DeepDiff(expected, result))
assert not DeepDiff(expected, result)

View File

@ -12,14 +12,16 @@ def test_scene(clear_blend):
get_preferences().sync_flags.sync_render_settings = True
datablock = bpy.data.scenes.new("toto")
datablock.timeline_markers.new('toto', frame=10)
datablock.timeline_markers.new('tata', frame=1)
datablock.view_settings.use_curve_mapping = True
# Test
implementation = BlScene()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.scenes.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)
assert not DeepDiff(expected, result)

View File

@ -12,11 +12,11 @@ def test_speaker(clear_blend):
datablock = bpy.data.speakers[0]
implementation = BlSpeaker()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.speakers.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)
assert not DeepDiff(expected, result)

View File

@ -14,11 +14,11 @@ def test_texture(clear_blend, texture_type):
datablock = bpy.data.textures.new('test', texture_type)
implementation = BlTexture()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.textures.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)
assert not DeepDiff(expected, result)

View File

@ -11,11 +11,11 @@ def test_volume(clear_blend):
datablock = bpy.data.volumes.new("Test")
implementation = BlVolume()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.volumes.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)
assert not DeepDiff(expected, result)

View File

@ -12,11 +12,11 @@ def test_world(clear_blend):
datablock.use_nodes = True
implementation = BlWorld()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.worlds.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)
assert not DeepDiff(expected, result)