Compare commits

..

241 Commits

Author SHA1 Message Date
dfcfb84c20 fix: text curve material loading 2021-12-17 16:14:42 +01:00
5390e1a60c Merge branch '235-show-color-in-connected-user-pannel' into 'develop'
Resolve "Show color in connected user pannel"

See merge request slumber/multi-user!154
2021-12-13 21:35:33 +00:00
2910ea654b clean: row factor 2021-12-13 22:29:55 +01:00
ff2ecec18b Merge branch '243-server-crash-during-public-sessions' into 'develop'
Resolve "Server crash during public sessions"

See merge request slumber/multi-user!162
2021-12-10 15:00:06 +00:00
7555b1332a feat: update version 2021-12-10 15:56:47 +01:00
690e450349 fix: avoid to store Commit in the replication graph 2021-12-10 15:55:59 +01:00
de32bd89e3 Merge branch '237-add-draw-user-option-for-the-session-snapshot-importer' into 'develop'
Resolve "Add draw user option for the session snapshot importer"

See merge request slumber/multi-user!156
2021-11-18 15:21:36 +00:00
50e86aea15 fix user drawing options 2021-11-18 16:05:24 +01:00
c05a12343c feat: selection drawing 2021-11-18 15:22:07 +01:00
a09193fba2 feat: expose user radius and intensity 2021-11-18 11:53:24 +01:00
60e21f2b8e fix: load user 2021-11-18 11:43:01 +01:00
421f00879f feat draw users 2021-11-18 11:40:56 +01:00
5ac61b5348 Merge branch 'develop' into 235-show-color-in-connected-user-pannel 2021-11-17 16:23:03 +01:00
189e5c6cf1 Merge branch 'develop' into 235-show-color-in-connected-user-pannel 2021-11-17 16:19:03 +01:00
964e6a8c63 feat: uesr meshes 2021-11-16 09:55:13 +01:00
80c81dc934 Merge branch '240-adding-music-to-the-sequencer-isn-t-replicating' into 'develop'
Resolve "Adding music to the sequencer isn't replicating"

See merge request slumber/multi-user!159
2021-11-09 09:29:58 +00:00
563fdb693d fix: sound not loading
Related to #240
2021-11-09 10:26:47 +01:00
a64eea3cea Merge branch '239-blender-3-x-compatibility' into 'develop'
Ensure blender 3.x compatibility : Fix geometry node outputs replication

See merge request slumber/multi-user!158
2021-11-09 08:48:30 +00:00
03ad7c0066 fix: geometry nodes input / output 2021-11-08 17:34:02 +01:00
d685573834 Merge branch '239-blender-3-x-compatibility' into 'develop'
Ensure blender 3.x version check

See merge request slumber/multi-user!157
2021-11-05 15:20:35 +00:00
0681b53141 fix: version check 2021-11-05 15:39:46 +01:00
6f02b38b0e fix(replication): missing version update 2021-11-03 16:37:12 +01:00
92c773dae9 Merge branch 'develop' of gitlab.com:slumber/multi-user into develop 2021-11-03 16:34:43 +01:00
f48ade6390 fix python 3.10 compatibility (@NotFood) 2021-11-03 16:32:40 +01:00
63c4501b88 Merge branch '236-crash-with-empty-after-a-reconnection' into 'develop'
Resolve "Crash with empty after a reconnection"

See merge request slumber/multi-user!155
2021-10-29 09:40:04 +00:00
06e21c86ce fix none attribute error 2021-10-21 12:19:46 +02:00
e28d3860da user color property 2021-10-21 12:00:12 +02:00
7b247372fb test: add user color 2021-10-21 12:00:00 +02:00
9d484b00e9 Merge branch '234-user-info-in-side-panel' into 'develop'
User Info in side panel

See merge request slumber/multi-user!153
2021-08-19 16:09:24 +00:00
de9255f71c feat: presence overlay button+UInfo in side panel 2021-08-19 18:04:07 +02:00
99528ea3e0 Merge branch '232-fix-ui-host-and-lobby' into 'develop'
Resolve "fix ui host and lobby"

See merge request slumber/multi-user!152
2021-08-16 14:03:16 +00:00
bb342951a5 fix: lobby init 2021-08-16 15:59:19 +02:00
438a79177b fix: host solo 2021-08-16 12:02:10 +02:00
08fc49c40f fix: session private by default 2021-07-30 14:09:40 +02:00
d7e25b1192 fix: clean docker file 2021-07-30 13:47:31 +02:00
1671422143 Merge branch 'develop' of gitlab.com:slumber/multi-user into develop 2021-07-30 13:17:29 +02:00
a9620c0752 fix: docker server command 2021-07-30 13:16:43 +02:00
583beaf6fe Merge branch '231-server-public-session-private-issue' into 'develop'
Server "public session" private issue

See merge request slumber/multi-user!151
2021-07-28 15:34:24 +00:00
126d2338f2 fix: server psrwd issue 2021-07-28 17:33:07 +02:00
24b0c0ed8a fix: get active server preset 2021-07-27 17:03:44 +02:00
07fc1cf000 fix: enable tests back 2021-07-27 11:08:57 +02:00
8e0131b3a8 feat: temporary disable test before fixing blender addon_tester 2021-07-26 18:29:06 +02:00
912a2d524c feat: try disable operator tests 2021-07-26 18:19:24 +02:00
82a5124d64 fix: unit tests 2021-07-26 18:16:21 +02:00
cca5bf903b fix: replication deployment 2021-07-26 18:05:36 +02:00
4c0d4cb1c7 Merge branch '218-new-ui-ux-implementation' into 'develop'
New UI/UX implementation

See merge request slumber/multi-user!140
2021-07-26 15:52:19 +00:00
ca64797641 Merge branch 'develop' into 218-new-ui-ux-implementation 2021-07-26 17:51:01 +02:00
a49d9ee437 feat: server ping timeout preferences 2021-07-26 17:42:13 +02:00
4c1cd6b8f8 fix: review 2021-07-26 17:30:56 +02:00
d6cda709a6 fix: replication conflict 2021-07-26 15:46:29 +02:00
4bc0feb3a5 fix: ReferenceError in update_external dependency by removing orphan nodes. 2021-07-23 19:35:56 +02:00
59aab7159a fix: remove logging.info 2021-07-23 17:10:10 +02:00
0a798bb21b feat: clean files+add repository sync icons 2021-07-23 12:51:16 +02:00
beaafce4fa Merge branch 'develop' into 218-new-ui-ux-implementation 2021-07-22 11:01:59 +02:00
6f77337832 feat: request (with bug) 2021-07-22 10:55:18 +02:00
07252d62df feat: fonctional UI, no server pswd/ping 2021-07-22 09:38:01 +02:00
ac615cd134 feat: first+server list UI, ping/pswd unfonctional 2021-07-21 11:12:17 +02:00
a4f9f6e051 fix: replication dependencies conflicts 2021-07-20 16:19:53 +02:00
10de88cdc9 fix: old replication installation conflicts 2021-07-20 16:06:24 +02:00
e4fa34c984 fix: addon version number 2021-07-20 15:37:11 +02:00
0dd685d009 doc: add missing presence flags 2021-07-20 15:11:38 +02:00
3e8c30c0ab fix: supported datablocks in readme 2021-07-20 14:59:30 +02:00
21cc3cd917 fix: update readme to reflect changes 2021-07-20 14:57:52 +02:00
81e620ee3d fix: documentations capture for 0.4.0 2021-07-20 14:50:33 +02:00
fb9bd108bd feat: update changelog to reflect v0.4.0 version 2021-07-20 14:19:33 +02:00
4846fbb589 fix: server list working (no ping/lock/pop-up) 2021-07-19 16:03:12 +02:00
cab6625399 Merge branch '219-lock-annotation-doesn-t-sync' into 'develop'
Resolve "Lock annotation doesn't sync"

See merge request slumber/multi-user!143
2021-07-14 10:41:32 +00:00
1b81251a11 fix: annotation lock 2021-07-14 12:38:30 +02:00
cf44e547a2 fix: presence_text_distance rename 2021-07-13 17:15:34 +02:00
0269363c63 fix: overlay UI 2021-07-13 17:14:32 +02:00
4ffca17c54 fix: edit>prefs UI/UX 2021-07-13 16:40:26 +02:00
77bf269fb5 Merge branch '221-optimize-user-selection-draw-code' into 'develop'
Resolve "Optimize user selection draw code"

See merge request slumber/multi-user!142
2021-07-13 14:37:09 +00:00
1e675132d4 fix: collection instances index offset 2021-07-13 16:33:46 +02:00
781287c390 refactor: use one drawcall for all selection bbox 2021-07-13 15:45:08 +02:00
fc91b252f4 feat: edit>preferences + serverpassword ui 2021-07-12 18:01:35 +02:00
41c7c569ca fix: link session status icons to session header 2021-07-12 14:55:41 +02:00
a82d263f05 feat: add "icons" folder + init 2021-07-12 14:53:18 +02:00
d4476baa1b Merge branch '220-batch-right-selection-update' into 'develop'
Resolve "Batch right selection update"

See merge request slumber/multi-user!141
2021-07-12 10:20:23 +00:00
467e98906e feat: Batch right selection update
Related to https://gitlab.com/slumber/multi-user/-/issues/220
2021-07-12 12:06:45 +02:00
64a25f94a3 fix: gpencil material loading error
Now loading gpencil materials from uuid
2021-07-09 16:59:59 +02:00
e6996316be Merge branch '215-annotations-doesn-t-sync-correctly' into 'develop'
Resolve "Annotations doesn't sync correctly"

See merge request slumber/multi-user!138
2021-07-07 08:18:49 +00:00
cf4cd94096 refactor: remove gpencil dump stroke legacy
Related to #166 and #215
2021-07-07 10:15:23 +02:00
e9ab633aac fix: annotations updates
Related to #215
2021-07-06 16:06:14 +02:00
297639e80f fix: crash on changing workspace change 2021-07-06 15:39:19 +02:00
f0cc63b6f0 Merge branch '214-animated-object-transform-not-correctly-sync' into 'develop'
Resolve "Animated object transform not correctly sync"

See merge request slumber/multi-user!137
2021-07-06 12:32:39 +00:00
d433e8f241 fix: transform offset for object animated with a curve constraint
Related to #214
2021-07-06 14:29:20 +02:00
963a551a1e Merge branch '206-draw-active-mode-in-the-object-presence-overlay-2' into 'develop'
Draw active mode in the object presence overlay

See merge request slumber/multi-user!131
2021-07-01 12:57:01 +00:00
d01a434fb7 fix: Review 2021-07-01 14:53:14 +02:00
3a5a5fc633 fix : draw active mode UI side pannel 2021-07-01 11:58:52 +02:00
8926ab44e1 Merge branch '201-improved-image-support' into 'develop'
Resolve "Improved image support"

See merge request slumber/multi-user!136
2021-07-01 09:55:47 +00:00
a8f96581c5 fix: new mode display 2021-06-30 15:34:03 +02:00
440a4cc1cd feat: add mode visibily 2021-06-29 17:10:59 +02:00
a207c51973 fix: image renamin support
fix: sync Color Space Settings

related to #201
2021-06-29 15:59:26 +02:00
e706c8e0bf Merge branch '209-adding-a-scene-create-node-duplicates' into 'develop'
Resolve "Adding a scene create node duplicates"

See merge request slumber/multi-user!135
2021-06-28 08:30:22 +00:00
e590e896da fix: scene duplicates by using data instead of the update id
Related to #209
2021-06-28 10:27:04 +02:00
4140b62a8e Merge branch '119-add-timeline-marker-sync' into 'develop'
Resolve "Add timeline marker sync"

See merge request slumber/multi-user!133
2021-06-24 15:52:12 +00:00
6d9c9c4532 fix: timeline marker selection
feat: basic test
2021-06-24 17:45:34 +02:00
e9e1911840 Merge branch '208-late-update-logging-error' into 'develop'
Resolve "Late update logging error"

See merge request slumber/multi-user!134
2021-06-24 15:28:56 +00:00
ab350ca7bc fix: late update logging error
Related to #208
2021-06-24 17:24:08 +02:00
0a8f0b5f88 feat: add mode overlay 2021-06-24 16:01:14 +02:00
2238a15c11 feat: initial markers support 2021-06-24 15:51:01 +02:00
de73f022e6 merge 2021-06-24 14:52:07 +02:00
f517205647 fix: doc authors 2021-06-24 14:51:00 +02:00
f33c3d8481 fix: doc version 2021-06-24 14:50:12 +02:00
71c69000ec Merge branch '207-repository-panel-filtering-is-boken' into 'develop'
Resolve "Repository panel filtering is boken"

See merge request slumber/multi-user!132
2021-06-24 12:49:06 +00:00
de1e684b3c fix: name filtering 2021-06-24 14:35:59 +02:00
d87730cffb Merge branch '197-user-selection-bounding-box-glitches-for-non-mesh-objects' into 'develop'
User selection bounding box glitches for non-mesh objects

See merge request slumber/multi-user!129
2021-06-23 16:02:50 +00:00
3f005b86ab fix : add enumerate / remove nb_object 2021-06-23 17:45:01 +02:00
5098e5135d fix: bbox work for non-mesh objects+ins.collection 2021-06-23 17:00:05 +02:00
37cfed489c Merge branch '204-animation-doesn-t-sync-for-gpencil-materials' into 'develop'
Resolve "Animation doesn't sync for materials"

See merge request slumber/multi-user!128
2021-06-22 12:10:23 +00:00
9003abcd18 feat: notes for furtur improvements 2021-06-22 14:06:19 +02:00
a199e0df00 feat: apply bl_apply_child member to force dependencies reloading
fix: node_tree animation dependencies
2021-06-22 11:36:51 +02:00
3774419b7e fix: force push is now pushing the whole node data instead of delta 2021-06-22 10:41:36 +02:00
3e552cb406 feat: gpencil materials animation support 2021-06-22 10:39:40 +02:00
9f381b44c8 fix: material animation support 2021-06-21 18:58:16 +02:00
ad795caed5 fix: only apply repository heads on connection 2021-06-21 18:38:43 +02:00
504dd77405 fix: scene cleaning 2021-06-21 17:10:05 +02:00
82022c9e4d clean: only log ignored update in debug logging level 2021-06-18 15:45:51 +02:00
d81b4dc014 feat: enable delta back for all datablocks execpt gpencil, files and images 2021-06-18 15:30:39 +02:00
63affa079f Merge branch '199-filter-correctly-distant-updates-in-the-depsgraph-handler' into 'develop'
Resolve "Filter correctly distant updates in the depsgraph handler"

See merge request slumber/multi-user!126
2021-06-18 13:12:15 +00:00
fcf5a12dd0 fix: log verbosity level 2021-06-18 15:03:14 +02:00
b0529e4444 refactor: move handlers to hendlers.py 2021-06-18 14:59:56 +02:00
bdfd89c085 feat: temporary store applied update to ignore them. 2021-06-18 14:34:11 +02:00
ff1630f9cc Merge branch '194-smooth-brush-size-reset' into 'develop'
Resolve "Brush deleted on join"

See merge request slumber/multi-user!124
2021-06-16 12:30:31 +00:00
5830fe1abb fix: add items_to_remove 2021-06-16 14:28:26 +02:00
c609f72080 fix: All brushes 2021-06-16 12:29:56 +02:00
a28a6f91bd feat: move testing to blender 2.93 2021-06-15 16:27:49 +02:00
a996f39d3b Merge branch '195-auto-updater-install-a-broken-version-of-the-addon' into 'develop'
Resolve "Auto updater install a broken version of the addon"

See merge request slumber/multi-user!123
2021-06-15 12:54:49 +00:00
7790a16034 fix: download the build artifact instead of the repository default zip
Related to #195
2021-06-15 14:51:37 +02:00
836fdd02b8 Merge branch '192-parent-type-isn-t-synced' into 'develop'
Resolve "Parent type isn't synced"

See merge request slumber/multi-user!122
2021-06-15 09:22:13 +00:00
7cb3482353 fix: parent type and parent bone 2021-06-15 11:20:31 +02:00
041022056c Merge branch 'develop' of gitlab.com:slumber/multi-user into develop 2021-06-14 17:32:50 +02:00
05f3eb1445 fix: update readme 2021-06-14 17:32:05 +02:00
17193bde3a fix: doc server .png names 2021-06-14 14:29:45 +00:00
a14b4313f5 feat: update to develop 2021-06-14 16:12:47 +02:00
b203d9dffd Merge branch '188-intgrate-replication-as-a-submodule' into develop 2021-06-14 16:10:15 +02:00
f64db2155e Merge branch '49-connection-preset-system' into 'develop'
Connection-preset-system

See merge request slumber/multi-user!121
2021-06-14 13:50:58 +00:00
e07ebdeff5 fix: remove ui overwrite class 2021-06-14 15:46:57 +02:00
3d6453f7a2 feat: doc 2021-06-14 15:17:30 +02:00
7421511079 fix: override operator 2021-06-14 15:17:07 +02:00
bc24525cec fix: new UI/UX 2021-06-11 16:57:02 +02:00
699cf578e2 feat: prevent updates in sclupt mode 2021-06-11 16:42:23 +02:00
e9b4afb440 refactor: enable partial delta based replication 2021-06-11 15:28:37 +02:00
0c6491590e fix: admin password root 2021-06-11 12:18:51 +02:00
b87e733ddc fix: name conflict + responsive enum 2021-06-11 12:13:23 +02:00
cb0962b484 feat: server preset working with bad ui/ux 2021-06-10 15:39:12 +02:00
1fc25412ac fix: constraint differential update support 2021-06-10 15:21:25 +02:00
b5405553dc refactor: install replication dependencies in libs 2021-06-09 18:16:43 +02:00
a1b6fb0533 feat: server preset 2021-06-08 17:03:43 +02:00
b6a8a2ec01 Revert "doc: comment ui draw()"
This reverts commit f7c4f5d1fe.
2021-06-08 15:02:53 +02:00
3e41b18af1 Merge branch '49-connection-preset-system' of https://gitlab.com/slumber/multi-user into 49-connection-preset-system 2021-06-08 15:00:50 +02:00
f7c4f5d1fe doc: comment ui draw() 2021-06-08 14:58:57 +02:00
c616054878 tour du python blender 2021-06-07 17:06:41 +02:00
5c08493774 fix 'GraphObjectStore' object has no attribute 'object_store' 2021-06-04 18:30:54 +02:00
af8a138b4f fix: modifier order 2021-06-04 17:17:30 +02:00
6d9216f14a refactor: cleanup repository 2021-06-04 16:07:02 +02:00
fc4fb088bb refactor: repository api clean 2021-06-04 14:02:09 +02:00
98553ba00c refactor: remove get_nodes 2021-06-04 12:13:53 +02:00
1e15a12b10 refactor: remove list 2021-06-04 12:07:54 +02:00
569543650f feat: skip external updates 2021-06-03 15:43:47 +02:00
07358802f7 refactor: fix scene item removal 2021-06-03 15:03:09 +02:00
a059fafe12 feat: add mutate to scene delta 2021-06-03 11:43:24 +02:00
297f68ccfe refactor: only apply node when it is necessary (skip for host) 2021-06-03 11:41:25 +02:00
c9c70d1e08 refactor: stamp datablock during apply 2021-06-03 11:20:54 +02:00
a34f58ef3f fix: cherrypick TCP idle bug 2021-06-02 23:10:13 +02:00
e7b7f38991 fix: change rights 2021-06-02 17:49:22 +02:00
392e0aaaa3 refactor: remove missing parameter 2021-06-02 15:45:11 +02:00
4c774d5d53 refactor: move update user metadata to porcelain 2021-06-02 12:59:53 +02:00
4c4cf8a970 refactor: move rm to porcelain 2021-06-02 11:47:41 +02:00
211d0848c2 fix: replication version 2021-06-02 11:39:37 +02:00
c9665c4719 refactor: move unlock/lock/kick to porcelain 2021-06-02 11:31:23 +02:00
431fe0d840 refactor: move lock/unock to porcelain 2021-06-02 10:22:37 +02:00
df7ca66ad8 fix: repo dumps api 2021-06-02 09:35:55 +02:00
c2d2db78e6 refactor: temporary remove name resolution 2021-06-01 15:47:05 +02:00
ad89a4e389 fix: disable mutable delta for scene 2021-06-01 14:53:17 +02:00
6ca6d4443d refactor: move load/dumps to repository 2021-05-31 11:39:54 +02:00
81c9b5fc06 fix: animation loading 2021-05-21 23:02:42 +02:00
9fddfe084c fix: annotation 2021-05-21 17:29:22 +02:00
ca40523393 fix: apply and resolve 2021-05-21 17:14:28 +02:00
76e28ced21 refactor: remove legacy data 2021-05-21 15:40:45 +02:00
55c6002b28 feat: update version 2021-05-20 17:22:00 +02:00
8d5c8aded3 refacor: code formating 2021-05-20 09:57:44 +02:00
8ebba80b97 refactor: add diff back 2021-05-19 17:44:42 +02:00
50d6c6b3c8 fix: filter 2021-05-19 15:59:36 +02:00
f0b03c50f2 refactor: fix tests 2021-05-19 15:12:11 +02:00
28e83a38e6 refactor: add back armature lightprobes, sound and speaker 2021-05-19 15:05:54 +02:00
2e261cd66b refactor: add particle and lattive back 2021-05-19 14:40:13 +02:00
3f6e4f7333 refactor: add texts back 2021-05-19 14:23:56 +02:00
49fadf084a refactor: add gpencil back 2021-05-19 13:56:42 +02:00
e2e0dc31c1 refactor: add volume and world support 2021-05-19 13:42:34 +02:00
389bbd97d5 refactor: add image and file back 2021-05-19 13:31:57 +02:00
19602691d3 feat: texture 2021-05-19 11:43:01 +02:00
2e2ff5d4bf refactor: add material nodegroup back 2021-05-19 11:25:56 +02:00
fef6559ce0 refactor: add light and camera support back 2021-05-19 10:52:04 +02:00
5f669fd49a refactor: add camera back 2021-05-19 09:55:07 +02:00
330ff08fd3 refactor: add collection back 2021-05-19 09:47:01 +02:00
f3be8f9623 feat: bring back icons 2021-05-19 09:37:50 +02:00
ffb70ab74c refactor: protocol refactoring part 1 (mesh, object, action, scene) 2021-05-18 23:14:09 +02:00
26140eefb2 refactor: clear replicated datablock init states 2021-05-18 18:23:28 +02:00
cdf0433e8a refactor: move fetch to repository 2021-05-18 17:17:10 +02:00
acd70f73bf refactor: add remote
refactor: move push to porcelain
2021-05-18 16:54:07 +02:00
36c3a9ab0b refactor: remove sanitize 2021-05-18 11:01:55 +02:00
cfb1afdd72 Revert "feat: node sanitize on collection and scene update"
This reverts commit fb1c985f31.
2021-05-18 11:00:05 +02:00
4eeb80350e fix: layer info missing 2021-05-18 10:54:13 +02:00
fb1c985f31 feat: node sanitize on collection and scene update 2021-05-17 17:35:34 +02:00
689c2473d6 fix: commit 2021-05-17 17:18:17 +02:00
41620fce90 fix: commit 2021-05-17 17:04:43 +02:00
249bcf827b fix: collection instance bounding box selection 2021-05-17 16:03:01 +02:00
d47eab4f26 refactor: move commit to porcelain 2021-05-17 11:12:18 +02:00
f011089d82 refactor: removed apply from replicated datablock 2021-05-17 10:52:28 +02:00
acc58a1c9f fix: tcp keepalive IDLE time 2021-05-16 22:26:53 +02:00
24d850de9f refactor: get metadata updates optimization back 2021-05-11 11:41:43 +02:00
b045911a59 refactor: get diff back for testing 2021-05-10 12:04:45 +02:00
a67be76422 feat: delta commit 2021-05-09 17:42:56 +02:00
32033c743c feat: update repllication version 2021-05-07 17:10:23 +02:00
5da8650611 fix: get replication version 2021-05-07 16:56:00 +02:00
aec5096f87 feat: update submodule url 2021-05-07 16:12:04 +02:00
fba39b9980 fix: ci with submodules 2021-05-07 15:47:53 +02:00
6af3e4b777 refactor: add threaded data handling back on server side 2021-05-04 16:25:36 +02:00
58d639e9d8 feat: add replication as a submoduke 2021-05-04 14:56:50 +02:00
0efe5d5a10 Merge branch 'remove-services' into 'develop'
refactor: differential revision Stage 1

See merge request slumber/multi-user!119
2021-05-04 12:24:05 +00:00
2ad93cf304 Merge branch 'develop' into remove-services 2021-04-30 16:53:02 +02:00
771d76a98b fix: missing shapekeys attr 2021-04-30 16:51:11 +02:00
1e83241494 feat: remove pull socket 2021-04-30 16:26:20 +02:00
1bcbff3ed4 Merge branch 'develop' into remove-services 2021-04-29 14:41:55 +02:00
9a45fe7125 fix: shapekey animation data 2021-04-29 14:41:11 +02:00
207901afdd Merge branch '184-uv-project-modifier-target-object-doesn-t-sync' into 'develop'
Resolve "UV project modifier target object doesn't sync"

See merge request slumber/multi-user!118
2021-04-29 09:11:47 +00:00
c6eb1ba22f fix: shapekey performances
Related to #187
2021-04-29 11:06:46 +02:00
ba4168d0fd Merge branch 'develop' into remove-services 2021-04-28 16:56:20 +02:00
00e7adf022 fix: Image Empty is not loading.
Related to #186
2021-04-28 10:01:04 +02:00
d9d8ca7ca0 revert: image source replication until a proper fix is done 2021-04-23 15:35:19 +02:00
e8cd271bd8 fix: renable gitlab-ci file 2021-04-23 11:48:01 +02:00
e71af6402c feat: increment addon version 2021-04-23 11:46:29 +02:00
dd1c6a4fc7 feat: enable back ci 2021-04-23 11:45:47 +02:00
7fe1ae83b1 feat: update replication version to the right one 2021-04-23 11:25:15 +02:00
a7ad9d30c3 Merge branch 'develop' into remove-services 2021-04-23 11:21:16 +02:00
14779be1ed feat: support video file as camera background images 2021-04-22 15:52:06 +02:00
a36c3740cc fix: load driver variable without id 2021-04-22 15:00:08 +02:00
d2108facab feat: fcurve modifiers support 2021-04-22 14:52:43 +02:00
e5651151d9 fix: having both animation and drivers on the same object 2021-04-22 14:00:26 +02:00
fb61b380b6 fix: uv_projector modifier
refactor: move modifier related code to dump_modifiers and load_modifier_custom_data
2021-04-22 11:05:34 +02:00
92 changed files with 3403 additions and 1827 deletions

.gitignore
@@ -13,4 +13,5 @@ multi_user_updater/
 _build
 # ignore generated zip generated from blender_addon_tester
 *.zip
+libs

.gitlab-ci.yml (new file)
@@ -0,0 +1,13 @@
+stages:
+- test
+- build
+- deploy
+- doc
+include:
+- local: .gitlab/ci/test.gitlab-ci.yml
+- local: .gitlab/ci/build.gitlab-ci.yml
+- local: .gitlab/ci/deploy.gitlab-ci.yml
+- local: .gitlab/ci/doc.gitlab-ci.yml

@@ -8,3 +8,5 @@ build:
 name: multi_user
 paths:
 - multi_user
+variables:
+GIT_SUBMODULE_STRATEGY: recursive

@@ -5,6 +5,7 @@ deploy:
 variables:
 DOCKER_DRIVER: overlay2
 DOCKER_TLS_CERTDIR: "/certs"
+GIT_SUBMODULE_STRATEGY: recursive
 services:
 - docker:19.03.12-dind

@@ -3,3 +3,5 @@ test:
 image: slumber/blender-addon-testing:latest
 script:
 - python3 scripts/test_addon.py
+variables:
+GIT_SUBMODULE_STRATEGY: recursive

.gitmodules
@@ -1,3 +1,3 @@
 [submodule "multi_user/libs/replication"]
 path = multi_user/libs/replication
-url = https://gitlab.com/slumber/replication
+url = https://gitlab.com/slumber/replication.git

@@ -186,4 +186,34 @@ All notable changes to this project will be documented in this file.
 - Exception access violation during Undo/Redo
 - Sync missing armature bone Roll
 - Sync missing driver data_path
 - Constraint replication
+
+## [0.4.0] - 2021-07-20
+
+### Added
+
+- Connection preset system (@Kysios)
+- Display connected users active mode (users pannel and viewport) (@Kysios)
+- Delta-based replication
+- Sync timeline marker
+- Sync images settings (@Kysios)
+- Sync parent relation type (@Kysios)
+- Sync uv project modifier
+- Sync FCurves modifiers
+
+### Changed
+
+- User selection optimizations (draw and sync) (@Kysios)
+- Improved shapekey syncing performances
+- Improved gpencil syncing performances
+- Integrate replication as a submodule
+- The dependencies are now installed in a folder(blender addon folder) that no longer requires administrative rights
+- Presence overlay UI optimization (@Kysios)
+
+### Fixed
+
+- User selection bounding box glitches for non-mesh objects (@Kysios)
+- Transforms replication for animated objects
+- GPencil fill stroke
+- Sculpt and GPencil brushes deleted when joining a session (@Kysios)
+- Auto-updater doesn't work for master and develop builds

@@ -11,9 +11,8 @@ This tool aims to allow multiple users to work on the same scene over the networ
 ## Quick installation
-1. Download latest release [multi_user.zip](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build).
-2. Run blender as administrator (dependencies installation).
-3. Install last_version.zip from your addon preferences.
+1. Download [latest build](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/develop/download?job=build) or [stable build](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build).
+2. Install last_version.zip from your addon preferences.
 [Dependencies](#dependencies) will be automatically added to your blender python during installation.
@@ -29,35 +28,35 @@ See the [troubleshooting guide](https://slumber.gitlab.io/multi-user/getting_sta
 Currently, not all data-block are supported for replication over the wire. The following list summarizes the status for each ones.
 | Name | Status | Comment |
-| -------------- | :----: | :----------------------------------------------------------: |
+| -------------- | :----: | :---------------------------------------------------------------------: |
 | action | ✔️ | |
-| armature | ❗ | Not stable |
-| camera | ✔️ | |
-| collection | ✔️ | |
-| curve | ❗ | Nurbs surfaces not supported |
-| gpencil | ✔️ | |
-| image | ✔️ | |
-| mesh | ✔️ | |
-| material | ✔️ | |
-| node_groups | ❗ | Material & Geometry only |
-| geometry nodes | ✔️ | |
-| metaball | ✔️ | |
-| object | ✔️ | |
-| textures | ❗ | Supported for modifiers/materials/geo nodes only |
-| texts | ✔️ | |
-| scene | ✔️ | |
-| world | ✔️ | |
-| lightprobes | ✔️ | |
-| compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
-| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
-| nla | ❌ | |
-| volumes | ✔️ | |
-| particles | ❗ | The cache isn't syncing. |
-| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
-| vse | ❗ | Mask and Clip not supported yet |
-| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
-| libraries | ❗ | Partial |
+| camera | ✔️ | |
+| collection | ✔️ | |
+| gpencil | ✔️ | |
+| image | ✔️ | |
+| mesh | ✔️ | |
+| material | ✔️ | |
+| node_groups | ✔️ | Material & Geometry only |
+| geometry nodes | ✔️ | |
+| metaball | ✔️ | |
+| object | ✔️ | |
+| texts | ✔️ | |
+| scene | ✔️ | |
+| world | ✔️ | |
+| volumes | ✔️ | |
+| lightprobes | ✔️ | |
+| physics | ✔️ | |
+| textures | ✔️ | |
+| curve | ❗ | Nurbs surfaces not supported |
+| armature | ❗ | Only for Mesh. [Planned for GPencil](https://gitlab.com/slumber/multi-user/-/issues/161). Not stable yet |
+| particles | ❗ | The cache isn't syncing. |
+| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
+| vse | ❗ | Mask and Clip not supported yet |
+| libraries | ❌ | |
+| nla | ❌ | |
+| texts | ❌ | [Planned for v0.5.0](https://gitlab.com/slumber/multi-user/-/issues/81) |
+| compositing | ❌ | [Planned for v0.5.0](https://gitlab.com/slumber/multi-user/-/issues/46) |

@@ -19,10 +19,10 @@ import sys
 project = 'multi-user'
 copyright = '2020, Swann Martinez'
-author = 'Swann Martinez, with contributions from Poochy'
+author = 'Swann Martinez, Poochy, Fabian'
 # The full version, including alpha/beta/rc tags
-release = '0.2.0'
+release = '0.5.0-develop'
 # -- General configuration ---------------------------------------------------

11 binary image files changed (6 modified, 5 added); file contents not shown.

@@ -108,36 +108,69 @@ Before starting make sure that you have access to the session IP address and por
 1. Fill in your user information
 --------------------------------
-Follow the user-info_ section for this step.
-----------------
-2. Network setup
-----------------
+Joining a server
+=======================
+--------------
+Network setup
+--------------
 In the network panel, select **JOIN**.
 The **join sub-panel** (see image below) allows you to configure your client to join a
 collaborative session which is already hosted.
-.. figure:: img/quickstart_join.png
+.. figure:: img/server_preset_image_normal_server.png
 :align: center
-:alt: Connect menu
+:width: 200px
-Connection panel
+Connection pannel
 Fill in the fields with your information:
 - **IP**: the host's IP address.
 - **Port**: the host's port number.
-- **Connect as admin**: connect yourself with **admin rights** (see :ref:`admin` ) to the session.
-.. Maybe something more explicit here
-.. note::
-Additional configuration settings can be found in the :ref:`advanced` section.
 Once you've configured every field, hit the button **CONNECT** to join the session !
 When the :ref:`session-status` is **ONLINE** you are online and ready to start co-creating.
+.. note::
+If you want to have **administrator rights** (see :ref:`admin` ) on the server, just enter the password created by the host in the **Connect as admin** section
+.. figure:: img/server_preset_image_admin.png
 :align: center
+:width: 200px
+Admin password
+---------------
+Server presets
+---------------
+You can save your server presets in a preset list below the 'JOIN' and 'HOST' buttons. This allows you to quickly access and manage your servers.
+To add a server, first enter the ip address and the port (plus the password if needed), then click on the + icon to add a name to your preset. To remove a server from the list, select it and click on the - icon.
+.. figure:: img/server_preset_exemple.gif
+:align: center
+:width: 200px
+.. warning:: Be careful, if you don't rename your new preset, or if it has the same name as an existing preset, the old preset will be overwritten.
+.. figure:: img/server_preset_image_report.png
+:align: center
+:width: 200px
+.. note::
+Two presets are already present when the addon is launched:
+- The 'localhost' preset, to host and join a local session quickly
+- The 'public session' preset, to join the public sessions of the multi-user server (official discord to participate : https://discord.gg/aBPvGws)
+.. note::
+Additional configuration settings can be found in the :ref:`advanced` section.
 .. note::
 When starting a **dedicated server**, the session status screen will take you to the **LOBBY**, awaiting an admin to start the session.
@@ -182,8 +215,10 @@ One of the most vital tools is the **Online user panel**. It lists all connected
 users' information including your own:
 * **Role** : if a user is an admin or a regular user.
-* **Location**: Where the user is actually working.
+* **Username** : Name of the user.
+* **Mode** : User's active editing mode (edit_mesh, paint,etc.).
 * **Frame**: When (on which frame) the user is working.
+* **Location**: Where the user is actually working.
 * **Ping**: user's connection delay in milliseconds
 .. figure:: img/quickstart_users.png
@@ -240,6 +275,7 @@ it draw users' related information in your viewport such as:
 * Username
 * User point of view
+* User active mode
 * User selection
 .. figure:: img/quickstart_presence.png

@@ -76,7 +76,7 @@ Hit 'Create a network'(see image below) and go to the network settings.
 :align: center
 :width: 450px
-Network page
+Admin password
 Now that the network is created, let's configure it.
@@ -212,14 +212,14 @@ You can run the dedicated server on any platform by following these steps:
 .. code-block:: bash
-replication.server
+replication.serve
 .. hint::
 You can also specify a custom **port** (-p), **timeout** (-t), **admin password** (-pwd), **log level (ERROR, WARNING, INFO or DEBUG)** (-l) and **log file** (-lf) with the following optional arguments
 .. code-block:: bash
-replication.server -p 5555 -pwd admin -t 5000 -l INFO -lf server.log
+replication.serve -p 5555 -pwd admin -t 5000 -l INFO -lf server.log
 Here, for example, a server is instantiated on port 5555, with password 'admin', a 5 second timeout, and logging enabled.
@@ -562,7 +562,7 @@ The default Docker image essentially runs the equivalent of:
 .. code-block:: bash
-replication.server -pwd admin -p 5555 -t 5000 -l DEBUG -lf multiuser_server.log
+replication.serve -pwd admin -p 5555 -t 5000 -l DEBUG -lf multiuser_server.log
 This means the server will be launched with 'admin' as the administrator password, run on ports 5555:5558, use a timeout of 5 seconds, verbose 'DEBUG' log level, and with log files written to 'multiuser_server.log'. See :ref:`cmd-line` for a description of optional parameters.
@@ -572,7 +572,7 @@ For example, I would like to launch my server with a different administrator pas
 .. code-block:: bash
-python3 -m replication.server -pwd supersecretpassword -p 5555 -t 3000 -l DEBUG -lf logname.log
+replication.serve -pwd supersecretpassword -p 5555 -t 3000 -l DEBUG -lf logname.log
 Now, my configuration should look like this:
@@ -691,7 +691,7 @@ We're finally ready to launch the server. Simply run:
 .. code-block:: bash
-python3 -m replication.server -p 5555 -pwd admin -t 5000 -l INFO -lf server.log
+replication.serve -p 5555 -pwd admin -t 5000 -l INFO -lf server.log
 See :ref:`cmd-line` for a description of optional parameters

@@ -19,7 +19,7 @@
 bl_info = {
 "name": "Multi-User",
 "author": "Swann Martinez",
-"version": (0, 3, 0),
+"version": (0, 4, 1),
 "description": "Enable real-time collaborative workflow inside blender",
 "blender": (2, 82, 0),
 "location": "3D View > Sidebar > Multi-User tab",
@@ -41,11 +41,12 @@ import bpy
 from bpy.app.handlers import persistent
 from . import environment
-from uuid import uuid4
+LIBS = os.path.dirname(os.path.abspath(__file__))+"/libs/replication"
 module_error_msg = "Insufficient rights to install the multi-user \
 dependencies, aunch blender with administrator rights."
 def register():
 # Setup logging policy
 logging.basicConfig(
@@ -53,18 +54,14 @@ def register():
 datefmt='%H:%M:%S',
 level=logging.INFO)
+for module_name in list(sys.modules.keys()):
+if 'replication' in module_name:
+del sys.modules[module_name]
+if LIBS not in sys.path:
+logging.info('Adding local modules dir to the path')
+sys.path.insert(0, LIBS)
 try:
+environment.register()
 from . import presence
 from . import operators
+from . import handlers
 from . import ui
+from . import icons
 from . import preferences
 from . import addon_updater_ops
@@ -72,10 +69,12 @@ def register():
 addon_updater_ops.register(bl_info)
 presence.register()
 operators.register()
+handlers.register()
 ui.register()
+icons.register()
 except ModuleNotFoundError as e:
 raise Exception(module_error_msg)
-logging.error(e)
+logging.error(module_error_msg)
 bpy.types.WindowManager.session = bpy.props.PointerProperty(
 type=preferences.SessionProps)
@@ -86,21 +85,28 @@ def register():
 type=preferences.SessionUser
 )
 bpy.types.WindowManager.user_index = bpy.props.IntProperty()
+bpy.types.WindowManager.server_index = bpy.props.IntProperty()
 bpy.types.TOPBAR_MT_file_import.append(operators.menu_func_import)
+bpy.types.TOPBAR_MT_file_export.append(operators.menu_func_export)
 def unregister():
 from . import presence
 from . import operators
+from . import handlers
 from . import ui
+from . import icons
 from . import preferences
 from . import addon_updater_ops
 bpy.types.TOPBAR_MT_file_import.remove(operators.menu_func_import)
+bpy.types.TOPBAR_MT_file_export.remove(operators.menu_func_export)
 presence.unregister()
 addon_updater_ops.unregister()
 ui.unregister()
+icons.unregister()
+handlers.unregister()
 operators.unregister()
 preferences.unregister()
@@ -108,3 +114,6 @@ def unregister():
 del bpy.types.ID.uuid
 del bpy.types.WindowManager.online_users
 del bpy.types.WindowManager.user_index
+del bpy.types.WindowManager.server_index
+environment.unregister()
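
The register() hunk above is the mechanism behind the "no administrator rights needed" note in the README and changelog: replication now ships inside the addon under libs/ (as a git submodule) and is put on sys.path before it is imported, instead of being pip-installed into Blender's Python. A minimal standalone sketch of that vendoring pattern, with illustrative paths that are not taken from the addon:

```python
import os
import sys

# Hypothetical addon layout: <addon_dir>/libs/replication holds the bundled checkout.
ADDON_DIR = os.path.dirname(os.path.abspath(__file__))
LIBS = os.path.join(ADDON_DIR, "libs", "replication")

# Forget any previously imported copy so the bundled one wins on (re)load.
for module_name in list(sys.modules.keys()):
    if "replication" in module_name:
        del sys.modules[module_name]

# Prepend the bundled path so it shadows any system-wide installation.
if LIBS not in sys.path:
    sys.path.insert(0, LIBS)

# After this point, `import replication` resolves to the vendored copy (if present).
```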

@@ -1688,10 +1688,7 @@ class GitlabEngine(object):
 # Could clash with tag names and if it does, it will
 # download TAG zip instead of branch zip to get
 # direct path, would need.
-return "{}{}{}".format(
-self.form_repo_url(updater),
-"/repository/archive.zip?sha=",
-branch)
+return f"https://gitlab.com/slumber/multi-user/-/jobs/artifacts/{branch}/download?job=build"
 def get_zip_url(self, sha, updater):
 return "{base}/repository/archive.zip?sha={sha}".format(

@@ -16,38 +16,40 @@
 # ##### END GPL LICENSE BLOCK #####
 import bpy
+from replication.protocol import ReplicatedDatablock
 __all__ = [
 'bl_object',
 'bl_mesh',
-# 'bl_camera',
+'bl_camera',
 'bl_collection',
-# 'bl_curve',
-# 'bl_gpencil',
-# 'bl_image',
-# 'bl_light',
+'bl_curve',
+'bl_gpencil',
+'bl_image',
+'bl_light',
 'bl_scene',
 'bl_material',
-# 'bl_library',
-# 'bl_armature',
-# 'bl_action',
-# 'bl_world',
-# 'bl_metaball',
-# 'bl_lattice',
-# 'bl_lightprobe',
-# 'bl_speaker',
-# 'bl_font',
-# 'bl_sound',
-# 'bl_file',
-# 'bl_sequencer',
-# 'bl_node_group',
-# 'bl_texture',
-# "bl_particle",
+'bl_armature',
+'bl_action',
+'bl_world',
+'bl_metaball',
+'bl_lattice',
+'bl_lightprobe',
+'bl_speaker',
+'bl_font',
+'bl_sound',
+'bl_file',
+'bl_node_group',
+'bl_texture',
+"bl_particle",
 ] # Order here defines execution order
-# if bpy.app.version[1] >= 91:
-# __all__.append('bl_volume')
+if bpy.app.version >= (2,91,0):
+__all__.append('bl_volume')
+from . import *
+def types_to_register():
+return __all__
 from replication.protocol import DataTranslationProtocol
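
The version gate above reflects the same pitfall the "239-blender-3-x-compatibility" commits address: bpy.app.version[1] >= 91 only looks at the minor version, so it breaks once the major version jumps to 3, whereas comparing the whole version tuple keeps working. A small illustration (plain Python, no Blender required):

```python
# bpy.app.version is a (major, minor, patch) tuple, e.g. (2, 93, 4) or (3, 0, 0).
for version in [(2, 83, 20), (2, 93, 4), (3, 0, 0)]:
    minor_only = version[1] >= 91        # wrongly False for (3, 0, 0)
    tuple_check = version >= (2, 91, 0)  # correctly True for (3, 0, 0)
    print(version, minor_only, tuple_check)
```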

@@ -24,14 +24,9 @@ from enum import Enum
 from .. import utils
 from .dump_anything import (
-Dumper,
-Loader,
-np_dump_collection,
-np_load_collection,
-remove_items_from_dict)
-from .bl_datablock import stamp_uuid
+Dumper, Loader, np_dump_collection, np_load_collection, remove_items_from_dict)
 from replication.protocol import ReplicatedDatablock
-from replication.objects import Node
+from .bl_datablock import resolve_datablock_from_uuid
 KEYFRAME = [
 'amplitude',
@@ -46,7 +41,6 @@ KEYFRAME = [
 'interpolation',
 ]
 def has_action(datablock):
 """ Check if the datablock datablock has actions
 """
@@ -75,8 +69,7 @@ def load_driver(target_datablock, src_driver):
 loader = Loader()
 drivers = target_datablock.animation_data.drivers
 src_driver_data = src_driver['driver']
-new_driver = drivers.new(
-src_driver['data_path'], index=src_driver['array_index'])
+new_driver = drivers.new(src_driver['data_path'], index=src_driver['array_index'])
 # Settings
 new_driver.driver.type = src_driver_data['type']
@@ -92,10 +85,10 @@
 for src_target in src_var_data['targets']:
 src_target_data = src_var_data['targets'][src_target]
-new_var.targets[src_target].id = utils.resolve_from_id(
-src_target_data['id'], src_target_data['id_type'])
-loader.load(
-new_var.targets[src_target], src_target_data)
+src_id = src_target_data.get('id')
+if src_id:
+new_var.targets[src_target].id = utils.resolve_from_id(src_target_data['id'], src_target_data['id_type'])
+loader.load(new_var.targets[src_target], src_target_data)
 # Fcurve
 new_fcurve = new_driver.keyframe_points
@@ -128,7 +121,6 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
 points = fcurve.keyframe_points
 fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)
 fcurve_data['keyframe_points'] = np_dump_collection(points, KEYFRAME)
 else: # Legacy method
 dumper = Dumper()
 fcurve_data["keyframe_points"] = []
@@ -138,6 +130,18 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
 dumper.dump(k)
 )
+if fcurve.modifiers:
+dumper = Dumper()
+dumper.exclude_filter = [
+'is_valid',
+'active'
+]
+dumped_modifiers = []
+for modfifier in fcurve.modifiers:
+dumped_modifiers.append(dumper.dump(modfifier))
+fcurve_data['modifiers'] = dumped_modifiers
 return fcurve_data
@@ -150,7 +154,7 @@ def load_fcurve(fcurve_data, fcurve):
 :type fcurve: bpy.types.FCurve
 """
 use_numpy = fcurve_data.get('use_numpy')
+loader = Loader()
 keyframe_points = fcurve.keyframe_points
 # Remove all keyframe points
@@ -195,37 +199,55 @@
 fcurve.update()
-def dump_animation_data(datablock, data):
+dumped_fcurve_modifiers = fcurve_data.get('modifiers', None)
+if dumped_fcurve_modifiers:
+# clear modifiers
+for fmod in fcurve.modifiers:
+fcurve.modifiers.remove(fmod)
+# Load each modifiers in order
+for modifier_data in dumped_fcurve_modifiers:
+modifier = fcurve.modifiers.new(modifier_data['type'])
+loader.load(modifier, modifier_data)
+elif fcurve.modifiers:
+for fmod in fcurve.modifiers:
+fcurve.modifiers.remove(fmod)
+def dump_animation_data(datablock):
+animation_data = {}
 if has_action(datablock):
-dumper = Dumper()
-dumper.include_filter = ['action']
-data['animation_data'] = dumper.dump(datablock.animation_data)
+animation_data['action'] = datablock.animation_data.action.uuid
 if has_driver(datablock):
-dumped_drivers = {'animation_data': {'drivers': []}}
+animation_data['drivers'] = []
 for driver in datablock.animation_data.drivers:
-dumped_drivers['animation_data']['drivers'].append(
-dump_driver(driver))
-data.update(dumped_drivers)
+animation_data['drivers'].append(dump_driver(driver))
+return animation_data
-def load_animation_data(data, datablock):
+def load_animation_data(animation_data, datablock):
 # Load animation data
-if 'animation_data' in data.keys():
+if animation_data:
 if datablock.animation_data is None:
 datablock.animation_data_create()
 for d in datablock.animation_data.drivers:
 datablock.animation_data.drivers.remove(d)
-if 'drivers' in data['animation_data']:
-for driver in data['animation_data']['drivers']:
+if 'drivers' in animation_data:
+for driver in animation_data['drivers']:
 load_driver(datablock, driver)
-if 'action' in data['animation_data']:
-datablock.animation_data.action = bpy.data.actions[data['animation_data']['action']]
+action = animation_data.get('action')
+if action:
+action = resolve_datablock_from_uuid(action, bpy.data.actions)
+datablock.animation_data.action = action
+elif datablock.animation_data.action:
+datablock.animation_data.action = None
 # Remove existing animation data if there is not more to load
 elif hasattr(datablock, 'animation_data') and datablock.animation_data:
 datablock.animation_data_clear()
@@ -239,6 +261,8 @@ def resolve_animation_dependencies(datablock):
 class BlAction(ReplicatedDatablock):
+use_delta = True
 bl_id = "actions"
 bl_class = bpy.types.Action
 bl_check_common = False
@@ -271,8 +295,6 @@
 @staticmethod
 def dump(datablock: object) -> dict:
-stamp_uuid(datablock)
 dumper = Dumper()
 dumper.exclude_filter = [
 'name_full',
@@ -295,3 +317,15 @@
 data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True))
 return data
+@staticmethod
+def resolve(data: dict) -> object:
+uuid = data.get('uuid')
+return resolve_datablock_from_uuid(uuid, bpy.data.actions)
+@staticmethod
+def resolve_deps(datablock: object) -> [object]:
+return []
+_type = bpy.types.Action
+_class = BlAction
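
BlAction.resolve() and load_animation_data() above both rely on resolve_datablock_from_uuid(), which is imported from the addon's bl_datablock module but is not part of this diff. A plausible minimal sketch, assuming every datablock carries the addon's custom uuid string property (registered on bpy.types.ID in __init__.py); this is an illustration, not the addon's actual implementation:

```python
import bpy

def resolve_datablock_from_uuid(uuid, collection):
    """Return the datablock in `collection` whose `uuid` property matches, else None."""
    if not uuid:
        return None
    for datablock in collection:
        # The addon registers a custom `uuid` StringProperty on every ID type.
        if getattr(datablock, 'uuid', None) == uuid:
            return datablock
    return None

# Example usage with a hypothetical uuid value replicated earlier:
action = resolve_datablock_from_uuid("8f2c-example-uuid", bpy.data.actions)
```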

@@ -22,8 +22,9 @@ import mathutils
 from .dump_anything import Loader, Dumper
 from .. import presence, operators, utils
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid
+from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
 def get_roll(bone: bpy.types.Bone) -> float:
 """ Compute the actuall roll of a pose bone
@@ -35,16 +36,20 @@
 return bone.AxisRollFromMatrix(bone.matrix_local.to_3x3())[1]
-class BlArmature(BlDatablock):
+class BlArmature(ReplicatedDatablock):
+use_delta = True
 bl_id = "armatures"
 bl_class = bpy.types.Armature
 bl_check_common = False
 bl_icon = 'ARMATURE_DATA'
 bl_reload_parent = False
+@staticmethod
 def construct(data: dict) -> object:
 return bpy.data.armatures.new(data["name"])
+@staticmethod
 def load(data: dict, datablock: object):
 # Load parent object
 parent_object = utils.find_from_attr(
@@ -55,7 +60,7 @@ class BlArmature(BlDatablock):
 if parent_object is None:
 parent_object = bpy.data.objects.new(
-data['user_name'], target)
+data['user_name'], datablock)
 parent_object.uuid = data['user']
 is_object_in_master = (
@@ -90,10 +95,10 @@
 bpy.ops.object.mode_set(mode='EDIT')
 for bone in data['bones']:
-if bone not in target.edit_bones:
-new_bone = target.edit_bones.new(bone)
+if bone not in datablock.edit_bones:
+new_bone = datablock.edit_bones.new(bone)
 else:
-new_bone = target.edit_bones[bone]
+new_bone = datablock.edit_bones[bone]
 bone_data = data['bones'].get(bone)
@@ -104,7 +109,7 @@
 new_bone.roll = bone_data['roll']
 if 'parent' in bone_data:
-new_bone.parent = target.edit_bones[data['bones']
+new_bone.parent = datablock.edit_bones[data['bones']
 [bone]['parent']]
 new_bone.use_connect = bone_data['use_connect']
@@ -119,9 +124,10 @@
 if 'EDIT' in current_mode:
 bpy.ops.object.mode_set(mode='EDIT')
-def dump(datablock: object) -> dict:
-assert(instance)
+load_animation_data(data.get('animation_data'), datablock)
+@staticmethod
+def dump(datablock: object) -> dict:
 dumper = Dumper()
 dumper.depth = 4
 dumper.include_filter = [
@@ -135,14 +141,14 @@
 'name',
 'layers',
 ]
-data = dumper.dump(instance)
+data = dumper.dump(datablock)
-for bone in instance.bones:
+for bone in datablock.bones:
 if bone.parent:
 data['bones'][bone.name]['parent'] = bone.parent.name
 # get the parent Object
 # TODO: Use id_data instead
-object_users = utils.get_datablock_users(instance)[0]
+object_users = utils.get_datablock_users(datablock)[0]
 data['user'] = object_users.uuid
 data['user_name'] = object_users.name
@@ -153,7 +159,25 @@
 data['user_scene'] = [
 item.name for item in container_users if isinstance(item, bpy.types.Scene)]
-for bone in instance.bones:
+for bone in datablock.bones:
 data['bones'][bone.name]['roll'] = get_roll(bone)
+data['animation_data'] = dump_animation_data(datablock)
 return data
+@staticmethod
+def resolve(data: dict) -> object:
+uuid = data.get('uuid')
+name = data.get('name')
+datablock = resolve_datablock_from_uuid(uuid, bpy.data.armatures)
+if datablock is None:
+datablock = bpy.data.armatures.get(name)
+return datablock
+@staticmethod
+def resolve_deps(datablock: object) -> [object]:
+return resolve_animation_dependencies(datablock)
+_type = bpy.types.Armature
+_class = BlArmature

View File

@ -20,47 +20,58 @@ import bpy
import mathutils import mathutils
from .dump_anything import Loader, Dumper from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlCamera(BlDatablock): class BlCamera(ReplicatedDatablock):
use_delta = True
bl_id = "cameras" bl_id = "cameras"
bl_class = bpy.types.Camera bl_class = bpy.types.Camera
bl_check_common = False bl_check_common = False
bl_icon = 'CAMERA_DATA' bl_icon = 'CAMERA_DATA'
bl_reload_parent = False bl_reload_parent = False
@staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
return bpy.data.cameras.new(data["name"]) return bpy.data.cameras.new(data["name"])
@staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(datablock, data)
dof_settings = data.get('dof') dof_settings = data.get('dof')
load_animation_data(data.get('animation_data'), datablock)
# DOF settings # DOF settings
if dof_settings: if dof_settings:
loader.load(target.dof, dof_settings) loader.load(datablock.dof, dof_settings)
background_images = data.get('background_images') background_images = data.get('background_images')
target.background_images.clear() datablock.background_images.clear()
# TODO: Use image uuid
if background_images: if background_images:
for img_name, img_data in background_images.items(): for img_name, img_data in background_images.items():
img_id = img_data.get('image') img_id = img_data.get('image')
if img_id: if img_id:
target_img = target.background_images.new() target_img = datablock.background_images.new()
target_img.image = bpy.data.images[img_id] target_img.image = bpy.data.images[img_id]
loader.load(target_img, img_data) loader.load(target_img, img_data)
-    def dump(datablock: object) -> dict:
-        assert(instance)
+                        img_user = img_data.get('image_user')
+                        if img_user:
+                            loader.load(target_img.image_user, img_user)
+        # TODO: background image support
+
+    @staticmethod
     def dump(datablock: object) -> dict:
dumper = Dumper() dumper = Dumper()
dumper.depth = 3 dumper.depth = 3
dumper.include_filter = [ dumper.include_filter = [
@ -101,15 +112,37 @@ class BlCamera(BlDatablock):
'scale', 'scale',
'use_flip_x', 'use_flip_x',
'use_flip_y', 'use_flip_y',
'image' 'image_user',
'image',
'frame_duration',
'frame_start',
'frame_offset',
'use_cyclic',
'use_auto_refresh'
] ]
return dumper.dump(instance) data = dumper.dump(datablock)
data['animation_data'] = dump_animation_data(datablock)
for index, image in enumerate(datablock.background_images):
if image.image_user:
data['background_images'][index]['image_user'] = dumper.dump(image.image_user)
return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.cameras)
@staticmethod @staticmethod
def resolve_deps(datablock: object) -> [object]: def resolve_deps(datablock: object) -> [object]:
deps = [] deps = []
for background in datablock.background_images: for background in datablock.background_images:
if background.image: if background.image:
deps.append(background.image) deps.append(background.image)
deps.extend(resolve_animation_dependencies(datablock))
return deps return deps
_type = bpy.types.Camera
_class = BlCamera


@ -19,10 +19,12 @@
import bpy import bpy
import mathutils import mathutils
from deepdiff import DeepDiff, Delta
from .. import utils from .. import utils
from .dump_anything import Loader, Dumper
from replication.protocol import ReplicatedDatablock from replication.protocol import ReplicatedDatablock
from replication.objects import Node from .dump_anything import Loader, Dumper
from .bl_datablock import resolve_datablock_from_uuid
def dump_collection_children(collection): def dump_collection_children(collection):
collection_children = [] collection_children = []
@ -87,15 +89,17 @@ class BlCollection(ReplicatedDatablock):
bl_class = bpy.types.Collection bl_class = bpy.types.Collection
bl_check_common = True bl_check_common = True
bl_reload_parent = False bl_reload_parent = False
use_delta = True
@staticmethod @staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
datablock = bpy.data.collections.new(node.data["name"]) instance = bpy.data.collections.new(data["name"])
return datablock return instance
@staticmethod @staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
data = node.data
loader = Loader() loader = Loader()
loader.load(datablock, data) loader.load(datablock, data)
@ -109,10 +113,9 @@ class BlCollection(ReplicatedDatablock):
# Keep other user from deleting collection object by flushing their history # Keep other user from deleting collection object by flushing their history
utils.flush_history() utils.flush_history()
@staticmethod @staticmethod
def dump(datablock: object) -> dict: def dump(datablock: object) -> dict:
assert(datablock)
dumper = Dumper() dumper = Dumper()
dumper.depth = 1 dumper.depth = 1
dumper.include_filter = [ dumper.include_filter = [
@ -129,9 +132,33 @@ class BlCollection(ReplicatedDatablock):
return data return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.collections)
@staticmethod @staticmethod
def resolve_deps(datablock: object) -> [object]: def resolve_deps(datablock: object) -> [object]:
return resolve_collection_dependencies(datablock) return resolve_collection_dependencies(datablock)
@staticmethod
def compute_delta(last_data: dict, current_data: dict) -> Delta:
diff_params = {
'ignore_order': True,
'report_repetition': True
}
delta_params = {
# 'mutate': True
}
return Delta(
DeepDiff(last_data,
current_data,
cache_size=5000,
**diff_params),
**delta_params)
_type = bpy.types.Collection _type = bpy.types.Collection
_class = BlCollection _class = BlCollection
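The new compute_delta above relies on the deepdiff package; the sketch below shows the intended round trip on placeholder dicts (the field names are illustrative stand-ins, not a real collection dump):

from deepdiff import DeepDiff, Delta

last_data = {'name': 'Collection', 'objects': ['Cube'], 'children': []}
current_data = {'name': 'Collection', 'objects': ['Cube', 'Light'], 'children': []}

delta = Delta(DeepDiff(last_data, current_data,
                       cache_size=5000,
                       ignore_order=True,
                       report_repetition=True))

# Applying the delta to the previous state reproduces the current one,
# so only the delta needs to travel over the wire.
patched = last_data + delta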


@ -21,13 +21,15 @@ import bpy.types as T
import mathutils import mathutils
import logging import logging
from .. import utils from ..utils import get_preferences
from .bl_datablock import BlDatablock from replication.protocol import ReplicatedDatablock
from .dump_anything import (Dumper, Loader, from .dump_anything import (Dumper, Loader,
np_load_collection, np_load_collection,
np_dump_collection) np_dump_collection)
from .bl_datablock import get_datablock_from_uuid
from .bl_material import dump_materials_slots, load_materials_slots from .bl_material import dump_materials_slots, load_materials_slots
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
SPLINE_BEZIER_POINT = [ SPLINE_BEZIER_POINT = [
# "handle_left_type", # "handle_left_type",
@ -134,25 +136,31 @@ SPLINE_METADATA = [
] ]
class BlCurve(BlDatablock): class BlCurve(ReplicatedDatablock):
use_delta = True
bl_id = "curves" bl_id = "curves"
bl_class = bpy.types.Curve bl_class = bpy.types.Curve
bl_check_common = False bl_check_common = False
bl_icon = 'CURVE_DATA' bl_icon = 'CURVE_DATA'
bl_reload_parent = False bl_reload_parent = False
@staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
return bpy.data.curves.new(data["name"], data["type"]) return bpy.data.curves.new(data["name"], data["type"])
@staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
loader = Loader() load_animation_data(data.get('animation_data'), datablock)
loader.load(target, data)
target.splines.clear() loader = Loader()
loader.load(datablock, data)
datablock.splines.clear()
# load splines # load splines
for spline in data['splines'].values(): for spline in data['splines'].values():
new_spline = target.splines.new(spline['type']) new_spline = datablock.splines.new(spline['type'])
# Load curve geometry data # Load curve geometry data
if new_spline.type == 'BEZIER': if new_spline.type == 'BEZIER':
@ -170,18 +178,17 @@ class BlCurve(BlDatablock):
loader.load(new_spline, spline) loader.load(new_spline, spline)
# MATERIAL SLOTS # MATERIAL SLOTS
src_materials = data.get('materials', None) src_materials = data.get('materials', None)
if src_materials: if src_materials:
load_materials_slots(src_materials, target.materials) load_materials_slots(src_materials, datablock.materials)
@staticmethod
def dump(datablock: object) -> dict: def dump(datablock: object) -> dict:
assert(instance)
dumper = Dumper() dumper = Dumper()
# Conflicting attributes # Conflicting attributes
# TODO: remove them with the NURBS support # TODO: remove them with the NURBS support
dumper.include_filter = CURVE_METADATA dumper.include_filter = CURVE_METADATA
dumper.exclude_filter = [ dumper.exclude_filter = [
'users', 'users',
'order_u', 'order_u',
@ -190,14 +197,16 @@ class BlCurve(BlDatablock):
'point_count_u', 'point_count_u',
'active_textbox' 'active_textbox'
] ]
if instance.use_auto_texspace: if datablock.use_auto_texspace:
dumper.exclude_filter.extend([ dumper.exclude_filter.extend([
'texspace_location', 'texspace_location',
'texspace_size']) 'texspace_size'])
data = dumper.dump(instance) data = dumper.dump(datablock)
data['animation_data'] = dump_animation_data(datablock)
data['splines'] = {} data['splines'] = {}
for index, spline in enumerate(instance.splines): for index, spline in enumerate(datablock.splines):
dumper.depth = 2 dumper.depth = 2
dumper.include_filter = SPLINE_METADATA dumper.include_filter = SPLINE_METADATA
spline_data = dumper.dump(spline) spline_data = dumper.dump(spline)
@ -211,19 +220,25 @@ class BlCurve(BlDatablock):
spline.bezier_points, SPLINE_BEZIER_POINT) spline.bezier_points, SPLINE_BEZIER_POINT)
data['splines'][index] = spline_data data['splines'][index] = spline_data
if isinstance(instance, T.SurfaceCurve): if isinstance(datablock, T.SurfaceCurve):
data['type'] = 'SURFACE' data['type'] = 'SURFACE'
elif isinstance(instance, T.TextCurve): elif isinstance(datablock, T.TextCurve):
data['type'] = 'FONT' data['type'] = 'FONT'
elif isinstance(instance, T.Curve): elif isinstance(datablock, T.Curve):
data['type'] = 'CURVE' data['type'] = 'CURVE'
data['materials'] = dump_materials_slots(instance.materials) data['materials'] = dump_materials_slots(datablock.materials)
return data return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.curves)
@staticmethod @staticmethod
def resolve_deps(datablock: object) -> [object]: def resolve_deps(datablock: object) -> [object]:
# TODO: resolve material
deps = [] deps = []
curve = datablock curve = datablock
@ -234,15 +249,19 @@ class BlCurve(BlDatablock):
curve.font_bold_italic, curve.font_bold_italic,
curve.font_italic]) curve.font_italic])
for material in curve.materials: for material in datablock.materials:
if material: if material:
deps.append(material) deps.append(material)
deps.extend(resolve_animation_dependencies(datablock))
return deps return deps
-    def diff(self):
-        if 'EDIT' in bpy.context.mode \
-                and not self.preferences.sync_flags.sync_during_editmode:
-            return False
-        else:
-            return super().diff()
+    @staticmethod
+    def needs_update(datablock: object, data: dict) -> bool:
+        return 'EDIT' not in bpy.context.mode \
+            or get_preferences().sync_flags.sync_during_editmode
_type = [bpy.types.Curve, bpy.types.TextCurve]
_class = BlCurve
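The change above inverts the old early-return: instead of suppressing the diff while in edit mode, the handler now reports whether an update is worth computing at all. A pure-Python sketch of that predicate (the mode string and preference flag are stand-ins for bpy.context.mode and the add-on preferences):

def needs_update_sketch(mode: str, sync_during_editmode: bool) -> bool:
    # Skip curve updates while the user is editing, unless syncing during
    # edit mode was explicitly enabled in the preferences.
    return 'EDIT' not in mode or sync_during_editmode

assert needs_update_sketch('OBJECT', False) is True
assert needs_update_sketch('EDIT_CURVE', False) is False
assert needs_update_sketch('EDIT_CURVE', True) is True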


@ -23,14 +23,10 @@ import bpy
import mathutils import mathutils
from replication.constants import DIFF_BINARY, DIFF_JSON, UP from replication.constants import DIFF_BINARY, DIFF_JSON, UP
from replication.protocol import ReplicatedDatablock from replication.protocol import ReplicatedDatablock
from replication.objects import Node
from uuid import uuid4
from .. import utils from .. import utils
from .dump_anything import Dumper, Loader from .dump_anything import Dumper, Loader
def get_datablock_from_uuid(uuid, default, ignore=[]): def get_datablock_from_uuid(uuid, default, ignore=[]):
if not uuid: if not uuid:
return default return default
@ -42,18 +38,8 @@ def get_datablock_from_uuid(uuid, default, ignore=[]):
return item return item
return default return default
-def resolve_datablock_from_root(node:Node, root)->object:
-    datablock_ref = utils.find_from_attr('uuid', node.uuid, root)
-
-    if not datablock_ref:
-        try:
-            datablock_ref = root[node.data['name']]
-        except Exception:
-            pass
-
-    return datablock_ref
-
-
-def stamp_uuid(datablock):
-    if not datablock.uuid:
-        datablock.uuid = str(uuid4())
+def resolve_datablock_from_uuid(uuid, bpy_collection):
+    for item in bpy_collection:
+        if getattr(item, 'uuid', None) == uuid:
+            return item
+    return None
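The resolve_datablock_from_uuid helper introduced above is a plain linear scan over a bpy collection for the add-on managed uuid attribute. The same logic on a stand-in collection, for reference:

class FakeDatablock:
    def __init__(self, name, uuid=None):
        self.name = name
        self.uuid = uuid


def resolve_datablock_from_uuid(uuid, collection):
    # Mirrors the helper above: first datablock whose uuid matches, else None.
    for item in collection:
        if getattr(item, 'uuid', None) == uuid:
            return item
    return None


cameras = [FakeDatablock('Camera', uuid='abc-123')]
assert resolve_datablock_from_uuid('abc-123', cameras).name == 'Camera'
assert resolve_datablock_from_uuid('missing', cameras) is None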


@ -19,7 +19,7 @@
import logging import logging
import os import os
import sys import sys
from pathlib import Path from pathlib import Path, WindowsPath, PosixPath
import bpy import bpy
import mathutils import mathutils
@ -27,6 +27,7 @@ from replication.constants import DIFF_BINARY, UP
from replication.protocol import ReplicatedDatablock from replication.protocol import ReplicatedDatablock
from .. import utils from .. import utils
from ..utils import get_preferences
from .dump_anything import Dumper, Loader from .dump_anything import Dumper, Loader
@ -58,33 +59,16 @@ class BlFile(ReplicatedDatablock):
bl_icon = 'FILE' bl_icon = 'FILE'
bl_reload_parent = True bl_reload_parent = True
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.instance = kwargs.get('instance', None)
-        if self.instance and not self.instance.exists():
-            raise FileNotFoundError(str(self.instance))
-        self.preferences = utils.get_preferences()
-
-    def resolve(self, construct = True):
-        self.instance = Path(get_filepath(self.data['name']))
-
-        file_exists = self.instance.exists()
-        if not file_exists:
-            logging.debug("File don't exist, loading it.")
-            self._load(self.data, self.instance)
-
-        return file_exists
-
-    def push(self, socket, identity=None, check_data=False):
-        super().push(socket, identity=None, check_data=False)
-        if self.preferences.clear_memory_filecache:
-            del self.data['file']
-
-    def dump(self, instance=None):
+    @staticmethod
+    def construct(data: dict) -> object:
+        return Path(get_filepath(data['name']))
+
+    @staticmethod
+    def resolve(data: dict) -> object:
+        return Path(get_filepath(data['name']))
+
+    @staticmethod
+    def dump(datablock: object) -> dict:
""" """
Read the file and return a dict as: Read the file and return a dict as:
{ {
@ -96,44 +80,62 @@ class BlFile(ReplicatedDatablock):
logging.info(f"Extracting file metadata") logging.info(f"Extracting file metadata")
data = { data = {
'name': self.instance.name, 'name': datablock.name,
} }
logging.info( logging.info(f"Reading {datablock.name} content: {datablock.stat().st_size} bytes")
f"Reading {self.instance.name} content: {self.instance.stat().st_size} bytes")
try: try:
file = open(self.instance, "rb") file = open(datablock, "rb")
data['file'] = file.read() data['file'] = file.read()
file.close() file.close()
except IOError: except IOError:
logging.warning(f"{self.instance} doesn't exist, skipping") logging.warning(f"{datablock} doesn't exist, skipping")
else: else:
file.close() file.close()
return data return data
def load(self, data, target): @staticmethod
def load(data: dict, datablock: object):
""" """
Writing the file Writing the file
""" """
try: try:
file = open(target, "wb") file = open(datablock, "wb")
file.write(data['file']) file.write(data['file'])
if self.preferences.clear_memory_filecache: if get_preferences().clear_memory_filecache:
del self.data['file'] del data['file']
except IOError: except IOError:
logging.warning(f"{target} doesn't exist, skipping") logging.warning(f"{datablock} doesn't exist, skipping")
else: else:
file.close() file.close()
-    def diff(self):
-        if self.preferences.clear_memory_filecache:
-            return False
-        else:
-            memory_size = sys.getsizeof(self.data['file'])-33
-            disk_size = self.instance.stat().st_size
-            return memory_size != disk_size
+    @staticmethod
+    def resolve_deps(datablock: object) -> [object]:
+        return []
+
+    @staticmethod
+    def needs_update(datablock: object, data:dict)-> bool:
+        if get_preferences().clear_memory_filecache:
+            return False
+        else:
+            if not datablock:
+                return None
+
+            if not data:
+                return True
+
+            memory_size = sys.getsizeof(data['file'])-33
+            disk_size = datablock.stat().st_size
+
+            if memory_size != disk_size:
+                return True
+            else:
+                return False
_type = [WindowsPath, PosixPath]
_class = BlFile
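BlFile.needs_update above compares the cached buffer against the file on disk by size only. A hedged sketch of that comparison; the -33 offset assumes CPython's fixed overhead for an empty bytes object, so sys.getsizeof of the payload can be matched against the on-disk byte count:

import sys
from pathlib import Path


def file_changed(cached: dict, path: Path) -> bool:
    # No cached payload yet: always refresh.
    if not cached or 'file' not in cached:
        return True
    # sys.getsizeof(b'') is 33 on 64-bit CPython, hence the offset.
    memory_size = sys.getsizeof(cached['file']) - 33
    return memory_size != path.stat().st_size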


@ -22,18 +22,19 @@ from pathlib import Path
import bpy import bpy
from .bl_datablock import BlDatablock from replication.protocol import ReplicatedDatablock
from .bl_file import get_filepath, ensure_unpacked from .bl_file import get_filepath, ensure_unpacked
from .dump_anything import Dumper, Loader from .dump_anything import Dumper, Loader
from .bl_datablock import resolve_datablock_from_uuid
class BlFont(ReplicatedDatablock):
class BlFont(BlDatablock):
bl_id = "fonts" bl_id = "fonts"
bl_class = bpy.types.VectorFont bl_class = bpy.types.VectorFont
bl_check_common = False bl_check_common = False
bl_icon = 'FILE_FONT' bl_icon = 'FILE_FONT'
bl_reload_parent = False bl_reload_parent = False
@staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
filename = data.get('filename') filename = data.get('filename')
@ -42,25 +43,29 @@ class BlFont(BlDatablock):
else: else:
return bpy.data.fonts.load(get_filepath(filename)) return bpy.data.fonts.load(get_filepath(filename))
def load(self, data, target): @staticmethod
def load(data: dict, datablock: object):
pass pass
def dump(self, instance=None): @staticmethod
if instance.filepath == '<builtin>': def dump(datablock: object) -> dict:
if datablock.filepath == '<builtin>':
filename = '<builtin>' filename = '<builtin>'
else: else:
filename = Path(instance.filepath).name filename = Path(datablock.filepath).name
if not filename: if not filename:
raise FileExistsError(instance.filepath) raise FileExistsError(datablock.filepath)
return { return {
'filename': filename, 'filename': filename,
'name': instance.name 'name': datablock.name
} }
def diff(self): @staticmethod
return False def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.fonts)
@staticmethod @staticmethod
def resolve_deps(datablock: object) -> [object]: def resolve_deps(datablock: object) -> [object]:
@ -71,3 +76,10 @@ class BlFont(BlDatablock):
deps.append(Path(bpy.path.abspath(datablock.filepath))) deps.append(Path(bpy.path.abspath(datablock.filepath)))
return deps return deps
@staticmethod
def needs_update(datablock: object, data:dict)-> bool:
return False
_type = bpy.types.VectorFont
_class = BlFont


@ -24,10 +24,12 @@ from .dump_anything import (Dumper,
Loader, Loader,
np_dump_collection, np_dump_collection,
np_load_collection) np_load_collection)
from .bl_datablock import BlDatablock from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
# GPencil data api is structured as it follow: from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
# GP-Object --> GP-Layers --> GP-Frames --> GP-Strokes --> GP-Stroke-Points from ..utils import get_preferences
from ..timers import is_annotating
from .bl_material import load_materials_slots, dump_materials_slots
STROKE_POINT = [ STROKE_POINT = [
'co', 'co',
@ -51,12 +53,12 @@ STROKE = [
"uv_translation", "uv_translation",
"vertex_color_fill", "vertex_color_fill",
] ]
if bpy.app.version[1] >= 91: if bpy.app.version >= (2,91,0):
STROKE.append('use_cyclic') STROKE.append('use_cyclic')
else: else:
STROKE.append('draw_cyclic') STROKE.append('draw_cyclic')
if bpy.app.version[1] >= 83: if bpy.app.version >= (2,83,0):
STROKE_POINT.append('vertex_color') STROKE_POINT.append('vertex_color')
def dump_stroke(stroke): def dump_stroke(stroke):
@ -64,36 +66,9 @@ def dump_stroke(stroke):
:param stroke: target grease pencil stroke :param stroke: target grease pencil stroke
:type stroke: bpy.types.GPencilStroke :type stroke: bpy.types.GPencilStroke
:return: dict :return: (p_count, p_data)
""" """
return (len(stroke.points), np_dump_collection(stroke.points, STROKE_POINT))
assert(stroke)
dumper = Dumper()
dumper.include_filter = [
"aspect",
"display_mode",
"draw_cyclic",
"end_cap_mode",
"hardeness",
"line_width",
"material_index",
"start_cap_mode",
"uv_rotation",
"uv_scale",
"uv_translation",
"vertex_color_fill",
]
dumped_stroke = dumper.dump(stroke)
# Stoke points
p_count = len(stroke.points)
dumped_stroke['p_count'] = p_count
dumped_stroke['points'] = np_dump_collection(stroke.points, STROKE_POINT)
# TODO: uv_factor, uv_rotation
return dumped_stroke
def load_stroke(stroke_data, stroke): def load_stroke(stroke_data, stroke):
@ -106,12 +81,13 @@ def load_stroke(stroke_data, stroke):
""" """
assert(stroke and stroke_data) assert(stroke and stroke_data)
stroke.points.add(stroke_data["p_count"]) stroke.points.add(stroke_data[0])
np_load_collection(stroke_data['points'], stroke.points, STROKE_POINT) np_load_collection(stroke_data[1], stroke.points, STROKE_POINT)
# HACK: Temporary fix to trigger a BKE_gpencil_stroke_geometry_update to # HACK: Temporary fix to trigger a BKE_gpencil_stroke_geometry_update to
# fix fill issues # fix fill issues
stroke.uv_scale = stroke_data["uv_scale"] stroke.uv_scale = 1.0
def dump_frame(frame): def dump_frame(frame):
""" Dump a grease pencil frame to a dict """ Dump a grease pencil frame to a dict
@ -145,12 +121,15 @@ def load_frame(frame_data, frame):
assert(frame and frame_data) assert(frame and frame_data)
# Load stroke points
for stroke_data in frame_data['strokes_points']: for stroke_data in frame_data['strokes_points']:
target_stroke = frame.strokes.new() target_stroke = frame.strokes.new()
load_stroke(stroke_data, target_stroke) load_stroke(stroke_data, target_stroke)
# Load stroke metadata
np_load_collection(frame_data['strokes'], frame.strokes, STROKE) np_load_collection(frame_data['strokes'], frame.strokes, STROKE)
def dump_layer(layer): def dump_layer(layer):
""" Dump a grease pencil layer """ Dump a grease pencil layer
@ -167,7 +146,6 @@ def dump_layer(layer):
'opacity', 'opacity',
'channel_color', 'channel_color',
'color', 'color',
# 'thickness', #TODO: enabling only for annotation
'tint_color', 'tint_color',
'tint_factor', 'tint_factor',
'vertex_paint_opacity', 'vertex_paint_opacity',
@ -184,7 +162,7 @@ def dump_layer(layer):
'hide', 'hide',
'annotation_hide', 'annotation_hide',
'lock', 'lock',
# 'lock_frame', 'lock_frame',
# 'lock_material', # 'lock_material',
# 'use_mask_layer', # 'use_mask_layer',
'use_lights', 'use_lights',
@ -192,12 +170,13 @@ def dump_layer(layer):
'select', 'select',
'show_points', 'show_points',
'show_in_front', 'show_in_front',
# 'thickness'
# 'parent', # 'parent',
# 'parent_type', # 'parent_type',
# 'parent_bone', # 'parent_bone',
# 'matrix_inverse', # 'matrix_inverse',
] ]
if layer.id_data.is_annotation: if layer.thickness != 0:
dumper.include_filter.append('thickness') dumper.include_filter.append('thickness')
dumped_layer = dumper.dump(layer) dumped_layer = dumper.dump(layer)
@ -228,68 +207,83 @@ def load_layer(layer_data, layer):
load_frame(frame_data, target_frame) load_frame(frame_data, target_frame)
-class BlGpencil(BlDatablock):
+def layer_changed(datablock: object, data: dict) -> bool:
+    if datablock.layers.active and \
+            datablock.layers.active.info != data["active_layers"]:
+        return True
+    else:
+        return False
+
+
+def frame_changed(data: dict) -> bool:
+    return bpy.context.scene.frame_current != data["eval_frame"]
+
+
+class BlGpencil(ReplicatedDatablock):
bl_id = "grease_pencils" bl_id = "grease_pencils"
bl_class = bpy.types.GreasePencil bl_class = bpy.types.GreasePencil
bl_check_common = False bl_check_common = False
bl_icon = 'GREASEPENCIL' bl_icon = 'GREASEPENCIL'
bl_reload_parent = False bl_reload_parent = False
@staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
return bpy.data.grease_pencils.new(data["name"]) return bpy.data.grease_pencils.new(data["name"])
@staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
-        target.materials.clear()
-        if "materials" in data.keys():
-            for mat in data['materials']:
-                target.materials.append(bpy.data.materials[mat])
+        # MATERIAL SLOTS
+        src_materials = data.get('materials', None)
+        if src_materials:
+            load_materials_slots(src_materials, datablock.materials)
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(datablock, data)
# TODO: reuse existing layer # TODO: reuse existing layer
for layer in target.layers: for layer in datablock.layers:
target.layers.remove(layer) datablock.layers.remove(layer)
if "layers" in data.keys(): if "layers" in data.keys():
for layer in data["layers"]: for layer in data["layers"]:
layer_data = data["layers"].get(layer) layer_data = data["layers"].get(layer)
# if layer not in target.layers.keys(): # if layer not in datablock.layers.keys():
target_layer = target.layers.new(data["layers"][layer]["info"]) target_layer = datablock.layers.new(data["layers"][layer]["info"])
# else: # else:
# target_layer = target.layers[layer] # target_layer = target.layers[layer]
# target_layer.clear() # target_layer.clear()
load_layer(layer_data, target_layer) load_layer(layer_data, target_layer)
target.layers.update() datablock.layers.update()
@staticmethod
def dump(datablock: object) -> dict: def dump(datablock: object) -> dict:
assert(instance)
dumper = Dumper() dumper = Dumper()
dumper.depth = 2 dumper.depth = 2
dumper.include_filter = [ dumper.include_filter = [
'materials',
'name', 'name',
'zdepth_offset', 'zdepth_offset',
'stroke_thickness_space', 'stroke_thickness_space',
'pixel_factor', 'pixel_factor',
'stroke_depth_order' 'stroke_depth_order'
] ]
data = dumper.dump(instance) data = dumper.dump(datablock)
data['materials'] = dump_materials_slots(datablock.materials)
data['layers'] = {} data['layers'] = {}
for layer in instance.layers: for layer in datablock.layers:
data['layers'][layer.info] = dump_layer(layer) data['layers'][layer.info] = dump_layer(layer)
data["active_layers"] = instance.layers.active.info data["active_layers"] = datablock.layers.active.info if datablock.layers.active else "None"
data["eval_frame"] = bpy.context.scene.frame_current data["eval_frame"] = bpy.context.scene.frame_current
return data return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.grease_pencils)
@staticmethod @staticmethod
def resolve_deps(datablock: object) -> [object]: def resolve_deps(datablock: object) -> [object]:
deps = [] deps = []
@ -299,17 +293,13 @@ class BlGpencil(BlDatablock):
return deps return deps
-    def layer_changed(self):
-        return self.instance.layers.active.info != self.data["active_layers"]
-
-    def frame_changed(self):
-        return bpy.context.scene.frame_current != self.data["eval_frame"]
-
-    def diff(self):
-        if self.layer_changed() \
-                or self.frame_changed() \
-                or bpy.context.mode == 'OBJECT' \
-                or self.preferences.sync_flags.sync_during_editmode:
-            return super().diff()
-        else:
-            return False
+    @staticmethod
+    def needs_update(datablock: object, data: dict) -> bool:
+        return bpy.context.mode == 'OBJECT' \
+            or layer_changed(datablock, data) \
+            or frame_changed(data) \
+            or get_preferences().sync_flags.sync_during_editmode \
+            or is_annotating(bpy.context)
+
+_type = bpy.types.GreasePencil
+_class = BlGpencil
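Several hunks above (grease pencil strokes, lightprobes, geometry nodes) replace bpy.app.version[1] checks with full tuple comparisons so that Blender 3.x is handled correctly. The reasoning, reduced to plain tuples:

# bpy.app.version is a (major, minor, patch) tuple.
blender_2_93 = (2, 93, 0)
blender_3_0 = (3, 0, 0)

# Old style: only the minor number is checked, so Blender 3.0 (minor == 0)
# is wrongly treated as older than 2.83/2.91/2.93.
assert not (blender_3_0[1] >= 91)

# New style: tuples compare lexicographically, so 3.0 correctly passes.
assert blender_3_0 >= (2, 91, 0)
assert blender_2_93 >= (2, 91, 0)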


@ -24,9 +24,12 @@ import bpy
import mathutils import mathutils
from .. import utils from .. import utils
from .bl_datablock import BlDatablock from replication.protocol import ReplicatedDatablock
from .dump_anything import Dumper, Loader from .dump_anything import Dumper, Loader
from .bl_file import get_filepath, ensure_unpacked from .bl_file import get_filepath, ensure_unpacked
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
format_to_ext = { format_to_ext = {
'BMP': 'bmp', 'BMP': 'bmp',
@ -48,13 +51,14 @@ format_to_ext = {
} }
class BlImage(BlDatablock): class BlImage(ReplicatedDatablock):
bl_id = "images" bl_id = "images"
bl_class = bpy.types.Image bl_class = bpy.types.Image
bl_check_common = False bl_check_common = False
bl_icon = 'IMAGE_DATA' bl_icon = 'IMAGE_DATA'
bl_reload_parent = False bl_reload_parent = False
@staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
return bpy.data.images.new( return bpy.data.images.new(
name=data['name'], name=data['name'],
@ -62,18 +66,22 @@ class BlImage(BlDatablock):
height=data['size'][1] height=data['size'][1]
) )
-    def load(self, data, target):
+    @staticmethod
+    def load(data: dict, datablock: object):
         loader = Loader()
-        loader.load(data, target)
-        target.source = 'FILE'
-        target.filepath_raw = get_filepath(data['filename'])
-        target.colorspace_settings.name = data["colorspace_settings"]["name"]
+        loader.load(datablock, data)
+        # datablock.name = data.get('name')
+        datablock.source = 'FILE'
+        datablock.filepath_raw = get_filepath(data['filename'])
+        color_space_name = data.get("colorspace")
+
+        if color_space_name:
+            datablock.colorspace_settings.name = color_space_name

-    def dump(self, instance=None):
-        assert(instance)
-        filename = Path(instance.filepath).name
+    @staticmethod
+    def dump(datablock: object) -> dict:
+        filename = Path(datablock.filepath).name
data = { data = {
"filename": filename "filename": filename
@ -83,23 +91,18 @@ class BlImage(BlDatablock):
dumper.depth = 2 dumper.depth = 2
dumper.include_filter = [ dumper.include_filter = [
"name", "name",
# 'source',
'size', 'size',
'height', 'alpha_mode']
'alpha', data.update(dumper.dump(datablock))
'float_buffer', data['colorspace'] = datablock.colorspace_settings.name
'alpha_mode',
'colorspace_settings']
data.update(dumper.dump(instance))
return data return data
-    def diff(self):
-        if self.instance.is_dirty:
-            self.instance.save()
-
-        if self.instance and (self.instance.name != self.data['name']):
-            return True
-        else:
-            return False
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.images)
@staticmethod @staticmethod
def resolve_deps(datablock: object) -> [object]: def resolve_deps(datablock: object) -> [object]:
@ -122,3 +125,13 @@ class BlImage(BlDatablock):
deps.append(Path(bpy.path.abspath(datablock.filepath))) deps.append(Path(bpy.path.abspath(datablock.filepath)))
return deps return deps
@staticmethod
def needs_update(datablock: object, data:dict)-> bool:
if datablock.is_dirty:
datablock.save()
return True
_type = bpy.types.Image
_class = BlImage
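BlImage.needs_update above flushes unsaved edits before reporting a change, since the image payload is replicated through its file on disk (resolve_deps appends the image's filepath). In isolation the logic is simply:

def image_needs_update(image) -> bool:
    # bpy.types.Image exposes is_dirty and save(); a dirty image is written
    # back to its filepath so the file-based replication sees the new pixels.
    if image.is_dirty:
        image.save()
    return True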


@ -20,33 +20,41 @@ import bpy
import mathutils import mathutils
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
from .bl_datablock import BlDatablock from replication.protocol import ReplicatedDatablock
from replication.exception import ContextError from replication.exception import ContextError
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
POINT = ['co', 'weight_softbody', 'co_deform'] POINT = ['co', 'weight_softbody', 'co_deform']
class BlLattice(BlDatablock): class BlLattice(ReplicatedDatablock):
use_delta = True
bl_id = "lattices" bl_id = "lattices"
bl_class = bpy.types.Lattice bl_class = bpy.types.Lattice
bl_check_common = False bl_check_common = False
bl_icon = 'LATTICE_DATA' bl_icon = 'LATTICE_DATA'
bl_reload_parent = False bl_reload_parent = False
@staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
return bpy.data.lattices.new(data["name"]) return bpy.data.lattices.new(data["name"])
@staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
if target.is_editmode: load_animation_data(data.get('animation_data'), datablock)
if datablock.is_editmode:
raise ContextError("lattice is in edit mode") raise ContextError("lattice is in edit mode")
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(datablock, data)
np_load_collection(data['points'], target.points, POINT) np_load_collection(data['points'], datablock.points, POINT)
@staticmethod
def dump(datablock: object) -> dict: def dump(datablock: object) -> dict:
if instance.is_editmode: if datablock.is_editmode:
raise ContextError("lattice is in edit mode") raise ContextError("lattice is in edit mode")
dumper = Dumper() dumper = Dumper()
@ -62,9 +70,20 @@ class BlLattice(BlDatablock):
'interpolation_type_w', 'interpolation_type_w',
'use_outside' 'use_outside'
] ]
data = dumper.dump(instance) data = dumper.dump(datablock)
data['points'] = np_dump_collection(instance.points, POINT)
data['points'] = np_dump_collection(datablock.points, POINT)
data['animation_data'] = dump_animation_data(datablock)
return data return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.lattices)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
return resolve_animation_dependencies(datablock)
_type = bpy.types.Lattice
_class = BlLattice


@ -20,25 +20,34 @@ import bpy
import mathutils import mathutils
from .dump_anything import Loader, Dumper from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlLight(BlDatablock): class BlLight(ReplicatedDatablock):
use_delta = True
bl_id = "lights" bl_id = "lights"
bl_class = bpy.types.Light bl_class = bpy.types.Light
bl_check_common = False bl_check_common = False
bl_icon = 'LIGHT_DATA' bl_icon = 'LIGHT_DATA'
bl_reload_parent = False bl_reload_parent = False
@staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
return bpy.data.lights.new(data["name"], data["type"]) instance = bpy.data.lights.new(data["name"], data["type"])
instance.uuid = data.get("uuid")
return instance
@staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(datablock, data)
load_animation_data(data.get('animation_data'), datablock)
@staticmethod
def dump(datablock: object) -> dict: def dump(datablock: object) -> dict:
assert(instance)
dumper = Dumper() dumper = Dumper()
dumper.depth = 3 dumper.depth = 3
dumper.include_filter = [ dumper.include_filter = [
@ -67,9 +76,23 @@ class BlLight(BlDatablock):
'spot_size', 'spot_size',
'spot_blend' 'spot_blend'
] ]
data = dumper.dump(instance) data = dumper.dump(datablock)
data['animation_data'] = dump_animation_data(datablock)
return data return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.lights)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = [bpy.types.SpotLight, bpy.types.PointLight, bpy.types.AreaLight, bpy.types.SunLight]
_class = BlLight
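BlLight above maps several concrete light types to a single handler through the module-level _type list. How such pairs might be collected into a lookup table is sketched below; this registry is hypothetical and only illustrates why _type may be either a single type or a list:

import types


def build_registry(implementations):
    registry = {}
    for impl in implementations:
        bl_types = impl._type if isinstance(impl._type, (list, tuple)) else [impl._type]
        for bl_type in bl_types:
            registry[bl_type] = impl._class
    return registry


# Stand-in for a module exposing _type/_class, as the files above do.
fake_module = types.SimpleNamespace(_type=[int, float], _class='BlLightHandler')
assert build_registry([fake_module]) == {int: 'BlLightHandler', float: 'BlLightHandler'}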


@ -21,31 +21,35 @@ import mathutils
import logging import logging
from .dump_anything import Loader, Dumper from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock
+from replication.protocol import ReplicatedDatablock
+from .bl_datablock import resolve_datablock_from_uuid

-class BlLightprobe(BlDatablock):
+class BlLightprobe(ReplicatedDatablock):
+    use_delta = True
bl_id = "lightprobes" bl_id = "lightprobes"
bl_class = bpy.types.LightProbe bl_class = bpy.types.LightProbe
bl_check_common = False bl_check_common = False
bl_icon = 'LIGHTPROBE_GRID' bl_icon = 'LIGHTPROBE_GRID'
bl_reload_parent = False bl_reload_parent = False
@staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type'] type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type']
# See https://developer.blender.org/D6396 # See https://developer.blender.org/D6396
if bpy.app.version[1] >= 83: if bpy.app.version >= (2,83,0):
return bpy.data.lightprobes.new(data["name"], type) return bpy.data.lightprobes.new(data["name"], type)
else: else:
logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396") logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
@staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(datablock, data)
@staticmethod
def dump(datablock: object) -> dict: def dump(datablock: object) -> dict:
assert(instance) if bpy.app.version < (2,83,0):
if bpy.app.version[1] < 83:
logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396") logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
dumper = Dumper() dumper = Dumper()
@ -71,7 +75,16 @@ class BlLightprobe(BlDatablock):
'visibility_blur' 'visibility_blur'
] ]
return dumper.dump(instance) return dumper.dump(datablock)
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.lightprobes)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
return []
_type = bpy.types.LightProbe
_class = BlLightprobe


@ -24,9 +24,10 @@ import re
from uuid import uuid4 from uuid import uuid4
from .dump_anything import Loader, Dumper from .dump_anything import Loader, Dumper
from .bl_datablock import get_datablock_from_uuid, stamp_uuid
from replication.protocol import ReplicatedDatablock from replication.protocol import ReplicatedDatablock
from replication.objects import Node
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
NODE_SOCKET_INDEX = re.compile('\[(\d*)\]') NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM'] IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM']
@ -36,7 +37,7 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
:arg node_data: dumped node data :arg node_data: dumped node data
:type node_data: dict :type node_data: dict
:arg node_tree: datablock node_tree :arg node_tree: target node_tree
:type node_tree: bpy.types.NodeTree :type node_tree: bpy.types.NodeTree
""" """
loader = Loader() loader = Loader()
@ -47,7 +48,11 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
node_tree_uuid = node_data.get('node_tree_uuid', None) node_tree_uuid = node_data.get('node_tree_uuid', None)
if image_uuid and not target_node.image: if image_uuid and not target_node.image:
target_node.image = get_datablock_from_uuid(image_uuid, None) image = resolve_datablock_from_uuid(image_uuid, bpy.data.images)
if image is None:
logging.error(f"Fail to find material image from uuid {image_uuid}")
else:
target_node.image = image
if node_tree_uuid: if node_tree_uuid:
target_node.node_tree = get_datablock_from_uuid(node_tree_uuid, None) target_node.node_tree = get_datablock_from_uuid(node_tree_uuid, None)
@ -90,7 +95,7 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
def dump_node(node: bpy.types.ShaderNode) -> dict: def dump_node(node: bpy.types.ShaderNode) -> dict:
""" Dump a single node to a dict """ Dump a single node to a dict
:arg node: datablock node :arg node: target node
:type node: bpy.types.Node :type node: bpy.types.Node
:retrun: dict :retrun: dict
""" """
@ -119,8 +124,7 @@ def dump_node(node: bpy.types.ShaderNode) -> dict:
"show_preview", "show_preview",
"show_texture", "show_texture",
"outputs", "outputs",
"width_hidden", "width_hidden"
"image"
] ]
dumped_node = node_dumper.dump(node) dumped_node = node_dumper.dump(node)
@ -251,7 +255,7 @@ def dump_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict:
def dump_node_tree_sockets(sockets: bpy.types.Collection) -> dict: def dump_node_tree_sockets(sockets: bpy.types.Collection) -> dict:
""" dump sockets of a shader_node_tree """ dump sockets of a shader_node_tree
:arg target_node_tree: datablock node_tree :arg target_node_tree: target node_tree
:type target_node_tree: bpy.types.NodeTree :type target_node_tree: bpy.types.NodeTree
:arg socket_id: socket identifer :arg socket_id: socket identifer
:type socket_id: str :type socket_id: str
@ -274,7 +278,7 @@ def load_node_tree_sockets(sockets: bpy.types.Collection,
sockets_data: dict): sockets_data: dict):
""" load sockets of a shader_node_tree """ load sockets of a shader_node_tree
:arg target_node_tree: datablock node_tree :arg target_node_tree: target node_tree
:type target_node_tree: bpy.types.NodeTree :type target_node_tree: bpy.types.NodeTree
:arg socket_id: socket identifer :arg socket_id: socket identifer
:type socket_id: str :type socket_id: str
@ -302,7 +306,7 @@ def load_node_tree(node_tree_data: dict, target_node_tree: bpy.types.ShaderNodeT
:arg node_tree_data: dumped node data :arg node_tree_data: dumped node data
:type node_tree_data: dict :type node_tree_data: dict
:arg target_node_tree: datablock node_tree :arg target_node_tree: target node_tree
:type target_node_tree: bpy.types.NodeTree :type target_node_tree: bpy.types.NodeTree
""" """
# TODO: load only required nodes # TODO: load only required nodes
@ -375,7 +379,7 @@ def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_
:arg src_materials: dumped material collection (ex: object.materials) :arg src_materials: dumped material collection (ex: object.materials)
:type src_materials: list of tuples (uuid, name) :type src_materials: list of tuples (uuid, name)
:arg dst_materials: datablock material collection pointer :arg dst_materials: target material collection pointer
:type dst_materials: bpy.types.bpy_prop_collection :type dst_materials: bpy.types.bpy_prop_collection
""" """
# MATERIAL SLOTS # MATERIAL SLOTS
@ -383,20 +387,22 @@ def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_
for mat_uuid, mat_name in src_materials: for mat_uuid, mat_name in src_materials:
mat_ref = None mat_ref = None
if mat_uuid is not None: if mat_uuid:
mat_ref = get_datablock_from_uuid(mat_uuid, None) mat_ref = get_datablock_from_uuid(mat_uuid, None)
else: else:
mat_ref = bpy.data.materials[mat_name] mat_ref = bpy.data.materials[mat_name]
dst_materials.append(mat_ref) dst_materials.append(mat_ref)
class BlMaterial(ReplicatedDatablock): class BlMaterial(ReplicatedDatablock):
use_delta = True
bl_id = "materials" bl_id = "materials"
bl_class = bpy.types.Material bl_class = bpy.types.Material
bl_check_common = False bl_check_common = False
bl_icon = 'MATERIAL_DATA' bl_icon = 'MATERIAL_DATA'
bl_reload_parent = False bl_reload_parent = False
bl_reload_child = True
@staticmethod @staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
@ -404,7 +410,6 @@ class BlMaterial(ReplicatedDatablock):
@staticmethod @staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
data = data
loader = Loader() loader = Loader()
is_grease_pencil = data.get('is_grease_pencil') is_grease_pencil = data.get('is_grease_pencil')
@ -421,11 +426,11 @@ class BlMaterial(ReplicatedDatablock):
datablock.use_nodes = True datablock.use_nodes = True
load_node_tree(data['node_tree'], datablock.node_tree) load_node_tree(data['node_tree'], datablock.node_tree)
load_animation_data(data.get('nodes_animation_data'), datablock.node_tree)
load_animation_data(data.get('animation_data'), datablock)
@staticmethod @staticmethod
def dump(datablock: object) -> dict: def dump(datablock: object) -> dict:
stamp_uuid(datablock)
mat_dumper = Dumper() mat_dumper = Dumper()
mat_dumper.depth = 2 mat_dumper.depth = 2
mat_dumper.include_filter = [ mat_dumper.include_filter = [
@ -490,18 +495,27 @@ class BlMaterial(ReplicatedDatablock):
data['grease_pencil'] = gp_mat_dumper.dump(datablock.grease_pencil) data['grease_pencil'] = gp_mat_dumper.dump(datablock.grease_pencil)
elif datablock.use_nodes: elif datablock.use_nodes:
data['node_tree'] = dump_node_tree(datablock.node_tree) data['node_tree'] = dump_node_tree(datablock.node_tree)
data['nodes_animation_data'] = dump_animation_data(datablock.node_tree)
data['animation_data'] = dump_animation_data(datablock)
return data return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.materials)
@staticmethod @staticmethod
def resolve_deps(datablock: object) -> [object]: def resolve_deps(datablock: object) -> [object]:
# TODO: resolve node group deps
deps = [] deps = []
if datablock.use_nodes: if datablock.use_nodes:
deps.extend(get_node_tree_dependencies(datablock.node_tree)) deps.extend(get_node_tree_dependencies(datablock.node_tree))
deps.extend(resolve_animation_dependencies(datablock.node_tree))
deps.extend(resolve_animation_dependencies(datablock))
return deps return deps
_type = bpy.types.Material _type = bpy.types.Material
_class = BlMaterial _class = BlMaterial
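The dump_materials_slots/load_materials_slots helpers used throughout these files exchange material slots as (uuid, name) pairs, preferring the uuid and falling back to a name lookup. A stand-alone illustration with plain dicts in place of bpy.data.materials:

def load_slot(entry, by_uuid: dict, by_name: dict):
    mat_uuid, mat_name = entry
    if mat_uuid:
        return by_uuid.get(mat_uuid)
    return by_name.get(mat_name)


slots = [('uuid-1', 'Metal'), (None, 'Glass')]
by_uuid = {'uuid-1': 'Metal datablock'}
by_name = {'Glass': 'Glass datablock'}
assert [load_slot(s, by_uuid, by_name) for s in slots] == ['Metal datablock', 'Glass datablock']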


@ -22,20 +22,16 @@ import mathutils
import logging import logging
import numpy as np import numpy as np
from .dump_anything import (Dumper, from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
Loader,
np_load_collection_primitives,
np_dump_collection_primitive,
np_load_collection, np_dump_collection)
from replication.constants import DIFF_BINARY from replication.constants import DIFF_BINARY
from replication.exception import ContextError from replication.exception import ContextError
from replication.protocol import ReplicatedDatablock from replication.protocol import ReplicatedDatablock
from replication.objects import Node
from .bl_datablock import get_datablock_from_uuid, stamp_uuid from .bl_datablock import get_datablock_from_uuid
from .bl_material import dump_materials_slots, load_materials_slots from .bl_material import dump_materials_slots, load_materials_slots
from ..utils import get_preferences
from ..preferences import get_preferences from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
VERTICE = ['co'] VERTICE = ['co']
@ -58,8 +54,9 @@ POLYGON = [
'material_index', 'material_index',
] ]
class BlMesh(ReplicatedDatablock): class BlMesh(ReplicatedDatablock):
use_delta = True
bl_id = "meshes" bl_id = "meshes"
bl_class = bpy.types.Mesh bl_class = bpy.types.Mesh
bl_check_common = False bl_check_common = False
@ -68,17 +65,15 @@ class BlMesh(ReplicatedDatablock):
@staticmethod @staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
datablock = bpy.data.meshes.new(data["name"]) return bpy.data.meshes.new(data.get("name"))
datablock.uuid = data['uuid']
return datablock
@staticmethod @staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
data = data
if not datablock or datablock.is_editmode: if not datablock or datablock.is_editmode:
raise ContextError raise ContextError
else: else:
load_animation_data(data.get('animation_data'), datablock)
loader = Loader() loader = Loader()
loader.load(datablock, data) loader.load(datablock, data)
@ -100,7 +95,7 @@ class BlMesh(ReplicatedDatablock):
np_load_collection(data['vertices'], datablock.vertices, VERTICE) np_load_collection(data['vertices'], datablock.vertices, VERTICE)
np_load_collection(data['edges'], datablock.edges, EDGE) np_load_collection(data['edges'], datablock.edges, EDGE)
np_load_collection(data['loops'], datablock.loops, LOOP) np_load_collection(data['loops'], datablock.loops, LOOP)
np_load_collection(data["polygons"], datablock.polygons, POLYGON) np_load_collection(data["polygons"],datablock.polygons, POLYGON)
# UV Layers # UV Layers
if 'uv_layers' in data.keys(): if 'uv_layers' in data.keys():
@ -109,10 +104,10 @@ class BlMesh(ReplicatedDatablock):
datablock.uv_layers.new(name=layer) datablock.uv_layers.new(name=layer)
np_load_collection_primitives( np_load_collection_primitives(
datablock.uv_layers[layer].data, datablock.uv_layers[layer].data,
'uv', 'uv',
data["uv_layers"][layer]['data']) data["uv_layers"][layer]['data'])
# Vertex color # Vertex color
if 'vertex_colors' in data.keys(): if 'vertex_colors' in data.keys():
for color_layer in data['vertex_colors']: for color_layer in data['vertex_colors']:
@ -120,8 +115,8 @@ class BlMesh(ReplicatedDatablock):
datablock.vertex_colors.new(name=color_layer) datablock.vertex_colors.new(name=color_layer)
np_load_collection_primitives( np_load_collection_primitives(
datablock.vertex_colors[color_layer].data, datablock.vertex_colors[color_layer].data,
'color', 'color',
data["vertex_colors"][color_layer]['data']) data["vertex_colors"][color_layer]['data'])
datablock.validate() datablock.validate()
@ -129,8 +124,6 @@ class BlMesh(ReplicatedDatablock):
@staticmethod @staticmethod
def dump(datablock: object) -> dict: def dump(datablock: object) -> dict:
stamp_uuid(datablock)
if (datablock.is_editmode or bpy.context.mode == "SCULPT") and not get_preferences().sync_flags.sync_during_editmode: if (datablock.is_editmode or bpy.context.mode == "SCULPT") and not get_preferences().sync_flags.sync_during_editmode:
raise ContextError("Mesh is in edit mode") raise ContextError("Mesh is in edit mode")
mesh = datablock mesh = datablock
@ -138,7 +131,6 @@ class BlMesh(ReplicatedDatablock):
dumper = Dumper() dumper = Dumper()
dumper.depth = 1 dumper.depth = 1
dumper.include_filter = [ dumper.include_filter = [
'uuid'
'name', 'name',
'use_auto_smooth', 'use_auto_smooth',
'auto_smooth_angle', 'auto_smooth_angle',
@ -148,6 +140,8 @@ class BlMesh(ReplicatedDatablock):
data = dumper.dump(mesh) data = dumper.dump(mesh)
data['animation_data'] = dump_animation_data(datablock)
# VERTICES # VERTICES
data["vertex_count"] = len(mesh.vertices) data["vertex_count"] = len(mesh.vertices)
data["vertices"] = np_dump_collection(mesh.vertices, VERTICE) data["vertices"] = np_dump_collection(mesh.vertices, VERTICE)
@ -169,21 +163,19 @@ class BlMesh(ReplicatedDatablock):
data['uv_layers'] = {} data['uv_layers'] = {}
for layer in mesh.uv_layers: for layer in mesh.uv_layers:
data['uv_layers'][layer.name] = {} data['uv_layers'][layer.name] = {}
data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive( data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive(layer.data, 'uv')
layer.data, 'uv')
# Vertex color # Vertex color
if mesh.vertex_colors: if mesh.vertex_colors:
data['vertex_colors'] = {} data['vertex_colors'] = {}
for color_map in mesh.vertex_colors: for color_map in mesh.vertex_colors:
data['vertex_colors'][color_map.name] = {} data['vertex_colors'][color_map.name] = {}
data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive( data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')
color_map.data, 'color')
# Materials # Materials
data['materials'] = dump_materials_slots(datablock.materials) data['materials'] = dump_materials_slots(datablock.materials)
return data return data
@staticmethod @staticmethod
def resolve_deps(datablock: object) -> [object]: def resolve_deps(datablock: object) -> [object]:
deps = [] deps = []
@ -192,14 +184,19 @@ class BlMesh(ReplicatedDatablock):
if material: if material:
deps.append(material) deps.append(material)
deps.extend(resolve_animation_dependencies(datablock))
return deps return deps
-    def diff(self):
-        if 'EDIT' in bpy.context.mode \
-                and not get_preferences().sync_flags.sync_during_editmode:
-            return False
-        else:
-            return super().diff()
+    @staticmethod
+    def resolve(data: dict) -> object:
+        uuid = data.get('uuid')
+        return resolve_datablock_from_uuid(uuid, bpy.data.meshes)
+
+    @staticmethod
+    def needs_update(datablock: object, data: dict) -> bool:
+        return ('EDIT' not in bpy.context.mode and bpy.context.mode != 'SCULPT') \
+            or get_preferences().sync_flags.sync_during_editmode
_type = bpy.types.Mesh _type = bpy.types.Mesh
_class = BlMesh _class = BlMesh
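The np_dump_collection/np_load_collection helpers that BlMesh leans on bulk-copy per-element attributes instead of looping in Python. A simplified sketch of that idea using Blender's foreach_get/foreach_set (the real helpers in dump_anything also handle dtypes and multi-attribute packing):

def dump_flat_attribute(collection, attr: str, components: int) -> list:
    # e.g. mesh.vertices with attr='co' and components=3
    buffer = [0.0] * (len(collection) * components)
    collection.foreach_get(attr, buffer)
    return buffer


def load_flat_attribute(collection, attr: str, buffer: list):
    collection.foreach_set(attr, buffer)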


@ -23,7 +23,9 @@ from .dump_anything import (
Dumper, Loader, np_dump_collection_primitive, np_load_collection_primitives, Dumper, Loader, np_dump_collection_primitive, np_load_collection_primitives,
np_dump_collection, np_load_collection) np_dump_collection, np_load_collection)
from .bl_datablock import BlDatablock from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
ELEMENT = [ ELEMENT = [
@ -62,29 +64,35 @@ def load_metaball_elements(elements_data, elements):
np_load_collection(elements_data, elements, ELEMENT) np_load_collection(elements_data, elements, ELEMENT)
class BlMetaball(BlDatablock): class BlMetaball(ReplicatedDatablock):
use_delta = True
bl_id = "metaballs" bl_id = "metaballs"
bl_class = bpy.types.MetaBall bl_class = bpy.types.MetaBall
bl_check_common = False bl_check_common = False
bl_icon = 'META_BALL' bl_icon = 'META_BALL'
bl_reload_parent = False bl_reload_parent = False
@staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
return bpy.data.metaballs.new(data["name"]) return bpy.data.metaballs.new(data["name"])
@staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
loader = Loader() load_animation_data(data.get('animation_data'), datablock)
loader.load(target, data)
target.elements.clear() loader = Loader()
loader.load(datablock, data)
datablock.elements.clear()
for mtype in data["elements"]['type']: for mtype in data["elements"]['type']:
new_element = target.elements.new() new_element = datablock.elements.new()
load_metaball_elements(data['elements'], target.elements) load_metaball_elements(data['elements'], datablock.elements)
@staticmethod
def dump(datablock: object) -> dict: def dump(datablock: object) -> dict:
assert(instance)
dumper = Dumper() dumper = Dumper()
dumper.depth = 1 dumper.depth = 1
dumper.include_filter = [ dumper.include_filter = [
@ -98,7 +106,24 @@ class BlMetaball(BlDatablock):
'texspace_size' 'texspace_size'
] ]
data = dumper.dump(instance) data = dumper.dump(datablock)
data['elements'] = dump_metaball_elements(instance.elements) data['animation_data'] = dump_animation_data(datablock)
data['elements'] = dump_metaball_elements(datablock.elements)
return data return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.metaballs)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = bpy.types.MetaBall
_class = BlMetaball


@ -20,27 +20,45 @@ import bpy
import mathutils import mathutils
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
from .bl_datablock import BlDatablock from replication.protocol import ReplicatedDatablock
from .bl_material import (dump_node_tree, from .bl_material import (dump_node_tree,
load_node_tree, load_node_tree,
get_node_tree_dependencies) get_node_tree_dependencies)
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlNodeGroup(ReplicatedDatablock):
use_delta = True
class BlNodeGroup(BlDatablock):
bl_id = "node_groups" bl_id = "node_groups"
bl_class = bpy.types.NodeTree bl_class = bpy.types.NodeTree
bl_check_common = False bl_check_common = False
bl_icon = 'NODETREE' bl_icon = 'NODETREE'
bl_reload_parent = False bl_reload_parent = False
@staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
return bpy.data.node_groups.new(data["name"], data["type"]) return bpy.data.node_groups.new(data["name"], data["type"])
@staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
load_node_tree(data, target) load_node_tree(data, datablock)
@staticmethod
def dump(datablock: object) -> dict: def dump(datablock: object) -> dict:
return dump_node_tree(instance) return dump_node_tree(datablock)
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.node_groups)
@staticmethod @staticmethod
def resolve_deps(datablock: object) -> [object]: def resolve_deps(datablock: object) -> [object]:
return get_node_tree_dependencies(datablock) deps = []
deps.extend(get_node_tree_dependencies(datablock))
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = [bpy.types.ShaderNodeTree, bpy.types.GeometryNodeTree]
_class = BlNodeGroup


@ -21,17 +21,12 @@ import re
import bpy import bpy
import mathutils import mathutils
from replication.exception import ContextError from replication.exception import ContextError
from replication.objects import Node
from replication.protocol import ReplicatedDatablock from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_datablock import get_datablock_from_uuid, stamp_uuid
from .bl_action import (load_animation_data,
dump_animation_data,
resolve_animation_dependencies)
from ..preferences import get_preferences
from .bl_datablock import get_datablock_from_uuid
from .bl_material import IGNORED_SOCKETS from .bl_material import IGNORED_SOCKETS
from ..utils import get_preferences
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
from .dump_anything import ( from .dump_anything import (
Dumper, Dumper,
Loader, Loader,
@ -45,21 +40,40 @@ SKIN_DATA = [
'use_root' 'use_root'
] ]
+SHAPEKEY_BLOCK_ATTR = [
+    'mute',
+    'value',
+    'slider_min',
+    'slider_max',
+]
+
-if bpy.app.version[1] >= 93:
+if bpy.app.version >= (2,93,0):
     SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
 else:
     SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str)
     logging.warning("Geometry node Float parameter not supported in \
                     blender 2.92.")

-def get_node_group_inputs(node_group):
-    inputs = []
+def get_node_group_properties_identifiers(node_group):
+    props_ids = []
+    # Inputs
     for inpt in node_group.inputs:
         if inpt.type in IGNORED_SOCKETS:
             continue
         else:
-            inputs.append(inpt)
-    return inputs
+            props_ids.append((inpt.identifier, inpt.type))
+            if inpt.type in ['INT', 'VALUE', 'BOOLEAN', 'RGBA', 'VECTOR']:
+                props_ids.append((f"{inpt.identifier}_attribute_name",'STR'))
+                props_ids.append((f"{inpt.identifier}_use_attribute", 'BOOL'))
+
+    for outpt in node_group.outputs:
+        if outpt.type not in IGNORED_SOCKETS and outpt.type in ['INT', 'VALUE', 'BOOLEAN', 'RGBA', 'VECTOR']:
+            props_ids.append((f"{outpt.identifier}_attribute_name", 'STR'))
+
+    return props_ids
# return [inpt.identifer for inpt in node_group.inputs if inpt.type not in IGNORED_SOCKETS] # return [inpt.identifer for inpt in node_group.inputs if inpt.type not in IGNORED_SOCKETS]
@ -90,6 +104,7 @@ def dump_physics(target: bpy.types.Object)->dict:
return physics_data return physics_data
def load_physics(dumped_settings: dict, target: bpy.types.Object): def load_physics(dumped_settings: dict, target: bpy.types.Object):
""" Load all physics settings from a given object excluding modifier """ Load all physics settings from a given object excluding modifier
related physics settings (such as softbody, cloth, dynapaint and fluid) related physics settings (such as softbody, cloth, dynapaint and fluid)
@ -115,30 +130,37 @@ def load_physics(dumped_settings: dict, target: bpy.types.Object):
loader.load(target.rigid_body_constraint, dumped_settings['rigid_body_constraint']) loader.load(target.rigid_body_constraint, dumped_settings['rigid_body_constraint'])
elif target.rigid_body_constraint: elif target.rigid_body_constraint:
bpy.ops.rigidbody.constraint_remove({"object": target}) bpy.ops.rigidbody.constraint_remove({"object": target})
def dump_modifier_geometry_node_props(modifier: bpy.types.Modifier) -> list:
    """ Dump geometry node modifier input properties

    :arg modifier: geometry node modifier to dump
    :type modifier: bpy.type.Modifier
    """
    dumped_props = []

    for prop_value, prop_type in get_node_group_properties_identifiers(modifier.node_group):
        try:
            prop_value = modifier[prop_value]
        except KeyError as e:
            logging.error(f"fail to dump geometry node modifier property : {prop_value} ({e})")
        else:
            dump = None
            if isinstance(prop_value, bpy.types.ID):
                dump = prop_value.uuid
            elif isinstance(prop_value, SUPPORTED_GEOMETRY_NODE_PARAMETERS):
                dump = prop_value
            elif hasattr(prop_value, 'to_list'):
                dump = prop_value.to_list()

            dumped_props.append((dump, prop_type))
            # logging.info(prop_value)

    return dumped_props


def load_modifier_geometry_node_props(dumped_modifier: dict, target_modifier: bpy.types.Modifier):
    """ Load geometry node modifier inputs

    :arg dumped_modifier: source dumped modifier to load
    :type dumped_modifier: dict
    :type target_modifier: bpy.type.Modifier
    """
    for input_index, inpt in enumerate(get_node_group_properties_identifiers(target_modifier.node_group)):
        dumped_value, dumped_type = dumped_modifier['props'][input_index]
        input_value = target_modifier[inpt[0]]
        if dumped_type in ['INT', 'VALUE', 'STR']:
            logging.info(f"{inpt[0]}/{dumped_value}")
            target_modifier[inpt[0]] = dumped_value
        elif dumped_type in ['RGBA', 'VECTOR']:
            for index in range(len(input_value)):
                input_value[index] = dumped_value[index]
        elif dumped_type in ['COLLECTION', 'OBJECT', 'IMAGE', 'TEXTURE', 'MATERIAL']:
            target_modifier[inpt[0]] = get_datablock_from_uuid(dumped_value, None)
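
To make the pairing concrete, here is a hedged illustration with invented socket identifiers and values; only the shape of the data comes from the two helpers above:

props_ids = [
    ("Input_2", 'VALUE'),                  # exposed float input
    ("Input_2_attribute_name", 'STR'),
    ("Input_2_use_attribute", 'BOOL'),
    ("Input_5", 'MATERIAL'),               # exposed material input
]

dumped_modifier = {
    'props': [
        (0.5, 'VALUE'),
        ("", 'STR'),
        (False, 'BOOL'),
        ("8f2c4e...-material-uuid", 'MATERIAL'),   # ID datablocks are dumped by uuid
    ]
}
# load_modifier_geometry_node_props() walks both lists by index, so the order
# returned by get_node_group_properties_identifiers() has to stay stable
# between the dumping and the loading peer.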
def load_pose(target_bone, data): def load_pose(target_bone, data):
@ -167,40 +189,45 @@ def load_pose(target_bone, data):
def find_data_from_name(name=None): def find_data_from_name(name=None):
datablock = None instance = None
if not name: if not name:
pass pass
elif name in bpy.data.meshes.keys(): elif name in bpy.data.meshes.keys():
datablock = bpy.data.meshes[name] instance = bpy.data.meshes[name]
elif name in bpy.data.lights.keys(): elif name in bpy.data.lights.keys():
datablock = bpy.data.lights[name] instance = bpy.data.lights[name]
elif name in bpy.data.cameras.keys(): elif name in bpy.data.cameras.keys():
datablock = bpy.data.cameras[name] instance = bpy.data.cameras[name]
elif name in bpy.data.curves.keys(): elif name in bpy.data.curves.keys():
datablock = bpy.data.curves[name] instance = bpy.data.curves[name]
elif name in bpy.data.metaballs.keys(): elif name in bpy.data.metaballs.keys():
datablock = bpy.data.metaballs[name] instance = bpy.data.metaballs[name]
elif name in bpy.data.armatures.keys(): elif name in bpy.data.armatures.keys():
datablock = bpy.data.armatures[name] instance = bpy.data.armatures[name]
elif name in bpy.data.grease_pencils.keys(): elif name in bpy.data.grease_pencils.keys():
datablock = bpy.data.grease_pencils[name] instance = bpy.data.grease_pencils[name]
elif name in bpy.data.curves.keys(): elif name in bpy.data.curves.keys():
datablock = bpy.data.curves[name] instance = bpy.data.curves[name]
elif name in bpy.data.lattices.keys(): elif name in bpy.data.lattices.keys():
datablock = bpy.data.lattices[name] instance = bpy.data.lattices[name]
elif name in bpy.data.speakers.keys(): elif name in bpy.data.speakers.keys():
datablock = bpy.data.speakers[name] instance = bpy.data.speakers[name]
elif name in bpy.data.lightprobes.keys(): elif name in bpy.data.lightprobes.keys():
# Only supported since 2.83 # Only supported since 2.83
if bpy.app.version[1] >= 83: if bpy.app.version >= (2,83,0):
datablock = bpy.data.lightprobes[name] instance = bpy.data.lightprobes[name]
else: else:
logging.warning( logging.warning(
"Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396") "Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
elif bpy.app.version[1] >= 91 and name in bpy.data.volumes.keys(): elif bpy.app.version >= (2,91,0) and name in bpy.data.volumes.keys():
# Only supported since 2.91 # Only supported since 2.91
datablock = bpy.data.volumes[name] instance = bpy.data.volumes[name]
return datablock return instance
def load_data(object, name):
logging.info("loading data")
pass
def _is_editmode(object: bpy.types.Object) -> bool:

@@ -239,15 +266,15 @@ def find_geometry_nodes_dependencies(modifiers: bpy.types.bpy_prop_collection) -
    for mod in modifiers:
        if mod.type == 'NODES' and mod.node_group:
            dependencies.append(mod.node_group)
            for inpt, inpt_type in get_node_group_properties_identifiers(mod.node_group):
                inpt_value = mod.get(inpt)
                # Skip 'COLLECTION' and 'OBJECT' inputs to avoid circular dependencies
                if inpt_type in ['IMAGE', 'TEXTURE', 'MATERIAL'] and inpt_value:
                    dependencies.append(inpt_value)

    return dependencies
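
A hedged illustration of the rule above, with an invented object name:

obj = bpy.data.objects["Scatter"]          # hypothetical geometry-nodes user
deps = find_geometry_nodes_dependencies(obj.modifiers)
# -> [the node group itself, plus any Image/Texture/Material input values]
# OBJECT and COLLECTION inputs are skipped on purpose to avoid circular
# dependencies between the object and the data it instances.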
def dump_vertex_groups(src_object: bpy.types.Object) -> dict: def dump_vertex_groups(src_object: bpy.types.Object) -> dict:
""" Dump object's vertex groups """ Dump object's vertex groups
@ -293,43 +320,225 @@ def load_vertex_groups(dumped_vertex_groups: dict, target_object: bpy.types.Obje
vertex_group.add([index], weight, 'REPLACE') vertex_group.add([index], weight, 'REPLACE')
def dump_shape_keys(target_key: bpy.types.Key)->dict:
""" Dump the target shape_keys datablock to a dict using numpy
:param dumped_key: target key datablock
:type dumped_key: bpy.types.Key
:return: dict
"""
dumped_key_blocks = []
dumper = Dumper()
dumper.include_filter = [
'name',
'mute',
'value',
'slider_min',
'slider_max',
]
for key in target_key.key_blocks:
dumped_key_block = dumper.dump(key)
dumped_key_block['data'] = np_dump_collection(key.data, ['co'])
dumped_key_block['relative_key'] = key.relative_key.name
dumped_key_blocks.append(dumped_key_block)
return {
'reference_key': target_key.reference_key.name,
'use_relative': target_key.use_relative,
'key_blocks': dumped_key_blocks,
'animation_data': dump_animation_data(target_key)
}
def load_shape_keys(dumped_shape_keys: dict, target_object: bpy.types.Object):
""" Load the target shape_keys datablock to a dict using numpy
:param dumped_key: src key data
:type dumped_key: bpy.types.Key
:param target_object: object used to load the shapekeys data onto
:type target_object: bpy.types.Object
"""
loader = Loader()
# Remove existing ones
target_object.shape_key_clear()
# Create keys and load vertices coords
dumped_key_blocks = dumped_shape_keys.get('key_blocks')
for dumped_key_block in dumped_key_blocks:
key_block = target_object.shape_key_add(name=dumped_key_block['name'])
loader.load(key_block, dumped_key_block)
np_load_collection(dumped_key_block['data'], key_block.data, ['co'])
# Load relative key after all
for dumped_key_block in dumped_key_blocks:
relative_key_name = dumped_key_block.get('relative_key')
key_name = dumped_key_block.get('name')
target_keyblock = target_object.data.shape_keys.key_blocks[key_name]
relative_key = target_object.data.shape_keys.key_blocks[relative_key_name]
target_keyblock.relative_key = relative_key
# Shape keys animation data
anim_data = dumped_shape_keys.get('animation_data')
if anim_data:
load_animation_data(anim_data, target_object.data.shape_keys)
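
The resulting payload has the following shape; every value below is invented and only the keys mirror dump_shape_keys() above:

dumped_shape_keys = {
    'reference_key': 'Basis',
    'use_relative': True,
    'animation_data': None,
    'key_blocks': [
        {
            'name': 'Smile',
            'mute': False,
            'value': 0.3,
            'slider_min': 0.0,
            'slider_max': 1.0,
            'relative_key': 'Basis',
            'data': ...,   # vertex coordinates packed by np_dump_collection()
        },
    ],
}
# load_shape_keys() recreates every key block, restores the packed
# coordinates, then re-links relative keys in a second pass.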
def dump_modifiers(modifiers: bpy.types.bpy_prop_collection)->dict:
""" Dump all modifiers of a modifier collection into a dict
:param modifiers: modifiers
:type modifiers: bpy.types.bpy_prop_collection
:return: dict
"""
dumped_modifiers = []
dumper = Dumper()
dumper.depth = 1
dumper.exclude_filter = ['is_active']
for modifier in modifiers:
dumped_modifier = dumper.dump(modifier)
# hack to dump geometry nodes inputs
if modifier.type == 'NODES':
dumped_modifier['props'] = dump_modifier_geometry_node_props(modifier)
elif modifier.type == 'PARTICLE_SYSTEM':
dumper.exclude_filter = [
"is_edited",
"is_editable",
"is_global_hair"
]
dumped_modifier['particle_system'] = dumper.dump(modifier.particle_system)
dumped_modifier['particle_system']['settings_uuid'] = modifier.particle_system.settings.uuid
elif modifier.type in ['SOFT_BODY', 'CLOTH']:
dumped_modifier['settings'] = dumper.dump(modifier.settings)
elif modifier.type == 'UV_PROJECT':
dumped_modifier['projectors'] =[p.object.name for p in modifier.projectors if p and p.object]
dumped_modifiers.append(dumped_modifier)
return dumped_modifiers
def dump_constraints(constraints: bpy.types.bpy_prop_collection)->list:
"""Dump all constraints to a list
:param constraints: constraints
:type constraints: bpy.types.bpy_prop_collection
:return: dict
"""
dumper = Dumper()
dumper.depth = 2
dumper.include_filter = None
dumped_constraints = []
for constraint in constraints:
dumped_constraints.append(dumper.dump(constraint))
return dumped_constraints
def load_constraints(dumped_constraints: list, constraints: bpy.types.bpy_prop_collection):
""" Load dumped constraints
:param dumped_constraints: list of constraints to load
:type dumped_constraints: list
:param constraints: constraints
:type constraints: bpy.types.bpy_prop_collection
"""
loader = Loader()
constraints.clear()
for dumped_constraint in dumped_constraints:
constraint_type = dumped_constraint.get('type')
new_constraint = constraints.new(constraint_type)
loader.load(new_constraint, dumped_constraint)
def load_modifiers(dumped_modifiers: list, modifiers: bpy.types.bpy_prop_collection):
""" Dump all modifiers of a modifier collection into a dict
:param dumped_modifiers: list of modifiers to load
:type dumped_modifiers: list
:param modifiers: modifiers
:type modifiers: bpy.types.bpy_prop_collection
"""
loader = Loader()
modifiers.clear()
for dumped_modifier in dumped_modifiers:
name = dumped_modifier.get('name')
mtype = dumped_modifier.get('type')
loaded_modifier = modifiers.new(name, mtype)
loader.load(loaded_modifier, dumped_modifier)
if loaded_modifier.type == 'NODES':
load_modifier_geometry_node_props(dumped_modifier, loaded_modifier)
elif loaded_modifier.type == 'PARTICLE_SYSTEM':
default = loaded_modifier.particle_system.settings
dumped_particles = dumped_modifier['particle_system']
loader.load(loaded_modifier.particle_system, dumped_particles)
settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
if settings:
loaded_modifier.particle_system.settings = settings
# Hack to remove the default generated particle settings
if not default.uuid:
bpy.data.particles.remove(default)
elif loaded_modifier.type in ['SOFT_BODY', 'CLOTH']:
loader.load(loaded_modifier.settings, dumped_modifier['settings'])
elif loaded_modifier.type == 'UV_PROJECT':
for projector_index, projector_object in enumerate(dumped_modifier['projectors']):
target_object = bpy.data.objects.get(projector_object)
if target_object:
loaded_modifier.projectors[projector_index].object = target_object
else:
logging.error("Could't load projector target object {projector_object}")
def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_prop_collection):
""" Load modifiers custom data not managed by the dump_anything loader
:param dumped_modifiers: modifiers to load
:type dumped_modifiers: dict
:param modifiers: target modifiers collection
:type modifiers: bpy.types.bpy_prop_collection
"""
loader = Loader()
for modifier in modifiers:
dumped_modifier = dumped_modifiers.get(modifier.name)
class BlObject(ReplicatedDatablock):
    use_delta = True

    bl_id = "objects"
    bl_class = bpy.types.Object
    bl_check_common = False
    bl_icon = 'OBJECT_DATA'
    bl_reload_parent = False

    @staticmethod
    def construct(data: dict) -> object:
        instance = None
        # TODO: refactoring
        object_name = data.get("name")
        data_uuid = data.get("data_uuid")
        data_id = data.get("data")
        data_type = data.get("type")

        object_data = get_datablock_from_uuid(
            data_uuid,
            find_data_from_name(data_id),
            ignore=['images'])  # TODO: use resolve_from_id

        if data_type != 'EMPTY' and object_data is None:
            raise Exception(f"Fail to load object {data['name']})")

        return bpy.data.objects.new(object_name, object_data)

    @staticmethod
    def load(data: dict, datablock: object):
        loader = Loader()
        load_animation_data(data.get('animation_data'), datablock)

        data_uuid = data.get("data_uuid")
        data_id = data.get("data")

@@ -345,24 +554,9 @@ class BlObject(ReplicatedDatablock):
        object_data = datablock.data

        # SHAPE KEYS
        shape_keys = data.get('shape_keys')
        if shape_keys:
            load_shape_keys(shape_keys, datablock)

        # Load transformation data
        loader.load(datablock, data)
@@ -388,26 +582,26 @@ class BlObject(ReplicatedDatablock):
            # Bone groups
            for bg_name in data['pose']['bone_groups']:
                bg_data = data['pose']['bone_groups'].get(bg_name)
                bg_target = datablock.pose.bone_groups.get(bg_name)

                if not bg_target:
                    bg_target = datablock.pose.bone_groups.new(name=bg_name)

                loader.load(bg_target, bg_data)
            # datablock.pose.bone_groups.get

            # Bones
            for bone in data['pose']['bones']:
                target_bone = datablock.pose.bones.get(bone)
                bone_data = data['pose']['bones'].get(bone)

                if 'constraints' in bone_data.keys():
                    loader.load(target_bone, bone_data['constraints'])

                load_pose(target_bone, bone_data)

                if 'bone_index' in bone_data.keys():
                    target_bone.bone_group = datablock.pose.bone_group[bone_data['bone_group_index']]

        # TODO: find another way...
        if datablock.empty_display_type == "IMAGE":
@@ -428,52 +622,26 @@ class BlObject(ReplicatedDatablock):
           and 'cycles_visibility' in data:
            loader.load(datablock.cycles_visibility, data['cycles_visibility'])

        if hasattr(datablock, 'modifiers'):
            load_modifiers(data['modifiers'], datablock.modifiers)

        constraints = data.get('constraints')
        if constraints:
            load_constraints(constraints, datablock.constraints)

        # PHYSICS
        load_physics(data, datablock)

        transform = data.get('transforms', None)
        if transform:
            datablock.matrix_parent_inverse = mathutils.Matrix(transform['matrix_parent_inverse'])
            datablock.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
            datablock.matrix_local = mathutils.Matrix(transform['matrix_local'])

    @staticmethod
    def dump(datablock: object) -> dict:
        if _is_editmode(datablock):
            if get_preferences().sync_flags.sync_during_editmode:
                datablock.update_from_editmode()
            else:
                raise ContextError("Object is in edit-mode.")
@@ -481,7 +649,6 @@ class BlObject(ReplicatedDatablock):
        dumper = Dumper()
        dumper.depth = 1
        dumper.include_filter = [
            "name",
            "rotation_mode",
            "data",
@@ -511,11 +678,15 @@ class BlObject(ReplicatedDatablock):
            'show_all_edges',
            'show_texture_space',
            'show_in_front',
            'type',
            'parent_type',
            'parent_bone',
            'track_axis',
            'up_axis',
        ]

        data = dumper.dump(datablock)
        data['animation_data'] = dump_animation_data(datablock)

        dumper.include_filter = [
            'matrix_parent_inverse',
            'matrix_local',
@@ -533,34 +704,9 @@ class BlObject(ReplicatedDatablock):
        data['parent_uid'] = (datablock.parent.uuid, datablock.parent.name)

        # MODIFIERS
        modifiers = getattr(datablock, 'modifiers', None)
        if hasattr(datablock, 'modifiers'):
            data['modifiers'] = dump_modifiers(modifiers)

        gp_modifiers = getattr(datablock, 'grease_pencil_modifiers', None)

@@ -586,9 +732,7 @@ class BlObject(ReplicatedDatablock):
        # CONSTRAINTS
        if hasattr(datablock, 'constraints'):
            data["constraints"] = dump_constraints(datablock.constraints)

        # POSE
        if hasattr(datablock, 'pose') and datablock.pose:
@@ -635,30 +779,7 @@ class BlObject(ReplicatedDatablock):
        # SHAPE KEYS
        object_data = datablock.data
        if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
            data['shape_keys'] = dump_shape_keys(object_data.shape_keys)

        # SKIN VERTICES
        if hasattr(object_data, 'skin_vertices') and object_data.skin_vertices:
@@ -678,16 +799,15 @@ class BlObject(ReplicatedDatablock):
                'scatter',
                'shadow',
            ]
            data['cycles_visibility'] = dumper.dump(datablock.cycles_visibility)

        # PHYSICS
        data.update(dump_physics(datablock))

        return data

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        deps = []

        # Avoid Empty case
@@ -705,13 +825,22 @@ class BlObject(ReplicatedDatablock):
            # TODO: uuid based
            deps.append(datablock.instance_collection)

        if datablock.modifiers:
            deps.extend(find_textures_dependencies(datablock.modifiers))
            deps.extend(find_geometry_nodes_dependencies(datablock.modifiers))

        if hasattr(datablock.data, 'shape_keys') and datablock.data.shape_keys:
            deps.extend(resolve_animation_dependencies(datablock.data.shape_keys))

        deps.extend(resolve_animation_dependencies(datablock))

        return deps

    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.objects)


_type = bpy.types.Object
_class = BlObject


@ -0,0 +1,106 @@
import bpy
import mathutils
from . import dump_anything
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
def dump_textures_slots(texture_slots: bpy.types.bpy_prop_collection) -> list:
""" Dump every texture slot collection as the form:
[(index, slot_texture_uuid, slot_texture_name), (), ...]
"""
dumped_slots = []
for index, slot in enumerate(texture_slots):
if slot and slot.texture:
dumped_slots.append((index, slot.texture.uuid, slot.texture.name))
return dumped_slots
def load_texture_slots(dumped_slots: list, target_slots: bpy.types.bpy_prop_collection):
"""
"""
for index, slot in enumerate(target_slots):
if slot:
target_slots.clear(index)
for index, slot_uuid, slot_name in dumped_slots:
target_slots.create(index).texture = get_datablock_from_uuid(
slot_uuid, slot_name
)
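
An illustrative dump of two occupied slots, with invented uuids and texture names:

dumped_slots = [
    (0, "1f3a9b...-texture-uuid", "NoiseTex"),
    (2, "77bc02...-texture-uuid", "CloudsTex"),
]
# load_texture_slots() clears every existing slot, then recreates each listed
# index and re-links its texture through get_datablock_from_uuid().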
IGNORED_ATTR = [
"is_embedded_data",
"is_evaluated",
"is_fluid",
"is_library_indirect",
"users"
]
class BlParticle(ReplicatedDatablock):
use_delta = True
bl_id = "particles"
bl_class = bpy.types.ParticleSettings
bl_icon = "PARTICLES"
bl_check_common = False
bl_reload_parent = False
@staticmethod
def construct(data: dict) -> object:
return bpy.data.particles.new(data["name"])
@staticmethod
def load(data: dict, datablock: object):
load_animation_data(data.get('animation_data'), datablock)
dump_anything.load(datablock, data)
dump_anything.load(datablock.effector_weights, data["effector_weights"])
# Force field
force_field_1 = data.get("force_field_1", None)
if force_field_1:
dump_anything.load(datablock.force_field_1, force_field_1)
force_field_2 = data.get("force_field_2", None)
if force_field_2:
dump_anything.load(datablock.force_field_2, force_field_2)
# Texture slots
load_texture_slots(data["texture_slots"], datablock.texture_slots)
@staticmethod
def dump(datablock: object) -> dict:
dumper = dump_anything.Dumper()
dumper.depth = 1
dumper.exclude_filter = IGNORED_ATTR
data = dumper.dump(datablock)
# Particle effectors
data["effector_weights"] = dumper.dump(datablock.effector_weights)
if datablock.force_field_1:
data["force_field_1"] = dumper.dump(datablock.force_field_1)
if datablock.force_field_2:
data["force_field_2"] = dumper.dump(datablock.force_field_2)
# Texture slots
data["texture_slots"] = dump_textures_slots(datablock.texture_slots)
data['animation_data'] = dump_animation_data(datablock)
return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.particles)
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = [t.texture for t in datablock.texture_slots if t and t.texture]
deps.extend(resolve_animation_dependencies(datablock))
return deps
_type = bpy.types.ParticleSettings
_class = BlParticle


@@ -18,26 +18,23 @@
import logging
from pathlib import Path

import bpy
import mathutils
from deepdiff import DeepDiff, Delta
from replication.constants import DIFF_JSON, MODIFIED
from replication.protocol import ReplicatedDatablock

from ..utils import flush_history, get_preferences
from .bl_action import (dump_animation_data, load_animation_data,
                        resolve_animation_dependencies)
from .bl_collection import (dump_collection_children, dump_collection_objects,
                            load_collection_childrens, load_collection_objects,
                            resolve_collection_dependencies)
from .bl_datablock import resolve_datablock_from_uuid
from .bl_file import get_filepath
from .dump_anything import Dumper, Loader
RENDER_SETTINGS = [ RENDER_SETTINGS = [
'dither_intensity', 'dither_intensity',
@ -307,7 +304,8 @@ def dump_sequence(sequence: bpy.types.Sequence) -> dict:
return data return data
def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor): def load_sequence(sequence_data: dict,
sequence_editor: bpy.types.SequenceEditor):
""" Load sequence from dumped data """ Load sequence from dumped data
:arg sequence_data: sequence to dump :arg sequence_data: sequence to dump
@ -367,7 +365,7 @@ def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor
**seq) **seq)
loader = Loader() loader = Loader()
# TODO: Support filepath updates
loader.exclure_filter = ['filepath', 'sound', 'filenames', 'fps'] loader.exclure_filter = ['filepath', 'sound', 'filenames', 'fps']
loader.load(sequence, sequence_data) loader.load(sequence, sequence_data)
sequence.select = False sequence.select = False
@ -375,6 +373,7 @@ def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor
class BlScene(ReplicatedDatablock): class BlScene(ReplicatedDatablock):
is_root = True is_root = True
use_delta = True
bl_id = "scenes" bl_id = "scenes"
bl_class = bpy.types.Scene bl_class = bpy.types.Scene
@ -384,13 +383,12 @@ class BlScene(ReplicatedDatablock):
@staticmethod @staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
datablock = bpy.data.scenes.new(data["name"]) return bpy.data.scenes.new(data["name"])
datablock.uuid = data.get("uuid")
return datablock
@staticmethod @staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
load_animation_data(data.get('animation_data'), datablock)
# Load other meshes metadata # Load other meshes metadata
loader = Loader() loader = Loader()
loader.load(datablock, data) loader.load(datablock, data)
@ -405,8 +403,9 @@ class BlScene(ReplicatedDatablock):
datablock.world = bpy.data.worlds[data['world']] datablock.world = bpy.data.worlds[data['world']]
# Annotation # Annotation
if 'grease_pencil' in data.keys(): gpencil_uid = data.get('grease_pencil')
datablock.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']] if gpencil_uid:
datablock.grease_pencil = resolve_datablock_from_uuid(gpencil_uid, bpy.data.grease_pencils)
if get_preferences().sync_flags.sync_render_settings: if get_preferences().sync_flags.sync_render_settings:
if 'eevee' in data.keys(): if 'eevee' in data.keys():
@ -418,20 +417,19 @@ class BlScene(ReplicatedDatablock):
if 'render' in data.keys(): if 'render' in data.keys():
loader.load(datablock.render, data['render']) loader.load(datablock.render, data['render'])
if 'view_settings' in data.keys(): view_settings = data.get('view_settings')
loader.load(datablock.view_settings, data['view_settings']) if view_settings:
loader.load(datablock.view_settings, view_settings)
if datablock.view_settings.use_curve_mapping and \ if datablock.view_settings.use_curve_mapping and \
'curve_mapping' in data['view_settings']: 'curve_mapping' in view_settings:
# TODO: change this ugly fix # TODO: change this ugly fix
datablock.view_settings.curve_mapping.white_level = data[ datablock.view_settings.curve_mapping.white_level = view_settings['curve_mapping']['white_level']
'view_settings']['curve_mapping']['white_level'] datablock.view_settings.curve_mapping.black_level = view_settings['curve_mapping']['black_level']
datablock.view_settings.curve_mapping.black_level = data[
'view_settings']['curve_mapping']['black_level']
datablock.view_settings.curve_mapping.update() datablock.view_settings.curve_mapping.update()
# Sequencer # Sequencer
sequences = data.get('sequences') sequences = data.get('sequences')
if sequences: if sequences:
# Create sequencer data # Create sequencer data
datablock.sequence_editor_create() datablock.sequence_editor_create()
@ -442,19 +440,29 @@ class BlScene(ReplicatedDatablock):
if seq.name not in sequences: if seq.name not in sequences:
vse.sequences.remove(seq) vse.sequences.remove(seq)
# Load existing sequences # Load existing sequences
for seq_name, seq_data in sequences.items(): for seq_data in sequences.values():
load_sequence(seq_data, vse) load_sequence(seq_data, vse)
# If the sequence is no longer used, clear it # If the sequence is no longer used, clear it
elif datablock.sequence_editor and not sequences: elif datablock.sequence_editor and not sequences:
datablock.sequence_editor_clear() datablock.sequence_editor_clear()
# Timeline markers
markers = data.get('timeline_markers')
if markers:
datablock.timeline_markers.clear()
for name, frame, camera in markers:
marker = datablock.timeline_markers.new(name, frame=frame)
if camera:
marker.camera = resolve_datablock_from_uuid(camera, bpy.data.objects)
marker.select = False
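
The matching dump code further down stores markers as flat (name, frame, camera_uuid) tuples; an invented example:

data['timeline_markers'] = [
    ("Shot_01", 1, "7c1de0...-camera-uuid"),   # marker bound to a camera
    ("Shot_02", 120, None),                    # plain marker
]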
# FIXME: Find a better way after the big replication refactoring
# Keep other users from deleting collection objects by flushing their history
flush_history()
@staticmethod @staticmethod
def dump(datablock: object) -> dict: def dump(datablock: object) -> dict:
stamp_uuid(datablock) data = {}
data['animation_data'] = dump_animation_data(datablock)
# Metadata # Metadata
scene_dumper = Dumper() scene_dumper = Dumper()
@ -463,17 +471,14 @@ class BlScene(ReplicatedDatablock):
'name', 'name',
'world', 'world',
'id', 'id',
'grease_pencil',
'frame_start', 'frame_start',
'frame_end', 'frame_end',
'frame_step', 'frame_step',
'uuid'
] ]
if get_preferences().sync_flags.sync_active_camera: if get_preferences().sync_flags.sync_active_camera:
scene_dumper.include_filter.append('camera') scene_dumper.include_filter.append('camera')
data = scene_dumper.dump(datablock) data.update(scene_dumper.dump(datablock))
dump_animation_data(datablock, data)
# Master collection # Master collection
data['collection'] = {} data['collection'] = {}
@ -521,6 +526,13 @@ class BlScene(ReplicatedDatablock):
dumped_sequences[seq.name] = dump_sequence(seq) dumped_sequences[seq.name] = dump_sequence(seq)
data['sequences'] = dumped_sequences data['sequences'] = dumped_sequences
# Timeline markers
if datablock.timeline_markers:
data['timeline_markers'] = [(m.name, m.frame, getattr(m.camera, 'uuid', None)) for m in datablock.timeline_markers]
if datablock.grease_pencil:
data['grease_pencil'] = datablock.grease_pencil.uuid
return data return data
@staticmethod @staticmethod
@ -538,6 +550,8 @@ class BlScene(ReplicatedDatablock):
if datablock.grease_pencil: if datablock.grease_pencil:
deps.append(datablock.grease_pencil) deps.append(datablock.grease_pencil)
deps.extend(resolve_animation_dependencies(datablock))
# Sequences # Sequences
vse = datablock.sequence_editor vse = datablock.sequence_editor
if vse: if vse:
@ -550,11 +564,22 @@ class BlScene(ReplicatedDatablock):
for elem in sequence.elements: for elem in sequence.elements:
sequence.append( sequence.append(
Path(bpy.path.abspath(sequence.directory), Path(bpy.path.abspath(sequence.directory),
elem.filename)) elem.filename))
return deps return deps
    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        name = data.get('name')
        datablock = resolve_datablock_from_uuid(uuid, bpy.data.scenes)
        if datablock is None:
            datablock = bpy.data.scenes.get(name)

        return datablock

    @staticmethod
    def compute_delta(last_data: dict, current_data: dict) -> Delta:
        exclude_path = []

        if not get_preferences().sync_flags.sync_render_settings:
@@ -566,7 +591,22 @@ class BlScene(ReplicatedDatablock):
        if not get_preferences().sync_flags.sync_active_camera:
            exclude_path.append("root['camera']")

        diff_params = {
            'exclude_paths': exclude_path,
            'ignore_order': True,
            'report_repetition': True
        }
        delta_params = {
            # 'mutate': True
        }

        return Delta(
            DeepDiff(last_data,
                     current_data,
                     cache_size=5000,
                     **diff_params),
            **delta_params)
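
For context, a small self-contained sketch of the DeepDiff/Delta pattern used by compute_delta(), with plain dicts standing in for two successive scene dumps:

from deepdiff import DeepDiff, Delta

last_data = {'name': 'Scene', 'frame_start': 1, 'frame_end': 250}
current_data = {'name': 'Scene', 'frame_start': 1, 'frame_end': 300}

delta = Delta(DeepDiff(last_data, current_data))

# Applying the delta to the previous state reproduces the new one, which is
# what lets the session ship compact per-datablock updates instead of full dumps.
assert last_data + delta == current_data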
_type = bpy.types.Scene
_class = BlScene


@ -23,38 +23,39 @@ from pathlib import Path
import bpy import bpy
from .bl_file import get_filepath, ensure_unpacked from .bl_file import get_filepath, ensure_unpacked
from .bl_datablock import BlDatablock from replication.protocol import ReplicatedDatablock
from .dump_anything import Dumper, Loader from .dump_anything import Dumper, Loader
from .bl_datablock import resolve_datablock_from_uuid
class BlSound(BlDatablock): class BlSound(ReplicatedDatablock):
bl_id = "sounds" bl_id = "sounds"
bl_class = bpy.types.Sound bl_class = bpy.types.Sound
bl_check_common = False bl_check_common = False
bl_icon = 'SOUND' bl_icon = 'SOUND'
bl_reload_parent = False bl_reload_parent = False
@staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
filename = data.get('filename') filename = data.get('filename')
return bpy.data.sounds.load(get_filepath(filename)) return bpy.data.sounds.load(get_filepath(filename))
def load(self, data, target): @staticmethod
def load(data: dict, datablock: object):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(datablock, data)
def diff(self): @staticmethod
return False def dump(datablock: object) -> dict:
filename = Path(datablock.filepath).name
def dump(self, instance=None):
filename = Path(instance.filepath).name
if not filename: if not filename:
raise FileExistsError(instance.filepath) raise FileExistsError(datablock.filepath)
return { return {
'filename': filename, 'filename': filename,
'name': instance.name 'name': datablock.name
} }
@staticmethod @staticmethod
@ -62,7 +63,19 @@ class BlSound(BlDatablock):
deps = [] deps = []
if datablock.filepath and datablock.filepath != '<builtin>': if datablock.filepath and datablock.filepath != '<builtin>':
ensure_unpacked(datablock) ensure_unpacked(datablock)
deps.append(Path(bpy.path.abspath(datablock.filepath))) deps.append(Path(bpy.path.abspath(datablock.filepath)))
return deps return deps
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.sounds)
@staticmethod
def needs_update(datablock: object, data:dict)-> bool:
return False
_type = bpy.types.Sound
_class = BlSound


@ -20,26 +20,31 @@ import bpy
import mathutils import mathutils
from .dump_anything import Loader, Dumper from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlSpeaker(ReplicatedDatablock):
use_delta = True
class BlSpeaker(BlDatablock):
bl_id = "speakers" bl_id = "speakers"
bl_class = bpy.types.Speaker bl_class = bpy.types.Speaker
bl_check_common = False bl_check_common = False
bl_icon = 'SPEAKER' bl_icon = 'SPEAKER'
bl_reload_parent = False bl_reload_parent = False
@staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(datablock, data)
load_animation_data(data.get('animation_data'), datablock)
@staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
return bpy.data.speakers.new(data["name"]) return bpy.data.speakers.new(data["name"])
@staticmethod
def dump(datablock: object) -> dict: def dump(datablock: object) -> dict:
assert(instance)
dumper = Dumper() dumper = Dumper()
dumper.depth = 1 dumper.depth = 1
dumper.include_filter = [ dumper.include_filter = [
@ -58,10 +63,18 @@ class BlSpeaker(BlDatablock):
'cone_volume_outer' 'cone_volume_outer'
] ]
return dumper.dump(instance) data = dumper.dump(datablock)
data['animation_data'] = dump_animation_data(datablock)
return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.speakers)
@staticmethod @staticmethod
def resolve_deps(datablock: object) -> [object]: def resolve_deps(datablock: object) -> [object]:
# TODO: resolve material
deps = [] deps = []
sound = datablock.sound sound = datablock.sound
@ -69,6 +82,8 @@ class BlSpeaker(BlDatablock):
if sound: if sound:
deps.append(sound) deps.append(sound)
deps.extend(resolve_animation_dependencies(datablock))
return deps return deps
_type = bpy.types.Speaker
_class = BlSpeaker


@ -20,25 +20,32 @@ import bpy
import mathutils import mathutils
from .dump_anything import Loader, Dumper from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
import bpy.types as T
class BlTexture(ReplicatedDatablock):
use_delta = True
class BlTexture(BlDatablock):
bl_id = "textures" bl_id = "textures"
bl_class = bpy.types.Texture bl_class = bpy.types.Texture
bl_check_common = False bl_check_common = False
bl_icon = 'TEXTURE' bl_icon = 'TEXTURE'
bl_reload_parent = False bl_reload_parent = False
@staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(datablock, data)
load_animation_data(data.get('animation_data'), datablock)
@staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
return bpy.data.textures.new(data["name"], data["type"]) return bpy.data.textures.new(data["name"], data["type"])
@staticmethod
def dump(datablock: object) -> dict: def dump(datablock: object) -> dict:
assert(instance)
dumper = Dumper() dumper = Dumper()
dumper.depth = 1 dumper.depth = 1
@ -52,15 +59,22 @@ class BlTexture(BlDatablock):
'name_full' 'name_full'
] ]
data = dumper.dump(instance) data = dumper.dump(datablock)
color_ramp = getattr(instance, 'color_ramp', None)
color_ramp = getattr(datablock, 'color_ramp', None)
if color_ramp: if color_ramp:
dumper.depth = 4 dumper.depth = 4
data['color_ramp'] = dumper.dump(color_ramp) data['color_ramp'] = dumper.dump(color_ramp)
data['animation_data'] = dump_animation_data(datablock)
return data return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.textures)
@staticmethod @staticmethod
def resolve_deps(datablock: object) -> [object]: def resolve_deps(datablock: object) -> [object]:
deps = [] deps = []
@ -70,6 +84,14 @@ class BlTexture(BlDatablock):
if image: if image:
deps.append(image) deps.append(image)
deps.extend(resolve_animation_dependencies(datablock))
return deps return deps
_type = [T.WoodTexture, T.VoronoiTexture,
T.StucciTexture, T.NoiseTexture,
T.MusgraveTexture, T.MarbleTexture,
T.MagicTexture, T.ImageTexture,
T.DistortedNoiseTexture, T.CloudsTexture,
T.BlendTexture]
_class = BlTexture


@ -21,32 +21,26 @@ import mathutils
from pathlib import Path from pathlib import Path
from .dump_anything import Loader, Dumper from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock, get_datablock_from_uuid from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_material import dump_materials_slots, load_materials_slots from .bl_material import dump_materials_slots, load_materials_slots
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlVolume(ReplicatedDatablock):
use_delta = True
class BlVolume(BlDatablock):
bl_id = "volumes" bl_id = "volumes"
bl_class = bpy.types.Volume bl_class = bpy.types.Volume
bl_check_common = False bl_check_common = False
bl_icon = 'VOLUME_DATA' bl_icon = 'VOLUME_DATA'
bl_reload_parent = False bl_reload_parent = False
def load(data: dict, datablock: object): @staticmethod
loader = Loader()
loader.load(target, data)
loader.load(target.display, data['display'])
# MATERIAL SLOTS
src_materials = data.get('materials', None)
if src_materials:
load_materials_slots(src_materials, target.materials)
def construct(data: dict) -> object: def construct(data: dict) -> object:
return bpy.data.volumes.new(data["name"]) return bpy.data.volumes.new(data["name"])
@staticmethod
def dump(datablock: object) -> dict: def dump(datablock: object) -> dict:
assert(instance)
dumper = Dumper() dumper = Dumper()
dumper.depth = 1 dumper.depth = 1
dumper.exclude_filter = [ dumper.exclude_filter = [
@ -60,17 +54,35 @@ class BlVolume(BlDatablock):
'use_fake_user' 'use_fake_user'
] ]
data = dumper.dump(instance) data = dumper.dump(datablock)
data['display'] = dumper.dump(instance.display) data['display'] = dumper.dump(datablock.display)
# Fix material index # Fix material index
data['materials'] = dump_materials_slots(instance.materials) data['materials'] = dump_materials_slots(datablock.materials)
data['animation_data'] = dump_animation_data(datablock)
return data return data
@staticmethod
def load(data: dict, datablock: object):
load_animation_data(data.get('animation_data'), datablock)
loader = Loader()
loader.load(datablock, data)
loader.load(datablock.display, data['display'])
# MATERIAL SLOTS
src_materials = data.get('materials', None)
if src_materials:
load_materials_slots(src_materials, datablock.materials)
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.volumes)
@staticmethod @staticmethod
def resolve_deps(datablock: object) -> [object]: def resolve_deps(datablock: object) -> [object]:
# TODO: resolve material
deps = [] deps = []
external_vdb = Path(bpy.path.abspath(datablock.filepath)) external_vdb = Path(bpy.path.abspath(datablock.filepath))
@ -81,6 +93,9 @@ class BlVolume(BlDatablock):
if material: if material:
deps.append(material) deps.append(material)
deps.extend(resolve_animation_dependencies(datablock))
return deps return deps
_type = bpy.types.Volume
_class = BlVolume


@ -20,35 +20,42 @@ import bpy
import mathutils import mathutils
from .dump_anything import Loader, Dumper from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock from replication.protocol import ReplicatedDatablock
from .bl_material import (load_node_tree, from .bl_material import (load_node_tree,
dump_node_tree, dump_node_tree,
get_node_tree_dependencies) get_node_tree_dependencies)
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
class BlWorld(ReplicatedDatablock):
use_delta = True
class BlWorld(BlDatablock):
bl_id = "worlds" bl_id = "worlds"
bl_class = bpy.types.World bl_class = bpy.types.World
bl_check_common = True bl_check_common = True
bl_icon = 'WORLD_DATA' bl_icon = 'WORLD_DATA'
bl_reload_parent = False bl_reload_parent = False
@staticmethod
def construct(data: dict) -> object: def construct(data: dict) -> object:
return bpy.data.worlds.new(data["name"]) return bpy.data.worlds.new(data["name"])
@staticmethod
def load(data: dict, datablock: object): def load(data: dict, datablock: object):
load_animation_data(data.get('animation_data'), datablock)
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(datablock, data)
if data["use_nodes"]: if data["use_nodes"]:
if target.node_tree is None: if datablock.node_tree is None:
target.use_nodes = True datablock.use_nodes = True
load_node_tree(data['node_tree'], target.node_tree) load_node_tree(data['node_tree'], datablock.node_tree)
@staticmethod
def dump(datablock: object) -> dict: def dump(datablock: object) -> dict:
assert(instance)
world_dumper = Dumper() world_dumper = Dumper()
world_dumper.depth = 1 world_dumper.depth = 1
world_dumper.include_filter = [ world_dumper.include_filter = [
@ -56,11 +63,17 @@ class BlWorld(BlDatablock):
"name", "name",
"color" "color"
] ]
data = world_dumper.dump(instance) data = world_dumper.dump(datablock)
if instance.use_nodes: if datablock.use_nodes:
data['node_tree'] = dump_node_tree(instance.node_tree) data['node_tree'] = dump_node_tree(datablock.node_tree)
data['animation_data'] = dump_animation_data(datablock)
return data return data
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.worlds)
@staticmethod @staticmethod
def resolve_deps(datablock: object) -> [object]: def resolve_deps(datablock: object) -> [object]:
@ -69,4 +82,8 @@ class BlWorld(BlDatablock):
if datablock.use_nodes: if datablock.use_nodes:
deps.extend(get_node_tree_dependencies(datablock.node_tree)) deps.extend(get_node_tree_dependencies(datablock.node_tree))
deps.extend(resolve_animation_dependencies(datablock))
return deps return deps
_type = bpy.types.World
_class = BlWorld


@ -507,16 +507,12 @@ class Loader:
_constructors = { _constructors = {
T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]), T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]),
T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []), T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []),
T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"]),
T.GpencilModifier: (CONSTRUCTOR_NEW, ["name", "type"]), T.GpencilModifier: (CONSTRUCTOR_NEW, ["name", "type"]),
T.Constraint: (CONSTRUCTOR_NEW, ["type"]),
} }
destructors = { destructors = {
T.ColorRampElement: DESTRUCTOR_REMOVE, T.ColorRampElement: DESTRUCTOR_REMOVE,
T.Modifier: DESTRUCTOR_CLEAR,
T.GpencilModifier: DESTRUCTOR_CLEAR, T.GpencilModifier: DESTRUCTOR_CLEAR,
T.Constraint: DESTRUCTOR_REMOVE,
} }
element_type = element.bl_rna_property.fixed_type element_type = element.bl_rna_property.fixed_type


@ -24,20 +24,25 @@ import sys
from pathlib import Path from pathlib import Path
import socket import socket
import re import re
import bpy
VERSION_EXPR = re.compile('\d+.\d+.\d+')
+THIRD_PARTY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
DEFAULT_CACHE_DIR = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), "cache")
+REPLICATION_DEPENDENCIES = {
+    "zmq",
+    "deepdiff"
+}
+LIBS = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
+REPLICATION = os.path.join(LIBS, "replication")

PYTHON_PATH = None
SUBPROCESS_DIR = None
rtypes = []

-def module_can_be_imported(name):
+def module_can_be_imported(name: str) -> bool:
    try:
        __import__(name)
        return True

@ -50,7 +55,7 @@ def install_pip():
    subprocess.run([str(PYTHON_PATH), "-m", "ensurepip"])

-def install_package(name, version):
+def install_package(name: str, install_dir: str):
    logging.info(f"installing {name} version...")
    env = os.environ
    if "PIP_REQUIRE_VIRTUALENV" in env:

@ -60,12 +65,13 @@ def install_package(name, version):
        # env var for the subprocess.
        env = os.environ.copy()
        del env["PIP_REQUIRE_VIRTUALENV"]
-    subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}=={version}"], env=env)
+    subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}", "-t", install_dir], env=env)
    if name in sys.modules:
        del sys.modules[name]

-def check_package_version(name, required_version):
+def check_package_version(name: str, required_version: str):
    logging.info(f"Checking {name} version...")
    out = subprocess.run([str(PYTHON_PATH), "-m", "pip", "show", name], capture_output=True)

@ -77,6 +83,7 @@ def check_package_version(name, required_version):
    logging.info(f"{name} need an update")
    return False

def get_ip():
    """
    Retrieve the main network interface IP.

@ -94,7 +101,25 @@ def check_dir(dir):
        os.makedirs(dir)

-def setup(dependencies, python_path):
+def setup_paths(paths: list):
+    """ Add missing path to sys.path
+    """
+    for path in paths:
+        if path not in sys.path:
+            logging.debug(f"Adding {path} dir to the path.")
+            sys.path.insert(0, path)
+
+def remove_paths(paths: list):
+    """ Remove list of path from sys.path
+    """
+    for path in paths:
+        if path in sys.path:
+            logging.debug(f"Removing {path} dir from the path.")
+            sys.path.remove(path)
+
+def install_modules(dependencies: list, python_path: str, install_dir: str):
    global PYTHON_PATH, SUBPROCESS_DIR
    PYTHON_PATH = Path(python_path)

@ -103,9 +128,23 @@ def setup(dependencies, python_path):
    if not module_can_be_imported("pip"):
        install_pip()

-    for package_name, package_version in dependencies:
+    for package_name in dependencies:
        if not module_can_be_imported(package_name):
-            install_package(package_name, package_version)
+            install_package(package_name, install_dir=install_dir)
            module_can_be_imported(package_name)
-        elif not check_package_version(package_name, package_version):
-            install_package(package_name, package_version)
+
+def register():
+    if bpy.app.version >= (2, 91, 0):
+        python_binary_path = sys.executable
+    else:
+        python_binary_path = bpy.app.binary_path_python
+
+    for module_name in list(sys.modules.keys()):
+        if 'replication' in module_name:
+            del sys.modules[module_name]
+
+    setup_paths([LIBS, REPLICATION])
+    install_modules(REPLICATION_DEPENDENCIES, python_binary_path, install_dir=LIBS)
+
+def unregister():
+    remove_paths([REPLICATION, LIBS])
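The new install flow above boils down to: pip-install each dependency into the add-on's local libs folder with -t, then put that folder (and the bundled replication checkout) on sys.path. A minimal standalone sketch of the same pattern, with a hypothetical vendor() helper and a path assumed to mirror the add-on layout:

# Minimal sketch (not the add-on's code): vendor a dependency into a local
# "libs" folder and make it importable, mirroring install_modules()/setup_paths().
import os
import subprocess
import sys

LIBS = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")

def vendor(package: str, python: str = sys.executable):
    os.makedirs(LIBS, exist_ok=True)
    # pip's -t/--target installs into an arbitrary directory
    subprocess.run([python, "-m", "pip", "install", package, "-t", LIBS], check=True)
    if LIBS not in sys.path:
        sys.path.insert(0, LIBS)

# vendor("deepdiff")  # afterwards `import deepdiff` resolves from LIBS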

multi_user/handlers.py (new file)
@ -0,0 +1,155 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import logging
import bpy
from bpy.app.handlers import persistent
from replication import porcelain
from replication.constants import RP_COMMON, STATE_ACTIVE, STATE_SYNCING, UP
from replication.exception import ContextError, NonAuthorizedOperationError
from replication.interface import session
from . import shared_data, utils
def sanitize_deps_graph(remove_nodes: bool = False):
""" Cleanup the replication graph
"""
if session and session.state == STATE_ACTIVE:
start = utils.current_milli_time()
rm_cpt = 0
for node in session.repository.graph.values():
node.instance = session.repository.rdp.resolve(node.data)
if node is None \
or (node.state == UP and not node.instance):
if remove_nodes:
try:
porcelain.rm(session.repository,
node.uuid,
remove_dependencies=False)
logging.info(f"Removing {node.uuid}")
rm_cpt += 1
except NonAuthorizedOperationError:
continue
logging.info(f"Sanitize took { utils.current_milli_time()-start} ms, removed {rm_cpt} nodes")
def update_external_dependencies():
"""Force external dependencies(files such as images) evaluation
"""
external_types = ['WindowsPath', 'PosixPath', 'Image']
nodes_ids = [n.uuid for n in session.repository.graph.values() if n.data['type_id'] in external_types]
for node_id in nodes_ids:
node = session.repository.graph.get(node_id)
if node and node.owner in [session.repository.username, RP_COMMON]:
porcelain.commit(session.repository, node_id)
porcelain.push(session.repository, 'origin', node_id)
@persistent
def on_scene_update(scene):
"""Forward blender depsgraph update to replication
"""
if session and session.state == STATE_ACTIVE:
context = bpy.context
blender_depsgraph = bpy.context.view_layer.depsgraph
dependency_updates = [u for u in blender_depsgraph.updates]
settings = utils.get_preferences()
incoming_updates = shared_data.session.applied_updates
distant_update = [getattr(u.id, 'uuid', None) for u in dependency_updates if getattr(u.id, 'uuid', None) in incoming_updates]
if distant_update:
for u in distant_update:
shared_data.session.applied_updates.remove(u)
logging.debug(f"Ignoring distant update of {dependency_updates[0].id.name}")
return
# NOTE: maybe we don't need to check each update but only the first
for update in reversed(dependency_updates):
update_uuid = getattr(update.id, 'uuid', None)
if update_uuid:
node = session.repository.graph.get(update.id.uuid)
check_common = session.repository.rdp.get_implementation(update.id).bl_check_common
if node and (node.owner == session.repository.username or check_common):
logging.debug(f"Evaluate {update.id.name}")
if node.state == UP:
try:
porcelain.commit(session.repository, node.uuid)
porcelain.push(session.repository,
'origin', node.uuid)
except ReferenceError:
logging.debug(f"Reference error {node.uuid}")
except ContextError as e:
logging.debug(e)
except Exception as e:
logging.error(e)
else:
continue
elif isinstance(update.id, bpy.types.Scene):
scene = bpy.data.scenes.get(update.id.name)
scn_uuid = porcelain.add(session.repository, scene)
porcelain.commit(session.repository, scn_uuid)
porcelain.push(session.repository, 'origin', scn_uuid)
scene_graph_changed = [u for u in reversed(dependency_updates) if getattr(u.id, 'uuid', None) and isinstance(u.id,(bpy.types.Scene,bpy.types.Collection))]
if scene_graph_changed:
porcelain.purge_orphan_nodes(session.repository)
update_external_dependencies()
@persistent
def resolve_deps_graph(dummy):
"""Resolve deps graph
    Temporary solution to resolve each node's pointers after an Undo.
    A future solution should be to avoid storing datablock references...
"""
if session and session.state == STATE_ACTIVE:
sanitize_deps_graph(remove_nodes=True)
@persistent
def load_pre_handler(dummy):
if session and session.state in [STATE_ACTIVE, STATE_SYNCING]:
bpy.ops.session.stop()
@persistent
def update_client_frame(scene):
if session and session.state == STATE_ACTIVE:
porcelain.update_user_metadata(session.repository, {
'frame_current': scene.frame_current
})
def register():
bpy.app.handlers.undo_post.append(resolve_deps_graph)
bpy.app.handlers.redo_post.append(resolve_deps_graph)
bpy.app.handlers.load_pre.append(load_pre_handler)
bpy.app.handlers.frame_change_pre.append(update_client_frame)
def unregister():
bpy.app.handlers.undo_post.remove(resolve_deps_graph)
bpy.app.handlers.redo_post.remove(resolve_deps_graph)
bpy.app.handlers.load_pre.remove(load_pre_handler)
bpy.app.handlers.frame_change_pre.remove(update_client_frame)
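register()/unregister() above follow Blender's standard application-handler pattern: module-level callbacks appended to the bpy.app.handlers lists and marked @persistent so they survive loading a new .blend file. A minimal sketch of that pattern (runs inside Blender; the handler name below is illustrative):

# Sketch of the bpy.app.handlers pattern used above (hypothetical handler).
import bpy
from bpy.app.handlers import persistent

@persistent  # keeps the handler registered across .blend file loads
def demo_frame_handler(scene):
    print(f"frame changed to {scene.frame_current}")

def register():
    bpy.app.handlers.frame_change_pre.append(demo_frame_handler)

def unregister():
    if demo_frame_handler in bpy.app.handlers.frame_change_pre:
        bpy.app.handlers.frame_change_pre.remove(demo_frame_handler)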


@ -15,31 +15,31 @@
 #
 # ##### END GPL LICENSE BLOCK #####

import bpy
-import mathutils
-
-from .dump_anything import Loader, Dumper
-from .bl_datablock import BlDatablock
-
-
-class BlLibrary(BlDatablock):
-    bl_id = "libraries"
-    bl_class = bpy.types.Library
-    bl_check_common = False
-    bl_icon = 'LIBRARY_DATA_DIRECT'
-    bl_reload_parent = False
-
-    def construct(data: dict) -> object:
-        with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData):
-            targetData = sourceData
-        return sourceData
-
-    def load(self, data, target):
-        pass
-
-    def dump(self, instance=None):
-        assert(instance)
-        dumper = Dumper()
-        return dumper.dump(instance)
+import os
+from pathlib import Path
+import bpy.utils.previews
+
+
+def register():
+    global icons_col
+
+    pcoll = bpy.utils.previews.new()
+    icons_dir = os.path.join(os.path.dirname(__file__), ".")
+    for png in Path(icons_dir).rglob("*.png"):
+        pcoll.load(png.stem, str(png), "IMAGE")
+    icons_col = pcoll
+
+
+def unregister():
+    global icons_col
+
+    try:
+        bpy.utils.previews.remove(icons_col)
+    except Exception:
+        pass
+    icons_col = None
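The preview collection built here is later consumed through icons_col and icon_value in the updated UI code. A small usage sketch (inside Blender; the icon key comes from the PNG file names added in this changeset):

# Usage sketch: resolve a loaded preview and hand its icon_id to the UI.
from multi_user import icons

def draw_status(layout):
    online_icon = icons.icons_col["session_status_online"]
    layout.label(text="Online", icon_value=online_icon.icon_id)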

(Seven binary PNG icon files added, 4.2–13 KiB each; previews not shown.)

@ -1,90 +0,0 @@
import bpy
import mathutils
from . import dump_anything
from .bl_datablock import BlDatablock, get_datablock_from_uuid
def dump_textures_slots(texture_slots: bpy.types.bpy_prop_collection) -> list:
""" Dump every texture slot collection as the form:
[(index, slot_texture_uuid, slot_texture_name), (), ...]
"""
dumped_slots = []
for index, slot in enumerate(texture_slots):
if slot and slot.texture:
dumped_slots.append((index, slot.texture.uuid, slot.texture.name))
return dumped_slots
def load_texture_slots(dumped_slots: list, target_slots: bpy.types.bpy_prop_collection):
"""
"""
for index, slot in enumerate(target_slots):
if slot:
target_slots.clear(index)
for index, slot_uuid, slot_name in dumped_slots:
target_slots.create(index).texture = get_datablock_from_uuid(
slot_uuid, slot_name
)
IGNORED_ATTR = [
"is_embedded_data",
"is_evaluated",
"is_fluid",
"is_library_indirect",
"users"
]
class BlParticle(BlDatablock):
bl_id = "particles"
bl_class = bpy.types.ParticleSettings
bl_icon = "PARTICLES"
bl_check_common = False
bl_reload_parent = False
def _construct(self, data):
instance = bpy.data.particles.new(data["name"])
instance.uuid = self.uuid
return instance
def _load_implementation(self, data, target):
dump_anything.load(target, data)
dump_anything.load(target.effector_weights, data["effector_weights"])
# Force field
force_field_1 = data.get("force_field_1", None)
if force_field_1:
dump_anything.load(target.force_field_1, force_field_1)
force_field_2 = data.get("force_field_2", None)
if force_field_2:
dump_anything.load(target.force_field_2, force_field_2)
# Texture slots
load_texture_slots(data["texture_slots"], target.texture_slots)
def _dump_implementation(self, data, instance=None):
assert instance
dumper = dump_anything.Dumper()
dumper.depth = 1
dumper.exclude_filter = IGNORED_ATTR
data = dumper.dump(instance)
# Particle effectors
data["effector_weights"] = dumper.dump(instance.effector_weights)
if instance.force_field_1:
data["force_field_1"] = dumper.dump(instance.force_field_1)
if instance.force_field_2:
data["force_field_2"] = dumper.dump(instance.force_field_2)
# Texture slots
data["texture_slots"] = dump_textures_slots(instance.texture_slots)
return data
def _resolve_deps_implementation(self):
return [t.texture for t in self.instance.texture_slots if t and t.texture]

File diff suppressed because it is too large

View File

@ -17,6 +17,7 @@
import random import random
import logging import logging
from uuid import uuid4
import bpy import bpy
import string import string
import re import re
@ -24,8 +25,8 @@ import os
from pathlib import Path from pathlib import Path
from . import io_bpy, environment, addon_updater_ops, presence, ui from . import bl_types, environment, addon_updater_ops, presence, ui
from .utils import get_preferences, get_expanded_icon from .utils import get_preferences, get_expanded_icon, get_folder_size
from replication.constants import RP_COMMON from replication.constants import RP_COMMON
from replication.interface import session from replication.interface import session
@ -33,6 +34,25 @@ from replication.interface import session
IP_REGEX = re.compile("^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$") IP_REGEX = re.compile("^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$")
HOSTNAME_REGEX = re.compile("^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$") HOSTNAME_REGEX = re.compile("^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$")
#SERVER PRESETS AT LAUNCH
DEFAULT_PRESETS = {
"localhost" : {
"server_name": "localhost",
"ip": "localhost",
"port": 5555,
"use_admin_password": True,
"admin_password": "admin",
"server_password": ""
},
"public session" : {
"server_name": "public session",
"ip": "51.75.71.183",
"port": 5555,
"admin_password": "",
"server_password": ""
},
}
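DEFAULT_PRESETS seeds the server list on first launch via generate_default_presets() further down in this file. A hypothetical extra entry would follow the same schema (the name and address below are examples, not part of the patch):

# Hypothetical additional preset following the same schema:
DEFAULT_PRESETS["studio lan"] = {
    "server_name": "studio lan",
    "ip": "192.168.1.42",
    "port": 5555,
    "use_server_password": True,
    "server_password": "hunter2",
    "use_admin_password": False,
    "admin_password": "",
}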
def randomColor(): def randomColor():
"""Generate a random color """ """Generate a random color """
r = random.random() r = random.random()
@ -66,8 +86,6 @@ def update_ip(self, context):
self['ip'] = "127.0.0.1" self['ip'] = "127.0.0.1"
def update_directory(self, context): def update_directory(self, context):
new_dir = Path(self.cache_directory) new_dir = Path(self.cache_directory)
if new_dir.exists() and any(Path(self.cache_directory).iterdir()): if new_dir.exists() and any(Path(self.cache_directory).iterdir()):
@ -93,6 +111,16 @@ class ReplicatedDatablock(bpy.types.PropertyGroup):
auto_push: bpy.props.BoolProperty(default=True) auto_push: bpy.props.BoolProperty(default=True)
icon: bpy.props.StringProperty() icon: bpy.props.StringProperty()
class ServerPreset(bpy.types.PropertyGroup):
server_name: bpy.props.StringProperty(default="")
ip: bpy.props.StringProperty(default="127.0.0.1", update=update_ip)
port: bpy.props.IntProperty(default=5555)
use_server_password: bpy.props.BoolProperty(default=False)
server_password: bpy.props.StringProperty(default="", subtype = "PASSWORD")
use_admin_password: bpy.props.BoolProperty(default=False)
admin_password: bpy.props.StringProperty(default="", subtype = "PASSWORD")
is_online: bpy.props.BoolProperty(default=False)
is_private: bpy.props.BoolProperty(default=False)
def set_sync_render_settings(self, value): def set_sync_render_settings(self, value):
self['sync_render_settings'] = value self['sync_render_settings'] = value
@ -142,24 +170,60 @@ class ReplicationFlags(bpy.types.PropertyGroup):
class SessionPrefs(bpy.types.AddonPreferences): class SessionPrefs(bpy.types.AddonPreferences):
bl_idname = __package__ bl_idname = __package__
ip: bpy.props.StringProperty( # User settings
name="ip",
description='Distant host ip',
default="127.0.0.1",
update=update_ip)
username: bpy.props.StringProperty( username: bpy.props.StringProperty(
name="Username", name="Username",
default=f"user_{random_string_digits()}" default=f"user_{random_string_digits()}"
) )
client_color: bpy.props.FloatVectorProperty( client_color: bpy.props.FloatVectorProperty(
name="client_instance_color", name="client_instance_color",
description='User color',
subtype='COLOR', subtype='COLOR',
default=randomColor()) default=randomColor()
port: bpy.props.IntProperty( )
name="port", # Current server settings
server_name: bpy.props.StringProperty(
name="server_name",
description="Custom name of the server",
default='localhost',
)
server_index: bpy.props.IntProperty(
name="server_index",
description="index of the server",
)
# User host session settings
host_port: bpy.props.IntProperty(
name="host_port",
description='Distant host port', description='Distant host port',
default=5555 default=5555
) )
host_use_server_password: bpy.props.BoolProperty(
name="use_server_password",
description='Use session password',
default=False
)
host_server_password: bpy.props.StringProperty(
name="server_password",
description='Session password',
subtype='PASSWORD'
)
host_use_admin_password: bpy.props.BoolProperty(
name="use_admin_password",
description='Use admin password',
default=True
)
host_admin_password: bpy.props.StringProperty(
name="admin_password",
description='Admin password',
subtype='PASSWORD',
default='admin'
)
# Other
is_first_launch: bpy.props.BoolProperty(
name="is_fnirst_launch",
description="First time lauching the addon",
default=True
)
sync_flags: bpy.props.PointerProperty( sync_flags: bpy.props.PointerProperty(
type=ReplicationFlags type=ReplicationFlags
) )
@ -183,6 +247,11 @@ class SessionPrefs(bpy.types.AddonPreferences):
description='connection timeout before disconnection', description='connection timeout before disconnection',
default=5000 default=5000
) )
ping_timeout: bpy.props.IntProperty(
name='ping timeout',
description='check if servers are online',
default=500
)
# Replication update settings # Replication update settings
depsgraph_update_rate: bpy.props.FloatProperty( depsgraph_update_rate: bpy.props.FloatProperty(
name='depsgraph update rate (s)', name='depsgraph update rate (s)',
@ -194,11 +263,12 @@ class SessionPrefs(bpy.types.AddonPreferences):
description="Remove filecache from memory", description="Remove filecache from memory",
default=False default=False
) )
# for UI # For UI
category: bpy.props.EnumProperty( category: bpy.props.EnumProperty(
name="Category", name="Category",
description="Preferences Category", description="Preferences Category",
items=[ items=[
('PREF', "Preferences", "Preferences of this add-on"),
('CONFIG', "Configuration", "Configuration of this add-on"), ('CONFIG', "Configuration", "Configuration of this add-on"),
('UPDATE', "Update", "Update this add-on"), ('UPDATE', "Update", "Update this add-on"),
], ],
@ -242,31 +312,58 @@ class SessionPrefs(bpy.types.AddonPreferences):
step=1, step=1,
subtype='PERCENTAGE', subtype='PERCENTAGE',
) )
presence_text_distance: bpy.props.FloatProperty(
name="Distance text visibilty",
description="Adjust the distance visibilty of user's mode/name",
min=0.1,
max=10000,
default=100,
)
conf_session_identity_expanded: bpy.props.BoolProperty( conf_session_identity_expanded: bpy.props.BoolProperty(
name="Identity", name="Identity",
description="Identity", description="Identity",
default=True default=False
) )
conf_session_net_expanded: bpy.props.BoolProperty( conf_session_net_expanded: bpy.props.BoolProperty(
name="Net", name="Net",
description="net", description="net",
default=True default=False
) )
conf_session_hosting_expanded: bpy.props.BoolProperty( conf_session_hosting_expanded: bpy.props.BoolProperty(
name="Rights", name="Rights",
description="Rights", description="Rights",
default=False default=False
) )
conf_session_rep_expanded: bpy.props.BoolProperty(
name="Replication",
description="Replication",
default=False
)
conf_session_cache_expanded: bpy.props.BoolProperty( conf_session_cache_expanded: bpy.props.BoolProperty(
name="Cache", name="Cache",
description="cache", description="cache",
default=False default=False
) )
conf_session_log_expanded: bpy.props.BoolProperty(
name="conf_session_log_expanded",
description="conf_session_log_expanded",
default=False
)
conf_session_ui_expanded: bpy.props.BoolProperty( conf_session_ui_expanded: bpy.props.BoolProperty(
name="Interface", name="Interface",
description="Interface", description="Interface",
default=False default=False
) )
sidebar_repository_shown: bpy.props.BoolProperty(
name="sidebar_repository_shown",
description="sidebar_repository_shown",
default=False
)
sidebar_advanced_shown: bpy.props.BoolProperty(
name="sidebar_advanced_shown",
description="sidebar_advanced_shown",
default=False
)
sidebar_advanced_rep_expanded: bpy.props.BoolProperty( sidebar_advanced_rep_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_rep_expanded", name="sidebar_advanced_rep_expanded",
description="sidebar_advanced_rep_expanded", description="sidebar_advanced_rep_expanded",
@ -277,6 +374,11 @@ class SessionPrefs(bpy.types.AddonPreferences):
description="sidebar_advanced_log_expanded", description="sidebar_advanced_log_expanded",
default=False default=False
) )
sidebar_advanced_uinfo_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_uinfo_expanded",
description="sidebar_advanced_uinfo_expanded",
default=False
)
sidebar_advanced_net_expanded: bpy.props.BoolProperty( sidebar_advanced_net_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_net_expanded", name="sidebar_advanced_net_expanded",
description="sidebar_advanced_net_expanded", description="sidebar_advanced_net_expanded",
@ -321,6 +423,19 @@ class SessionPrefs(bpy.types.AddonPreferences):
max=59 max=59
) )
# Server preset
def server_list_callback(scene, context):
settings = get_preferences()
enum = []
for i in settings.server_preset:
enum.append((i.name, i.name, ""))
return enum
server_preset: bpy.props.CollectionProperty(
name="server preset",
type=ServerPreset,
)
# Custom panel # Custom panel
panel_category: bpy.props.StringProperty( panel_category: bpy.props.StringProperty(
description="Choose a name for the category of the panel", description="Choose a name for the category of the panel",
@ -329,38 +444,28 @@ class SessionPrefs(bpy.types.AddonPreferences):
def draw(self, context): def draw(self, context):
layout = self.layout layout = self.layout
layout.row().prop(self, "category", expand=True) layout.row().prop(self, "category", expand=True)
if self.category == 'PREF':
grid = layout.column()
box = grid.box()
row = box.row()
# USER SETTINGS
split = row.split(factor=0.7, align=True)
split.prop(self, "username", text="User")
split.prop(self, "client_color", text="")
row = box.row()
row.label(text="Hide settings:")
row = box.row()
row.prop(self, "sidebar_advanced_shown", text="Hide “Advanced” settings in side pannel (Not in session)")
row = box.row()
row.prop(self, "sidebar_repository_shown", text="Hide “Repository” settings in side pannel (In session)")
if self.category == 'CONFIG': if self.category == 'CONFIG':
grid = layout.column() grid = layout.column()
# USER INFORMATIONS
box = grid.box()
box.prop(
self, "conf_session_identity_expanded", text="User information",
icon=get_expanded_icon(self.conf_session_identity_expanded),
emboss=False)
if self.conf_session_identity_expanded:
box.row().prop(self, "username", text="name")
box.row().prop(self, "client_color", text="color")
# NETWORK SETTINGS
box = grid.box()
box.prop(
self, "conf_session_net_expanded", text="Networking",
icon=get_expanded_icon(self.conf_session_net_expanded),
emboss=False)
if self.conf_session_net_expanded:
box.row().prop(self, "ip", text="Address")
row = box.row()
row.label(text="Port:")
row.prop(self, "port", text="")
row = box.row()
row.label(text="Init the session from:")
row.prop(self, "init_method", text="")
# HOST SETTINGS # HOST SETTINGS
box = grid.box() box = grid.box()
box.prop( box.prop(
@ -368,9 +473,57 @@ class SessionPrefs(bpy.types.AddonPreferences):
icon=get_expanded_icon(self.conf_session_hosting_expanded), icon=get_expanded_icon(self.conf_session_hosting_expanded),
emboss=False) emboss=False)
if self.conf_session_hosting_expanded: if self.conf_session_hosting_expanded:
row = box.row()
row.prop(self, "host_port", text="Port: ")
row = box.row() row = box.row()
row.label(text="Init the session from:") row.label(text="Init the session from:")
row.prop(self, "init_method", text="") row.prop(self, "init_method", text="")
row = box.row()
col = row.column()
col.prop(self, "host_use_server_password", text="Server password:")
col = row.column()
col.enabled = True if self.host_use_server_password else False
col.prop(self, "host_server_password", text="")
row = box.row()
col = row.column()
col.prop(self, "host_use_admin_password", text="Admin password:")
col = row.column()
col.enabled = True if self.host_use_admin_password else False
col.prop(self, "host_admin_password", text="")
# NETWORKING
box = grid.box()
box.prop(
self, "conf_session_net_expanded", text="Network",
icon=get_expanded_icon(self.conf_session_net_expanded),
emboss=False)
if self.conf_session_net_expanded:
row = box.row()
row.label(text="Timeout (ms):")
row.prop(self, "connection_timeout", text="")
row = box.row()
row.label(text="Server ping (ms):")
row.prop(self, "ping_timeout", text="")
# REPLICATION
box = grid.box()
box.prop(
self, "conf_session_rep_expanded", text="Replication",
icon=get_expanded_icon(self.conf_session_rep_expanded),
emboss=False)
if self.conf_session_rep_expanded:
row = box.row()
row.prop(self.sync_flags, "sync_render_settings")
row = box.row()
row.prop(self.sync_flags, "sync_active_camera")
row = box.row()
row.prop(self.sync_flags, "sync_during_editmode")
row = box.row()
if self.sync_flags.sync_during_editmode:
warning = row.box()
warning.label(text="Don't use this with heavy meshes !", icon='ERROR')
row = box.row()
row.prop(self, "depsgraph_update_rate", text="Apply delay")
# CACHE SETTINGS # CACHE SETTINGS
box = grid.box() box = grid.box()
@ -381,24 +534,18 @@ class SessionPrefs(bpy.types.AddonPreferences):
if self.conf_session_cache_expanded: if self.conf_session_cache_expanded:
box.row().prop(self, "cache_directory", text="Cache directory") box.row().prop(self, "cache_directory", text="Cache directory")
box.row().prop(self, "clear_memory_filecache", text="Clear memory filecache") box.row().prop(self, "clear_memory_filecache", text="Clear memory filecache")
box.row().operator('session.clear_cache', text=f"Clear cache ({get_folder_size(self.cache_directory)})")
# INTERFACE SETTINGS
# LOGGING
box = grid.box() box = grid.box()
box.prop( box.prop(
self, "conf_session_ui_expanded", text="Interface", self, "conf_session_log_expanded", text="Logging",
icon=get_expanded_icon(self.conf_session_ui_expanded), icon=get_expanded_icon(self.conf_session_log_expanded),
emboss=False) emboss=False)
if self.conf_session_ui_expanded: if self.conf_session_log_expanded:
box.row().prop(self, "panel_category", text="Panel category", expand=True)
row = box.row() row = box.row()
row.label(text="Session widget:") row.label(text="Log level:")
row.prop(self, 'logging_level', text="")
col = box.column(align=True)
col.prop(self, "presence_hud_scale", expand=True)
col.prop(self, "presence_hud_hpos", expand=True)
col.prop(self, "presence_hud_vpos", expand=True)
if self.category == 'UPDATE': if self.category == 'UPDATE':
from . import addon_updater_ops from . import addon_updater_ops
@ -407,18 +554,44 @@ class SessionPrefs(bpy.types.AddonPreferences):
def generate_supported_types(self): def generate_supported_types(self):
self.supported_datablocks.clear() self.supported_datablocks.clear()
bpy_protocol = io_bpy.get_data_translation_protocol() bpy_protocol = bl_types.get_data_translation_protocol()
# init the factory with supported types # init the factory with supported types
for impl in bpy_protocol.implementations.values(): for dcc_type_id, impl in bpy_protocol.implementations.items():
new_db = self.supported_datablocks.add() new_db = self.supported_datablocks.add()
new_db.name = impl.__name__ new_db.name = dcc_type_id
new_db.type_name = impl.__name__ new_db.type_name = dcc_type_id
new_db.use_as_filter = True new_db.use_as_filter = True
new_db.icon = impl.bl_icon new_db.icon = impl.bl_icon
new_db.bl_name = impl.bl_id new_db.bl_name = impl.bl_id
# Get a server preset through its name
def get_server_preset(self, name):
existing_preset = None
for server_preset in self.server_preset :
if server_preset.server_name == name :
existing_preset = server_preset
return existing_preset
# Custom at launch server preset
def generate_default_presets(self):
for preset_name, preset_data in DEFAULT_PRESETS.items():
existing_preset = self.get_server_preset(preset_name)
if existing_preset :
continue
new_server = self.server_preset.add()
new_server.name = str(uuid4())
new_server.server_name = preset_data.get('server_name')
new_server.ip = preset_data.get('ip')
new_server.port = preset_data.get('port')
new_server.use_server_password = preset_data.get('use_server_password',False)
new_server.server_password = preset_data.get('server_password',None)
new_server.use_admin_password = preset_data.get('use_admin_password',False)
new_server.admin_password = preset_data.get('admin_password',None)
def client_list_callback(scene, context): def client_list_callback(scene, context):
from . import operators from . import operators
@ -446,6 +619,11 @@ class SessionUser(bpy.types.PropertyGroup):
""" """
username: bpy.props.StringProperty(name="username") username: bpy.props.StringProperty(name="username")
current_frame: bpy.props.IntProperty(name="current_frame") current_frame: bpy.props.IntProperty(name="current_frame")
color: bpy.props.FloatVectorProperty(name="color", subtype="COLOR",
min=0.0,
max=1.0,
size=4,
default=(1.0, 1.0, 1.0, 1.0))
class SessionProps(bpy.types.PropertyGroup): class SessionProps(bpy.types.PropertyGroup):
@ -475,6 +653,11 @@ class SessionProps(bpy.types.PropertyGroup):
description='Enable user overlay ', description='Enable user overlay ',
default=True, default=True,
) )
presence_show_mode: bpy.props.BoolProperty(
name="Show users current mode",
description='Enable user mode overlay ',
default=False,
)
presence_show_far_user: bpy.props.BoolProperty( presence_show_far_user: bpy.props.BoolProperty(
name="Show users on different scenes", name="Show users on different scenes",
description="Show user on different scenes", description="Show user on different scenes",
@ -490,32 +673,33 @@ class SessionProps(bpy.types.PropertyGroup):
description='Show only owned datablocks', description='Show only owned datablocks',
default=True default=True
) )
filter_name: bpy.props.StringProperty(
name="filter_name",
default="",
description='Node name filter',
)
admin: bpy.props.BoolProperty( admin: bpy.props.BoolProperty(
name="admin", name="admin",
description='Connect as admin', description='Connect as admin',
default=False default=False
) )
password: bpy.props.StringProperty(
name="password",
default=random_string_digits(),
description='Session password',
subtype='PASSWORD'
)
user_snap_running: bpy.props.BoolProperty( user_snap_running: bpy.props.BoolProperty(
default=False default=False
) )
time_snap_running: bpy.props.BoolProperty( time_snap_running: bpy.props.BoolProperty(
default=False default=False
) )
is_host: bpy.props.BoolProperty(
default=False
)
def get_preferences():
return bpy.context.preferences.addons[__package__].preferences
classes = ( classes = (
SessionUser, SessionUser,
SessionProps, SessionProps,
ReplicationFlags, ReplicationFlags,
ReplicatedDatablock, ReplicatedDatablock,
ServerPreset,
SessionPrefs, SessionPrefs,
) )
@ -528,8 +712,12 @@ def register():
prefs = bpy.context.preferences.addons[__package__].preferences prefs = bpy.context.preferences.addons[__package__].preferences
if len(prefs.supported_datablocks) == 0: if len(prefs.supported_datablocks) == 0:
logging.debug('Generating io_bpy preferences') logging.debug('Generating bl_types preferences')
prefs.generate_supported_types() prefs.generate_supported_types()
# at launch server presets
prefs.generate_default_presets()
def unregister(): def unregister():

View File

@ -94,15 +94,41 @@ def project_to_viewport(region: bpy.types.Region, rv3d: bpy.types.RegionView3D,
return [target.x, target.y, target.z] return [target.x, target.y, target.z]
def bbox_from_obj(obj: bpy.types.Object, radius: float) -> list: def bbox_from_obj(obj: bpy.types.Object, index: int = 1) -> list:
""" Generate a bounding box for a given object by using its world matrix """ Generate a bounding box for a given object by using its world matrix
:param obj: target object :param obj: target object
:type obj: bpy.types.Object :type obj: bpy.types.Object
:param radius: bounding box radius :param index: indice offset
:type radius: float :type index: int
:return: list of 8 points [(x,y,z),...] :return: list of 8 points [(x,y,z),...], list of 12 link between these points [(1,2),...]
""" """
radius = 1.0 # Radius of the bounding box
index = 8*index
vertex_indices = (
(0+index, 1+index), (0+index, 2+index), (1+index, 3+index), (2+index, 3+index),
(4+index, 5+index), (4+index, 6+index), (5+index, 7+index), (6+index, 7+index),
(0+index, 4+index), (1+index, 5+index), (2+index, 6+index), (3+index, 7+index))
if obj.type == 'EMPTY':
radius = obj.empty_display_size
elif obj.type == 'LIGHT':
radius = obj.data.shadow_soft_size
elif obj.type == 'LIGHT_PROBE':
radius = obj.data.influence_distance
elif obj.type == 'CAMERA':
radius = obj.data.display_size
elif hasattr(obj, 'bound_box'):
vertex_indices = (
(0+index, 1+index), (1+index, 2+index),
(2+index, 3+index), (0+index, 3+index),
(4+index, 5+index), (5+index, 6+index),
(6+index, 7+index), (4+index, 7+index),
(0+index, 4+index), (1+index, 5+index),
(2+index, 6+index), (3+index, 7+index))
vertex_pos = get_bb_coords_from_obj(obj)
return vertex_pos, vertex_indices
coords = [ coords = [
(-radius, -radius, -radius), (+radius, -radius, -radius), (-radius, -radius, -radius), (+radius, -radius, -radius),
(-radius, +radius, -radius), (+radius, +radius, -radius), (-radius, +radius, -radius), (+radius, +radius, -radius),
@ -112,9 +138,32 @@ def bbox_from_obj(obj: bpy.types.Object, radius: float) -> list:
base = obj.matrix_world base = obj.matrix_world
bbox_corners = [base @ mathutils.Vector(corner) for corner in coords] bbox_corners = [base @ mathutils.Vector(corner) for corner in coords]
return [(point.x, point.y, point.z) vertex_pos = [(point.x, point.y, point.z) for point in bbox_corners]
for point in bbox_corners]
return vertex_pos, vertex_indices
def bbox_from_instance_collection(ic: bpy.types.Object, index: int = 0) -> list:
""" Generate a bounding box for a given instance collection by using its objects
:param ic: target instance collection
:type ic: bpy.types.Object
:param index: indice offset
:type index: int
:return: list of 8*objs points [(x,y,z),...], tuple of 12*objs link between these points [(1,2),...]
"""
vertex_pos = []
vertex_indices = ()
for obj_index, obj in enumerate(ic.instance_collection.objects):
vertex_pos_temp, vertex_indices_temp = bbox_from_obj(obj, index=index+obj_index)
vertex_pos += vertex_pos_temp
vertex_indices += vertex_indices_temp
bbox_corners = [ic.matrix_world @ mathutils.Vector(vertex) for vertex in vertex_pos]
vertex_pos = [(point.x, point.y, point.z) for point in bbox_corners]
return vertex_pos, vertex_indices
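The index argument shifts a box's edge indices by 8 per object so that several bounding boxes can be packed into a single vertex/index buffer. The arithmetic in isolation (pure Python, no Blender required):

# Sketch of the 8-vertex offset scheme used by bbox_from_obj()/bbox_from_instance_collection():
def box_edges(index: int = 0):
    base = 8 * index  # each box owns 8 consecutive vertices
    return (
        (0+base, 1+base), (0+base, 2+base), (1+base, 3+base), (2+base, 3+base),
        (4+base, 5+base), (4+base, 6+base), (5+base, 7+base), (6+base, 7+base),
        (0+base, 4+base), (1+base, 5+base), (2+base, 6+base), (3+base, 7+base))

edges = box_edges(0) + box_edges(1)   # two boxes, 24 edges, indices 0..15
assert max(max(e) for e in edges) == 15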
def generate_user_camera() -> list:
    """ Generate a basic camera representation of the user point of view
@ -175,7 +224,7 @@ def get_bb_coords_from_obj(object: bpy.types.Object, instance: bpy.types.Object
bbox_corners = [base @ mathutils.Vector( bbox_corners = [base @ mathutils.Vector(
corner) for corner in object.bound_box] corner) for corner in object.bound_box]
return [(point.x, point.y, point.z) for point in bbox_corners] return [(point.x, point.y, point.z) for point in bbox_corners]
@ -203,6 +252,13 @@ class Widget(object):
""" """
return True return True
def configure_bgl(self):
bgl.glLineWidth(2.)
bgl.glEnable(bgl.GL_DEPTH_TEST)
bgl.glEnable(bgl.GL_BLEND)
bgl.glEnable(bgl.GL_LINE_SMOOTH)
def draw(self): def draw(self):
"""How to draw the widget """How to draw the widget
""" """
@ -256,11 +312,6 @@ class UserFrustumWidget(Widget):
{"pos": positions}, {"pos": positions},
indices=self.indices) indices=self.indices)
bgl.glLineWidth(2.)
bgl.glEnable(bgl.GL_DEPTH_TEST)
bgl.glEnable(bgl.GL_BLEND)
bgl.glEnable(bgl.GL_LINE_SMOOTH)
shader.bind() shader.bind()
shader.uniform_float("color", self.data.get('color')) shader.uniform_float("color", self.data.get('color'))
batch.draw(shader) batch.draw(shader)
@ -272,6 +323,8 @@ class UserSelectionWidget(Widget):
username): username):
self.username = username self.username = username
self.settings = bpy.context.window_manager.session self.settings = bpy.context.window_manager.session
self.current_selection_ids = []
self.current_selected_objects = []
@property @property
def data(self): def data(self):
@ -281,6 +334,15 @@ class UserSelectionWidget(Widget):
else: else:
return None return None
@property
def selected_objects(self):
user_selection = self.data.get('selected_objects')
if self.current_selection_ids != user_selection:
self.current_selected_objects = [find_from_attr("uuid", uid, bpy.data.objects) for uid in user_selection]
self.current_selection_ids = user_selection
return self.current_selected_objects
def poll(self): def poll(self):
if self.data is None: if self.data is None:
return False return False
@ -295,48 +357,31 @@ class UserSelectionWidget(Widget):
            self.settings.enable_presence

    def draw(self):
-        user_selection = self.data.get('selected_objects')
-        for select_ob in user_selection:
-            ob = find_from_attr("uuid", select_ob, bpy.data.objects)
-            if not ob:
-                return
-
-            vertex_pos = bbox_from_obj(ob, 1.0)
-            vertex_indices = ((0, 1), (0, 2), (1, 3), (2, 3),
-                              (4, 5), (4, 6), (5, 7), (6, 7),
-                              (0, 4), (1, 5), (2, 6), (3, 7))
-
-            if ob.instance_collection:
-                for obj in ob.instance_collection.objects:
-                    if obj.type == 'MESH' and hasattr(obj, 'bound_box'):
-                        vertex_pos = get_bb_coords_from_obj(obj, instance=ob)
-                        break
-            elif ob.type == 'EMPTY':
-                vertex_pos = bbox_from_obj(ob, ob.empty_display_size)
-            elif ob.type == 'LIGHT':
-                vertex_pos = bbox_from_obj(ob, ob.data.shadow_soft_size)
-            elif ob.type == 'LIGHT_PROBE':
-                vertex_pos = bbox_from_obj(ob, ob.data.influence_distance)
-            elif ob.type == 'CAMERA':
-                vertex_pos = bbox_from_obj(ob, ob.data.display_size)
-            elif hasattr(ob, 'bound_box'):
-                vertex_indices = (
-                    (0, 1), (1, 2), (2, 3), (0, 3),
-                    (4, 5), (5, 6), (6, 7), (4, 7),
-                    (0, 4), (1, 5), (2, 6), (3, 7))
-                vertex_pos = get_bb_coords_from_obj(ob)
-
-            shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
-            batch = batch_for_shader(
-                shader,
-                'LINES',
-                {"pos": vertex_pos},
-                indices=vertex_indices)
-
-            shader.bind()
-            shader.uniform_float("color", self.data.get('color'))
-            batch.draw(shader)
+        vertex_pos = []
+        vertex_ind = []
+        collection_offset = 0
+        for obj_index, obj in enumerate(self.selected_objects):
+            if obj is None:
+                continue
+            obj_index += collection_offset
+            if hasattr(obj, 'instance_collection') and obj.instance_collection:
+                bbox_pos, bbox_ind = bbox_from_instance_collection(obj, index=obj_index)
+                collection_offset += len(obj.instance_collection.objects)-1
+            else:
+                bbox_pos, bbox_ind = bbox_from_obj(obj, index=obj_index)
+            vertex_pos += bbox_pos
+            vertex_ind += bbox_ind
+
+        shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
+        batch = batch_for_shader(
+            shader,
+            'LINES',
+            {"pos": vertex_pos},
+            indices=vertex_ind)
+
+        shader.bind()
+        shader.uniform_float("color", self.data.get('color'))
+        batch.draw(shader)
class UserNameWidget(Widget): class UserNameWidget(Widget):
draw_type = 'POST_PIXEL' draw_type = 'POST_PIXEL'
@ -380,6 +425,62 @@ class UserNameWidget(Widget):
blf.color(0, color[0], color[1], color[2], color[3]) blf.color(0, color[0], color[1], color[2], color[3])
blf.draw(0, self.username) blf.draw(0, self.username)
class UserModeWidget(Widget):
draw_type = 'POST_PIXEL'
def __init__(
self,
username):
self.username = username
self.settings = bpy.context.window_manager.session
self.preferences = get_preferences()
@property
def data(self):
user = session.online_users.get(self.username)
if user:
return user.get('metadata')
else:
return None
def poll(self):
if self.data is None:
return False
scene_current = self.data.get('scene_current')
mode_current = self.data.get('mode_current')
user_selection = self.data.get('selected_objects')
return (scene_current == bpy.context.scene.name or
mode_current == bpy.context.mode or
self.settings.presence_show_far_user) and \
user_selection and \
self.settings.presence_show_mode and \
self.settings.enable_presence
def draw(self):
user_selection = self.data.get('selected_objects')
area, region, rv3d = view3d_find()
viewport_coord = project_to_viewport(region, rv3d, (0, 0))
obj = find_from_attr("uuid", user_selection[0], bpy.data.objects)
if not obj:
return
mode_current = self.data.get('mode_current')
color = self.data.get('color')
origin_coord = project_to_screen(obj.location)
distance_viewport_object = math.sqrt((viewport_coord[0]-obj.location[0])**2+(viewport_coord[1]-obj.location[1])**2+(viewport_coord[2]-obj.location[2])**2)
if distance_viewport_object > self.preferences.presence_mode_distance :
return
if origin_coord :
blf.position(0, origin_coord[0]+8, origin_coord[1]-15, 0)
blf.size(0, 16, 72)
blf.color(0, color[0], color[1], color[2], color[3])
blf.draw(0, mode_current)
class SessionStatusWidget(Widget): class SessionStatusWidget(Widget):
draw_type = 'POST_PIXEL' draw_type = 'POST_PIXEL'
@ -462,6 +563,7 @@ class DrawFactory(object):
try: try:
for widget in self.widgets.values(): for widget in self.widgets.values():
if widget.draw_type == 'POST_VIEW' and widget.poll(): if widget.draw_type == 'POST_VIEW' and widget.poll():
widget.configure_bgl()
widget.draw() widget.draw()
except Exception as e: except Exception as e:
logging.error( logging.error(
@ -471,6 +573,7 @@ class DrawFactory(object):
try: try:
for widget in self.widgets.values(): for widget in self.widgets.values():
if widget.draw_type == 'POST_PIXEL' and widget.poll(): if widget.draw_type == 'POST_PIXEL' and widget.poll():
widget.configure_bgl()
widget.draw() widget.draw()
except Exception as e: except Exception as e:
logging.error( logging.error(
@ -483,6 +586,7 @@ this.renderer = DrawFactory()
def register(): def register():
this.renderer.register_handlers() this.renderer.register_handlers()
this.renderer.add_widget("session_status", SessionStatusWidget()) this.renderer.add_widget("session_status", SessionStatusWidget())

multi_user/shared_data.py (new file)

@ -0,0 +1,48 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
from replication.constants import STATE_INITIAL
class SessionData():
""" A structure to share easily the current session data across the addon
modules.
This object will completely replace the Singleton lying in replication
interface module.
"""
def __init__(self):
self.repository = None # The current repository
self.remote = None # The active remote
self.server = None
self.applied_updates = []
@property
def state(self):
if self.remote is None:
return STATE_INITIAL
else:
return self.remote.connection_status
def clear(self):
self.remote = None
self.repository = None
self.server = None
self.applied_updates = []
session = SessionData()
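Other modules import this module and mutate the single SessionData instance; for example, the ApplyTimer records incoming updates in applied_updates so the depsgraph handler can ignore them. A usage sketch (the helper names below are illustrative, the attribute access mirrors timers.py):

# Sketch: sharing the module-level singleton across add-on modules.
from . import shared_data

def mark_applied(node_uuid: str):
    # remember updates coming from the network so the depsgraph handler can skip them
    shared_data.session.applied_updates.append(node_uuid)

def reset_session_data():
    shared_data.session.clear()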


@ -27,10 +27,12 @@ from replication.interface import session
from replication import porcelain from replication import porcelain
from . import operators, utils from . import operators, utils
from .presence import (UserFrustumWidget, UserNameWidget, UserSelectionWidget, from .presence import (UserFrustumWidget, UserNameWidget, UserModeWidget, UserSelectionWidget,
generate_user_camera, get_view_matrix, refresh_3d_view, generate_user_camera, get_view_matrix, refresh_3d_view,
refresh_sidebar_view, renderer) refresh_sidebar_view, renderer)
from . import shared_data
this = sys.modules[__name__] this = sys.modules[__name__]
# Registered timers # Registered timers
@ -39,7 +41,8 @@ this.registry = dict()
def is_annotating(context: bpy.types.Context):
    """ Check if the annotate mode is enabled
    """
-    return bpy.context.workspace.tools.from_space_view3d_mode('OBJECT', create=False).idname == 'builtin.annotate'
+    active_tool = bpy.context.workspace.tools.from_space_view3d_mode('OBJECT', create=False)
+    return (active_tool and active_tool.idname == 'builtin.annotate')
class Timer(object): class Timer(object):
@ -72,6 +75,7 @@ class Timer(object):
except Exception as e: except Exception as e:
logging.error(e) logging.error(e)
self.unregister() self.unregister()
traceback.print_exc()
session.disconnect(reason=f"Error during timer {self.id} execution") session.disconnect(reason=f"Error during timer {self.id} execution")
else: else:
if self.is_running: if self.is_running:
@ -88,7 +92,7 @@ class Timer(object):
if bpy.app.timers.is_registered(self.main): if bpy.app.timers.is_registered(self.main):
logging.info(f"Unregistering {self.id}") logging.info(f"Unregistering {self.id}")
bpy.app.timers.unregister(self.main) bpy.app.timers.unregister(self.main)
del this.registry[self.id] del this.registry[self.id]
self.is_running = False self.is_running = False
@ -99,7 +103,7 @@ class SessionBackupTimer(Timer):
def execute(self): def execute(self):
session.save(self._filepath) session.repository.dumps(self._filepath)
class SessionListenTimer(Timer): class SessionListenTimer(Timer):
def execute(self): def execute(self):
@ -108,32 +112,76 @@ class SessionListenTimer(Timer):
class ApplyTimer(Timer):
    def execute(self):
        if session and session.state == STATE_ACTIVE:
-            nodes = session.list()
-
-            for node in nodes:
-                node_ref = session.repository.get_node(node)
+            for node in session.repository.graph.keys():
+                node_ref = session.repository.graph.get(node)
                if node_ref.state == FETCHED:
                    try:
+                        shared_data.session.applied_updates.append(node)
                        porcelain.apply(session.repository, node)
                    except Exception as e:
                        logging.error(f"Fail to apply {node_ref.uuid}")
                        traceback.print_exc()
                    else:
-                        if node_ref.bl_reload_parent:
-                            for parent in session.repository.get_parents(node):
+                        impl = session.repository.rdp.get_implementation(node_ref.instance)
+                        if impl.bl_reload_parent:
+                            for parent in session.repository.graph.get_parents(node):
                                logging.debug("Refresh parent {node}")
                                porcelain.apply(session.repository,
                                                parent.uuid,
                                                force=True)
+                        if hasattr(impl, 'bl_reload_child') and impl.bl_reload_child:
+                            for dep in node_ref.dependencies:
+                                porcelain.apply(session.repository,
+                                                dep,
+                                                force=True)
class AnnotationUpdates(Timer):
def __init__(self, timeout=1):
self._annotating = False
self._settings = utils.get_preferences()
super().__init__(timeout)
def execute(self):
if session and session.state == STATE_ACTIVE:
ctx = bpy.context
annotation_gp = ctx.scene.grease_pencil
if annotation_gp and not annotation_gp.uuid:
ctx.scene.update_tag()
# if an annotation exist and is tracked
if annotation_gp and annotation_gp.uuid:
registered_gp = session.repository.graph.get(annotation_gp.uuid)
if is_annotating(bpy.context):
# try to get the right on it
if registered_gp.owner == RP_COMMON:
self._annotating = True
logging.debug(
"Getting the right on the annotation GP")
porcelain.lock(session.repository,
[registered_gp.uuid],
ignore_warnings=True,
affect_dependencies=False)
if registered_gp.owner == self._settings.username:
porcelain.commit(session.repository, annotation_gp.uuid)
porcelain.push(session.repository, 'origin', annotation_gp.uuid)
elif self._annotating:
porcelain.unlock(session.repository,
[registered_gp.uuid],
ignore_warnings=True,
affect_dependencies=False)
self._annotating = False
class DynamicRightSelectTimer(Timer): class DynamicRightSelectTimer(Timer):
def __init__(self, timeout=.1): def __init__(self, timeout=.1):
super().__init__(timeout) super().__init__(timeout)
self._last_selection = [] self._last_selection = set()
self._user = None self._user = None
self._annotating = False
def execute(self): def execute(self):
settings = utils.get_preferences() settings = utils.get_preferences()
@ -144,88 +192,47 @@ class DynamicRightSelectTimer(Timer):
self._user = session.online_users.get(settings.username) self._user = session.online_users.get(settings.username)
if self._user: if self._user:
ctx = bpy.context current_selection = set(utils.get_selected_objects(
annotation_gp = ctx.scene.grease_pencil
if annotation_gp and not annotation_gp.uuid:
ctx.scene.update_tag()
# if an annotation exist and is tracked
if annotation_gp and annotation_gp.uuid:
registered_gp = session.repository.get_node(annotation_gp.uuid)
if is_annotating(bpy.context):
# try to get the right on it
if registered_gp.owner == RP_COMMON:
self._annotating = True
logging.debug(
"Getting the right on the annotation GP")
session.change_owner(
registered_gp.uuid,
settings.username,
ignore_warnings=True,
affect_dependencies=False)
if registered_gp.owner == settings.username:
gp_node = session.repository.get_node(annotation_gp.uuid)
if gp_node.has_changed():
porcelain.commit(session.repository, gp_node.uuid)
session.push(gp_node.uuid, check_data=False)
elif self._annotating:
session.change_owner(
registered_gp.uuid,
RP_COMMON,
ignore_warnings=True,
affect_dependencies=False)
current_selection = utils.get_selected_objects(
bpy.context.scene, bpy.context.scene,
bpy.data.window_managers['WinMan'].windows[0].view_layer bpy.data.window_managers['WinMan'].windows[0].view_layer
) ))
if current_selection != self._last_selection: if current_selection != self._last_selection:
obj_common = [ to_lock = list(current_selection.difference(self._last_selection))
o for o in self._last_selection if o not in current_selection] to_release = list(self._last_selection.difference(current_selection))
obj_ours = [ instances_to_lock = list()
o for o in current_selection if o not in self._last_selection]
# change old selection right to common for node_id in to_lock:
for obj in obj_common: node = session.repository.graph.get(node_id)
node = session.repository.get_node(obj) if node and hasattr(node,'data'):
instance_mode = node.data.get('instance_type')
if instance_mode and instance_mode == 'COLLECTION':
to_lock.remove(node_id)
instances_to_lock.append(node_id)
if instances_to_lock:
try:
porcelain.lock(session.repository,
instances_to_lock,
ignore_warnings=True,
affect_dependencies=False)
except NonAuthorizedOperationError as e:
logging.warning(e)
if node and (node.owner == settings.username or node.owner == RP_COMMON): if to_release:
recursive = True try:
if node.data and 'instance_type' in node.data.keys(): porcelain.unlock(session.repository,
recursive = node.data['instance_type'] != 'COLLECTION' to_release,
try: ignore_warnings=True,
session.change_owner( affect_dependencies=True)
node.uuid, except NonAuthorizedOperationError as e:
RP_COMMON, logging.warning(e)
ignore_warnings=True, if to_lock:
affect_dependencies=recursive) try:
except NonAuthorizedOperationError: porcelain.lock(session.repository,
logging.warning( to_lock,
f"Not authorized to change {node} owner") ignore_warnings=True,
affect_dependencies=True)
# change new selection to our except NonAuthorizedOperationError as e:
for obj in obj_ours: logging.warning(e)
node = session.repository.get_node(obj)
if node and node.owner == RP_COMMON:
recursive = True
if node.data and 'instance_type' in node.data.keys():
recursive = node.data['instance_type'] != 'COLLECTION'
try:
session.change_owner(
node.uuid,
settings.username,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(
f"Not authorized to change {node} owner")
else:
return
                self._last_selection = current_selection
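The new selection tracking works on sets: objects that entered the selection get locked, objects that left it get released. The set arithmetic in isolation (pure Python; the uuids below are placeholders):

# Illustration of the set differences used above:
last_selection = {"uuid-a", "uuid-b"}
current_selection = {"uuid-b", "uuid-c"}

to_lock = list(current_selection.difference(last_selection))     # newly selected -> lock
to_release = list(last_selection.difference(current_selection))  # deselected -> unlock
assert to_lock == ["uuid-c"] and to_release == ["uuid-a"]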
@ -233,32 +240,29 @@ class DynamicRightSelectTimer(Timer):
'selected_objects': current_selection 'selected_objects': current_selection
} }
session.update_user_metadata(user_metadata) porcelain.update_user_metadata(session.repository, user_metadata)
logging.debug("Update selection") logging.debug("Update selection")
                # Fix deselection until right management refactoring (with Roles concepts)
                if len(current_selection) == 0:
owned_keys = session.list( owned_keys = [k for k, v in session.repository.graph.items() if v.owner==settings.username]
filter_owner=settings.username) if owned_keys:
for key in owned_keys:
node = session.repository.get_node(key)
try: try:
session.change_owner( porcelain.unlock(session.repository,
key, owned_keys,
RP_COMMON, ignore_warnings=True,
ignore_warnings=True, affect_dependencies=True)
affect_dependencies=recursive) except NonAuthorizedOperationError as e:
except NonAuthorizedOperationError: logging.warning(e)
logging.warning(
f"Not authorized to change {key} owner")
# Objects selectability
for obj in bpy.data.objects: for obj in bpy.data.objects:
object_uuid = getattr(obj, 'uuid', None) object_uuid = getattr(obj, 'uuid', None)
if object_uuid: if object_uuid:
node = session.repository.get_node(object_uuid) is_selectable = not session.repository.is_node_readonly(object_uuid)
is_selectable = not node.owner in [settings.username, RP_COMMON]
if obj.hide_select != is_selectable: if obj.hide_select != is_selectable:
obj.hide_select = is_selectable obj.hide_select = is_selectable
shared_data.session.applied_updates.append(object_uuid)
class ClientUpdate(Timer): class ClientUpdate(Timer):
@ -272,7 +276,8 @@ class ClientUpdate(Timer):
if session and renderer: if session and renderer:
if session.state in [STATE_ACTIVE, STATE_LOBBY]: if session.state in [STATE_ACTIVE, STATE_LOBBY]:
local_user = session.online_users.get(settings.username) local_user = session.online_users.get(
settings.username)
if not local_user: if not local_user:
return return
@ -307,20 +312,24 @@ class ClientUpdate(Timer):
settings.client_color.b, settings.client_color.b,
1), 1),
'frame_current': bpy.context.scene.frame_current, 'frame_current': bpy.context.scene.frame_current,
'scene_current': scene_current 'scene_current': scene_current,
'mode_current': bpy.context.mode
} }
session.update_user_metadata(metadata) porcelain.update_user_metadata(session.repository, metadata)
# Update client representation # Update client representation
# Update client current scene # Update client current scene
elif scene_current != local_user_metadata['scene_current']: elif scene_current != local_user_metadata['scene_current']:
local_user_metadata['scene_current'] = scene_current local_user_metadata['scene_current'] = scene_current
session.update_user_metadata(local_user_metadata) porcelain.update_user_metadata(session.repository, local_user_metadata)
elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']: elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']:
local_user_metadata['view_corners'] = current_view_corners local_user_metadata['view_corners'] = current_view_corners
local_user_metadata['view_matrix'] = get_view_matrix( local_user_metadata['view_matrix'] = get_view_matrix(
) )
session.update_user_metadata(local_user_metadata) porcelain.update_user_metadata(session.repository, local_user_metadata)
elif bpy.context.mode != local_user_metadata['mode_current']:
local_user_metadata['mode_current'] = bpy.context.mode
porcelain.update_user_metadata(session.repository, local_user_metadata)
class SessionStatusUpdate(Timer): class SessionStatusUpdate(Timer):
@ -348,6 +357,7 @@ class SessionUserSync(Timer):
renderer.remove_widget(f"{user.username}_cam") renderer.remove_widget(f"{user.username}_cam")
renderer.remove_widget(f"{user.username}_select") renderer.remove_widget(f"{user.username}_select")
renderer.remove_widget(f"{user.username}_name") renderer.remove_widget(f"{user.username}_name")
renderer.remove_widget(f"{user.username}_mode")
ui_users.remove(index) ui_users.remove(index)
break break
@ -363,6 +373,8 @@ class SessionUserSync(Timer):
f"{user}_select", UserSelectionWidget(user)) f"{user}_select", UserSelectionWidget(user))
renderer.add_widget( renderer.add_widget(
f"{user}_name", UserNameWidget(user)) f"{user}_name", UserNameWidget(user))
renderer.add_widget(
f"{user}_mode", UserModeWidget(user))
class MainThreadExecutor(Timer): class MainThreadExecutor(Timer):


@ -16,7 +16,9 @@
# ##### END GPL LICENSE BLOCK ##### # ##### END GPL LICENSE BLOCK #####
from logging import log
import bpy import bpy
import bpy.utils.previews
from .utils import get_preferences, get_expanded_icon, get_folder_size, get_state_str from .utils import get_preferences, get_expanded_icon, get_folder_size, get_state_str
from replication.constants import (ADDED, ERROR, FETCHED, from replication.constants import (ADDED, ERROR, FETCHED,
@ -60,7 +62,41 @@ def printProgressBar(iteration, total, prefix='', suffix='', decimals=1, length=
bar = fill * filledLength + fill_empty * (length - filledLength) bar = fill * filledLength + fill_empty * (length - filledLength)
return f"{prefix} |{bar}| {iteration}/{total}{suffix}" return f"{prefix} |{bar}| {iteration}/{total}{suffix}"
def get_mode_icon(mode_name: str) -> str:
""" given a mode name retrieve a built-in icon
"""
mode_icon = "NONE"
if mode_name == "OBJECT" :
mode_icon = "OBJECT_DATAMODE"
elif mode_name == "EDIT_MESH" :
mode_icon = "EDITMODE_HLT"
elif mode_name == 'EDIT_CURVE':
mode_icon = "CURVE_DATA"
elif mode_name == 'EDIT_SURFACE':
mode_icon = "SURFACE_DATA"
elif mode_name == 'EDIT_TEXT':
mode_icon = "FILE_FONT"
elif mode_name == 'EDIT_ARMATURE':
mode_icon = "ARMATURE_DATA"
elif mode_name == 'EDIT_METABALL':
mode_icon = "META_BALL"
elif mode_name == 'EDIT_LATTICE':
mode_icon = "LATTICE_DATA"
elif mode_name == 'POSE':
mode_icon = "POSE_HLT"
elif mode_name == 'SCULPT':
mode_icon = "SCULPTMODE_HLT"
elif mode_name == 'PAINT_WEIGHT':
mode_icon = "WPAINT_HLT"
elif mode_name == 'PAINT_VERTEX':
mode_icon = "VPAINT_HLT"
elif mode_name == 'PAINT_TEXTURE':
mode_icon = "TPAINT_HLT"
elif mode_name == 'PARTICLE':
mode_icon = "PARTICLES"
elif mode_name == 'PAINT_GPENCIL' or mode_name =='EDIT_GPENCIL' or mode_name =='SCULPT_GPENCIL' or mode_name =='WEIGHT_GPENCIL' or mode_name =='VERTEX_GPENCIL':
mode_icon = "GREASEPENCIL"
return mode_icon
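The elif chain above maps Blender mode identifiers to built-in icon names. An equivalent table-driven form (a suggestion, not part of the patch) keeps the mapping in one dict:

# Equivalent table-driven form of get_mode_icon():
MODE_ICONS = {
    "OBJECT": "OBJECT_DATAMODE",
    "EDIT_MESH": "EDITMODE_HLT",
    "EDIT_CURVE": "CURVE_DATA",
    "EDIT_SURFACE": "SURFACE_DATA",
    "EDIT_TEXT": "FILE_FONT",
    "EDIT_ARMATURE": "ARMATURE_DATA",
    "EDIT_METABALL": "META_BALL",
    "EDIT_LATTICE": "LATTICE_DATA",
    "POSE": "POSE_HLT",
    "SCULPT": "SCULPTMODE_HLT",
    "PAINT_WEIGHT": "WPAINT_HLT",
    "PAINT_VERTEX": "VPAINT_HLT",
    "PAINT_TEXTURE": "TPAINT_HLT",
    "PARTICLE": "PARTICLES",
}
GPENCIL_MODES = {"PAINT_GPENCIL", "EDIT_GPENCIL", "SCULPT_GPENCIL", "WEIGHT_GPENCIL", "VERTEX_GPENCIL"}

def get_mode_icon(mode_name: str) -> str:
    if mode_name in GPENCIL_MODES:
        return "GREASEPENCIL"
    return MODE_ICONS.get(mode_name, "NONE")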
class SESSION_PT_settings(bpy.types.Panel):
    """Settings panel"""
    bl_idname = "MULTIUSER_SETTINGS_PT_panel"

@@ -71,155 +107,132 @@ class SESSION_PT_settings(bpy.types.Panel):
    def draw_header(self, context):
        layout = self.layout
+        settings = get_preferences()
+
+        from multi_user import icons
+        offline_icon = icons.icons_col["session_status_offline"]
+        waiting_icon = icons.icons_col["session_status_waiting"]
+        online_icon = icons.icons_col["session_status_online"]
+
        if session and session.state != STATE_INITIAL:
            cli_state = session.state
            state = session.state
-            connection_icon = "KEYTYPE_MOVING_HOLD_VEC"
+            connection_icon = offline_icon

            if state == STATE_ACTIVE:
-                connection_icon = 'PROP_ON'
+                connection_icon = online_icon
            else:
-                connection_icon = 'PROP_CON'
+                connection_icon = waiting_icon

-            layout.label(text=f"Session - {get_state_str(cli_state)}", icon=connection_icon)
+            layout.label(text=f"{str(settings.server_name)} - {get_state_str(cli_state)}", icon_value=connection_icon.icon_id)
        else:
-            layout.label(text=f"Session - v{__version__}",icon="PROP_OFF")
+            layout.label(text=f"Multi-user - v{__version__}", icon="ANTIALIASED")
def draw(self, context): def draw(self, context):
layout = self.layout layout = self.layout
row = layout.row()
runtime_settings = context.window_manager.session runtime_settings = context.window_manager.session
settings = get_preferences() settings = get_preferences()
if hasattr(context.window_manager, 'session'): if settings.is_first_launch:
# STATE INITIAL # USER SETTINGS
if not session \ row = layout.row()
or (session and session.state == STATE_INITIAL): row.label(text="1. Enter your username and color:")
pass row = layout.row()
else: split = row.split(factor=0.7, align=True)
progress = session.state_progress split.prop(settings, "username", text="")
row = layout.row() split.prop(settings, "client_color", text="")
current_state = session.state # DOC
info_msg = None row = layout.row()
row.label(text="2. New here ? See the doc:")
row = layout.row()
row.operator("doc.get", text="Documentation", icon="HELP")
# START
row = layout.row()
row.label(text="3: Start the Multi-user:")
row = layout.row()
row.scale_y = 2
row.operator("firstlaunch.verify", text="Continue")
if not settings.is_first_launch:
if hasattr(context.window_manager, 'session'):
# STATE INITIAL
if not session \
or (session and session.state == STATE_INITIAL):
layout = self.layout
settings = get_preferences()
server_preset = settings.server_preset
selected_server = context.window_manager.server_index if context.window_manager.server_index<=len(server_preset)-1 else 0
active_server_name = server_preset[selected_server].name if len(server_preset)>=1 else ""
is_server_selected = True if active_server_name else False
if current_state in [STATE_ACTIVE]: # SERVER LIST
row = row.grid_flow(row_major=True, columns=0, even_columns=True, even_rows=False, align=True) row = layout.row()
row.prop(settings.sync_flags, "sync_render_settings",text="",icon_only=True, icon='SCENE') box = row.box()
row.prop(settings.sync_flags, "sync_during_editmode", text="",icon_only=True, icon='EDITMODE_HLT') box.scale_y = 0.7
row.prop(settings.sync_flags, "sync_active_camera", text="",icon_only=True, icon='OBJECT_DATAMODE') split = box.split(factor=0.7)
split.label(text="Server")
split.label(text="Online")
row= layout.row() col = row.column(align=True)
col.operator("session.get_info", icon="FILE_REFRESH", text="")
if current_state == STATE_LOBBY: row = layout.row()
info_msg = "Waiting for the session to start." col = row.column(align=True)
col.template_list("SESSION_UL_network", "", settings, "server_preset", context.window_manager, "server_index")
col.separator()
connectOp = col.row()
connectOp.enabled =is_server_selected
connectOp.operator("session.connect", text="Connect")
if info_msg: col = row.column(align=True)
info_box = row.box() col.operator("session.preset_server_add", icon="ADD", text="") # TODO : add conditions (need a name, etc..)
info_box.row().label(text=info_msg,icon='INFO') row_visible = col.row(align=True)
col_visible = row_visible.column(align=True)
col_visible.enabled = is_server_selected
col_visible.operator("session.preset_server_remove", icon="REMOVE", text="").target_server_name = active_server_name
col_visible.separator()
col_visible.operator("session.preset_server_edit", icon="GREASEPENCIL", text="").target_server_name = active_server_name
# Progress bar else:
if current_state in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]: exitbutton = layout.row()
info_box = row.box() exitbutton.scale_y = 1.5
info_box.row().label(text=printProgressBar( exitbutton.operator("session.stop", icon='QUIT', text="Disconnect")
progress['current'],
progress['total'],
length=16
))
layout.row().operator("session.stop", icon='QUIT', text="Exit") progress = session.state_progress
current_state = session.state
info_msg = None
if current_state == STATE_LOBBY:
usr = session.online_users.get(settings.username)
row= layout.row()
info_msg = "Waiting for the session to start."
if usr and usr['admin']:
info_msg = "Init the session to start."
info_box = layout.row()
info_box.label(text=info_msg,icon='INFO')
init_row = layout.row()
init_row.operator("session.init", icon='TOOL_SETTINGS', text="Init")
else:
info_box = layout.row()
info_box.row().label(text=info_msg,icon='INFO')
class SESSION_PT_settings_network(bpy.types.Panel): # PROGRESS BAR
bl_idname = "MULTIUSER_SETTINGS_NETWORK_PT_panel" if current_state in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]:
bl_label = "Network" row= layout.row()
bl_space_type = 'VIEW_3D' row.label(text=f"Status: {get_state_str(current_state)}")
bl_region_type = 'UI' row= layout.row()
bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel' info_box = row.box()
info_box.label(text=printProgressBar(
progress['current'],
progress['total'],
length=16
))
@classmethod class SESSION_PT_host_settings(bpy.types.Panel):
def poll(cls, context): bl_idname = "MULTIUSER_SETTINGS_HOST_PT_panel"
return not session \ bl_label = "Hosting"
or (session and session.state == 0)
def draw_header(self, context):
self.layout.label(text="", icon='URL')
def draw(self, context):
layout = self.layout
runtime_settings = context.window_manager.session
settings = get_preferences()
# USER SETTINGS
row = layout.row()
row.prop(runtime_settings, "session_mode", expand=True)
row = layout.row()
box = row.box()
if runtime_settings.session_mode == 'HOST':
row = box.row()
row.label(text="Port:")
row.prop(settings, "port", text="")
row = box.row()
row.label(text="Start from:")
row.prop(settings, "init_method", text="")
row = box.row()
row.label(text="Admin password:")
row.prop(runtime_settings, "password", text="")
row = box.row()
row.operator("session.start", text="HOST").host = True
else:
row = box.row()
row.prop(settings, "ip", text="IP")
row = box.row()
row.label(text="Port:")
row.prop(settings, "port", text="")
row = box.row()
row.prop(runtime_settings, "admin", text='Connect as admin', icon='DISCLOSURE_TRI_DOWN' if runtime_settings.admin
else 'DISCLOSURE_TRI_RIGHT')
if runtime_settings.admin:
row = box.row()
row.label(text="Password:")
row.prop(runtime_settings, "password", text="")
row = box.row()
row.operator("session.start", text="CONNECT").host = False
class SESSION_PT_settings_user(bpy.types.Panel):
bl_idname = "MULTIUSER_SETTINGS_USER_PT_panel"
bl_label = "User info"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
@classmethod
def poll(cls, context):
return not session \
or (session and session.state == 0)
def draw_header(self, context):
self.layout.label(text="", icon='USER')
def draw(self, context):
layout = self.layout
runtime_settings = context.window_manager.session
settings = get_preferences()
row = layout.row()
# USER SETTINGS
row.prop(settings, "username", text="name")
row = layout.row()
row.prop(settings, "client_color", text="color")
row = layout.row()
class SESSION_PT_advanced_settings(bpy.types.Panel):
bl_idname = "MULTIUSER_SETTINGS_REPLICATION_PT_panel"
bl_label = "Advanced"
bl_space_type = 'VIEW_3D' bl_space_type = 'VIEW_3D'
bl_region_type = 'UI' bl_region_type = 'UI'
bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel' bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
@ -227,19 +240,82 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
settings = get_preferences()
return not session \ return not session \
or (session and session.state == 0) or (session and session.state == 0) \
and not settings.sidebar_advanced_shown \
and not settings.is_first_launch
def draw_header(self, context):
self.layout.label(text="", icon='NETWORK_DRIVE')
def draw(self, context):
layout = self.layout
settings = get_preferences()
#HOST
host_selection = layout.row().box()
host_selection_row = host_selection.row()
host_selection_row.label(text="Init the session from:")
host_selection_row.prop(settings, "init_method", text="")
host_selection_row = host_selection.row()
host_selection_row.label(text="Port:")
host_selection_row.prop(settings, "host_port", text="")
host_selection_row = host_selection.row()
host_selection_col = host_selection_row.column()
host_selection_col.prop(settings, "host_use_server_password", text="Server password:")
host_selection_col = host_selection_row.column()
host_selection_col.enabled = True if settings.host_use_server_password else False
host_selection_col.prop(settings, "host_server_password", text="")
host_selection_row = host_selection.row()
host_selection_col = host_selection_row.column()
host_selection_col.prop(settings, "host_use_admin_password", text="Admin password:")
host_selection_col = host_selection_row.column()
host_selection_col.enabled = True if settings.host_use_admin_password else False
host_selection_col.prop(settings, "host_admin_password", text="")
host_selection = layout.column()
host_selection.operator("session.host", text="Host")
class SESSION_PT_advanced_settings(bpy.types.Panel):
bl_idname = "MULTIUSER_SETTINGS_REPLICATION_PT_panel"
bl_label = "General Settings"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
settings = get_preferences()
return not session \
or (session and session.state == 0) \
and not settings.sidebar_advanced_shown \
and not settings.is_first_launch
def draw_header(self, context): def draw_header(self, context):
self.layout.label(text="", icon='PREFERENCES') self.layout.label(text="", icon='PREFERENCES')
def draw(self, context): def draw(self, context):
layout = self.layout layout = self.layout
runtime_settings = context.window_manager.session
settings = get_preferences() settings = get_preferences()
#ADVANCED USER INFO
uinfo_section = layout.row().box()
uinfo_section.prop(
settings,
"sidebar_advanced_uinfo_expanded",
text="User Info",
icon=get_expanded_icon(settings.sidebar_advanced_uinfo_expanded),
emboss=False)
if settings.sidebar_advanced_uinfo_expanded:
uinfo_section_row = uinfo_section.row()
uinfo_section_split = uinfo_section_row.split(factor=0.7, align=True)
uinfo_section_split.prop(settings, "username", text="")
uinfo_section_split.prop(settings, "client_color", text="")
#ADVANCED NET
net_section = layout.row().box() net_section = layout.row().box()
net_section.prop( net_section.prop(
settings, settings,
@ -247,12 +323,15 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
text="Network", text="Network",
icon=get_expanded_icon(settings.sidebar_advanced_net_expanded), icon=get_expanded_icon(settings.sidebar_advanced_net_expanded),
emboss=False) emboss=False)
if settings.sidebar_advanced_net_expanded: if settings.sidebar_advanced_net_expanded:
net_section_row = net_section.row() net_section_row = net_section.row()
net_section_row.label(text="Timeout (ms):") net_section_row.label(text="Timeout (ms):")
net_section_row.prop(settings, "connection_timeout", text="") net_section_row.prop(settings, "connection_timeout", text="")
net_section_row = net_section.row()
net_section_row.label(text="Server ping (ms):")
net_section_row.prop(settings, "ping_timeout", text="")
#ADVANCED REPLICATION
replication_section = layout.row().box() replication_section = layout.row().box()
replication_section.prop( replication_section.prop(
settings, settings,
@ -260,16 +339,12 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
text="Replication", text="Replication",
icon=get_expanded_icon(settings.sidebar_advanced_rep_expanded), icon=get_expanded_icon(settings.sidebar_advanced_rep_expanded),
emboss=False) emboss=False)
if settings.sidebar_advanced_rep_expanded: if settings.sidebar_advanced_rep_expanded:
replication_section_row = replication_section.row()
replication_section_row = replication_section.row() replication_section_row = replication_section.row()
replication_section_row.prop(settings.sync_flags, "sync_render_settings") replication_section_row.prop(settings.sync_flags, "sync_render_settings")
replication_section_row = replication_section.row() replication_section_row = replication_section.row()
replication_section_row.prop(settings.sync_flags, "sync_active_camera") replication_section_row.prop(settings.sync_flags, "sync_active_camera")
replication_section_row = replication_section.row() replication_section_row = replication_section.row()
replication_section_row.prop(settings.sync_flags, "sync_during_editmode") replication_section_row.prop(settings.sync_flags, "sync_during_editmode")
replication_section_row = replication_section.row() replication_section_row = replication_section.row()
if settings.sync_flags.sync_during_editmode: if settings.sync_flags.sync_during_editmode:
@ -278,7 +353,7 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
replication_section_row = replication_section.row() replication_section_row = replication_section.row()
replication_section_row.prop(settings, "depsgraph_update_rate", text="Apply delay") replication_section_row.prop(settings, "depsgraph_update_rate", text="Apply delay")
#ADVANCED CACHE
cache_section = layout.row().box() cache_section = layout.row().box()
cache_section.prop( cache_section.prop(
settings, settings,
@ -296,6 +371,8 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
cache_section_row.prop(settings, "clear_memory_filecache", text="") cache_section_row.prop(settings, "clear_memory_filecache", text="")
cache_section_row = cache_section.row() cache_section_row = cache_section.row()
cache_section_row.operator('session.clear_cache', text=f"Clear cache ({get_folder_size(settings.cache_directory)})") cache_section_row.operator('session.clear_cache', text=f"Clear cache ({get_folder_size(settings.cache_directory)})")
#ADVANCED LOG
log_section = layout.row().box() log_section = layout.row().box()
log_section.prop( log_section.prop(
settings, settings,
@ -303,11 +380,11 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
text="Logging", text="Logging",
icon=get_expanded_icon(settings.sidebar_advanced_log_expanded), icon=get_expanded_icon(settings.sidebar_advanced_log_expanded),
emboss=False) emboss=False)
if settings.sidebar_advanced_log_expanded: if settings.sidebar_advanced_log_expanded:
log_section_row = log_section.row() log_section_row = log_section.row()
log_section_row.label(text="Log level:") log_section_row.label(text="Log level:")
log_section_row.prop(settings, 'logging_level', text="") log_section_row.prop(settings, 'logging_level', text="")
class SESSION_PT_user(bpy.types.Panel): class SESSION_PT_user(bpy.types.Panel):
bl_idname = "MULTIUSER_USER_PT_panel" bl_idname = "MULTIUSER_USER_PT_panel"
bl_label = "Online users" bl_label = "Online users"
@ -317,7 +394,8 @@ class SESSION_PT_user(bpy.types.Panel):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return session and session.state in [STATE_ACTIVE, STATE_LOBBY] return session \
and session.state in [STATE_ACTIVE, STATE_LOBBY]
def draw_header(self, context): def draw_header(self, context):
self.layout.label(text="", icon='USER') self.layout.label(text="", icon='USER')
@ -329,22 +407,36 @@ class SESSION_PT_user(bpy.types.Panel):
settings = get_preferences() settings = get_preferences()
active_user = online_users[selected_user] if len( active_user = online_users[selected_user] if len(
online_users)-1 >= selected_user else 0 online_users)-1 >= selected_user else 0
runtime_settings = context.window_manager.session
# Create a simple row. #USER LIST
row = layout.row() col = layout.column(align=True)
row = col.row(align=True)
row = row.split(factor=0.35, align=True)
box = row.box() box = row.box()
split = box.split(factor=0.35) brow = box.row(align=True)
split.label(text="user") brow.label(text="user")
split = split.split(factor=0.5)
split.label(text="location")
split.label(text="frame")
split.label(text="ping")
row = layout.row() row = row.split(factor=0.25, align=True)
layout.template_list("SESSION_UL_users", "", context.window_manager,
box = row.box()
brow = box.row(align=True)
brow.label(text="mode")
box = row.box()
brow = box.row(align=True)
brow.label(text="frame")
box = row.box()
brow = box.row(align=True)
brow.label(text="scene")
box = row.box()
brow = box.row(align=True)
brow.label(text="ping")
row = col.row(align=True)
row.template_list("SESSION_UL_users", "", context.window_manager,
"online_users", context.window_manager, "user_index") "online_users", context.window_manager, "user_index")
#OPERATOR ON USER
if active_user != 0 and active_user.username != settings.username: if active_user != 0 and active_user.username != settings.username:
row = layout.row() row = layout.row()
user_operations = row.split() user_operations = row.split()
@ -376,6 +468,8 @@ class SESSION_UL_users(bpy.types.UIList):
ping = '-' ping = '-'
frame_current = '-' frame_current = '-'
scene_current = '-' scene_current = '-'
mode_current = '-'
mode_icon = 'BLANK1'
status_icon = 'BLANK1' status_icon = 'BLANK1'
if session: if session:
user = session.online_users.get(item.username) user = session.online_users.get(item.username)
@ -385,59 +479,38 @@ class SESSION_UL_users(bpy.types.UIList):
if metadata and 'frame_current' in metadata: if metadata and 'frame_current' in metadata:
frame_current = str(metadata.get('frame_current','-')) frame_current = str(metadata.get('frame_current','-'))
scene_current = metadata.get('scene_current','-') scene_current = metadata.get('scene_current','-')
mode_current = metadata.get('mode_current','-')
mode_current = metadata.get('mode_current','-')
mode_icon = get_mode_icon(mode_current)
user_color = metadata.get('color',[1.0,1.0,1.0,1.0])
item.color = user_color
if user['admin']: if user['admin']:
status_icon = 'FAKE_USER_ON' status_icon = 'FAKE_USER_ON'
split = layout.split(factor=0.35) row = layout.split(factor=0.35, align=True)
split.label(text=item.username, icon=status_icon) entry = row.row(align=True)
split = split.split(factor=0.5) entry.scale_x = 0.05
split.label(text=scene_current) entry.enabled = False
split.label(text=frame_current) entry.prop(item, 'color', text="", event=False, full_event=False)
split.label(text=ping) entry.enabled = True
entry.scale_x = 1.0
entry.label(icon=status_icon, text="")
entry.label(text=item.username)
row = row.split(factor=0.25, align=True)
class SESSION_PT_presence(bpy.types.Panel):
bl_idname = "MULTIUSER_MODULE_PT_panel" entry = row.row()
bl_label = "Presence overlay" entry.label(icon=mode_icon)
bl_space_type = 'VIEW_3D' entry = row.row()
bl_region_type = 'UI' entry.label(text=frame_current)
bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel' entry = row.row()
bl_options = {'DEFAULT_CLOSED'} entry.label(text=scene_current)
entry = row.row()
@classmethod entry.label(text=ping)
def poll(cls, context):
return not session \
or (session and session.state in [STATE_INITIAL, STATE_ACTIVE])
def draw_header(self, context):
self.layout.prop(context.window_manager.session,
"enable_presence", text="",icon='OVERLAY')
def draw(self, context):
layout = self.layout
settings = context.window_manager.session
pref = get_preferences()
layout.active = settings.enable_presence
col = layout.column()
col.prop(settings, "presence_show_session_status")
row = col.column()
row.active = settings.presence_show_session_status
row.prop(pref, "presence_hud_scale", expand=True)
row = col.column(align=True)
row.active = settings.presence_show_session_status
row.prop(pref, "presence_hud_hpos", expand=True)
row.prop(pref, "presence_hud_vpos", expand=True)
col.prop(settings, "presence_show_selected")
col.prop(settings, "presence_show_user")
row = layout.column()
row.active = settings.presence_show_user
row.prop(settings, "presence_show_far_user")
def draw_property(context, parent, property_uuid, level=0): def draw_property(context, parent, property_uuid, level=0):
settings = get_preferences() settings = get_preferences()
runtime_settings = context.window_manager.session item = session.repository.graph.get(property_uuid)
item = session.repository.get_node(property_uuid) type_id = item.data.get('type_id')
area_msg = parent.row(align=True) area_msg = parent.row(align=True)
if item.state == ERROR: if item.state == ERROR:
@ -448,23 +521,25 @@ def draw_property(context, parent, property_uuid, level=0):
line = area_msg.box() line = area_msg.box()
name = item.data['name'] if item.data else item.uuid name = item.data['name'] if item.data else item.uuid
icon = settings.supported_datablocks[type_id].icon if type_id else 'ERROR'
detail_item_box = line.row(align=True) detail_item_box = line.row(align=True)
detail_item_box.label(text="", detail_item_box.label(text="", icon=icon)
icon=settings.supported_datablocks[item.str_type].icon)
detail_item_box.label(text=f"{name}") detail_item_box.label(text=f"{name}")
# Operations # Operations
have_right_to_modify = (item.owner == settings.username or \ have_right_to_modify = (item.owner == settings.username or \
item.owner == RP_COMMON) and item.state != ERROR item.owner == RP_COMMON) and item.state != ERROR
from multi_user import icons
sync_status = icons.icons_col["repository_push"] #TODO: Link all icons to the right sync (push/merge/issue). For issue use "UNLINKED" for icon
# sync_status = icons.icons_col["repository_merge"]
if have_right_to_modify: if have_right_to_modify:
detail_item_box.operator( detail_item_box.operator(
"session.commit", "session.commit",
text="", text="",
icon='TRIA_UP').target = item.uuid icon_value=sync_status.icon_id).target = item.uuid
detail_item_box.separator() detail_item_box.separator()
if item.state in [FETCHED, UP]: if item.state in [FETCHED, UP]:
@ -496,12 +571,40 @@ def draw_property(context, parent, property_uuid, level=0):
else: else:
detail_item_box.label(text="", icon="DECORATE_LOCKED") detail_item_box.label(text="", icon="DECORATE_LOCKED")
class SESSION_PT_sync(bpy.types.Panel):
bl_idname = "MULTIUSER_SYNC_PT_panel"
bl_label = "Synchronize"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
return session \
and session.state in [STATE_ACTIVE]
def draw_header(self, context):
self.layout.label(text="", icon='UV_SYNC_SELECT')
def draw(self, context):
layout = self.layout
settings = get_preferences()
row= layout.row()
row = row.grid_flow(row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
row.prop(settings.sync_flags, "sync_render_settings",text="",icon_only=True, icon='SCENE')
row.prop(settings.sync_flags, "sync_during_editmode", text="",icon_only=True, icon='EDITMODE_HLT')
row.prop(settings.sync_flags, "sync_active_camera", text="",icon_only=True, icon='VIEW_CAMERA')
class SESSION_PT_repository(bpy.types.Panel): class SESSION_PT_repository(bpy.types.Panel):
bl_idname = "MULTIUSER_PROPERTIES_PT_panel" bl_idname = "MULTIUSER_PROPERTIES_PT_panel"
bl_label = "Repository" bl_label = "Repository"
bl_space_type = 'VIEW_3D' bl_space_type = 'VIEW_3D'
bl_region_type = 'UI' bl_region_type = 'UI'
bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel' bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
bl_options = {'DEFAULT_CLOSED'}
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
@ -514,8 +617,8 @@ class SESSION_PT_repository(bpy.types.Panel):
admin = usr['admin'] admin = usr['admin']
return hasattr(context.window_manager, 'session') and \ return hasattr(context.window_manager, 'session') and \
session and \ session and \
(session.state == STATE_ACTIVE or \ session.state == STATE_ACTIVE and \
session.state == STATE_LOBBY and admin) not settings.sidebar_repository_shown
def draw_header(self, context): def draw_header(self, context):
self.layout.label(text="", icon='OUTLINER_OB_GROUP_INSTANCE') self.layout.label(text="", icon='OUTLINER_OB_GROUP_INSTANCE')
@ -529,55 +632,37 @@ class SESSION_PT_repository(bpy.types.Panel):
usr = session.online_users.get(settings.username) usr = session.online_users.get(settings.username)
row = layout.row()
if session.state == STATE_ACTIVE: if session.state == STATE_ACTIVE:
if 'SessionBackupTimer' in registry: if 'SessionBackupTimer' in registry:
row = layout.row()
row.alert = True row.alert = True
row.operator('session.cancel_autosave', icon="CANCEL") row.operator('session.cancel_autosave', icon="CANCEL")
row.alert = False row.alert = False
else: # else:
row.operator('session.save', icon="FILE_TICK") # row.operator('session.save', icon="FILE_TICK")
flow = layout.grid_flow( box = layout.box()
row_major=True, row = box.row()
columns=0, row.prop(runtime_settings, "filter_owned", text="Only show owned data blocks", icon_only=True, icon="DECORATE_UNLOCKED")
even_columns=True, row = box.row()
even_rows=False, row.prop(runtime_settings, "filter_name", text="Filter")
align=True) row = box.row()
for item in settings.supported_datablocks:
col = flow.column(align=True)
col.prop(item, "use_as_filter", text="", icon=item.icon)
row = layout.row(align=True)
row.prop(runtime_settings, "filter_owned", text="Show only owned")
row = layout.row(align=True)
# Properties # Properties
types_filter = [t.type_name for t in settings.supported_datablocks owned_nodes = [k for k, v in session.repository.graph.items() if v.owner==settings.username]
if t.use_as_filter]
key_to_filter = session.list( filtered_node = owned_nodes if runtime_settings.filter_owned else list(session.repository.graph.keys())
filter_owner=settings.username) if runtime_settings.filter_owned else session.list()
client_keys = [key for key in key_to_filter if runtime_settings.filter_name:
if session.repository.get_node(key).str_type filtered_node = [n for n in filtered_node if runtime_settings.filter_name.lower() in session.repository.graph.get(n).data.get('name').lower()]
in types_filter]
if client_keys: if filtered_node:
col = layout.column(align=True) col = layout.column(align=True)
for key in client_keys: for key in filtered_node:
draw_property(context, col, key) draw_property(context, col, key)
else: else:
row.label(text="Empty") layout.row().label(text="Empty")
elif session.state == STATE_LOBBY and usr and usr['admin']:
row.operator("session.init", icon='TOOL_SETTINGS', text="Init")
else:
row.label(text="Waiting to start")
class VIEW3D_PT_overlay_session(bpy.types.Panel): class VIEW3D_PT_overlay_session(bpy.types.Panel):
bl_space_type = 'VIEW_3D' bl_space_type = 'VIEW_3D'
@ -592,37 +677,74 @@ class VIEW3D_PT_overlay_session(bpy.types.Panel):
def draw(self, context): def draw(self, context):
layout = self.layout layout = self.layout
view = context.space_data
overlay = view.overlay
display_all = overlay.show_overlays
col = layout.column()
row = col.row(align=True)
settings = context.window_manager.session settings = context.window_manager.session
pref = get_preferences()
layout.active = settings.enable_presence layout.active = settings.enable_presence
col = layout.column()
col.prop(settings, "presence_show_session_status") row = layout.row()
col.prop(settings, "presence_show_selected") row.prop(settings, "enable_presence",text="Presence Overlay")
col.prop(settings, "presence_show_user")
row = layout.column() row = layout.row()
row.active = settings.presence_show_user row.prop(settings, "presence_show_selected",text="Selected Objects")
row.prop(settings, "presence_show_far_user")
row = layout.row(align=True)
row.prop(settings, "presence_show_user", text="Users camera")
row.prop(settings, "presence_show_mode", text="Users mode")
col = layout.column()
if settings.presence_show_mode or settings.presence_show_user:
row = col.column()
row.prop(pref, "presence_text_distance", expand=True)
row = col.column()
row.prop(settings, "presence_show_far_user", text="Users on different scenes")
col.prop(settings, "presence_show_session_status")
if settings.presence_show_session_status :
split = layout.split()
text_pos = split.column(align=True)
text_pos.active = settings.presence_show_session_status
text_pos.prop(pref, "presence_hud_hpos", expand=True)
text_pos.prop(pref, "presence_hud_vpos", expand=True)
text_scale = split.column()
text_scale.active = settings.presence_show_session_status
text_scale.prop(pref, "presence_hud_scale", expand=True)
class SESSION_UL_network(bpy.types.UIList):
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index, flt_flag):
settings = get_preferences()
server_name = '-'
server_status = 'BLANK1'
server_private = 'BLANK1'
server_name = item.server_name
split = layout.split(factor=0.7)
if item.is_private:
server_private = 'LOCKED'
split.label(text=server_name, icon=server_private)
else:
split.label(text=server_name)
from multi_user import icons
server_status = icons.icons_col["server_offline"]
if item.is_online:
server_status = icons.icons_col["server_online"]
split.label(icon_value=server_status.icon_id)
classes = (
    SESSION_UL_users,
+   SESSION_UL_network,
    SESSION_PT_settings,
-   SESSION_PT_settings_user,
-   SESSION_PT_settings_network,
-   SESSION_PT_presence,
+   SESSION_PT_host_settings,
    SESSION_PT_advanced_settings,
    SESSION_PT_user,
+   SESSION_PT_sync,
    SESSION_PT_repository,
    VIEW3D_PT_overlay_session,
)

register, unregister = bpy.utils.register_classes_factory(classes)

if __name__ == "__main__":

@@ -38,6 +38,14 @@ from replication.constants import (STATE_ACTIVE, STATE_AUTH,
                                    STATE_LOBBY,
                                    CONNECTING)
CLEARED_DATABLOCKS = ['actions', 'armatures', 'cache_files', 'cameras',
'collections', 'curves', 'filepath', 'fonts',
'grease_pencils', 'images', 'lattices', 'libraries',
'lightprobes', 'lights', 'linestyles', 'masks',
'materials', 'meshes', 'metaballs', 'movieclips',
'node_groups', 'objects', 'paint_curves', 'particles',
'scenes', 'shape_keys', 'sounds', 'speakers', 'texts',
'textures', 'volumes', 'worlds']
def find_from_attr(attr_name, attr_value, list):
    for item in list:

@@ -101,17 +109,25 @@ def get_state_str(state):
def clean_scene():
-    for type_name in dir(bpy.data):
-        try:
-            type_collection = getattr(bpy.data, type_name)
-            for item in type_collection:
-                type_collection.remove(item)
-        except:
-            continue
+    for type_name in CLEARED_DATABLOCKS:
+        sub_collection_to_avoid = [
+            bpy.data.linestyles.get('LineStyle'),
+            bpy.data.materials.get('Dots Stroke')
+        ]
+        type_collection = getattr(bpy.data, type_name)
+        items_to_remove = [i for i in type_collection if i not in sub_collection_to_avoid]
+        for item in items_to_remove:
+            try:
+                type_collection.remove(item)
+                logging.info(item.name)
+            except:
+                continue

    # Clear sequencer
    bpy.context.scene.sequence_editor_clear()


def get_selected_objects(scene, active_view_layer):
    return [obj.uuid for obj in scene.objects if obj.select_get(view_layer=active_view_layer)]
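clean_scene() is what gives each datablock test further down a blank file to work from: every test module takes a clear_blend fixture. A minimal sketch of such a fixture, assuming it simply wraps clean_scene() (the conftest wiring shown here is an assumption, not the repository's actual fixture):

    import pytest
    from multi_user.utils import clean_scene

    @pytest.fixture
    def clear_blend():
        """Start each test from an empty file by removing every replicated datablock type."""
        clean_scene()
        yield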

@@ -1,7 +1,7 @@
# Download base image debian jessie
FROM python:slim

-ARG replication_version=0.1.13
+ARG replication_version=0.9.1
ARG version=0.1.1

# Infos

@@ -22,4 +22,4 @@ RUN pip install replication==$replication_version

# Run the server with parameters
ENTRYPOINT ["/bin/sh", "-c"]
-CMD ["python3 -m replication.server -pwd ${password} -p ${port} -t ${timeout} -l ${log_level} -lf ${log_file}"]
+CMD ["replication.serve -apwd ${password} -spwd '' -p ${port} -t ${timeout} -l ${log_level} -lf ${log_file}"]

@@ -1,4 +1,4 @@
import re

-init_py = open("multi_user/__init__.py").read()
+init_py = open("multi_user/libs/replication/replication/__init__.py").read()
print(re.search("\d+\.\d+\.\d+\w\d+|\d+\.\d+\.\d+", init_py).group(0))
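The regex accepts either a plain x.y.z version or one with a pre-release suffix. A small self-contained check of what it matches (the sample strings are illustrative only):

    import re

    VERSION_RE = r"\d+\.\d+\.\d+\w\d+|\d+\.\d+\.\d+"

    # A plain release and a pre-release style string both match.
    assert re.search(VERSION_RE, "__version__ = '0.9.1'").group(0) == "0.9.1"
    assert re.search(VERSION_RE, "__version__ = '0.5.0a21'").group(0) == "0.5.0a21"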

@@ -13,7 +13,7 @@ def main():
    if len(sys.argv) > 2:
        blender_rev = sys.argv[2]
    else:
-        blender_rev = "2.92.0"
+        blender_rev = "2.93.0"

    try:
        exit_val = BAT.test_blender_addon(addon_path=addon, blender_revision=blender_rev)

@@ -5,9 +5,10 @@ from deepdiff import DeepDiff
import bpy
import random

-from multi_user.io_bpy.bl_action import BlAction
+from multi_user.bl_types.bl_action import BlAction

INTERPOLATION = ['CONSTANT', 'LINEAR', 'BEZIER', 'SINE', 'QUAD', 'CUBIC', 'QUART', 'QUINT', 'EXPO', 'CIRC', 'BACK', 'BOUNCE', 'ELASTIC']
+FMODIFIERS = ['GENERATOR', 'FNGENERATOR', 'ENVELOPE', 'CYCLES', 'NOISE', 'LIMITS', 'STEPPED']

# @pytest.mark.parametrize('blendname', ['test_action.blend'])
def test_action(clear_blend):

@@ -22,17 +23,20 @@ def test_action(clear_blend):
        point.co[1] = random.randint(-10,10)
        point.interpolation = INTERPOLATION[random.randint(0, len(INTERPOLATION)-1)]

+    for mod_type in FMODIFIERS:
+        fcurve_sample.modifiers.new(mod_type)
+
    bpy.ops.mesh.primitive_plane_add()
    bpy.data.objects[0].animation_data_create()
    bpy.data.objects[0].animation_data.action = datablock

    # Test
    implementation = BlAction()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
    bpy.data.actions.remove(datablock)
-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

    assert not DeepDiff(expected, result)
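Every datablock test below repeats the same dump → remove → construct → load → dump round-trip with a different implementation class. A minimal helper sketch of that pattern (the helper name and the removal callback are hypothetical, not part of the test suite):

    from deepdiff import DeepDiff

    def roundtrip(implementation, datablock, remove):
        """Dump a datablock, delete it, rebuild it from the dump and compare both dumps."""
        expected = implementation.dump(datablock)
        remove(datablock)                      # e.g. bpy.data.actions.remove
        rebuilt = implementation.construct(expected)
        implementation.load(expected, rebuilt)
        result = implementation.dump(rebuilt)
        assert not DeepDiff(expected, result)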

@ -5,18 +5,18 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.io_bpy.bl_armature import BlArmature from multi_user.bl_types.bl_armature import BlArmature
def test_armature(clear_blend): def test_armature(clear_blend):
bpy.ops.object.armature_add() bpy.ops.object.armature_add()
datablock = bpy.data.armatures[0] datablock = bpy.data.armatures[0]
implementation = BlArmature() implementation = BlArmature()
expected = implementation._dump(datablock) expected = implementation.dump(datablock)
bpy.data.armatures.remove(datablock) bpy.data.armatures.remove(datablock)
test = implementation._construct(expected) test = implementation.construct(expected)
implementation._load(expected, test) implementation.load(expected, test)
result = implementation._dump(test) result = implementation.dump(test)
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)

@ -4,7 +4,7 @@ import pytest
from deepdiff import DeepDiff from deepdiff import DeepDiff
import bpy import bpy
from multi_user.io_bpy.bl_camera import BlCamera from multi_user.bl_types.bl_camera import BlCamera
@pytest.mark.parametrize('camera_type', ['PANO','PERSP','ORTHO']) @pytest.mark.parametrize('camera_type', ['PANO','PERSP','ORTHO'])
@ -15,11 +15,11 @@ def test_camera(clear_blend, camera_type):
datablock.type = camera_type datablock.type = camera_type
camera_dumper = BlCamera() camera_dumper = BlCamera()
expected = camera_dumper._dump(datablock) expected = camera_dumper.dump(datablock)
bpy.data.cameras.remove(datablock) bpy.data.cameras.remove(datablock)
test = camera_dumper._construct(expected) test = camera_dumper.construct(expected)
camera_dumper._load(expected, test) camera_dumper.load(expected, test)
result = camera_dumper._dump(test) result = camera_dumper.dump(test)
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)

@ -5,7 +5,7 @@ from deepdiff import DeepDiff
from uuid import uuid4 from uuid import uuid4
import bpy import bpy
import random import random
from multi_user.io_bpy.bl_collection import BlCollection from multi_user.bl_types.bl_collection import BlCollection
def test_collection(clear_blend): def test_collection(clear_blend):
# Generate a collection with childrens and a cube # Generate a collection with childrens and a cube
@ -23,11 +23,11 @@ def test_collection(clear_blend):
# Test # Test
implementation = BlCollection() implementation = BlCollection()
expected = implementation._dump(datablock) expected = implementation.dump(datablock)
bpy.data.collections.remove(datablock) bpy.data.collections.remove(datablock)
test = implementation._construct(expected) test = implementation.construct(expected)
implementation._load(expected, test) implementation.load(expected, test)
result = implementation._dump(test) result = implementation.dump(test)
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)

@ -5,7 +5,7 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.io_bpy.bl_curve import BlCurve from multi_user.bl_types.bl_curve import BlCurve
@pytest.mark.parametrize('curve_type', ['TEXT','BEZIER']) @pytest.mark.parametrize('curve_type', ['TEXT','BEZIER'])
def test_curve(clear_blend, curve_type): def test_curve(clear_blend, curve_type):
@ -19,11 +19,11 @@ def test_curve(clear_blend, curve_type):
datablock = bpy.data.curves[0] datablock = bpy.data.curves[0]
implementation = BlCurve() implementation = BlCurve()
expected = implementation._dump(datablock) expected = implementation.dump(datablock)
bpy.data.curves.remove(datablock) bpy.data.curves.remove(datablock)
test = implementation._construct(expected) test = implementation.construct(expected)
implementation._load(expected, test) implementation.load(expected, test)
result = implementation._dump(test) result = implementation.dump(test)
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)

@ -4,7 +4,7 @@ import pytest
from deepdiff import DeepDiff from deepdiff import DeepDiff
import bpy import bpy
from multi_user.io_bpy.bl_gpencil import BlGpencil from multi_user.bl_types.bl_gpencil import BlGpencil
def test_gpencil(clear_blend): def test_gpencil(clear_blend):
@ -13,11 +13,11 @@ def test_gpencil(clear_blend):
datablock = bpy.data.grease_pencils[0] datablock = bpy.data.grease_pencils[0]
implementation = BlGpencil() implementation = BlGpencil()
expected = implementation._dump(datablock) expected = implementation.dump(datablock)
bpy.data.grease_pencils.remove(datablock) bpy.data.grease_pencils.remove(datablock)
test = implementation._construct(expected) test = implementation.construct(expected)
implementation._load(expected, test) implementation.load(expected, test)
result = implementation._dump(test) result = implementation.dump(test)
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)

@ -4,7 +4,7 @@ import pytest
from deepdiff import DeepDiff from deepdiff import DeepDiff
import bpy import bpy
from multi_user.io_bpy.bl_lattice import BlLattice from multi_user.bl_types.bl_lattice import BlLattice
def test_lattice(clear_blend): def test_lattice(clear_blend):
@ -13,11 +13,11 @@ def test_lattice(clear_blend):
datablock = bpy.data.lattices[0] datablock = bpy.data.lattices[0]
implementation = BlLattice() implementation = BlLattice()
expected = implementation._dump(datablock) expected = implementation.dump(datablock)
bpy.data.lattices.remove(datablock) bpy.data.lattices.remove(datablock)
test = implementation._construct(expected) test = implementation.construct(expected)
implementation._load(expected, test) implementation.load(expected, test)
result = implementation._dump(test) result = implementation.dump(test)
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)

@@ -4,21 +4,21 @@ import pytest
from deepdiff import DeepDiff
import bpy

-from multi_user.io_bpy.bl_lightprobe import BlLightprobe
+from multi_user.bl_types.bl_lightprobe import BlLightprobe

-@pytest.mark.skipif(bpy.app.version[1] < 83, reason="requires blender 2.83 or higher")
+@pytest.mark.skipif(bpy.app.version < (2,83,0), reason="requires blender 2.83 or higher")
@pytest.mark.parametrize('lightprobe_type', ['PLANAR','GRID','CUBEMAP'])
def test_lightprobes(clear_blend, lightprobe_type):
    bpy.ops.object.lightprobe_add(type=lightprobe_type)
    blender_light = bpy.data.lightprobes[0]

    lightprobe_dumper = BlLightprobe()
-    expected = lightprobe_dumper._dump(blender_light)
+    expected = lightprobe_dumper.dump(blender_light)
    bpy.data.lightprobes.remove(blender_light)
-    test = lightprobe_dumper._construct(expected)
-    lightprobe_dumper._load(expected, test)
-    result = lightprobe_dumper._dump(test)
+    test = lightprobe_dumper.construct(expected)
+    lightprobe_dumper.load(expected, test)
+    result = lightprobe_dumper.dump(test)

    assert not DeepDiff(expected, result)

@ -4,7 +4,7 @@ import pytest
from deepdiff import DeepDiff from deepdiff import DeepDiff
import bpy import bpy
from multi_user.io_bpy.bl_light import BlLight from multi_user.bl_types.bl_light import BlLight
@pytest.mark.parametrize('light_type', ['SPOT','SUN','POINT','AREA']) @pytest.mark.parametrize('light_type', ['SPOT','SUN','POINT','AREA'])
@ -13,11 +13,11 @@ def test_light(clear_blend, light_type):
blender_light = bpy.data.lights[0] blender_light = bpy.data.lights[0]
light_dumper = BlLight() light_dumper = BlLight()
expected = light_dumper._dump(blender_light) expected = light_dumper.dump(blender_light)
bpy.data.lights.remove(blender_light) bpy.data.lights.remove(blender_light)
test = light_dumper._construct(expected) test = light_dumper.construct(expected)
light_dumper._load(expected, test) light_dumper.load(expected, test)
result = light_dumper._dump(test) result = light_dumper.dump(test)
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)

@ -4,7 +4,7 @@ import pytest
from deepdiff import DeepDiff from deepdiff import DeepDiff
import bpy import bpy
from multi_user.io_bpy.bl_material import BlMaterial from multi_user.bl_types.bl_material import BlMaterial
def test_material_nodes(clear_blend): def test_material_nodes(clear_blend):
@ -17,12 +17,12 @@ def test_material_nodes(clear_blend):
datablock.node_tree.nodes.new(ntype) datablock.node_tree.nodes.new(ntype)
implementation = BlMaterial() implementation = BlMaterial()
expected = implementation._dump(datablock) expected = implementation.dump(datablock)
bpy.data.materials.remove(datablock) bpy.data.materials.remove(datablock)
test = implementation._construct(expected) test = implementation.construct(expected)
implementation._load(expected, test) implementation.load(expected, test)
result = implementation._dump(test) result = implementation.dump(test)
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)
@ -32,11 +32,11 @@ def test_material_gpencil(clear_blend):
bpy.data.materials.create_gpencil_data(datablock) bpy.data.materials.create_gpencil_data(datablock)
implementation = BlMaterial() implementation = BlMaterial()
expected = implementation._dump(datablock) expected = implementation.dump(datablock)
bpy.data.materials.remove(datablock) bpy.data.materials.remove(datablock)
test = implementation._construct(expected) test = implementation.construct(expected)
implementation._load(expected, test) implementation.load(expected, test)
result = implementation._dump(test) result = implementation.dump(test)
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)

@ -5,7 +5,7 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.io_bpy.bl_mesh import BlMesh from multi_user.bl_types.bl_mesh import BlMesh
@pytest.mark.parametrize('mesh_type', ['EMPTY','FILLED']) @pytest.mark.parametrize('mesh_type', ['EMPTY','FILLED'])
def test_mesh(clear_blend, mesh_type): def test_mesh(clear_blend, mesh_type):
@ -18,11 +18,11 @@ def test_mesh(clear_blend, mesh_type):
# Test # Test
implementation = BlMesh() implementation = BlMesh()
expected = implementation._dump(datablock) expected = implementation.dump(datablock)
bpy.data.meshes.remove(datablock) bpy.data.meshes.remove(datablock)
test = implementation._construct(expected) test = implementation.construct(expected)
implementation._load(expected, test) implementation.load(expected, test)
result = implementation._dump(test) result = implementation.dump(test)
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)

@ -4,7 +4,7 @@ import pytest
from deepdiff import DeepDiff from deepdiff import DeepDiff
import bpy import bpy
from multi_user.io_bpy.bl_metaball import BlMetaball from multi_user.bl_types.bl_metaball import BlMetaball
@pytest.mark.parametrize('metaballs_type', ['PLANE','CAPSULE','BALL','ELLIPSOID','CUBE']) @pytest.mark.parametrize('metaballs_type', ['PLANE','CAPSULE','BALL','ELLIPSOID','CUBE'])
@ -13,11 +13,11 @@ def test_metaball(clear_blend, metaballs_type):
datablock = bpy.data.metaballs[0] datablock = bpy.data.metaballs[0]
dumper = BlMetaball() dumper = BlMetaball()
expected = dumper._dump(datablock) expected = dumper.dump(datablock)
bpy.data.metaballs.remove(datablock) bpy.data.metaballs.remove(datablock)
test = dumper._construct(expected) test = dumper.construct(expected)
dumper._load(expected, test) dumper.load(expected, test)
result = dumper._dump(test) result = dumper.dump(test)
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)

@ -5,7 +5,7 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.io_bpy.bl_object import BlObject from multi_user.bl_types.bl_object import BlObject
# Removed 'BUILD', 'SOFT_BODY' modifier because the seed doesn't seems to be # Removed 'BUILD', 'SOFT_BODY' modifier because the seed doesn't seems to be
# correctly initialized (#TODO: report the bug) # correctly initialized (#TODO: report the bug)
@ -65,11 +65,11 @@ def test_object(clear_blend):
datablock.shape_key_add(name='shape2') datablock.shape_key_add(name='shape2')
implementation = BlObject() implementation = BlObject()
expected = implementation._dump(datablock) expected = implementation.dump(datablock)
bpy.data.objects.remove(datablock) bpy.data.objects.remove(datablock)
test = implementation._construct(expected) test = implementation.construct(expected)
implementation._load(expected, test) implementation.load(expected, test)
result = implementation._dump(test) result = implementation.dump(test)
print(DeepDiff(expected, result)) print(DeepDiff(expected, result))
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)

@@ -5,21 +5,23 @@ from deepdiff import DeepDiff
import bpy
import random

-from multi_user.io_bpy.bl_scene import BlScene
+from multi_user.bl_types.bl_scene import BlScene

from multi_user.utils import get_preferences


def test_scene(clear_blend):
    get_preferences().sync_flags.sync_render_settings = True

    datablock = bpy.data.scenes.new("toto")
+    datablock.timeline_markers.new('toto', frame=10)
+    datablock.timeline_markers.new('tata', frame=1)
    datablock.view_settings.use_curve_mapping = True

    # Test
    implementation = BlScene()
-    expected = implementation._dump(datablock)
+    expected = implementation.dump(datablock)
    bpy.data.scenes.remove(datablock)
-    test = implementation._construct(expected)
-    implementation._load(expected, test)
-    result = implementation._dump(test)
+    test = implementation.construct(expected)
+    implementation.load(expected, test)
+    result = implementation.dump(test)

    assert not DeepDiff(expected, result)

@ -5,18 +5,18 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.io_bpy.bl_speaker import BlSpeaker from multi_user.bl_types.bl_speaker import BlSpeaker
def test_speaker(clear_blend): def test_speaker(clear_blend):
bpy.ops.object.speaker_add() bpy.ops.object.speaker_add()
datablock = bpy.data.speakers[0] datablock = bpy.data.speakers[0]
implementation = BlSpeaker() implementation = BlSpeaker()
expected = implementation._dump(datablock) expected = implementation.dump(datablock)
bpy.data.speakers.remove(datablock) bpy.data.speakers.remove(datablock)
test = implementation._construct(expected) test = implementation.construct(expected)
implementation._load(expected, test) implementation.load(expected, test)
result = implementation._dump(test) result = implementation.dump(test)
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)

@ -5,7 +5,7 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.io_bpy.bl_texture import BlTexture from multi_user.bl_types.bl_texture import BlTexture
TEXTURE_TYPES = ['NONE', 'BLEND', 'CLOUDS', 'DISTORTED_NOISE', 'IMAGE', 'MAGIC', 'MARBLE', 'MUSGRAVE', 'NOISE', 'STUCCI', 'VORONOI', 'WOOD'] TEXTURE_TYPES = ['NONE', 'BLEND', 'CLOUDS', 'DISTORTED_NOISE', 'IMAGE', 'MAGIC', 'MARBLE', 'MUSGRAVE', 'NOISE', 'STUCCI', 'VORONOI', 'WOOD']
@ -14,11 +14,11 @@ def test_texture(clear_blend, texture_type):
datablock = bpy.data.textures.new('test', texture_type) datablock = bpy.data.textures.new('test', texture_type)
implementation = BlTexture() implementation = BlTexture()
expected = implementation._dump(datablock) expected = implementation.dump(datablock)
bpy.data.textures.remove(datablock) bpy.data.textures.remove(datablock)
test = implementation._construct(expected) test = implementation.construct(expected)
implementation._load(expected, test) implementation.load(expected, test)
result = implementation._dump(test) result = implementation.dump(test)
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)

@ -5,17 +5,17 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.io_bpy.bl_volume import BlVolume from multi_user.bl_types.bl_volume import BlVolume
def test_volume(clear_blend): def test_volume(clear_blend):
datablock = bpy.data.volumes.new("Test") datablock = bpy.data.volumes.new("Test")
implementation = BlVolume() implementation = BlVolume()
expected = implementation._dump(datablock) expected = implementation.dump(datablock)
bpy.data.volumes.remove(datablock) bpy.data.volumes.remove(datablock)
test = implementation._construct(expected) test = implementation.construct(expected)
implementation._load(expected, test) implementation.load(expected, test)
result = implementation._dump(test) result = implementation.dump(test)
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)

@ -5,18 +5,18 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.io_bpy.bl_world import BlWorld from multi_user.bl_types.bl_world import BlWorld
def test_world(clear_blend): def test_world(clear_blend):
datablock = bpy.data.worlds.new('test') datablock = bpy.data.worlds.new('test')
datablock.use_nodes = True datablock.use_nodes = True
implementation = BlWorld() implementation = BlWorld()
expected = implementation._dump(datablock) expected = implementation.dump(datablock)
bpy.data.worlds.remove(datablock) bpy.data.worlds.remove(datablock)
test = implementation._construct(expected) test = implementation.construct(expected)
implementation._load(expected, test) implementation.load(expected, test)
result = implementation._dump(test) result = implementation.dump(test)
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)

@@ -1,20 +0,0 @@
-import os
-import pytest
-from deepdiff import DeepDiff
-
-import bpy
-import random
-
-
-def test_start_session():
-    result = bpy.ops.session.start()
-    assert 'FINISHED' in result
-
-
-def test_stop_session():
-    result = bpy.ops.session.stop()
-    assert 'FINISHED' in result