Compare commits

..

292 Commits

Author SHA1 Message Date
45437660ba clean: remove unused lock 2020-10-22 17:37:53 +02:00
ee93a5b209 Merge branch 'develop' into 132-fix-undo-edit-last-operation-redo-handling 2020-10-22 16:21:31 +02:00
f90c12b27f doc: added missing fields
feat: changed session widget defaults
2020-10-22 16:07:19 +02:00
3573db0969 Merge branch '134-revamp-session-status-ui-widget' into 'develop'
Resolve "Revamp session status UI widget"

See merge request slumber/multi-user!67
2020-10-22 13:52:29 +00:00
92bde00a5a feat: store session widget settings to preferences 2020-10-22 15:48:13 +02:00
2c82560d24 fix: grease pencil material 2020-10-22 13:55:26 +02:00
6f364d2b88 feat: session widget position and scale settings
feat: ui_scale is now taken in account for session widget text size
2020-10-21 23:33:44 +02:00
760b52c02b Merge branch '135-empty-and-light-objects-user-selection-highlight-is-broken' into 'develop'
Resolve "Empty and Light objects user selection highlight is broken"

See merge request slumber/multi-user!66
2020-10-21 15:25:42 +00:00
4dd932fc56 fix: empty and light display broken 2020-10-21 17:23:59 +02:00
ba1a03cbfa Merge branch '133-material-renaming-is-unstable' into 'develop'
Resolve "Material renaming is unstable"

See merge request slumber/multi-user!65
2020-10-21 13:17:18 +00:00
18b5fa795c feat: resolve materials from uuid by default and fallback on regular name resolving 2020-10-21 15:10:37 +02:00
1a82ec72e4 fix: change owner call in opterator 2020-10-21 14:40:15 +02:00
804747c73b fix: owning parent when a child is already owned (ex: duplicate linked) 2020-10-21 14:15:42 +02:00
7ee705332f feat: update replication to prevent UnpicklingError from crashing the network Thread 2020-10-20 17:25:50 +02:00
4bd0055056 Merge branch 'develop' into 132-fix-undo-edit-last-operation-redo-handling 2020-10-16 14:57:36 +02:00
716c78e380 feat: update changelog 2020-10-16 11:06:41 +02:00
5e4ce4556f doc: update operator descriptions 2020-10-16 10:57:45 +02:00
aa9ea08151 doc: update refresh icon 2020-10-16 10:28:29 +02:00
f56890128e fix: material test by splitting it in a gpencil and nodal material test 2020-10-15 18:08:08 +02:00
8865556229 feat: update CHANGELOG 2020-10-15 18:02:07 +02:00
5bc9b10c12 fix: material gpencil loading 2020-10-15 18:01:54 +02:00
7db3c18213 feat: affect dependencies option in change owner 2020-10-15 17:48:04 +02:00
f151c61d7b feat: mimic blender undo handling 2020-10-15 17:21:14 +02:00
ff35e34032 feat: update apply ui icon
fix: material property filter
2020-10-15 17:09:50 +02:00
9f8222afa7 fix: handle apply dependencies 2020-10-15 12:11:28 +02:00
1828bfac22 feat: update changelog 2020-10-14 19:25:59 +02:00
3a1087ecb8 Merge branch '131-sync-render-settings-flag-cause-a-race-condition' into 'develop'
Resolve "Sync render settings flag cause a race condition"

See merge request slumber/multi-user!63
2020-10-14 17:16:20 +00:00
b398541787 fix: apply operator 2020-10-14 19:12:28 +02:00
f0b33d8471 fix: race condition in scene sync 2020-10-14 19:11:32 +02:00
5a282a3e22 Merge branch '130-mesh-transfert-is-broken-between-a-hybrid-linux-windows-session' into 'develop'
Resolve "Mesh transfert is broken between a hybrid linux-windows session"

See merge request slumber/multi-user!62
2020-10-14 14:07:59 +00:00
4283fc0fff fix: crash during hybrid session
Related to #130
2020-10-14 16:06:11 +02:00
753f4d3f27 fix: prevent NonAuthorizedOperationError to kill the right management timer 2020-10-14 00:36:59 +02:00
9dd02b2756 feat: fix binary diff 2020-10-13 17:15:31 +02:00
c74d12c843 fix: handle world empty dependencies 2020-10-13 17:10:25 +02:00
e1d9982276 fix: bl_file diff when clear memory cache is enabled 2020-10-13 17:09:43 +02:00
8861986213 fix: packed image save error 2020-10-13 16:58:48 +02:00
1cb9fb410c feat: material node output default value support
fix: prevent material empty dependencies
2020-10-12 23:10:42 +02:00
c4a8cc4606 Merge branch 'fix_deploy' into 'develop'
Fix deploy

See merge request slumber/multi-user!61
2020-10-12 19:03:47 +00:00
187f11071c feat: enable build and deploy for only master and develop 2020-10-12 21:01:54 +02:00
530fae8cb4 feat: active deploy 2020-10-12 20:24:12 +02:00
6771c371a1 feat: enable deploy back 2020-10-12 20:23:08 +02:00
c844c6e54f clean: keep only active renderer settings (bl_scene.py)
fix: resolve_deps_implementation now only resolve master collection objects (bl_scene.py)
2020-10-12 20:21:08 +02:00
a4d0b1a68b fix: client selection 2020-10-12 18:56:42 +02:00
2fdc11692d fix: handle None bounding box position 2020-10-12 18:15:59 +02:00
dbfca4568f fix: get_preference import syntax 2020-10-12 18:07:09 +02:00
069a528276 feat: test scene with sync render settings enabled 2020-10-12 18:04:54 +02:00
030f2661fd fix: buffer empty for the first diff 2020-10-12 17:13:35 +02:00
e589e3eec4 fix: file not found logging
clean: remove cache for scene diff
2020-10-12 17:12:50 +02:00
04140ced1b fix: collection instance bounding box display 2020-10-12 17:11:46 +02:00
0d9ce43e74 fix: enable binary differential back
feat: ignore material node bl_label
2020-10-12 13:33:49 +02:00
d3969b4fd4 Revert "feat: avoid dumping read only properties"
This reverts commit cefaef5c4b.
2020-10-12 10:23:19 +02:00
e21f64ac98 revert: bl_label 2020-10-11 19:20:53 +02:00
b25b380d21 fix: missing bl_idname 2020-10-11 19:11:51 +02:00
1146d9d304 feat: disable render settings sync by default 2020-10-11 19:08:06 +02:00
51b60521e6 feat: update replication version 2020-10-11 19:07:48 +02:00
035f8a1dcd feat: skipping not required parameters 2020-10-11 19:07:28 +02:00
cefaef5c4b feat: avoid dumping read only properties 2020-10-11 19:06:58 +02:00
4714e60ff7 Merge branch 'develop' of gitlab.com:slumber/multi-user into develop 2020-10-11 15:22:05 +02:00
3eca25ae19 feat: update replication version 2020-10-11 15:10:28 +02:00
96346f8a25 refactor: clean debug logs 2020-10-11 15:06:32 +02:00
a258c2c182 Merge branch 'feature/doc-updates-2' into 'develop'
Feature/doc updates 2

See merge request slumber/multi-user!60
2020-10-09 09:28:36 +00:00
6862df5331 Minor doc update 2020-10-09 01:59:42 +02:00
f271a9d0e3 Updated contribution doc to indicate how to sync with upstream repository 2020-10-09 01:55:45 +02:00
bdff6eb5c9 Updated contribution documentation with how to sync upstream repo 2020-10-09 01:29:01 +02:00
b661407952 Merge branch '128-ui-gizmo-error' into 'develop'
Resolve "UI gizmo error"

See merge request slumber/multi-user!59
2020-10-08 22:50:11 +00:00
d5eb7fda02 fix: ci yaml error 2020-10-09 00:46:52 +02:00
35e8ac9c33 feat: disable deploy until fixed 2020-10-09 00:45:30 +02:00
4453d256b8 feat: update replication version, switched dependency to pyzmq 2020-10-08 23:57:39 +02:00
299e330ec6 fix: internal gizmo error by launching the modal operator from the timer 2020-10-08 23:42:14 +02:00
34b9f7ae27 Merge branch 'master' into develop 2020-10-08 23:14:58 +02:00
9d100d84ad Merge branch 'hotfix/ui-spelling-fixes' into 'master'
Hotfix/ui spelling fixes

See merge request slumber/multi-user!58
2020-10-08 20:58:13 +00:00
2f677c399e UI spelling fixes to preferences.py and ui.py 2020-10-08 22:52:24 +02:00
e967b35c38 Revert "Minor UI spelling errors"
This reverts commit 673c4e69a4.
2020-10-08 21:58:30 +02:00
7bd0a196b4 Merge branch 'feature/doc-updates' into 'develop'
Feature/doc updates

See merge request slumber/multi-user!57
2020-10-08 17:04:37 +00:00
7892b5e9b6 Adding log-level to server startup scripts 2020-10-08 18:35:08 +02:00
f779678c0e Updates to hosting guide and contribution documentation 2020-10-08 18:31:20 +02:00
629fc2d223 feat: update dockerfile 2020-10-08 15:10:32 +02:00
724c2345df refactor: disable force apply during the reparent 2020-10-08 15:00:27 +02:00
673c4e69a4 Minor UI spelling errors 2020-10-08 00:31:56 +02:00
fbfff6c7ec Doc updates clarifying developer workflow, updating hosting instructions 2020-10-08 00:08:23 +02:00
f592294335 Added scripts to conveniently start server instance via docker or replication 2020-10-07 21:20:43 +02:00
8e7be5afde Merge branch '126-draw-refactoring' into 'develop'
Resolve "Draw refactoring"

See merge request slumber/multi-user!55
2020-10-06 14:12:13 +00:00
fc76b2a8e6 fix: avoid to remove inexistant user widget 2020-10-06 16:10:10 +02:00
1a8bcddb74 refactor: formatting 2020-10-06 15:53:29 +02:00
60fba5b9df refactor: use dict to store widgets 2020-10-06 15:46:35 +02:00
be0eb1fa42 clean: remove unused import 2020-10-06 09:45:13 +02:00
93d9bea3ae feat: display session status 2020-10-05 23:38:52 +02:00
022b7f7822 refactor: enable username display again
refactor: avoid to draw the local user
2020-10-05 22:34:43 +02:00
ae34846509 fix: ci syntax 2020-10-05 21:53:14 +02:00
d328077cb0 feat: deploy and build only for master and develop
refactor: carry on presence refactoring
2020-10-05 21:51:54 +02:00
0c4740eef8 fix: import error 2020-10-05 18:48:40 +02:00
d7b2c7e2f6 refactor: started to rewrite presence
fix: weird bounding boxes on various objects types

Related to #55
2020-10-05 18:34:41 +02:00
efbb9e7096 doc: feat changelog 0.1.0 release date 2020-10-05 16:11:04 +02:00
e0b56d8990 Merge branch 'develop' into 'master'
v0.1.0

See merge request slumber/multi-user!43
2020-10-05 13:42:16 +00:00
7a94c21187 doc: update version 2020-10-05 15:37:06 +02:00
0687090f05 feat: update changelog 2020-10-05 15:18:01 +02:00
920744334c Merge branch '125-autobuild-docker-image' into 'develop'
Resolve "Autobuild docker image"

See merge request slumber/multi-user!53
2020-10-05 09:32:57 +00:00
dfa7f98126 refactor: remove useless script 2020-10-05 11:28:45 +02:00
ea530f0f96 feat: enable test and build back 2020-10-03 00:30:37 +02:00
c3546ff74f fix: var name 2020-10-03 00:28:11 +02:00
83aa9b57ec feat: automatic image version 2020-10-03 00:26:44 +02:00
28a265be68 test: login in script 2020-10-03 00:12:39 +02:00
7dfabb16c7 fix: tls 2020-10-03 00:07:07 +02:00
ea5d9371ca feat: login 2020-10-03 00:00:42 +02:00
3df73a0716 feat: find replication version 2020-10-02 23:58:08 +02:00
ae3c994ff1 feat: dind tests 2020-10-02 23:55:04 +02:00
bd73b385b6 feat: dind 2020-10-02 23:52:19 +02:00
f054b1c5f2 fix: trying to use a standard docker image 2020-10-02 23:38:09 +02:00
d083100a2a fix: image directory path 2020-10-02 23:33:50 +02:00
b813b8df9e feat: docker build and push 2020-10-02 23:32:06 +02:00
d0e966ff1a fix: path 2020-10-02 23:29:48 +02:00
56cbf14fe1 refactor: use custom image 2020-10-02 23:27:45 +02:00
8bf55ebd46 feat: apk update 2020-10-02 23:19:34 +02:00
edbc5ee343 feat: apt install 2020-10-02 23:16:46 +02:00
4a92511582 feat: test install python 2020-10-02 23:14:49 +02:00
b42df2cf4a feat: retrieve version 2020-10-02 23:07:25 +02:00
7549466824 fix: ci deploy name 2020-10-02 18:59:25 +02:00
423e71476d feat: update ci 2020-10-02 18:57:50 +02:00
3bc4b20035 feat: CI file and docker image 2020-10-02 18:56:26 +02:00
9966a24b5e feat: update supported types in README.md 2020-10-02 18:04:32 +02:00
577c01a594 Merge branch '124-use-a-global-session-instance-in-replication' into 'develop'
Resolve "use a global session instance in replication"

See merge request slumber/multi-user!52
2020-10-02 15:51:30 +00:00
3d72796c10 refactor: remove old client ref
feat: update changelog
2020-10-02 17:48:56 +02:00
edcbd7b02a feat: display node in error in the repository view 2020-10-02 17:22:14 +02:00
b368c985b8 refactor: session handler encapsulation 2020-10-02 12:11:53 +02:00
cab1a71eaa fix: version 2020-10-02 09:52:21 +02:00
33cb188509 refactor: use replication session global instance 2020-10-02 00:05:33 +02:00
0a3dd9b5b8 fix: missing get_datablock_from_uuid 2020-10-02 00:00:34 +02:00
7fbdbdcc21 feat: show flag in blender overlays panel 2020-10-01 22:55:06 +02:00
8f9d5aabf9 refactor: moved get_datablock_from_uuid to bl_datablock 2020-10-01 22:51:48 +02:00
824d4d6a83 feat: upgrade replication version to fix duplicate during REPARENT
Related to #113
2020-10-01 15:34:36 +02:00
5f4bccbcd9 feat: POLY curves support
During a mesh->curve conversion, curve type spline is changed to POLY. This is adding the support for POLY curves.

Related to #113
2020-10-01 15:33:10 +02:00
8e8e54fe7d Merge branch '122-crash-on-connection' into 'develop'
Resolve "Crash on connection"

See merge request slumber/multi-user!50
2020-10-01 09:17:59 +00:00
04b13cc0b7 refactor: move connection handlers to the main thread 2020-10-01 10:58:30 +02:00
ba98875560 fix: version check command format 2020-09-29 17:33:39 +02:00
a9fb84a5c6 fix: world viewport color sync 2020-09-29 11:47:48 +02:00
2f139178d3 feat: update replication version 2020-09-28 22:59:43 +02:00
e466f81600 fix: file handler not properly closed 2020-09-28 22:51:07 +02:00
cb836e30f5 fix: empty uv useless update 2020-09-28 22:50:42 +02:00
152e356dad fix: font/sound loading 2020-09-28 10:40:07 +02:00
7b13e8978b fix: close file handler after quitting the session 2020-09-28 10:32:41 +02:00
e0839fe1fb Merge branch '118-optionnal-active-camera-sync-flag' into 'develop'
Resolve "Optionnal active camera sync flag"

See merge request slumber/multi-user!49
2020-09-25 14:09:31 +00:00
aec3e8b8bf doc: update replication flag section 2020-09-25 15:27:01 +02:00
a89564de6b feat: append synchronization flags to the top
refactor: enable sync render settings by default
2020-09-25 14:26:31 +02:00
e301a10456 feat: active camera sync flag 2020-09-25 11:33:35 +02:00
cfc6ce91bc feat: initial live syncflag support 2020-09-25 11:23:36 +02:00
4f731c6640 fix: implementation not found if a new type is added 2020-09-23 17:37:21 +02:00
9b1b8f11fd feat: sync object hide_render 2020-09-23 16:48:17 +02:00
e742c824fc feat: sync all object show flags except hide_viewport. 2020-09-23 16:47:51 +02:00
6757bbbd30 fix: enable DIFF_BINARY by default 2020-09-23 16:04:31 +02:00
f6a39e4290 fix: scene differential error
fix: bl_file loading error
feat: update replication version
2020-09-23 14:24:57 +02:00
410d8d2f1a feat: display sync 2020-09-23 10:00:08 +02:00
bd64c17f05 feat: update version 2020-09-22 16:36:59 +02:00
dc063b5954 fix: handle file not found exception 2020-09-21 18:52:27 +02:00
0ae34d5702 Merge branch 'file_replication' into 'develop'
Basic file replication interface

See merge request slumber/multi-user!48
2020-09-21 16:17:58 +00:00
167b39f15e doc: added a cache section to the quickstart 2020-09-21 18:14:30 +02:00
9adc0d7d6e clean: remove image testing (until the file replication interface is done) 2020-09-21 17:48:07 +02:00
fb622fa098 fix: get_datablock_users attribute error 2020-09-21 17:37:06 +02:00
c533d4b86a ci: run tests on every branch 2020-09-21 17:31:07 +02:00
6c47e095be feat: cache management utility 2020-09-21 16:47:49 +02:00
f992d06b03 feat: handle packed datablock
feat: filecache settings
2020-09-21 12:12:19 +02:00
af3afc1124 feat: use bl_file in bl_image 2020-09-21 00:11:37 +02:00
b77ab2dd05 feat: use bl_file to replicate font and sound files 2020-09-20 23:31:24 +02:00
150054d19c feat: generic file replication ground work 2020-09-20 19:53:51 +02:00
8d2b9e5580 Merge branch '65-sync-speaker-sounds' into 'develop'
Partial support for syncing speaker sound files

See merge request slumber/multi-user!47
2020-09-19 19:37:43 +00:00
6870331c34 feat: notice 2020-09-19 18:59:03 +02:00
6f73b7fc29 feat: ground work for sound sync 2020-09-19 00:47:46 +02:00
6385830f53 fix: prevent world replication conflict with external addons 2020-09-18 23:38:21 +02:00
b705228f4a feat: support all font file extensions 2020-09-18 23:30:50 +02:00
73d2da4c47 fix: ReparentException error
feat: replication protocol version in ui header
2020-09-18 23:25:01 +02:00
b28e7c2149 Merge branch '116-bfon-is-missing' into 'develop'
Resolve "Bfont is missing"

See merge request slumber/multi-user!46
2020-09-18 21:10:13 +00:00
38f06683be fix: bfont is missing
related to #116
2020-09-18 23:09:47 +02:00
62221c9e49 Merge branch '114-support-custom-fonts' into 'develop'
Resolve "Support custom fonts"

See merge request slumber/multi-user!45
2020-09-18 15:05:25 +00:00
e9f416f682 feat: ground work for custom font support 2020-09-18 17:04:24 +02:00
3108a06e89 fix: sync flag missing comma 2020-09-18 16:17:19 +02:00
470df50dc2 fix: bl_image test, disabling texture unload from ram. 2020-09-18 16:02:50 +02:00
d8a94e3f5e fix: image uuid error 2020-09-18 15:58:43 +02:00
47a0efef27 Merge branch '113-support-datablock-conversion' into 'develop'
Resolve "Support datablock conversion"

See merge request slumber/multi-user!44
2020-09-18 13:33:43 +00:00
ca5aebfeff feat: various images format support
feat: world environment image support
2020-09-18 15:25:52 +02:00
fe6ffd19b4 feat: child date renaming support 2020-09-17 23:45:09 +02:00
b9a6ddafe9 fix: object data load 2020-09-17 23:17:51 +02:00
ae71d7757e feat: reparent ground work 2020-09-17 22:47:11 +02:00
34ed5da6f0 fix: logging 2020-09-15 16:33:49 +02:00
2c16f07ae7 doc: update Changelog 2020-09-15 15:05:09 +02:00
60f25359d1 Merge branch '111-improve-the-logging-process' into 'develop'
Resolve "Improve the logging process"

See merge request slumber/multi-user!42
2020-09-15 11:03:42 +00:00
975b50a988 doc: update log related sections 2020-09-15 13:02:50 +02:00
66417dc84a refactor: minor ui cleanup 2020-09-15 12:40:51 +02:00
514f90d602 feat: logging to files
feat: logging level

Related to #111
2020-09-15 12:31:46 +02:00
086876ad2e feat: update version check to handle experimental ones 2020-09-15 12:29:20 +02:00
71c179f32f fix: python version 2020-09-09 11:58:51 +02:00
2399096b07 feat: experimenting a custom testing image 2020-09-09 11:57:34 +02:00
0c4d1aaa5f feat: update changelog to reflect changes 2020-09-09 11:55:53 +02:00
de8fbb0629 feat: update addon updater to support installation from branches (develop and master) 2020-09-09 10:58:02 +02:00
d7396e578c Merge branch '107-optionnal-flag-to-allow-edit-mesh-updates' into 'develop'
Resolve "Optionnal flag to allow edit mesh updates"

See merge request slumber/multi-user!41
2020-09-08 21:11:09 +00:00
7f5b5866f2 feat: usage warning 2020-09-08 23:09:42 +02:00
3eb1af406b doc: reflect advanced settings changes 2020-09-08 22:56:23 +02:00
79ccac915f feat: experimental edit mode update
Related to #107
2020-09-08 22:37:58 +02:00
f5232ccea0 Merge branch 'master' into develop 2020-09-03 17:23:21 +02:00
c599a4e6ea doc: update advanced section 2020-09-03 16:15:49 +02:00
b3230177d8 Merge branch 'feature/event_driven_updates' into develop 2020-09-03 15:59:19 +02:00
f2da4cb8e9 Merge branch 'develop' of gitlab.com:slumber/multi-user into develop 2020-09-02 16:45:08 +02:00
605bcc7581 refactor: bl_collection lint
feat: late update replication
2020-09-02 16:44:11 +02:00
e31d76a641 Merge branch 'fix-pip-require-virtualenv' into 'develop'
Resolve "'zmq' install (and other pip packages) fails when PIP_REQUIRE_VIRTUALENV env var is set to true"

See merge request slumber/multi-user!40
2020-08-28 17:49:27 +00:00
97c2118b7e doc: add comment to explain why unsetting PIP_REQUIRE_VIRTUALENV is required. 2020-08-28 18:12:01 +02:00
352977e442 fix: unset PIP_REQUIRE_VIRTUALENV if set to ensure multi-user can install its packages 2020-08-28 17:23:25 +02:00
a46d5fa227 fix: missing ui error, missing scene 2020-08-28 15:27:46 +02:00
ade736d8a5 refactor: collection test 2020-08-28 15:01:50 +02:00
d7f7e86015 fix: collection dump 2020-08-28 14:52:56 +02:00
5e7d1e1dda feat: update replication version 2020-08-28 14:20:00 +02:00
fa5f0c7296 fix: replication version 2020-08-28 14:13:20 +02:00
f14d0915c8 feat: same collection management for Scene Master collection 2020-08-28 14:10:09 +02:00
d1e088d229 feat: orhtographic_scale sync 2020-08-28 14:09:10 +02:00
aa35da9c56 refactor: move attribute skipping warnings to debug 2020-08-28 11:28:26 +02:00
f26c3b2606 refactor: use uuid for collection loading 2020-08-28 11:27:03 +02:00
00d60be75b feat: change replication to the pre-release version 2020-08-27 11:40:26 +02:00
bb5b9fe4c8 refactor: move deepdiff dependency to replication 2020-08-27 10:45:54 +02:00
c6af49492e Merge branch 'master' of gitlab.com:slumber/multi-user 2020-08-26 11:35:47 +02:00
6158ef5171 feat: discord link in readme 2020-08-26 11:35:06 +02:00
6475b4fc08 feat: collection instance offset support
Related to #105
2020-08-24 17:49:17 +02:00
e4e09d63ff fix: instanced collection replication
Related to #105
2020-08-24 17:48:14 +02:00
4b07ae0cc3 fix: fix test condition 2020-08-07 15:47:05 +02:00
49a419cbe2 fix: none result while trying to access a node 2020-08-07 15:38:11 +02:00
5d52fb2460 fix: avoid build ci from running on other branch than develop and master 2020-08-07 15:08:08 +02:00
f1e09c1507 Merge branch 'develop' into feature/event_driven_updates 2020-08-07 15:07:17 +02:00
f915c52bd0 fix: loader missing 2020-08-06 15:33:08 +02:00
dee2e77552 fix: modifier assigned vertex groups 2020-08-06 15:26:55 +02:00
7953a2a177 feat: Update CHANGELOG.md 2020-07-31 09:01:01 +00:00
3f0082927e feat: lock movement support 2020-07-29 11:10:35 +02:00
07ffe05a84 feat: enable autoupdater back 2020-07-28 17:26:14 +02:00
09ee1cf826 fix: auto updater download 2020-07-28 17:17:39 +02:00
61bcec98c3 fix: wrong debian version 2020-07-28 13:37:06 +00:00
1c85d436fd feat: missing update 2020-07-28 13:32:33 +00:00
03318026d4 feat: missing zip program 2020-07-28 13:26:54 +00:00
7a0b142d69 feat: zip build artifacts 2020-07-28 13:20:35 +00:00
eb874110f8 Merge branch 'develop' 2020-07-28 14:25:42 +02:00
6e0c7bc332 clean disable armature missing roll 2020-07-28 12:06:58 +02:00
ee83e61b09 fix: None image 2020-07-28 12:05:26 +02:00
99b2dc0539 refactor: increase use line width 2020-07-28 12:05:04 +02:00
53f1118181 refactor: update download links 2020-07-27 17:44:27 +02:00
2791264a92 feat: empty image support 2020-07-24 21:38:00 +02:00
6c2ee0cad3 refactor: lobby ui 2020-07-24 16:02:19 +02:00
20f8c25f55 refactor: timeout update 2020-07-24 14:58:07 +02:00
0224f55104 refactor: change the timeout 2020-07-24 14:57:39 +02:00
644702ebdf feat: client state update as soon a client are in the lobby 2020-07-24 14:56:20 +02:00
9377b2be9b fix: session pannel title 2020-07-24 14:55:14 +02:00
29cbf23142 doc: update hosting guide 2020-07-24 14:53:44 +02:00
a645f71d19 fix: material socket index
Related to #101
2020-07-22 16:41:01 +02:00
909d92a7a1 fix: broken materials links
Related to #102
2020-07-21 16:46:16 +02:00
7ee9089087 feat: use callbacks instead of timers to cleanup session states
refactor: move graph initialization to operators,py
2020-07-17 16:33:39 +02:00
6201c82392 fix: Loading the false scene by default
`get` was giving a wrong result in the scene initialization routine during the resolve process

Related to #100
2020-07-17 14:47:52 +02:00
0faf7d9436 fix: initial test to handle #99 2020-07-15 19:08:53 +02:00
e69e61117a fix: Process "Quitting session" does not finish and gets stuck
Related to #101
2020-07-15 14:46:48 +02:00
25e988d423 fix: Unregistration of users
Related to #97
2020-07-15 13:50:46 +02:00
8a3ab895e0 fix: ci blender install 2020-07-14 12:56:34 +00:00
06a8e3c0ab feat: replication update 2020-07-14 14:54:22 +02:00
c1c1628a38 Update .gitlab/ci/test.gitlab-ci.yml 2020-07-14 10:07:30 +00:00
022e3354d9 feat: psutils test 2020-07-14 11:59:45 +02:00
211cb848b9 feat: update replication version 2020-07-14 11:29:30 +02:00
25e233f328 fix: temporary disabled spline IK test until the python api is fixed 2020-07-13 15:57:19 +02:00
9bc3d9b29d feat: dependencies version check 2020-07-13 15:12:15 +02:00
15debf339d feat: auto-update dependencies 2020-07-10 18:00:44 +02:00
56df7d182d clean: remove libs 2020-07-10 17:05:42 +02:00
26e1579e35 feat: update ci 2020-07-10 16:59:47 +02:00
a0e290ad6d feat: remove submodule 2020-07-10 16:59:32 +02:00
092384b2e4 feat: use replication from pip 2020-07-10 16:50:09 +02:00
2dc3654e6c feat: tests
feat: services heartbeats
clean: remove psutil dependency
2020-07-09 22:10:26 +02:00
f37a9efc60 feat: orthographic correction 2020-07-09 15:52:42 +02:00
0c5d323063 clean: remove old modal operator queue 2020-07-09 15:17:45 +02:00
b9f1b8a871 fix: armature operator is not running 2020-07-08 18:09:00 +02:00
2f6d8e1701 feat: initial camera background image support 2020-07-07 17:09:37 +02:00
9e64584f2d fix: ZeroDivisionError: integer division or modulo by zero 2020-07-07 15:50:05 +02:00
154aaf71c8 fix: disable test 2020-07-07 13:32:30 +00:00
ac24ab69ff fix: revert and disable testing until a definitive fix 2020-07-07 13:29:08 +00:00
ad431378f8 fix: test with a debian based image 2020-07-07 13:18:34 +00:00
784506cd95 fix: update build file 2020-07-07 13:07:55 +00:00
eb7542b1dd fix: update test.gitlab-ic.yml 2020-07-07 12:44:58 +00:00
25c19471bb feat: update submodule 2020-05-15 18:23:51 +02:00
9e4e646bb1 Merge branch 'develop' into feature/event_driven_updates 2020-05-15 16:19:47 +02:00
f8fa407a45 Merge branch '29-differential-revision' into feature/event_driven_updates 2020-04-13 11:48:20 +02:00
a0676f4e37 hotfix: wrong download link 2020-03-14 20:30:18 +00:00
61a05dc347 fix: curve commit error
Releated to #72
2020-03-10 18:04:06 +01:00
a6e1566f89 Merge branch '40-multi-scene-workflow' of gitlab.com:slumber/multi-user into feature/event_driven_updates 2020-03-05 16:17:00 +01:00
adeb694b2d feat: one apply timer for all 2020-03-05 15:38:20 +01:00
50d14e663e feat: update sumbodules 2020-03-05 10:56:17 +01:00
9b8d69042d feat: update submodule 2020-03-04 22:28:34 +01:00
b2475081b6 feat: id accessor 2020-03-04 18:28:42 +01:00
aef1d8987c Merge branch '61-config-file-prevents-having-the-addon-on-a-shared-network-location' into feature/event_driven_updates 2020-03-04 14:54:42 +01:00
292f76aea5 feat: move diff to observer
feat: logs
2020-02-28 15:39:29 +01:00
28c4ccf1f3 Merge branch 'develop' into feature/event_driven_updates 2020-02-28 14:48:09 +01:00
549b0b3784 fix: submodule version 2020-02-25 17:40:00 +01:00
fc9ab1a7e6 feat: update submodule 2020-02-25 17:38:43 +01:00
44bffc1850 Merge remote-tracking branch 'origin/develop' into feature/event_driven_updates 2020-02-25 17:37:24 +01:00
a141e9bfe7 feat: stash on deps graph update 2020-02-23 14:08:45 +01:00
70 changed files with 4398 additions and 2479 deletions

View File

@ -1,8 +1,10 @@
stages: stages:
- test - test
- build - build
- deploy
include: include:
- local: .gitlab/ci/test.gitlab-ci.yml - local: .gitlab/ci/test.gitlab-ci.yml
- local: .gitlab/ci/build.gitlab-ci.yml - local: .gitlab/ci/build.gitlab-ci.yml
- local: .gitlab/ci/deploy.gitlab-ci.yml

View File

@ -1,14 +1,13 @@
build: build:
stage: build stage: build
image: python:latest image: debian:stable-slim
script: script:
- git submodule init
- git submodule update
- cd multi_user/libs/replication
- rm -rf tests .git .gitignore script - rm -rf tests .git .gitignore script
artifacts: artifacts:
name: multi_user name: multi_user
paths: paths:
- multi_user - multi_user
only:
refs:
- master
- develop

View File

@ -0,0 +1,23 @@
deploy:
stage: deploy
image: slumber/docker-python
variables:
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: "/certs"
services:
- docker:19.03.12-dind
script:
- RP_VERSION="$(python scripts/get_replication_version.py)"
- VERSION="$(python scripts/get_addon_version.py)"
- echo "Building docker image with replication ${RP_VERSION}"
- docker build --build-arg replication_version=${RP_VERSION} --build-arg version={VERSION} -t registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} ./scripts/docker_server
- echo "Pushing to gitlab registry ${VERSION}"
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker push registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION}
only:
refs:
- master
- develop

View File

@ -1,14 +1,5 @@
test: test:
stage: test stage: test
image: python:latest image: slumber/blender-addon-testing:latest
script: script:
- git submodule init
- git submodule update
- apt update
# install blender to get all required dependencies
# TODO: install only dependencies
- apt install -f -y gcc python-dev python3.7-dev
- apt install -f -y blender
- python3 -m pip install blender-addon-tester
- python3 scripts/test_addon.py - python3 scripts/test_addon.py

3
.gitmodules vendored
View File

@ -1,3 +0,0 @@
[submodule "multi_user/libs/replication"]
path = multi_user/libs/replication
url = https://gitlab.com/slumber/replication.git

View File

@ -37,7 +37,7 @@ All notable changes to this project will be documented in this file.
- Serialization is now based on marshal (2x performance improvements). - Serialization is now based on marshal (2x performance improvements).
- Let pip chose python dependencies install path. - Let pip chose python dependencies install path.
## [0.0.3] - Upcoming ## [0.0.3] - 2020-07-29
### Added ### Added
@ -60,8 +60,68 @@ All notable changes to this project will be documented in this file.
- user localization - user localization
- repository init - repository init
### Removed ### Removed
- Unused strict right management strategy - Unused strict right management strategy
- Legacy config management system - Legacy config management system
## [0.1.0] - 2020-10-05
### Added
- Dependency graph driven updates [experimental]
- Edit Mode updates
- Late join mechanism
- Sync Axis lock replication
- Sync collection offset
- Sync camera orthographic scale
- Sync custom fonts
- Sync sound files
- Logging configuration (file output and level)
- Object visibility type replication
- Optional sync for active camera
- Curve->Mesh conversion
- Mesh->gpencil conversion
### Changed
- Auto updater now handle installation from branches
- Use uuid for collection loading
- Moved session instance to replication package
### Fixed
- Prevent unsupported data types to crash the session
- Modifier vertex group assignation
- World sync
- Snapshot UUID error
- The world is not synchronized
## [0.1.1] - 2020-10-16
### Added
- Session status widget
- Affect dependencies during change owner
- Dedicated server management scripts (@brybalicious)
### Changed
- Refactored presence.py
- Reset button UI icon
- Documentation `How to contribute` improvements (@brybalicious)
- Documentation `Hosting guide` improvements (@brybalicious)
- Show flags are now available from the viewport overlay
### Fixed
- Render sync race condition (causing scene errors)
- Binary differentials
- Hybrid session crashes between Linux/Windows
- Materials node default output value
- Right selection
- Client node rights changed to COMMON after disconnecting from the server
- Collection instances selection draw
- Packed image save error
- Material replication
- UI spelling errors (@brybalicious)

View File

@ -11,7 +11,7 @@ This tool aims to allow multiple users to work on the same scene over the networ
## Quick installation ## Quick installation
1. Download latest release [multi_user.zip](/uploads/8aef79c7cf5b1d9606dc58307fd9ad8b/multi_user.zip). 1. Download latest release [multi_user.zip](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build).
2. Run blender as administrator (dependencies installation). 2. Run blender as administrator (dependencies installation).
3. Install last_version.zip from your addon preferences. 3. Install last_version.zip from your addon preferences.
@ -25,27 +25,32 @@ See the [documentation](https://multi-user.readthedocs.io/en/latest/) for detail
Currently, not all data-block are supported for replication over the wire. The following list summarizes the status for each ones. Currently, not all data-block are supported for replication over the wire. The following list summarizes the status for each ones.
| Name | Status | Comment | | Name | Status | Comment |
| ----------- | :----: | :-----------------------------------------------------------: | | ----------- | :----: | :--------------------------------------------------------------------------: |
| action | | Not stable | | action | ✔️ | |
| armature | ❗ | Not stable | | armature | ❗ | Not stable |
| camera | ✔️ | | | camera | ✔️ | |
| collection | ✔️ | | | collection | ✔️ | |
| curve | ✔️ | Nurbs surface don't load correctly | | curve | | Nurbs not supported |
| gpencil | ✔️ | | | gpencil | ✔️ | [Airbrush not supported](https://gitlab.com/slumber/multi-user/-/issues/123) |
| image | | Not stable yet | | image | ✔️ | |
| mesh | ✔️ | | | mesh | ✔️ | |
| material | ✔️ | | | material | ✔️ | |
| metaball | ✔️ | | | metaball | ✔️ | |
| object | ✔️ | | | object | ✔️ | |
| scene | ✔️ | | | texts | ✔️ | |
| world | ✔️ | | | scene | ✔️ | |
| lightprobes | ✔️ | | | world | ✔️ | |
| particles | | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24) | | lightprobes | ✔️ | |
| speakers | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/65) | | compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
| vse | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) | | texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) | | nla | ❌ | |
| libraries | | Partial | | volumes | | |
| particles | ❌ | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24) |
| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
| vse | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| libraries | ❗ | Partial |
### Performance issues ### Performance issues
@ -57,14 +62,16 @@ I'm working on it.
| Dependencies | Version | Needed | | Dependencies | Version | Needed |
| ------------ | :-----: | -----: | | ------------ | :-----: | -----: |
| ZeroMQ | latest | yes | | Replication | latest | yes |
| JsonDiff | latest | yes |
## Contributing ## Contributing
See [contributing section](https://multi-user.readthedocs.io/en/latest/ways_to_contribute.html) of the documentation. See [contributing section](https://multi-user.readthedocs.io/en/latest/ways_to_contribute.html) of the documentation.
Feel free to [join the discord server](https://discord.gg/aBPvGws) to chat, seek help and contribute.
## Licensing ## Licensing
See [license](LICENSE) See [license](LICENSE)

View File

@ -22,7 +22,7 @@ copyright = '2020, Swann Martinez'
author = 'Swann Martinez' author = 'Swann Martinez'
# The full version, including alpha/beta/rc tags # The full version, including alpha/beta/rc tags
release = '0.0.2' release = '0.1.0'
# -- General configuration --------------------------------------------------- # -- General configuration ---------------------------------------------------

Binary file not shown.

Before

Width:  |  Height:  |  Size: 21 KiB

After

Width:  |  Height:  |  Size: 8.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.7 KiB

After

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 22 KiB

After

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.1 KiB

After

Width:  |  Height:  |  Size: 559 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 70 KiB

View File

@ -8,5 +8,4 @@ Getting started
install install
quickstart quickstart
known_problems
glossary glossary

View File

@ -1,46 +0,0 @@
.. _known-problems:
==============
Known problems
==============
.. rubric:: What do you need to do in order to use Multi-User through internet?
1. Use Hamachi or ZeroTier (I prefer Hamachi) and create a network.
2. All participants need to join this network.
3. Go to Blender and install Multi-User in the preferences.
4. Setup and start the session:
* **Host**: After activating Multi-User as an Add-On, press N and go on Multi-User.
Then, put the IP of your network where IP is asked for.
Leave Port and IPC Port on default(5555 and 5561). Increase the Timeout(ms) if the connection is not stable.
Then press on "host".
* **Guest**: After activating Multi-User as an Add-On, press N and go to Multi-User
Then, put the IP of your network where IP is asked for.
Leave Port and IPC Port on default(5555 and 5561)(Simpler, put the same information that the host is using.
BUT,it needs 4 ports for communication. Therefore, you need to put 5555+count of guests [up to 4]. ).
Increase the Timeout(ms) if the connection is not stable. Then press on "connexion".
.. rubric:: What do you need to check if you can't host?
You need to check if the IP and all ports are correct. If it's not loading because you loaded a project before hosting, it's not your fault.
Then the version is not stable yet (the project contains data that is not made stable yet).
.. rubric:: What do you need to check if you can't connect?
Check, if you are connected to the network (VPN) of the host. Also, check if you have all of the information like the host has.
Maybe you have different versions (which shouldn't be the case after Auto-Updater is introduced).
.. rubric:: You are connected, but you don't see anything?
After pressing N, go presence overlay and check the box.
Also, go down and uncheck the box "Show only owned"(unless you need privacy ( ͡° ͜ʖ ͡°) ).
If it's still not working, hit the support channel on the discord channel "multi-user". This little helping text is produced by my own experience
(Ultr-X).
In order to bring attention to other problems, please @ me on the support channel. Every problem brought to me will be documented to optimize and update this text.
Thank you and have fun with Multi-User, brought to you by "swann".
Here the discord server: https://discord.gg/v5eKgm

View File

@ -161,6 +161,19 @@ The collaboration quality directly depend on the communication quality. This sec
various tools made in an effort to ease the communication between the different session users. various tools made in an effort to ease the communication between the different session users.
Feel free to suggest any idea for communication tools `here <https://gitlab.com/slumber/multi-user/-/issues/75>`_ . Feel free to suggest any idea for communication tools `here <https://gitlab.com/slumber/multi-user/-/issues/75>`_ .
---------------------------
Change replication behavior
---------------------------
During a session, the multi-user will replicate your modifications to other instances.
In order to avoid annoying other users when you are experimenting, some of those modifications can be ignored via
various flags present at the top of the panel (see red area in the image below). Those flags are explained in the :ref:`replication` section.
.. figure:: img/quickstart_replication.png
:align: center
Session replication flags
-------------------- --------------------
Monitor online users Monitor online users
-------------------- --------------------
@ -238,10 +251,20 @@ it draw users related information in your viewport such as:
The presence overlay panel (see image above) allow you to enable/disable The presence overlay panel (see image above) allow you to enable/disable
various drawn parts via the following flags: various drawn parts via the following flags:
- **Show session statut**: display the session status in the viewport
.. figure:: img/quickstart_status.png
:align: center
- **Text scale**: session status text size
- **Vertical/Horizontal position**: session position in the viewport
- **Show selected objects**: display other users current selection - **Show selected objects**: display other users current selection
- **Show users**: display users current viewpoint - **Show users**: display users current viewpoint
- **Show different scenes**: display users working on other scenes - **Show different scenes**: display users working on other scenes
----------- -----------
Manage data Manage data
----------- -----------
@ -299,37 +322,105 @@ Here is a quick list of available actions:
.. _advanced: .. _advanced:
Advanced configuration Advanced settings
====================== =================
This section contains optional settings to configure the session behavior. This section contains optional settings to configure the session behavior.
.. figure:: img/quickstart_advanced.png .. figure:: img/quickstart_advanced.png
:align: center :align: center
Repository panel Advanced configuration panel
.. rubric:: Network -------
Network
-------
.. figure:: img/quickstart_advanced_network.png
:align: center
Advanced network settings
**IPC Port** is the port used for Inter Process Communication. This port is used **IPC Port** is the port used for Inter Process Communication. This port is used
by the multi-users subprocesses to communicate with each others. If different instances by the multi-users subprocesses to communicate with each others. If different instances
of the multi-user are using the same IPC port it will create conflict ! of the multi-user are using the same IPC port it will create conflict !
You only need to modify it if you need to launch multiple clients from the same .. note::
computer(or if you try to host and join on the same computer). You should just enter a different You only need to modify it if you need to launch multiple clients from the same
**IPC port** for each blender instance. computer(or if you try to host and join on the same computer). You should just enter a different
**IPC port** for each blender instance.
**Timeout (in milliseconds)** is the maximum ping authorized before auto-disconnecting. **Timeout (in milliseconds)** is the maximum ping authorized before auto-disconnecting.
You should only increase it if you have a bad connection. You should only increase it if you have a bad connection.
.. rubric:: Replication .. _replication:
-----------
Replication
-----------
.. figure:: img/quickstart_advanced_replication.png
:align: center
Advanced replication settings
**Synchronize render settings** (only host) enable replication of EEVEE and CYCLES render settings to match render between clients. **Synchronize render settings** (only host) enable replication of EEVEE and CYCLES render settings to match render between clients.
**Synchronize active camera** sync the scene active camera.
**Edit Mode Updates** enable objects update while you are in Edit_Mode.
.. warning:: Edit Mode Updates kill performances with complex objects (heavy meshes, gpencil, etc...).
**Update method** allow you to change how replication update are triggered. Until now two update methode are implemented:
- **Default**: Use external threads to monitor datablocks changes, slower and less accurate.
- **Despgraph ⚠️**: Use the blender dependency graph to trigger updates. Faster but experimental and unstable !
**Properties frequency grid** allows to set a custom replication frequency for each type of data-block: **Properties frequency grid** allows to set a custom replication frequency for each type of data-block:
- **Refresh**: pushed data update rate (in second) - **Refresh**: pushed data update rate (in second)
- **Apply**: pulled data update rate (in second) - **Apply**: pulled data update rate (in second)
.. note:: Per-data type settings will soon be revamped for simplification purposes -----
Cache
-----
The multi-user allows to replicate external blend dependencies such as images, movies and sounds.
On each client, those files are stored into the cache folder.
.. figure:: img/quickstart_advanced_cache.png
:align: center
Advanced cache settings
**cache_directory** allows to choose where cached files (images, sound, movies) will be saved.
**Clear memory filecache** will save memory space at runtime by removing the file content from memory as soon as it has been written to the disk.
**Clear cache** will remove all file from the cache folder.
.. warning:: Clear cache could break your scene images/movies/sounds if they are used in the blend !
---
Log
---
.. figure:: img/quickstart_advanced_logging.png
:align: center
Advanced log settings
**log level** allow to set the logging level of detail. Here is the detail for each values:
+-----------+-----------------------------------------------+
| Log level | Description |
+===========+===============================================+
| ERROR | Shows only critical error |
+-----------+-----------------------------------------------+
| WARNING | Shows only errors (all kind) |
+-----------+-----------------------------------------------+
| INFO | Shows only status related messages and errors |
+-----------+-----------------------------------------------+
| DEBUG | Shows every possible information. |
+-----------+-----------------------------------------------+

View File

@ -48,7 +48,6 @@ Documentation is organized into the following sections:
getting_started/install getting_started/install
getting_started/quickstart getting_started/quickstart
getting_started/known_problems
getting_started/glossary getting_started/glossary
.. toctree:: .. toctree::

View File

@ -144,7 +144,7 @@ Let's check the connection status. Right click on the tray icon and click on **S
Network status. Network status.
The network status must be **OK** for each user(like in the picture above) otherwise it means that you are not connected to the network. The network status must be **OK** for each user(like in the picture above) otherwise it means that you are not connected to the network.
If you see something like **ACCESS_DENIED**, it means that you were not authorized to join the network. Please check the :ref:`network-authorization` section. If you see something like **ACCESS_DENIED**, it means that you were not authorized to join the network. Please check the section :ref:`network-authorization`
This is it for the ZeroTier network setup. Now everything should be setup to use the multi-user add-on over internet ! You can now follow the :ref:`quickstart` guide to start using the multi-user add-on ! This is it for the ZeroTier network setup. Now everything should be setup to use the multi-user add-on over internet ! You can now follow the :ref:`quickstart` guide to start using the multi-user add-on !
@ -171,46 +171,50 @@ From the dedicated server
run it at home for LAN but for internet hosting you need to follow the :ref:`port-forwarding` setup first. run it at home for LAN but for internet hosting you need to follow the :ref:`port-forwarding` setup first.
The dedicated server allow you to host a session with simplicity from any location. The dedicated server allow you to host a session with simplicity from any location.
It was developed to improve intaernet hosting performance. It was developed to improve internet hosting performance.
The dedicated server can be run in tow ways: The dedicated server can be run in two ways:
- :ref:`cmd-line` - :ref:`cmd-line`
- :ref:`docker` - :ref:`docker`
.. Note:: There are shell scripts to conveniently start a dedicated server via either of these approaches available in the gitlab repository. See section: :ref:`serverstartscripts`
.. _cmd-line: .. _cmd-line:
Using a regular command line Using a regular command line
---------------------------- ----------------------------
You can run the dedicated server on any platform by following those steps: You can run the dedicated server on any platform by following these steps:
1. Firstly, download and install python 3 (3.6 or above). 1. Firstly, download and install python 3 (3.6 or above).
2. Download and extract the dedicated server from `here <https://gitlab.com/slumber/replication/-/archive/develop/replication-develop.zip>`_ 2. Install the latest version of the replication library:
3. Open a terminal in the extracted folder and install python dependencies by running:
.. code-block:: bash .. code-block:: bash
python -m pip install -r requirements.txt python -m pip install replication==0.0.21a15
4. Launch the server from the same terminal with: 4. Launch the server with:
.. code-block:: bash .. code-block:: bash
python scripts/server.py replication.serve
.. hint:: .. hint::
You can also specify a custom **port** (-p), **timeout** (-t) and **admin password** (-pwd) with the following optionnal argument You can also specify a custom **port** (-p), **timeout** (-t), **admin password** (-pwd), **log level (ERROR, WARNING, INFO or DEBUG)** (-l) and **log file** (-lf) with the following optional arguments
.. code-block:: bash .. code-block:: bash
python scripts/server.py -p 5555 -pwd toto -t 1000 replication.serve -p 5555 -pwd admin -t 1000 -l INFO -lf server.log
Here, for example, a server is instantiated on port 5555, with password 'admin', a 1 second timeout, and logging enabled.
As soon as the dedicated server is running, you can connect to it from blender by following :ref:`how-to-join`.
As soon as the dedicated server is running, you can connect to it from blender (follow :ref:`how-to-join`).
.. hint:: .. hint::
Some commands are available to manage the session. Check :ref:`dedicated-management` to learn more. Some commands are available to enable an administrator to manage the session. Check :ref:`dedicated-management` to learn more.
.. _docker: .. _docker:
@ -218,22 +222,56 @@ As soon as the dedicated server is running, you can connect to it from blender (
Using a pre-configured image on docker engine Using a pre-configured image on docker engine
--------------------------------------------- ---------------------------------------------
Launching the dedicated server from a docker server is simple as: Launching the dedicated server from a docker server is simple as running:
.. code-block:: bash .. code-block:: bash
docker run -d \ docker run -d \
-p 5555-5560:5555-5560 \ -p 5555-5560:5555-5560 \
-e port=5555 \ -e port=5555 \
-e log_level=DEBUG \
-e password=admin \ -e password=admin \
-e timeout=1000 \ -e timeout=1000 \
registry.gitlab.com/slumber/multi-user/multi-user-server:0.0.3 registry.gitlab.com/slumber/multi-user/multi-user-server:0.1.0
As soon as the dedicated server is running, you can connect to it from blender. As soon as the dedicated server is running, you can connect to it from blender by following :ref:`how-to-join`.
You can check the :ref:`how-to-join` section.
You can check your container is running, and find its ID with:
.. code-block:: bash
docker ps
Logs for the server running in the docker container can be accessed by outputting the following to a log file:
.. code-block:: bash
docker log your-container-id >& dockerserver.log
.. Note:: If using WSL2 on Windows 10 (Windows Subsystem for Linux), it is preferable to run a dedicated server via regular command line approach (or the associated startup script) from within Windows - docker desktop for windows 10 usually uses the WSL2 backend where it is available.
.. _serverstartscripts:
Server startup scripts
----------------------
Convenient scripts are available in the Gitlab repository: https://gitlab.com/slumber/multi-user/scripts/startup_scripts/
Simply run the relevant script in a shell on the host machine to start a server with one line of code via replication directly or via a docker container. Choose between the two methods:
.. code-block:: bash
./start-server.sh
or
.. code-block:: bash
./run-dockerfile.sh
.. hint:: .. hint::
Some commands are available to manage the session. Check :ref:`dedicated-management` to learn more. Once your server is up and running, some commands are available to manage the session :ref:`dedicated-management`
.. _dedicated-management: .. _dedicated-management:

View File

@ -21,11 +21,11 @@ In order to help with the testing, you have several possibilities:
- Test `development branch <https://gitlab.com/slumber/multi-user/-/branches>`_ - Test `development branch <https://gitlab.com/slumber/multi-user/-/branches>`_
-------------------------- --------------------------
Filling an issue on Gitlab Filing an issue on Gitlab
-------------------------- --------------------------
The `gitlab issue tracker <https://gitlab.com/slumber/multi-user/issues>`_ is used for bug report and enhancement suggestion. The `gitlab issue tracker <https://gitlab.com/slumber/multi-user/issues>`_ is used for bug report and enhancement suggestion.
You will need a Gitlab account to be able to open a new issue there and click on "New issue" button. You will need a Gitlab account to be able to open a new issue there and click on "New issue" button in the main multi-user project.
Here are some useful information you should provide in a bug report: Here are some useful information you should provide in a bug report:
@ -35,8 +35,75 @@ Here are some useful information you should provide in a bug report:
Contributing code Contributing code
================= =================
1. Fork it (https://gitlab.com/yourname/yourproject/fork) In general, this project follows the `Gitflow Workflow <https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow>`_. It may help to understand that there are three different repositories - the upstream (main multi-user project repository, designated in git by 'upstream'), remote (forked repository, designated in git by 'origin'), and the local repository on your machine.
2. Create your feature branch (git checkout -b feature/fooBar) The following example suggests how to contribute a feature.
3. Commit your changes (git commit -am 'Add some fooBar')
4. Push to the branch (git push origin feature/fooBar) 1. Fork the project into a new repository:
5. Create a new Pull Request https://gitlab.com/yourname/multi-user
2. Clone the new repository locally:
.. code-block:: bash
git clone https://gitlab.com/yourname/multi-user.git
3. Keep your fork in sync with the main repository by setting up the upstream pointer once. cd into your git repo and then run:
.. code-block:: bash
git remote add upstream https://gitlab.com/slumber/multi-user.git
4. Now, locally check out the develop branch, upon which to base your new feature branch:
.. code-block:: bash
git checkout develop
5. Fetch any changes from the main upstream repository into your fork (especially if some time has passed since forking):
.. code-block:: bash
git fetch upstream
'Fetch' downloads objects and refs from the repository, but doesn't apply them to the branch we are working on. We want to apply the updates to the branch we will work from, which we checked out in step 4.
6. Let's merge any recent changes from the remote upstream (original repository's) 'develop' branch into our local 'develop' branch:
.. code-block:: bash
git merge upstream/develop
7. Update your forked repository's remote 'develop' branch with the fetched changes, just to keep things tidy. Make sure you haven't committed any local changes in the interim:
.. code-block:: bash
git push origin develop
8. Locally create your own new feature branch from the develop branch, using the syntax:
.. code-block:: bash
git checkout -b feature/yourfeaturename
...where 'feature/' designates a feature branch, and 'yourfeaturename' is a name of your choosing
9. Add and commit your changes, including a commit message:
.. code-block:: bash
git commit -am 'Add fooBar'
10. Push committed changes to the remote copy of your new feature branch which will be created in this step:
.. code-block:: bash
git push -u origin feature/yourfeaturename
If it's been some time since performing steps 4 through 7, make sure to checkout 'develop' again and pull the latest changes from upstream before checking out and creating feature/yourfeaturename and pushing changes. Alternatively, checkout 'feature/yourfeaturename' and simply run:
.. code-block:: bash
git rebase upstream/develop
and your staged commits will be merged along with the changes. More information on `rebasing here <https://git-scm.com/book/en/v2/Git-Branching-Rebasing>`_
.. Hint:: -u option sets up your locally created new branch to follow a remote branch which is now created with the same name on your remote repository.
11. Finally, create a new Pull/Merge Request on Gitlab to merge the remote version of this new branch with commited updates, back into the upstream develop branch, finalising the integration of the new feature.
12. Thanks for contributing!
.. Note:: For hotfixes, replace 'feature/' with 'hotfix/' and base the new branch off the parent 'master' branch instead of 'develop' branch. Make sure to checkout 'master' before running step 8
.. Note:: Let's follow the Atlassian `Gitflow Workflow <https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow>`_, except for one main difference - submitting a pull request rather than merging by ourselves.
.. Note:: See `here <https://philna.sh/blog/2018/08/21/git-commands-to-keep-a-fork-up-to-date/>`_ or `here <https://stefanbauer.me/articles/how-to-keep-your-git-fork-up-to-date>`_ for instructions on how to keep a fork up to date.

View File

@ -19,9 +19,9 @@
bl_info = { bl_info = {
"name": "Multi-User", "name": "Multi-User",
"author": "Swann Martinez", "author": "Swann Martinez",
"version": (0, 0, 3), "version": (0, 2, 0),
"description": "Enable real-time collaborative workflow inside blender", "description": "Enable real-time collaborative workflow inside blender",
"blender": (2, 80, 0), "blender": (2, 82, 0),
"location": "3D View > Sidebar > Multi-User tab", "location": "3D View > Sidebar > Multi-User tab",
"warning": "Unstable addon, use it at your own risks", "warning": "Unstable addon, use it at your own risks",
"category": "Collaboration", "category": "Collaboration",
@ -40,45 +40,41 @@ import sys
import bpy import bpy
from bpy.app.handlers import persistent from bpy.app.handlers import persistent
from . import environment, utils from . import environment
# TODO: remove dependency as soon as replication will be installed as a module
DEPENDENCIES = { DEPENDENCIES = {
("zmq","zmq"), ("replication", '0.2.0'),
("jsondiff","jsondiff"),
("deepdiff", "deepdiff"),
("psutil","psutil")
} }
libs = os.path.dirname(os.path.abspath(__file__))+"\\libs\\replication\\replication" module_error_msg = "Insufficient rights to install the multi-user \
dependencies, aunch blender with administrator rights."
def register(): def register():
# Setup logging policy # Setup logging policy
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO) logging.basicConfig(
format='%(asctime)s CLIENT %(levelname)-8s %(message)s',
if libs not in sys.path: datefmt='%H:%M:%S',
sys.path.append(libs) level=logging.INFO)
try: try:
environment.setup(DEPENDENCIES, bpy.app.binary_path_python) environment.setup(DEPENDENCIES, bpy.app.binary_path_python)
except ModuleNotFoundError:
logging.fatal("Fail to install multi-user dependencies, try to execute blender with admin rights.")
return
from . import presence
from . import operators
from . import ui
from . import preferences
from . import addon_updater_ops
preferences.register() from . import presence
addon_updater_ops.register(bl_info) from . import operators
presence.register() from . import ui
operators.register() from . import preferences
ui.register() from . import addon_updater_ops
preferences.register()
addon_updater_ops.register(bl_info)
presence.register()
operators.register()
ui.register()
except ModuleNotFoundError as e:
raise Exception(module_error_msg)
logging.error(module_error_msg)
bpy.types.WindowManager.session = bpy.props.PointerProperty( bpy.types.WindowManager.session = bpy.props.PointerProperty(
type=preferences.SessionProps) type=preferences.SessionProps)
bpy.types.ID.uuid = bpy.props.StringProperty( bpy.types.ID.uuid = bpy.props.StringProperty(

View File

@ -23,7 +23,11 @@ https://github.com/CGCookie/blender-addon-updater
""" """
__version__ = "1.0.8"
import errno import errno
import traceback
import platform
import ssl import ssl
import urllib.request import urllib.request
import urllib import urllib
@ -98,6 +102,7 @@ class Singleton_updater(object):
# runtime variables, initial conditions # runtime variables, initial conditions
self._verbose = False self._verbose = False
self._use_print_traces = True
self._fake_install = False self._fake_install = False
self._async_checking = False # only true when async daemon started self._async_checking = False # only true when async daemon started
self._update_ready = None self._update_ready = None
@ -133,6 +138,13 @@ class Singleton_updater(object):
self._select_link = select_link_function self._select_link = select_link_function
# called from except blocks, to print the exception details,
# according to the use_print_traces option
def print_trace():
if self._use_print_traces:
traceback.print_exc()
# ------------------------------------------------------------------------- # -------------------------------------------------------------------------
# Getters and setters # Getters and setters
# ------------------------------------------------------------------------- # -------------------------------------------------------------------------
@ -166,7 +178,7 @@ class Singleton_updater(object):
try: try:
self._auto_reload_post_update = bool(value) self._auto_reload_post_update = bool(value)
except: except:
raise ValueError("Must be a boolean value") raise ValueError("auto_reload_post_update must be a boolean value")
@property @property
def backup_current(self): def backup_current(self):
@ -351,7 +363,7 @@ class Singleton_updater(object):
try: try:
self._repo = str(value) self._repo = str(value)
except: except:
raise ValueError("User must be a string") raise ValueError("repo must be a string value")
@property @property
def select_link(self): def select_link(self):
@ -377,6 +389,7 @@ class Singleton_updater(object):
os.makedirs(value) os.makedirs(value)
except: except:
if self._verbose: print("Error trying to staging path") if self._verbose: print("Error trying to staging path")
self.print_trace()
return return
self._updater_path = value self._updater_path = value
@ -446,6 +459,16 @@ class Singleton_updater(object):
except: except:
raise ValueError("Verbose must be a boolean value") raise ValueError("Verbose must be a boolean value")
@property
def use_print_traces(self):
return self._use_print_traces
@use_print_traces.setter
def use_print_traces(self, value):
try:
self._use_print_traces = bool(value)
except:
raise ValueError("use_print_traces must be a boolean value")
@property @property
def version_max_update(self): def version_max_update(self):
return self._version_max_update return self._version_max_update
@ -637,6 +660,9 @@ class Singleton_updater(object):
else: else:
if self._verbose: print("Tokens not setup for engine yet") if self._verbose: print("Tokens not setup for engine yet")
# Always set user agent
request.add_header('User-Agent', "Python/"+str(platform.python_version()))
# run the request # run the request
try: try:
if context: if context:
@ -652,6 +678,7 @@ class Singleton_updater(object):
self._error = "HTTP error" self._error = "HTTP error"
self._error_msg = str(e.code) self._error_msg = str(e.code)
print(self._error, self._error_msg) print(self._error, self._error_msg)
self.print_trace()
self._update_ready = None self._update_ready = None
except urllib.error.URLError as e: except urllib.error.URLError as e:
reason = str(e.reason) reason = str(e.reason)
@ -663,6 +690,7 @@ class Singleton_updater(object):
self._error = "URL error, check internet connection" self._error = "URL error, check internet connection"
self._error_msg = reason self._error_msg = reason
print(self._error, self._error_msg) print(self._error, self._error_msg)
self.print_trace()
self._update_ready = None self._update_ready = None
return None return None
else: else:
@ -684,6 +712,7 @@ class Singleton_updater(object):
self._error_msg = str(e.reason) self._error_msg = str(e.reason)
self._update_ready = None self._update_ready = None
print(self._error, self._error_msg) print(self._error, self._error_msg)
self.print_trace()
return None return None
else: else:
return None return None
@ -700,15 +729,17 @@ class Singleton_updater(object):
if self._verbose: print("Preparing staging folder for download:\n",local) if self._verbose: print("Preparing staging folder for download:\n",local)
if os.path.isdir(local) == True: if os.path.isdir(local) == True:
try: try:
shutil.rmtree(local) shutil.rmtree(local, ignore_errors=True)
os.makedirs(local) os.makedirs(local)
except: except:
error = "failed to remove existing staging directory" error = "failed to remove existing staging directory"
self.print_trace()
else: else:
try: try:
os.makedirs(local) os.makedirs(local)
except: except:
error = "failed to create staging directory" error = "failed to create staging directory"
self.print_trace()
if error != None: if error != None:
if self._verbose: print("Error: Aborting update, "+error) if self._verbose: print("Error: Aborting update, "+error)
@ -733,6 +764,10 @@ class Singleton_updater(object):
request.add_header('PRIVATE-TOKEN',self._engine.token) request.add_header('PRIVATE-TOKEN',self._engine.token)
else: else:
if self._verbose: print("Tokens not setup for selected engine yet") if self._verbose: print("Tokens not setup for selected engine yet")
# Always set user agent
request.add_header('User-Agent', "Python/"+str(platform.python_version()))
self.urlretrieve(urllib.request.urlopen(request,context=context), self._source_zip) self.urlretrieve(urllib.request.urlopen(request,context=context), self._source_zip)
# add additional checks on file size being non-zero # add additional checks on file size being non-zero
if self._verbose: print("Successfully downloaded update zip") if self._verbose: print("Successfully downloaded update zip")
@ -743,6 +778,7 @@ class Singleton_updater(object):
if self._verbose: if self._verbose:
print("Error retrieving download, bad link?") print("Error retrieving download, bad link?")
print("Error: {}".format(e)) print("Error: {}".format(e))
self.print_trace()
return False return False
@ -757,16 +793,18 @@ class Singleton_updater(object):
if os.path.isdir(local): if os.path.isdir(local):
try: try:
shutil.rmtree(local) shutil.rmtree(local, ignore_errors=True)
except: except:
if self._verbose:print("Failed to removed previous backup folder, contininuing") if self._verbose:print("Failed to removed previous backup folder, contininuing")
self.print_trace()
# remove the temp folder; shouldn't exist but could if previously interrupted # remove the temp folder; shouldn't exist but could if previously interrupted
if os.path.isdir(tempdest): if os.path.isdir(tempdest):
try: try:
shutil.rmtree(tempdest) shutil.rmtree(tempdest, ignore_errors=True)
except: except:
if self._verbose:print("Failed to remove existing temp folder, contininuing") if self._verbose:print("Failed to remove existing temp folder, contininuing")
self.print_trace()
# make the full addon copy, which temporarily places outside the addon folder # make the full addon copy, which temporarily places outside the addon folder
if self._backup_ignore_patterns != None: if self._backup_ignore_patterns != None:
shutil.copytree( shutil.copytree(
@ -794,7 +832,7 @@ class Singleton_updater(object):
# make the copy # make the copy
shutil.move(backuploc,tempdest) shutil.move(backuploc,tempdest)
shutil.rmtree(self._addon_root) shutil.rmtree(self._addon_root, ignore_errors=True)
os.rename(tempdest,self._addon_root) os.rename(tempdest,self._addon_root)
self._json["backup_date"] = "" self._json["backup_date"] = ""
@ -815,7 +853,7 @@ class Singleton_updater(object):
# clear the existing source folder in case previous files remain # clear the existing source folder in case previous files remain
outdir = os.path.join(self._updater_path, "source") outdir = os.path.join(self._updater_path, "source")
try: try:
shutil.rmtree(outdir) shutil.rmtree(outdir, ignore_errors=True)
if self._verbose: if self._verbose:
print("Source folder cleared") print("Source folder cleared")
except: except:
@ -828,6 +866,7 @@ class Singleton_updater(object):
except Exception as err: except Exception as err:
print("Error occurred while making extract dir:") print("Error occurred while making extract dir:")
print(str(err)) print(str(err))
self.print_trace()
self._error = "Install failed" self._error = "Install failed"
self._error_msg = "Failed to make extract directory" self._error_msg = "Failed to make extract directory"
return -1 return -1
@ -869,6 +908,7 @@ class Singleton_updater(object):
if exc.errno != errno.EEXIST: if exc.errno != errno.EEXIST:
self._error = "Install failed" self._error = "Install failed"
self._error_msg = "Could not create folder from zip" self._error_msg = "Could not create folder from zip"
self.print_trace()
return -1 return -1
else: else:
with open(os.path.join(outdir, subpath), "wb") as outfile: with open(os.path.join(outdir, subpath), "wb") as outfile:
@ -962,12 +1002,13 @@ class Singleton_updater(object):
print("Clean removing file {}".format(os.path.join(base,f))) print("Clean removing file {}".format(os.path.join(base,f)))
for f in folders: for f in folders:
if os.path.join(base,f)==self._updater_path: continue if os.path.join(base,f)==self._updater_path: continue
shutil.rmtree(os.path.join(base,f)) shutil.rmtree(os.path.join(base,f), ignore_errors=True)
print("Clean removing folder and contents {}".format(os.path.join(base,f))) print("Clean removing folder and contents {}".format(os.path.join(base,f)))
except Exception as err: except Exception as err:
error = "failed to create clean existing addon folder" error = "failed to create clean existing addon folder"
print(error, str(err)) print(error, str(err))
self.print_trace()
# Walk through the base addon folder for rules on pre-removing # Walk through the base addon folder for rules on pre-removing
# but avoid removing/altering backup and updater file # but avoid removing/altering backup and updater file
@ -983,6 +1024,7 @@ class Singleton_updater(object):
if self._verbose: print("Pre-removed file "+file) if self._verbose: print("Pre-removed file "+file)
except OSError: except OSError:
print("Failed to pre-remove "+file) print("Failed to pre-remove "+file)
self.print_trace()
# Walk through the temp addon sub folder for replacements # Walk through the temp addon sub folder for replacements
# this implements the overwrite rules, which apply after # this implements the overwrite rules, which apply after
@ -1006,7 +1048,7 @@ class Singleton_updater(object):
# otherwise, check each file to see if matches an overwrite pattern # otherwise, check each file to see if matches an overwrite pattern
replaced=False replaced=False
for ptrn in self._overwrite_patterns: for ptrn in self._overwrite_patterns:
if fnmatch.filter([destFile],ptrn): if fnmatch.filter([file],ptrn):
replaced=True replaced=True
break break
if replaced: if replaced:
@ -1022,10 +1064,11 @@ class Singleton_updater(object):
# now remove the temp staging folder and downloaded zip # now remove the temp staging folder and downloaded zip
try: try:
shutil.rmtree(staging_path) shutil.rmtree(staging_path, ignore_errors=True)
except: except:
error = "Error: Failed to remove existing staging directory, consider manually removing "+staging_path error = "Error: Failed to remove existing staging directory, consider manually removing "+staging_path
if self._verbose: print(error) if self._verbose: print(error)
self.print_trace()
def reload_addon(self): def reload_addon(self):
@ -1041,9 +1084,16 @@ class Singleton_updater(object):
# not allowed in restricted context, such as register module # not allowed in restricted context, such as register module
# toggle to refresh # toggle to refresh
bpy.ops.wm.addon_disable(module=self._addon_package) if "addon_disable" in dir(bpy.ops.wm): # 2.7
bpy.ops.wm.addon_refresh() bpy.ops.wm.addon_disable(module=self._addon_package)
bpy.ops.wm.addon_enable(module=self._addon_package) bpy.ops.wm.addon_refresh()
bpy.ops.wm.addon_enable(module=self._addon_package)
print("2.7 reload complete")
else: # 2.8
bpy.ops.preferences.addon_disable(module=self._addon_package)
bpy.ops.preferences.addon_refresh()
bpy.ops.preferences.addon_enable(module=self._addon_package)
print("2.8 reload complete")
# ------------------------------------------------------------------------- # -------------------------------------------------------------------------
@ -1375,26 +1425,26 @@ class Singleton_updater(object):
if "last_check" not in self._json or self._json["last_check"] == "": if "last_check" not in self._json or self._json["last_check"] == "":
return True return True
else:
now = datetime.now()
last_check = datetime.strptime(self._json["last_check"],
"%Y-%m-%d %H:%M:%S.%f")
next_check = last_check
offset = timedelta(
days=self._check_interval_days + 30*self._check_interval_months,
hours=self._check_interval_hours,
minutes=self._check_interval_minutes
)
delta = (now - offset) - last_check now = datetime.now()
if delta.total_seconds() > 0: last_check = datetime.strptime(self._json["last_check"],
if self._verbose: "%Y-%m-%d %H:%M:%S.%f")
print("{} Updater: Time to check for updates!".format(self._addon)) next_check = last_check
return True offset = timedelta(
else: days=self._check_interval_days + 30*self._check_interval_months,
if self._verbose: hours=self._check_interval_hours,
print("{} Updater: Determined it's not yet time to check for updates".format(self._addon)) minutes=self._check_interval_minutes
return False )
delta = (now - offset) - last_check
if delta.total_seconds() > 0:
if self._verbose:
print("{} Updater: Time to check for updates!".format(self._addon))
return True
if self._verbose:
print("{} Updater: Determined it's not yet time to check for updates".format(self._addon))
return False
def get_json_path(self): def get_json_path(self):
"""Returns the full path to the JSON state file used by this updater. """Returns the full path to the JSON state file used by this updater.
@ -1413,6 +1463,7 @@ class Singleton_updater(object):
except Exception as err: except Exception as err:
print("Other OS error occurred while trying to rename old JSON") print("Other OS error occurred while trying to rename old JSON")
print(err) print(err)
self.print_trace()
return json_path return json_path
def set_updater_json(self): def set_updater_json(self):
@ -1513,6 +1564,7 @@ class Singleton_updater(object):
except Exception as exception: except Exception as exception:
print("Checking for update error:") print("Checking for update error:")
print(exception) print(exception)
self.print_trace()
if not self._error: if not self._error:
self._update_ready = False self._update_ready = False
self._update_version = None self._update_version = None
@ -1624,10 +1676,7 @@ class GitlabEngine(object):
return "{}{}{}".format(self.api_url,"/api/v4/projects/",updater.repo) return "{}{}{}".format(self.api_url,"/api/v4/projects/",updater.repo)
def form_tags_url(self, updater): def form_tags_url(self, updater):
if updater.use_releases: return "{}{}".format(self.form_repo_url(updater),"/repository/tags")
return "{}{}".format(self.form_repo_url(updater),"/releases")
else:
return "{}{}".format(self.form_repo_url(updater),"/repository/tags")
def form_branch_list_url(self, updater): def form_branch_list_url(self, updater):
# does not validate branch name. # does not validate branch name.
@ -1655,12 +1704,7 @@ class GitlabEngine(object):
def parse_tags(self, response, updater): def parse_tags(self, response, updater):
if response == None: if response == None:
return [] return []
# Return asset links from release return [{"name": tag["name"], "zipball_url": self.get_zip_url(tag["commit"]["id"], updater)} for tag in response]
if updater.use_releases:
return [{"name": release["name"], "zipball_url": release["assets"]["links"][0]["url"]} for release in response]
else:
return [{"name": tag["name"], "zipball_url": self.get_zip_url(tag["commit"]["id"], updater)} for tag in response]
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------

File diff suppressed because it is too large Load Diff

View File

@ -34,11 +34,14 @@ __all__ = [
'bl_metaball', 'bl_metaball',
'bl_lattice', 'bl_lattice',
'bl_lightprobe', 'bl_lightprobe',
'bl_speaker' 'bl_speaker',
'bl_font',
'bl_sound',
'bl_file'
] # Order here defines execution order ] # Order here defines execution order
from . import * from . import *
from ..libs.replication.replication.data import ReplicatedDataFactory from replication.data import ReplicatedDataFactory
def types_to_register(): def types_to_register():
return __all__ return __all__

View File

@ -134,6 +134,7 @@ class BlAction(BlDatablock):
bl_delay_refresh = 1 bl_delay_refresh = 1
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = True bl_automatic_push = True
bl_check_common = False
bl_icon = 'ACTION_TWEAK' bl_icon = 'ACTION_TWEAK'
def _construct(self, data): def _construct(self, data):

View File

@ -31,6 +31,7 @@ class BlArmature(BlDatablock):
bl_delay_refresh = 1 bl_delay_refresh = 1
bl_delay_apply = 0 bl_delay_apply = 0
bl_automatic_push = True bl_automatic_push = True
bl_check_common = False
bl_icon = 'ARMATURE_DATA' bl_icon = 'ARMATURE_DATA'
def _construct(self, data): def _construct(self, data):
@ -92,6 +93,7 @@ class BlArmature(BlDatablock):
new_bone.head = bone_data['head_local'] new_bone.head = bone_data['head_local']
new_bone.tail_radius = bone_data['tail_radius'] new_bone.tail_radius = bone_data['tail_radius']
new_bone.head_radius = bone_data['head_radius'] new_bone.head_radius = bone_data['head_radius']
# new_bone.roll = bone_data['roll']
if 'parent' in bone_data: if 'parent' in bone_data:
new_bone.parent = target.edit_bones[data['bones'] new_bone.parent = target.edit_bones[data['bones']
@ -123,7 +125,8 @@ class BlArmature(BlDatablock):
'use_connect', 'use_connect',
'parent', 'parent',
'name', 'name',
'layers' 'layers',
# 'roll',
] ]
data = dumper.dump(instance) data = dumper.dump(instance)

View File

@ -29,6 +29,7 @@ class BlCamera(BlDatablock):
bl_delay_refresh = 1 bl_delay_refresh = 1
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = True bl_automatic_push = True
bl_check_common = False
bl_icon = 'CAMERA_DATA' bl_icon = 'CAMERA_DATA'
def _construct(self, data): def _construct(self, data):
@ -36,7 +37,7 @@ class BlCamera(BlDatablock):
def _load_implementation(self, data, target): def _load_implementation(self, data, target):
loader = Loader() loader = Loader()
loader.load(target, data) loader.load(target, data)
dof_settings = data.get('dof') dof_settings = data.get('dof')
@ -45,13 +46,22 @@ class BlCamera(BlDatablock):
if dof_settings: if dof_settings:
loader.load(target.dof, dof_settings) loader.load(target.dof, dof_settings)
background_images = data.get('background_images')
if background_images:
target.background_images.clear()
for img_name, img_data in background_images.items():
target_img = target.background_images.new()
target_img.image = bpy.data.images[img_name]
loader.load(target_img, img_data)
def _dump_implementation(self, data, instance=None): def _dump_implementation(self, data, instance=None):
assert(instance) assert(instance)
# TODO: background image support # TODO: background image support
dumper = Dumper() dumper = Dumper()
dumper.depth = 2 dumper.depth = 3
dumper.include_filter = [ dumper.include_filter = [
"name", "name",
'type', 'type',
@ -70,6 +80,7 @@ class BlCamera(BlDatablock):
'aperture_fstop', 'aperture_fstop',
'aperture_blades', 'aperture_blades',
'aperture_rotation', 'aperture_rotation',
'ortho_scale',
'aperture_ratio', 'aperture_ratio',
'display_size', 'display_size',
'show_limits', 'show_limits',
@ -79,7 +90,24 @@ class BlCamera(BlDatablock):
'sensor_fit', 'sensor_fit',
'sensor_height', 'sensor_height',
'sensor_width', 'sensor_width',
'show_background_images',
'background_images',
'alpha',
'display_depth',
'frame_method',
'offset',
'rotation',
'scale',
'use_flip_x',
'use_flip_y',
'image'
] ]
return dumper.dump(instance) return dumper.dump(instance)
def _resolve_deps_implementation(self):
deps = []
for background in self.instance.background_images:
if background.image:
deps.append(background.image)
return deps

View File

@ -21,6 +21,55 @@ import mathutils
from .. import utils from .. import utils
from .bl_datablock import BlDatablock from .bl_datablock import BlDatablock
from .dump_anything import Loader, Dumper
def dump_collection_children(collection):
collection_children = []
for child in collection.children:
if child not in collection_children:
collection_children.append(child.uuid)
return collection_children
def dump_collection_objects(collection):
collection_objects = []
for object in collection.objects:
if object not in collection_objects:
collection_objects.append(object.uuid)
return collection_objects
def load_collection_objects(dumped_objects, collection):
for object in dumped_objects:
object_ref = utils.find_from_attr('uuid', object, bpy.data.objects)
if object_ref is None:
continue
elif object_ref.name not in collection.objects.keys():
collection.objects.link(object_ref)
for object in collection.objects:
if object.uuid not in dumped_objects:
collection.objects.unlink(object)
def load_collection_childrens(dumped_childrens, collection):
for child_collection in dumped_childrens:
collection_ref = utils.find_from_attr(
'uuid',
child_collection,
bpy.data.collections)
if collection_ref is None:
continue
if collection_ref.name not in collection.children.keys():
collection.children.link(collection_ref)
for child_collection in collection.children:
if child_collection.uuid not in dumped_childrens:
collection.children.unlink(child_collection)
class BlCollection(BlDatablock): class BlCollection(BlDatablock):
@ -30,71 +79,47 @@ class BlCollection(BlDatablock):
bl_delay_refresh = 1 bl_delay_refresh = 1
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = True bl_automatic_push = True
bl_check_common = True
def _construct(self, data): def _construct(self, data):
if self.is_library: if self.is_library:
with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData): with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
targetData.collections = [ targetData.collections = [
name for name in sourceData.collections if name == self.data['name']] name for name in sourceData.collections if name == self.data['name']]
instance = bpy.data.collections[self.data['name']] instance = bpy.data.collections[self.data['name']]
return instance return instance
instance = bpy.data.collections.new(data["name"]) instance = bpy.data.collections.new(data["name"])
return instance return instance
def _load_implementation(self, data, target): def _load_implementation(self, data, target):
# Load other meshes metadata loader = Loader()
target.name = data["name"] loader.load(target, data)
# Objects # Objects
for object in data["objects"]: load_collection_objects(data['objects'], target)
object_ref = bpy.data.objects.get(object)
if object_ref is None:
continue
if object not in target.objects.keys():
target.objects.link(object_ref)
for object in target.objects:
if object.name not in data["objects"]:
target.objects.unlink(object)
# Link childrens # Link childrens
for collection in data["children"]: load_collection_childrens(data['children'], target)
collection_ref = bpy.data.collections.get(collection)
if collection_ref is None:
continue
if collection_ref.name not in target.children.keys():
target.children.link(collection_ref)
for collection in target.children:
if collection.name not in data["children"]:
target.children.unlink(collection)
def _dump_implementation(self, data, instance=None): def _dump_implementation(self, data, instance=None):
assert(instance) assert(instance)
data = {}
data['name'] = instance.name dumper = Dumper()
dumper.depth = 1
dumper.include_filter = [
"name",
"instance_offset"
]
data = dumper.dump(instance)
# dump objects # dump objects
collection_objects = [] data['objects'] = dump_collection_objects(instance)
for object in instance.objects:
if object not in collection_objects:
collection_objects.append(object.name)
data['objects'] = collection_objects
# dump children collections # dump children collections
collection_children = [] data['children'] = dump_collection_children(instance)
for child in instance.children:
if child not in collection_children:
collection_children.append(child.name)
data['children'] = collection_children
return data return data

View File

@ -46,12 +46,105 @@ SPLINE_POINT = [
"radius", "radius",
] ]
CURVE_METADATA = [
'align_x',
'align_y',
'bevel_depth',
'bevel_factor_end',
'bevel_factor_mapping_end',
'bevel_factor_mapping_start',
'bevel_factor_start',
'bevel_object',
'bevel_resolution',
'body',
'body_format',
'dimensions',
'eval_time',
'extrude',
'family',
'fill_mode',
'follow_curve',
'font',
'font_bold',
'font_bold_italic',
'font_italic',
'make_local',
'materials',
'name',
'offset',
'offset_x',
'offset_y',
'overflow',
'original',
'override_create',
'override_library',
'path_duration',
'preview',
'render_resolution_u',
'render_resolution_v',
'resolution_u',
'resolution_v',
'shape_keys',
'shear',
'size',
'small_caps_scale',
'space_character',
'space_line',
'space_word',
'type',
'taper_object',
'texspace_location',
'texspace_size',
'transform',
'twist_mode',
'twist_smooth',
'underline_height',
'underline_position',
'use_auto_texspace',
'use_deform_bounds',
'use_fake_user',
'use_fill_caps',
'use_fill_deform',
'use_map_taper',
'use_path',
'use_path_follow',
'use_radius',
'use_stretch',
]
SPLINE_METADATA = [
'hide',
'material_index',
# 'order_u',
# 'order_v',
# 'point_count_u',
# 'point_count_v',
'points',
'radius_interpolation',
'resolution_u',
'resolution_v',
'tilt_interpolation',
'type',
'use_bezier_u',
'use_bezier_v',
'use_cyclic_u',
'use_cyclic_v',
'use_endpoint_u',
'use_endpoint_v',
'use_smooth',
]
class BlCurve(BlDatablock): class BlCurve(BlDatablock):
bl_id = "curves" bl_id = "curves"
bl_class = bpy.types.Curve bl_class = bpy.types.Curve
bl_delay_refresh = 1 bl_delay_refresh = 1
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = True bl_automatic_push = True
bl_check_common = False
bl_icon = 'CURVE_DATA' bl_icon = 'CURVE_DATA'
def _construct(self, data): def _construct(self, data):
@ -62,6 +155,7 @@ class BlCurve(BlDatablock):
loader.load(target, data) loader.load(target, data)
target.splines.clear() target.splines.clear()
# load splines # load splines
for spline in data['splines'].values(): for spline in data['splines'].values():
new_spline = target.splines.new(spline['type']) new_spline = target.splines.new(spline['type'])
@ -72,8 +166,12 @@ class BlCurve(BlDatablock):
bezier_points = new_spline.bezier_points bezier_points = new_spline.bezier_points
bezier_points.add(spline['bezier_points_count']) bezier_points.add(spline['bezier_points_count'])
np_load_collection(spline['bezier_points'], bezier_points, SPLINE_BEZIER_POINT) np_load_collection(spline['bezier_points'], bezier_points, SPLINE_BEZIER_POINT)
# Not really working for now... if new_spline.type == 'POLY':
points = new_spline.points
points.add(spline['points_count'])
np_load_collection(spline['points'], points, SPLINE_POINT)
# Not working for now...
# See https://blender.stackexchange.com/questions/7020/create-nurbs-surface-with-python # See https://blender.stackexchange.com/questions/7020/create-nurbs-surface-with-python
if new_spline.type == 'NURBS': if new_spline.type == 'NURBS':
logging.error("NURBS not supported.") logging.error("NURBS not supported.")
@ -83,11 +181,14 @@ class BlCurve(BlDatablock):
# new_spline.points[point_index], data['splines'][spline]["points"][point_index]) # new_spline.points[point_index], data['splines'][spline]["points"][point_index])
loader.load(new_spline, spline) loader.load(new_spline, spline)
def _dump_implementation(self, data, instance=None): def _dump_implementation(self, data, instance=None):
assert(instance) assert(instance)
dumper = Dumper() dumper = Dumper()
# Conflicting attributes # Conflicting attributes
# TODO: remove them with the NURBS support # TODO: remove them with the NURBS support
dumper.include_filter = CURVE_METADATA
dumper.exclude_filter = [ dumper.exclude_filter = [
'users', 'users',
'order_u', 'order_u',
@ -105,8 +206,13 @@ class BlCurve(BlDatablock):
for index, spline in enumerate(instance.splines): for index, spline in enumerate(instance.splines):
dumper.depth = 2 dumper.depth = 2
dumper.include_filter = SPLINE_METADATA
spline_data = dumper.dump(spline) spline_data = dumper.dump(spline)
# spline_data['points'] = np_dump_collection(spline.points, SPLINE_POINT)
if spline.type == 'POLY':
spline_data['points_count'] = len(spline.points)-1
spline_data['points'] = np_dump_collection(spline.points, SPLINE_POINT)
spline_data['bezier_points_count'] = len(spline.bezier_points)-1 spline_data['bezier_points_count'] = len(spline.bezier_points)-1
spline_data['bezier_points'] = np_dump_collection(spline.bezier_points, SPLINE_BEZIER_POINT) spline_data['bezier_points'] = np_dump_collection(spline.bezier_points, SPLINE_BEZIER_POINT)
data['splines'][index] = spline_data data['splines'][index] = spline_data
@ -118,3 +224,17 @@ class BlCurve(BlDatablock):
elif isinstance(instance, T.Curve): elif isinstance(instance, T.Curve):
data['type'] = 'CURVE' data['type'] = 'CURVE'
return data return data
def _resolve_deps_implementation(self):
# TODO: resolve material
deps = []
curve = self.instance
if isinstance(curve, T.TextCurve):
deps.extend([
curve.font,
curve.font_bold,
curve.font_bold_italic,
curve.font_italic])
return deps

View File

@ -16,13 +16,16 @@
# ##### END GPL LICENSE BLOCK ##### # ##### END GPL LICENSE BLOCK #####
import logging
from collections.abc import Iterable
import bpy import bpy
import mathutils import mathutils
from replication.constants import DIFF_BINARY, UP
from replication.data import ReplicatedDatablock
from .. import utils from .. import utils
from .dump_anything import Loader, Dumper from .dump_anything import Dumper, Loader
from ..libs.replication.replication.data import ReplicatedDatablock
from ..libs.replication.replication.constants import (UP, DIFF_BINARY)
def has_action(target): def has_action(target):
@ -86,6 +89,18 @@ def load_driver(target_datablock, src_driver):
loader.load(new_point, src_driver['keyframe_points'][src_point]) loader.load(new_point, src_driver['keyframe_points'][src_point])
def get_datablock_from_uuid(uuid, default, ignore=[]):
if not uuid:
return default
for category in dir(bpy.data):
root = getattr(bpy.data, category)
if isinstance(root, Iterable) and category not in ignore:
for item in root:
if getattr(item, 'uuid', None) == uuid:
return item
return default
class BlDatablock(ReplicatedDatablock): class BlDatablock(ReplicatedDatablock):
"""BlDatablock """BlDatablock
@ -95,36 +110,55 @@ class BlDatablock(ReplicatedDatablock):
bl_delay_apply : refresh rate in sec for apply bl_delay_apply : refresh rate in sec for apply
bl_automatic_push : boolean bl_automatic_push : boolean
bl_icon : type icon (blender icon name) bl_icon : type icon (blender icon name)
bl_check_common: enable check even in common rights
""" """
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None) instance = kwargs.get('instance', None)
self.preferences = utils.get_preferences()
# TODO: use is_library_indirect # TODO: use is_library_indirect
self.is_library = (instance and hasattr(instance, 'library') and self.is_library = (instance and hasattr(instance, 'library') and
instance.library) or \ instance.library) or \
(self.data and 'library' in self.data) (hasattr(self,'data') and self.data and 'library' in self.data)
if instance and hasattr(instance, 'uuid'): if instance and hasattr(instance, 'uuid'):
instance.uuid = self.uuid instance.uuid = self.uuid
self.diff_method = DIFF_BINARY # self.diff_method = DIFF_BINARY
def resolve(self): def resolve(self, construct = True):
datablock_ref = None datablock_ref = None
datablock_root = getattr(bpy.data, self.bl_id) datablock_root = getattr(bpy.data, self.bl_id)
datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root) datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root)
if not datablock_ref: if not datablock_ref:
datablock_ref = datablock_root.get( try:
self.data['name'], # Resolve by name datablock_ref = datablock_root[self.data['name']]
self._construct(data=self.data)) # If it doesn't exist create it except Exception:
if construct:
name = self.data.get('name')
logging.debug(f"Constructing {name}")
datablock_ref = self._construct(data=self.data)
if datablock_ref: if datablock_ref is not None:
setattr(datablock_ref, 'uuid', self.uuid) setattr(datablock_ref, 'uuid', self.uuid)
self.instance = datablock_ref
return True
else:
return False
self.instance = datablock_ref
def remove_instance(self):
"""
Remove instance from blender data
"""
assert(self.instance)
datablock_root = getattr(bpy.data, self.bl_id)
datablock_root.remove(self.instance)
def _dump(self, instance=None): def _dump(self, instance=None):
dumper = Dumper() dumper = Dumper()
@ -186,6 +220,7 @@ class BlDatablock(ReplicatedDatablock):
if not self.is_library: if not self.is_library:
dependencies.extend(self._resolve_deps_implementation()) dependencies.extend(self._resolve_deps_implementation())
logging.debug(f"{self.instance.name} dependencies: {dependencies}")
return dependencies return dependencies
def _resolve_deps_implementation(self): def _resolve_deps_implementation(self):

View File

@ -0,0 +1,143 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import logging
import os
import sys
from pathlib import Path
import bpy
import mathutils
from replication.constants import DIFF_BINARY, UP
from replication.data import ReplicatedDatablock
from .. import utils
from .dump_anything import Dumper, Loader
def get_filepath(filename):
"""
Construct the local filepath
"""
return str(Path(
utils.get_preferences().cache_directory,
filename
))
def ensure_unpacked(datablock):
if datablock.packed_file:
logging.info(f"Unpacking {datablock.name}")
filename = Path(bpy.path.abspath(datablock.filepath)).name
datablock.filepath = get_filepath(filename)
datablock.unpack(method="WRITE_ORIGINAL")
class BlFile(ReplicatedDatablock):
    """Replicated raw file, mirrored between memory and the local cache dir.

    The replicated payload is a dict ``{'name': filename, 'file': bytes}``;
    ``self.instance`` is a :class:`pathlib.Path` pointing at the on-disk copy.
    """
    bl_id = 'file'
    bl_name = "file"
    bl_class = Path
    bl_delay_refresh = 0
    bl_delay_apply = 1
    bl_automatic_push = True
    bl_check_common = False
    bl_icon = 'FILE'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.instance = kwargs.get('instance', None)

        # Fail fast when constructed from a path that doesn't exist locally.
        if self.instance and not self.instance.exists():
            raise FileNotFoundError(str(self.instance))

        self.preferences = utils.get_preferences()
        self.diff_method = DIFF_BINARY

    def resolve(self):
        """Point ``self.instance`` at the cached file, writing it if absent."""
        if self.data:
            self.instance = Path(get_filepath(self.data['name']))

            if not self.instance.exists():
                logging.debug("File don't exist, loading it.")
                self._load(self.data, self.instance)

    def push(self, socket, identity=None):
        # BUGFIX: forward the caller's identity instead of hard-coding None.
        super().push(socket, identity=identity)

        # Optionally drop the in-memory copy once it has been sent.
        if self.preferences.clear_memory_filecache:
            del self.data['file']

    def _dump(self, instance=None):
        """
        Read the file and return a dict as:
        {
            name : filename
            extension :
            file: file content
        }
        """
        logging.info(f"Extracting file metadata")

        data = {
            'name': self.instance.name,
        }

        logging.info(
            f"Reading {self.instance.name} content: {self.instance.stat().st_size} bytes")

        try:
            # 'with' guarantees the handle is closed even if read() raises
            # (the original code closed twice on success and leaked on error).
            with open(self.instance, "rb") as file:
                data['file'] = file.read()
        except IOError:
            logging.warning(f"{self.instance} doesn't exist, skipping")

        return data

    def _load(self, data, target):
        """
        Writing the file
        """
        # TODO: check for empty data
        if target.exists() and not self.diff():
            logging.info(f"{data['name']} already on the disk, skipping.")
            return

        try:
            # Context manager ensures the handle is released even when
            # write() fails with an exception other than IOError.
            with open(target, "wb") as file:
                file.write(data['file'])

            if self.preferences.clear_memory_filecache:
                del self.data['file']
        except IOError:
            logging.warning(f"{target} doesn't exist, skipping")

    def diff(self):
        """Return True when the in-memory payload matches the on-disk size."""
        if self.preferences.clear_memory_filecache:
            # Without a memory copy there is nothing to compare against.
            return False
        else:
            # 33 is presumably the CPython bytes-object header overhead,
            # subtracted to compare raw payload length -- TODO: confirm.
            memory_size = sys.getsizeof(self.data['file'])-33
            disk_size = self.instance.stat().st_size
            return memory_size == disk_size

View File

@ -0,0 +1,74 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import logging
import os
from pathlib import Path
import bpy
from .bl_datablock import BlDatablock
from .bl_file import get_filepath, ensure_unpacked
from .dump_anything import Dumper, Loader
class BlFont(BlDatablock):
    """Replicated vector font, backed by a file in the local cache."""
    bl_id = "fonts"
    bl_class = bpy.types.VectorFont
    bl_delay_refresh = 1
    bl_delay_apply = 1
    bl_automatic_push = True
    bl_check_common = False
    bl_icon = 'FILE_FONT'

    def _construct(self, data):
        """Load the font: Blender's builtin one, or the cached file."""
        filename = data.get('filename')

        if filename == '<builtin>':
            return bpy.data.fonts.load(filename)
        return bpy.data.fonts.load(get_filepath(filename))

    def _load(self, data, target):
        # Intentionally left empty: nothing is updated after construction.
        pass

    def _dump(self, instance=None):
        """Dump the font name and its source filename.

        Raises FileExistsError when a non-builtin font has an empty
        filename (nothing replicable to point at).
        """
        is_builtin = instance.filepath == '<builtin>'
        filename = '<builtin>' if is_builtin else Path(instance.filepath).name

        if not filename:
            raise FileExistsError(instance.filepath)

        return {
            'filename': filename,
            'name': instance.name
        }

    def diff(self):
        # Always report "no change": the font content is never re-dumped.
        return False

    def _resolve_deps_implementation(self):
        """Depend on the on-disk font file, unpacking it first if packed."""
        deps = []
        if self.instance.filepath and self.instance.filepath != '<builtin>':
            # ensure_unpacked may rewrite instance.filepath, so the path is
            # re-read from the instance only after it runs.
            ensure_unpacked(self.instance)
            deps.append(Path(bpy.path.abspath(self.instance.filepath)))
        return deps

View File

@ -218,6 +218,7 @@ class BlGpencil(BlDatablock):
bl_delay_refresh = 2 bl_delay_refresh = 2
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = True bl_automatic_push = True
bl_check_common = False
bl_icon = 'GREASEPENCIL' bl_icon = 'GREASEPENCIL'
def _construct(self, data): def _construct(self, data):

View File

@ -16,90 +16,108 @@
# ##### END GPL LICENSE BLOCK ##### # ##### END GPL LICENSE BLOCK #####
import logging
import os
from pathlib import Path
import bpy import bpy
import mathutils import mathutils
import os
import logging
from .. import utils from .. import utils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock from .bl_datablock import BlDatablock
from .dump_anything import Dumper, Loader
from .bl_file import get_filepath, ensure_unpacked
def dump_image(image): format_to_ext = {
pixels = None 'BMP': 'bmp',
if image.source == "GENERATED" or image.packed_file is not None: 'IRIS': 'sgi',
prefs = utils.get_preferences() 'PNG': 'png',
img_name = f"{image.name}.png" 'JPEG': 'jpg',
'JPEG2000': 'jp2',
# Cache the image on the disk 'TARGA': 'tga',
image.filepath_raw = os.path.join(prefs.cache_directory, img_name) 'TARGA_RAW': 'tga',
os.makedirs(prefs.cache_directory, exist_ok=True) 'CINEON': 'cin',
image.file_format = "PNG" 'DPX': 'dpx',
image.save() 'OPEN_EXR_MULTILAYER': 'exr',
'OPEN_EXR': 'exr',
'HDR': 'hdr',
'TIFF': 'tiff',
'AVI_JPEG': 'avi',
'AVI_RAW': 'avi',
'FFMPEG': 'mpeg',
}
if image.source == "FILE":
image_path = bpy.path.abspath(image.filepath_raw)
image_directory = os.path.dirname(image_path)
os.makedirs(image_directory, exist_ok=True)
image.save()
file = open(image_path, "rb")
pixels = file.read()
file.close()
else:
raise ValueError()
return pixels
class BlImage(BlDatablock): class BlImage(BlDatablock):
bl_id = "images" bl_id = "images"
bl_class = bpy.types.Image bl_class = bpy.types.Image
bl_delay_refresh = 0 bl_delay_refresh = 1
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = False bl_automatic_push = True
bl_check_common = False
bl_icon = 'IMAGE_DATA' bl_icon = 'IMAGE_DATA'
def _construct(self, data): def _construct(self, data):
return bpy.data.images.new( return bpy.data.images.new(
name=data['name'], name=data['name'],
width=data['size'][0], width=data['size'][0],
height=data['size'][1] height=data['size'][1]
) )
def _load(self, data, target): def _load(self, data, target):
image = target loader = Loader()
prefs = utils.get_preferences() loader.load(data, target)
img_name = f"{image.name}.png"
img_path = os.path.join(prefs.cache_directory,img_name)
os.makedirs(prefs.cache_directory, exist_ok=True)
file = open(img_path, 'wb')
file.write(data["pixels"])
file.close()
image.source = 'FILE'
image.filepath = img_path
image.colorspace_settings.name = data["colorspace_settings"]["name"]
target.source = 'FILE'
target.filepath_raw = get_filepath(data['filename'])
target.colorspace_settings.name = data["colorspace_settings"]["name"]
def _dump(self, instance=None): def _dump(self, instance=None):
assert(instance) assert(instance)
data = {}
data['pixels'] = dump_image(instance) filename = Path(instance.filepath).name
data = {
"filename": filename
}
dumper = Dumper() dumper = Dumper()
dumper.depth = 2 dumper.depth = 2
dumper.include_filter = [ dumper.include_filter = [
"name", "name",
'size', 'size',
'height', 'height',
'alpha', 'alpha',
'float_buffer', 'float_buffer',
'filepath', 'alpha_mode',
'source', 'colorspace_settings']
'colorspace_settings']
data.update(dumper.dump(instance)) data.update(dumper.dump(instance))
return data return data
def diff(self): def diff(self):
return False if self.instance and (self.instance.name != self.data['name']):
return True
else:
return False
def _resolve_deps_implementation(self):
deps = []
if self.instance.filepath:
if self.instance.packed_file:
filename = Path(bpy.path.abspath(self.instance.filepath)).name
self.instance.filepath_raw = get_filepath(filename)
self.instance.save()
# An image can't be unpacked to the modified path
# TODO: make a bug report
self.instance.unpack(method="REMOVE")
elif self.instance.source == "GENERATED":
filename = f"{self.instance.name}.png"
self.instance.filepath = get_filepath(filename)
self.instance.save()
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
return deps

View File

@ -21,7 +21,7 @@ import mathutils
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
from .bl_datablock import BlDatablock from .bl_datablock import BlDatablock
from ..libs.replication.replication.exception import ContextError from replication.exception import ContextError
POINT = ['co', 'weight_softbody', 'co_deform'] POINT = ['co', 'weight_softbody', 'co_deform']
@ -32,6 +32,7 @@ class BlLattice(BlDatablock):
bl_delay_refresh = 1 bl_delay_refresh = 1
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = True bl_automatic_push = True
bl_check_common = False
bl_icon = 'LATTICE_DATA' bl_icon = 'LATTICE_DATA'
def _construct(self, data): def _construct(self, data):

View File

@ -29,6 +29,7 @@ class BlLibrary(BlDatablock):
bl_delay_refresh = 1 bl_delay_refresh = 1
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = True bl_automatic_push = True
bl_check_common = False
bl_icon = 'LIBRARY_DATA_DIRECT' bl_icon = 'LIBRARY_DATA_DIRECT'
def _construct(self, data): def _construct(self, data):

View File

@ -29,6 +29,7 @@ class BlLight(BlDatablock):
bl_delay_refresh = 1 bl_delay_refresh = 1
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = True bl_automatic_push = True
bl_check_common = False
bl_icon = 'LIGHT_DATA' bl_icon = 'LIGHT_DATA'
def _construct(self, data): def _construct(self, data):

View File

@ -30,6 +30,7 @@ class BlLightprobe(BlDatablock):
bl_delay_refresh = 1 bl_delay_refresh = 1
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = True bl_automatic_push = True
bl_check_common = False
bl_icon = 'LIGHTPROBE_GRID' bl_icon = 'LIGHTPROBE_GRID'
def _construct(self, data): def _construct(self, data):

View File

@ -19,10 +19,12 @@
import bpy import bpy
import mathutils import mathutils
import logging import logging
import re
from .. import utils
from .dump_anything import Loader, Dumper from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock from .bl_datablock import BlDatablock, get_datablock_from_uuid
NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
def load_node(node_data, node_tree): def load_node(node_data, node_tree):
@ -36,21 +38,32 @@ def load_node(node_data, node_tree):
loader = Loader() loader = Loader()
target_node = node_tree.nodes.new(type=node_data["bl_idname"]) target_node = node_tree.nodes.new(type=node_data["bl_idname"])
loader.load(target_node, node_data) loader.load(target_node, node_data)
image_uuid = node_data.get('image_uuid', None)
if image_uuid and not target_node.image:
target_node.image = get_datablock_from_uuid(image_uuid, None)
for input in node_data["inputs"]: for input in node_data["inputs"]:
if hasattr(target_node.inputs[input], "default_value"): if hasattr(target_node.inputs[input], "default_value"):
try: try:
target_node.inputs[input].default_value = node_data["inputs"][input]["default_value"] target_node.inputs[input].default_value = node_data["inputs"][input]["default_value"]
except: except:
logging.error(f"Material {input} parameter not supported, skipping") logging.error(
f"Material {input} parameter not supported, skipping")
for output in node_data["outputs"]:
if hasattr(target_node.outputs[output], "default_value"):
try:
target_node.outputs[output].default_value = node_data["outputs"][output]["default_value"]
except:
logging.error(
f"Material {output} parameter not supported, skipping")
def load_links(links_data, node_tree): def load_links(links_data, node_tree):
""" Load node_tree links from a list """ Load node_tree links from a list
:arg links_data: dumped node links :arg links_data: dumped node links
:type links_data: list :type links_data: list
:arg node_tree: node links collection :arg node_tree: node links collection
@ -58,9 +71,10 @@ def load_links(links_data, node_tree):
""" """
for link in links_data: for link in links_data:
input_socket = node_tree.nodes[link['to_node']].inputs[int(link['to_socket'])] input_socket = node_tree.nodes[link['to_node']
output_socket = node_tree.nodes[link['from_node']].outputs[int(link['from_socket'])] ].inputs[int(link['to_socket'])]
output_socket = node_tree.nodes[link['from_node']].outputs[int(
link['from_socket'])]
node_tree.links.new(input_socket, output_socket) node_tree.links.new(input_socket, output_socket)
@ -75,11 +89,15 @@ def dump_links(links):
links_data = [] links_data = []
for link in links: for link in links:
to_socket = NODE_SOCKET_INDEX.search(
link.to_socket.path_from_id()).group(1)
from_socket = NODE_SOCKET_INDEX.search(
link.from_socket.path_from_id()).group(1)
links_data.append({ links_data.append({
'to_node':link.to_node.name, 'to_node': link.to_node.name,
'to_socket':link.to_socket.path_from_id()[-2:-1], 'to_socket': to_socket,
'from_node':link.from_node.name, 'from_node': link.from_node.name,
'from_socket':link.from_socket.path_from_id()[-2:-1], 'from_socket': from_socket,
}) })
return links_data return links_data
@ -100,6 +118,7 @@ def dump_node(node):
"show_expanded", "show_expanded",
"name_full", "name_full",
"select", "select",
"bl_label",
"bl_height_min", "bl_height_min",
"bl_height_max", "bl_height_max",
"bl_height_default", "bl_height_default",
@ -116,9 +135,10 @@ def dump_node(node):
"show_preview", "show_preview",
"show_texture", "show_texture",
"outputs", "outputs",
"width_hidden" "width_hidden",
"image"
] ]
dumped_node = node_dumper.dump(node) dumped_node = node_dumper.dump(node)
if hasattr(node, 'inputs'): if hasattr(node, 'inputs'):
@ -130,8 +150,17 @@ def dump_node(node):
input_dumper.include_filter = ["default_value"] input_dumper.include_filter = ["default_value"]
if hasattr(i, 'default_value'): if hasattr(i, 'default_value'):
dumped_node['inputs'][i.name] = input_dumper.dump( dumped_node['inputs'][i.name] = input_dumper.dump(i)
i)
dumped_node['outputs'] = {}
for i in node.outputs:
output_dumper = Dumper()
output_dumper.depth = 2
output_dumper.include_filter = ["default_value"]
if hasattr(i, 'default_value'):
dumped_node['outputs'][i.name] = output_dumper.dump(i)
if hasattr(node, 'color_ramp'): if hasattr(node, 'color_ramp'):
ramp_dumper = Dumper() ramp_dumper = Dumper()
ramp_dumper.depth = 4 ramp_dumper.depth = 4
@ -151,16 +180,24 @@ def dump_node(node):
'location' 'location'
] ]
dumped_node['mapping'] = curve_dumper.dump(node.mapping) dumped_node['mapping'] = curve_dumper.dump(node.mapping)
if hasattr(node, 'image') and getattr(node, 'image'):
dumped_node['image_uuid'] = node.image.uuid
return dumped_node return dumped_node
def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
has_image = lambda node : (node.type in ['TEX_IMAGE', 'TEX_ENVIRONMENT'] and node.image)
return [node.image for node in node_tree.nodes if has_image(node)]
class BlMaterial(BlDatablock): class BlMaterial(BlDatablock):
bl_id = "materials" bl_id = "materials"
bl_class = bpy.types.Material bl_class = bpy.types.Material
bl_delay_refresh = 1 bl_delay_refresh = 1
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = True bl_automatic_push = True
bl_check_common = False
bl_icon = 'MATERIAL_DATA' bl_icon = 'MATERIAL_DATA'
def _construct(self, data): def _construct(self, data):
@ -168,23 +205,22 @@ class BlMaterial(BlDatablock):
def _load_implementation(self, data, target): def _load_implementation(self, data, target):
loader = Loader() loader = Loader()
target.name = data['name']
if data['is_grease_pencil']: is_grease_pencil = data.get('is_grease_pencil')
use_nodes = data.get('use_nodes')
loader.load(target, data)
if is_grease_pencil:
if not target.is_grease_pencil: if not target.is_grease_pencil:
bpy.data.materials.create_gpencil_data(target) bpy.data.materials.create_gpencil_data(target)
loader.load(target.grease_pencil, data['grease_pencil'])
loader.load( elif use_nodes:
target.grease_pencil, data['grease_pencil'])
if data["use_nodes"]:
if target.node_tree is None: if target.node_tree is None:
target.use_nodes = True target.use_nodes = True
target.node_tree.nodes.clear() target.node_tree.nodes.clear()
loader.load(target,data)
# Load nodes # Load nodes
for node in data["node_tree"]["nodes"]: for node in data["node_tree"]["nodes"]:
load_node(data["node_tree"]["nodes"][node], target.node_tree) load_node(data["node_tree"]["nodes"][node], target.node_tree)
@ -198,59 +234,71 @@ class BlMaterial(BlDatablock):
assert(instance) assert(instance)
mat_dumper = Dumper() mat_dumper = Dumper()
mat_dumper.depth = 2 mat_dumper.depth = 2
mat_dumper.exclude_filter = [ mat_dumper.include_filter = [
"is_embed_data", 'name',
"is_evaluated", 'blend_method',
"name_full", 'shadow_method',
"bl_description", 'alpha_threshold',
"bl_icon", 'show_transparent_back',
"bl_idname", 'use_backface_culling',
"bl_label", 'use_screen_refraction',
"preview", 'use_sss_translucency',
"original", 'refraction_depth',
"uuid", 'preview_render_type',
"users", 'use_preview_world',
"alpha_threshold", 'pass_index',
"line_color", 'use_nodes',
"view_center", 'diffuse_color',
'specular_color',
'roughness',
'specular_intensity',
'metallic',
'line_color',
'line_priority',
'is_grease_pencil'
] ]
data = mat_dumper.dump(instance) data = mat_dumper.dump(instance)
if instance.use_nodes:
nodes = {}
for node in instance.node_tree.nodes:
nodes[node.name] = dump_node(node)
data["node_tree"]['nodes'] = nodes
data["node_tree"]["links"] = dump_links(instance.node_tree.links)
if instance.is_grease_pencil: if instance.is_grease_pencil:
gp_mat_dumper = Dumper() gp_mat_dumper = Dumper()
gp_mat_dumper.depth = 3 gp_mat_dumper.depth = 3
gp_mat_dumper.include_filter = [ gp_mat_dumper.include_filter = [
'color',
'fill_color',
'mix_color',
'mix_factor',
'mix_stroke_factor',
# 'texture_angle',
# 'texture_scale',
# 'texture_offset',
'pixel_size',
'hide',
'lock',
'ghost',
# 'texture_clamp',
'flip',
'use_overlap_strokes',
'show_stroke', 'show_stroke',
'show_fill',
'alignment_mode',
'pass_index',
'mode', 'mode',
'stroke_style', 'stroke_style',
'color', # 'stroke_image',
'use_overlap_strokes',
'show_fill',
'fill_style', 'fill_style',
'fill_color',
'pass_index',
'alignment_mode',
# 'fill_image',
'texture_opacity',
'mix_factor',
'texture_offset',
'texture_angle',
'texture_scale',
'texture_clamp',
'gradient_type', 'gradient_type',
'mix_color', # 'fill_image',
'flip'
] ]
data['grease_pencil'] = gp_mat_dumper.dump(instance.grease_pencil) data['grease_pencil'] = gp_mat_dumper.dump(instance.grease_pencil)
elif instance.use_nodes:
nodes = {}
data["node_tree"] = {}
for node in instance.node_tree.nodes:
nodes[node.name] = dump_node(node)
data["node_tree"]['nodes'] = nodes
data["node_tree"]["links"] = dump_links(instance.node_tree.links)
return data return data
def _resolve_deps_implementation(self): def _resolve_deps_implementation(self):
@ -258,11 +306,8 @@ class BlMaterial(BlDatablock):
deps = [] deps = []
if self.instance.use_nodes: if self.instance.use_nodes:
for node in self.instance.node_tree.nodes: deps.extend(get_node_tree_dependencies(self.instance.node_tree))
if node.type == 'TEX_IMAGE':
deps.append(node.image)
if self.is_library: if self.is_library:
deps.append(self.instance.library) deps.append(self.instance.library)
return deps return deps

View File

@ -23,10 +23,9 @@ import logging
import numpy as np import numpy as np
from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
from ..libs.replication.replication.constants import DIFF_BINARY from replication.constants import DIFF_BINARY
from ..libs.replication.replication.exception import ContextError from replication.exception import ContextError
from .bl_datablock import BlDatablock from .bl_datablock import BlDatablock, get_datablock_from_uuid
VERTICE = ['co'] VERTICE = ['co']
@ -53,6 +52,7 @@ class BlMesh(BlDatablock):
bl_delay_refresh = 2 bl_delay_refresh = 2
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = True bl_automatic_push = True
bl_check_common = False
bl_icon = 'MESH_DATA' bl_icon = 'MESH_DATA'
def _construct(self, data): def _construct(self, data):
@ -70,8 +70,17 @@ class BlMesh(BlDatablock):
# MATERIAL SLOTS # MATERIAL SLOTS
target.materials.clear() target.materials.clear()
for m in data["material_list"]: for mat_uuid, mat_name in data["material_list"]:
target.materials.append(bpy.data.materials[m]) mat_ref = None
if mat_uuid is not None:
mat_ref = get_datablock_from_uuid(mat_uuid, None)
else:
mat_ref = bpy.data.materials.get(mat_name, None)
if mat_ref is None:
raise Exception("Material doesn't exist")
target.materials.append(mat_ref)
# CLEAR GEOMETRY # CLEAR GEOMETRY
if target.vertices: if target.vertices:
@ -89,32 +98,34 @@ class BlMesh(BlDatablock):
np_load_collection(data["polygons"],target.polygons, POLYGON) np_load_collection(data["polygons"],target.polygons, POLYGON)
# UV Layers # UV Layers
for layer in data['uv_layers']: if 'uv_layers' in data.keys():
if layer not in target.uv_layers: for layer in data['uv_layers']:
target.uv_layers.new(name=layer) if layer not in target.uv_layers:
target.uv_layers.new(name=layer)
np_load_collection_primitives( np_load_collection_primitives(
target.uv_layers[layer].data, target.uv_layers[layer].data,
'uv', 'uv',
data["uv_layers"][layer]['data']) data["uv_layers"][layer]['data'])
# Vertex color # Vertex color
for color_layer in data['vertex_colors']: if 'vertex_colors' in data.keys():
if color_layer not in target.vertex_colors: for color_layer in data['vertex_colors']:
target.vertex_colors.new(name=color_layer) if color_layer not in target.vertex_colors:
target.vertex_colors.new(name=color_layer)
np_load_collection_primitives( np_load_collection_primitives(
target.vertex_colors[color_layer].data, target.vertex_colors[color_layer].data,
'color', 'color',
data["vertex_colors"][color_layer]['data']) data["vertex_colors"][color_layer]['data'])
target.validate() target.validate()
target.update() target.update()
def _dump_implementation(self, data, instance=None): def _dump_implementation(self, data, instance=None):
assert(instance) assert(instance)
if instance.is_editmode: if instance.is_editmode and not self.preferences.sync_flags.sync_during_editmode:
raise ContextError("Mesh is in edit mode") raise ContextError("Mesh is in edit mode")
mesh = instance mesh = instance
@ -147,22 +158,24 @@ class BlMesh(BlDatablock):
data["loops"] = np_dump_collection(mesh.loops, LOOP) data["loops"] = np_dump_collection(mesh.loops, LOOP)
# UV Layers # UV Layers
data['uv_layers'] = {} if mesh.uv_layers:
for layer in mesh.uv_layers: data['uv_layers'] = {}
data['uv_layers'][layer.name] = {} for layer in mesh.uv_layers:
data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive(layer.data, 'uv') data['uv_layers'][layer.name] = {}
data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive(layer.data, 'uv')
# Vertex color # Vertex color
data['vertex_colors'] = {} if mesh.vertex_colors:
for color_map in mesh.vertex_colors: data['vertex_colors'] = {}
data['vertex_colors'][color_map.name] = {} for color_map in mesh.vertex_colors:
data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color') data['vertex_colors'][color_map.name] = {}
data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')
# Fix material index # Fix material index
m_list = [] m_list = []
for material in instance.materials: for material in instance.materials:
if material: if material:
m_list.append(material.name) m_list.append((material.uuid,material.name))
data['material_list'] = m_list data['material_list'] = m_list

View File

@ -68,6 +68,7 @@ class BlMetaball(BlDatablock):
bl_delay_refresh = 1 bl_delay_refresh = 1
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = True bl_automatic_push = True
bl_check_common = False
bl_icon = 'META_BALL' bl_icon = 'META_BALL'
def _construct(self, data): def _construct(self, data):

View File

@ -16,13 +16,15 @@
# ##### END GPL LICENSE BLOCK ##### # ##### END GPL LICENSE BLOCK #####
import bpy
import mathutils
import logging import logging
from .dump_anything import Loader, Dumper import bpy
from .bl_datablock import BlDatablock import mathutils
from ..libs.replication.replication.exception import ContextError from replication.exception import ContextError
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from .dump_anything import Dumper, Loader
from replication.exception import ReparentException
def load_pose(target_bone, data): def load_pose(target_bone, data):
@ -31,12 +33,59 @@ def load_pose(target_bone, data):
loader.load(target_bone, data) loader.load(target_bone, data)
def find_data_from_name(name=None):
instance = None
if not name:
pass
elif name in bpy.data.meshes.keys():
instance = bpy.data.meshes[name]
elif name in bpy.data.lights.keys():
instance = bpy.data.lights[name]
elif name in bpy.data.cameras.keys():
instance = bpy.data.cameras[name]
elif name in bpy.data.curves.keys():
instance = bpy.data.curves[name]
elif name in bpy.data.metaballs.keys():
instance = bpy.data.metaballs[name]
elif name in bpy.data.armatures.keys():
instance = bpy.data.armatures[name]
elif name in bpy.data.grease_pencils.keys():
instance = bpy.data.grease_pencils[name]
elif name in bpy.data.curves.keys():
instance = bpy.data.curves[name]
elif name in bpy.data.lattices.keys():
instance = bpy.data.lattices[name]
elif name in bpy.data.speakers.keys():
instance = bpy.data.speakers[name]
elif name in bpy.data.lightprobes.keys():
# Only supported since 2.83
if bpy.app.version[1] >= 83:
instance = bpy.data.lightprobes[name]
else:
logging.warning(
"Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
return instance
def load_data(object, name):
logging.info("loading data")
pass
def _is_editmode(object: bpy.types.Object) -> bool:
child_data = getattr(object, 'data', None)
return (child_data and
hasattr(child_data, 'is_editmode') and
child_data.is_editmode)
class BlObject(BlDatablock): class BlObject(BlDatablock):
bl_id = "objects" bl_id = "objects"
bl_class = bpy.types.Object bl_class = bpy.types.Object
bl_delay_refresh = 1 bl_delay_refresh = 1
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = True bl_automatic_push = True
bl_check_common = False
bl_icon = 'OBJECT_DATA' bl_icon = 'OBJECT_DATA'
def _construct(self, data): def _construct(self, data):
@ -52,45 +101,67 @@ class BlObject(BlDatablock):
return instance return instance
# TODO: refactoring # TODO: refactoring
if "data" not in data: object_name = data.get("name")
pass data_uuid = data.get("data_uuid")
elif data["data"] in bpy.data.meshes.keys(): data_id = data.get("data")
instance = bpy.data.meshes[data["data"]]
elif data["data"] in bpy.data.lights.keys(): object_data = get_datablock_from_uuid(
instance = bpy.data.lights[data["data"]] data_uuid,
elif data["data"] in bpy.data.cameras.keys(): find_data_from_name(data_id),
instance = bpy.data.cameras[data["data"]] ignore=['images']) #TODO: use resolve_from_id
elif data["data"] in bpy.data.curves.keys(): instance = bpy.data.objects.new(object_name, object_data)
instance = bpy.data.curves[data["data"]]
elif data["data"] in bpy.data.metaballs.keys():
instance = bpy.data.metaballs[data["data"]]
elif data["data"] in bpy.data.armatures.keys():
instance = bpy.data.armatures[data["data"]]
elif data["data"] in bpy.data.grease_pencils.keys():
instance = bpy.data.grease_pencils[data["data"]]
elif data["data"] in bpy.data.curves.keys():
instance = bpy.data.curves[data["data"]]
elif data["data"] in bpy.data.lattices.keys():
instance = bpy.data.lattices[data["data"]]
elif data["data"] in bpy.data.speakers.keys():
instance = bpy.data.speakers[data["data"]]
elif data["data"] in bpy.data.lightprobes.keys():
# Only supported since 2.83
if bpy.app.version[1] >= 83:
instance = bpy.data.lightprobes[data["data"]]
else:
logging.warning(
"Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
instance = bpy.data.objects.new(data["name"], instance)
instance.uuid = self.uuid instance.uuid = self.uuid
return instance return instance
def _load_implementation(self, data, target): def _load_implementation(self, data, target):
# Load transformation data
loader = Loader() loader = Loader()
data_uuid = data.get("data_uuid")
data_id = data.get("data")
if target.type != data['type']:
raise ReparentException()
elif target.data and (target.data.name != data_id):
target.data = get_datablock_from_uuid(data_uuid, find_data_from_name(data_id), ignore=['images'])
# vertex groups
if 'vertex_groups' in data:
target.vertex_groups.clear()
for vg in data['vertex_groups']:
vertex_group=target.vertex_groups.new(name = vg['name'])
point_attr='vertices' if 'vertices' in vg else 'points'
for vert in vg[point_attr]:
vertex_group.add(
[vert['index']], vert['weight'], 'REPLACE')
# SHAPE KEYS
if 'shape_keys' in data:
target.shape_key_clear()
object_data=target.data
# Create keys and load vertices coords
for key_block in data['shape_keys']['key_blocks']:
key_data=data['shape_keys']['key_blocks'][key_block]
target.shape_key_add(name = key_block)
loader.load(
target.data.shape_keys.key_blocks[key_block], key_data)
for vert in key_data['data']:
target.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
# Load relative key after all
for key_block in data['shape_keys']['key_blocks']:
reference = data['shape_keys']['key_blocks'][key_block]['relative_key']
target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference]
# Load transformation data
loader.load(target, data) loader.load(target, data)
loader.load(target.display, data['display'])
# Pose # Pose
if 'pose' in data: if 'pose' in data:
if not target.pose: if not target.pose:
@ -114,51 +185,25 @@ class BlObject(BlDatablock):
if 'constraints' in bone_data.keys(): if 'constraints' in bone_data.keys():
loader.load(target_bone, bone_data['constraints']) loader.load(target_bone, bone_data['constraints'])
load_pose(target_bone, bone_data) load_pose(target_bone, bone_data)
if 'bone_index' in bone_data.keys(): if 'bone_index' in bone_data.keys():
target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']] target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']]
# vertex groups # TODO: find another way...
if 'vertex_groups' in data: if target.type == 'EMPTY':
target.vertex_groups.clear() img_uuid = data.get('data_uuid')
for vg in data['vertex_groups']: if target.data is None and img_uuid:
vertex_group = target.vertex_groups.new(name=vg['name']) target.data = get_datablock_from_uuid(img_uuid, None)#bpy.data.images.get(img_key, None)
point_attr = 'vertices' if 'vertices' in vg else 'points'
for vert in vg[point_attr]:
vertex_group.add(
[vert['index']], vert['weight'], 'REPLACE')
# SHAPE KEYS
if 'shape_keys' in data:
target.shape_key_clear()
object_data = target.data
# Create keys and load vertices coords
for key_block in data['shape_keys']['key_blocks']:
key_data = data['shape_keys']['key_blocks'][key_block]
target.shape_key_add(name=key_block)
loader.load(
target.data.shape_keys.key_blocks[key_block], key_data)
for vert in key_data['data']:
target.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
# Load relative key after all
for key_block in data['shape_keys']['key_blocks']:
reference = data['shape_keys']['key_blocks'][key_block]['relative_key']
target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference]
def _dump_implementation(self, data, instance=None): def _dump_implementation(self, data, instance=None):
assert(instance) assert(instance)
child_data = getattr(instance, 'data', None) if _is_editmode(instance):
if self.preferences.sync_flags.sync_during_editmode:
if child_data and hasattr(child_data, 'is_editmode') and child_data.is_editmode: instance.update_from_editmode()
raise ContextError("Object is in edit-mode.") else:
raise ContextError("Object is in edit-mode.")
dumper = Dumper() dumper = Dumper()
dumper.depth = 1 dumper.depth = 1
@ -171,28 +216,55 @@ class BlObject(BlDatablock):
"library", "library",
"empty_display_type", "empty_display_type",
"empty_display_size", "empty_display_size",
"empty_image_offset",
"empty_image_depth",
"empty_image_side",
"show_empty_image_orthographic",
"show_empty_image_perspective",
"show_empty_image_only_axis_aligned",
"use_empty_image_alpha",
"color",
"instance_collection", "instance_collection",
"instance_type", "instance_type",
"location", "location",
"scale", "scale",
'lock_location',
'lock_rotation',
'lock_scale',
'hide_render',
'display_type',
'display_bounds_type',
'show_bounds',
'show_name',
'show_axis',
'show_wire',
'show_all_edges',
'show_texture_space',
'show_in_front',
'type',
'rotation_quaternion' if instance.rotation_mode == 'QUATERNION' else 'rotation_euler', 'rotation_quaternion' if instance.rotation_mode == 'QUATERNION' else 'rotation_euler',
] ]
data = dumper.dump(instance) data = dumper.dump(instance)
dumper.include_filter = [
'show_shadows',
]
data['display'] = dumper.dump(instance.display)
data['data_uuid'] = getattr(instance.data, 'uuid', None)
if self.is_library: if self.is_library:
return data return data
# MODIFIERS # MODIFIERS
if hasattr(instance, 'modifiers'): if hasattr(instance, 'modifiers'):
dumper.include_filter = None dumper.include_filter = None
dumper.depth = 2 dumper.depth = 1
data["modifiers"] = {} data["modifiers"] = {}
for index, modifier in enumerate(instance.modifiers): for index, modifier in enumerate(instance.modifiers):
data["modifiers"][modifier.name] = dumper.dump(modifier) data["modifiers"][modifier.name] = dumper.dump(modifier)
# CONSTRAINTS # CONSTRAINTS
# OBJECT
if hasattr(instance, 'constraints'): if hasattr(instance, 'constraints'):
dumper.depth = 3 dumper.depth = 3
data["constraints"] = dumper.dump(instance.constraints) data["constraints"] = dumper.dump(instance.constraints)
@ -245,7 +317,8 @@ class BlObject(BlDatablock):
# VERTEx GROUP # VERTEx GROUP
if len(instance.vertex_groups) > 0: if len(instance.vertex_groups) > 0:
points_attr = 'vertices' if isinstance(instance.data, bpy.types.Mesh) else 'points' points_attr = 'vertices' if isinstance(
instance.data, bpy.types.Mesh) else 'points'
vg_data = [] vg_data = []
for vg in instance.vertex_groups: for vg in instance.vertex_groups:
vg_idx = vg.index vg_idx = vg.index
@ -300,7 +373,7 @@ class BlObject(BlDatablock):
def _resolve_deps_implementation(self): def _resolve_deps_implementation(self):
deps = [] deps = []
# Avoid Empty case # Avoid Empty case
if self.instance.data: if self.instance.data:
deps.append(self.instance.data) deps.append(self.instance.data)
@ -315,4 +388,3 @@ class BlObject(BlDatablock):
deps.append(self.instance.instance_collection) deps.append(self.instance.instance_collection)
return deps return deps

View File

@ -21,8 +21,245 @@ import mathutils
from .dump_anything import Loader, Dumper from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock from .bl_datablock import BlDatablock
from .bl_collection import dump_collection_children, dump_collection_objects, load_collection_childrens, load_collection_objects
from replication.constants import (DIFF_JSON, MODIFIED)
from deepdiff import DeepDiff
import logging
from ..utils import get_preferences RENDER_SETTINGS = [
'dither_intensity',
'engine',
'film_transparent',
'filter_size',
'fps',
'fps_base',
'frame_map_new',
'frame_map_old',
'hair_subdiv',
'hair_type',
'line_thickness',
'line_thickness_mode',
'metadata_input',
'motion_blur_shutter',
'pixel_aspect_x',
'pixel_aspect_y',
'preview_pixel_size',
'preview_start_resolution',
'resolution_percentage',
'resolution_x',
'resolution_y',
'sequencer_gl_preview',
'use_bake_clear',
'use_bake_lores_mesh',
'use_bake_multires',
'use_bake_selected_to_active',
'use_bake_user_scale',
'use_border',
'use_compositing',
'use_crop_to_border',
'use_file_extension',
'use_freestyle',
'use_full_sample',
'use_high_quality_normals',
'use_lock_interface',
'use_motion_blur',
'use_multiview',
'use_sequencer',
'use_sequencer_override_scene_strip',
'use_single_layer',
'views_format',
]
EVEE_SETTINGS = [
'gi_diffuse_bounces',
'gi_cubemap_resolution',
'gi_visibility_resolution',
'gi_irradiance_smoothing',
'gi_glossy_clamp',
'gi_filter_quality',
'gi_show_irradiance',
'gi_show_cubemaps',
'gi_irradiance_display_size',
'gi_cubemap_display_size',
'gi_auto_bake',
'taa_samples',
'taa_render_samples',
'use_taa_reprojection',
'sss_samples',
'sss_jitter_threshold',
'use_ssr',
'use_ssr_refraction',
'use_ssr_halfres',
'ssr_quality',
'ssr_max_roughness',
'ssr_thickness',
'ssr_border_fade',
'ssr_firefly_fac',
'volumetric_start',
'volumetric_end',
'volumetric_tile_size',
'volumetric_samples',
'volumetric_sample_distribution',
'use_volumetric_lights',
'volumetric_light_clamp',
'use_volumetric_shadows',
'volumetric_shadow_samples',
'use_gtao',
'use_gtao_bent_normals',
'use_gtao_bounce',
'gtao_factor',
'gtao_quality',
'gtao_distance',
'bokeh_max_size',
'bokeh_threshold',
'use_bloom',
'bloom_threshold',
'bloom_color',
'bloom_knee',
'bloom_radius',
'bloom_clamp',
'bloom_intensity',
'use_motion_blur',
'motion_blur_shutter',
'motion_blur_depth_scale',
'motion_blur_max',
'motion_blur_steps',
'shadow_cube_size',
'shadow_cascade_size',
'use_shadow_high_bitdepth',
'gi_diffuse_bounces',
'gi_cubemap_resolution',
'gi_visibility_resolution',
'gi_irradiance_smoothing',
'gi_glossy_clamp',
'gi_filter_quality',
'gi_show_irradiance',
'gi_show_cubemaps',
'gi_irradiance_display_size',
'gi_cubemap_display_size',
'gi_auto_bake',
'taa_samples',
'taa_render_samples',
'use_taa_reprojection',
'sss_samples',
'sss_jitter_threshold',
'use_ssr',
'use_ssr_refraction',
'use_ssr_halfres',
'ssr_quality',
'ssr_max_roughness',
'ssr_thickness',
'ssr_border_fade',
'ssr_firefly_fac',
'volumetric_start',
'volumetric_end',
'volumetric_tile_size',
'volumetric_samples',
'volumetric_sample_distribution',
'use_volumetric_lights',
'volumetric_light_clamp',
'use_volumetric_shadows',
'volumetric_shadow_samples',
'use_gtao',
'use_gtao_bent_normals',
'use_gtao_bounce',
'gtao_factor',
'gtao_quality',
'gtao_distance',
'bokeh_max_size',
'bokeh_threshold',
'use_bloom',
'bloom_threshold',
'bloom_color',
'bloom_knee',
'bloom_radius',
'bloom_clamp',
'bloom_intensity',
'use_motion_blur',
'motion_blur_shutter',
'motion_blur_depth_scale',
'motion_blur_max',
'motion_blur_steps',
'shadow_cube_size',
'shadow_cascade_size',
'use_shadow_high_bitdepth',
]
CYCLES_SETTINGS = [
'shading_system',
'progressive',
'use_denoising',
'denoiser',
'use_square_samples',
'samples',
'aa_samples',
'diffuse_samples',
'glossy_samples',
'transmission_samples',
'ao_samples',
'mesh_light_samples',
'subsurface_samples',
'volume_samples',
'sampling_pattern',
'use_layer_samples',
'sample_all_lights_direct',
'sample_all_lights_indirect',
'light_sampling_threshold',
'use_adaptive_sampling',
'adaptive_threshold',
'adaptive_min_samples',
'min_light_bounces',
'min_transparent_bounces',
'caustics_reflective',
'caustics_refractive',
'blur_glossy',
'max_bounces',
'diffuse_bounces',
'glossy_bounces',
'transmission_bounces',
'volume_bounces',
'transparent_max_bounces',
'volume_step_rate',
'volume_max_steps',
'dicing_rate',
'max_subdivisions',
'dicing_camera',
'offscreen_dicing_scale',
'film_exposure',
'film_transparent_glass',
'film_transparent_roughness',
'filter_type',
'pixel_filter_type',
'filter_width',
'seed',
'use_animated_seed',
'sample_clamp_direct',
'sample_clamp_indirect',
'tile_order',
'use_progressive_refine',
'bake_type',
'use_camera_cull',
'camera_cull_margin',
'use_distance_cull',
'distance_cull_margin',
'motion_blur_position',
'rolling_shutter_type',
'rolling_shutter_duration',
'texture_limit',
'texture_limit_render',
'ao_bounces',
'ao_bounces_render',
]
VIEW_SETTINGS = [
'look',
'view_transform',
'exposure',
'gamma',
'use_curve_mapping',
'white_level',
'black_level'
]
class BlScene(BlDatablock): class BlScene(BlDatablock):
bl_id = "scenes" bl_id = "scenes"
@ -30,8 +267,14 @@ class BlScene(BlDatablock):
bl_delay_refresh = 1 bl_delay_refresh = 1
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = True bl_automatic_push = True
bl_check_common = True
bl_icon = 'SCENE_DATA' bl_icon = 'SCENE_DATA'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.diff_method = DIFF_JSON
def _construct(self, data): def _construct(self, data):
instance = bpy.data.scenes.new(data["name"]) instance = bpy.data.scenes.new(data["name"])
return instance return instance
@ -42,45 +285,38 @@ class BlScene(BlDatablock):
loader.load(target, data) loader.load(target, data)
# Load master collection # Load master collection
for object in data["collection"]["objects"]: load_collection_objects(
if object not in target.collection.objects.keys(): data['collection']['objects'], target.collection)
target.collection.objects.link(bpy.data.objects[object]) load_collection_childrens(
data['collection']['children'], target.collection)
for object in target.collection.objects.keys():
if object not in data["collection"]["objects"]:
target.collection.objects.unlink(bpy.data.objects[object])
# load collections
for collection in data["collection"]["children"]:
if collection not in target.collection.children.keys():
target.collection.children.link(
bpy.data.collections[collection])
for collection in target.collection.children.keys():
if collection not in data["collection"]["children"]:
target.collection.children.unlink(
bpy.data.collections[collection])
if 'world' in data.keys(): if 'world' in data.keys():
target.world = bpy.data.worlds[data['world']] target.world = bpy.data.worlds[data['world']]
# Annotation # Annotation
if 'grease_pencil' in data.keys(): if 'grease_pencil' in data.keys():
target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']] target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
if 'eevee' in data.keys(): if self.preferences.sync_flags.sync_render_settings:
loader.load(target.eevee, data['eevee']) if 'eevee' in data.keys():
loader.load(target.eevee, data['eevee'])
if 'cycles' in data.keys():
loader.load(target.eevee, data['cycles'])
if 'view_settings' in data.keys(): if 'cycles' in data.keys():
loader.load(target.view_settings, data['view_settings']) loader.load(target.cycles, data['cycles'])
if target.view_settings.use_curve_mapping:
#TODO: change this ugly fix if 'render' in data.keys():
target.view_settings.curve_mapping.white_level = data['view_settings']['curve_mapping']['white_level'] loader.load(target.render, data['render'])
target.view_settings.curve_mapping.black_level = data['view_settings']['curve_mapping']['black_level']
target.view_settings.curve_mapping.update() if 'view_settings' in data.keys():
loader.load(target.view_settings, data['view_settings'])
if target.view_settings.use_curve_mapping and \
'curve_mapping' in data['view_settings']:
# TODO: change this ugly fix
target.view_settings.curve_mapping.white_level = data[
'view_settings']['curve_mapping']['white_level']
target.view_settings.curve_mapping.black_level = data[
'view_settings']['curve_mapping']['black_level']
target.view_settings.curve_mapping.update()
def _dump_implementation(self, data, instance=None): def _dump_implementation(self, data, instance=None):
assert(instance) assert(instance)
@ -92,52 +328,55 @@ class BlScene(BlDatablock):
'name', 'name',
'world', 'world',
'id', 'id',
'camera',
'grease_pencil', 'grease_pencil',
'frame_start',
'frame_end',
'frame_step',
] ]
if self.preferences.sync_flags.sync_active_camera:
scene_dumper.include_filter.append('camera')
data = scene_dumper.dump(instance) data = scene_dumper.dump(instance)
scene_dumper.depth = 3 scene_dumper.depth = 3
scene_dumper.include_filter = ['children','objects','name'] scene_dumper.include_filter = ['children', 'objects', 'name']
data['collection'] = scene_dumper.dump(instance.collection) data['collection'] = {}
data['collection']['children'] = dump_collection_children(
instance.collection)
data['collection']['objects'] = dump_collection_objects(
instance.collection)
scene_dumper.depth = 1 scene_dumper.depth = 1
scene_dumper.include_filter = None scene_dumper.include_filter = None
pref = get_preferences()
if pref.sync_flags.sync_render_settings: if self.preferences.sync_flags.sync_render_settings:
scene_dumper.exclude_filter = [ scene_dumper.include_filter = RENDER_SETTINGS
'gi_cache_info',
'feature_set', data['render'] = scene_dumper.dump(instance.render)
'debug_use_hair_bvh',
'aa_samples', if instance.render.engine == 'BLENDER_EEVEE':
'blur_glossy', scene_dumper.include_filter = EVEE_SETTINGS
'glossy_bounces', data['eevee'] = scene_dumper.dump(instance.eevee)
'device', elif instance.render.engine == 'CYCLES':
'max_bounces', scene_dumper.include_filter = CYCLES_SETTINGS
'preview_aa_samples', data['cycles'] = scene_dumper.dump(instance.cycles)
'preview_samples',
'sample_clamp_indirect', scene_dumper.include_filter = VIEW_SETTINGS
'samples',
'volume_bounces'
]
data['eevee'] = scene_dumper.dump(instance.eevee)
data['cycles'] = scene_dumper.dump(instance.cycles)
data['view_settings'] = scene_dumper.dump(instance.view_settings) data['view_settings'] = scene_dumper.dump(instance.view_settings)
if instance.view_settings.use_curve_mapping: if instance.view_settings.use_curve_mapping:
data['view_settings']['curve_mapping'] = scene_dumper.dump(instance.view_settings.curve_mapping) data['view_settings']['curve_mapping'] = scene_dumper.dump(
instance.view_settings.curve_mapping)
scene_dumper.depth = 5 scene_dumper.depth = 5
scene_dumper.include_filter = [ scene_dumper.include_filter = [
'curves', 'curves',
'points', 'points',
'location' 'location',
] ]
data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(instance.view_settings.curve_mapping.curves) data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(
instance.view_settings.curve_mapping.curves)
return data return data
def _resolve_deps_implementation(self): def _resolve_deps_implementation(self):
@ -146,17 +385,31 @@ class BlScene(BlDatablock):
# child collections # child collections
for child in self.instance.collection.children: for child in self.instance.collection.children:
deps.append(child) deps.append(child)
# childs objects # childs objects
for object in self.instance.objects: for object in self.instance.collection.objects:
deps.append(object) deps.append(object)
# world # world
if self.instance.world: if self.instance.world:
deps.append(self.instance.world) deps.append(self.instance.world)
# annotations # annotations
if self.instance.grease_pencil: if self.instance.grease_pencil:
deps.append(self.instance.grease_pencil) deps.append(self.instance.grease_pencil)
return deps return deps
def diff(self):
exclude_path = []
if not self.preferences.sync_flags.sync_render_settings:
exclude_path.append("root['eevee']")
exclude_path.append("root['cycles']")
exclude_path.append("root['view_settings']")
exclude_path.append("root['render']")
if not self.preferences.sync_flags.sync_active_camera:
exclude_path.append("root['camera']")
return DeepDiff(self.data, self._dump(instance=self.instance), exclude_paths=exclude_path)

View File

@ -0,0 +1,69 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import logging
import os
from pathlib import Path
import bpy
from .bl_file import get_filepath, ensure_unpacked
from .bl_datablock import BlDatablock
from .dump_anything import Dumper, Loader
class BlSound(BlDatablock):
bl_id = "sounds"
bl_class = bpy.types.Sound
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'SOUND'
def _construct(self, data):
filename = data.get('filename')
return bpy.data.sounds.load(get_filepath(filename))
def _load(self, data, target):
loader = Loader()
loader.load(target, data)
def diff(self):
return False
def _dump(self, instance=None):
filename = Path(instance.filepath).name
if not filename:
raise FileExistsError(instance.filepath)
return {
'filename': filename,
'name': instance.name
}
def _resolve_deps_implementation(self):
deps = []
if self.instance.filepath and self.instance.filepath != '<builtin>':
ensure_unpacked(self.instance)
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
return deps

View File

@ -29,6 +29,7 @@ class BlSpeaker(BlDatablock):
bl_delay_refresh = 1 bl_delay_refresh = 1
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = True bl_automatic_push = True
bl_check_common = False
bl_icon = 'SPEAKER' bl_icon = 'SPEAKER'
def _load_implementation(self, data, target): def _load_implementation(self, data, target):
@ -48,6 +49,7 @@ class BlSpeaker(BlDatablock):
'volume', 'volume',
'name', 'name',
'pitch', 'pitch',
'sound',
'volume_min', 'volume_min',
'volume_max', 'volume_max',
'attenuation', 'attenuation',
@ -60,6 +62,15 @@ class BlSpeaker(BlDatablock):
return dumper.dump(instance) return dumper.dump(instance)
def _resolve_deps_implementation(self):
# TODO: resolve material
deps = []
sound = self.instance.sound
if sound:
deps.append(sound)
return deps

View File

@ -21,7 +21,11 @@ import mathutils
from .dump_anything import Loader, Dumper from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock from .bl_datablock import BlDatablock
from .bl_material import load_links, load_node, dump_node, dump_links from .bl_material import (load_links,
load_node,
dump_node,
dump_links,
get_node_tree_dependencies)
class BlWorld(BlDatablock): class BlWorld(BlDatablock):
@ -30,12 +34,16 @@ class BlWorld(BlDatablock):
bl_delay_refresh = 1 bl_delay_refresh = 1
bl_delay_apply = 1 bl_delay_apply = 1
bl_automatic_push = True bl_automatic_push = True
bl_check_common = True
bl_icon = 'WORLD_DATA' bl_icon = 'WORLD_DATA'
def _construct(self, data): def _construct(self, data):
return bpy.data.worlds.new(data["name"]) return bpy.data.worlds.new(data["name"])
def _load_implementation(self, data, target): def _load_implementation(self, data, target):
loader = Loader()
loader.load(target, data)
if data["use_nodes"]: if data["use_nodes"]:
if target.node_tree is None: if target.node_tree is None:
target.use_nodes = True target.use_nodes = True
@ -48,26 +56,21 @@ class BlWorld(BlDatablock):
# Load nodes links # Load nodes links
target.node_tree.links.clear() target.node_tree.links.clear()
load_links(data["node_tree"]["links"], target.node_tree) load_links(data["node_tree"]["links"], target.node_tree)
def _dump_implementation(self, data, instance=None): def _dump_implementation(self, data, instance=None):
assert(instance) assert(instance)
world_dumper = Dumper() world_dumper = Dumper()
world_dumper.depth = 2 world_dumper.depth = 1
world_dumper.exclude_filter = [ world_dumper.include_filter = [
"preview", "use_nodes",
"original", "name",
"uuid", "color"
"color",
"cycles",
"light_settings",
"users",
"view_center"
] ]
data = world_dumper.dump(instance) data = world_dumper.dump(instance)
if instance.use_nodes: if instance.use_nodes:
data['node_tree'] = {}
nodes = {} nodes = {}
for node in instance.node_tree.nodes: for node in instance.node_tree.nodes:
@ -83,10 +86,7 @@ class BlWorld(BlDatablock):
deps = [] deps = []
if self.instance.use_nodes: if self.instance.use_nodes:
for node in self.instance.node_tree.nodes: deps.extend(get_node_tree_dependencies(self.instance.node_tree))
if node.type == 'TEX_IMAGE':
deps.append(node.image)
if self.is_library: if self.is_library:
deps.append(self.instance.library) deps.append(self.instance.library)
return deps return deps

View File

@ -24,8 +24,8 @@ import numpy as np
BPY_TO_NUMPY_TYPES = { BPY_TO_NUMPY_TYPES = {
'FLOAT': np.float, 'FLOAT': np.float32,
'INT': np.int, 'INT': np.int32,
'BOOL': np.bool} 'BOOL': np.bool}
PRIMITIVE_TYPES = ['FLOAT', 'INT', 'BOOLEAN'] PRIMITIVE_TYPES = ['FLOAT', 'INT', 'BOOLEAN']
@ -47,7 +47,7 @@ def np_load_collection(dikt: dict, collection: bpy.types.CollectionProperty, att
:type attributes: list :type attributes: list
""" """
if not dikt or len(collection) == 0: if not dikt or len(collection) == 0:
logging.warning(f'Skipping collection') logging.debug(f'Skipping collection {collection}')
return return
if attributes is None: if attributes is None:
@ -115,7 +115,7 @@ def np_dump_collection_primitive(collection: bpy.types.CollectionProperty, attri
:return: numpy byte buffer :return: numpy byte buffer
""" """
if len(collection) == 0: if len(collection) == 0:
logging.warning(f'Skipping empty {attribute} attribute') logging.debug(f'Skipping empty {attribute} attribute')
return {} return {}
attr_infos = collection[0].bl_rna.properties.get(attribute) attr_infos = collection[0].bl_rna.properties.get(attribute)
@ -192,7 +192,7 @@ def np_load_collection_primitives(collection: bpy.types.CollectionProperty, attr
:type sequence: strr :type sequence: strr
""" """
if len(collection) == 0 or not sequence: if len(collection) == 0 or not sequence:
logging.warning(f"Skipping loadin {attribute}") logging.debug(f"Skipping loading {attribute}")
return return
attr_infos = collection[0].bl_rna.properties.get(attribute) attr_infos = collection[0].bl_rna.properties.get(attribute)
@ -301,7 +301,7 @@ class Dumper:
self._dump_ID = (lambda x, depth: x.name, self._dump_default_as_branch) self._dump_ID = (lambda x, depth: x.name, self._dump_default_as_branch)
self._dump_collection = ( self._dump_collection = (
self._dump_default_as_leaf, self._dump_collection_as_branch) self._dump_default_as_leaf, self._dump_collection_as_branch)
self._dump_array = (self._dump_default_as_leaf, self._dump_array = (self._dump_array_as_branch,
self._dump_array_as_branch) self._dump_array_as_branch)
self._dump_matrix = (self._dump_matrix_as_leaf, self._dump_matrix = (self._dump_matrix_as_leaf,
self._dump_matrix_as_leaf) self._dump_matrix_as_leaf)
@ -593,6 +593,10 @@ class Loader:
instance.write(bpy.data.materials.get(dump)) instance.write(bpy.data.materials.get(dump))
elif isinstance(rna_property_type, T.Collection): elif isinstance(rna_property_type, T.Collection):
instance.write(bpy.data.collections.get(dump)) instance.write(bpy.data.collections.get(dump))
elif isinstance(rna_property_type, T.VectorFont):
instance.write(bpy.data.fonts.get(dump))
elif isinstance(rna_property_type, T.Sound):
instance.write(bpy.data.sounds.get(dump))
def _load_matrix(self, matrix, dump): def _load_matrix(self, matrix, dump):
matrix.write(mathutils.Matrix(dump)) matrix.write(mathutils.Matrix(dump))
@ -622,11 +626,11 @@ class Loader:
for k in self._ordered_keys(dump.keys()): for k in self._ordered_keys(dump.keys()):
v = dump[k] v = dump[k]
if not hasattr(default.read(), k): if not hasattr(default.read(), k):
logging.debug(f"Load default, skipping {default} : {k}") continue
try: try:
self._load_any(default.extend(k), v) self._load_any(default.extend(k), v)
except Exception as err: except Exception as err:
logging.debug(f"Cannot load {k}: {err}") logging.debug(f"Skipping {k}")
@property @property
def match_subset_all(self): def match_subset_all(self):

View File

@ -19,21 +19,36 @@ import logging
import bpy import bpy
from . import operators, presence, utils from . import utils
from .libs.replication.replication.constants import (FETCHED, from .presence import (renderer,
RP_COMMON, UserFrustumWidget,
STATE_INITIAL, UserNameWidget,
STATE_QUITTING, UserSelectionWidget,
STATE_ACTIVE, refresh_3d_view,
STATE_SYNCING, generate_user_camera,
STATE_LOBBY, get_view_matrix,
STATE_SRV_SYNC) refresh_sidebar_view)
from replication.constants import (FETCHED,
UP,
RP_COMMON,
STATE_INITIAL,
STATE_QUITTING,
STATE_ACTIVE,
STATE_SYNCING,
STATE_LOBBY,
STATE_SRV_SYNC,
REPARENT)
from replication.interface import session
from replication.exception import NonAuthorizedOperationError
class Delayable(): class Delayable():
"""Delayable task interface """Delayable task interface
""" """
def __init__(self):
self.is_registered = False
def register(self): def register(self):
raise NotImplementedError raise NotImplementedError
@ -51,13 +66,21 @@ class Timer(Delayable):
""" """
def __init__(self, duration=1): def __init__(self, duration=1):
super().__init__()
self._timeout = duration self._timeout = duration
self._running = True self._running = True
def register(self): def register(self):
"""Register the timer into the blender timer system """Register the timer into the blender timer system
""" """
bpy.app.timers.register(self.main)
if not self.is_registered:
bpy.app.timers.register(self.main)
self.is_registered = True
logging.debug(f"Register {self.__class__.__name__}")
else:
logging.debug(
f"Timer {self.__class__.__name__} already registered")
def main(self): def main(self):
self.execute() self.execute()
@ -85,18 +108,29 @@ class ApplyTimer(Timer):
super().__init__(timout) super().__init__(timout)
def execute(self): def execute(self):
client = operators.client if session and session.state['STATE'] == STATE_ACTIVE:
if client and client.state['STATE'] == STATE_ACTIVE: if self._type:
nodes = client.list(filter=self._type) nodes = session.list(filter=self._type)
else:
nodes = session.list()
for node in nodes: for node in nodes:
node_ref = client.get(uuid=node) node_ref = session.get(uuid=node)
if node_ref.state == FETCHED: if node_ref.state == FETCHED:
try: try:
client.apply(node) session.apply(node)
except Exception as e: except Exception as e:
logging.error(f"Fail to apply {node_ref.uuid}: {e}") logging.error(f"Fail to apply {node_ref.uuid}: {e}")
elif node_ref.state == REPARENT:
# Reload the node
node_ref.remove_instance()
node_ref.resolve()
session.apply(node)
for parent in session._graph.find_parents(node):
logging.info(f"Applying parent {parent}")
session.apply(parent, force=True)
node_ref.state = UP
class DynamicRightSelectTimer(Timer): class DynamicRightSelectTimer(Timer):
@ -107,7 +141,6 @@ class DynamicRightSelectTimer(Timer):
self._right_strategy = RP_COMMON self._right_strategy = RP_COMMON
def execute(self): def execute(self):
session = operators.client
settings = utils.get_preferences() settings = utils.get_preferences()
if session and session.state['STATE'] == STATE_ACTIVE: if session and session.state['STATE'] == STATE_ACTIVE:
@ -134,10 +167,14 @@ class DynamicRightSelectTimer(Timer):
recursive = True recursive = True
if node.data and 'instance_type' in node.data.keys(): if node.data and 'instance_type' in node.data.keys():
recursive = node.data['instance_type'] != 'COLLECTION' recursive = node.data['instance_type'] != 'COLLECTION'
session.change_owner( try:
node.uuid, session.change_owner(
RP_COMMON, node.uuid,
recursive=recursive) RP_COMMON,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(f"Not authorized to change {node} owner")
# change new selection to our # change new selection to our
for obj in obj_ours: for obj in obj_ours:
@ -148,10 +185,14 @@ class DynamicRightSelectTimer(Timer):
if node.data and 'instance_type' in node.data.keys(): if node.data and 'instance_type' in node.data.keys():
recursive = node.data['instance_type'] != 'COLLECTION' recursive = node.data['instance_type'] != 'COLLECTION'
session.change_owner( try:
node.uuid, session.change_owner(
settings.username, node.uuid,
recursive=recursive) settings.username,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(f"Not authorized to change {node} owner")
else: else:
return return
@ -170,101 +211,49 @@ class DynamicRightSelectTimer(Timer):
filter_owner=settings.username) filter_owner=settings.username)
for key in owned_keys: for key in owned_keys:
node = session.get(uuid=key) node = session.get(uuid=key)
try:
session.change_owner(
key,
RP_COMMON,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(f"Not authorized to change {key} owner")
session.change_owner( for obj in bpy.data.objects:
key, object_uuid = getattr(obj, 'uuid', None)
RP_COMMON, if object_uuid:
recursive=recursive) is_selectable = not session.is_readonly(object_uuid)
if obj.hide_select != is_selectable:
for user, user_info in session.online_users.items(): obj.hide_select = is_selectable
if user != settings.username:
metadata = user_info.get('metadata')
if 'selected_objects' in metadata:
# Update selectionnable objects
for obj in bpy.data.objects:
if obj.hide_select and obj.uuid not in metadata['selected_objects']:
obj.hide_select = False
elif not obj.hide_select and obj.uuid in metadata['selected_objects']:
obj.hide_select = True
class Draw(Delayable):
def __init__(self):
self._handler = None
def register(self):
self._handler = bpy.types.SpaceView3D.draw_handler_add(
self.execute, (), 'WINDOW', 'POST_VIEW')
def execute(self):
raise NotImplementedError()
def unregister(self):
try:
bpy.types.SpaceView3D.draw_handler_remove(
self._handler, "WINDOW")
except:
pass
class DrawClient(Draw):
def execute(self):
session = getattr(operators, 'client', None)
renderer = getattr(presence, 'renderer', None)
prefs = utils.get_preferences()
if session and renderer and session.state['STATE'] == STATE_ACTIVE:
settings = bpy.context.window_manager.session
users = session.online_users
# Update users
for user in users.values():
metadata = user.get('metadata')
color = metadata.get('color')
scene_current = metadata.get('scene_current')
user_showable = scene_current == bpy.context.scene.name or settings.presence_show_far_user
if color and scene_current and user_showable:
if settings.presence_show_selected and 'selected_objects' in metadata.keys():
renderer.draw_client_selection(
user['id'], color, metadata['selected_objects'])
if settings.presence_show_user and 'view_corners' in metadata:
renderer.draw_client_camera(
user['id'], metadata['view_corners'], color)
if not user_showable:
# TODO: remove this when user event drivent update will be
# ready
renderer.flush_selection()
renderer.flush_users()
class ClientUpdate(Timer): class ClientUpdate(Timer):
def __init__(self, timout=.016): def __init__(self, timout=.1):
super().__init__(timout) super().__init__(timout)
self.handle_quit = False self.handle_quit = False
self.users_metadata = {} self.users_metadata = {}
def execute(self): def execute(self):
settings = utils.get_preferences() settings = utils.get_preferences()
session = getattr(operators, 'client', None)
renderer = getattr(presence, 'renderer', None)
if session and renderer: if session and renderer:
if session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]: if session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]:
local_user = operators.client.online_users.get(settings.username) local_user = session.online_users.get(
settings.username)
if not local_user: if not local_user:
return return
else: else:
for username, user_data in operators.client.online_users.items(): for username, user_data in session.online_users.items():
if username != settings.username: if username != settings.username:
cached_user_data = self.users_metadata.get(username) cached_user_data = self.users_metadata.get(
new_user_data = operators.client.online_users[username]['metadata'] username)
new_user_data = session.online_users[username]['metadata']
if cached_user_data is None: if cached_user_data is None:
self.users_metadata[username] = user_data['metadata'] self.users_metadata[username] = user_data['metadata']
elif 'view_matrix' in cached_user_data and 'view_matrix' in new_user_data and cached_user_data['view_matrix'] != new_user_data['view_matrix']: elif 'view_matrix' in cached_user_data and 'view_matrix' in new_user_data and cached_user_data['view_matrix'] != new_user_data['view_matrix']:
presence.refresh_3d_view() refresh_3d_view()
self.users_metadata[username] = user_data['metadata'] self.users_metadata[username] = user_data['metadata']
break break
else: else:
@ -272,18 +261,18 @@ class ClientUpdate(Timer):
local_user_metadata = local_user.get('metadata') local_user_metadata = local_user.get('metadata')
scene_current = bpy.context.scene.name scene_current = bpy.context.scene.name
local_user = session.online_users.get(settings.username) local_user = session.online_users.get(settings.username)
current_view_corners = presence.get_view_corners() current_view_corners = generate_user_camera()
# Init client metadata # Init client metadata
if not local_user_metadata or 'color' not in local_user_metadata.keys(): if not local_user_metadata or 'color' not in local_user_metadata.keys():
metadata = { metadata = {
'view_corners': presence.get_view_matrix(), 'view_corners': get_view_matrix(),
'view_matrix': presence.get_view_matrix(), 'view_matrix': get_view_matrix(),
'color': (settings.client_color.r, 'color': (settings.client_color.r,
settings.client_color.g, settings.client_color.g,
settings.client_color.b, settings.client_color.b,
1), 1),
'frame_current': bpy.context.scene.frame_current, 'frame_current': bpy.context.scene.frame_current,
'scene_current': scene_current 'scene_current': scene_current
} }
@ -296,33 +285,60 @@ class ClientUpdate(Timer):
session.update_user_metadata(local_user_metadata) session.update_user_metadata(local_user_metadata)
elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']: elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']:
local_user_metadata['view_corners'] = current_view_corners local_user_metadata['view_corners'] = current_view_corners
local_user_metadata['view_matrix'] = presence.get_view_matrix() local_user_metadata['view_matrix'] = get_view_matrix(
)
session.update_user_metadata(local_user_metadata) session.update_user_metadata(local_user_metadata)
# sync online users
session_users = operators.client.online_users
ui_users = bpy.context.window_manager.online_users
for index, user in enumerate(ui_users):
if user.username not in session_users.keys():
ui_users.remove(index)
renderer.flush_selection()
renderer.flush_users()
break
for user in session_users: class SessionStatusUpdate(Timer):
if user not in ui_users: def __init__(self, timout=1):
new_key = ui_users.add() super().__init__(timout)
new_key.name = user
new_key.username = user
elif session.state['STATE'] == STATE_QUITTING:
presence.refresh_sidebar_view()
self.handle_quit = True
elif session.state['STATE'] == STATE_INITIAL and self.handle_quit:
self.handle_quit = False
presence.refresh_sidebar_view()
operators.unregister_delayables() def execute(self):
refresh_sidebar_view()
presence.renderer.stop()
presence.refresh_sidebar_view() class SessionUserSync(Timer):
def __init__(self, timout=1):
super().__init__(timout)
self.settings = utils.get_preferences()
def execute(self):
if session and renderer:
# sync online users
session_users = session.online_users
ui_users = bpy.context.window_manager.online_users
for index, user in enumerate(ui_users):
if user.username not in session_users.keys() and \
user.username != self.settings.username:
renderer.remove_widget(f"{user.username}_cam")
renderer.remove_widget(f"{user.username}_select")
renderer.remove_widget(f"{user.username}_name")
ui_users.remove(index)
break
for user in session_users:
if user not in ui_users:
new_key = ui_users.add()
new_key.name = user
new_key.username = user
if user != self.settings.username:
renderer.add_widget(
f"{user}_cam", UserFrustumWidget(user))
renderer.add_widget(
f"{user}_select", UserSelectionWidget(user))
renderer.add_widget(
f"{user}_name", UserNameWidget(user))
class MainThreadExecutor(Timer):
def __init__(self, timout=1, execution_queue=None):
super().__init__(timout)
self.execution_queue = execution_queue
def execute(self):
while not self.execution_queue.empty():
function = self.execution_queue.get()
logging.debug(f"Executing {function.__name__}")
function()

View File

@ -23,6 +23,9 @@ import subprocess
import sys import sys
from pathlib import Path from pathlib import Path
import socket import socket
import re
VERSION_EXPR = re.compile('\d+\.\d+\.\d+\w\d+')
THIRD_PARTY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs") THIRD_PARTY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
DEFAULT_CACHE_DIR = os.path.join( DEFAULT_CACHE_DIR = os.path.join(
@ -47,10 +50,29 @@ def install_pip():
subprocess.run([str(PYTHON_PATH), "-m", "ensurepip"]) subprocess.run([str(PYTHON_PATH), "-m", "ensurepip"])
def install_package(name): def install_package(name, version):
logging.debug(f"Using {PYTHON_PATH} for installation") logging.info(f"installing {name} version...")
subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", name]) env = os.environ
if "PIP_REQUIRE_VIRTUALENV" in env:
# PIP_REQUIRE_VIRTUALENV is an env var to ensure pip cannot install packages outside a virtual env
# https://docs.python-guide.org/dev/pip-virtualenv/
# But since Blender's pip is outside of a virtual env, it can block our packages installation, so we unset the
# env var for the subprocess.
env = os.environ.copy()
del env["PIP_REQUIRE_VIRTUALENV"]
subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}=={version}"], env=env)
def check_package_version(name, required_version):
logging.info(f"Checking {name} version...")
out = subprocess.run([str(PYTHON_PATH), "-m", "pip", "show", name], capture_output=True)
version = VERSION_EXPR.search(out.stdout.decode())
if version and version.group() == required_version:
logging.info(f"{name} is up to date")
return True
else:
logging.info(f"{name} need an update")
return False
def get_ip(): def get_ip():
""" """
@ -78,7 +100,9 @@ def setup(dependencies, python_path):
if not module_can_be_imported("pip"): if not module_can_be_imported("pip"):
install_pip() install_pip()
for module_name, package_name in dependencies: for package_name, package_version in dependencies:
if not module_can_be_imported(module_name): if not module_can_be_imported(package_name):
install_package(package_name) install_package(package_name, package_version)
module_can_be_imported(package_name) module_can_be_imported(package_name)
elif not check_package_version(package_name, package_version):
install_package(package_name, package_version)

View File

@ -21,35 +21,81 @@ import logging
import os import os
import queue import queue
import random import random
import shutil
import string import string
import time import time
from operator import itemgetter from operator import itemgetter
from pathlib import Path from pathlib import Path
from subprocess import PIPE, Popen, TimeoutExpired from queue import Queue
import zmq
import bpy import bpy
import mathutils import mathutils
from bpy.app.handlers import persistent from bpy.app.handlers import persistent
from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE,
STATE_INITIAL, STATE_SYNCING, UP)
from replication.data import ReplicatedDataFactory
from replication.exception import NonAuthorizedOperationError
from replication.interface import session
from . import bl_types, delayable, environment, presence, ui, utils from . import bl_types, delayable, environment, ui, utils
from .libs.replication.replication.constants import (FETCHED, STATE_ACTIVE, from .presence import (SessionStatusWidget, renderer, view3d_find)
STATE_INITIAL,
STATE_SYNCING)
from .libs.replication.replication.data import ReplicatedDataFactory
from .libs.replication.replication.exception import NonAuthorizedOperationError
from .libs.replication.replication.interface import Session
background_execution_queue = Queue()
client = None
delayables = [] delayables = []
stop_modal_executor = False stop_modal_executor = False
modal_executor_queue = None
def unregister_delayables(): def session_callback(name):
""" Session callback wrapper
This allow to encapsulate session callbacks to background_execution_queue.
By doing this way callback are executed from the main thread.
"""
def func_wrapper(func):
@session.register(name)
def add_background_task():
background_execution_queue.put(func)
return add_background_task
return func_wrapper
@session_callback('on_connection')
def initialize_session():
"""Session connection init hander
"""
settings = utils.get_preferences()
runtime_settings = bpy.context.window_manager.session
# Step 1: Constrect nodes
for node in session._graph.list_ordered():
node_ref = session.get(node)
if node_ref.state == FETCHED:
node_ref.resolve()
# Step 2: Load nodes
for node in session._graph.list_ordered():
node_ref = session.get(node)
if node_ref.state == FETCHED:
node_ref.apply()
# Step 4: Register blender timers
for d in delayables:
d.register()
if settings.update_method == 'DEPSGRAPH':
bpy.app.handlers.depsgraph_update_post.append(depsgraph_evaluation)
bpy.ops.session.apply_armature_operator('INVOKE_DEFAULT')
@session_callback('on_exit')
def on_connection_end():
"""Session connection finished handler
"""
global delayables, stop_modal_executor global delayables, stop_modal_executor
settings = utils.get_preferences()
# Step 1: Unregister blender timers
for d in delayables: for d in delayables:
try: try:
d.unregister() d.unregister()
@ -58,9 +104,18 @@ def unregister_delayables():
stop_modal_executor = True stop_modal_executor = True
if settings.update_method == 'DEPSGRAPH':
bpy.app.handlers.depsgraph_update_post.remove(
depsgraph_evaluation)
# Step 3: remove file handled
logger = logging.getLogger()
for handler in logger.handlers:
if isinstance(handler, logging.FileHandler):
logger.removeHandler(handler)
# OPERATORS # OPERATORS
class SessionStartOperator(bpy.types.Operator): class SessionStartOperator(bpy.types.Operator):
bl_idname = "session.start" bl_idname = "session.start"
bl_label = "start" bl_label = "start"
@ -73,17 +128,38 @@ class SessionStartOperator(bpy.types.Operator):
return True return True
def execute(self, context): def execute(self, context):
global client, delayables global delayables
settings = utils.get_preferences() settings = utils.get_preferences()
runtime_settings = context.window_manager.session runtime_settings = context.window_manager.session
users = bpy.data.window_managers['WinMan'].online_users users = bpy.data.window_managers['WinMan'].online_users
admin_pass = runtime_settings.password admin_pass = runtime_settings.password
use_extern_update = settings.update_method == 'DEPSGRAPH'
unregister_delayables()
users.clear() users.clear()
delayables.clear() delayables.clear()
logger = logging.getLogger()
if len(logger.handlers) == 1:
formatter = logging.Formatter(
fmt='%(asctime)s CLIENT %(levelname)-8s %(message)s',
datefmt='%H:%M:%S'
)
log_directory = os.path.join(
settings.cache_directory,
"multiuser_client.log")
os.makedirs(settings.cache_directory, exist_ok=True)
handler = logging.FileHandler(log_directory, mode='w')
logger.addHandler(handler)
for handler in logger.handlers:
if isinstance(handler, logging.NullHandler):
continue
handler.setFormatter(formatter)
bpy_factory = ReplicatedDataFactory() bpy_factory = ReplicatedDataFactory()
supported_bl_types = [] supported_bl_types = []
@ -95,24 +171,35 @@ class SessionStartOperator(bpy.types.Operator):
supported_bl_types.append(type_module_class.bl_id) supported_bl_types.append(type_module_class.bl_id)
# Retreive local replicated types settings if type_impl_name not in settings.supported_datablocks:
logging.info(f"{type_impl_name} not found, \
regenerate type settings...")
settings.generate_supported_types()
type_local_config = settings.supported_datablocks[type_impl_name] type_local_config = settings.supported_datablocks[type_impl_name]
bpy_factory.register_type( bpy_factory.register_type(
type_module_class.bl_class, type_module_class.bl_class,
type_module_class, type_module_class,
timer=type_local_config.bl_delay_refresh, timer=type_local_config.bl_delay_refresh*1000,
automatic=type_local_config.auto_push) automatic=type_local_config.auto_push,
check_common=type_module_class.bl_check_common)
if type_local_config.bl_delay_apply > 0: if settings.update_method == 'DEFAULT':
delayables.append( if type_local_config.bl_delay_apply > 0:
delayable.ApplyTimer( delayables.append(
timout=type_local_config.bl_delay_apply, delayable.ApplyTimer(
target_type=type_module_class)) timout=type_local_config.bl_delay_apply,
target_type=type_module_class))
client = Session( session.configure(
factory=bpy_factory, factory=bpy_factory,
python_path=bpy.app.binary_path_python) python_path=bpy.app.binary_path_python,
external_update_handling=use_extern_update)
if settings.update_method == 'DEPSGRAPH':
delayables.append(delayable.ApplyTimer(
settings.depsgraph_update_rate/1000))
# Host a session # Host a session
if self.host: if self.host:
@ -122,30 +209,34 @@ class SessionStartOperator(bpy.types.Operator):
runtime_settings.is_host = True runtime_settings.is_host = True
runtime_settings.internet_ip = environment.get_ip() runtime_settings.internet_ip = environment.get_ip()
for scene in bpy.data.scenes:
client.add(scene)
try: try:
client.host( for scene in bpy.data.scenes:
session.add(scene)
session.host(
id=settings.username, id=settings.username,
port=settings.port, port=settings.port,
ipc_port=settings.ipc_port, ipc_port=settings.ipc_port,
timeout=settings.connection_timeout, timeout=settings.connection_timeout,
password=admin_pass password=admin_pass,
cache_directory=settings.cache_directory,
server_log_level=logging.getLevelName(
logging.getLogger().level),
) )
except Exception as e: except Exception as e:
self.report({'ERROR'}, repr(e)) self.report({'ERROR'}, repr(e))
logging.error(f"Error: {e}") logging.error(f"Error: {e}")
import traceback
traceback.print_exc()
# Join a session # Join a session
else: else:
if not runtime_settings.admin: if not runtime_settings.admin:
utils.clean_scene() utils.clean_scene()
# regular client, no password needed # regular session, no password needed
admin_pass = None admin_pass = None
try: try:
client.connect( session.connect(
id=settings.username, id=settings.username,
address=settings.ip, address=settings.ip,
port=settings.port, port=settings.port,
@ -158,22 +249,23 @@ class SessionStartOperator(bpy.types.Operator):
logging.error(str(e)) logging.error(str(e))
# Background client updates service # Background client updates service
#TODO: Refactoring
delayables.append(delayable.ClientUpdate()) delayables.append(delayable.ClientUpdate())
delayables.append(delayable.DrawClient())
delayables.append(delayable.DynamicRightSelectTimer()) delayables.append(delayable.DynamicRightSelectTimer())
# Launch drawing module session_update = delayable.SessionStatusUpdate()
if runtime_settings.enable_presence: session_user_sync = delayable.SessionUserSync()
presence.renderer.run() session_background_executor = delayable.MainThreadExecutor(
execution_queue=background_execution_queue)
# Register blender main thread tools session_update.register()
for d in delayables: session_user_sync.register()
d.register() session_background_executor.register()
global modal_executor_queue delayables.append(session_background_executor)
modal_executor_queue = queue.Queue() delayables.append(session_update)
bpy.ops.session.apply_armature_operator() delayables.append(session_user_sync)
self.report( self.report(
{'INFO'}, {'INFO'},
@ -209,15 +301,13 @@ class SessionInitOperator(bpy.types.Operator):
return wm.invoke_props_dialog(self) return wm.invoke_props_dialog(self)
def execute(self, context): def execute(self, context):
global client
if self.init_method == 'EMPTY': if self.init_method == 'EMPTY':
utils.clean_scene() utils.clean_scene()
for scene in bpy.data.scenes: for scene in bpy.data.scenes:
client.add(scene) session.add(scene)
client.init() session.init()
return {"FINISHED"} return {"FINISHED"}
@ -233,11 +323,12 @@ class SessionStopOperator(bpy.types.Operator):
return True return True
def execute(self, context): def execute(self, context):
global client, delayables, stop_modal_executor global delayables, stop_modal_executor
if client: if session:
try: try:
client.disconnect() session.disconnect()
except Exception as e: except Exception as e:
self.report({'ERROR'}, repr(e)) self.report({'ERROR'}, repr(e))
else: else:
@ -249,7 +340,7 @@ class SessionStopOperator(bpy.types.Operator):
class SessionKickOperator(bpy.types.Operator): class SessionKickOperator(bpy.types.Operator):
bl_idname = "session.kick" bl_idname = "session.kick"
bl_label = "Kick" bl_label = "Kick"
bl_description = "Kick the user" bl_description = "Kick the target user"
bl_options = {"REGISTER"} bl_options = {"REGISTER"}
user: bpy.props.StringProperty() user: bpy.props.StringProperty()
@ -259,11 +350,11 @@ class SessionKickOperator(bpy.types.Operator):
return True return True
def execute(self, context): def execute(self, context):
global client, delayables, stop_modal_executor global delayables, stop_modal_executor
assert(client) assert(session)
try: try:
client.kick(self.user) session.kick(self.user)
except Exception as e: except Exception as e:
self.report({'ERROR'}, repr(e)) self.report({'ERROR'}, repr(e))
@ -279,8 +370,9 @@ class SessionKickOperator(bpy.types.Operator):
class SessionPropertyRemoveOperator(bpy.types.Operator): class SessionPropertyRemoveOperator(bpy.types.Operator):
bl_idname = "session.remove_prop" bl_idname = "session.remove_prop"
bl_label = "remove" bl_label = "Delete cache"
bl_description = "broadcast a property to connected client_instances" bl_description = "Stop tracking modification on the target datablock." + \
"The datablock will no longer be updated for others client. "
bl_options = {"REGISTER"} bl_options = {"REGISTER"}
property_path: bpy.props.StringProperty(default="None") property_path: bpy.props.StringProperty(default="None")
@ -290,9 +382,8 @@ class SessionPropertyRemoveOperator(bpy.types.Operator):
return True return True
def execute(self, context): def execute(self, context):
global client
try: try:
client.remove(self.property_path) session.remove(self.property_path)
return {"FINISHED"} return {"FINISHED"}
except: # NonAuthorizedOperationError: except: # NonAuthorizedOperationError:
@ -304,11 +395,12 @@ class SessionPropertyRemoveOperator(bpy.types.Operator):
class SessionPropertyRightOperator(bpy.types.Operator): class SessionPropertyRightOperator(bpy.types.Operator):
bl_idname = "session.right" bl_idname = "session.right"
bl_label = "Change owner to" bl_label = "Change modification rights"
bl_description = "Change owner of specified datablock" bl_description = "Modify the owner of the target datablock"
bl_options = {"REGISTER"} bl_options = {"REGISTER"}
key: bpy.props.StringProperty(default="None") key: bpy.props.StringProperty(default="None")
recursive: bpy.props.BoolProperty(default=True)
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
@ -322,15 +414,21 @@ class SessionPropertyRightOperator(bpy.types.Operator):
layout = self.layout layout = self.layout
runtime_settings = context.window_manager.session runtime_settings = context.window_manager.session
col = layout.column() row = layout.row()
col.prop(runtime_settings, "clients") row.label(text="Give the owning rights to:")
row.prop(runtime_settings, "clients", text="")
row = layout.row()
row.label(text="Affect dependencies")
row.prop(self, "recursive", text="")
def execute(self, context): def execute(self, context):
runtime_settings = context.window_manager.session runtime_settings = context.window_manager.session
global client
if client: if session:
client.change_owner(self.key, runtime_settings.clients) session.change_owner(self.key,
runtime_settings.clients,
ignore_warnings=True,
affect_dependencies=self.recursive)
return {"FINISHED"} return {"FINISHED"}
@ -376,11 +474,10 @@ class SessionSnapUserOperator(bpy.types.Operator):
return {'CANCELLED'} return {'CANCELLED'}
if event.type == 'TIMER': if event.type == 'TIMER':
area, region, rv3d = presence.view3d_find() area, region, rv3d = view3d_find()
global client
if client: if session:
target_ref = client.online_users.get(self.target_client) target_ref = session.online_users.get(self.target_client)
if target_ref: if target_ref:
target_scene = target_ref['metadata']['scene_current'] target_scene = target_ref['metadata']['scene_current']
@ -389,14 +486,16 @@ class SessionSnapUserOperator(bpy.types.Operator):
if target_scene != context.scene.name: if target_scene != context.scene.name:
blender_scene = bpy.data.scenes.get(target_scene, None) blender_scene = bpy.data.scenes.get(target_scene, None)
if blender_scene is None: if blender_scene is None:
self.report({'ERROR'}, f"Scene {target_scene} doesn't exist on the local client.") self.report(
{'ERROR'}, f"Scene {target_scene} doesn't exist on the local client.")
session_sessings.time_snap_running = False session_sessings.time_snap_running = False
return {"CANCELLED"} return {"CANCELLED"}
bpy.context.window.scene = blender_scene bpy.context.window.scene = blender_scene
# Update client viewmatrix # Update client viewmatrix
client_vmatrix = target_ref['metadata'].get('view_matrix', None) client_vmatrix = target_ref['metadata'].get(
'view_matrix', None)
if client_vmatrix: if client_vmatrix:
rv3d.view_matrix = mathutils.Matrix(client_vmatrix) rv3d.view_matrix = mathutils.Matrix(client_vmatrix)
@ -449,10 +548,8 @@ class SessionSnapTimeOperator(bpy.types.Operator):
return {'CANCELLED'} return {'CANCELLED'}
if event.type == 'TIMER': if event.type == 'TIMER':
global client if session:
target_ref = session.online_users.get(self.target_client)
if client:
target_ref = client.online_users.get(self.target_client)
if target_ref: if target_ref:
context.scene.frame_current = target_ref['metadata']['frame_current'] context.scene.frame_current = target_ref['metadata']['frame_current']
@ -464,28 +561,31 @@ class SessionSnapTimeOperator(bpy.types.Operator):
class SessionApply(bpy.types.Operator): class SessionApply(bpy.types.Operator):
bl_idname = "session.apply" bl_idname = "session.apply"
bl_label = "apply selected block into blender" bl_label = "Revert"
bl_description = "Apply selected block into blender" bl_description = "Revert the selected datablock from his cached" + \
" version."
bl_options = {"REGISTER"} bl_options = {"REGISTER"}
target: bpy.props.StringProperty() target: bpy.props.StringProperty()
reset_dependencies: bpy.props.BoolProperty(default=False)
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return True return True
def execute(self, context): def execute(self, context):
global client logging.debug(f"Running apply on {self.target}")
session.apply(self.target,
client.apply(self.target) force=True,
force_dependencies=self.reset_dependencies)
return {"FINISHED"} return {"FINISHED"}
class SessionCommit(bpy.types.Operator): class SessionCommit(bpy.types.Operator):
bl_idname = "session.commit" bl_idname = "session.commit"
bl_label = "commit and push selected datablock to server" bl_label = "Force server update"
bl_description = "commit and push selected datablock to server" bl_description = "Commit and push the target datablock to server"
bl_options = {"REGISTER"} bl_options = {"REGISTER"}
target: bpy.props.StringProperty() target: bpy.props.StringProperty()
@ -495,10 +595,9 @@ class SessionCommit(bpy.types.Operator):
return True return True
def execute(self, context): def execute(self, context):
global client # session.get(uuid=target).diff()
# client.get(uuid=target).diff() session.commit(uuid=self.target)
client.commit(uuid=self.target) session.push(self.target)
client.push(self.target)
return {"FINISHED"} return {"FINISHED"}
@ -516,18 +615,17 @@ class ApplyArmatureOperator(bpy.types.Operator):
return {'CANCELLED'} return {'CANCELLED'}
if event.type == 'TIMER': if event.type == 'TIMER':
global client if session and session.state['STATE'] == STATE_ACTIVE:
if client and client.state['STATE'] == STATE_ACTIVE: nodes = session.list(filter=bl_types.bl_armature.BlArmature)
nodes = client.list(filter=bl_types.bl_armature.BlArmature)
for node in nodes: for node in nodes:
node_ref = client.get(uuid=node) node_ref = session.get(uuid=node)
if node_ref.state == FETCHED: if node_ref.state == FETCHED:
try: try:
client.apply(node) session.apply(node)
except Exception as e: except Exception as e:
logging.error("Dail to apply armature: {e}") logging.error("Fail to apply armature: {e}")
return {'PASS_THROUGH'} return {'PASS_THROUGH'}
@ -546,6 +644,35 @@ class ApplyArmatureOperator(bpy.types.Operator):
stop_modal_executor = False stop_modal_executor = False
class ClearCache(bpy.types.Operator):
"Clear local session cache"
bl_idname = "session.clear_cache"
bl_label = "Modal Executor Operator"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
cache_dir = utils.get_preferences().cache_directory
try:
for root, dirs, files in os.walk(cache_dir):
for name in files:
Path(root, name).unlink()
except Exception as e:
self.report({'ERROR'}, repr(e))
return {"FINISHED"}
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
def draw(self, context):
row = self.layout
row.label(text=f" Do you really want to remove local cache ? ")
classes = ( classes = (
SessionStartOperator, SessionStartOperator,
SessionStopOperator, SessionStopOperator,
@ -558,7 +685,7 @@ classes = (
ApplyArmatureOperator, ApplyArmatureOperator,
SessionKickOperator, SessionKickOperator,
SessionInitOperator, SessionInitOperator,
ClearCache,
) )
@ -570,31 +697,65 @@ def sanitize_deps_graph(dummy):
A future solution should be to avoid storing dataclock reference... A future solution should be to avoid storing dataclock reference...
""" """
global client
if client and client.state['STATE'] == STATE_ACTIVE:
for node_key in client.list():
client.get(node_key).resolve()
if session and session.state['STATE'] == STATE_ACTIVE:
for node_key in session.list():
node = session.get(node_key)
if node and not node.resolve(construct=False):
session.remove(node_key)
@persistent @persistent
def load_pre_handler(dummy): def load_pre_handler(dummy):
global client if session and session.state['STATE'] in [STATE_ACTIVE, STATE_SYNCING]:
if client and client.state['STATE'] in [STATE_ACTIVE, STATE_SYNCING]:
bpy.ops.session.stop() bpy.ops.session.stop()
@persistent @persistent
def update_client_frame(scene): def update_client_frame(scene):
if client and client.state['STATE'] == STATE_ACTIVE: if session and session.state['STATE'] == STATE_ACTIVE:
client.update_user_metadata({ session.update_user_metadata({
'frame_current': scene.frame_current 'frame_current': scene.frame_current
}) })
@persistent
def depsgraph_evaluation(scene):
if session and session.state['STATE'] == STATE_ACTIVE:
context = bpy.context
blender_depsgraph = bpy.context.view_layer.depsgraph
dependency_updates = [u for u in blender_depsgraph.updates]
settings = utils.get_preferences()
# NOTE: maybe we don't need to check each update but only the first
for update in reversed(dependency_updates):
# Is the object tracked ?
if update.id.uuid:
# Retrieve local version
node = session.get(update.id.uuid)
# Check our right on this update:
# - if its ours or ( under common and diff), launch the
# update process
# - if its to someone else, ignore the update (go deeper ?)
if node and node.owner in [session.id, RP_COMMON] and node.state == UP:
# Avoid slow geometry update
if 'EDIT' in context.mode and \
not settings.sync_during_editmode:
break
session.stash(node.uuid)
else:
# Distant update
continue
# else:
# # New items !
# logger.error("UPDATE: ADD")
def register(): def register():
from bpy.utils import register_class from bpy.utils import register_class
for cls in classes: for cls in classes:
register_class(cls) register_class(cls)
@ -606,11 +767,8 @@ def register():
def unregister(): def unregister():
global client if session and session.state['STATE'] == STATE_ACTIVE:
session.disconnect()
if client and client.state['STATE'] == 2:
client.disconnect()
client = None
from bpy.utils import unregister_class from bpy.utils import unregister_class
for cls in reversed(classes): for cls in reversed(classes):
@ -621,7 +779,3 @@ def unregister():
bpy.app.handlers.load_pre.remove(load_pre_handler) bpy.app.handlers.load_pre.remove(load_pre_handler)
bpy.app.handlers.frame_change_pre.remove(update_client_frame) bpy.app.handlers.frame_change_pre.remove(update_client_frame)
if __name__ == "__main__":
register()

View File

@ -20,9 +20,14 @@ import logging
import bpy import bpy
import string import string
import re import re
import os
from . import utils, bl_types, environment, addon_updater_ops, presence, ui from pathlib import Path
from .libs.replication.replication.constants import RP_COMMON
from . import bl_types, environment, addon_updater_ops, presence, ui
from .utils import get_preferences, get_expanded_icon
from replication.constants import RP_COMMON
from replication.interface import session
IP_EXPR = re.compile('\d+\.\d+\.\d+\.\d+') IP_EXPR = re.compile('\d+\.\d+\.\d+\.\d+')
@ -36,7 +41,7 @@ def randomColor():
def random_string_digits(stringLength=6): def random_string_digits(stringLength=6):
"""Generate a random string of letters and digits """ """Generate a random string of letters and digits"""
lettersAndDigits = string.ascii_letters + string.digits lettersAndDigits = string.ascii_letters + string.digits
return ''.join(random.choices(lettersAndDigits, k=stringLength)) return ''.join(random.choices(lettersAndDigits, k=stringLength))
@ -46,6 +51,7 @@ def update_panel_category(self, context):
ui.SESSION_PT_settings.bl_category = self.panel_category ui.SESSION_PT_settings.bl_category = self.panel_category
ui.register() ui.register()
def update_ip(self, context): def update_ip(self, context):
ip = IP_EXPR.search(self.ip) ip = IP_EXPR.search(self.ip)
@ -55,14 +61,35 @@ def update_ip(self, context):
logging.error("Wrong IP format") logging.error("Wrong IP format")
self['ip'] = "127.0.0.1" self['ip'] = "127.0.0.1"
def update_port(self, context): def update_port(self, context):
max_port = self.port + 3 max_port = self.port + 3
if self.ipc_port < max_port and \ if self.ipc_port < max_port and \
self['ipc_port'] >= self.port: self['ipc_port'] >= self.port:
logging.error("IPC Port in conflic with the port, assigning a random value") logging.error(
"IPC Port in conflict with the port, assigning a random value")
self['ipc_port'] = random.randrange(self.port+4, 10000) self['ipc_port'] = random.randrange(self.port+4, 10000)
def update_directory(self, context):
new_dir = Path(self.cache_directory)
if new_dir.exists() and any(Path(self.cache_directory).iterdir()):
logging.error("The folder is not empty, choose another one.")
self['cache_directory'] = environment.DEFAULT_CACHE_DIR
elif not new_dir.exists():
logging.info("Target cache folder doesn't exist, creating it.")
os.makedirs(self.cache_directory, exist_ok=True)
def set_log_level(self, value):
logging.getLogger().setLevel(value)
def get_log_level(self):
return logging.getLogger().level
class ReplicatedDatablock(bpy.types.PropertyGroup): class ReplicatedDatablock(bpy.types.PropertyGroup):
type_name: bpy.props.StringProperty() type_name: bpy.props.StringProperty()
bl_name: bpy.props.StringProperty() bl_name: bpy.props.StringProperty()
@ -73,11 +100,49 @@ class ReplicatedDatablock(bpy.types.PropertyGroup):
icon: bpy.props.StringProperty() icon: bpy.props.StringProperty()
def set_sync_render_settings(self, value):
self['sync_render_settings'] = value
if session and bpy.context.scene.uuid and value:
bpy.ops.session.apply('INVOKE_DEFAULT',
target=bpy.context.scene.uuid,
reset_dependencies=False)
def set_sync_active_camera(self, value):
self['sync_active_camera'] = value
if session and bpy.context.scene.uuid and value:
bpy.ops.session.apply('INVOKE_DEFAULT',
target=bpy.context.scene.uuid,
reset_dependencies=False)
class ReplicationFlags(bpy.types.PropertyGroup): class ReplicationFlags(bpy.types.PropertyGroup):
def get_sync_render_settings(self):
return self.get('sync_render_settings', True)
def get_sync_active_camera(self):
    """Getter for the 'sync_active_camera' flag; enabled by default."""
    default_enabled = True
    return self.get('sync_active_camera', default_enabled)
sync_render_settings: bpy.props.BoolProperty( sync_render_settings: bpy.props.BoolProperty(
name="Synchronize render settings", name="Synchronize render settings",
description="Synchronize render settings (eevee and cycles only)", description="Synchronize render settings (eevee and cycles only)",
default=True) default=False,
set=set_sync_render_settings,
get=get_sync_render_settings
)
sync_during_editmode: bpy.props.BoolProperty(
name="Edit mode updates",
description="Enable objects update in edit mode (! Impact performances !)",
default=False
)
sync_active_camera: bpy.props.BoolProperty(
name="Synchronize active camera",
description="Synchronize the active camera",
default=True,
get=get_sync_active_camera,
set=set_sync_active_camera
)
class SessionPrefs(bpy.types.AddonPreferences): class SessionPrefs(bpy.types.AddonPreferences):
@ -109,9 +174,9 @@ class SessionPrefs(bpy.types.AddonPreferences):
) )
ipc_port: bpy.props.IntProperty( ipc_port: bpy.props.IntProperty(
name="ipc_port", name="ipc_port",
description='internal ttl port(only usefull for multiple local instances)', description='internal ttl port(only useful for multiple local instances)',
default=5561, default=random.randrange(5570, 70000),
update=update_port update=update_port,
) )
init_method: bpy.props.EnumProperty( init_method: bpy.props.EnumProperty(
name='init_method', name='init_method',
@ -123,33 +188,80 @@ class SessionPrefs(bpy.types.AddonPreferences):
cache_directory: bpy.props.StringProperty( cache_directory: bpy.props.StringProperty(
name="cache directory", name="cache directory",
subtype="DIR_PATH", subtype="DIR_PATH",
default=environment.DEFAULT_CACHE_DIR) default=environment.DEFAULT_CACHE_DIR,
update=update_directory)
connection_timeout: bpy.props.IntProperty( connection_timeout: bpy.props.IntProperty(
name='connection timeout', name='connection timeout',
description='connection timeout before disconnection', description='connection timeout before disconnection',
default=1000 default=1000
) )
update_method: bpy.props.EnumProperty(
name='update method',
description='replication update method',
items=[
('DEFAULT', "Default", "Default: Use threads to monitor databloc changes"),
('DEPSGRAPH', "Depsgraph",
"Experimental: Use the blender dependency graph to trigger updates"),
],
)
# Replication update settings
depsgraph_update_rate: bpy.props.IntProperty(
name='depsgraph update rate',
description='Dependency graph uppdate rate (milliseconds)',
default=100
)
clear_memory_filecache: bpy.props.BoolProperty(
name="Clear memory filecache",
description="Remove filecache from memory",
default=False
)
# for UI # for UI
category: bpy.props.EnumProperty( category: bpy.props.EnumProperty(
name="Category", name="Category",
description="Preferences Category", description="Preferences Category",
items=[ items=[
('CONFIG', "Configuration", "Configuration about this add-on"), ('CONFIG', "Configuration", "Configuration of this add-on"),
('UPDATE', "Update", "Update this add-on"), ('UPDATE', "Update", "Update this add-on"),
], ],
default='CONFIG' default='CONFIG'
) )
# WIP
logging_level: bpy.props.EnumProperty( logging_level: bpy.props.EnumProperty(
name="Log level", name="Log level",
description="Log verbosity level", description="Log verbosity level",
items=[ items=[
('ERROR', "error", "show only errors"), ('ERROR', "error", "show only errors", logging.ERROR),
('WARNING', "warning", "only show warnings and errors"), ('WARNING', "warning", "only show warnings and errors", logging.WARNING),
('INFO', "info", "default level"), ('INFO', "info", "default level", logging.INFO),
('DEBUG', "debug", "show all logs"), ('DEBUG', "debug", "show all logs", logging.DEBUG),
], ],
default='INFO' default='INFO',
set=set_log_level,
get=get_log_level
)
presence_hud_scale: bpy.props.FloatProperty(
name="Text scale",
description="Adjust the session widget text scale",
min=7,
max=90,
default=15,
)
presence_hud_hpos: bpy.props.FloatProperty(
name="Horizontal position",
description="Adjust the session widget horizontal position",
min=1,
max=90,
default=3,
step=1,
subtype='PERCENTAGE',
)
presence_hud_vpos: bpy.props.FloatProperty(
name="Vertical position",
description="Adjust the session widget vertical position",
min=1,
max=94,
default=1,
step=1,
subtype='PERCENTAGE',
) )
conf_session_identity_expanded: bpy.props.BoolProperty( conf_session_identity_expanded: bpy.props.BoolProperty(
name="Identity", name="Identity",
@ -181,6 +293,26 @@ class SessionPrefs(bpy.types.AddonPreferences):
description="Interface", description="Interface",
default=False default=False
) )
sidebar_advanced_rep_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_rep_expanded",
description="sidebar_advanced_rep_expanded",
default=False
)
sidebar_advanced_log_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_log_expanded",
description="sidebar_advanced_log_expanded",
default=False
)
sidebar_advanced_net_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_net_expanded",
description="sidebar_advanced_net_expanded",
default=False
)
sidebar_advanced_cache_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_cache_expanded",
description="sidebar_advanced_cache_expanded",
default=False
)
auto_check_update: bpy.props.BoolProperty( auto_check_update: bpy.props.BoolProperty(
name="Auto-check for Update", name="Auto-check for Update",
@ -232,9 +364,9 @@ class SessionPrefs(bpy.types.AddonPreferences):
# USER INFORMATIONS # USER INFORMATIONS
box = grid.box() box = grid.box()
box.prop( box.prop(
self, "conf_session_identity_expanded", text="User informations", self, "conf_session_identity_expanded", text="User information",
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_identity_expanded icon=get_expanded_icon(self.conf_session_identity_expanded),
else 'DISCLOSURE_TRI_RIGHT', emboss=False) emboss=False)
if self.conf_session_identity_expanded: if self.conf_session_identity_expanded:
box.row().prop(self, "username", text="name") box.row().prop(self, "username", text="name")
box.row().prop(self, "client_color", text="color") box.row().prop(self, "client_color", text="color")
@ -242,24 +374,27 @@ class SessionPrefs(bpy.types.AddonPreferences):
# NETWORK SETTINGS # NETWORK SETTINGS
box = grid.box() box = grid.box()
box.prop( box.prop(
self, "conf_session_net_expanded", text="Netorking", self, "conf_session_net_expanded", text="Networking",
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_net_expanded icon=get_expanded_icon(self.conf_session_net_expanded),
else 'DISCLOSURE_TRI_RIGHT', emboss=False) emboss=False)
if self.conf_session_net_expanded: if self.conf_session_net_expanded:
box.row().prop(self, "ip", text="Address") box.row().prop(self, "ip", text="Address")
row = box.row() row = box.row()
row.label(text="Port:") row.label(text="Port:")
row.prop(self, "port", text="Address") row.prop(self, "port", text="")
row = box.row() row = box.row()
row.label(text="Init the session from:") row.label(text="Init the session from:")
row.prop(self, "init_method", text="") row.prop(self, "init_method", text="")
row = box.row()
row.label(text="Update method:")
row.prop(self, "update_method", text="")
table = box.box() table = box.box()
table.row().prop( table.row().prop(
self, "conf_session_timing_expanded", text="Refresh rates", self, "conf_session_timing_expanded", text="Refresh rates",
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_timing_expanded icon=get_expanded_icon(self.conf_session_timing_expanded),
else 'DISCLOSURE_TRI_RIGHT', emboss=False) emboss=False)
if self.conf_session_timing_expanded: if self.conf_session_timing_expanded:
line = table.row() line = table.row()
@ -277,8 +412,8 @@ class SessionPrefs(bpy.types.AddonPreferences):
box = grid.box() box = grid.box()
box.prop( box.prop(
self, "conf_session_hosting_expanded", text="Hosting", self, "conf_session_hosting_expanded", text="Hosting",
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_hosting_expanded icon=get_expanded_icon(self.conf_session_hosting_expanded),
else 'DISCLOSURE_TRI_RIGHT', emboss=False) emboss=False)
if self.conf_session_hosting_expanded: if self.conf_session_hosting_expanded:
row = box.row() row = box.row()
row.label(text="Init the session from:") row.label(text="Init the session from:")
@ -288,23 +423,33 @@ class SessionPrefs(bpy.types.AddonPreferences):
box = grid.box() box = grid.box()
box.prop( box.prop(
self, "conf_session_cache_expanded", text="Cache", self, "conf_session_cache_expanded", text="Cache",
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_cache_expanded icon=get_expanded_icon(self.conf_session_cache_expanded),
else 'DISCLOSURE_TRI_RIGHT', emboss=False) emboss=False)
if self.conf_session_cache_expanded: if self.conf_session_cache_expanded:
box.row().prop(self, "cache_directory", text="Cache directory") box.row().prop(self, "cache_directory", text="Cache directory")
box.row().prop(self, "clear_memory_filecache", text="Clear memory filecache")
# INTERFACE SETTINGS # INTERFACE SETTINGS
box = grid.box() box = grid.box()
box.prop( box.prop(
self, "conf_session_ui_expanded", text="Interface", self, "conf_session_ui_expanded", text="Interface",
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_ui_expanded else 'DISCLOSURE_TRI_RIGHT', icon=get_expanded_icon(self.conf_session_ui_expanded),
emboss=False) emboss=False)
if self.conf_session_ui_expanded: if self.conf_session_ui_expanded:
box.row().prop(self, "panel_category", text="Panel category", expand=True) box.row().prop(self, "panel_category", text="Panel category", expand=True)
row = box.row()
row.label(text="Session widget:")
col = box.column(align=True)
col.prop(self, "presence_hud_scale", expand=True)
col.prop(self, "presence_hud_hpos", expand=True)
col.prop(self, "presence_hud_vpos", expand=True)
if self.category == 'UPDATE': if self.category == 'UPDATE':
from . import addon_updater_ops from . import addon_updater_ops
addon_updater_ops.update_settings_ui_condensed(self, context) addon_updater_ops.update_settings_ui(self, context)
def generate_supported_types(self): def generate_supported_types(self):
self.supported_datablocks.clear() self.supported_datablocks.clear()
@ -331,10 +476,10 @@ def client_list_callback(scene, context):
items = [(RP_COMMON, RP_COMMON, "")] items = [(RP_COMMON, RP_COMMON, "")]
username = utils.get_preferences().username username = get_preferences().username
cli = operators.client
if cli: if session:
client_ids = cli.online_users.keys() client_ids = session.online_users.keys()
for id in client_ids: for id in client_ids:
name_desc = id name_desc = id
if id == username: if id == username:
@ -370,25 +515,26 @@ class SessionProps(bpy.types.PropertyGroup):
name="Presence overlay", name="Presence overlay",
description='Enable overlay drawing module', description='Enable overlay drawing module',
default=True, default=True,
update=presence.update_presence
) )
presence_show_selected: bpy.props.BoolProperty( presence_show_selected: bpy.props.BoolProperty(
name="Show selected objects", name="Show selected objects",
description='Enable selection overlay ', description='Enable selection overlay ',
default=True, default=True,
update=presence.update_overlay_settings
) )
presence_show_user: bpy.props.BoolProperty( presence_show_user: bpy.props.BoolProperty(
name="Show users", name="Show users",
description='Enable user overlay ', description='Enable user overlay ',
default=True, default=True,
update=presence.update_overlay_settings
) )
presence_show_far_user: bpy.props.BoolProperty( presence_show_far_user: bpy.props.BoolProperty(
name="Show users on different scenes", name="Show users on different scenes",
description="Show user on different scenes", description="Show user on different scenes",
default=False, default=False,
update=presence.update_overlay_settings )
presence_show_session_status: bpy.props.BoolProperty(
name="Show session status ",
description="Show session status on the viewport",
default=True,
) )
filter_owned: bpy.props.BoolProperty( filter_owned: bpy.props.BoolProperty(
name="filter_owned", name="filter_owned",

View File

@ -19,6 +19,8 @@
import copy import copy
import logging import logging
import math import math
import sys
import traceback
import bgl import bgl
import blf import blf
@ -27,13 +29,17 @@ import gpu
import mathutils import mathutils
from bpy_extras import view3d_utils from bpy_extras import view3d_utils
from gpu_extras.batch import batch_for_shader from gpu_extras.batch import batch_for_shader
from replication.constants import (STATE_ACTIVE, STATE_AUTH, STATE_CONFIG,
STATE_INITIAL, STATE_LAUNCHING_SERVICES,
STATE_LOBBY, STATE_QUITTING, STATE_SRV_SYNC,
STATE_SYNCING, STATE_WAITING)
from replication.interface import session
from . import utils from .utils import find_from_attr, get_state_str, get_preferences
renderer = None # Helper functions
def view3d_find() -> tuple:
def view3d_find():
""" Find the first 'VIEW_3D' windows found in areas """ Find the first 'VIEW_3D' windows found in areas
:return: tuple(Area, Region, RegionView3D) :return: tuple(Area, Region, RegionView3D)
@ -55,35 +61,48 @@ def refresh_3d_view():
if area and region and rv3d: if area and region and rv3d:
area.tag_redraw() area.tag_redraw()
def refresh_sidebar_view():
    """ Refresh the blender viewport sidebar
    """
    area, region, rv3d = view3d_find()

    # There may be no VIEW_3D area at all (e.g. background mode); the
    # unguarded version dereferenced `area` and raised AttributeError.
    if area:
        # NOTE(review): regions[3] is assumed to be the sidebar ('UI')
        # region — confirm the index against the area's region layout.
        area.regions[3].tag_redraw()
def project_to_viewport(region: "bpy.types.Region", rv3d: "bpy.types.RegionView3D", coords: list, distance: float = 1.0) -> list:
    """ Compute a projection from 2D to 3D viewport coordinate

        :param region: target window region
        :type region: bpy.types.Region
        :param rv3d: view 3D
        :type rv3d: bpy.types.RegionView3D
        :param coords: coordinate to project
        :type coords: list
        :param distance: distance offset into viewport
        :type distance: float
        :return: list of coordinates [x, y, z]
    """
    if not (coords and region and rv3d):
        # Fallback fix: the previous code initialized `target` to a plain
        # list and then crashed on `target.x` (lists have no .x attribute).
        return [0.0, 0.0, 0.0]

    view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coords)
    ray_origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coords)
    target = ray_origin + view_vector * distance

    return [target.x, target.y, target.z]
def get_default_bbox(obj, radius):
def bbox_from_obj(obj: bpy.types.Object, radius: float) -> list:
""" Generate a bounding box for a given object by using its world matrix
:param obj: target object
:type obj: bpy.types.Object
:param radius: bounding box radius
:type radius: float
:return: list of 8 points [(x,y,z),...]
"""
coords = [ coords = [
(-radius, -radius, -radius), (+radius, -radius, -radius), (-radius, -radius, -radius), (+radius, -radius, -radius),
(-radius, +radius, -radius), (+radius, +radius, -radius), (-radius, +radius, -radius), (+radius, +radius, -radius),
@ -91,264 +110,384 @@ def get_default_bbox(obj, radius):
(-radius, +radius, +radius), (+radius, +radius, +radius)] (-radius, +radius, +radius), (+radius, +radius, +radius)]
base = obj.matrix_world base = obj.matrix_world
bbox_corners = [base @ mathutils.Vector(corner) for corner in coords] bbox_corners = [base @ mathutils.Vector(corner) for corner in coords]
return [(point.x, point.y, point.z) return [(point.x, point.y, point.z)
for point in bbox_corners] for point in bbox_corners]
def generate_user_camera() -> list:
    """ Generate a basic camera representation of the user point of view

        :return: list of 7 points [corner1, corner2, corner3, corner4,
                 center, view_location, far point]
    """
    area, region, rv3d = view3d_find()

    # Give every point its own list: a single shared default would let an
    # in-place edit of one point silently corrupt all seven.
    v1, v2, v3, v4, v5, v6, v7 = ([0, 0, 0] for _ in range(7))

    if area and region and rv3d:
        width = region.width
        height = region.height

        v1 = project_to_viewport(region, rv3d, (0, 0))
        v3 = project_to_viewport(region, rv3d, (0, height))
        v2 = project_to_viewport(region, rv3d, (width, height))
        v4 = project_to_viewport(region, rv3d, (width, 0))
        v5 = project_to_viewport(region, rv3d, (width/2, height/2))
        v6 = list(rv3d.view_location)
        v7 = project_to_viewport(
            region, rv3d, (width/2, height/2), distance=-.8)

    return [v1, v2, v3, v4, v5, v6, v7]
def project_to_screen(coords: list) -> list:
    """ Project 3D coordinate to 2D screen coordinates

        :param coords: 3D coordinates (x,y,z)
        :type coords: list
        :return: 2D coordinates [x, y]; note the fallback when no VIEW_3D
            region is available is the tuple (0, 0), which is truthy —
            callers testing `if coords:` will still draw at the origin.
    """
    area, region, rv3d = view3d_find()
    if area and region and rv3d:
        return view3d_utils.location_3d_to_region_2d(region, rv3d, coords)
    else:
        return (0, 0)
def get_bb_coords_from_obj(object, parent=None):
base = object.matrix_world if parent is None else parent.matrix_world def get_bb_coords_from_obj(object: bpy.types.Object, instance: bpy.types.Object = None) -> list:
""" Generate bounding box in world coordinate from object bound box
:param object: target object
:type object: bpy.types.Object
:param instance: optionnal instance
:type instance: bpy.types.Object
:return: list of 8 points [(x,y,z),...]
"""
base = object.matrix_world
if instance:
scale = mathutils.Matrix.Diagonal(object.matrix_world.to_scale())
base = instance.matrix_world @ scale.to_4x4()
bbox_corners = [base @ mathutils.Vector( bbox_corners = [base @ mathutils.Vector(
corner) for corner in object.bound_box] corner) for corner in object.bound_box]
return [(point.x, point.y, point.z) return [(point.x, point.y, point.z) for point in bbox_corners]
for point in bbox_corners]
def get_view_matrix(): def get_view_matrix() -> list:
""" Return the 3d viewport view matrix
:return: view matrix as a 4x4 list
"""
area, region, rv3d = view3d_find() area, region, rv3d = view3d_find()
if area and region and rv3d: if area and region and rv3d:
return [list(v) for v in rv3d.view_matrix] return [list(v) for v in rv3d.view_matrix]
def update_presence(self, context):
global renderer
class Widget(object):
    """ Base class to define an interface element
    """
    # Draw pass this widget belongs to: 'POST_VIEW' (3D space) or
    # 'POST_PIXEL' (screen space).
    draw_type: str = 'POST_VIEW'  # Draw event type

    def poll(self) -> bool:
        """Test if the widget can be drawn or not

        :return: bool
        """
        return True

    def draw(self):
        """How to draw the widget
        """
        raise NotImplementedError()
class UserFrustumWidget(Widget):
# Camera widget indices
indices = ((1, 3), (2, 1), (3, 0),
(2, 0), (4, 5), (1, 6),
(2, 6), (3, 6), (0, 6))
def __init__(
self,
username):
self.username = username
self.settings = bpy.context.window_manager.session
@property
def data(self):
user = session.online_users.get(self.username)
if user:
return user.get('metadata')
else: else:
renderer.stop() return None
def poll(self):
if self.data is None:
return False
def update_overlay_settings(self, context): scene_current = self.data.get('scene_current')
global renderer view_corners = self.data.get('view_corners')
if renderer and not self.presence_show_selected: return (scene_current == bpy.context.scene.name or
renderer.flush_selection() self.settings.presence_show_far_user) and \
if renderer and not self.presence_show_user: view_corners and \
renderer.flush_users() self.settings.presence_show_user and \
self.settings.enable_presence
def draw(self):
class DrawFactory(object): location = self.data.get('view_corners')
def __init__(self):
self.d3d_items = {}
self.d2d_items = {}
self.draw3d_handle = None
self.draw2d_handle = None
self.draw_event = None
self.coords = None
self.active_object = None
def run(self):
self.register_handlers()
def stop(self):
self.flush_users()
self.flush_selection()
self.unregister_handlers()
refresh_3d_view()
def register_handlers(self):
self.draw3d_handle = bpy.types.SpaceView3D.draw_handler_add(
self.draw3d_callback, (), 'WINDOW', 'POST_VIEW')
self.draw2d_handle = bpy.types.SpaceView3D.draw_handler_add(
self.draw2d_callback, (), 'WINDOW', 'POST_PIXEL')
def unregister_handlers(self):
if self.draw2d_handle:
bpy.types.SpaceView3D.draw_handler_remove(
self.draw2d_handle, "WINDOW")
self.draw2d_handle = None
if self.draw3d_handle:
bpy.types.SpaceView3D.draw_handler_remove(
self.draw3d_handle, "WINDOW")
self.draw3d_handle = None
self.d3d_items.clear()
self.d2d_items.clear()
def flush_selection(self, user=None):
key_to_remove = []
select_key = f"{user}_select" if user else "select"
for k in self.d3d_items.keys():
if select_key in k:
key_to_remove.append(k)
for k in key_to_remove:
del self.d3d_items[k]
def flush_users(self):
key_to_remove = []
for k in self.d3d_items.keys():
if "select" not in k:
key_to_remove.append(k)
for k in key_to_remove:
del self.d3d_items[k]
self.d2d_items.clear()
def draw_client_selection(self, client_id, client_color, client_selection):
local_user = utils.get_preferences().username
if local_user != client_id:
self.flush_selection(client_id)
for select_ob in client_selection:
drawable_key = f"{client_id}_select_{select_ob}"
ob = utils.find_from_attr("uuid", select_ob, bpy.data.objects)
if not ob:
return
if ob.type == 'EMPTY':
# TODO: Child case
# Collection instance case
indices = (
(0, 1), (1, 2), (2, 3), (0, 3),
(4, 5), (5, 6), (6, 7), (4, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
if ob.instance_collection:
for obj in ob.instance_collection.objects:
if obj.type == 'MESH':
self.append_3d_item(
drawable_key,
client_color,
get_bb_coords_from_obj(obj, parent=ob),
indices)
if ob.type in ['MESH','META']:
indices = (
(0, 1), (1, 2), (2, 3), (0, 3),
(4, 5), (5, 6), (6, 7), (4, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
self.append_3d_item(
drawable_key,
client_color,
get_bb_coords_from_obj(ob),
indices)
else:
indices = (
(0, 1), (0, 2), (1, 3), (2, 3),
(4, 5), (4, 6), (5, 7), (6, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
self.append_3d_item(
drawable_key,
client_color,
get_default_bbox(ob, ob.scale.x),
indices)
def append_3d_item(self,key,color, coords, indices):
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR') shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
color = color positions = [tuple(coord) for coord in location]
if len(positions) != 7:
return
batch = batch_for_shader( batch = batch_for_shader(
shader, 'LINES', {"pos": coords}, indices=indices) shader,
'LINES',
{"pos": positions},
indices=self.indices)
self.d3d_items[key] = (shader, batch, color) bgl.glLineWidth(2.)
def draw_client_camera(self, client_id, client_location, client_color):
if client_location:
local_user = utils.get_preferences().username
if local_user != client_id:
try:
indices = (
(1, 3), (2, 1), (3, 0),
(2, 0), (4, 5), (1, 6),
(2, 6), (3, 6), (0, 6)
)
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
position = [tuple(coord) for coord in client_location]
color = client_color
batch = batch_for_shader(
shader, 'LINES', {"pos": position}, indices=indices)
self.d3d_items[client_id] = (shader, batch, color)
self.d2d_items[client_id] = (position[1], client_id, color)
except Exception as e:
logging.error(f"Draw client exception: {e}")
def draw3d_callback(self):
bgl.glLineWidth(1.5)
bgl.glEnable(bgl.GL_DEPTH_TEST) bgl.glEnable(bgl.GL_DEPTH_TEST)
bgl.glEnable(bgl.GL_BLEND) bgl.glEnable(bgl.GL_BLEND)
bgl.glEnable(bgl.GL_LINE_SMOOTH) bgl.glEnable(bgl.GL_LINE_SMOOTH)
shader.bind()
shader.uniform_float("color", self.data.get('color'))
batch.draw(shader)
class UserSelectionWidget(Widget):
    """3D-space widget outlining the objects selected by a remote user.

    Drawn in the POST_VIEW pass with the remote user's color.
    """

    def __init__(
            self,
            username):
        self.username = username
        self.settings = bpy.context.window_manager.session

    @property
    def data(self):
        # Metadata published by the remote client
        # (selected_objects, scene_current, color, ...); None when offline.
        user = session.online_users.get(self.username)
        if user:
            return user.get('metadata')
        else:
            return None

    def poll(self):
        """Draw only when the user has a selection, shares the scene (or far
        users are shown), and selection display is enabled."""
        if self.data is None:
            return False

        user_selection = self.data.get('selected_objects')
        scene_current = self.data.get('scene_current')
        return (scene_current == bpy.context.scene.name or
                self.settings.presence_show_far_user) and \
            user_selection and \
            self.settings.presence_show_selected and \
            self.settings.enable_presence

    def draw(self):
        """Draw a colored wireframe bounding box around each selected object."""
        user_selection = self.data.get('selected_objects')
        for select_ob in user_selection:
            # Objects are referenced by uuid in the session metadata.
            ob = find_from_attr("uuid", select_ob, bpy.data.objects)
            if not ob:
                # NOTE(review): this aborts drawing for ALL remaining selected
                # objects as soon as one uuid fails to resolve — `continue`
                # may be the intended behavior; confirm before changing.
                return

            # Default: unit box; refined below per object type.
            vertex_pos = bbox_from_obj(ob, 1.0)
            vertex_indices = ((0, 1), (0, 2), (1, 3), (2, 3),
                              (4, 5), (4, 6), (5, 7), (6, 7),
                              (0, 4), (1, 5), (2, 6), (3, 7))

            if ob.instance_collection:
                # Collection instance: use the first mesh member's bound box.
                for obj in ob.instance_collection.objects:
                    if obj.type == 'MESH' and hasattr(obj, 'bound_box'):
                        vertex_pos = get_bb_coords_from_obj(obj, instance=ob)
                        break
            elif ob.type == 'EMPTY':
                vertex_pos = bbox_from_obj(ob, ob.empty_display_size)
            elif ob.type == 'LIGHT':
                vertex_pos = bbox_from_obj(ob, ob.data.shadow_soft_size)
            elif ob.type == 'LIGHT_PROBE':
                vertex_pos = bbox_from_obj(ob, ob.data.influence_distance)
            elif ob.type == 'CAMERA':
                vertex_pos = bbox_from_obj(ob, ob.data.display_size)
            elif hasattr(ob, 'bound_box'):
                # Regular geometry: use the object's own bound box (note the
                # edge indices differ from the radius-box layout above).
                vertex_indices = (
                    (0, 1), (1, 2), (2, 3), (0, 3),
                    (4, 5), (5, 6), (6, 7), (4, 7),
                    (0, 4), (1, 5), (2, 6), (3, 7))
                vertex_pos = get_bb_coords_from_obj(ob)

            shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
            batch = batch_for_shader(
                shader,
                'LINES',
                {"pos": vertex_pos},
                indices=vertex_indices)

            shader.bind()
            shader.uniform_float("color", self.data.get('color'))
            batch.draw(shader)
class UserNameWidget(Widget):
    """Screen-space label drawing a remote user's name in the viewport."""
    draw_type = 'POST_PIXEL'

    def __init__(
            self,
            username):
        self.username = username
        self.settings = bpy.context.window_manager.session

    @property
    def data(self):
        # Metadata published by the remote client
        # (view_corners, scene_current, color, ...); None when offline.
        user = session.online_users.get(self.username)
        if user:
            return user.get('metadata')
        else:
            return None

    def poll(self):
        """Draw only when the user's camera is known, the scene is shared
        (or far users are shown), and user presence display is enabled."""
        if self.data is None:
            return False

        scene_current = self.data.get('scene_current')
        view_corners = self.data.get('view_corners')
        return (scene_current == bpy.context.scene.name or
                self.settings.presence_show_far_user) and \
            view_corners and \
            self.settings.presence_show_user and \
            self.settings.enable_presence

    def draw(self):
        """Project one camera corner to screen space and draw the username."""
        view_corners = self.data.get('view_corners')
        color = self.data.get('color')
        position = [tuple(coord) for coord in view_corners]
        # NOTE(review): position[1] is assumed to be a corner near the user's
        # view — confirm against generate_user_camera's point ordering.
        coords = project_to_screen(position[1])

        if coords:
            blf.position(0, coords[0], coords[1]+10, 0)
            blf.size(0, 16, 72)
            blf.color(0, color[0], color[1], color[2], color[3])
            blf.draw(0, self.username)
class SessionStatusWidget(Widget):
    """Screen-space HUD showing the current session state.

    Position and text scale come from the add-on preferences
    (presence_hud_* settings).
    """
    draw_type = 'POST_PIXEL'

    def __init__(self):
        # Add-on preferences, used for HUD scale/position.
        self.preferences = get_preferences()

    @property
    def settings(self):
        # Session UI settings; None when the property group is absent.
        return getattr(bpy.context.window_manager, 'session', None)

    def poll(self):
        # Draw only when presence is enabled and the status HUD is toggled on.
        return self.settings and self.settings.presence_show_session_status and \
            self.settings.enable_presence

    def draw(self):
        """Render the session state string with a state-dependent color."""
        text_scale = self.preferences.presence_hud_scale
        ui_scale = bpy.context.preferences.view.ui_scale
        color = [1, 1, 0, 1]  # default: yellow for transient states
        state = session.state.get('STATE')
        state_str = f"{get_state_str(state)}"

        if state == STATE_ACTIVE:
            color = [0, 1, 0, 1]  # green: online
        elif state == STATE_INITIAL:
            color = [1, 0, 0, 1]  # red: not connected
        # HUD position expressed as a percentage of the current area size.
        hpos = (self.preferences.presence_hud_hpos*bpy.context.area.width)/100
        vpos = (self.preferences.presence_hud_vpos*bpy.context.area.height)/100

        blf.position(0, hpos, vpos, 0)
        blf.size(0, int(text_scale*ui_scale), 72)
        blf.color(0, color[0], color[1], color[2], color[3])
        blf.draw(0, state_str)
class DrawFactory(object):
def __init__(self):
    # Handles returned by SpaceView3D.draw_handler_add; kept so the
    # callbacks can be removed again in unregister_handlers().
    self.post_view_handle = None
    self.post_pixel_handle = None
    # name -> Widget instance; iterated by the two draw callbacks.
    self.widgets = {}
def add_widget(self, name: str, widget: Widget):
    """Register (or replace) a drawable widget under *name*."""
    self.widgets[name] = widget
def remove_widget(self, name: str):
if name in self.widgets:
del self.widgets[name]
else:
logging.error(f"Widget {name} not existing")
def clear_widgets(self):
    """Drop every registered widget."""
    self.widgets.clear()
def register_handlers(self):
    """Install the two viewport draw callbacks (3D and screen space)."""
    self.post_view_handle = bpy.types.SpaceView3D.draw_handler_add(
        self.post_view_callback,
        (),
        'WINDOW',
        'POST_VIEW')
    self.post_pixel_handle = bpy.types.SpaceView3D.draw_handler_add(
        self.post_pixel_callback,
        (),
        'WINDOW',
        'POST_PIXEL')
def unregister_handlers(self):
    """Remove the draw callbacks; safe to call when none are installed."""
    if self.post_pixel_handle:
        bpy.types.SpaceView3D.draw_handler_remove(
            self.post_pixel_handle,
            "WINDOW")
        # Reset so a second call does not remove a stale handle.
        self.post_pixel_handle = None

    if self.post_view_handle:
        bpy.types.SpaceView3D.draw_handler_remove(
            self.post_view_handle,
            "WINDOW")
        self.post_view_handle = None
def post_view_callback(self):
try: try:
for shader, batch, color in self.d3d_items.values(): for widget in self.widgets.values():
shader.bind() if widget.draw_type == 'POST_VIEW' and widget.poll():
shader.uniform_float("color", color) widget.draw()
batch.draw(shader) except Exception as e:
except Exception: logging.error(
logging.error("3D Exception") f"Post view widget exception: {e} \n {traceback.print_exc()}")
def draw2d_callback(self): def post_pixel_callback(self):
for position, font, color in self.d2d_items.values(): try:
try: for widget in self.widgets.values():
coords = get_client_2d(position) if widget.draw_type == 'POST_PIXEL' and widget.poll():
widget.draw()
except Exception as e:
logging.error(
f"Post pixel widget Exception: {e} \n {traceback.print_exc()}")
if coords:
blf.position(0, coords[0], coords[1]+10, 0)
blf.size(0, 16, 72)
blf.color(0, color[0], color[1], color[2], color[3])
blf.draw(0, font)
except Exception: this = sys.modules[__name__]
logging.error("2D EXCEPTION") this.renderer = DrawFactory()
def register(): def register():
global renderer this.renderer.register_handlers()
renderer = DrawFactory()
this.renderer.add_widget("session_status", SessionStatusWidget())
def unregister(): def unregister():
global renderer this.renderer.unregister_handlers()
renderer.unregister_handlers()
del renderer this.renderer.clear_widgets()

View File

@ -18,8 +18,8 @@
import bpy import bpy
from . import operators, utils from .utils import get_preferences, get_expanded_icon, get_folder_size, get_state_str
from .libs.replication.replication.constants import (ADDED, ERROR, FETCHED, from replication.constants import (ADDED, ERROR, FETCHED,
MODIFIED, RP_COMMON, UP, MODIFIED, RP_COMMON, UP,
STATE_ACTIVE, STATE_AUTH, STATE_ACTIVE, STATE_AUTH,
STATE_CONFIG, STATE_SYNCING, STATE_CONFIG, STATE_SYNCING,
@ -27,13 +27,16 @@ from .libs.replication.replication.constants import (ADDED, ERROR, FETCHED,
STATE_WAITING, STATE_QUITTING, STATE_WAITING, STATE_QUITTING,
STATE_LOBBY, STATE_LOBBY,
STATE_LAUNCHING_SERVICES) STATE_LAUNCHING_SERVICES)
from replication import __version__
from replication.interface import session
ICONS_PROP_STATES = ['TRIA_DOWN', # ADDED ICONS_PROP_STATES = ['TRIA_DOWN', # ADDED
'TRIA_UP', # COMMITED 'TRIA_UP', # COMMITED
'KEYTYPE_KEYFRAME_VEC', # PUSHED 'KEYTYPE_KEYFRAME_VEC', # PUSHED
'TRIA_DOWN', # FETCHED 'TRIA_DOWN', # FETCHED
'FILE_REFRESH', # UP 'RECOVER_LAST', # RESET
'TRIA_UP'] # CHANGED 'TRIA_UP', # CHANGED
'ERROR'] # ERROR
def printProgressBar(iteration, total, prefix='', suffix='', decimals=1, length=100, fill='', fill_empty=' '):
    """Build a textual progress bar string.

    From here:
    https://gist.github.com/greenstick/b23e475d2bfdc3a82e34eaa1f6781ee4

    Returns '' when *total* is 0 so callers never divide by zero.
    """
    if total == 0:
        return ""
    filledLength = int(length * iteration // total)
    bar = fill * filledLength + fill_empty * (length - filledLength)
    return f"{prefix} |{bar}| {iteration}/{total}{suffix}"
def get_state_str(state):
state_str = 'UNKNOWN'
if state == STATE_WAITING:
state_str = 'WARMING UP DATA'
elif state == STATE_SYNCING:
state_str = 'FETCHING'
elif state == STATE_AUTH:
state_str = 'AUTHENTIFICATION'
elif state == STATE_CONFIG:
state_str = 'CONFIGURATION'
elif state == STATE_ACTIVE:
state_str = 'ONLINE'
elif state == STATE_SRV_SYNC:
state_str = 'PUSHING'
elif state == STATE_INITIAL:
state_str = 'INIT'
elif state == STATE_QUITTING:
state_str = 'QUITTING'
elif state == STATE_LAUNCHING_SERVICES:
state_str = 'LAUNCHING SERVICES'
elif state == STATE_LOBBY:
state_str = 'LOBBY'
return state_str
class SESSION_PT_settings(bpy.types.Panel): class SESSION_PT_settings(bpy.types.Panel):
"""Settings panel""" """Settings panel"""
bl_idname = "MULTIUSER_SETTINGS_PT_panel" bl_idname = "MULTIUSER_SETTINGS_PT_panel"
bl_label = "" bl_label = " "
bl_space_type = 'VIEW_3D' bl_space_type = 'VIEW_3D'
bl_region_type = 'UI' bl_region_type = 'UI'
bl_category = "Multiuser" bl_category = "Multiuser"
def draw_header(self, context): def draw_header(self, context):
layout = self.layout layout = self.layout
if operators.client and operators.client.state['STATE'] != STATE_INITIAL: if session and session.state['STATE'] != STATE_INITIAL:
cli_state = operators.client.state cli_state = session.state
state = operators.client.state.get('STATE') state = session.state.get('STATE')
connection_icon = "KEYTYPE_MOVING_HOLD_VEC" connection_icon = "KEYTYPE_MOVING_HOLD_VEC"
if state == STATE_ACTIVE: if state == STATE_ACTIVE:
@ -103,72 +82,54 @@ class SESSION_PT_settings(bpy.types.Panel):
layout.label(text=f"Session - {get_state_str(cli_state['STATE'])}", icon=connection_icon) layout.label(text=f"Session - {get_state_str(cli_state['STATE'])}", icon=connection_icon)
else: else:
layout.label(text="Session",icon="PROP_OFF") layout.label(text=f"Session - v{__version__}",icon="PROP_OFF")
def draw(self, context): def draw(self, context):
layout = self.layout layout = self.layout
layout.use_property_split = True layout.use_property_split = True
row = layout.row() row = layout.row()
runtime_settings = context.window_manager.session runtime_settings = context.window_manager.session
settings = utils.get_preferences() settings = get_preferences()
if hasattr(context.window_manager, 'session'): if hasattr(context.window_manager, 'session'):
# STATE INITIAL # STATE INITIAL
if not operators.client \ if not session \
or (operators.client and operators.client.state['STATE'] == STATE_INITIAL): or (session and session.state['STATE'] == STATE_INITIAL):
pass pass
else: else:
cli_state = operators.client.state cli_state = session.state
row = layout.row() row = layout.row()
current_state = cli_state['STATE'] current_state = cli_state['STATE']
info_msg = None
# STATE ACTIVE if current_state in [STATE_ACTIVE]:
if current_state in [STATE_ACTIVE, STATE_LOBBY]: row = row.split(factor=0.3)
row.operator("session.stop", icon='QUIT', text="Exit") row.prop(settings.sync_flags, "sync_render_settings",text="",icon_only=True, icon='SCENE')
row = layout.row() row.prop(settings.sync_flags, "sync_during_editmode", text="",icon_only=True, icon='EDITMODE_HLT')
if runtime_settings.is_host: row.prop(settings.sync_flags, "sync_active_camera", text="",icon_only=True, icon='OBJECT_DATAMODE')
row = row.box()
row.label(text=f"{runtime_settings.internet_ip}:{settings.port}", icon='INFO') row= layout.row()
row = layout.row()
# CONNECTION STATE if current_state in [STATE_ACTIVE] and runtime_settings.is_host:
elif current_state in [STATE_SRV_SYNC, info_msg = f"LAN: {runtime_settings.internet_ip}"
STATE_SYNCING, if current_state == STATE_LOBBY:
STATE_AUTH, info_msg = "Waiting for the session to start."
STATE_CONFIG,
STATE_WAITING]:
if cli_state['STATE'] in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]: if info_msg:
box = row.box() info_box = row.box()
box.label(text=printProgressBar( info_box.row().label(text=info_msg,icon='INFO')
cli_state['CURRENT'],
cli_state['TOTAL'],
length=16
))
row = layout.row() # Progress bar
row.operator("session.stop", icon='QUIT', text="CANCEL") if current_state in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]:
elif current_state == STATE_QUITTING: info_box = row.box()
row = layout.row() info_box.row().label(text=printProgressBar(
box = row.box() cli_state['CURRENT'],
cli_state['TOTAL'],
num_online_services = 0
for name, state in operators.client.services_state.items():
if state == STATE_ACTIVE:
num_online_services += 1
total_online_services = len(
operators.client.services_state)
box.label(text=printProgressBar(
total_online_services-num_online_services,
total_online_services,
length=16 length=16
)) ))
layout.row().operator("session.stop", icon='QUIT', text="Exit")
class SESSION_PT_settings_network(bpy.types.Panel): class SESSION_PT_settings_network(bpy.types.Panel):
bl_idname = "MULTIUSER_SETTINGS_NETWORK_PT_panel" bl_idname = "MULTIUSER_SETTINGS_NETWORK_PT_panel"
@ -179,8 +140,8 @@ class SESSION_PT_settings_network(bpy.types.Panel):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return not operators.client \ return not session \
or (operators.client and operators.client.state['STATE'] == 0) or (session and session.state['STATE'] == 0)
def draw_header(self, context): def draw_header(self, context):
self.layout.label(text="", icon='URL') self.layout.label(text="", icon='URL')
@ -189,7 +150,7 @@ class SESSION_PT_settings_network(bpy.types.Panel):
layout = self.layout layout = self.layout
runtime_settings = context.window_manager.session runtime_settings = context.window_manager.session
settings = utils.get_preferences() settings = get_preferences()
# USER SETTINGS # USER SETTINGS
row = layout.row() row = layout.row()
@ -237,8 +198,8 @@ class SESSION_PT_settings_user(bpy.types.Panel):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return not operators.client \ return not session \
or (operators.client and operators.client.state['STATE'] == 0) or (session and session.state['STATE'] == 0)
def draw_header(self, context): def draw_header(self, context):
self.layout.label(text="", icon='USER') self.layout.label(text="", icon='USER')
@ -247,7 +208,7 @@ class SESSION_PT_settings_user(bpy.types.Panel):
layout = self.layout layout = self.layout
runtime_settings = context.window_manager.session runtime_settings = context.window_manager.session
settings = utils.get_preferences() settings = get_preferences()
row = layout.row() row = layout.row()
# USER SETTINGS # USER SETTINGS
@ -268,8 +229,8 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return not operators.client \ return not session \
or (operators.client and operators.client.state['STATE'] == 0) or (session and session.state['STATE'] == 0)
def draw_header(self, context): def draw_header(self, context):
self.layout.label(text="", icon='PREFERENCES') self.layout.label(text="", icon='PREFERENCES')
@ -278,44 +239,107 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
layout = self.layout layout = self.layout
runtime_settings = context.window_manager.session runtime_settings = context.window_manager.session
settings = utils.get_preferences() settings = get_preferences()
net_section = layout.row().box() net_section = layout.row().box()
net_section.label(text="Network ", icon='TRIA_DOWN') net_section.prop(
net_section_row = net_section.row() settings,
net_section_row.label(text="IPC Port:") "sidebar_advanced_net_expanded",
net_section_row.prop(settings, "ipc_port", text="") text="Network",
net_section_row = net_section.row() icon=get_expanded_icon(settings.sidebar_advanced_net_expanded),
net_section_row.label(text="Timeout (ms):") emboss=False)
net_section_row.prop(settings, "connection_timeout", text="")
if settings.sidebar_advanced_net_expanded:
net_section_row = net_section.row()
net_section_row.label(text="IPC Port:")
net_section_row.prop(settings, "ipc_port", text="")
net_section_row = net_section.row()
net_section_row.label(text="Timeout (ms):")
net_section_row.prop(settings, "connection_timeout", text="")
replication_section = layout.row().box() replication_section = layout.row().box()
replication_section.label(text="Replication ", icon='TRIA_DOWN') replication_section.prop(
replication_section_row = replication_section.row() settings,
if runtime_settings.session_mode == 'HOST': "sidebar_advanced_rep_expanded",
text="Replication",
icon=get_expanded_icon(settings.sidebar_advanced_rep_expanded),
emboss=False)
if settings.sidebar_advanced_rep_expanded:
replication_section_row = replication_section.row()
replication_section_row.label(text="Sync flags", icon='COLLECTION_NEW')
replication_section_row = replication_section.row()
replication_section_row.prop(settings.sync_flags, "sync_render_settings") replication_section_row.prop(settings.sync_flags, "sync_render_settings")
replication_section_row = replication_section.row()
replication_section_row.prop(settings.sync_flags, "sync_active_camera")
replication_section_row = replication_section.row()
replication_section_row = replication_section.row() replication_section_row.prop(settings.sync_flags, "sync_during_editmode")
replication_section_row.label(text="Per data type timers:") replication_section_row = replication_section.row()
replication_section_row = replication_section.row() if settings.sync_flags.sync_during_editmode:
# Replication frequencies warning = replication_section_row.box()
flow = replication_section_row .grid_flow( warning.label(text="Don't use this with heavy meshes !", icon='ERROR')
row_major=True, columns=0, even_columns=True, even_rows=False, align=True) replication_section_row = replication_section.row()
line = flow.row(align=True)
line.label(text=" ")
line.separator()
line.label(text="refresh (sec)")
line.label(text="apply (sec)")
for item in settings.supported_datablocks: replication_section_row.label(text="Update method", icon='RECOVER_LAST')
line = flow.row(align=True) replication_section_row = replication_section.row()
line.prop(item, "auto_push", text="", icon=item.icon) replication_section_row.prop(settings, "update_method", expand=True)
line.separator() replication_section_row = replication_section.row()
line.prop(item, "bl_delay_refresh", text="") replication_timers = replication_section_row.box()
line.prop(item, "bl_delay_apply", text="") replication_timers.label(text="Replication timers", icon='TIME')
if settings.update_method == "DEFAULT":
replication_timers = replication_timers.row()
# Replication frequencies
flow = replication_timers.grid_flow(
row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
line = flow.row(align=True)
line.label(text=" ")
line.separator()
line.label(text="refresh (sec)")
line.label(text="apply (sec)")
for item in settings.supported_datablocks:
line = flow.row(align=True)
line.prop(item, "auto_push", text="", icon=item.icon)
line.separator()
line.prop(item, "bl_delay_refresh", text="")
line.prop(item, "bl_delay_apply", text="")
else:
replication_timers = replication_timers.row()
replication_timers.label(text="Update rate (ms):")
replication_timers.prop(settings, "depsgraph_update_rate", text="")
cache_section = layout.row().box()
cache_section.prop(
settings,
"sidebar_advanced_cache_expanded",
text="Cache",
icon=get_expanded_icon(settings.sidebar_advanced_cache_expanded),
emboss=False)
if settings.sidebar_advanced_cache_expanded:
cache_section_row = cache_section.row()
cache_section_row.label(text="Cache directory:")
cache_section_row = cache_section.row()
cache_section_row.prop(settings, "cache_directory", text="")
cache_section_row = cache_section.row()
cache_section_row.label(text="Clear memory filecache:")
cache_section_row.prop(settings, "clear_memory_filecache", text="")
cache_section_row = cache_section.row()
cache_section_row.operator('session.clear_cache', text=f"Clear cache ({get_folder_size(settings.cache_directory)})")
log_section = layout.row().box()
log_section.prop(
settings,
"sidebar_advanced_log_expanded",
text="Logging",
icon=get_expanded_icon(settings.sidebar_advanced_log_expanded),
emboss=False)
if settings.sidebar_advanced_log_expanded:
log_section_row = log_section.row()
log_section_row.label(text="Log level:")
log_section_row.prop(settings, 'logging_level', text="")
class SESSION_PT_user(bpy.types.Panel): class SESSION_PT_user(bpy.types.Panel):
bl_idname = "MULTIUSER_USER_PT_panel" bl_idname = "MULTIUSER_USER_PT_panel"
bl_label = "Online users" bl_label = "Online users"
@ -325,7 +349,7 @@ class SESSION_PT_user(bpy.types.Panel):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return operators.client and operators.client.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY] return session and session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]
def draw_header(self, context): def draw_header(self, context):
self.layout.label(text="", icon='USER') self.layout.label(text="", icon='USER')
@ -334,7 +358,7 @@ class SESSION_PT_user(bpy.types.Panel):
layout = self.layout layout = self.layout
online_users = context.window_manager.online_users online_users = context.window_manager.online_users
selected_user = context.window_manager.user_index selected_user = context.window_manager.user_index
settings = utils.get_preferences() settings = get_preferences()
active_user = online_users[selected_user] if len( active_user = online_users[selected_user] if len(
online_users)-1 >= selected_user else 0 online_users)-1 >= selected_user else 0
runtime_settings = context.window_manager.session runtime_settings = context.window_manager.session
@ -356,19 +380,21 @@ class SESSION_PT_user(bpy.types.Panel):
if active_user != 0 and active_user.username != settings.username: if active_user != 0 and active_user.username != settings.username:
row = layout.row() row = layout.row()
user_operations = row.split() user_operations = row.split()
user_operations.alert = context.window_manager.session.time_snap_running if session.state['STATE'] == STATE_ACTIVE:
user_operations.operator(
"session.snapview", user_operations.alert = context.window_manager.session.time_snap_running
text="", user_operations.operator(
icon='VIEW_CAMERA').target_client = active_user.username "session.snapview",
text="",
icon='VIEW_CAMERA').target_client = active_user.username
user_operations.alert = context.window_manager.session.user_snap_running user_operations.alert = context.window_manager.session.user_snap_running
user_operations.operator( user_operations.operator(
"session.snaptime", "session.snaptime",
text="", text="",
icon='TIME').target_client = active_user.username icon='TIME').target_client = active_user.username
if operators.client.online_users[settings.username]['admin']: if session.online_users[settings.username]['admin']:
user_operations.operator( user_operations.operator(
"session.kick", "session.kick",
text="", text="",
@ -377,8 +403,7 @@ class SESSION_PT_user(bpy.types.Panel):
class SESSION_UL_users(bpy.types.UIList): class SESSION_UL_users(bpy.types.UIList):
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index, flt_flag): def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index, flt_flag):
session = operators.client settings = get_preferences()
settings = utils.get_preferences()
is_local_user = item.username == settings.username is_local_user = item.username == settings.username
ping = '-' ping = '-'
frame_current = '-' frame_current = '-'
@ -390,8 +415,8 @@ class SESSION_UL_users(bpy.types.UIList):
ping = str(user['latency']) ping = str(user['latency'])
metadata = user.get('metadata') metadata = user.get('metadata')
if metadata and 'frame_current' in metadata: if metadata and 'frame_current' in metadata:
frame_current = str(metadata['frame_current']) frame_current = str(metadata.get('frame_current','-'))
scene_current = metadata['scene_current'] scene_current = metadata.get('scene_current','-')
if user['admin']: if user['admin']:
status_icon = 'FAKE_USER_ON' status_icon = 'FAKE_USER_ON'
split = layout.split(factor=0.35) split = layout.split(factor=0.35)
@ -412,8 +437,8 @@ class SESSION_PT_presence(bpy.types.Panel):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return not operators.client \ return not session \
or (operators.client and operators.client.state['STATE'] in [STATE_INITIAL, STATE_ACTIVE]) or (session and session.state['STATE'] in [STATE_INITIAL, STATE_ACTIVE])
def draw_header(self, context): def draw_header(self, context):
self.layout.prop(context.window_manager.session, self.layout.prop(context.window_manager.session,
@ -423,56 +448,35 @@ class SESSION_PT_presence(bpy.types.Panel):
layout = self.layout layout = self.layout
settings = context.window_manager.session settings = context.window_manager.session
pref = get_preferences()
layout.active = settings.enable_presence layout.active = settings.enable_presence
col = layout.column() col = layout.column()
col.prop(settings, "presence_show_session_status")
row = col.column()
row.active = settings.presence_show_session_status
row.prop(pref, "presence_hud_scale", expand=True)
row = col.column(align=True)
row.active = settings.presence_show_session_status
row.prop(pref, "presence_hud_hpos", expand=True)
row.prop(pref, "presence_hud_vpos", expand=True)
col.prop(settings, "presence_show_selected") col.prop(settings, "presence_show_selected")
col.prop(settings, "presence_show_user") col.prop(settings, "presence_show_user")
row = layout.column() row = layout.column()
row.active = settings.presence_show_user row.active = settings.presence_show_user
row.prop(settings, "presence_show_far_user") row.prop(settings, "presence_show_far_user")
class SESSION_PT_services(bpy.types.Panel):
bl_idname = "MULTIUSER_SERVICE_PT_panel"
bl_label = "Services"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
return operators.client and operators.client.state['STATE'] == 2
def draw_header(self, context):
self.layout.label(text="", icon='FILE_CACHE')
def draw(self, context):
layout = self.layout
online_users = context.window_manager.online_users
selected_user = context.window_manager.user_index
settings = context.window_manager.session
active_user = online_users[selected_user] if len(online_users)-1 >= selected_user else 0
# Create a simple row.
for name, state in operators.client.services_state.items():
row = layout.row()
row.label(text=name)
row.label(text=get_state_str(state))
def draw_property(context, parent, property_uuid, level=0): def draw_property(context, parent, property_uuid, level=0):
settings = utils.get_preferences() settings = get_preferences()
runtime_settings = context.window_manager.session runtime_settings = context.window_manager.session
item = operators.client.get(uuid=property_uuid) item = session.get(uuid=property_uuid)
if item.state == ERROR:
return
area_msg = parent.row(align=True) area_msg = parent.row(align=True)
if level > 0:
for i in range(level): if item.state == ERROR:
area_msg.label(text="") area_msg.alert=True
else:
area_msg.alert=False
line = area_msg.box() line = area_msg.box()
name = item.data['name'] if item.data else item.uuid name = item.data['name'] if item.data else item.uuid
@ -485,8 +489,8 @@ def draw_property(context, parent, property_uuid, level=0):
# Operations # Operations
have_right_to_modify = item.owner == settings.username or \ have_right_to_modify = (item.owner == settings.username or \
item.owner == RP_COMMON item.owner == RP_COMMON) and item.state != ERROR
if have_right_to_modify: if have_right_to_modify:
detail_item_box.operator( detail_item_box.operator(
@ -496,10 +500,12 @@ def draw_property(context, parent, property_uuid, level=0):
detail_item_box.separator() detail_item_box.separator()
if item.state in [FETCHED, UP]: if item.state in [FETCHED, UP]:
detail_item_box.operator( apply = detail_item_box.operator(
"session.apply", "session.apply",
text="", text="",
icon=ICONS_PROP_STATES[item.state]).target = item.uuid icon=ICONS_PROP_STATES[item.state])
apply.target = item.uuid
apply.reset_dependencies = True
elif item.state in [MODIFIED, ADDED]: elif item.state in [MODIFIED, ADDED]:
detail_item_box.operator( detail_item_box.operator(
"session.commit", "session.commit",
@ -522,7 +528,6 @@ def draw_property(context, parent, property_uuid, level=0):
else: else:
detail_item_box.label(text="", icon="DECORATE_LOCKED") detail_item_box.label(text="", icon="DECORATE_LOCKED")
class SESSION_PT_repository(bpy.types.Panel): class SESSION_PT_repository(bpy.types.Panel):
bl_idname = "MULTIUSER_PROPERTIES_PT_panel" bl_idname = "MULTIUSER_PROPERTIES_PT_panel"
bl_label = "Repository" bl_label = "Repository"
@ -532,9 +537,17 @@ class SESSION_PT_repository(bpy.types.Panel):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
settings = get_preferences()
admin = False
if session and hasattr(session,'online_users'):
usr = session.online_users.get(settings.username)
if usr:
admin = usr['admin']
return hasattr(context.window_manager, 'session') and \ return hasattr(context.window_manager, 'session') and \
operators.client and \ session and \
operators.client.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY] (session.state['STATE'] == STATE_ACTIVE or \
session.state['STATE'] == STATE_LOBBY and admin)
def draw_header(self, context): def draw_header(self, context):
self.layout.label(text="", icon='OUTLINER_OB_GROUP_INSTANCE') self.layout.label(text="", icon='OUTLINER_OB_GROUP_INSTANCE')
@ -543,10 +556,9 @@ class SESSION_PT_repository(bpy.types.Panel):
layout = self.layout layout = self.layout
# Filters # Filters
settings = utils.get_preferences() settings = get_preferences()
runtime_settings = context.window_manager.session runtime_settings = context.window_manager.session
session = operators.client
usr = session.online_users.get(settings.username) usr = session.online_users.get(settings.username)
row = layout.row() row = layout.row()
@ -572,11 +584,11 @@ class SESSION_PT_repository(bpy.types.Panel):
types_filter = [t.type_name for t in settings.supported_datablocks types_filter = [t.type_name for t in settings.supported_datablocks
if t.use_as_filter] if t.use_as_filter]
key_to_filter = operators.client.list( key_to_filter = session.list(
filter_owner=settings.username) if runtime_settings.filter_owned else operators.client.list() filter_owner=settings.username) if runtime_settings.filter_owned else session.list()
client_keys = [key for key in key_to_filter client_keys = [key for key in key_to_filter
if operators.client.get(uuid=key).str_type if session.get(uuid=key).str_type
in types_filter] in types_filter]
if client_keys: if client_keys:
@ -592,6 +604,36 @@ class SESSION_PT_repository(bpy.types.Panel):
else: else:
row.label(text="Waiting to start") row.label(text="Waiting to start")
class VIEW3D_PT_overlay_session(bpy.types.Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay'
bl_label = "Multi-user"
@classmethod
def poll(cls, context):
return True
def draw(self, context):
layout = self.layout
view = context.space_data
overlay = view.overlay
display_all = overlay.show_overlays
col = layout.column()
row = col.row(align=True)
settings = context.window_manager.session
layout.active = settings.enable_presence
col = layout.column()
col.prop(settings, "presence_show_session_status")
col.prop(settings, "presence_show_selected")
col.prop(settings, "presence_show_user")
row = layout.column()
row.active = settings.presence_show_user
row.prop(settings, "presence_show_far_user")
classes = ( classes = (
SESSION_UL_users, SESSION_UL_users,
@ -601,9 +643,8 @@ classes = (
SESSION_PT_presence, SESSION_PT_presence,
SESSION_PT_advanced_settings, SESSION_PT_advanced_settings,
SESSION_PT_user, SESSION_PT_user,
SESSION_PT_services,
SESSION_PT_repository, SESSION_PT_repository,
VIEW3D_PT_overlay_session,
) )

View File

@ -21,13 +21,22 @@ import logging
import os import os
import sys import sys
import time import time
from uuid import uuid4
from collections.abc import Iterable from collections.abc import Iterable
from pathlib import Path
from uuid import uuid4
import math
import bpy import bpy
import mathutils import mathutils
from . import environment, presence from . import environment
from replication.constants import (STATE_ACTIVE, STATE_AUTH,
STATE_CONFIG, STATE_SYNCING,
STATE_INITIAL, STATE_SRV_SYNC,
STATE_WAITING, STATE_QUITTING,
STATE_LOBBY,
STATE_LAUNCHING_SERVICES)
def find_from_attr(attr_name, attr_value, list): def find_from_attr(attr_name, attr_value, list):
@ -39,7 +48,7 @@ def find_from_attr(attr_name, attr_value, list):
def get_datablock_users(datablock): def get_datablock_users(datablock):
users = [] users = []
supported_types = get_preferences().supported_datablocks supported_types = get_preferences().supported_datablocks
if hasattr(datablock, 'users_collection') and datablock.users_collection: if hasattr(datablock, 'users_collection') and datablock.users_collection:
users.extend(list(datablock.users_collection)) users.extend(list(datablock.users_collection))
if hasattr(datablock, 'users_scene') and datablock.users_scene: if hasattr(datablock, 'users_scene') and datablock.users_scene:
@ -47,7 +56,7 @@ def get_datablock_users(datablock):
if hasattr(datablock, 'users_group') and datablock.users_scene: if hasattr(datablock, 'users_group') and datablock.users_scene:
users.extend(list(datablock.users_scene)) users.extend(list(datablock.users_scene))
for datatype in supported_types: for datatype in supported_types:
if datatype.bl_name != 'users': if datatype.bl_name != 'users' and hasattr(bpy.data, datatype.bl_name):
root = getattr(bpy.data, datatype.bl_name) root = getattr(bpy.data, datatype.bl_name)
for item in root: for item in root:
if hasattr(item, 'data') and datablock == item.data or \ if hasattr(item, 'data') and datablock == item.data or \
@ -56,6 +65,32 @@ def get_datablock_users(datablock):
return users return users
def get_state_str(state):
state_str = 'UNKOWN'
if state == STATE_WAITING:
state_str = 'WARMING UP DATA'
elif state == STATE_SYNCING:
state_str = 'FETCHING'
elif state == STATE_AUTH:
state_str = 'AUTHENTICATION'
elif state == STATE_CONFIG:
state_str = 'CONFIGURATION'
elif state == STATE_ACTIVE:
state_str = 'ONLINE'
elif state == STATE_SRV_SYNC:
state_str = 'PUSHING'
elif state == STATE_INITIAL:
state_str = 'OFFLINE'
elif state == STATE_QUITTING:
state_str = 'QUITTING'
elif state == STATE_LAUNCHING_SERVICES:
state_str = 'LAUNCHING SERVICES'
elif state == STATE_LOBBY:
state_str = 'LOBBY'
return state_str
def clean_scene(): def clean_scene():
for type_name in dir(bpy.data): for type_name in dir(bpy.data):
try: try:
@ -77,10 +112,76 @@ def resolve_from_id(id, optionnal_type=None):
if id in root and ((optionnal_type is None) or (optionnal_type.lower() in root[id].__class__.__name__.lower())): if id in root and ((optionnal_type is None) or (optionnal_type.lower() in root[id].__class__.__name__.lower())):
return root[id] return root[id]
return None return None
def get_preferences(): def get_preferences():
return bpy.context.preferences.addons[__package__].preferences return bpy.context.preferences.addons[__package__].preferences
def current_milli_time(): def current_milli_time():
return int(round(time.time() * 1000)) return int(round(time.time() * 1000))
def get_expanded_icon(prop: bpy.types.BoolProperty) -> str:
if prop:
return 'DISCLOSURE_TRI_DOWN'
else:
return 'DISCLOSURE_TRI_RIGHT'
# Taken from here: https://stackoverflow.com/a/55659577
def get_folder_size(folder):
return ByteSize(sum(file.stat().st_size for file in Path(folder).rglob('*')))
class ByteSize(int):
_kB = 1024
_suffixes = 'B', 'kB', 'MB', 'GB', 'PB'
def __new__(cls, *args, **kwargs):
return super().__new__(cls, *args, **kwargs)
def __init__(self, *args, **kwargs):
self.bytes = self.B = int(self)
self.kilobytes = self.kB = self / self._kB**1
self.megabytes = self.MB = self / self._kB**2
self.gigabytes = self.GB = self / self._kB**3
self.petabytes = self.PB = self / self._kB**4
*suffixes, last = self._suffixes
suffix = next((
suffix
for suffix in suffixes
if 1 < getattr(self, suffix) < self._kB
), last)
self.readable = suffix, getattr(self, suffix)
super().__init__()
def __str__(self):
return self.__format__('.2f')
def __repr__(self):
return '{}({})'.format(self.__class__.__name__, super().__repr__())
def __format__(self, format_spec):
suffix, val = self.readable
return '{val:{fmt}} {suf}'.format(val=math.ceil(val), fmt=format_spec, suf=suffix)
def __sub__(self, other):
return self.__class__(super().__sub__(other))
def __add__(self, other):
return self.__class__(super().__add__(other))
def __mul__(self, other):
return self.__class__(super().__mul__(other))
def __rsub__(self, other):
return self.__class__(super().__sub__(other))
def __radd__(self, other):
return self.__class__(super().__add__(other))
def __rmul__(self, other):
return self.__class__(super().__rmul__(other))

View File

@ -0,0 +1,24 @@
# Download base image debian jessie
FROM python:slim
ARG replication_version=0.0.21
ARG version=0.1.1
# Infos
LABEL maintainer="Swann Martinez"
LABEL version=$version
LABEL description="Blender multi-user addon \
dedicated server image."
# Argument
ENV password='admin'
ENV port=5555
ENV timeout=3000
ENV log_level=INFO
ENV log_file="multiuser_server.log"
#Install replication
RUN pip install replication==$replication_version
# Run the server with parameters
CMD replication.serve -pwd ${password} -p ${port} -t ${timeout} -l ${log_level} -lf ${log_file}

View File

@ -0,0 +1,6 @@
import re
init_py = open("multi_user/__init__.py").read()
version = re.search("\d+, \d+, \d+", init_py).group(0)
digits = version.split(',')
print('.'.join(digits).replace(" ",""))

View File

@ -0,0 +1,4 @@
import re
init_py = open("multi_user/__init__.py").read()
print(re.search("\d+\.\d+\.\d+\w\d+|\d+\.\d+\.\d+", init_py).group(0))

View File

@ -0,0 +1,10 @@
#! /bin/bash
# Start server in docker container, from image hosted on the multi-user gitlab's container registry
docker run -d \
-p 5555-5560:5555-5560 \
-e port=5555 \
-e log-level DEBUG \
-e password=admin \
-e timeout=1000 \
registry.gitlab.com/slumber/multi-user/multi-user-server:0.1.0

View File

@ -0,0 +1,5 @@
#! /bin/bash
# Start replication server locally, and include logging (requires replication_version=0.0.21a15)
clear
replication.serve -p 5555 -pwd admin -t 1000 -l DEBUG -lf server.log

View File

@ -2,7 +2,7 @@ import os
import pytest import pytest
from deepdiff import DeepDiff from deepdiff import DeepDiff
from uuid import uuid4
import bpy import bpy
import random import random
from multi_user.bl_types.bl_collection import BlCollection from multi_user.bl_types.bl_collection import BlCollection
@ -10,8 +10,13 @@ from multi_user.bl_types.bl_collection import BlCollection
def test_collection(clear_blend): def test_collection(clear_blend):
# Generate a collection with childrens and a cube # Generate a collection with childrens and a cube
datablock = bpy.data.collections.new("root") datablock = bpy.data.collections.new("root")
datablock.children.link(bpy.data.collections.new("child")) datablock.uuid = str(uuid4())
datablock.children.link(bpy.data.collections.new("child2")) s1 = bpy.data.collections.new("child")
s1.uuid = str(uuid4())
s2 = bpy.data.collections.new("child2")
s2.uuid = str(uuid4())
datablock.children.link(s1)
datablock.children.link(s2)
bpy.ops.mesh.primitive_cube_add() bpy.ops.mesh.primitive_cube_add()
datablock.objects.link(bpy.data.objects[0]) datablock.objects.link(bpy.data.objects[0])

View File

@ -1,21 +0,0 @@
import os
import pytest
from deepdiff import DeepDiff
import bpy
import random
from multi_user.bl_types.bl_image import BlImage
def test_image(clear_blend):
    """Round-trip an image datablock through BlImage dump/construct/load.

    Dumps a freshly created image, deletes it, rebuilds it from the dump,
    and checks that a second dump is identical to the first.
    """
    datablock = bpy.data.images.new('asd', 2000, 2000)
    image_impl = BlImage()

    # Snapshot the image, then remove the original datablock entirely.
    dumped = image_impl._dump(datablock)
    bpy.data.images.remove(datablock)

    # Rebuild from the snapshot and load the dumped state into it.
    rebuilt = image_impl._construct(dumped)
    image_impl._load(dumped, rebuilt)

    # A second dump of the rebuilt image must match the original dump.
    assert not DeepDiff(dumped, image_impl._dump(rebuilt))

View File

@ -7,13 +7,12 @@ import bpy
from multi_user.bl_types.bl_material import BlMaterial from multi_user.bl_types.bl_material import BlMaterial
def test_material(clear_blend): def test_material_nodes(clear_blend):
nodes_types = [node.bl_rna.identifier for node in bpy.types.ShaderNode.__subclasses__()] nodes_types = [node.bl_rna.identifier for node in bpy.types.ShaderNode.__subclasses__()]
datablock = bpy.data.materials.new("test") datablock = bpy.data.materials.new("test")
datablock.use_nodes = True datablock.use_nodes = True
bpy.data.materials.create_gpencil_data(datablock)
for ntype in nodes_types: for ntype in nodes_types:
datablock.node_tree.nodes.new(ntype) datablock.node_tree.nodes.new(ntype)
@ -26,3 +25,18 @@ def test_material(clear_blend):
result = implementation._dump(test) result = implementation._dump(test)
assert not DeepDiff(expected, result) assert not DeepDiff(expected, result)
def test_material_gpencil(clear_blend):
datablock = bpy.data.materials.new("test")
bpy.data.materials.create_gpencil_data(datablock)
implementation = BlMaterial()
expected = implementation._dump(datablock)
bpy.data.materials.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
assert not DeepDiff(expected, result)

View File

@ -30,9 +30,11 @@ CONSTRAINTS_TYPES = [
'COPY_ROTATION', 'COPY_SCALE', 'COPY_TRANSFORMS', 'LIMIT_DISTANCE', 'COPY_ROTATION', 'COPY_SCALE', 'COPY_TRANSFORMS', 'LIMIT_DISTANCE',
'LIMIT_LOCATION', 'LIMIT_ROTATION', 'LIMIT_SCALE', 'MAINTAIN_VOLUME', 'LIMIT_LOCATION', 'LIMIT_ROTATION', 'LIMIT_SCALE', 'MAINTAIN_VOLUME',
'TRANSFORM', 'TRANSFORM_CACHE', 'CLAMP_TO', 'DAMPED_TRACK', 'IK', 'TRANSFORM', 'TRANSFORM_CACHE', 'CLAMP_TO', 'DAMPED_TRACK', 'IK',
'LOCKED_TRACK', 'SPLINE_IK', 'STRETCH_TO', 'TRACK_TO', 'ACTION', 'LOCKED_TRACK', 'STRETCH_TO', 'TRACK_TO', 'ACTION',
'ARMATURE', 'CHILD_OF', 'FLOOR', 'FOLLOW_PATH', 'PIVOT', 'SHRINKWRAP'] 'ARMATURE', 'CHILD_OF', 'FLOOR', 'FOLLOW_PATH', 'PIVOT', 'SHRINKWRAP']
#temporary disabled 'SPLINE_IK' until its fixed
def test_object(clear_blend): def test_object(clear_blend):
bpy.ops.mesh.primitive_cube_add( bpy.ops.mesh.primitive_cube_add(
enter_editmode=False, align='WORLD', location=(0, 0, 0)) enter_editmode=False, align='WORLD', location=(0, 0, 0))

View File

@ -6,8 +6,11 @@ from deepdiff import DeepDiff
import bpy import bpy
import random import random
from multi_user.bl_types.bl_scene import BlScene from multi_user.bl_types.bl_scene import BlScene
from multi_user.utils import get_preferences
def test_scene(clear_blend): def test_scene(clear_blend):
get_preferences().sync_flags.sync_render_settings = True
datablock = bpy.data.scenes.new("toto") datablock = bpy.data.scenes.new("toto")
datablock.view_settings.use_curve_mapping = True datablock.view_settings.use_curve_mapping = True
# Test # Test