Compare commits

..

243 Commits

Author SHA1 Message Date
45437660ba clean: remove unused lock 2020-10-22 17:37:53 +02:00
ee93a5b209 Merge branch 'develop' into 132-fix-undo-edit-last-operation-redo-handling 2020-10-22 16:21:31 +02:00
f90c12b27f doc: added missing fields
feat: changed session widget defaults
2020-10-22 16:07:19 +02:00
3573db0969 Merge branch '134-revamp-session-status-ui-widget' into 'develop'
Resolve "Revamp session status UI widget"

See merge request slumber/multi-user!67
2020-10-22 13:52:29 +00:00
92bde00a5a feat: store session widget settings to preferences 2020-10-22 15:48:13 +02:00
2c82560d24 fix: grease pencil material 2020-10-22 13:55:26 +02:00
6f364d2b88 feat: session widget position and scale settings
feat: ui_scale is now taken in account for session widget text size
2020-10-21 23:33:44 +02:00
760b52c02b Merge branch '135-empty-and-light-objects-user-selection-highlight-is-broken' into 'develop'
Resolve "Empty and Light objects user selection highlight is broken"

See merge request slumber/multi-user!66
2020-10-21 15:25:42 +00:00
4dd932fc56 fix: empty and light display broken 2020-10-21 17:23:59 +02:00
ba1a03cbfa Merge branch '133-material-renaming-is-unstable' into 'develop'
Resolve "Material renaming is unstable"

See merge request slumber/multi-user!65
2020-10-21 13:17:18 +00:00
18b5fa795c feat: resolve materials from uuid by default and fallback on regular name resolving 2020-10-21 15:10:37 +02:00
1a82ec72e4 fix: change owner call in opterator 2020-10-21 14:40:15 +02:00
804747c73b fix: owning parent when a child is already owned (ex: duplicate linked) 2020-10-21 14:15:42 +02:00
7ee705332f feat: update replication to prevent UnpicklingError from crashing the network Thred 2020-10-20 17:25:50 +02:00
4bd0055056 Merge branch 'develop' into 132-fix-undo-edit-last-operation-redo-handling 2020-10-16 14:57:36 +02:00
716c78e380 feat: update changelog 2020-10-16 11:06:41 +02:00
5e4ce4556f doc: update operator descriptions 2020-10-16 10:57:45 +02:00
aa9ea08151 doc: update refresh icon 2020-10-16 10:28:29 +02:00
f56890128e fix: material test by splitting it in a gpencil and nodal material test 2020-10-15 18:08:08 +02:00
8865556229 feat: update CHANGELOG 2020-10-15 18:02:07 +02:00
5bc9b10c12 fix: material gpencil loading 2020-10-15 18:01:54 +02:00
7db3c18213 feat: affect dependencies option in change owner 2020-10-15 17:48:04 +02:00
f151c61d7b feat: mimic blender undo handling 2020-10-15 17:21:14 +02:00
ff35e34032 feat: update apply ui icon
fix: material property filter
2020-10-15 17:09:50 +02:00
9f8222afa7 fix: handle apply dependencies 2020-10-15 12:11:28 +02:00
1828bfac22 feat: update changelog 2020-10-14 19:25:59 +02:00
3a1087ecb8 Merge branch '131-sync-render-settings-flag-cause-a-race-condition' into 'develop'
Resolve "Sync render settings flag cause a race condition"

See merge request slumber/multi-user!63
2020-10-14 17:16:20 +00:00
b398541787 fix: apply operator 2020-10-14 19:12:28 +02:00
f0b33d8471 fix: race condition in scene sync 2020-10-14 19:11:32 +02:00
5a282a3e22 Merge branch '130-mesh-transfert-is-broken-between-a-hybrid-linux-windows-session' into 'develop'
Resolve "Mesh transfert is broken between a hybrid linux-windows session"

See merge request slumber/multi-user!62
2020-10-14 14:07:59 +00:00
4283fc0fff fix: crash during hybrid session
Related to #130
2020-10-14 16:06:11 +02:00
753f4d3f27 fix: prevent NonAuthorizedOperationError to kill the right managment timer 2020-10-14 00:36:59 +02:00
9dd02b2756 feat: fix binary diff 2020-10-13 17:15:31 +02:00
c74d12c843 fix: handle world empty dependencies 2020-10-13 17:10:25 +02:00
e1d9982276 fix: bl_file diff when clear memory cache is enabled 2020-10-13 17:09:43 +02:00
8861986213 fix: packed image save error 2020-10-13 16:58:48 +02:00
1cb9fb410c feat: material node output default value support
fix: prevent material empty dependencies
2020-10-12 23:10:42 +02:00
c4a8cc4606 Merge branch 'fix_deploy' into 'develop'
Fix deploy

See merge request slumber/multi-user!61
2020-10-12 19:03:47 +00:00
187f11071c feat: enable build and deploy for only master and develop 2020-10-12 21:01:54 +02:00
530fae8cb4 feat: active deploy 2020-10-12 20:24:12 +02:00
6771c371a1 feat: enable deploy back 2020-10-12 20:23:08 +02:00
c844c6e54f clean: keep only active renderer settings (bl_scene.py)
fix: resolve_deps_implementation now only resolve master collection objects (bl_scene.py)
2020-10-12 20:21:08 +02:00
a4d0b1a68b fix: client selection 2020-10-12 18:56:42 +02:00
2fdc11692d fix: handle None bounding box position 2020-10-12 18:15:59 +02:00
dbfca4568f fix: get_preference import syntax 2020-10-12 18:07:09 +02:00
069a528276 feat: test scene with sync render settings enabled 2020-10-12 18:04:54 +02:00
030f2661fd fix: buffer empty for the first diff 2020-10-12 17:13:35 +02:00
e589e3eec4 fix: file not found logging
clean: remove cache for scene diff
2020-10-12 17:12:50 +02:00
04140ced1b fix: collection instance bounding box display 2020-10-12 17:11:46 +02:00
0d9ce43e74 fix: enable binrary differentialback
feat: ignore material node bl_label
2020-10-12 13:33:49 +02:00
d3969b4fd4 Revert "feat: avoid dumping read only properties"
This reverts commit cefaef5c4b.
2020-10-12 10:23:19 +02:00
e21f64ac98 revert: bl_label 2020-10-11 19:20:53 +02:00
b25b380d21 fix: missing bl_idname 2020-10-11 19:11:51 +02:00
1146d9d304 feat: disable render settings sync by default 2020-10-11 19:08:06 +02:00
51b60521e6 feat: update relplication version 2020-10-11 19:07:48 +02:00
035f8a1dcd feat: skipping not required parameters 2020-10-11 19:07:28 +02:00
cefaef5c4b feat: avoid dumping read only properties 2020-10-11 19:06:58 +02:00
4714e60ff7 Merge branch 'develop' of gitlab.com:slumber/multi-user into develop 2020-10-11 15:22:05 +02:00
3eca25ae19 feat: update replication version 2020-10-11 15:10:28 +02:00
96346f8a25 refactor: clean debug logs 2020-10-11 15:06:32 +02:00
a258c2c182 Merge branch 'feature/doc-updates-2' into 'develop'
Feature/doc updates 2

See merge request slumber/multi-user!60
2020-10-09 09:28:36 +00:00
6862df5331 Minor doc update 2020-10-09 01:59:42 +02:00
f271a9d0e3 Updated contribution doc to indicate how to sync with upstream repository 2020-10-09 01:55:45 +02:00
bdff6eb5c9 Updated contribution documentation with how to sync upstream repo 2020-10-09 01:29:01 +02:00
b661407952 Merge branch '128-ui-gizmo-error' into 'develop'
Resolve "UI gizmo error"

See merge request slumber/multi-user!59
2020-10-08 22:50:11 +00:00
d5eb7fda02 fix: ci yaml error 2020-10-09 00:46:52 +02:00
35e8ac9c33 feat: disable deploy until fixed 2020-10-09 00:45:30 +02:00
4453d256b8 feat: update replication version, switched dependency to pyzmq 2020-10-08 23:57:39 +02:00
299e330ec6 fix: internal gizmo error by launching the modal operator from the timer 2020-10-08 23:42:14 +02:00
34b9f7ae27 Merge branch 'master' into develop 2020-10-08 23:14:58 +02:00
9d100d84ad Merge branch 'hotfix/ui-spelling-fixes' into 'master'
Hotfix/ui spelling fixes

See merge request slumber/multi-user!58
2020-10-08 20:58:13 +00:00
2f677c399e UI spelling fixes to preferences.py and ui.py 2020-10-08 22:52:24 +02:00
e967b35c38 Revert "Minor UI spelling errors"
This reverts commit 673c4e69a4.
2020-10-08 21:58:30 +02:00
7bd0a196b4 Merge branch 'feature/doc-updates' into 'develop'
Feature/doc updates

See merge request slumber/multi-user!57
2020-10-08 17:04:37 +00:00
7892b5e9b6 Adding log-level to server startup scripts 2020-10-08 18:35:08 +02:00
f779678c0e Updates to hosting guide and contribution documentation 2020-10-08 18:31:20 +02:00
629fc2d223 feat: update dockerfile 2020-10-08 15:10:32 +02:00
724c2345df refactor: disable force apply during the reparent 2020-10-08 15:00:27 +02:00
673c4e69a4 Minor UI spelling errors 2020-10-08 00:31:56 +02:00
fbfff6c7ec Doc updates clarifying developer workflow, updating hosting instructions 2020-10-08 00:08:23 +02:00
f592294335 Added scripts to conveniently start server instance via docker or replication 2020-10-07 21:20:43 +02:00
8e7be5afde Merge branch '126-draw-refactoring' into 'develop'
Resolve "Draw refactoring"

See merge request slumber/multi-user!55
2020-10-06 14:12:13 +00:00
fc76b2a8e6 fix: avoid to remove inexistant user widget 2020-10-06 16:10:10 +02:00
1a8bcddb74 refactor: formatting 2020-10-06 15:53:29 +02:00
60fba5b9df refactor: use dict to store widgets 2020-10-06 15:46:35 +02:00
be0eb1fa42 clean: remove unused import 2020-10-06 09:45:13 +02:00
93d9bea3ae feat: display session status 2020-10-05 23:38:52 +02:00
022b7f7822 refactor: enable username display again
refactor: avoid to draw the local user
2020-10-05 22:34:43 +02:00
ae34846509 fix: ci syntax 2020-10-05 21:53:14 +02:00
d328077cb0 feat: deploy and build only for master and develop
refactor: carry on presence refactoring
2020-10-05 21:51:54 +02:00
0c4740eef8 fix: import error 2020-10-05 18:48:40 +02:00
d7b2c7e2f6 refactor: started to rewrite presence
fix: weird bounding boxes on various objects types

Related to #55
2020-10-05 18:34:41 +02:00
efbb9e7096 doc: feat changelog 0.1.0 release date 2020-10-05 16:11:04 +02:00
e0b56d8990 Merge branch 'develop' into 'master'
v0.1.0

See merge request slumber/multi-user!43
2020-10-05 13:42:16 +00:00
7a94c21187 doc: update version 2020-10-05 15:37:06 +02:00
0687090f05 feat: update changelog 2020-10-05 15:18:01 +02:00
920744334c Merge branch '125-autobuild-docker-image' into 'develop'
Resolve "Autobuild docker image"

See merge request slumber/multi-user!53
2020-10-05 09:32:57 +00:00
dfa7f98126 refactor: remove useless script 2020-10-05 11:28:45 +02:00
ea530f0f96 featL enable tast and build back 2020-10-03 00:30:37 +02:00
c3546ff74f fix: var name 2020-10-03 00:28:11 +02:00
83aa9b57ec feat: automatic image version 2020-10-03 00:26:44 +02:00
28a265be68 test: login in script 2020-10-03 00:12:39 +02:00
7dfabb16c7 fix: tls 2020-10-03 00:07:07 +02:00
ea5d9371ca feat: login 2020-10-03 00:00:42 +02:00
3df73a0716 feat: find replication version 2020-10-02 23:58:08 +02:00
ae3c994ff1 feat: dind tests 2020-10-02 23:55:04 +02:00
bd73b385b6 feat: dind 2020-10-02 23:52:19 +02:00
f054b1c5f2 fix: trying to use a standard docker image 2020-10-02 23:38:09 +02:00
d083100a2a fix: image directory path 2020-10-02 23:33:50 +02:00
b813b8df9e feat: docker build and push 2020-10-02 23:32:06 +02:00
d0e966ff1a fix: path 2020-10-02 23:29:48 +02:00
56cbf14fe1 refactor: use custom image 2020-10-02 23:27:45 +02:00
8bf55ebd46 feat: apk update 2020-10-02 23:19:34 +02:00
edbc5ee343 feat: apt install 2020-10-02 23:16:46 +02:00
4a92511582 feat: test install python 2020-10-02 23:14:49 +02:00
b42df2cf4a feat: retrieve version 2020-10-02 23:07:25 +02:00
7549466824 fix: ci deploy name 2020-10-02 18:59:25 +02:00
423e71476d feat: update ci 2020-10-02 18:57:50 +02:00
3bc4b20035 feat: CI file and docker image 2020-10-02 18:56:26 +02:00
9966a24b5e feat: update supported types in README.md 2020-10-02 18:04:32 +02:00
577c01a594 Merge branch '124-use-a-global-session-instance-in-replication' into 'develop'
Resolve "use a global session instance in replication"

See merge request slumber/multi-user!52
2020-10-02 15:51:30 +00:00
3d72796c10 refactor: remove old client ref
feat: update changelog
2020-10-02 17:48:56 +02:00
edcbd7b02a feat: display node in error in the repository view 2020-10-02 17:22:14 +02:00
b368c985b8 refactor: session handler encapsulation 2020-10-02 12:11:53 +02:00
cab1a71eaa fix: version 2020-10-02 09:52:21 +02:00
33cb188509 refactor: use replication session global instance 2020-10-02 00:05:33 +02:00
0a3dd9b5b8 fix: missing get_datablock_from_uuid 2020-10-02 00:00:34 +02:00
7fbdbdcc21 feat: show flag in blender overlays panel 2020-10-01 22:55:06 +02:00
8f9d5aabf9 refactor: moved get_datablock_from_uuid to bl_datablock 2020-10-01 22:51:48 +02:00
824d4d6a83 feat: upgrade replication version to fix duplicate during REPARENT
Related to #113
2020-10-01 15:34:36 +02:00
5f4bccbcd9 feat: POLY curves support
During a mesh->curve conversion, curve type spline is changed to POLY. This is adding the support for POLY curves.

Related to #113
2020-10-01 15:33:10 +02:00
8e8e54fe7d Merge branch '122-crash-on-connection' into 'develop'
Resolve "Crash on connection"

See merge request slumber/multi-user!50
2020-10-01 09:17:59 +00:00
04b13cc0b7 refactor: moveconnection handlers to the main thread 2020-10-01 10:58:30 +02:00
ba98875560 fix: version check command format 2020-09-29 17:33:39 +02:00
a9fb84a5c6 fix: world viewport color sync 2020-09-29 11:47:48 +02:00
2f139178d3 feat: update replication version 2020-09-28 22:59:43 +02:00
e466f81600 fix: file handler not properly closed 2020-09-28 22:51:07 +02:00
cb836e30f5 fix: empty uv useless update 2020-09-28 22:50:42 +02:00
152e356dad fix: font/sound loading 2020-09-28 10:40:07 +02:00
7b13e8978b fix: close file handler after quitting the session 2020-09-28 10:32:41 +02:00
e0839fe1fb Merge branch '118-optionnal-active-camera-sync-flag' into 'develop'
Resolve "Optionnal active camera sync flag"

See merge request slumber/multi-user!49
2020-09-25 14:09:31 +00:00
aec3e8b8bf doc: update replication flag section 2020-09-25 15:27:01 +02:00
a89564de6b feat: append synchronization flags to the top
refactor: enable sync render settings by default
2020-09-25 14:26:31 +02:00
e301a10456 feat: active camera sync flag 2020-09-25 11:33:35 +02:00
cfc6ce91bc feat: initial live syncflag support 2020-09-25 11:23:36 +02:00
4f731c6640 fix: implementation not found if a new type is added 2020-09-23 17:37:21 +02:00
9b1b8f11fd feat: sync object hide_render 2020-09-23 16:48:17 +02:00
e742c824fc feat: sync all object show flags except hide_viewport. 2020-09-23 16:47:51 +02:00
6757bbbd30 fix: enable DIFF_BINARY by default 2020-09-23 16:04:31 +02:00
f6a39e4290 fix: scene differential error
fix: bl_file loading error
feat: update replication version
2020-09-23 14:24:57 +02:00
410d8d2f1a feat: display sync 2020-09-23 10:00:08 +02:00
bd64c17f05 feat: update version 2020-09-22 16:36:59 +02:00
dc063b5954 fix: handle file not found exception 2020-09-21 18:52:27 +02:00
0ae34d5702 Merge branch 'file_replication' into 'develop'
Basic file replication interface

See merge request slumber/multi-user!48
2020-09-21 16:17:58 +00:00
167b39f15e doc: added a cache section to the quickstart 2020-09-21 18:14:30 +02:00
9adc0d7d6e clean: remove image testing (until the file replication interface is done) 2020-09-21 17:48:07 +02:00
fb622fa098 fix: get_datablock_users attribute error 2020-09-21 17:37:06 +02:00
c533d4b86a ci: run tests on every branch 2020-09-21 17:31:07 +02:00
6c47e095be feat: cache managenent utility 2020-09-21 16:47:49 +02:00
f992d06b03 feat: handle packed datablock
feat: filecache settings
2020-09-21 12:12:19 +02:00
af3afc1124 feat: use bl_file in bl_image 2020-09-21 00:11:37 +02:00
b77ab2dd05 feat: use bl_file to replicate font and sound files 2020-09-20 23:31:24 +02:00
150054d19c feat: generic file replication ground work 2020-09-20 19:53:51 +02:00
8d2b9e5580 Merge branch '65-sync-speaker-sounds' into 'develop'
Partial support for syncinf speaker sound files

See merge request slumber/multi-user!47
2020-09-19 19:37:43 +00:00
6870331c34 feat: notice 2020-09-19 18:59:03 +02:00
6f73b7fc29 feat: ground work for sound sync 2020-09-19 00:47:46 +02:00
6385830f53 fix: prevent world replication conflict with external addons 2020-09-18 23:38:21 +02:00
b705228f4a feat: support all font file extention 2020-09-18 23:30:50 +02:00
73d2da4c47 fix: ReparentException error
feat: replication protocol version in ui header
2020-09-18 23:25:01 +02:00
b28e7c2149 Merge branch '116-bfon-is-missing' into 'develop'
Resolve "Bfont is missing"

See merge request slumber/multi-user!46
2020-09-18 21:10:13 +00:00
38f06683be fix: bfont is missing
related to #116
2020-09-18 23:09:47 +02:00
62221c9e49 Merge branch '114-support-custom-fonts' into 'develop'
Resolve "Support custom fonts"

See merge request slumber/multi-user!45
2020-09-18 15:05:25 +00:00
e9f416f682 feat: ground work for custom font support 2020-09-18 17:04:24 +02:00
3108a06e89 fix: sync flag missing comma 2020-09-18 16:17:19 +02:00
470df50dc2 fix: bl_image test, disabling texture unload from ram. 2020-09-18 16:02:50 +02:00
d8a94e3f5e fix: image uuid error 2020-09-18 15:58:43 +02:00
47a0efef27 Merge branch '113-support-datablock-conversion' into 'develop'
Resolve "Support datablock conversion"

See merge request slumber/multi-user!44
2020-09-18 13:33:43 +00:00
ca5aebfeff feat: various images format support
feat: world environment image support
2020-09-18 15:25:52 +02:00
fe6ffd19b4 feat: child date renaming support 2020-09-17 23:45:09 +02:00
b9a6ddafe9 fix: object data load 2020-09-17 23:17:51 +02:00
ae71d7757e feat: reparent ground work 2020-09-17 22:47:11 +02:00
34ed5da6f0 fix: logging 2020-09-15 16:33:49 +02:00
2c16f07ae7 doc: update Changelog 2020-09-15 15:05:09 +02:00
60f25359d1 Merge branch '111-improve-the-logging-process' into 'develop'
Resolve "Improve the logging process"

See merge request slumber/multi-user!42
2020-09-15 11:03:42 +00:00
975b50a988 doc: update log related sections 2020-09-15 13:02:50 +02:00
66417dc84a refactor: minor ui cleanup 2020-09-15 12:40:51 +02:00
514f90d602 feat: logging to files
feat: logging level

Related to #111
2020-09-15 12:31:46 +02:00
086876ad2e feat: update version check to handle experimental ones 2020-09-15 12:29:20 +02:00
71c179f32f fix: python version 2020-09-09 11:58:51 +02:00
2399096b07 feat: experimenting a custom testing image 2020-09-09 11:57:34 +02:00
0c4d1aaa5f feat: update changelog to reflect changes 2020-09-09 11:55:53 +02:00
de8fbb0629 feat: update addon updater to support installation from branches (develop and master) 2020-09-09 10:58:02 +02:00
d7396e578c Merge branch '107-optionnal-flag-to-allow-edit-mesh-updates' into 'develop'
Resolve "Optionnal flag to allow edit mesh updates"

See merge request slumber/multi-user!41
2020-09-08 21:11:09 +00:00
7f5b5866f2 feat: usage warning 2020-09-08 23:09:42 +02:00
3eb1af406b doc: reflect advanced settings changes 2020-09-08 22:56:23 +02:00
79ccac915f feat: experimental edit mode update
Related to #107
2020-09-08 22:37:58 +02:00
f5232ccea0 Merge branch 'master' into develop 2020-09-03 17:23:21 +02:00
c599a4e6ea doc: update advanced section 2020-09-03 16:15:49 +02:00
b3230177d8 Merge branch 'feature/event_driven_updates' into develop 2020-09-03 15:59:19 +02:00
f2da4cb8e9 Merge branch 'develop' of gitlab.com:slumber/multi-user into develop 2020-09-02 16:45:08 +02:00
605bcc7581 refactor: bl_collection lint
feat: late update replication
2020-09-02 16:44:11 +02:00
e31d76a641 Merge branch 'fix-pip-require-virtualenv' into 'develop'
Resolve "'zmq' install (and other pip packages) fails when PIP_REQUIRE_VIRTUALENV env var is set to true"

See merge request slumber/multi-user!40
2020-08-28 17:49:27 +00:00
97c2118b7e doc: add comment to explain why unsetting PIP_REQUIRE_VIRTUALENV is required. 2020-08-28 18:12:01 +02:00
352977e442 fix: unset PIP_REQUIRE_VIRTUALENV if set to ensure multi-user can install its packages 2020-08-28 17:23:25 +02:00
a46d5fa227 fix: missing ui error, missing scene 2020-08-28 15:27:46 +02:00
ade736d8a5 refacotr: collection test 2020-08-28 15:01:50 +02:00
d7f7e86015 fix: collection dump 2020-08-28 14:52:56 +02:00
5e7d1e1dda feat: update replication version 2020-08-28 14:20:00 +02:00
fa5f0c7296 fix: replication version 2020-08-28 14:13:20 +02:00
f14d0915c8 feat: same collection management for Scene Master collection 2020-08-28 14:10:09 +02:00
d1e088d229 feat: orhtographic_scale sync 2020-08-28 14:09:10 +02:00
aa35da9c56 refactor: move attribute skipping warnings to debug 2020-08-28 11:28:26 +02:00
f26c3b2606 refactor: use uuid for collection loading 2020-08-28 11:27:03 +02:00
00d60be75b feat: change replication to the pre-release version 2020-08-27 11:40:26 +02:00
bb5b9fe4c8 refactor: move deepdiff dependency to replication 2020-08-27 10:45:54 +02:00
c6af49492e Merge branch 'master' of gitlab.com:slumber/multi-user 2020-08-26 11:35:47 +02:00
6158ef5171 feat: discord link in readme 2020-08-26 11:35:06 +02:00
6475b4fc08 feat: collection insance offset support
Related to #105
2020-08-24 17:49:17 +02:00
e4e09d63ff fix: instanced collection replication
Related to #105
2020-08-24 17:48:14 +02:00
4b07ae0cc3 fix: fix test condition 2020-08-07 15:47:05 +02:00
49a419cbe2 fix: none result while trying to access a node 2020-08-07 15:38:11 +02:00
5d52fb2460 fix: avoid build ci from running on other branch than develop and master 2020-08-07 15:08:08 +02:00
f1e09c1507 Merge branch 'develop' into feature/event_driven_updates 2020-08-07 15:07:17 +02:00
f915c52bd0 fix: loader missing 2020-08-06 15:33:08 +02:00
dee2e77552 fix: modifier assigned vertex groups 2020-08-06 15:26:55 +02:00
7953a2a177 feat: Update CHANGELOG.md 2020-07-31 09:01:01 +00:00
3f0082927e feat: lock movement support 2020-07-29 11:10:35 +02:00
07ffe05a84 feat: enable autoupdater back 2020-07-28 17:26:14 +02:00
25c19471bb feat: update submodule 2020-05-15 18:23:51 +02:00
9e4e646bb1 Merge branch 'develop' into feature/event_driven_updates 2020-05-15 16:19:47 +02:00
f8fa407a45 Merge branch '29-differential-revision' into feature/event_driven_updates 2020-04-13 11:48:20 +02:00
a6e1566f89 Merge branch '40-multi-scene-workflow' of gitlab.com:slumber/multi-user into feature/event_driven_updates 2020-03-05 16:17:00 +01:00
adeb694b2d feat: one apply timer for all 2020-03-05 15:38:20 +01:00
50d14e663e feat: update sumbodules 2020-03-05 10:56:17 +01:00
9b8d69042d feat: update submodule 2020-03-04 22:28:34 +01:00
b2475081b6 feat: id accessor 2020-03-04 18:28:42 +01:00
aef1d8987c Merge branch '61-config-file-prevents-having-the-addon-on-a-shared-network-location' into feature/event_driven_updates 2020-03-04 14:54:42 +01:00
292f76aea5 feat: move diff to observer
feat: logs
2020-02-28 15:39:29 +01:00
28c4ccf1f3 Merge branch 'develop' into feature/event_driven_updates 2020-02-28 14:48:09 +01:00
549b0b3784 fix: submodule version 2020-02-25 17:40:00 +01:00
fc9ab1a7e6 feat: update submodule 2020-02-25 17:38:43 +01:00
44bffc1850 Merge remote-tracking branch 'origin/develop' into feature/event_driven_updates 2020-02-25 17:37:24 +01:00
a141e9bfe7 feat: stash on deps graph update 2020-02-23 14:08:45 +01:00
63 changed files with 4281 additions and 2401 deletions

View File

@ -1,7 +1,10 @@
stages:
- test
- build
- deploy
include:
- local: .gitlab/ci/test.gitlab-ci.yml
- local: .gitlab/ci/build.gitlab-ci.yml
- local: .gitlab/ci/build.gitlab-ci.yml
- local: .gitlab/ci/deploy.gitlab-ci.yml

View File

@ -3,10 +3,11 @@ build:
image: debian:stable-slim
script:
- rm -rf tests .git .gitignore script
artifacts:
name: multi_user
paths:
- multi_user
only:
refs:
- master
- develop

View File

@ -0,0 +1,23 @@
deploy:
stage: deploy
image: slumber/docker-python
variables:
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: "/certs"
services:
- docker:19.03.12-dind
script:
- RP_VERSION="$(python scripts/get_replication_version.py)"
- VERSION="$(python scripts/get_addon_version.py)"
- echo "Building docker image with replication ${RP_VERSION}"
- docker build --build-arg replication_version=${RP_VERSION} --build-arg version={VERSION} -t registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} ./scripts/docker_server
- echo "Pushing to gitlab registry ${VERSION}"
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker push registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION}
only:
refs:
- master
- develop

View File

@ -1,14 +1,5 @@
test:
stage: test
image: python:3.7
image: slumber/blender-addon-testing:latest
script:
- git submodule init
- git submodule update
- apt-get update
# install blender to get all required dependencies
# TODO: install only dependencies
- apt install -f -y blender
- python -m pip install blender-addon-tester
- python scripts/test_addon.py
- python3 scripts/test_addon.py

View File

@ -37,7 +37,7 @@ All notable changes to this project will be documented in this file.
- Serialization is now based on marshal (2x performance improvements).
- Let pip chose python dependencies install path.
## [0.0.3] - Upcoming
## [0.0.3] - 2020-07-29
### Added
@ -60,8 +60,68 @@ All notable changes to this project will be documented in this file.
- user localization
- repository init
### Removed
- Unused strict right management strategy
- Legacy config management system
- Legacy config management system
## [0.1.0] - 2020-10-05
### Added
- Dependency graph driven updates [experimental]
- Edit Mode updates
- Late join mechanism
- Sync Axis lock replication
- Sync collection offset
- Sync camera orthographic scale
- Sync custom fonts
- Sync sound files
- Logging configuration (file output and level)
- Object visibility type replication
- Optional sync for active camera
- Curve->Mesh conversion
- Mesh->gpencil conversion
### Changed
- Auto updater now handle installation from branches
- Use uuid for collection loading
- Moved session instance to replication package
### Fixed
- Prevent unsupported data types to crash the session
- Modifier vertex group assignation
- World sync
- Snapshot UUID error
- The world is not synchronized
## [0.1.1] - 2020-10-16
### Added
- Session status widget
- Affect dependencies during change owner
- Dedicated server management scripts (@brybalicious)
### Changed
- Refactored presence.py
- Reset button UI icon
- Documentation `How to contribute` improvements (@brybalicious)
- Documentation `Hosting guide` improvements (@brybalicious)
- Show flags are now available from the viewport overlay
### Fixed
- Render sync race condition (causing scene errors)
- Binary differentials
- Hybrid session crashes between Linux/Windows
- Materials node default output value
- Right selection
- Client node rights changed to COMMON after disconnecting from the server
- Collection instances selection draw
- Packed image save error
- Material replication
- UI spelling errors (@brybalicious)

View File

@ -25,27 +25,32 @@ See the [documentation](https://multi-user.readthedocs.io/en/latest/) for detail
Currently, not all data-blocks are supported for replication over the wire. The following list summarizes the status for each one.
| Name | Status | Comment |
| ----------- | :----: | :-----------------------------------------------------------: |
| action | | Not stable |
| armature | ❗ | Not stable |
| camera | ✔️ | |
| collection | ✔️ | |
| curve | ✔️ | Nurbs surface don't load correctly |
| gpencil | ✔️ | |
| image | | Not stable yet |
| mesh | ✔️ | |
| material | ✔️ | |
| metaball | ✔️ | |
| object | ✔️ | |
| scene | ✔️ | |
| world | ✔️ | |
| lightprobes | ✔️ | |
| particles | | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24) |
| speakers | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/65) |
| vse | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| libraries | | Partial |
| Name | Status | Comment |
| ----------- | :----: | :--------------------------------------------------------------------------: |
| action | ✔️ | |
| armature | ❗ | Not stable |
| camera | ✔️ | |
| collection | ✔️ | |
| curve | | Nurbs not supported |
| gpencil | ✔️ | [Airbrush not supported](https://gitlab.com/slumber/multi-user/-/issues/123) |
| image | ✔️ | |
| mesh | ✔️ | |
| material | ✔️ | |
| metaball | ✔️ | |
| object | ✔️ | |
| texts | ✔️ | |
| scene | ✔️ | |
| world | ✔️ | |
| lightprobes | ✔️ | |
| compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
| nla | ❌ | |
| volumes | | |
| particles | ❌ | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24) |
| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
| vse | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| libraries | ❗ | Partial |
### Performance issues
@ -57,14 +62,16 @@ I'm working on it.
| Dependencies | Version | Needed |
| ------------ | :-----: | -----: |
| ZeroMQ | latest | yes |
| JsonDiff | latest | yes |
| Replication | latest | yes |
## Contributing
See [contributing section](https://multi-user.readthedocs.io/en/latest/ways_to_contribute.html) of the documentation.
Feel free to [join the discord server](https://discord.gg/aBPvGws) to chat, seek help and contribute.
## Licensing
See [license](LICENSE)

View File

@ -22,7 +22,7 @@ copyright = '2020, Swann Martinez'
author = 'Swann Martinez'
# The full version, including alpha/beta/rc tags
release = '0.0.2'
release = '0.1.0'
# -- General configuration ---------------------------------------------------

Binary file not shown.

Before

Width:  |  Height:  |  Size: 21 KiB

After

Width:  |  Height:  |  Size: 8.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.7 KiB

After

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 22 KiB

After

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.1 KiB

After

Width:  |  Height:  |  Size: 559 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 70 KiB

View File

@ -161,6 +161,19 @@ The collaboration quality directly depends on the communication quality. This sec
various tools made in an effort to ease the communication between the different session users.
Feel free to suggest any idea for communication tools `here <https://gitlab.com/slumber/multi-user/-/issues/75>`_ .
---------------------------
Change replication behavior
---------------------------
During a session, the multi-user will replicate your modifications to other instances.
In order to avoid annoying other users when you are experimenting, some of those modifications can be ignored via
various flags present at the top of the panel (see red area in the image below). Those flags are explained in the :ref:`replication` section.
.. figure:: img/quickstart_replication.png
:align: center
Session replication flags
--------------------
Monitor online users
--------------------
@ -238,10 +251,20 @@ it draws user-related information in your viewport such as:
The presence overlay panel (see image above) allows you to enable/disable
various drawn parts via the following flags:
- **Show session status**: display the session status in the viewport
.. figure:: img/quickstart_status.png
:align: center
- **Text scale**: session status text size
- **Vertical/Horizontal position**: session position in the viewport
- **Show selected objects**: display other users current selection
- **Show users**: display users current viewpoint
- **Show different scenes**: display users working on other scenes
-----------
Manage data
-----------
@ -299,37 +322,105 @@ Here is a quick list of available actions:
.. _advanced:
Advanced configuration
======================
Advanced settings
=================
This section contains optional settings to configure the session behavior.
.. figure:: img/quickstart_advanced.png
:align: center
Repository panel
Advanced configuration panel
.. rubric:: Network
-------
Network
-------
.. figure:: img/quickstart_advanced_network.png
:align: center
Advanced network settings
**IPC Port** is the port used for Inter Process Communication. This port is used
by the multi-users subprocesses to communicate with each others. If different instances
of the multi-user are using the same IPC port it will create conflict !
You only need to modify it if you need to launch multiple clients from the same
computer(or if you try to host and join on the same computer). You should just enter a different
**IPC port** for each blender instance.
.. note::
You only need to modify it if you need to launch multiple clients from the same
computer(or if you try to host and join on the same computer). You should just enter a different
**IPC port** for each blender instance.
**Timeout (in milliseconds)** is the maximum ping authorized before auto-disconnecting.
You should only increase it if you have a bad connection.
.. rubric:: Replication
.. _replication:
-----------
Replication
-----------
.. figure:: img/quickstart_advanced_replication.png
:align: center
Advanced replication settings
**Synchronize render settings** (only host) enables replication of EEVEE and CYCLES render settings to match renders between clients.
**Synchronize active camera** sync the scene active camera.
**Edit Mode Updates** enables objects to be updated while you are in Edit Mode.
.. warning:: Edit Mode Updates kills performance with complex objects (heavy meshes, gpencil, etc...).
**Update method** allows you to change how replication updates are triggered. Until now, two update methods are implemented:
- **Default**: Use external threads to monitor datablocks changes, slower and less accurate.
- **Depsgraph ⚠️**: Use the blender dependency graph to trigger updates. Faster but experimental and unstable !
**Properties frequency grid** allows you to set a custom replication frequency for each type of data-block:
- **Refresh**: pushed data update rate (in second)
- **Apply**: pulled data update rate (in second)
.. note:: Per-data type settings will soon be revamped for simplification purposes
-----
Cache
-----
The multi-user add-on allows you to replicate external blend dependencies such as images, movies and sounds.
On each client, those files are stored into the cache folder.
.. figure:: img/quickstart_advanced_cache.png
:align: center
Advanced cache settings
**cache_directory** allows you to choose where cached files (images, sound, movies) will be saved.
**Clear memory filecache** will save memory space at runtime by removing the file content from memory as soon as it has been written to the disk.
**Clear cache** will remove all files from the cache folder.
.. warning:: Clearing the cache could break your scene's images/movies/sounds if they are used in the blend file !
---
Log
---
.. figure:: img/quickstart_advanced_logging.png
:align: center
Advanced log settings
**log level** allows you to set the logging level of detail. Here is the description of each value:
+-----------+-----------------------------------------------+
| Log level | Description |
+===========+===============================================+
| ERROR | Shows only critical error |
+-----------+-----------------------------------------------+
| WARNING | Shows only errors (all kind) |
+-----------+-----------------------------------------------+
| INFO | Shows only status related messages and errors |
+-----------+-----------------------------------------------+
| DEBUG | Shows every possible information. |
+-----------+-----------------------------------------------+

View File

@ -144,7 +144,7 @@ Let's check the connection status. Right click on the tray icon and click on **S
Network status.
The network status must be **OK** for each user(like in the picture above) otherwise it means that you are not connected to the network.
If you see something like **ACCESS_DENIED**, it means that you were not authorized to join the network. Please check the :ref:`network-authorization` section.
If you see something like **ACCESS_DENIED**, it means that you were not authorized to join the network. Please check the section :ref:`network-authorization`
This is it for the ZeroTier network setup. Now everything should be setup to use the multi-user add-on over internet ! You can now follow the :ref:`quickstart` guide to start using the multi-user add-on !
@ -171,26 +171,28 @@ From the dedicated server
run it at home for LAN but for internet hosting you need to follow the :ref:`port-forwarding` setup first.
The dedicated server allows you to host a session with simplicity from any location.
It was developed to improve intaernet hosting performance.
It was developed to improve internet hosting performance.
The dedicated server can be run in tow ways:
The dedicated server can be run in two ways:
- :ref:`cmd-line`
- :ref:`docker`
.. Note:: There are shell scripts to conveniently start a dedicated server via either of these approaches available in the gitlab repository. See section: :ref:`serverstartscripts`
.. _cmd-line:
Using a regular command line
----------------------------
You can run the dedicated server on any platform by following those steps:
You can run the dedicated server on any platform by following these steps:
1. Firstly, download and install python 3 (3.6 or above).
2. Install the replication library:
2. Install the latest version of the replication library:
.. code-block:: bash
python -m pip install replication
python -m pip install replication==0.0.21a15
4. Launch the server with:
@ -199,17 +201,20 @@ You can run the dedicated server on any platform by following those steps:
replication.serve
.. hint::
You can also specify a custom **port** (-p), **timeout** (-t) and **admin password** (-pwd) with the following optionnal argument
You can also specify a custom **port** (-p), **timeout** (-t), **admin password** (-pwd), **log level (ERROR, WARNING, INFO or DEBUG)** (-l) and **log file** (-lf) with the following optional arguments
.. code-block:: bash
replication.serve -p 5555 -pwd toto -t 1000
replication.serve -p 5555 -pwd admin -t 1000 -l INFO -lf server.log
Here, for example, a server is instantiated on port 5555, with password 'admin', a 1 second timeout, and logging enabled.
As soon as the dedicated server is running, you can connect to it from blender by following :ref:`how-to-join`.
As soon as the dedicated server is running, you can connect to it from blender (follow :ref:`how-to-join`).
.. hint::
Some commands are available to manage the session. Check :ref:`dedicated-management` to learn more.
Some commands are available to enable an administrator to manage the session. Check :ref:`dedicated-management` to learn more.
.. _docker:
@ -217,22 +222,56 @@ As soon as the dedicated server is running, you can connect to it from blender (
Using a pre-configured image on docker engine
---------------------------------------------
Launching the dedicated server from a docker server is simple as:
Launching the dedicated server from a docker server is simple as running:
.. code-block:: bash
docker run -d \
-p 5555-5560:5555-5560 \
-e port=5555 \
-e log_level=DEBUG \
-e password=admin \
-e timeout=1000 \
registry.gitlab.com/slumber/multi-user/multi-user-server:0.0.3
registry.gitlab.com/slumber/multi-user/multi-user-server:0.1.0
As soon as the dedicated server is running, you can connect to it from blender.
You can check the :ref:`how-to-join` section.
As soon as the dedicated server is running, you can connect to it from blender by following :ref:`how-to-join`.
You can check your container is running, and find its ID with:
.. code-block:: bash
docker ps
Logs for the server running in the docker container can be accessed by outputting the following to a log file:
.. code-block:: bash
docker logs your-container-id >& dockerserver.log
.. Note:: If using WSL2 on Windows 10 (Windows Subsystem for Linux), it is preferable to run a dedicated server via regular command line approach (or the associated startup script) from within Windows - docker desktop for windows 10 usually uses the WSL2 backend where it is available.
.. _serverstartscripts:
Server startup scripts
----------------------
Convenient scripts are available in the Gitlab repository: https://gitlab.com/slumber/multi-user/scripts/startup_scripts/
Simply run the relevant script in a shell on the host machine to start a server with one line of code via replication directly or via a docker container. Choose between the two methods:
.. code-block:: bash
./start-server.sh
or
.. code-block:: bash
./run-dockerfile.sh
.. hint::
Some commands are available to manage the session. Check :ref:`dedicated-management` to learn more.
Once your server is up and running, some commands are available to manage the session :ref:`dedicated-management`
.. _dedicated-management:

View File

@ -21,11 +21,11 @@ In order to help with the testing, you have several possibilities:
- Test `development branch <https://gitlab.com/slumber/multi-user/-/branches>`_
--------------------------
Filling an issue on Gitlab
Filing an issue on Gitlab
--------------------------
The `gitlab issue tracker <https://gitlab.com/slumber/multi-user/issues>`_ is used for bug reports and enhancement suggestions.
You will need a Gitlab account to be able to open a new issue there and click on "New issue" button.
You will need a Gitlab account to be able to open a new issue there and click on "New issue" button in the main multi-user project.
Here is some useful information you should provide in a bug report:
@ -35,8 +35,75 @@ Here are some useful information you should provide in a bug report:
Contributing code
=================
1. Fork it (https://gitlab.com/yourname/yourproject/fork)
2. Create your feature branch (git checkout -b feature/fooBar)
3. Commit your changes (git commit -am 'Add some fooBar')
4. Push to the branch (git push origin feature/fooBar)
5. Create a new Pull Request
In general, this project follows the `Gitflow Workflow <https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow>`_. It may help to understand that there are three different repositories - the upstream (main multi-user project repository, designated in git by 'upstream'), remote (forked repository, designated in git by 'origin'), and the local repository on your machine.
The following example suggests how to contribute a feature.
1. Fork the project into a new repository:
https://gitlab.com/yourname/multi-user
2. Clone the new repository locally:
.. code-block:: bash
git clone https://gitlab.com/yourname/multi-user.git
3. Keep your fork in sync with the main repository by setting up the upstream pointer once. cd into your git repo and then run:
.. code-block:: bash
git remote add upstream https://gitlab.com/slumber/multi-user.git
4. Now, locally check out the develop branch, upon which to base your new feature branch:
.. code-block:: bash
git checkout develop
5. Fetch any changes from the main upstream repository into your fork (especially if some time has passed since forking):
.. code-block:: bash
git fetch upstream
'Fetch' downloads objects and refs from the repository, but doesn't apply them to the branch we are working on. We want to apply the updates to the branch we will work from, which we checked out in step 4.
6. Let's merge any recent changes from the remote upstream (original repository's) 'develop' branch into our local 'develop' branch:
.. code-block:: bash
git merge upstream/develop
7. Update your forked repository's remote 'develop' branch with the fetched changes, just to keep things tidy. Make sure you haven't committed any local changes in the interim:
.. code-block:: bash
git push origin develop
8. Locally create your own new feature branch from the develop branch, using the syntax:
.. code-block:: bash
git checkout -b feature/yourfeaturename
...where 'feature/' designates a feature branch, and 'yourfeaturename' is a name of your choosing
9. Add and commit your changes, including a commit message:
.. code-block:: bash
git commit -am 'Add fooBar'
10. Push committed changes to the remote copy of your new feature branch which will be created in this step:
.. code-block:: bash
git push -u origin feature/yourfeaturename
If it's been some time since performing steps 4 through 7, make sure to checkout 'develop' again and pull the latest changes from upstream before checking out and creating feature/yourfeaturename and pushing changes. Alternatively, checkout 'feature/yourfeaturename' and simply run:
.. code-block:: bash
git rebase upstream/develop
and your staged commits will be merged along with the changes. More information on `rebasing here <https://git-scm.com/book/en/v2/Git-Branching-Rebasing>`_
.. Hint:: -u option sets up your locally created new branch to follow a remote branch which is now created with the same name on your remote repository.
11. Finally, create a new Pull/Merge Request on Gitlab to merge the remote version of this new branch with committed updates, back into the upstream develop branch, finalising the integration of the new feature.
12. Thanks for contributing!
.. Note:: For hotfixes, replace 'feature/' with 'hotfix/' and base the new branch off the parent 'master' branch instead of 'develop' branch. Make sure to checkout 'master' before running step 8
.. Note:: Let's follow the Atlassian `Gitflow Workflow <https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow>`_, except for one main difference - submitting a pull request rather than merging by ourselves.
.. Note:: See `here <https://philna.sh/blog/2018/08/21/git-commands-to-keep-a-fork-up-to-date/>`_ or `here <https://stefanbauer.me/articles/how-to-keep-your-git-fork-up-to-date>`_ for instructions on how to keep a fork up to date.

View File

@ -19,7 +19,7 @@
bl_info = {
"name": "Multi-User",
"author": "Swann Martinez",
"version": (0, 0, 3),
"version": (0, 2, 0),
"description": "Enable real-time collaborative workflow inside blender",
"blender": (2, 82, 0),
"location": "3D View > Sidebar > Multi-User tab",
@ -40,38 +40,41 @@ import sys
import bpy
from bpy.app.handlers import persistent
from . import environment, utils
from . import environment
# TODO: remove dependency as soon as replication will be installed as a module
DEPENDENCIES = {
("replication", '0.0.20'),
("deepdiff", '5.0.1'),
("replication", '0.2.0'),
}
module_error_msg = "Insufficient rights to install the multi-user \
dependencies, aunch blender with administrator rights."
def register():
# Setup logging policy
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
logging.basicConfig(
format='%(asctime)s CLIENT %(levelname)-8s %(message)s',
datefmt='%H:%M:%S',
level=logging.INFO)
try:
environment.setup(DEPENDENCIES, bpy.app.binary_path_python)
except ModuleNotFoundError:
logging.fatal("Fail to install multi-user dependencies, try to execute blender with admin rights.")
return
from . import presence
from . import operators
from . import ui
from . import preferences
from . import addon_updater_ops
preferences.register()
addon_updater_ops.register(bl_info)
presence.register()
operators.register()
ui.register()
from . import presence
from . import operators
from . import ui
from . import preferences
from . import addon_updater_ops
preferences.register()
addon_updater_ops.register(bl_info)
presence.register()
operators.register()
ui.register()
except ModuleNotFoundError as e:
raise Exception(module_error_msg)
logging.error(module_error_msg)
bpy.types.WindowManager.session = bpy.props.PointerProperty(
type=preferences.SessionProps)
bpy.types.ID.uuid = bpy.props.StringProperty(

View File

@ -23,7 +23,11 @@ https://github.com/CGCookie/blender-addon-updater
"""
__version__ = "1.0.8"
import errno
import traceback
import platform
import ssl
import urllib.request
import urllib
@ -98,6 +102,7 @@ class Singleton_updater(object):
# runtime variables, initial conditions
self._verbose = False
self._use_print_traces = True
self._fake_install = False
self._async_checking = False # only true when async daemon started
self._update_ready = None
@ -133,6 +138,13 @@ class Singleton_updater(object):
self._select_link = select_link_function
# called from except blocks, to print the exception details,
# according to the use_print_traces option
def print_trace():
if self._use_print_traces:
traceback.print_exc()
# -------------------------------------------------------------------------
# Getters and setters
# -------------------------------------------------------------------------
@ -166,7 +178,7 @@ class Singleton_updater(object):
try:
self._auto_reload_post_update = bool(value)
except:
raise ValueError("Must be a boolean value")
raise ValueError("auto_reload_post_update must be a boolean value")
@property
def backup_current(self):
@ -351,7 +363,7 @@ class Singleton_updater(object):
try:
self._repo = str(value)
except:
raise ValueError("User must be a string")
raise ValueError("repo must be a string value")
@property
def select_link(self):
@ -377,6 +389,7 @@ class Singleton_updater(object):
os.makedirs(value)
except:
if self._verbose: print("Error trying to staging path")
self.print_trace()
return
self._updater_path = value
@ -446,6 +459,16 @@ class Singleton_updater(object):
except:
raise ValueError("Verbose must be a boolean value")
@property
def use_print_traces(self):
return self._use_print_traces
@use_print_traces.setter
def use_print_traces(self, value):
try:
self._use_print_traces = bool(value)
except:
raise ValueError("use_print_traces must be a boolean value")
@property
def version_max_update(self):
return self._version_max_update
@ -637,6 +660,9 @@ class Singleton_updater(object):
else:
if self._verbose: print("Tokens not setup for engine yet")
# Always set user agent
request.add_header('User-Agent', "Python/"+str(platform.python_version()))
# run the request
try:
if context:
@ -652,6 +678,7 @@ class Singleton_updater(object):
self._error = "HTTP error"
self._error_msg = str(e.code)
print(self._error, self._error_msg)
self.print_trace()
self._update_ready = None
except urllib.error.URLError as e:
reason = str(e.reason)
@ -663,6 +690,7 @@ class Singleton_updater(object):
self._error = "URL error, check internet connection"
self._error_msg = reason
print(self._error, self._error_msg)
self.print_trace()
self._update_ready = None
return None
else:
@ -684,6 +712,7 @@ class Singleton_updater(object):
self._error_msg = str(e.reason)
self._update_ready = None
print(self._error, self._error_msg)
self.print_trace()
return None
else:
return None
@ -700,15 +729,17 @@ class Singleton_updater(object):
if self._verbose: print("Preparing staging folder for download:\n",local)
if os.path.isdir(local) == True:
try:
shutil.rmtree(local)
shutil.rmtree(local, ignore_errors=True)
os.makedirs(local)
except:
error = "failed to remove existing staging directory"
self.print_trace()
else:
try:
os.makedirs(local)
except:
error = "failed to create staging directory"
self.print_trace()
if error != None:
if self._verbose: print("Error: Aborting update, "+error)
@ -722,28 +753,32 @@ class Singleton_updater(object):
self._source_zip = os.path.join(local,"source.zip")
if self._verbose: print(f"Starting download update zip to {self._source_zip}")
if self._verbose: print("Starting download update zip")
try:
import urllib3
http = urllib3.PoolManager()
r = http.request('GET', url, preload_content=False)
chunk_size = 1024*8
with open(self._source_zip, 'wb') as out:
while True:
data = r.read(chunk_size)
if not data:
break
out.write(data)
request = urllib.request.Request(url)
context = ssl._create_unverified_context()
r.release_conn()
# setup private token if appropriate
if self._engine.token != None:
if self._engine.name == "gitlab":
request.add_header('PRIVATE-TOKEN',self._engine.token)
else:
if self._verbose: print("Tokens not setup for selected engine yet")
# Always set user agent
request.add_header('User-Agent', "Python/"+str(platform.python_version()))
self.urlretrieve(urllib.request.urlopen(request,context=context), self._source_zip)
# add additional checks on file size being non-zero
if self._verbose: print("Successfully downloaded update zip")
return False
return True
except Exception as e:
self._error = "Error retrieving download, bad link?"
self._error_msg = "Error: {}".format(e)
if self._verbose:
print("Error retrieving download, bad link?")
print("Error: {}".format(e))
self.print_trace()
return False
@ -758,16 +793,18 @@ class Singleton_updater(object):
if os.path.isdir(local):
try:
shutil.rmtree(local)
shutil.rmtree(local, ignore_errors=True)
except:
if self._verbose:print("Failed to removed previous backup folder, contininuing")
self.print_trace()
# remove the temp folder; shouldn't exist but could if previously interrupted
if os.path.isdir(tempdest):
try:
shutil.rmtree(tempdest)
shutil.rmtree(tempdest, ignore_errors=True)
except:
if self._verbose:print("Failed to remove existing temp folder, contininuing")
self.print_trace()
# make the full addon copy, which temporarily places outside the addon folder
if self._backup_ignore_patterns != None:
shutil.copytree(
@ -795,7 +832,7 @@ class Singleton_updater(object):
# make the copy
shutil.move(backuploc,tempdest)
shutil.rmtree(self._addon_root)
shutil.rmtree(self._addon_root, ignore_errors=True)
os.rename(tempdest,self._addon_root)
self._json["backup_date"] = ""
@ -816,7 +853,7 @@ class Singleton_updater(object):
# clear the existing source folder in case previous files remain
outdir = os.path.join(self._updater_path, "source")
try:
shutil.rmtree(outdir)
shutil.rmtree(outdir, ignore_errors=True)
if self._verbose:
print("Source folder cleared")
except:
@ -829,6 +866,7 @@ class Singleton_updater(object):
except Exception as err:
print("Error occurred while making extract dir:")
print(str(err))
self.print_trace()
self._error = "Install failed"
self._error_msg = "Failed to make extract directory"
return -1
@ -870,6 +908,7 @@ class Singleton_updater(object):
if exc.errno != errno.EEXIST:
self._error = "Install failed"
self._error_msg = "Could not create folder from zip"
self.print_trace()
return -1
else:
with open(os.path.join(outdir, subpath), "wb") as outfile:
@ -963,12 +1002,13 @@ class Singleton_updater(object):
print("Clean removing file {}".format(os.path.join(base,f)))
for f in folders:
if os.path.join(base,f)==self._updater_path: continue
shutil.rmtree(os.path.join(base,f))
shutil.rmtree(os.path.join(base,f), ignore_errors=True)
print("Clean removing folder and contents {}".format(os.path.join(base,f)))
except Exception as err:
error = "failed to create clean existing addon folder"
print(error, str(err))
self.print_trace()
# Walk through the base addon folder for rules on pre-removing
# but avoid removing/altering backup and updater file
@ -984,6 +1024,7 @@ class Singleton_updater(object):
if self._verbose: print("Pre-removed file "+file)
except OSError:
print("Failed to pre-remove "+file)
self.print_trace()
# Walk through the temp addon sub folder for replacements
# this implements the overwrite rules, which apply after
@ -1007,7 +1048,7 @@ class Singleton_updater(object):
# otherwise, check each file to see if matches an overwrite pattern
replaced=False
for ptrn in self._overwrite_patterns:
if fnmatch.filter([destFile],ptrn):
if fnmatch.filter([file],ptrn):
replaced=True
break
if replaced:
@ -1023,10 +1064,11 @@ class Singleton_updater(object):
# now remove the temp staging folder and downloaded zip
try:
shutil.rmtree(staging_path)
shutil.rmtree(staging_path, ignore_errors=True)
except:
error = "Error: Failed to remove existing staging directory, consider manually removing "+staging_path
if self._verbose: print(error)
self.print_trace()
def reload_addon(self):
@ -1042,9 +1084,16 @@ class Singleton_updater(object):
# not allowed in restricted context, such as register module
# toggle to refresh
bpy.ops.wm.addon_disable(module=self._addon_package)
bpy.ops.wm.addon_refresh()
bpy.ops.wm.addon_enable(module=self._addon_package)
if "addon_disable" in dir(bpy.ops.wm): # 2.7
bpy.ops.wm.addon_disable(module=self._addon_package)
bpy.ops.wm.addon_refresh()
bpy.ops.wm.addon_enable(module=self._addon_package)
print("2.7 reload complete")
else: # 2.8
bpy.ops.preferences.addon_disable(module=self._addon_package)
bpy.ops.preferences.addon_refresh()
bpy.ops.preferences.addon_enable(module=self._addon_package)
print("2.8 reload complete")
# -------------------------------------------------------------------------
@ -1376,26 +1425,26 @@ class Singleton_updater(object):
if "last_check" not in self._json or self._json["last_check"] == "":
return True
else:
now = datetime.now()
last_check = datetime.strptime(self._json["last_check"],
"%Y-%m-%d %H:%M:%S.%f")
next_check = last_check
offset = timedelta(
days=self._check_interval_days + 30*self._check_interval_months,
hours=self._check_interval_hours,
minutes=self._check_interval_minutes
)
delta = (now - offset) - last_check
if delta.total_seconds() > 0:
if self._verbose:
print("{} Updater: Time to check for updates!".format(self._addon))
return True
else:
if self._verbose:
print("{} Updater: Determined it's not yet time to check for updates".format(self._addon))
return False
now = datetime.now()
last_check = datetime.strptime(self._json["last_check"],
"%Y-%m-%d %H:%M:%S.%f")
next_check = last_check
offset = timedelta(
days=self._check_interval_days + 30*self._check_interval_months,
hours=self._check_interval_hours,
minutes=self._check_interval_minutes
)
delta = (now - offset) - last_check
if delta.total_seconds() > 0:
if self._verbose:
print("{} Updater: Time to check for updates!".format(self._addon))
return True
if self._verbose:
print("{} Updater: Determined it's not yet time to check for updates".format(self._addon))
return False
def get_json_path(self):
"""Returns the full path to the JSON state file used by this updater.
@ -1414,6 +1463,7 @@ class Singleton_updater(object):
except Exception as err:
print("Other OS error occurred while trying to rename old JSON")
print(err)
self.print_trace()
return json_path
def set_updater_json(self):
@ -1514,6 +1564,7 @@ class Singleton_updater(object):
except Exception as exception:
print("Checking for update error:")
print(exception)
self.print_trace()
if not self._error:
self._update_ready = False
self._update_version = None
@ -1625,10 +1676,7 @@ class GitlabEngine(object):
return "{}{}{}".format(self.api_url,"/api/v4/projects/",updater.repo)
def form_tags_url(self, updater):
if updater.use_releases:
return "{}{}".format(self.form_repo_url(updater),"/releases")
else:
return "{}{}".format(self.form_repo_url(updater),"/repository/tags")
return "{}{}".format(self.form_repo_url(updater),"/repository/tags")
def form_branch_list_url(self, updater):
# does not validate branch name.
@ -1656,12 +1704,7 @@ class GitlabEngine(object):
def parse_tags(self, response, updater):
if response == None:
return []
# Return asset links from release
if updater.use_releases:
return [{"name": release["name"], "zipball_url": release["assets"]["links"][0]["url"]} for release in response]
else:
return [{"name": tag["name"], "zipball_url": self.get_zip_url(tag["commit"]["id"], updater)} for tag in response]
return [{"name": tag["name"], "zipball_url": self.get_zip_url(tag["commit"]["id"], updater)} for tag in response]
# -----------------------------------------------------------------------------

File diff suppressed because it is too large Load Diff

View File

@ -34,7 +34,10 @@ __all__ = [
'bl_metaball',
'bl_lattice',
'bl_lightprobe',
'bl_speaker'
'bl_speaker',
'bl_font',
'bl_sound',
'bl_file'
] # Order here defines execution order
from . import *

View File

@ -134,6 +134,7 @@ class BlAction(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'ACTION_TWEAK'
def _construct(self, data):

View File

@ -31,6 +31,7 @@ class BlArmature(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 0
bl_automatic_push = True
bl_check_common = False
bl_icon = 'ARMATURE_DATA'
def _construct(self, data):

View File

@ -29,6 +29,7 @@ class BlCamera(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'CAMERA_DATA'
def _construct(self, data):
@ -79,6 +80,7 @@ class BlCamera(BlDatablock):
'aperture_fstop',
'aperture_blades',
'aperture_rotation',
'ortho_scale',
'aperture_ratio',
'display_size',
'show_limits',

View File

@ -21,6 +21,55 @@ import mathutils
from .. import utils
from .bl_datablock import BlDatablock
from .dump_anything import Loader, Dumper
def dump_collection_children(collection):
collection_children = []
for child in collection.children:
if child not in collection_children:
collection_children.append(child.uuid)
return collection_children
def dump_collection_objects(collection):
collection_objects = []
for object in collection.objects:
if object not in collection_objects:
collection_objects.append(object.uuid)
return collection_objects
def load_collection_objects(dumped_objects, collection):
for object in dumped_objects:
object_ref = utils.find_from_attr('uuid', object, bpy.data.objects)
if object_ref is None:
continue
elif object_ref.name not in collection.objects.keys():
collection.objects.link(object_ref)
for object in collection.objects:
if object.uuid not in dumped_objects:
collection.objects.unlink(object)
def load_collection_childrens(dumped_childrens, collection):
for child_collection in dumped_childrens:
collection_ref = utils.find_from_attr(
'uuid',
child_collection,
bpy.data.collections)
if collection_ref is None:
continue
if collection_ref.name not in collection.children.keys():
collection.children.link(collection_ref)
for child_collection in collection.children:
if child_collection.uuid not in dumped_childrens:
collection.children.unlink(child_collection)
class BlCollection(BlDatablock):
@ -30,71 +79,47 @@ class BlCollection(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = True
def _construct(self, data):
if self.is_library:
with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
targetData.collections = [
name for name in sourceData.collections if name == self.data['name']]
instance = bpy.data.collections[self.data['name']]
return instance
instance = bpy.data.collections.new(data["name"])
return instance
def _load_implementation(self, data, target):
# Load other meshes metadata
target.name = data["name"]
loader = Loader()
loader.load(target, data)
# Objects
for object in data["objects"]:
object_ref = bpy.data.objects.get(object)
if object_ref is None:
continue
if object not in target.objects.keys():
target.objects.link(object_ref)
for object in target.objects:
if object.name not in data["objects"]:
target.objects.unlink(object)
load_collection_objects(data['objects'], target)
# Link childrens
for collection in data["children"]:
collection_ref = bpy.data.collections.get(collection)
if collection_ref is None:
continue
if collection_ref.name not in target.children.keys():
target.children.link(collection_ref)
for collection in target.children:
if collection.name not in data["children"]:
target.children.unlink(collection)
load_collection_childrens(data['children'], target)
def _dump_implementation(self, data, instance=None):
assert(instance)
data = {}
data['name'] = instance.name
dumper = Dumper()
dumper.depth = 1
dumper.include_filter = [
"name",
"instance_offset"
]
data = dumper.dump(instance)
# dump objects
collection_objects = []
for object in instance.objects:
if object not in collection_objects:
collection_objects.append(object.name)
data['objects'] = collection_objects
data['objects'] = dump_collection_objects(instance)
# dump children collections
collection_children = []
for child in instance.children:
if child not in collection_children:
collection_children.append(child.name)
data['children'] = collection_children
data['children'] = dump_collection_children(instance)
return data

View File

@ -46,12 +46,105 @@ SPLINE_POINT = [
"radius",
]
CURVE_METADATA = [
'align_x',
'align_y',
'bevel_depth',
'bevel_factor_end',
'bevel_factor_mapping_end',
'bevel_factor_mapping_start',
'bevel_factor_start',
'bevel_object',
'bevel_resolution',
'body',
'body_format',
'dimensions',
'eval_time',
'extrude',
'family',
'fill_mode',
'follow_curve',
'font',
'font_bold',
'font_bold_italic',
'font_italic',
'make_local',
'materials',
'name',
'offset',
'offset_x',
'offset_y',
'overflow',
'original',
'override_create',
'override_library',
'path_duration',
'preview',
'render_resolution_u',
'render_resolution_v',
'resolution_u',
'resolution_v',
'shape_keys',
'shear',
'size',
'small_caps_scale',
'space_character',
'space_line',
'space_word',
'type',
'taper_object',
'texspace_location',
'texspace_size',
'transform',
'twist_mode',
'twist_smooth',
'underline_height',
'underline_position',
'use_auto_texspace',
'use_deform_bounds',
'use_fake_user',
'use_fill_caps',
'use_fill_deform',
'use_map_taper',
'use_path',
'use_path_follow',
'use_radius',
'use_stretch',
]
SPLINE_METADATA = [
'hide',
'material_index',
# 'order_u',
# 'order_v',
# 'point_count_u',
# 'point_count_v',
'points',
'radius_interpolation',
'resolution_u',
'resolution_v',
'tilt_interpolation',
'type',
'use_bezier_u',
'use_bezier_v',
'use_cyclic_u',
'use_cyclic_v',
'use_endpoint_u',
'use_endpoint_v',
'use_smooth',
]
class BlCurve(BlDatablock):
bl_id = "curves"
bl_class = bpy.types.Curve
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'CURVE_DATA'
def _construct(self, data):
@ -62,6 +155,7 @@ class BlCurve(BlDatablock):
loader.load(target, data)
target.splines.clear()
# load splines
for spline in data['splines'].values():
new_spline = target.splines.new(spline['type'])
@ -72,8 +166,12 @@ class BlCurve(BlDatablock):
bezier_points = new_spline.bezier_points
bezier_points.add(spline['bezier_points_count'])
np_load_collection(spline['bezier_points'], bezier_points, SPLINE_BEZIER_POINT)
# Not really working for now...
if new_spline.type == 'POLY':
points = new_spline.points
points.add(spline['points_count'])
np_load_collection(spline['points'], points, SPLINE_POINT)
# Not working for now...
# See https://blender.stackexchange.com/questions/7020/create-nurbs-surface-with-python
if new_spline.type == 'NURBS':
logging.error("NURBS not supported.")
@ -83,11 +181,14 @@ class BlCurve(BlDatablock):
# new_spline.points[point_index], data['splines'][spline]["points"][point_index])
loader.load(new_spline, spline)
def _dump_implementation(self, data, instance=None):
assert(instance)
dumper = Dumper()
# Conflicting attributes
# TODO: remove them with the NURBS support
dumper.include_filter = CURVE_METADATA
dumper.exclude_filter = [
'users',
'order_u',
@ -105,8 +206,13 @@ class BlCurve(BlDatablock):
for index, spline in enumerate(instance.splines):
dumper.depth = 2
dumper.include_filter = SPLINE_METADATA
spline_data = dumper.dump(spline)
# spline_data['points'] = np_dump_collection(spline.points, SPLINE_POINT)
if spline.type == 'POLY':
spline_data['points_count'] = len(spline.points)-1
spline_data['points'] = np_dump_collection(spline.points, SPLINE_POINT)
spline_data['bezier_points_count'] = len(spline.bezier_points)-1
spline_data['bezier_points'] = np_dump_collection(spline.bezier_points, SPLINE_BEZIER_POINT)
data['splines'][index] = spline_data
@ -118,3 +224,17 @@ class BlCurve(BlDatablock):
elif isinstance(instance, T.Curve):
data['type'] = 'CURVE'
return data
def _resolve_deps_implementation(self):
# TODO: resolve material
deps = []
curve = self.instance
if isinstance(curve, T.TextCurve):
deps.extend([
curve.font,
curve.font_bold,
curve.font_bold_italic,
curve.font_italic])
return deps

View File

@ -16,13 +16,16 @@
# ##### END GPL LICENSE BLOCK #####
import logging
from collections.abc import Iterable
import bpy
import mathutils
from replication.constants import DIFF_BINARY, UP
from replication.data import ReplicatedDatablock
from .. import utils
from .dump_anything import Loader, Dumper
from replication.data import ReplicatedDatablock
from replication.constants import (UP, DIFF_BINARY)
from .dump_anything import Dumper, Loader
def has_action(target):
@ -86,6 +89,18 @@ def load_driver(target_datablock, src_driver):
loader.load(new_point, src_driver['keyframe_points'][src_point])
def get_datablock_from_uuid(uuid, default, ignore=[]):
if not uuid:
return default
for category in dir(bpy.data):
root = getattr(bpy.data, category)
if isinstance(root, Iterable) and category not in ignore:
for item in root:
if getattr(item, 'uuid', None) == uuid:
return item
return default
class BlDatablock(ReplicatedDatablock):
"""BlDatablock
@ -95,23 +110,26 @@ class BlDatablock(ReplicatedDatablock):
bl_delay_apply : refresh rate in sec for apply
bl_automatic_push : boolean
bl_icon : type icon (blender icon name)
bl_check_common: enable check even in common rights
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None)
self.preferences = utils.get_preferences()
# TODO: use is_library_indirect
self.is_library = (instance and hasattr(instance, 'library') and
instance.library) or \
(self.data and 'library' in self.data)
(hasattr(self,'data') and self.data and 'library' in self.data)
if instance and hasattr(instance, 'uuid'):
instance.uuid = self.uuid
self.diff_method = DIFF_BINARY
# self.diff_method = DIFF_BINARY
def resolve(self):
def resolve(self, construct = True):
datablock_ref = None
datablock_root = getattr(bpy.data, self.bl_id)
datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root)
@ -120,12 +138,27 @@ class BlDatablock(ReplicatedDatablock):
try:
datablock_ref = datablock_root[self.data['name']]
except Exception:
datablock_ref = self._construct(data=self.data)
if construct:
name = self.data.get('name')
logging.debug(f"Constructing {name}")
datablock_ref = self._construct(data=self.data)
if datablock_ref:
if datablock_ref is not None:
setattr(datablock_ref, 'uuid', self.uuid)
self.instance = datablock_ref
return True
else:
return False
self.instance = datablock_ref
def remove_instance(self):
"""
Remove instance from blender data
"""
assert(self.instance)
datablock_root = getattr(bpy.data, self.bl_id)
datablock_root.remove(self.instance)
def _dump(self, instance=None):
dumper = Dumper()
@ -187,6 +220,7 @@ class BlDatablock(ReplicatedDatablock):
if not self.is_library:
dependencies.extend(self._resolve_deps_implementation())
logging.debug(f"{self.instance.name} dependencies: {dependencies}")
return dependencies
def _resolve_deps_implementation(self):

View File

@ -0,0 +1,143 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import logging
import os
import sys
from pathlib import Path
import bpy
import mathutils
from replication.constants import DIFF_BINARY, UP
from replication.data import ReplicatedDatablock
from .. import utils
from .dump_anything import Dumper, Loader
def get_filepath(filename):
"""
Construct the local filepath
"""
return str(Path(
utils.get_preferences().cache_directory,
filename
))
def ensure_unpacked(datablock):
if datablock.packed_file:
logging.info(f"Unpacking {datablock.name}")
filename = Path(bpy.path.abspath(datablock.filepath)).name
datablock.filepath = get_filepath(filename)
datablock.unpack(method="WRITE_ORIGINAL")
class BlFile(ReplicatedDatablock):
bl_id = 'file'
bl_name = "file"
bl_class = Path
bl_delay_refresh = 0
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'FILE'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.instance = kwargs.get('instance', None)
if self.instance and not self.instance.exists():
raise FileNotFoundError(str(self.instance))
self.preferences = utils.get_preferences()
self.diff_method = DIFF_BINARY
def resolve(self):
if self.data:
self.instance = Path(get_filepath(self.data['name']))
if not self.instance.exists():
logging.debug("File don't exist, loading it.")
self._load(self.data, self.instance)
def push(self, socket, identity=None):
super().push(socket, identity=None)
if self.preferences.clear_memory_filecache:
del self.data['file']
def _dump(self, instance=None):
"""
Read the file and return a dict as:
{
name : filename
extension :
file: file content
}
"""
logging.info(f"Extracting file metadata")
data = {
'name': self.instance.name,
}
logging.info(
f"Reading {self.instance.name} content: {self.instance.stat().st_size} bytes")
try:
file = open(self.instance, "rb")
data['file'] = file.read()
file.close()
except IOError:
logging.warning(f"{self.instance} doesn't exist, skipping")
else:
file.close()
return data
def _load(self, data, target):
"""
Writing the file
"""
# TODO: check for empty data
if target.exists() and not self.diff():
logging.info(f"{data['name']} already on the disk, skipping.")
return
try:
file = open(target, "wb")
file.write(data['file'])
if self.preferences.clear_memory_filecache:
del self.data['file']
except IOError:
logging.warning(f"{target} doesn't exist, skipping")
else:
file.close()
def diff(self):
if self.preferences.clear_memory_filecache:
return False
else:
memory_size = sys.getsizeof(self.data['file'])-33
disk_size = self.instance.stat().st_size
return memory_size == disk_size

View File

@ -0,0 +1,74 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import logging
import os
from pathlib import Path
import bpy
from .bl_datablock import BlDatablock
from .bl_file import get_filepath, ensure_unpacked
from .dump_anything import Dumper, Loader
class BlFont(BlDatablock):
bl_id = "fonts"
bl_class = bpy.types.VectorFont
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'FILE_FONT'
def _construct(self, data):
filename = data.get('filename')
if filename == '<builtin>':
return bpy.data.fonts.load(filename)
else:
return bpy.data.fonts.load(get_filepath(filename))
def _load(self, data, target):
pass
def _dump(self, instance=None):
if instance.filepath == '<builtin>':
filename = '<builtin>'
else:
filename = Path(instance.filepath).name
if not filename:
raise FileExistsError(instance.filepath)
return {
'filename': filename,
'name': instance.name
}
def diff(self):
return False
def _resolve_deps_implementation(self):
deps = []
if self.instance.filepath and self.instance.filepath != '<builtin>':
ensure_unpacked(self.instance)
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
return deps

View File

@ -218,6 +218,7 @@ class BlGpencil(BlDatablock):
bl_delay_refresh = 2
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'GREASEPENCIL'
def _construct(self, data):

View File

@ -16,90 +16,108 @@
# ##### END GPL LICENSE BLOCK #####
import logging
import os
from pathlib import Path
import bpy
import mathutils
import os
import logging
from .. import utils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from .dump_anything import Dumper, Loader
from .bl_file import get_filepath, ensure_unpacked
def dump_image(image):
pixels = None
if image.source == "GENERATED" or image.packed_file is not None:
prefs = utils.get_preferences()
img_name = f"{image.name}.png"
# Cache the image on the disk
image.filepath_raw = os.path.join(prefs.cache_directory, img_name)
os.makedirs(prefs.cache_directory, exist_ok=True)
image.file_format = "PNG"
image.save()
format_to_ext = {
'BMP': 'bmp',
'IRIS': 'sgi',
'PNG': 'png',
'JPEG': 'jpg',
'JPEG2000': 'jp2',
'TARGA': 'tga',
'TARGA_RAW': 'tga',
'CINEON': 'cin',
'DPX': 'dpx',
'OPEN_EXR_MULTILAYER': 'exr',
'OPEN_EXR': 'exr',
'HDR': 'hdr',
'TIFF': 'tiff',
'AVI_JPEG': 'avi',
'AVI_RAW': 'avi',
'FFMPEG': 'mpeg',
}
if image.source == "FILE":
image_path = bpy.path.abspath(image.filepath_raw)
image_directory = os.path.dirname(image_path)
os.makedirs(image_directory, exist_ok=True)
image.save()
file = open(image_path, "rb")
pixels = file.read()
file.close()
else:
raise ValueError()
return pixels
class BlImage(BlDatablock):
bl_id = "images"
bl_class = bpy.types.Image
bl_delay_refresh = 0
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = False
bl_automatic_push = True
bl_check_common = False
bl_icon = 'IMAGE_DATA'
def _construct(self, data):
return bpy.data.images.new(
name=data['name'],
width=data['size'][0],
height=data['size'][1]
)
name=data['name'],
width=data['size'][0],
height=data['size'][1]
)
def _load(self, data, target):
image = target
prefs = utils.get_preferences()
img_name = f"{image.name}.png"
img_path = os.path.join(prefs.cache_directory,img_name)
os.makedirs(prefs.cache_directory, exist_ok=True)
file = open(img_path, 'wb')
file.write(data["pixels"])
file.close()
image.source = 'FILE'
image.filepath = img_path
image.colorspace_settings.name = data["colorspace_settings"]["name"]
loader = Loader()
loader.load(data, target)
target.source = 'FILE'
target.filepath_raw = get_filepath(data['filename'])
target.colorspace_settings.name = data["colorspace_settings"]["name"]
def _dump(self, instance=None):
assert(instance)
data = {}
data['pixels'] = dump_image(instance)
filename = Path(instance.filepath).name
data = {
"filename": filename
}
dumper = Dumper()
dumper.depth = 2
dumper.include_filter = [
"name",
'size',
'height',
'alpha',
'float_buffer',
'filepath',
'source',
'colorspace_settings']
dumper.include_filter = [
"name",
'size',
'height',
'alpha',
'float_buffer',
'alpha_mode',
'colorspace_settings']
data.update(dumper.dump(instance))
return data
def diff(self):
return False
if self.instance and (self.instance.name != self.data['name']):
return True
else:
return False
def _resolve_deps_implementation(self):
deps = []
if self.instance.filepath:
if self.instance.packed_file:
filename = Path(bpy.path.abspath(self.instance.filepath)).name
self.instance.filepath_raw = get_filepath(filename)
self.instance.save()
# An image can't be unpacked to the modified path
# TODO: make a bug report
self.instance.unpack(method="REMOVE")
elif self.instance.source == "GENERATED":
filename = f"{self.instance.name}.png"
self.instance.filepath = get_filepath(filename)
self.instance.save()
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
return deps

View File

@ -32,6 +32,7 @@ class BlLattice(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'LATTICE_DATA'
def _construct(self, data):

View File

@ -29,6 +29,7 @@ class BlLibrary(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'LIBRARY_DATA_DIRECT'
def _construct(self, data):

View File

@ -29,6 +29,7 @@ class BlLight(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'LIGHT_DATA'
def _construct(self, data):

View File

@ -30,6 +30,7 @@ class BlLightprobe(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'LIGHTPROBE_GRID'
def _construct(self, data):

View File

@ -21,12 +21,12 @@ import mathutils
import logging
import re
from .. import utils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from .bl_datablock import BlDatablock, get_datablock_from_uuid
NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
def load_node(node_data, node_tree):
""" Load a node into a node_tree from a dict
@ -39,6 +39,10 @@ def load_node(node_data, node_tree):
target_node = node_tree.nodes.new(type=node_data["bl_idname"])
loader.load(target_node, node_data)
image_uuid = node_data.get('image_uuid', None)
if image_uuid and not target_node.image:
target_node.image = get_datablock_from_uuid(image_uuid, None)
for input in node_data["inputs"]:
if hasattr(target_node.inputs[input], "default_value"):
@ -48,6 +52,14 @@ def load_node(node_data, node_tree):
logging.error(
f"Material {input} parameter not supported, skipping")
for output in node_data["outputs"]:
if hasattr(target_node.outputs[output], "default_value"):
try:
target_node.outputs[output].default_value = node_data["outputs"][output]["default_value"]
except:
logging.error(
f"Material {output} parameter not supported, skipping")
def load_links(links_data, node_tree):
""" Load node_tree links from a list
@ -59,8 +71,10 @@ def load_links(links_data, node_tree):
"""
for link in links_data:
input_socket = node_tree.nodes[link['to_node']].inputs[int(link['to_socket'])]
output_socket = node_tree.nodes[link['from_node']].outputs[int(link['from_socket'])]
input_socket = node_tree.nodes[link['to_node']
].inputs[int(link['to_socket'])]
output_socket = node_tree.nodes[link['from_node']].outputs[int(
link['from_socket'])]
node_tree.links.new(input_socket, output_socket)
@ -75,8 +89,10 @@ def dump_links(links):
links_data = []
for link in links:
to_socket = NODE_SOCKET_INDEX.search(link.to_socket.path_from_id()).group(1)
from_socket = NODE_SOCKET_INDEX.search(link.from_socket.path_from_id()).group(1)
to_socket = NODE_SOCKET_INDEX.search(
link.to_socket.path_from_id()).group(1)
from_socket = NODE_SOCKET_INDEX.search(
link.from_socket.path_from_id()).group(1)
links_data.append({
'to_node': link.to_node.name,
'to_socket': to_socket,
@ -102,6 +118,7 @@ def dump_node(node):
"show_expanded",
"name_full",
"select",
"bl_label",
"bl_height_min",
"bl_height_max",
"bl_height_default",
@ -118,7 +135,8 @@ def dump_node(node):
"show_preview",
"show_texture",
"outputs",
"width_hidden"
"width_hidden",
"image"
]
dumped_node = node_dumper.dump(node)
@ -132,8 +150,17 @@ def dump_node(node):
input_dumper.include_filter = ["default_value"]
if hasattr(i, 'default_value'):
dumped_node['inputs'][i.name] = input_dumper.dump(
i)
dumped_node['inputs'][i.name] = input_dumper.dump(i)
dumped_node['outputs'] = {}
for i in node.outputs:
output_dumper = Dumper()
output_dumper.depth = 2
output_dumper.include_filter = ["default_value"]
if hasattr(i, 'default_value'):
dumped_node['outputs'][i.name] = output_dumper.dump(i)
if hasattr(node, 'color_ramp'):
ramp_dumper = Dumper()
ramp_dumper.depth = 4
@ -153,16 +180,24 @@ def dump_node(node):
'location'
]
dumped_node['mapping'] = curve_dumper.dump(node.mapping)
if hasattr(node, 'image') and getattr(node, 'image'):
dumped_node['image_uuid'] = node.image.uuid
return dumped_node
def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
has_image = lambda node : (node.type in ['TEX_IMAGE', 'TEX_ENVIRONMENT'] and node.image)
return [node.image for node in node_tree.nodes if has_image(node)]
class BlMaterial(BlDatablock):
bl_id = "materials"
bl_class = bpy.types.Material
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'MATERIAL_DATA'
def _construct(self, data):
@ -170,22 +205,22 @@ class BlMaterial(BlDatablock):
def _load_implementation(self, data, target):
loader = Loader()
target.name = data['name']
if data['is_grease_pencil']:
is_grease_pencil = data.get('is_grease_pencil')
use_nodes = data.get('use_nodes')
loader.load(target, data)
if is_grease_pencil:
if not target.is_grease_pencil:
bpy.data.materials.create_gpencil_data(target)
loader.load(
target.grease_pencil, data['grease_pencil'])
if data["use_nodes"]:
loader.load(target.grease_pencil, data['grease_pencil'])
elif use_nodes:
if target.node_tree is None:
target.use_nodes = True
target.node_tree.nodes.clear()
loader.load(target, data)
# Load nodes
for node in data["node_tree"]["nodes"]:
load_node(data["node_tree"]["nodes"][node], target.node_tree)
@ -199,59 +234,71 @@ class BlMaterial(BlDatablock):
assert(instance)
mat_dumper = Dumper()
mat_dumper.depth = 2
mat_dumper.exclude_filter = [
"is_embed_data",
"is_evaluated",
"name_full",
"bl_description",
"bl_icon",
"bl_idname",
"bl_label",
"preview",
"original",
"uuid",
"users",
"alpha_threshold",
"line_color",
"view_center",
mat_dumper.include_filter = [
'name',
'blend_method',
'shadow_method',
'alpha_threshold',
'show_transparent_back',
'use_backface_culling',
'use_screen_refraction',
'use_sss_translucency',
'refraction_depth',
'preview_render_type',
'use_preview_world',
'pass_index',
'use_nodes',
'diffuse_color',
'specular_color',
'roughness',
'specular_intensity',
'metallic',
'line_color',
'line_priority',
'is_grease_pencil'
]
data = mat_dumper.dump(instance)
if instance.use_nodes:
nodes = {}
for node in instance.node_tree.nodes:
nodes[node.name] = dump_node(node)
data["node_tree"]['nodes'] = nodes
data["node_tree"]["links"] = dump_links(instance.node_tree.links)
if instance.is_grease_pencil:
gp_mat_dumper = Dumper()
gp_mat_dumper.depth = 3
gp_mat_dumper.include_filter = [
'color',
'fill_color',
'mix_color',
'mix_factor',
'mix_stroke_factor',
# 'texture_angle',
# 'texture_scale',
# 'texture_offset',
'pixel_size',
'hide',
'lock',
'ghost',
# 'texture_clamp',
'flip',
'use_overlap_strokes',
'show_stroke',
'show_fill',
'alignment_mode',
'pass_index',
'mode',
'stroke_style',
'color',
'use_overlap_strokes',
'show_fill',
# 'stroke_image',
'fill_style',
'fill_color',
'pass_index',
'alignment_mode',
# 'fill_image',
'texture_opacity',
'mix_factor',
'texture_offset',
'texture_angle',
'texture_scale',
'texture_clamp',
'gradient_type',
'mix_color',
'flip'
# 'fill_image',
]
data['grease_pencil'] = gp_mat_dumper.dump(instance.grease_pencil)
elif instance.use_nodes:
nodes = {}
data["node_tree"] = {}
for node in instance.node_tree.nodes:
nodes[node.name] = dump_node(node)
data["node_tree"]['nodes'] = nodes
data["node_tree"]["links"] = dump_links(instance.node_tree.links)
return data
def _resolve_deps_implementation(self):
@ -259,9 +306,7 @@ class BlMaterial(BlDatablock):
deps = []
if self.instance.use_nodes:
for node in self.instance.node_tree.nodes:
if node.type == 'TEX_IMAGE':
deps.append(node.image)
deps.extend(get_node_tree_dependencies(self.instance.node_tree))
if self.is_library:
deps.append(self.instance.library)

View File

@ -25,8 +25,7 @@ import numpy as np
from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
from replication.constants import DIFF_BINARY
from replication.exception import ContextError
from .bl_datablock import BlDatablock
from .bl_datablock import BlDatablock, get_datablock_from_uuid
VERTICE = ['co']
@ -53,6 +52,7 @@ class BlMesh(BlDatablock):
bl_delay_refresh = 2
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'MESH_DATA'
def _construct(self, data):
@ -70,8 +70,17 @@ class BlMesh(BlDatablock):
# MATERIAL SLOTS
target.materials.clear()
for m in data["material_list"]:
target.materials.append(bpy.data.materials[m])
for mat_uuid, mat_name in data["material_list"]:
mat_ref = None
if mat_uuid is not None:
mat_ref = get_datablock_from_uuid(mat_uuid, None)
else:
mat_ref = bpy.data.materials.get(mat_name, None)
if mat_ref is None:
raise Exception("Material doesn't exist")
target.materials.append(mat_ref)
# CLEAR GEOMETRY
if target.vertices:
@ -89,32 +98,34 @@ class BlMesh(BlDatablock):
np_load_collection(data["polygons"],target.polygons, POLYGON)
# UV Layers
for layer in data['uv_layers']:
if layer not in target.uv_layers:
target.uv_layers.new(name=layer)
if 'uv_layers' in data.keys():
for layer in data['uv_layers']:
if layer not in target.uv_layers:
target.uv_layers.new(name=layer)
np_load_collection_primitives(
target.uv_layers[layer].data,
'uv',
data["uv_layers"][layer]['data'])
np_load_collection_primitives(
target.uv_layers[layer].data,
'uv',
data["uv_layers"][layer]['data'])
# Vertex color
for color_layer in data['vertex_colors']:
if color_layer not in target.vertex_colors:
target.vertex_colors.new(name=color_layer)
if 'vertex_colors' in data.keys():
for color_layer in data['vertex_colors']:
if color_layer not in target.vertex_colors:
target.vertex_colors.new(name=color_layer)
np_load_collection_primitives(
target.vertex_colors[color_layer].data,
'color',
data["vertex_colors"][color_layer]['data'])
np_load_collection_primitives(
target.vertex_colors[color_layer].data,
'color',
data["vertex_colors"][color_layer]['data'])
target.validate()
target.update()
def _dump_implementation(self, data, instance=None):
assert(instance)
if instance.is_editmode:
if instance.is_editmode and not self.preferences.sync_flags.sync_during_editmode:
raise ContextError("Mesh is in edit mode")
mesh = instance
@ -147,22 +158,24 @@ class BlMesh(BlDatablock):
data["loops"] = np_dump_collection(mesh.loops, LOOP)
# UV Layers
data['uv_layers'] = {}
for layer in mesh.uv_layers:
data['uv_layers'][layer.name] = {}
data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive(layer.data, 'uv')
if mesh.uv_layers:
data['uv_layers'] = {}
for layer in mesh.uv_layers:
data['uv_layers'][layer.name] = {}
data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive(layer.data, 'uv')
# Vertex color
data['vertex_colors'] = {}
for color_map in mesh.vertex_colors:
data['vertex_colors'][color_map.name] = {}
data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')
if mesh.vertex_colors:
data['vertex_colors'] = {}
for color_map in mesh.vertex_colors:
data['vertex_colors'][color_map.name] = {}
data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')
# Fix material index
m_list = []
for material in instance.materials:
if material:
m_list.append(material.name)
m_list.append((material.uuid,material.name))
data['material_list'] = m_list

View File

@ -68,6 +68,7 @@ class BlMetaball(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'META_BALL'
def _construct(self, data):

View File

@ -16,14 +16,16 @@
# ##### END GPL LICENSE BLOCK #####
import bpy
import mathutils
import logging
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
import bpy
import mathutils
from replication.exception import ContextError
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from .dump_anything import Dumper, Loader
from replication.exception import ReparentException
def load_pose(target_bone, data):
target_bone.rotation_mode = data['rotation_mode']
@ -31,12 +33,59 @@ def load_pose(target_bone, data):
loader.load(target_bone, data)
def find_data_from_name(name=None):
instance = None
if not name:
pass
elif name in bpy.data.meshes.keys():
instance = bpy.data.meshes[name]
elif name in bpy.data.lights.keys():
instance = bpy.data.lights[name]
elif name in bpy.data.cameras.keys():
instance = bpy.data.cameras[name]
elif name in bpy.data.curves.keys():
instance = bpy.data.curves[name]
elif name in bpy.data.metaballs.keys():
instance = bpy.data.metaballs[name]
elif name in bpy.data.armatures.keys():
instance = bpy.data.armatures[name]
elif name in bpy.data.grease_pencils.keys():
instance = bpy.data.grease_pencils[name]
elif name in bpy.data.curves.keys():
instance = bpy.data.curves[name]
elif name in bpy.data.lattices.keys():
instance = bpy.data.lattices[name]
elif name in bpy.data.speakers.keys():
instance = bpy.data.speakers[name]
elif name in bpy.data.lightprobes.keys():
# Only supported since 2.83
if bpy.app.version[1] >= 83:
instance = bpy.data.lightprobes[name]
else:
logging.warning(
"Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
return instance
def load_data(object, name):
logging.info("loading data")
pass
def _is_editmode(object: bpy.types.Object) -> bool:
child_data = getattr(object, 'data', None)
return (child_data and
hasattr(child_data, 'is_editmode') and
child_data.is_editmode)
class BlObject(BlDatablock):
bl_id = "objects"
bl_class = bpy.types.Object
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'OBJECT_DATA'
def _construct(self, data):
@ -52,45 +101,67 @@ class BlObject(BlDatablock):
return instance
# TODO: refactoring
if "data" not in data:
pass
elif data["data"] in bpy.data.meshes.keys():
instance = bpy.data.meshes[data["data"]]
elif data["data"] in bpy.data.lights.keys():
instance = bpy.data.lights[data["data"]]
elif data["data"] in bpy.data.cameras.keys():
instance = bpy.data.cameras[data["data"]]
elif data["data"] in bpy.data.curves.keys():
instance = bpy.data.curves[data["data"]]
elif data["data"] in bpy.data.metaballs.keys():
instance = bpy.data.metaballs[data["data"]]
elif data["data"] in bpy.data.armatures.keys():
instance = bpy.data.armatures[data["data"]]
elif data["data"] in bpy.data.grease_pencils.keys():
instance = bpy.data.grease_pencils[data["data"]]
elif data["data"] in bpy.data.curves.keys():
instance = bpy.data.curves[data["data"]]
elif data["data"] in bpy.data.lattices.keys():
instance = bpy.data.lattices[data["data"]]
elif data["data"] in bpy.data.speakers.keys():
instance = bpy.data.speakers[data["data"]]
elif data["data"] in bpy.data.lightprobes.keys():
# Only supported since 2.83
if bpy.app.version[1] >= 83:
instance = bpy.data.lightprobes[data["data"]]
else:
logging.warning(
"Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
instance = bpy.data.objects.new(data["name"], instance)
object_name = data.get("name")
data_uuid = data.get("data_uuid")
data_id = data.get("data")
object_data = get_datablock_from_uuid(
data_uuid,
find_data_from_name(data_id),
ignore=['images']) #TODO: use resolve_from_id
instance = bpy.data.objects.new(object_name, object_data)
instance.uuid = self.uuid
return instance
def _load_implementation(self, data, target):
# Load transformation data
loader = Loader()
data_uuid = data.get("data_uuid")
data_id = data.get("data")
if target.type != data['type']:
raise ReparentException()
elif target.data and (target.data.name != data_id):
target.data = get_datablock_from_uuid(data_uuid, find_data_from_name(data_id), ignore=['images'])
# vertex groups
if 'vertex_groups' in data:
target.vertex_groups.clear()
for vg in data['vertex_groups']:
vertex_group=target.vertex_groups.new(name = vg['name'])
point_attr='vertices' if 'vertices' in vg else 'points'
for vert in vg[point_attr]:
vertex_group.add(
[vert['index']], vert['weight'], 'REPLACE')
# SHAPE KEYS
if 'shape_keys' in data:
target.shape_key_clear()
object_data=target.data
# Create keys and load vertices coords
for key_block in data['shape_keys']['key_blocks']:
key_data=data['shape_keys']['key_blocks'][key_block]
target.shape_key_add(name = key_block)
loader.load(
target.data.shape_keys.key_blocks[key_block], key_data)
for vert in key_data['data']:
target.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
# Load relative key after all
for key_block in data['shape_keys']['key_blocks']:
reference = data['shape_keys']['key_blocks'][key_block]['relative_key']
target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference]
# Load transformation data
loader.load(target, data)
loader.load(target.display, data['display'])
# Pose
if 'pose' in data:
if not target.pose:
@ -114,58 +185,25 @@ class BlObject(BlDatablock):
if 'constraints' in bone_data.keys():
loader.load(target_bone, bone_data['constraints'])
load_pose(target_bone, bone_data)
if 'bone_index' in bone_data.keys():
target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']]
# vertex groups
if 'vertex_groups' in data:
target.vertex_groups.clear()
for vg in data['vertex_groups']:
vertex_group = target.vertex_groups.new(name=vg['name'])
point_attr = 'vertices' if 'vertices' in vg else 'points'
for vert in vg[point_attr]:
vertex_group.add(
[vert['index']], vert['weight'], 'REPLACE')
# SHAPE KEYS
if 'shape_keys' in data:
target.shape_key_clear()
object_data = target.data
# Create keys and load vertices coords
for key_block in data['shape_keys']['key_blocks']:
key_data = data['shape_keys']['key_blocks'][key_block]
target.shape_key_add(name=key_block)
loader.load(
target.data.shape_keys.key_blocks[key_block], key_data)
for vert in key_data['data']:
target.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
# Load relative key after all
for key_block in data['shape_keys']['key_blocks']:
reference = data['shape_keys']['key_blocks'][key_block]['relative_key']
target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference]
# TODO: find another way...
if target.type == 'EMPTY':
img_key = data.get('data')
if target.data is None and img_key:
target.data = bpy.data.images.get(img_key, None)
img_uuid = data.get('data_uuid')
if target.data is None and img_uuid:
target.data = get_datablock_from_uuid(img_uuid, None)#bpy.data.images.get(img_key, None)
def _dump_implementation(self, data, instance=None):
assert(instance)
child_data = getattr(instance, 'data', None)
if child_data and hasattr(child_data, 'is_editmode') and child_data.is_editmode:
raise ContextError("Object is in edit-mode.")
if _is_editmode(instance):
if self.preferences.sync_flags.sync_during_editmode:
instance.update_from_editmode()
else:
raise ContextError("Object is in edit-mode.")
dumper = Dumper()
dumper.depth = 1
@ -185,29 +223,48 @@ class BlObject(BlDatablock):
"show_empty_image_perspective",
"show_empty_image_only_axis_aligned",
"use_empty_image_alpha",
"color"
"color",
"instance_collection",
"instance_type",
"location",
"scale",
'lock_location',
'lock_rotation',
'lock_scale',
'hide_render',
'display_type',
'display_bounds_type',
'show_bounds',
'show_name',
'show_axis',
'show_wire',
'show_all_edges',
'show_texture_space',
'show_in_front',
'type',
'rotation_quaternion' if instance.rotation_mode == 'QUATERNION' else 'rotation_euler',
]
data = dumper.dump(instance)
dumper.include_filter = [
'show_shadows',
]
data['display'] = dumper.dump(instance.display)
data['data_uuid'] = getattr(instance.data, 'uuid', None)
if self.is_library:
return data
# MODIFIERS
if hasattr(instance, 'modifiers'):
dumper.include_filter = None
dumper.depth = 2
dumper.depth = 1
data["modifiers"] = {}
for index, modifier in enumerate(instance.modifiers):
data["modifiers"][modifier.name] = dumper.dump(modifier)
# CONSTRAINTS
# OBJECT
if hasattr(instance, 'constraints'):
dumper.depth = 3
data["constraints"] = dumper.dump(instance.constraints)
@ -260,7 +317,8 @@ class BlObject(BlDatablock):
# VERTEx GROUP
if len(instance.vertex_groups) > 0:
points_attr = 'vertices' if isinstance(instance.data, bpy.types.Mesh) else 'points'
points_attr = 'vertices' if isinstance(
instance.data, bpy.types.Mesh) else 'points'
vg_data = []
for vg in instance.vertex_groups:
vg_idx = vg.index
@ -315,7 +373,7 @@ class BlObject(BlDatablock):
def _resolve_deps_implementation(self):
deps = []
# Avoid Empty case
if self.instance.data:
deps.append(self.instance.data)
@ -330,4 +388,3 @@ class BlObject(BlDatablock):
deps.append(self.instance.instance_collection)
return deps

View File

@ -21,8 +21,245 @@ import mathutils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from .bl_collection import dump_collection_children, dump_collection_objects, load_collection_childrens, load_collection_objects
from replication.constants import (DIFF_JSON, MODIFIED)
from deepdiff import DeepDiff
import logging
from ..utils import get_preferences
RENDER_SETTINGS = [
'dither_intensity',
'engine',
'film_transparent',
'filter_size',
'fps',
'fps_base',
'frame_map_new',
'frame_map_old',
'hair_subdiv',
'hair_type',
'line_thickness',
'line_thickness_mode',
'metadata_input',
'motion_blur_shutter',
'pixel_aspect_x',
'pixel_aspect_y',
'preview_pixel_size',
'preview_start_resolution',
'resolution_percentage',
'resolution_x',
'resolution_y',
'sequencer_gl_preview',
'use_bake_clear',
'use_bake_lores_mesh',
'use_bake_multires',
'use_bake_selected_to_active',
'use_bake_user_scale',
'use_border',
'use_compositing',
'use_crop_to_border',
'use_file_extension',
'use_freestyle',
'use_full_sample',
'use_high_quality_normals',
'use_lock_interface',
'use_motion_blur',
'use_multiview',
'use_sequencer',
'use_sequencer_override_scene_strip',
'use_single_layer',
'views_format',
]
EVEE_SETTINGS = [
'gi_diffuse_bounces',
'gi_cubemap_resolution',
'gi_visibility_resolution',
'gi_irradiance_smoothing',
'gi_glossy_clamp',
'gi_filter_quality',
'gi_show_irradiance',
'gi_show_cubemaps',
'gi_irradiance_display_size',
'gi_cubemap_display_size',
'gi_auto_bake',
'taa_samples',
'taa_render_samples',
'use_taa_reprojection',
'sss_samples',
'sss_jitter_threshold',
'use_ssr',
'use_ssr_refraction',
'use_ssr_halfres',
'ssr_quality',
'ssr_max_roughness',
'ssr_thickness',
'ssr_border_fade',
'ssr_firefly_fac',
'volumetric_start',
'volumetric_end',
'volumetric_tile_size',
'volumetric_samples',
'volumetric_sample_distribution',
'use_volumetric_lights',
'volumetric_light_clamp',
'use_volumetric_shadows',
'volumetric_shadow_samples',
'use_gtao',
'use_gtao_bent_normals',
'use_gtao_bounce',
'gtao_factor',
'gtao_quality',
'gtao_distance',
'bokeh_max_size',
'bokeh_threshold',
'use_bloom',
'bloom_threshold',
'bloom_color',
'bloom_knee',
'bloom_radius',
'bloom_clamp',
'bloom_intensity',
'use_motion_blur',
'motion_blur_shutter',
'motion_blur_depth_scale',
'motion_blur_max',
'motion_blur_steps',
'shadow_cube_size',
'shadow_cascade_size',
'use_shadow_high_bitdepth',
'gi_diffuse_bounces',
'gi_cubemap_resolution',
'gi_visibility_resolution',
'gi_irradiance_smoothing',
'gi_glossy_clamp',
'gi_filter_quality',
'gi_show_irradiance',
'gi_show_cubemaps',
'gi_irradiance_display_size',
'gi_cubemap_display_size',
'gi_auto_bake',
'taa_samples',
'taa_render_samples',
'use_taa_reprojection',
'sss_samples',
'sss_jitter_threshold',
'use_ssr',
'use_ssr_refraction',
'use_ssr_halfres',
'ssr_quality',
'ssr_max_roughness',
'ssr_thickness',
'ssr_border_fade',
'ssr_firefly_fac',
'volumetric_start',
'volumetric_end',
'volumetric_tile_size',
'volumetric_samples',
'volumetric_sample_distribution',
'use_volumetric_lights',
'volumetric_light_clamp',
'use_volumetric_shadows',
'volumetric_shadow_samples',
'use_gtao',
'use_gtao_bent_normals',
'use_gtao_bounce',
'gtao_factor',
'gtao_quality',
'gtao_distance',
'bokeh_max_size',
'bokeh_threshold',
'use_bloom',
'bloom_threshold',
'bloom_color',
'bloom_knee',
'bloom_radius',
'bloom_clamp',
'bloom_intensity',
'use_motion_blur',
'motion_blur_shutter',
'motion_blur_depth_scale',
'motion_blur_max',
'motion_blur_steps',
'shadow_cube_size',
'shadow_cascade_size',
'use_shadow_high_bitdepth',
]
CYCLES_SETTINGS = [
'shading_system',
'progressive',
'use_denoising',
'denoiser',
'use_square_samples',
'samples',
'aa_samples',
'diffuse_samples',
'glossy_samples',
'transmission_samples',
'ao_samples',
'mesh_light_samples',
'subsurface_samples',
'volume_samples',
'sampling_pattern',
'use_layer_samples',
'sample_all_lights_direct',
'sample_all_lights_indirect',
'light_sampling_threshold',
'use_adaptive_sampling',
'adaptive_threshold',
'adaptive_min_samples',
'min_light_bounces',
'min_transparent_bounces',
'caustics_reflective',
'caustics_refractive',
'blur_glossy',
'max_bounces',
'diffuse_bounces',
'glossy_bounces',
'transmission_bounces',
'volume_bounces',
'transparent_max_bounces',
'volume_step_rate',
'volume_max_steps',
'dicing_rate',
'max_subdivisions',
'dicing_camera',
'offscreen_dicing_scale',
'film_exposure',
'film_transparent_glass',
'film_transparent_roughness',
'filter_type',
'pixel_filter_type',
'filter_width',
'seed',
'use_animated_seed',
'sample_clamp_direct',
'sample_clamp_indirect',
'tile_order',
'use_progressive_refine',
'bake_type',
'use_camera_cull',
'camera_cull_margin',
'use_distance_cull',
'distance_cull_margin',
'motion_blur_position',
'rolling_shutter_type',
'rolling_shutter_duration',
'texture_limit',
'texture_limit_render',
'ao_bounces',
'ao_bounces_render',
]
VIEW_SETTINGS = [
'look',
'view_transform',
'exposure',
'gamma',
'use_curve_mapping',
'white_level',
'black_level'
]
class BlScene(BlDatablock):
bl_id = "scenes"
@ -30,8 +267,14 @@ class BlScene(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = True
bl_icon = 'SCENE_DATA'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.diff_method = DIFF_JSON
def _construct(self, data):
instance = bpy.data.scenes.new(data["name"])
return instance
@ -42,45 +285,38 @@ class BlScene(BlDatablock):
loader.load(target, data)
# Load master collection
for object in data["collection"]["objects"]:
if object not in target.collection.objects.keys():
target.collection.objects.link(bpy.data.objects[object])
for object in target.collection.objects.keys():
if object not in data["collection"]["objects"]:
target.collection.objects.unlink(bpy.data.objects[object])
# load collections
for collection in data["collection"]["children"]:
if collection not in target.collection.children.keys():
target.collection.children.link(
bpy.data.collections[collection])
for collection in target.collection.children.keys():
if collection not in data["collection"]["children"]:
target.collection.children.unlink(
bpy.data.collections[collection])
load_collection_objects(
data['collection']['objects'], target.collection)
load_collection_childrens(
data['collection']['children'], target.collection)
if 'world' in data.keys():
target.world = bpy.data.worlds[data['world']]
# Annotation
if 'grease_pencil' in data.keys():
target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
if 'eevee' in data.keys():
loader.load(target.eevee, data['eevee'])
if 'cycles' in data.keys():
loader.load(target.eevee, data['cycles'])
if self.preferences.sync_flags.sync_render_settings:
if 'eevee' in data.keys():
loader.load(target.eevee, data['eevee'])
if 'view_settings' in data.keys():
loader.load(target.view_settings, data['view_settings'])
if target.view_settings.use_curve_mapping:
#TODO: change this ugly fix
target.view_settings.curve_mapping.white_level = data['view_settings']['curve_mapping']['white_level']
target.view_settings.curve_mapping.black_level = data['view_settings']['curve_mapping']['black_level']
target.view_settings.curve_mapping.update()
if 'cycles' in data.keys():
loader.load(target.cycles, data['cycles'])
if 'render' in data.keys():
loader.load(target.render, data['render'])
if 'view_settings' in data.keys():
loader.load(target.view_settings, data['view_settings'])
if target.view_settings.use_curve_mapping and \
'curve_mapping' in data['view_settings']:
# TODO: change this ugly fix
target.view_settings.curve_mapping.white_level = data[
'view_settings']['curve_mapping']['white_level']
target.view_settings.curve_mapping.black_level = data[
'view_settings']['curve_mapping']['black_level']
target.view_settings.curve_mapping.update()
def _dump_implementation(self, data, instance=None):
assert(instance)
@ -92,52 +328,55 @@ class BlScene(BlDatablock):
'name',
'world',
'id',
'camera',
'grease_pencil',
'frame_start',
'frame_end',
'frame_step',
]
if self.preferences.sync_flags.sync_active_camera:
scene_dumper.include_filter.append('camera')
data = scene_dumper.dump(instance)
scene_dumper.depth = 3
scene_dumper.include_filter = ['children','objects','name']
data['collection'] = scene_dumper.dump(instance.collection)
scene_dumper.include_filter = ['children', 'objects', 'name']
data['collection'] = {}
data['collection']['children'] = dump_collection_children(
instance.collection)
data['collection']['objects'] = dump_collection_objects(
instance.collection)
scene_dumper.depth = 1
scene_dumper.include_filter = None
pref = get_preferences()
if pref.sync_flags.sync_render_settings:
scene_dumper.exclude_filter = [
'gi_cache_info',
'feature_set',
'debug_use_hair_bvh',
'aa_samples',
'blur_glossy',
'glossy_bounces',
'device',
'max_bounces',
'preview_aa_samples',
'preview_samples',
'sample_clamp_indirect',
'samples',
'volume_bounces'
]
data['eevee'] = scene_dumper.dump(instance.eevee)
data['cycles'] = scene_dumper.dump(instance.cycles)
if self.preferences.sync_flags.sync_render_settings:
scene_dumper.include_filter = RENDER_SETTINGS
data['render'] = scene_dumper.dump(instance.render)
if instance.render.engine == 'BLENDER_EEVEE':
scene_dumper.include_filter = EVEE_SETTINGS
data['eevee'] = scene_dumper.dump(instance.eevee)
elif instance.render.engine == 'CYCLES':
scene_dumper.include_filter = CYCLES_SETTINGS
data['cycles'] = scene_dumper.dump(instance.cycles)
scene_dumper.include_filter = VIEW_SETTINGS
data['view_settings'] = scene_dumper.dump(instance.view_settings)
if instance.view_settings.use_curve_mapping:
data['view_settings']['curve_mapping'] = scene_dumper.dump(instance.view_settings.curve_mapping)
data['view_settings']['curve_mapping'] = scene_dumper.dump(
instance.view_settings.curve_mapping)
scene_dumper.depth = 5
scene_dumper.include_filter = [
'curves',
'points',
'location'
'location',
]
data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(instance.view_settings.curve_mapping.curves)
data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(
instance.view_settings.curve_mapping.curves)
return data
def _resolve_deps_implementation(self):
@ -146,17 +385,31 @@ class BlScene(BlDatablock):
# child collections
for child in self.instance.collection.children:
deps.append(child)
# childs objects
for object in self.instance.objects:
for object in self.instance.collection.objects:
deps.append(object)
# world
if self.instance.world:
deps.append(self.instance.world)
# annotations
if self.instance.grease_pencil:
deps.append(self.instance.grease_pencil)
return deps
def diff(self):
exclude_path = []
if not self.preferences.sync_flags.sync_render_settings:
exclude_path.append("root['eevee']")
exclude_path.append("root['cycles']")
exclude_path.append("root['view_settings']")
exclude_path.append("root['render']")
if not self.preferences.sync_flags.sync_active_camera:
exclude_path.append("root['camera']")
return DeepDiff(self.data, self._dump(instance=self.instance), exclude_paths=exclude_path)

View File

@ -0,0 +1,69 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import logging
import os
from pathlib import Path
import bpy
from .bl_file import get_filepath, ensure_unpacked
from .bl_datablock import BlDatablock
from .dump_anything import Dumper, Loader
class BlSound(BlDatablock):
bl_id = "sounds"
bl_class = bpy.types.Sound
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'SOUND'
def _construct(self, data):
filename = data.get('filename')
return bpy.data.sounds.load(get_filepath(filename))
def _load(self, data, target):
loader = Loader()
loader.load(target, data)
def diff(self):
return False
def _dump(self, instance=None):
filename = Path(instance.filepath).name
if not filename:
raise FileExistsError(instance.filepath)
return {
'filename': filename,
'name': instance.name
}
def _resolve_deps_implementation(self):
deps = []
if self.instance.filepath and self.instance.filepath != '<builtin>':
ensure_unpacked(self.instance)
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
return deps

View File

@ -29,6 +29,7 @@ class BlSpeaker(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = False
bl_icon = 'SPEAKER'
def _load_implementation(self, data, target):
@ -48,6 +49,7 @@ class BlSpeaker(BlDatablock):
'volume',
'name',
'pitch',
'sound',
'volume_min',
'volume_max',
'attenuation',
@ -60,6 +62,15 @@ class BlSpeaker(BlDatablock):
return dumper.dump(instance)
def _resolve_deps_implementation(self):
# TODO: resolve material
deps = []
sound = self.instance.sound
if sound:
deps.append(sound)
return deps

View File

@ -21,7 +21,11 @@ import mathutils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from .bl_material import load_links, load_node, dump_node, dump_links
from .bl_material import (load_links,
load_node,
dump_node,
dump_links,
get_node_tree_dependencies)
class BlWorld(BlDatablock):
@ -30,12 +34,16 @@ class BlWorld(BlDatablock):
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_check_common = True
bl_icon = 'WORLD_DATA'
def _construct(self, data):
return bpy.data.worlds.new(data["name"])
def _load_implementation(self, data, target):
loader = Loader()
loader.load(target, data)
if data["use_nodes"]:
if target.node_tree is None:
target.use_nodes = True
@ -48,26 +56,21 @@ class BlWorld(BlDatablock):
# Load nodes links
target.node_tree.links.clear()
load_links(data["node_tree"]["links"], target.node_tree)
def _dump_implementation(self, data, instance=None):
assert(instance)
world_dumper = Dumper()
world_dumper.depth = 2
world_dumper.exclude_filter = [
"preview",
"original",
"uuid",
"color",
"cycles",
"light_settings",
"users",
"view_center"
world_dumper.depth = 1
world_dumper.include_filter = [
"use_nodes",
"name",
"color"
]
data = world_dumper.dump(instance)
if instance.use_nodes:
data['node_tree'] = {}
nodes = {}
for node in instance.node_tree.nodes:
@ -83,10 +86,7 @@ class BlWorld(BlDatablock):
deps = []
if self.instance.use_nodes:
for node in self.instance.node_tree.nodes:
if node.type == 'TEX_IMAGE':
deps.append(node.image)
deps.extend(get_node_tree_dependencies(self.instance.node_tree))
if self.is_library:
deps.append(self.instance.library)
return deps

View File

@ -24,8 +24,8 @@ import numpy as np
BPY_TO_NUMPY_TYPES = {
'FLOAT': np.float,
'INT': np.int,
'FLOAT': np.float32,
'INT': np.int32,
'BOOL': np.bool}
PRIMITIVE_TYPES = ['FLOAT', 'INT', 'BOOLEAN']
@ -47,7 +47,7 @@ def np_load_collection(dikt: dict, collection: bpy.types.CollectionProperty, att
:type attributes: list
"""
if not dikt or len(collection) == 0:
logging.warning(f'Skipping collection')
logging.debug(f'Skipping collection {collection}')
return
if attributes is None:
@ -115,7 +115,7 @@ def np_dump_collection_primitive(collection: bpy.types.CollectionProperty, attri
:return: numpy byte buffer
"""
if len(collection) == 0:
logging.warning(f'Skipping empty {attribute} attribute')
logging.debug(f'Skipping empty {attribute} attribute')
return {}
attr_infos = collection[0].bl_rna.properties.get(attribute)
@ -192,7 +192,7 @@ def np_load_collection_primitives(collection: bpy.types.CollectionProperty, attr
:type sequence: strr
"""
if len(collection) == 0 or not sequence:
logging.warning(f"Skipping loadin {attribute}")
logging.debug(f"Skipping loading {attribute}")
return
attr_infos = collection[0].bl_rna.properties.get(attribute)
@ -301,7 +301,7 @@ class Dumper:
self._dump_ID = (lambda x, depth: x.name, self._dump_default_as_branch)
self._dump_collection = (
self._dump_default_as_leaf, self._dump_collection_as_branch)
self._dump_array = (self._dump_default_as_leaf,
self._dump_array = (self._dump_array_as_branch,
self._dump_array_as_branch)
self._dump_matrix = (self._dump_matrix_as_leaf,
self._dump_matrix_as_leaf)
@ -593,6 +593,10 @@ class Loader:
instance.write(bpy.data.materials.get(dump))
elif isinstance(rna_property_type, T.Collection):
instance.write(bpy.data.collections.get(dump))
elif isinstance(rna_property_type, T.VectorFont):
instance.write(bpy.data.fonts.get(dump))
elif isinstance(rna_property_type, T.Sound):
instance.write(bpy.data.sounds.get(dump))
def _load_matrix(self, matrix, dump):
matrix.write(mathutils.Matrix(dump))
@ -622,11 +626,11 @@ class Loader:
for k in self._ordered_keys(dump.keys()):
v = dump[k]
if not hasattr(default.read(), k):
logging.debug(f"Load default, skipping {default} : {k}")
continue
try:
self._load_any(default.extend(k), v)
except Exception as err:
logging.debug(f"Cannot load {k}: {err}")
logging.debug(f"Skipping {k}")
@property
def match_subset_all(self):

View File

@ -19,21 +19,36 @@ import logging
import bpy
from . import operators, presence, utils
from . import utils
from .presence import (renderer,
UserFrustumWidget,
UserNameWidget,
UserSelectionWidget,
refresh_3d_view,
generate_user_camera,
get_view_matrix,
refresh_sidebar_view)
from replication.constants import (FETCHED,
RP_COMMON,
STATE_INITIAL,
STATE_QUITTING,
STATE_ACTIVE,
STATE_SYNCING,
STATE_LOBBY,
STATE_SRV_SYNC)
UP,
RP_COMMON,
STATE_INITIAL,
STATE_QUITTING,
STATE_ACTIVE,
STATE_SYNCING,
STATE_LOBBY,
STATE_SRV_SYNC,
REPARENT)
from replication.interface import session
from replication.exception import NonAuthorizedOperationError
class Delayable():
"""Delayable task interface
"""
def __init__(self):
self.is_registered = False
def register(self):
raise NotImplementedError
@ -51,13 +66,21 @@ class Timer(Delayable):
"""
def __init__(self, duration=1):
super().__init__()
self._timeout = duration
self._running = True
def register(self):
"""Register the timer into the blender timer system
"""
bpy.app.timers.register(self.main)
if not self.is_registered:
bpy.app.timers.register(self.main)
self.is_registered = True
logging.debug(f"Register {self.__class__.__name__}")
else:
logging.debug(
f"Timer {self.__class__.__name__} already registered")
def main(self):
self.execute()
@ -85,18 +108,29 @@ class ApplyTimer(Timer):
super().__init__(timout)
def execute(self):
client = operators.client
if client and client.state['STATE'] == STATE_ACTIVE:
nodes = client.list(filter=self._type)
if session and session.state['STATE'] == STATE_ACTIVE:
if self._type:
nodes = session.list(filter=self._type)
else:
nodes = session.list()
for node in nodes:
node_ref = client.get(uuid=node)
node_ref = session.get(uuid=node)
if node_ref.state == FETCHED:
try:
client.apply(node)
session.apply(node)
except Exception as e:
logging.error(f"Fail to apply {node_ref.uuid}: {e}")
elif node_ref.state == REPARENT:
# Reload the node
node_ref.remove_instance()
node_ref.resolve()
session.apply(node)
for parent in session._graph.find_parents(node):
logging.info(f"Applying parent {parent}")
session.apply(parent, force=True)
node_ref.state = UP
class DynamicRightSelectTimer(Timer):
@ -107,7 +141,6 @@ class DynamicRightSelectTimer(Timer):
self._right_strategy = RP_COMMON
def execute(self):
session = operators.client
settings = utils.get_preferences()
if session and session.state['STATE'] == STATE_ACTIVE:
@ -134,10 +167,14 @@ class DynamicRightSelectTimer(Timer):
recursive = True
if node.data and 'instance_type' in node.data.keys():
recursive = node.data['instance_type'] != 'COLLECTION'
session.change_owner(
node.uuid,
RP_COMMON,
recursive=recursive)
try:
session.change_owner(
node.uuid,
RP_COMMON,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(f"Not authorized to change {node} owner")
# change new selection to our
for obj in obj_ours:
@ -148,10 +185,14 @@ class DynamicRightSelectTimer(Timer):
if node.data and 'instance_type' in node.data.keys():
recursive = node.data['instance_type'] != 'COLLECTION'
session.change_owner(
node.uuid,
settings.username,
recursive=recursive)
try:
session.change_owner(
node.uuid,
settings.username,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(f"Not authorized to change {node} owner")
else:
return
@ -170,101 +211,49 @@ class DynamicRightSelectTimer(Timer):
filter_owner=settings.username)
for key in owned_keys:
node = session.get(uuid=key)
try:
session.change_owner(
key,
RP_COMMON,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(f"Not authorized to change {key} owner")
session.change_owner(
key,
RP_COMMON,
recursive=recursive)
for user, user_info in session.online_users.items():
if user != settings.username:
metadata = user_info.get('metadata')
if 'selected_objects' in metadata:
# Update selectionnable objects
for obj in bpy.data.objects:
if obj.hide_select and obj.uuid not in metadata['selected_objects']:
obj.hide_select = False
elif not obj.hide_select and obj.uuid in metadata['selected_objects']:
obj.hide_select = True
class Draw(Delayable):
def __init__(self):
self._handler = None
def register(self):
self._handler = bpy.types.SpaceView3D.draw_handler_add(
self.execute, (), 'WINDOW', 'POST_VIEW')
def execute(self):
raise NotImplementedError()
def unregister(self):
try:
bpy.types.SpaceView3D.draw_handler_remove(
self._handler, "WINDOW")
except:
pass
class DrawClient(Draw):
def execute(self):
session = getattr(operators, 'client', None)
renderer = getattr(presence, 'renderer', None)
prefs = utils.get_preferences()
if session and renderer and session.state['STATE'] == STATE_ACTIVE:
settings = bpy.context.window_manager.session
users = session.online_users
# Update users
for user in users.values():
metadata = user.get('metadata')
color = metadata.get('color')
scene_current = metadata.get('scene_current')
user_showable = scene_current == bpy.context.scene.name or settings.presence_show_far_user
if color and scene_current and user_showable:
if settings.presence_show_selected and 'selected_objects' in metadata.keys():
renderer.draw_client_selection(
user['id'], color, metadata['selected_objects'])
if settings.presence_show_user and 'view_corners' in metadata:
renderer.draw_client_camera(
user['id'], metadata['view_corners'], color)
if not user_showable:
# TODO: remove this when user event drivent update will be
# ready
renderer.flush_selection()
renderer.flush_users()
for obj in bpy.data.objects:
object_uuid = getattr(obj, 'uuid', None)
if object_uuid:
is_selectable = not session.is_readonly(object_uuid)
if obj.hide_select != is_selectable:
obj.hide_select = is_selectable
class ClientUpdate(Timer):
def __init__(self, timout=.032):
def __init__(self, timout=.1):
super().__init__(timout)
self.handle_quit = False
self.users_metadata = {}
def execute(self):
settings = utils.get_preferences()
session = getattr(operators, 'client', None)
renderer = getattr(presence, 'renderer', None)
if session and renderer:
if session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]:
local_user = operators.client.online_users.get(settings.username)
local_user = session.online_users.get(
settings.username)
if not local_user:
return
else:
for username, user_data in operators.client.online_users.items():
for username, user_data in session.online_users.items():
if username != settings.username:
cached_user_data = self.users_metadata.get(username)
new_user_data = operators.client.online_users[username]['metadata']
cached_user_data = self.users_metadata.get(
username)
new_user_data = session.online_users[username]['metadata']
if cached_user_data is None:
self.users_metadata[username] = user_data['metadata']
elif 'view_matrix' in cached_user_data and 'view_matrix' in new_user_data and cached_user_data['view_matrix'] != new_user_data['view_matrix']:
presence.refresh_3d_view()
refresh_3d_view()
self.users_metadata[username] = user_data['metadata']
break
else:
@ -272,18 +261,18 @@ class ClientUpdate(Timer):
local_user_metadata = local_user.get('metadata')
scene_current = bpy.context.scene.name
local_user = session.online_users.get(settings.username)
current_view_corners = presence.get_view_corners()
local_user = session.online_users.get(settings.username)
current_view_corners = generate_user_camera()
# Init client metadata
if not local_user_metadata or 'color' not in local_user_metadata.keys():
metadata = {
'view_corners': presence.get_view_matrix(),
'view_matrix': presence.get_view_matrix(),
'view_corners': get_view_matrix(),
'view_matrix': get_view_matrix(),
'color': (settings.client_color.r,
settings.client_color.g,
settings.client_color.b,
1),
settings.client_color.g,
settings.client_color.b,
1),
'frame_current': bpy.context.scene.frame_current,
'scene_current': scene_current
}
@ -296,38 +285,60 @@ class ClientUpdate(Timer):
session.update_user_metadata(local_user_metadata)
elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']:
local_user_metadata['view_corners'] = current_view_corners
local_user_metadata['view_matrix'] = presence.get_view_matrix()
local_user_metadata['view_matrix'] = get_view_matrix(
)
session.update_user_metadata(local_user_metadata)
class SessionStatusUpdate(Timer):
def __init__(self, timout=1):
super().__init__(timout)
def execute(self):
presence.refresh_sidebar_view()
refresh_sidebar_view()
class SessionUserSync(Timer):
def __init__(self, timout=1):
super().__init__(timout)
self.settings = utils.get_preferences()
def execute(self):
session = getattr(operators, 'client', None)
renderer = getattr(presence, 'renderer', None)
if session and renderer:
# sync online users
session_users = operators.client.online_users
session_users = session.online_users
ui_users = bpy.context.window_manager.online_users
for index, user in enumerate(ui_users):
if user.username not in session_users.keys():
if user.username not in session_users.keys() and \
user.username != self.settings.username:
renderer.remove_widget(f"{user.username}_cam")
renderer.remove_widget(f"{user.username}_select")
renderer.remove_widget(f"{user.username}_name")
ui_users.remove(index)
renderer.flush_selection()
renderer.flush_users()
break
for user in session_users:
if user not in ui_users:
new_key = ui_users.add()
new_key.name = user
new_key.username = user
new_key.username = user
if user != self.settings.username:
renderer.add_widget(
f"{user}_cam", UserFrustumWidget(user))
renderer.add_widget(
f"{user}_select", UserSelectionWidget(user))
renderer.add_widget(
f"{user}_name", UserNameWidget(user))
class MainThreadExecutor(Timer):
def __init__(self, timout=1, execution_queue=None):
super().__init__(timout)
self.execution_queue = execution_queue
def execute(self):
while not self.execution_queue.empty():
function = self.execution_queue.get()
logging.debug(f"Executing {function.__name__}")
function()

View File

@ -25,7 +25,7 @@ from pathlib import Path
import socket
import re
VERSION_EXPR = re.compile('\d+\.\d+\.\d+')
VERSION_EXPR = re.compile('\d+\.\d+\.\d+\w\d+')
THIRD_PARTY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
DEFAULT_CACHE_DIR = os.path.join(
@ -52,14 +52,21 @@ def install_pip():
def install_package(name, version):
logging.info(f"installing {name} version...")
subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}=={version}"])
env = os.environ
if "PIP_REQUIRE_VIRTUALENV" in env:
# PIP_REQUIRE_VIRTUALENV is an env var to ensure pip cannot install packages outside a virtual env
# https://docs.python-guide.org/dev/pip-virtualenv/
# But since Blender's pip is outside of a virtual env, it can block our packages installation, so we unset the
# env var for the subprocess.
env = os.environ.copy()
del env["PIP_REQUIRE_VIRTUALENV"]
subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}=={version}"], env=env)
def check_package_version(name, required_version):
logging.info(f"Checking {name} version...")
out = subprocess.run(f"{str(PYTHON_PATH)} -m pip show {name}", capture_output=True)
out = subprocess.run([str(PYTHON_PATH), "-m", "pip", "show", name], capture_output=True)
version = VERSION_EXPR.search(out.stdout.decode())
if version and version.group() == required_version:
logging.info(f"{name} is up to date")
return True

View File

@ -21,34 +21,101 @@ import logging
import os
import queue
import random
import shutil
import string
import time
from operator import itemgetter
from pathlib import Path
from subprocess import PIPE, Popen, TimeoutExpired
import zmq
from queue import Queue
import bpy
import mathutils
from bpy.app.handlers import persistent
from . import bl_types, delayable, environment, presence, ui, utils
from replication.constants import (FETCHED, STATE_ACTIVE,
STATE_INITIAL,
STATE_SYNCING)
from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE,
STATE_INITIAL, STATE_SYNCING, UP)
from replication.data import ReplicatedDataFactory
from replication.exception import NonAuthorizedOperationError
from replication.interface import Session
from replication.interface import session
from . import bl_types, delayable, environment, ui, utils
from .presence import (SessionStatusWidget, renderer, view3d_find)
client = None
background_execution_queue = Queue()
delayables = []
stop_modal_executor = False
def session_callback(name):
""" Session callback wrapper
This allow to encapsulate session callbacks to background_execution_queue.
By doing this way callback are executed from the main thread.
"""
def func_wrapper(func):
@session.register(name)
def add_background_task():
background_execution_queue.put(func)
return add_background_task
return func_wrapper
@session_callback('on_connection')
def initialize_session():
"""Session connection init hander
"""
settings = utils.get_preferences()
runtime_settings = bpy.context.window_manager.session
# Step 1: Constrect nodes
for node in session._graph.list_ordered():
node_ref = session.get(node)
if node_ref.state == FETCHED:
node_ref.resolve()
# Step 2: Load nodes
for node in session._graph.list_ordered():
node_ref = session.get(node)
if node_ref.state == FETCHED:
node_ref.apply()
# Step 4: Register blender timers
for d in delayables:
d.register()
if settings.update_method == 'DEPSGRAPH':
bpy.app.handlers.depsgraph_update_post.append(depsgraph_evaluation)
bpy.ops.session.apply_armature_operator('INVOKE_DEFAULT')
@session_callback('on_exit')
def on_connection_end():
"""Session connection finished handler
"""
global delayables, stop_modal_executor
settings = utils.get_preferences()
# Step 1: Unregister blender timers
for d in delayables:
try:
d.unregister()
except:
continue
stop_modal_executor = True
if settings.update_method == 'DEPSGRAPH':
bpy.app.handlers.depsgraph_update_post.remove(
depsgraph_evaluation)
# Step 3: remove file handled
logger = logging.getLogger()
for handler in logger.handlers:
if isinstance(handler, logging.FileHandler):
logger.removeHandler(handler)
# OPERATORS
class SessionStartOperator(bpy.types.Operator):
bl_idname = "session.start"
bl_label = "start"
@ -61,16 +128,38 @@ class SessionStartOperator(bpy.types.Operator):
return True
def execute(self, context):
global client, delayables
global delayables
settings = utils.get_preferences()
runtime_settings = context.window_manager.session
users = bpy.data.window_managers['WinMan'].online_users
admin_pass = runtime_settings.password
use_extern_update = settings.update_method == 'DEPSGRAPH'
users.clear()
delayables.clear()
logger = logging.getLogger()
if len(logger.handlers) == 1:
formatter = logging.Formatter(
fmt='%(asctime)s CLIENT %(levelname)-8s %(message)s',
datefmt='%H:%M:%S'
)
log_directory = os.path.join(
settings.cache_directory,
"multiuser_client.log")
os.makedirs(settings.cache_directory, exist_ok=True)
handler = logging.FileHandler(log_directory, mode='w')
logger.addHandler(handler)
for handler in logger.handlers:
if isinstance(handler, logging.NullHandler):
continue
handler.setFormatter(formatter)
bpy_factory = ReplicatedDataFactory()
supported_bl_types = []
@ -82,24 +171,35 @@ class SessionStartOperator(bpy.types.Operator):
supported_bl_types.append(type_module_class.bl_id)
# Retreive local replicated types settings
if type_impl_name not in settings.supported_datablocks:
logging.info(f"{type_impl_name} not found, \
regenerate type settings...")
settings.generate_supported_types()
type_local_config = settings.supported_datablocks[type_impl_name]
bpy_factory.register_type(
type_module_class.bl_class,
type_module_class,
timer=type_local_config.bl_delay_refresh,
automatic=type_local_config.auto_push)
timer=type_local_config.bl_delay_refresh*1000,
automatic=type_local_config.auto_push,
check_common=type_module_class.bl_check_common)
if type_local_config.bl_delay_apply > 0:
delayables.append(
delayable.ApplyTimer(
timout=type_local_config.bl_delay_apply,
target_type=type_module_class))
if settings.update_method == 'DEFAULT':
if type_local_config.bl_delay_apply > 0:
delayables.append(
delayable.ApplyTimer(
timout=type_local_config.bl_delay_apply,
target_type=type_module_class))
client = Session(
session.configure(
factory=bpy_factory,
python_path=bpy.app.binary_path_python)
python_path=bpy.app.binary_path_python,
external_update_handling=use_extern_update)
if settings.update_method == 'DEPSGRAPH':
delayables.append(delayable.ApplyTimer(
settings.depsgraph_update_rate/1000))
# Host a session
if self.host:
@ -109,30 +209,34 @@ class SessionStartOperator(bpy.types.Operator):
runtime_settings.is_host = True
runtime_settings.internet_ip = environment.get_ip()
for scene in bpy.data.scenes:
client.add(scene)
try:
client.host(
for scene in bpy.data.scenes:
session.add(scene)
session.host(
id=settings.username,
port=settings.port,
ipc_port=settings.ipc_port,
timeout=settings.connection_timeout,
password=admin_pass
password=admin_pass,
cache_directory=settings.cache_directory,
server_log_level=logging.getLevelName(
logging.getLogger().level),
)
except Exception as e:
self.report({'ERROR'}, repr(e))
logging.error(f"Error: {e}")
import traceback
traceback.print_exc()
# Join a session
else:
if not runtime_settings.admin:
utils.clean_scene()
# regular client, no password needed
# regular session, no password needed
admin_pass = None
try:
client.connect(
session.connect(
id=settings.username,
address=settings.ip,
port=settings.port,
@ -145,50 +249,23 @@ class SessionStartOperator(bpy.types.Operator):
logging.error(str(e))
# Background client updates service
#TODO: Refactoring
delayables.append(delayable.ClientUpdate())
delayables.append(delayable.DrawClient())
delayables.append(delayable.DynamicRightSelectTimer())
session_update = delayable.SessionStatusUpdate()
session_user_sync = delayable.SessionUserSync()
session_background_executor = delayable.MainThreadExecutor(
execution_queue=background_execution_queue)
session_update.register()
session_user_sync.register()
session_background_executor.register()
delayables.append(session_background_executor)
delayables.append(session_update)
delayables.append(session_user_sync)
@client.register('on_connection')
def initialize_session():
for node in client._graph.list_ordered():
node_ref = client.get(node)
if node_ref.state == FETCHED:
node_ref.resolve()
node_ref.apply()
# Launch drawing module
if runtime_settings.enable_presence:
presence.renderer.run()
# Register blender main thread tools
for d in delayables:
d.register()
@client.register('on_exit')
def desinitialize_session():
global delayables, stop_modal_executor
for d in delayables:
try:
d.unregister()
except:
continue
stop_modal_executor = True
presence.renderer.stop()
bpy.ops.session.apply_armature_operator()
self.report(
{'INFO'},
@ -224,15 +301,13 @@ class SessionInitOperator(bpy.types.Operator):
return wm.invoke_props_dialog(self)
def execute(self, context):
global client
if self.init_method == 'EMPTY':
utils.clean_scene()
for scene in bpy.data.scenes:
client.add(scene)
session.add(scene)
client.init()
session.init()
return {"FINISHED"}
@ -248,11 +323,12 @@ class SessionStopOperator(bpy.types.Operator):
return True
def execute(self, context):
global client, delayables, stop_modal_executor
global delayables, stop_modal_executor
if client:
if session:
try:
client.disconnect()
session.disconnect()
except Exception as e:
self.report({'ERROR'}, repr(e))
else:
@ -264,7 +340,7 @@ class SessionStopOperator(bpy.types.Operator):
class SessionKickOperator(bpy.types.Operator):
bl_idname = "session.kick"
bl_label = "Kick"
bl_description = "Kick the user"
bl_description = "Kick the target user"
bl_options = {"REGISTER"}
user: bpy.props.StringProperty()
@ -274,11 +350,11 @@ class SessionKickOperator(bpy.types.Operator):
return True
def execute(self, context):
global client, delayables, stop_modal_executor
assert(client)
global delayables, stop_modal_executor
assert(session)
try:
client.kick(self.user)
session.kick(self.user)
except Exception as e:
self.report({'ERROR'}, repr(e))
@ -294,8 +370,9 @@ class SessionKickOperator(bpy.types.Operator):
class SessionPropertyRemoveOperator(bpy.types.Operator):
bl_idname = "session.remove_prop"
bl_label = "remove"
bl_description = "broadcast a property to connected client_instances"
bl_label = "Delete cache"
bl_description = "Stop tracking modification on the target datablock." + \
"The datablock will no longer be updated for others client. "
bl_options = {"REGISTER"}
property_path: bpy.props.StringProperty(default="None")
@ -305,9 +382,8 @@ class SessionPropertyRemoveOperator(bpy.types.Operator):
return True
def execute(self, context):
global client
try:
client.remove(self.property_path)
session.remove(self.property_path)
return {"FINISHED"}
except: # NonAuthorizedOperationError:
@ -319,11 +395,12 @@ class SessionPropertyRemoveOperator(bpy.types.Operator):
class SessionPropertyRightOperator(bpy.types.Operator):
bl_idname = "session.right"
bl_label = "Change owner to"
bl_description = "Change owner of specified datablock"
bl_label = "Change modification rights"
bl_description = "Modify the owner of the target datablock"
bl_options = {"REGISTER"}
key: bpy.props.StringProperty(default="None")
recursive: bpy.props.BoolProperty(default=True)
@classmethod
def poll(cls, context):
@ -337,15 +414,21 @@ class SessionPropertyRightOperator(bpy.types.Operator):
layout = self.layout
runtime_settings = context.window_manager.session
col = layout.column()
col.prop(runtime_settings, "clients")
row = layout.row()
row.label(text="Give the owning rights to:")
row.prop(runtime_settings, "clients", text="")
row = layout.row()
row.label(text="Affect dependencies")
row.prop(self, "recursive", text="")
def execute(self, context):
runtime_settings = context.window_manager.session
global client
if client:
client.change_owner(self.key, runtime_settings.clients)
if session:
session.change_owner(self.key,
runtime_settings.clients,
ignore_warnings=True,
affect_dependencies=self.recursive)
return {"FINISHED"}
@ -391,11 +474,10 @@ class SessionSnapUserOperator(bpy.types.Operator):
return {'CANCELLED'}
if event.type == 'TIMER':
area, region, rv3d = presence.view3d_find()
global client
area, region, rv3d = view3d_find()
if client:
target_ref = client.online_users.get(self.target_client)
if session:
target_ref = session.online_users.get(self.target_client)
if target_ref:
target_scene = target_ref['metadata']['scene_current']
@ -404,14 +486,16 @@ class SessionSnapUserOperator(bpy.types.Operator):
if target_scene != context.scene.name:
blender_scene = bpy.data.scenes.get(target_scene, None)
if blender_scene is None:
self.report({'ERROR'}, f"Scene {target_scene} doesn't exist on the local client.")
self.report(
{'ERROR'}, f"Scene {target_scene} doesn't exist on the local client.")
session_sessings.time_snap_running = False
return {"CANCELLED"}
bpy.context.window.scene = blender_scene
# Update client viewmatrix
client_vmatrix = target_ref['metadata'].get('view_matrix', None)
client_vmatrix = target_ref['metadata'].get(
'view_matrix', None)
if client_vmatrix:
rv3d.view_matrix = mathutils.Matrix(client_vmatrix)
@ -464,10 +548,8 @@ class SessionSnapTimeOperator(bpy.types.Operator):
return {'CANCELLED'}
if event.type == 'TIMER':
global client
if client:
target_ref = client.online_users.get(self.target_client)
if session:
target_ref = session.online_users.get(self.target_client)
if target_ref:
context.scene.frame_current = target_ref['metadata']['frame_current']
@ -479,28 +561,31 @@ class SessionSnapTimeOperator(bpy.types.Operator):
class SessionApply(bpy.types.Operator):
bl_idname = "session.apply"
bl_label = "apply selected block into blender"
bl_description = "Apply selected block into blender"
bl_label = "Revert"
bl_description = "Revert the selected datablock from his cached" + \
" version."
bl_options = {"REGISTER"}
target: bpy.props.StringProperty()
reset_dependencies: bpy.props.BoolProperty(default=False)
@classmethod
def poll(cls, context):
return True
def execute(self, context):
global client
client.apply(self.target)
logging.debug(f"Running apply on {self.target}")
session.apply(self.target,
force=True,
force_dependencies=self.reset_dependencies)
return {"FINISHED"}
class SessionCommit(bpy.types.Operator):
bl_idname = "session.commit"
bl_label = "commit and push selected datablock to server"
bl_description = "commit and push selected datablock to server"
bl_label = "Force server update"
bl_description = "Commit and push the target datablock to server"
bl_options = {"REGISTER"}
target: bpy.props.StringProperty()
@ -510,10 +595,9 @@ class SessionCommit(bpy.types.Operator):
return True
def execute(self, context):
global client
# client.get(uuid=target).diff()
client.commit(uuid=self.target)
client.push(self.target)
# session.get(uuid=target).diff()
session.commit(uuid=self.target)
session.push(self.target)
return {"FINISHED"}
@ -531,16 +615,15 @@ class ApplyArmatureOperator(bpy.types.Operator):
return {'CANCELLED'}
if event.type == 'TIMER':
global client
if client and client.state['STATE'] == STATE_ACTIVE:
nodes = client.list(filter=bl_types.bl_armature.BlArmature)
if session and session.state['STATE'] == STATE_ACTIVE:
nodes = session.list(filter=bl_types.bl_armature.BlArmature)
for node in nodes:
node_ref = client.get(uuid=node)
node_ref = session.get(uuid=node)
if node_ref.state == FETCHED:
try:
client.apply(node)
session.apply(node)
except Exception as e:
logging.error("Fail to apply armature: {e}")
@ -561,6 +644,35 @@ class ApplyArmatureOperator(bpy.types.Operator):
stop_modal_executor = False
class ClearCache(bpy.types.Operator):
"Clear local session cache"
bl_idname = "session.clear_cache"
bl_label = "Modal Executor Operator"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
cache_dir = utils.get_preferences().cache_directory
try:
for root, dirs, files in os.walk(cache_dir):
for name in files:
Path(root, name).unlink()
except Exception as e:
self.report({'ERROR'}, repr(e))
return {"FINISHED"}
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
def draw(self, context):
row = self.layout
row.label(text=f" Do you really want to remove local cache ? ")
classes = (
SessionStartOperator,
SessionStopOperator,
@ -573,7 +685,7 @@ classes = (
ApplyArmatureOperator,
SessionKickOperator,
SessionInitOperator,
ClearCache,
)
@ -585,31 +697,65 @@ def sanitize_deps_graph(dummy):
A future solution should be to avoid storing dataclock reference...
"""
global client
if client and client.state['STATE'] == STATE_ACTIVE:
for node_key in client.list():
client.get(node_key).resolve()
if session and session.state['STATE'] == STATE_ACTIVE:
for node_key in session.list():
node = session.get(node_key)
if node and not node.resolve(construct=False):
session.remove(node_key)
@persistent
def load_pre_handler(dummy):
global client
if client and client.state['STATE'] in [STATE_ACTIVE, STATE_SYNCING]:
if session and session.state['STATE'] in [STATE_ACTIVE, STATE_SYNCING]:
bpy.ops.session.stop()
@persistent
def update_client_frame(scene):
if client and client.state['STATE'] == STATE_ACTIVE:
client.update_user_metadata({
if session and session.state['STATE'] == STATE_ACTIVE:
session.update_user_metadata({
'frame_current': scene.frame_current
})
@persistent
def depsgraph_evaluation(scene):
if session and session.state['STATE'] == STATE_ACTIVE:
context = bpy.context
blender_depsgraph = bpy.context.view_layer.depsgraph
dependency_updates = [u for u in blender_depsgraph.updates]
settings = utils.get_preferences()
# NOTE: maybe we don't need to check each update but only the first
for update in reversed(dependency_updates):
# Is the object tracked ?
if update.id.uuid:
# Retrieve local version
node = session.get(update.id.uuid)
# Check our right on this update:
# - if its ours or ( under common and diff), launch the
# update process
# - if its to someone else, ignore the update (go deeper ?)
if node and node.owner in [session.id, RP_COMMON] and node.state == UP:
# Avoid slow geometry update
if 'EDIT' in context.mode and \
not settings.sync_during_editmode:
break
session.stash(node.uuid)
else:
# Distant update
continue
# else:
# # New items !
# logger.error("UPDATE: ADD")
def register():
from bpy.utils import register_class
for cls in classes:
register_class(cls)
@ -621,11 +767,8 @@ def register():
def unregister():
global client
if client and client.state['STATE'] == 2:
client.disconnect()
client = None
if session and session.state['STATE'] == STATE_ACTIVE:
session.disconnect()
from bpy.utils import unregister_class
for cls in reversed(classes):
@ -636,7 +779,3 @@ def unregister():
bpy.app.handlers.load_pre.remove(load_pre_handler)
bpy.app.handlers.frame_change_pre.remove(update_client_frame)
if __name__ == "__main__":
register()

View File

@ -20,9 +20,14 @@ import logging
import bpy
import string
import re
import os
from . import utils, bl_types, environment, addon_updater_ops, presence, ui
from pathlib import Path
from . import bl_types, environment, addon_updater_ops, presence, ui
from .utils import get_preferences, get_expanded_icon
from replication.constants import RP_COMMON
from replication.interface import session
IP_EXPR = re.compile('\d+\.\d+\.\d+\.\d+')
@ -36,7 +41,7 @@ def randomColor():
def random_string_digits(stringLength=6):
"""Generate a random string of letters and digits """
"""Generate a random string of letters and digits"""
lettersAndDigits = string.ascii_letters + string.digits
return ''.join(random.choices(lettersAndDigits, k=stringLength))
@ -46,6 +51,7 @@ def update_panel_category(self, context):
ui.SESSION_PT_settings.bl_category = self.panel_category
ui.register()
def update_ip(self, context):
ip = IP_EXPR.search(self.ip)
@ -55,14 +61,35 @@ def update_ip(self, context):
logging.error("Wrong IP format")
self['ip'] = "127.0.0.1"
def update_port(self, context):
max_port = self.port + 3
if self.ipc_port < max_port and \
self['ipc_port'] >= self.port:
logging.error("IPC Port in conflic with the port, assigning a random value")
self['ipc_port'] >= self.port:
logging.error(
"IPC Port in conflict with the port, assigning a random value")
self['ipc_port'] = random.randrange(self.port+4, 10000)
def update_directory(self, context):
new_dir = Path(self.cache_directory)
if new_dir.exists() and any(Path(self.cache_directory).iterdir()):
logging.error("The folder is not empty, choose another one.")
self['cache_directory'] = environment.DEFAULT_CACHE_DIR
elif not new_dir.exists():
logging.info("Target cache folder doesn't exist, creating it.")
os.makedirs(self.cache_directory, exist_ok=True)
def set_log_level(self, value):
logging.getLogger().setLevel(value)
def get_log_level(self):
return logging.getLogger().level
class ReplicatedDatablock(bpy.types.PropertyGroup):
type_name: bpy.props.StringProperty()
bl_name: bpy.props.StringProperty()
@ -73,11 +100,49 @@ class ReplicatedDatablock(bpy.types.PropertyGroup):
icon: bpy.props.StringProperty()
def set_sync_render_settings(self, value):
self['sync_render_settings'] = value
if session and bpy.context.scene.uuid and value:
bpy.ops.session.apply('INVOKE_DEFAULT',
target=bpy.context.scene.uuid,
reset_dependencies=False)
def set_sync_active_camera(self, value):
self['sync_active_camera'] = value
if session and bpy.context.scene.uuid and value:
bpy.ops.session.apply('INVOKE_DEFAULT',
target=bpy.context.scene.uuid,
reset_dependencies=False)
class ReplicationFlags(bpy.types.PropertyGroup):
def get_sync_render_settings(self):
return self.get('sync_render_settings', True)
def get_sync_active_camera(self):
return self.get('sync_active_camera', True)
sync_render_settings: bpy.props.BoolProperty(
name="Synchronize render settings",
description="Synchronize render settings (eevee and cycles only)",
default=True)
default=False,
set=set_sync_render_settings,
get=get_sync_render_settings
)
sync_during_editmode: bpy.props.BoolProperty(
name="Edit mode updates",
description="Enable objects update in edit mode (! Impact performances !)",
default=False
)
sync_active_camera: bpy.props.BoolProperty(
name="Synchronize active camera",
description="Synchronize the active camera",
default=True,
get=get_sync_active_camera,
set=set_sync_active_camera
)
class SessionPrefs(bpy.types.AddonPreferences):
@ -109,9 +174,9 @@ class SessionPrefs(bpy.types.AddonPreferences):
)
ipc_port: bpy.props.IntProperty(
name="ipc_port",
description='internal ttl port(only usefull for multiple local instances)',
default=5561,
update=update_port
description='internal ttl port(only useful for multiple local instances)',
default=random.randrange(5570, 70000),
update=update_port,
)
init_method: bpy.props.EnumProperty(
name='init_method',
@ -123,33 +188,80 @@ class SessionPrefs(bpy.types.AddonPreferences):
cache_directory: bpy.props.StringProperty(
name="cache directory",
subtype="DIR_PATH",
default=environment.DEFAULT_CACHE_DIR)
default=environment.DEFAULT_CACHE_DIR,
update=update_directory)
connection_timeout: bpy.props.IntProperty(
name='connection timeout',
description='connection timeout before disconnection',
default=1000
)
update_method: bpy.props.EnumProperty(
name='update method',
description='replication update method',
items=[
('DEFAULT', "Default", "Default: Use threads to monitor databloc changes"),
('DEPSGRAPH', "Depsgraph",
"Experimental: Use the blender dependency graph to trigger updates"),
],
)
# Replication update settings
depsgraph_update_rate: bpy.props.IntProperty(
name='depsgraph update rate',
description='Dependency graph uppdate rate (milliseconds)',
default=100
)
clear_memory_filecache: bpy.props.BoolProperty(
name="Clear memory filecache",
description="Remove filecache from memory",
default=False
)
# for UI
category: bpy.props.EnumProperty(
name="Category",
description="Preferences Category",
items=[
('CONFIG', "Configuration", "Configuration about this add-on"),
('CONFIG', "Configuration", "Configuration of this add-on"),
('UPDATE', "Update", "Update this add-on"),
],
default='CONFIG'
)
# WIP
logging_level: bpy.props.EnumProperty(
name="Log level",
description="Log verbosity level",
items=[
('ERROR', "error", "show only errors"),
('WARNING', "warning", "only show warnings and errors"),
('INFO', "info", "default level"),
('DEBUG', "debug", "show all logs"),
('ERROR', "error", "show only errors", logging.ERROR),
('WARNING', "warning", "only show warnings and errors", logging.WARNING),
('INFO', "info", "default level", logging.INFO),
('DEBUG', "debug", "show all logs", logging.DEBUG),
],
default='INFO'
default='INFO',
set=set_log_level,
get=get_log_level
)
presence_hud_scale: bpy.props.FloatProperty(
name="Text scale",
description="Adjust the session widget text scale",
min=7,
max=90,
default=15,
)
presence_hud_hpos: bpy.props.FloatProperty(
name="Horizontal position",
description="Adjust the session widget horizontal position",
min=1,
max=90,
default=3,
step=1,
subtype='PERCENTAGE',
)
presence_hud_vpos: bpy.props.FloatProperty(
name="Vertical position",
description="Adjust the session widget vertical position",
min=1,
max=94,
default=1,
step=1,
subtype='PERCENTAGE',
)
conf_session_identity_expanded: bpy.props.BoolProperty(
name="Identity",
@ -181,6 +293,26 @@ class SessionPrefs(bpy.types.AddonPreferences):
description="Interface",
default=False
)
sidebar_advanced_rep_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_rep_expanded",
description="sidebar_advanced_rep_expanded",
default=False
)
sidebar_advanced_log_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_log_expanded",
description="sidebar_advanced_log_expanded",
default=False
)
sidebar_advanced_net_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_net_expanded",
description="sidebar_advanced_net_expanded",
default=False
)
sidebar_advanced_cache_expanded: bpy.props.BoolProperty(
name="sidebar_advanced_cache_expanded",
description="sidebar_advanced_cache_expanded",
default=False
)
auto_check_update: bpy.props.BoolProperty(
name="Auto-check for Update",
@ -232,9 +364,9 @@ class SessionPrefs(bpy.types.AddonPreferences):
# USER INFORMATIONS
box = grid.box()
box.prop(
self, "conf_session_identity_expanded", text="User informations",
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_identity_expanded
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
self, "conf_session_identity_expanded", text="User information",
icon=get_expanded_icon(self.conf_session_identity_expanded),
emboss=False)
if self.conf_session_identity_expanded:
box.row().prop(self, "username", text="name")
box.row().prop(self, "client_color", text="color")
@ -242,24 +374,27 @@ class SessionPrefs(bpy.types.AddonPreferences):
# NETWORK SETTINGS
box = grid.box()
box.prop(
self, "conf_session_net_expanded", text="Netorking",
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_net_expanded
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
self, "conf_session_net_expanded", text="Networking",
icon=get_expanded_icon(self.conf_session_net_expanded),
emboss=False)
if self.conf_session_net_expanded:
box.row().prop(self, "ip", text="Address")
row = box.row()
row.label(text="Port:")
row.prop(self, "port", text="Address")
row.prop(self, "port", text="")
row = box.row()
row.label(text="Init the session from:")
row.prop(self, "init_method", text="")
row = box.row()
row.label(text="Update method:")
row.prop(self, "update_method", text="")
table = box.box()
table.row().prop(
self, "conf_session_timing_expanded", text="Refresh rates",
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_timing_expanded
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
icon=get_expanded_icon(self.conf_session_timing_expanded),
emboss=False)
if self.conf_session_timing_expanded:
line = table.row()
@ -277,8 +412,8 @@ class SessionPrefs(bpy.types.AddonPreferences):
box = grid.box()
box.prop(
self, "conf_session_hosting_expanded", text="Hosting",
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_hosting_expanded
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
icon=get_expanded_icon(self.conf_session_hosting_expanded),
emboss=False)
if self.conf_session_hosting_expanded:
row = box.row()
row.label(text="Init the session from:")
@ -288,23 +423,33 @@ class SessionPrefs(bpy.types.AddonPreferences):
box = grid.box()
box.prop(
self, "conf_session_cache_expanded", text="Cache",
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_cache_expanded
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
icon=get_expanded_icon(self.conf_session_cache_expanded),
emboss=False)
if self.conf_session_cache_expanded:
box.row().prop(self, "cache_directory", text="Cache directory")
box.row().prop(self, "clear_memory_filecache", text="Clear memory filecache")
# INTERFACE SETTINGS
box = grid.box()
box.prop(
self, "conf_session_ui_expanded", text="Interface",
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_ui_expanded else 'DISCLOSURE_TRI_RIGHT',
icon=get_expanded_icon(self.conf_session_ui_expanded),
emboss=False)
if self.conf_session_ui_expanded:
box.row().prop(self, "panel_category", text="Panel category", expand=True)
row = box.row()
row.label(text="Session widget:")
col = box.column(align=True)
col.prop(self, "presence_hud_scale", expand=True)
col.prop(self, "presence_hud_hpos", expand=True)
col.prop(self, "presence_hud_vpos", expand=True)
if self.category == 'UPDATE':
from . import addon_updater_ops
addon_updater_ops.update_settings_ui_condensed(self, context)
addon_updater_ops.update_settings_ui(self, context)
def generate_supported_types(self):
self.supported_datablocks.clear()
@ -331,10 +476,10 @@ def client_list_callback(scene, context):
items = [(RP_COMMON, RP_COMMON, "")]
username = utils.get_preferences().username
cli = operators.client
if cli:
client_ids = cli.online_users.keys()
username = get_preferences().username
if session:
client_ids = session.online_users.keys()
for id in client_ids:
name_desc = id
if id == username:
@ -370,25 +515,26 @@ class SessionProps(bpy.types.PropertyGroup):
name="Presence overlay",
description='Enable overlay drawing module',
default=True,
update=presence.update_presence
)
presence_show_selected: bpy.props.BoolProperty(
name="Show selected objects",
description='Enable selection overlay ',
default=True,
update=presence.update_overlay_settings
)
presence_show_user: bpy.props.BoolProperty(
name="Show users",
description='Enable user overlay ',
default=True,
update=presence.update_overlay_settings
)
presence_show_far_user: bpy.props.BoolProperty(
name="Show users on different scenes",
description="Show user on different scenes",
default=False,
update=presence.update_overlay_settings
)
presence_show_session_status: bpy.props.BoolProperty(
name="Show session status ",
description="Show session status on the viewport",
default=True,
)
filter_owned: bpy.props.BoolProperty(
name="filter_owned",

View File

@ -19,6 +19,7 @@
import copy
import logging
import math
import sys
import traceback
import bgl
@ -28,13 +29,17 @@ import gpu
import mathutils
from bpy_extras import view3d_utils
from gpu_extras.batch import batch_for_shader
from replication.constants import (STATE_ACTIVE, STATE_AUTH, STATE_CONFIG,
STATE_INITIAL, STATE_LAUNCHING_SERVICES,
STATE_LOBBY, STATE_QUITTING, STATE_SRV_SYNC,
STATE_SYNCING, STATE_WAITING)
from replication.interface import session
from . import utils
from .utils import find_from_attr, get_state_str, get_preferences
renderer = None
# Helper functions
def view3d_find():
def view3d_find() -> tuple:
""" Find the first 'VIEW_3D' windows found in areas
:return: tuple(Area, Region, RegionView3D)
@ -56,35 +61,48 @@ def refresh_3d_view():
if area and region and rv3d:
area.tag_redraw()
def refresh_sidebar_view():
""" Refresh the blender sidebar
""" Refresh the blender viewport sidebar
"""
area, region, rv3d = view3d_find()
area.regions[3].tag_redraw()
if area:
area.regions[3].tag_redraw()
def get_target(region, rv3d, coord):
def project_to_viewport(region: bpy.types.Region, rv3d: bpy.types.RegionView3D, coords: list, distance: float = 1.0) -> list:
""" Compute a projection from 2D to 3D viewport coordinate
:param region: target windows region
:type region: bpy.types.Region
:param rv3d: view 3D
:type rv3d: bpy.types.RegionView3D
:param coords: coordinate to project
:type coords: list
:param distance: distance offset into viewport
:type distance: float
:return: list of coordinates [x,y,z]
"""
target = [0, 0, 0]
if coord and region and rv3d:
view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coord)
ray_origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coord)
target = ray_origin + view_vector
return [target.x, target.y, target.z]
def get_target_far(region, rv3d, coord, distance):
target = [0, 0, 0]
if coord and region and rv3d:
view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coord)
ray_origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coord)
if coords and region and rv3d:
view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coords)
ray_origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coords)
target = ray_origin + view_vector * distance
return [target.x, target.y, target.z]
def get_default_bbox(obj, radius):
def bbox_from_obj(obj: bpy.types.Object, radius: float) -> list:
""" Generate a bounding box for a given object by using its world matrix
:param obj: target object
:type obj: bpy.types.Object
:param radius: bounding box radius
:type radius: float
:return: list of 8 points [(x,y,z),...]
"""
coords = [
(-radius, -radius, -radius), (+radius, -radius, -radius),
(-radius, +radius, -radius), (+radius, +radius, -radius),
@ -92,264 +110,384 @@ def get_default_bbox(obj, radius):
(-radius, +radius, +radius), (+radius, +radius, +radius)]
base = obj.matrix_world
bbox_corners = [base @ mathutils.Vector(corner) for corner in coords]
bbox_corners = [base @ mathutils.Vector(corner) for corner in coords]
return [(point.x, point.y, point.z)
for point in bbox_corners]
for point in bbox_corners]
def get_view_corners():
def generate_user_camera() -> list:
""" Generate a basic camera represention of the user point of view
:return: list of 7 points
"""
area, region, rv3d = view3d_find()
v1 = [0, 0, 0]
v2 = [0, 0, 0]
v3 = [0, 0, 0]
v4 = [0, 0, 0]
v5 = [0, 0, 0]
v6 = [0, 0, 0]
v7 = [0, 0, 0]
v1 = v2 = v3 = v4 = v5 = v6 = v7 = [0, 0, 0]
if area and region and rv3d:
width = region.width
height = region.height
v1 = get_target(region, rv3d, (0, 0))
v3 = get_target(region, rv3d, (0, height))
v2 = get_target(region, rv3d, (width, height))
v4 = get_target(region, rv3d, (width, 0))
v1 = project_to_viewport(region, rv3d, (0, 0))
v3 = project_to_viewport(region, rv3d, (0, height))
v2 = project_to_viewport(region, rv3d, (width, height))
v4 = project_to_viewport(region, rv3d, (width, 0))
v5 = get_target(region, rv3d, (width/2, height/2))
v5 = project_to_viewport(region, rv3d, (width/2, height/2))
v6 = list(rv3d.view_location)
v7 = get_target_far(region, rv3d, (width/2, height/2), -.8)
v7 = project_to_viewport(
region, rv3d, (width/2, height/2), distance=-.8)
coords = [v1, v2, v3, v4, v5, v6, v7]
return coords
def get_client_2d(coords):
def project_to_screen(coords: list) -> list:
""" Project 3D coordinate to 2D screen coordinates
:param coords: 3D coordinates (x,y,z)
:type coords: list
:return: list of 2D coordinates [x,y]
"""
area, region, rv3d = view3d_find()
if area and region and rv3d:
return view3d_utils.location_3d_to_region_2d(region, rv3d, coords)
else:
return (0, 0)
def get_bb_coords_from_obj(object, parent=None):
base = object.matrix_world if parent is None else parent.matrix_world
def get_bb_coords_from_obj(object: bpy.types.Object, instance: bpy.types.Object = None) -> list:
""" Generate bounding box in world coordinate from object bound box
:param object: target object
:type object: bpy.types.Object
:param instance: optionnal instance
:type instance: bpy.types.Object
:return: list of 8 points [(x,y,z),...]
"""
base = object.matrix_world
if instance:
scale = mathutils.Matrix.Diagonal(object.matrix_world.to_scale())
base = instance.matrix_world @ scale.to_4x4()
bbox_corners = [base @ mathutils.Vector(
corner) for corner in object.bound_box]
corner) for corner in object.bound_box]
return [(point.x, point.y, point.z)
for point in bbox_corners]
return [(point.x, point.y, point.z) for point in bbox_corners]
def get_view_matrix():
def get_view_matrix() -> list:
""" Return the 3d viewport view matrix
:return: view matrix as a 4x4 list
"""
area, region, rv3d = view3d_find()
if area and region and rv3d:
if area and region and rv3d:
return [list(v) for v in rv3d.view_matrix]
def update_presence(self, context):
    # Property-update callback for `enable_presence`: (re)start the shared
    # presence renderer when the user toggles presence on.
    global renderer
    # Guard: the module-level renderer may not have been created yet
    # (e.g. during add-on registration), so probe globals() first.
    if 'renderer' in globals() and hasattr(renderer, 'run'):
        if self.enable_presence:
            renderer.run()
class Widget(object):
    """Abstract viewport interface element.

    Concrete widgets implement :meth:`draw` and may override
    :meth:`poll` to gate whether they are rendered on a given frame.
    """

    # Draw handler event type this widget is dispatched from.
    draw_type: str = 'POST_VIEW'

    def poll(self) -> bool:
        """Return True when the widget should be drawn.

        :return: bool
        """
        return True

    def draw(self):
        """Render the widget; subclasses must provide an implementation."""
        raise NotImplementedError()
class UserFrustumWidget(Widget):
# Camera widget indices
indices = ((1, 3), (2, 1), (3, 0),
(2, 0), (4, 5), (1, 6),
(2, 6), (3, 6), (0, 6))
def __init__(
self,
username):
self.username = username
self.settings = bpy.context.window_manager.session
@property
def data(self):
user = session.online_users.get(self.username)
if user:
return user.get('metadata')
else:
renderer.stop()
return None
def poll(self):
if self.data is None:
return False
def update_overlay_settings(self, context):
global renderer
scene_current = self.data.get('scene_current')
view_corners = self.data.get('view_corners')
if renderer and not self.presence_show_selected:
renderer.flush_selection()
if renderer and not self.presence_show_user:
renderer.flush_users()
return (scene_current == bpy.context.scene.name or
self.settings.presence_show_far_user) and \
view_corners and \
self.settings.presence_show_user and \
self.settings.enable_presence
class DrawFactory(object):
def __init__(self):
self.d3d_items = {}
self.d2d_items = {}
self.draw3d_handle = None
self.draw2d_handle = None
self.draw_event = None
self.coords = None
self.active_object = None
def run(self):
self.register_handlers()
def stop(self):
self.flush_users()
self.flush_selection()
self.unregister_handlers()
refresh_3d_view()
def register_handlers(self):
self.draw3d_handle = bpy.types.SpaceView3D.draw_handler_add(
self.draw3d_callback, (), 'WINDOW', 'POST_VIEW')
self.draw2d_handle = bpy.types.SpaceView3D.draw_handler_add(
self.draw2d_callback, (), 'WINDOW', 'POST_PIXEL')
def unregister_handlers(self):
if self.draw2d_handle:
bpy.types.SpaceView3D.draw_handler_remove(
self.draw2d_handle, "WINDOW")
self.draw2d_handle = None
if self.draw3d_handle:
bpy.types.SpaceView3D.draw_handler_remove(
self.draw3d_handle, "WINDOW")
self.draw3d_handle = None
self.d3d_items.clear()
self.d2d_items.clear()
def flush_selection(self, user=None):
key_to_remove = []
select_key = f"{user}_select" if user else "select"
for k in self.d3d_items.keys():
if select_key in k:
key_to_remove.append(k)
for k in key_to_remove:
del self.d3d_items[k]
def flush_users(self):
key_to_remove = []
for k in self.d3d_items.keys():
if "select" not in k:
key_to_remove.append(k)
for k in key_to_remove:
del self.d3d_items[k]
self.d2d_items.clear()
def draw_client_selection(self, client_id, client_color, client_selection):
local_user = utils.get_preferences().username
if local_user != client_id:
self.flush_selection(client_id)
for select_ob in client_selection:
drawable_key = f"{client_id}_select_{select_ob}"
ob = utils.find_from_attr("uuid", select_ob, bpy.data.objects)
if not ob:
return
if ob.type == 'EMPTY':
# TODO: Child case
# Collection instance case
indices = (
(0, 1), (1, 2), (2, 3), (0, 3),
(4, 5), (5, 6), (6, 7), (4, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
if ob.instance_collection:
for obj in ob.instance_collection.objects:
if obj.type == 'MESH':
self.append_3d_item(
drawable_key,
client_color,
get_bb_coords_from_obj(obj, parent=ob),
indices)
if ob.type in ['MESH','META']:
indices = (
(0, 1), (1, 2), (2, 3), (0, 3),
(4, 5), (5, 6), (6, 7), (4, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
self.append_3d_item(
drawable_key,
client_color,
get_bb_coords_from_obj(ob),
indices)
else:
indices = (
(0, 1), (0, 2), (1, 3), (2, 3),
(4, 5), (4, 6), (5, 7), (6, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
self.append_3d_item(
drawable_key,
client_color,
get_default_bbox(ob, ob.scale.x),
indices)
def append_3d_item(self,key,color, coords, indices):
def draw(self):
location = self.data.get('view_corners')
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
color = color
positions = [tuple(coord) for coord in location]
if len(positions) != 7:
return
batch = batch_for_shader(
shader, 'LINES', {"pos": coords}, indices=indices)
shader,
'LINES',
{"pos": positions},
indices=self.indices)
self.d3d_items[key] = (shader, batch, color)
def draw_client_camera(self, client_id, client_location, client_color):
if client_location:
local_user = utils.get_preferences().username
if local_user != client_id:
try:
indices = (
(1, 3), (2, 1), (3, 0),
(2, 0), (4, 5), (1, 6),
(2, 6), (3, 6), (0, 6)
)
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
position = [tuple(coord) for coord in client_location]
color = client_color
batch = batch_for_shader(
shader, 'LINES', {"pos": position}, indices=indices)
self.d3d_items[client_id] = (shader, batch, color)
self.d2d_items[client_id] = (position[1], client_id, color)
except Exception as e:
logging.debug(f"Draw client exception: {e} \n {traceback.format_exc()}\n pos:{position},ind:{indices}")
def draw3d_callback(self):
bgl.glLineWidth(2.)
bgl.glEnable(bgl.GL_DEPTH_TEST)
bgl.glEnable(bgl.GL_BLEND)
bgl.glEnable(bgl.GL_LINE_SMOOTH)
shader.bind()
shader.uniform_float("color", self.data.get('color'))
batch.draw(shader)
class UserSelectionWidget(Widget):
    """Draw a wireframe bounding box around each object selected by a
    remote user, colored with that user's presence color.
    """
    def __init__(
            self,
            username):
        self.username = username
        self.settings = bpy.context.window_manager.session

    @property
    def data(self):
        """Return this user's presence metadata dict, or None if the user
        is no longer in the online-users registry."""
        user = session.online_users.get(self.username)
        if user:
            return user.get('metadata')
        else:
            return None

    def poll(self):
        if self.data is None:
            return False

        user_selection = self.data.get('selected_objects')
        scene_current = self.data.get('scene_current')
        return (scene_current == bpy.context.scene.name or
                self.settings.presence_show_far_user) and \
            user_selection and \
            self.settings.presence_show_selected and \
            self.settings.enable_presence

    def draw(self):
        user_selection = self.data.get('selected_objects')
        for select_ob in user_selection:
            ob = find_from_attr("uuid", select_ob, bpy.data.objects)
            if not ob:
                # Fix: skip unresolved uuids instead of returning, so the
                # remaining selected objects still get their highlight drawn.
                continue

            # Default: unit-radius box around the object origin; indices
            # below match the corner ordering produced by bbox_from_obj.
            vertex_pos = bbox_from_obj(ob, 1.0)
            vertex_indices = ((0, 1), (0, 2), (1, 3), (2, 3),
                              (4, 5), (4, 6), (5, 7), (6, 7),
                              (0, 4), (1, 5), (2, 6), (3, 7))

            if ob.instance_collection:
                # Collection instance: outline the first mesh found inside
                # the instanced collection, in the instance's space.
                for obj in ob.instance_collection.objects:
                    if obj.type == 'MESH' and hasattr(obj, 'bound_box'):
                        vertex_pos = get_bb_coords_from_obj(obj, instance=ob)
                        break
            elif ob.type == 'EMPTY':
                vertex_pos = bbox_from_obj(ob, ob.empty_display_size)
            elif ob.type == 'LIGHT':
                vertex_pos = bbox_from_obj(ob, ob.data.shadow_soft_size)
            elif ob.type == 'LIGHT_PROBE':
                vertex_pos = bbox_from_obj(ob, ob.data.influence_distance)
            elif ob.type == 'CAMERA':
                vertex_pos = bbox_from_obj(ob, ob.data.display_size)
            elif hasattr(ob, 'bound_box'):
                # Regular object with a native bound box: use its own
                # corner ordering (differs from bbox_from_obj's).
                vertex_indices = (
                    (0, 1), (1, 2), (2, 3), (0, 3),
                    (4, 5), (5, 6), (6, 7), (4, 7),
                    (0, 4), (1, 5), (2, 6), (3, 7))
                vertex_pos = get_bb_coords_from_obj(ob)

            shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
            batch = batch_for_shader(
                shader,
                'LINES',
                {"pos": vertex_pos},
                indices=vertex_indices)
            shader.bind()
            shader.uniform_float("color", self.data.get('color'))
            batch.draw(shader)
class UserNameWidget(Widget):
    """Render a remote user's name as 2D screen-space text, anchored to
    a corner of their viewport frustum."""

    draw_type = 'POST_PIXEL'

    def __init__(
            self,
            username):
        self.username = username
        self.settings = bpy.context.window_manager.session

    @property
    def data(self):
        """Presence metadata dict for this user, or None when offline."""
        user = session.online_users.get(self.username)
        return user.get('metadata') if user else None

    def poll(self):
        metadata = self.data
        if metadata is None:
            return False

        return ((metadata.get('scene_current') == bpy.context.scene.name or
                 self.settings.presence_show_far_user) and
                metadata.get('view_corners') and
                self.settings.presence_show_user and
                self.settings.enable_presence)

    def draw(self):
        metadata = self.data
        corners = [tuple(point) for point in metadata.get('view_corners')]
        rgba = metadata.get('color')
        # Anchor the label to the projected second frustum corner.
        anchor = project_to_screen(corners[1])

        if anchor:
            blf.position(0, anchor[0], anchor[1]+10, 0)
            blf.size(0, 16, 72)
            blf.color(0, rgba[0], rgba[1], rgba[2], rgba[3])
            blf.draw(0, self.username)
class SessionStatusWidget(Widget):
    """HUD text showing the current multi-user session state, positioned
    and scaled from the add-on preferences."""

    draw_type = 'POST_PIXEL'

    def __init__(self):
        self.preferences = get_preferences()

    @property
    def settings(self):
        """Runtime session settings, or None if unavailable."""
        return getattr(bpy.context.window_manager, 'session', None)

    def poll(self):
        runtime = self.settings
        return runtime and \
            runtime.presence_show_session_status and \
            runtime.enable_presence

    def draw(self):
        prefs = self.preferences
        area = bpy.context.area
        ui_scale = bpy.context.preferences.view.ui_scale
        text_scale = prefs.presence_hud_scale

        state = session.state.get('STATE')
        label = f"{get_state_str(state)}"
        # Green when active, red while uninitialized, yellow otherwise.
        if state == STATE_ACTIVE:
            rgba = [0, 1, 0, 1]
        elif state == STATE_INITIAL:
            rgba = [1, 0, 0, 1]
        else:
            rgba = [1, 1, 0, 1]

        # HUD position preferences are percentages of the area size.
        x = (prefs.presence_hud_hpos*area.width)/100
        y = (prefs.presence_hud_vpos*area.height)/100

        blf.position(0, x, y, 0)
        blf.size(0, int(text_scale*ui_scale), 72)
        blf.color(0, rgba[0], rgba[1], rgba[2], rgba[3])
        blf.draw(0, label)
class DrawFactory(object):
    """Own the 3D viewport draw handlers and dispatch rendering to the
    registered Widget instances."""
    def __init__(self):
        # Handles returned by draw_handler_add, kept so they can be removed.
        self.post_view_handle = None
        self.post_pixel_handle = None
        # Mapping of widget name -> Widget instance drawn by the callbacks.
        self.widgets = {}
    def add_widget(self, name: str, widget: Widget):
        # Register (or silently replace) a widget under the given name.
        self.widgets[name] = widget
    def remove_widget(self, name: str):
        # Unregister a widget; log an error when the name is unknown.
        if name in self.widgets:
            del self.widgets[name]
        else:
            logging.error(f"Widget {name} not existing")
    def clear_widgets(self):
        # Drop every registered widget at once.
        self.widgets.clear()
    def register_handlers(self):
        # Attach one POST_VIEW (3D geometry) and one POST_PIXEL (2D text)
        # callback to the 3D viewport space.
        self.post_view_handle = bpy.types.SpaceView3D.draw_handler_add(
            self.post_view_callback,
            (),
            'WINDOW',
            'POST_VIEW')
        self.post_pixel_handle = bpy.types.SpaceView3D.draw_handler_add(
            self.post_pixel_callback,
            (),
            'WINDOW',
            'POST_PIXEL')
    def unregister_handlers(self):
        # Remove the callbacks when present; safe to call multiple times
        # since handles are reset to None after removal.
        if self.post_pixel_handle:
            bpy.types.SpaceView3D.draw_handler_remove(
                self.post_pixel_handle,
                "WINDOW")
            self.post_pixel_handle = None
        if self.post_view_handle:
            bpy.types.SpaceView3D.draw_handler_remove(
                self.post_view_handle,
                "WINDOW")
            self.post_view_handle = None
def post_view_callback(self):
try:
for shader, batch, color in self.d3d_items.values():
shader.bind()
shader.uniform_float("color", color)
batch.draw(shader)
except Exception:
logging.error("3D Exception")
for widget in self.widgets.values():
if widget.draw_type == 'POST_VIEW' and widget.poll():
widget.draw()
except Exception as e:
logging.error(
f"Post view widget exception: {e} \n {traceback.print_exc()}")
def draw2d_callback(self):
for position, font, color in self.d2d_items.values():
try:
coords = get_client_2d(position)
def post_pixel_callback(self):
try:
for widget in self.widgets.values():
if widget.draw_type == 'POST_PIXEL' and widget.poll():
widget.draw()
except Exception as e:
logging.error(
f"Post pixel widget Exception: {e} \n {traceback.print_exc()}")
if coords:
blf.position(0, coords[0], coords[1]+10, 0)
blf.size(0, 16, 72)
blf.color(0, color[0], color[1], color[2], color[3])
blf.draw(0, font)
except Exception:
logging.error("2D EXCEPTION")
this = sys.modules[__name__]
this.renderer = DrawFactory()
def register():
global renderer
renderer = DrawFactory()
this.renderer.register_handlers()
this.renderer.add_widget("session_status", SessionStatusWidget())
def unregister():
global renderer
renderer.unregister_handlers()
this.renderer.unregister_handlers()
del renderer
this.renderer.clear_widgets()

View File

@ -18,7 +18,7 @@
import bpy
from . import operators, utils
from .utils import get_preferences, get_expanded_icon, get_folder_size, get_state_str
from replication.constants import (ADDED, ERROR, FETCHED,
MODIFIED, RP_COMMON, UP,
STATE_ACTIVE, STATE_AUTH,
@ -27,13 +27,16 @@ from replication.constants import (ADDED, ERROR, FETCHED,
STATE_WAITING, STATE_QUITTING,
STATE_LOBBY,
STATE_LAUNCHING_SERVICES)
from replication import __version__
from replication.interface import session
ICONS_PROP_STATES = ['TRIA_DOWN', # ADDED
'TRIA_UP', # COMMITED
'KEYTYPE_KEYFRAME_VEC', # PUSHED
'TRIA_DOWN', # FETCHED
'FILE_REFRESH', # UP
'TRIA_UP'] # CHANGED
'RECOVER_LAST', # RESET
'TRIA_UP', # CHANGED
'ERROR'] # ERROR
def printProgressBar(iteration, total, prefix='', suffix='', decimals=1, length=100, fill='', fill_empty=' '):
@ -57,32 +60,6 @@ def printProgressBar(iteration, total, prefix='', suffix='', decimals=1, length=
return f"{prefix} |{bar}| {iteration}/{total}{suffix}"
def get_state_str(state):
    """Return a human-readable label for a replication session state.

    Unrecognized states map to 'UNKNOWN'.
    """
    labels = {
        STATE_WAITING: 'WARMING UP DATA',
        STATE_SYNCING: 'FETCHING',
        STATE_AUTH: 'AUTHENTIFICATION',
        STATE_CONFIG: 'CONFIGURATION',
        STATE_ACTIVE: 'ONLINE',
        STATE_SRV_SYNC: 'PUSHING',
        STATE_INITIAL: 'INIT',
        STATE_QUITTING: 'QUITTING',
        STATE_LAUNCHING_SERVICES: 'LAUNCHING SERVICES',
        STATE_LOBBY: 'LOBBY',
    }
    return labels.get(state, 'UNKNOWN')
class SESSION_PT_settings(bpy.types.Panel):
"""Settings panel"""
bl_idname = "MULTIUSER_SETTINGS_PT_panel"
@ -93,9 +70,9 @@ class SESSION_PT_settings(bpy.types.Panel):
def draw_header(self, context):
layout = self.layout
if operators.client and operators.client.state['STATE'] != STATE_INITIAL:
cli_state = operators.client.state
state = operators.client.state.get('STATE')
if session and session.state['STATE'] != STATE_INITIAL:
cli_state = session.state
state = session.state.get('STATE')
connection_icon = "KEYTYPE_MOVING_HOLD_VEC"
if state == STATE_ACTIVE:
@ -105,76 +82,54 @@ class SESSION_PT_settings(bpy.types.Panel):
layout.label(text=f"Session - {get_state_str(cli_state['STATE'])}", icon=connection_icon)
else:
layout.label(text="Session",icon="PROP_OFF")
layout.label(text=f"Session - v{__version__}",icon="PROP_OFF")
def draw(self, context):
layout = self.layout
layout.use_property_split = True
row = layout.row()
runtime_settings = context.window_manager.session
settings = utils.get_preferences()
settings = get_preferences()
if hasattr(context.window_manager, 'session'):
# STATE INITIAL
if not operators.client \
or (operators.client and operators.client.state['STATE'] == STATE_INITIAL):
if not session \
or (session and session.state['STATE'] == STATE_INITIAL):
pass
else:
cli_state = operators.client.state
cli_state = session.state
row = layout.row()
current_state = cli_state['STATE']
info_msg = None
# STATE ACTIVE
if current_state in [STATE_ACTIVE]:
row.operator("session.stop", icon='QUIT', text="Exit")
row = layout.row()
if runtime_settings.is_host:
row = row.box()
row.label(text=f"LAN: {runtime_settings.internet_ip}", icon='INFO')
row = layout.row()
row = row.split(factor=0.3)
row.prop(settings.sync_flags, "sync_render_settings",text="",icon_only=True, icon='SCENE')
row.prop(settings.sync_flags, "sync_during_editmode", text="",icon_only=True, icon='EDITMODE_HLT')
row.prop(settings.sync_flags, "sync_active_camera", text="",icon_only=True, icon='OBJECT_DATAMODE')
row= layout.row()
if current_state in [STATE_ACTIVE] and runtime_settings.is_host:
info_msg = f"LAN: {runtime_settings.internet_ip}"
if current_state == STATE_LOBBY:
row = row.box()
row.label(text=f"Waiting the session to start", icon='INFO')
row = layout.row()
row.operator("session.stop", icon='QUIT', text="Exit")
# CONNECTION STATE
elif current_state in [STATE_SRV_SYNC,
STATE_SYNCING,
STATE_AUTH,
STATE_CONFIG,
STATE_WAITING]:
info_msg = "Waiting for the session to start."
if cli_state['STATE'] in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]:
box = row.box()
box.label(text=printProgressBar(
cli_state['CURRENT'],
cli_state['TOTAL'],
length=16
))
if info_msg:
info_box = row.box()
info_box.row().label(text=info_msg,icon='INFO')
row = layout.row()
row.operator("session.stop", icon='QUIT', text="CANCEL")
elif current_state == STATE_QUITTING:
row = layout.row()
box = row.box()
num_online_services = 0
for name, state in operators.client.services_state.items():
if state == STATE_ACTIVE:
num_online_services += 1
total_online_services = len(
operators.client.services_state)
box.label(text=printProgressBar(
total_online_services-num_online_services,
total_online_services,
# Progress bar
if current_state in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]:
info_box = row.box()
info_box.row().label(text=printProgressBar(
cli_state['CURRENT'],
cli_state['TOTAL'],
length=16
))
layout.row().operator("session.stop", icon='QUIT', text="Exit")
class SESSION_PT_settings_network(bpy.types.Panel):
bl_idname = "MULTIUSER_SETTINGS_NETWORK_PT_panel"
@ -185,8 +140,8 @@ class SESSION_PT_settings_network(bpy.types.Panel):
@classmethod
def poll(cls, context):
return not operators.client \
or (operators.client and operators.client.state['STATE'] == 0)
return not session \
or (session and session.state['STATE'] == 0)
def draw_header(self, context):
self.layout.label(text="", icon='URL')
@ -195,7 +150,7 @@ class SESSION_PT_settings_network(bpy.types.Panel):
layout = self.layout
runtime_settings = context.window_manager.session
settings = utils.get_preferences()
settings = get_preferences()
# USER SETTINGS
row = layout.row()
@ -243,8 +198,8 @@ class SESSION_PT_settings_user(bpy.types.Panel):
@classmethod
def poll(cls, context):
return not operators.client \
or (operators.client and operators.client.state['STATE'] == 0)
return not session \
or (session and session.state['STATE'] == 0)
def draw_header(self, context):
self.layout.label(text="", icon='USER')
@ -253,7 +208,7 @@ class SESSION_PT_settings_user(bpy.types.Panel):
layout = self.layout
runtime_settings = context.window_manager.session
settings = utils.get_preferences()
settings = get_preferences()
row = layout.row()
# USER SETTINGS
@ -274,8 +229,8 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
@classmethod
def poll(cls, context):
return not operators.client \
or (operators.client and operators.client.state['STATE'] == 0)
return not session \
or (session and session.state['STATE'] == 0)
def draw_header(self, context):
self.layout.label(text="", icon='PREFERENCES')
@ -284,44 +239,107 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
layout = self.layout
runtime_settings = context.window_manager.session
settings = utils.get_preferences()
settings = get_preferences()
net_section = layout.row().box()
net_section.label(text="Network ", icon='TRIA_DOWN')
net_section_row = net_section.row()
net_section_row.label(text="IPC Port:")
net_section_row.prop(settings, "ipc_port", text="")
net_section_row = net_section.row()
net_section_row.label(text="Timeout (ms):")
net_section_row.prop(settings, "connection_timeout", text="")
net_section.prop(
settings,
"sidebar_advanced_net_expanded",
text="Network",
icon=get_expanded_icon(settings.sidebar_advanced_net_expanded),
emboss=False)
if settings.sidebar_advanced_net_expanded:
net_section_row = net_section.row()
net_section_row.label(text="IPC Port:")
net_section_row.prop(settings, "ipc_port", text="")
net_section_row = net_section.row()
net_section_row.label(text="Timeout (ms):")
net_section_row.prop(settings, "connection_timeout", text="")
replication_section = layout.row().box()
replication_section.label(text="Replication ", icon='TRIA_DOWN')
replication_section_row = replication_section.row()
if runtime_settings.session_mode == 'HOST':
replication_section.prop(
settings,
"sidebar_advanced_rep_expanded",
text="Replication",
icon=get_expanded_icon(settings.sidebar_advanced_rep_expanded),
emboss=False)
if settings.sidebar_advanced_rep_expanded:
replication_section_row = replication_section.row()
replication_section_row.label(text="Sync flags", icon='COLLECTION_NEW')
replication_section_row = replication_section.row()
replication_section_row.prop(settings.sync_flags, "sync_render_settings")
replication_section_row = replication_section.row()
replication_section_row.prop(settings.sync_flags, "sync_active_camera")
replication_section_row = replication_section.row()
replication_section_row = replication_section.row()
replication_section_row.label(text="Per data type timers:")
replication_section_row = replication_section.row()
# Replication frequencies
flow = replication_section_row .grid_flow(
row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
line = flow.row(align=True)
line.label(text=" ")
line.separator()
line.label(text="refresh (sec)")
line.label(text="apply (sec)")
replication_section_row.prop(settings.sync_flags, "sync_during_editmode")
replication_section_row = replication_section.row()
if settings.sync_flags.sync_during_editmode:
warning = replication_section_row.box()
warning.label(text="Don't use this with heavy meshes !", icon='ERROR')
replication_section_row = replication_section.row()
for item in settings.supported_datablocks:
line = flow.row(align=True)
line.prop(item, "auto_push", text="", icon=item.icon)
line.separator()
line.prop(item, "bl_delay_refresh", text="")
line.prop(item, "bl_delay_apply", text="")
replication_section_row.label(text="Update method", icon='RECOVER_LAST')
replication_section_row = replication_section.row()
replication_section_row.prop(settings, "update_method", expand=True)
replication_section_row = replication_section.row()
replication_timers = replication_section_row.box()
replication_timers.label(text="Replication timers", icon='TIME')
if settings.update_method == "DEFAULT":
replication_timers = replication_timers.row()
# Replication frequencies
flow = replication_timers.grid_flow(
row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
line = flow.row(align=True)
line.label(text=" ")
line.separator()
line.label(text="refresh (sec)")
line.label(text="apply (sec)")
for item in settings.supported_datablocks:
line = flow.row(align=True)
line.prop(item, "auto_push", text="", icon=item.icon)
line.separator()
line.prop(item, "bl_delay_refresh", text="")
line.prop(item, "bl_delay_apply", text="")
else:
replication_timers = replication_timers.row()
replication_timers.label(text="Update rate (ms):")
replication_timers.prop(settings, "depsgraph_update_rate", text="")
cache_section = layout.row().box()
cache_section.prop(
settings,
"sidebar_advanced_cache_expanded",
text="Cache",
icon=get_expanded_icon(settings.sidebar_advanced_cache_expanded),
emboss=False)
if settings.sidebar_advanced_cache_expanded:
cache_section_row = cache_section.row()
cache_section_row.label(text="Cache directory:")
cache_section_row = cache_section.row()
cache_section_row.prop(settings, "cache_directory", text="")
cache_section_row = cache_section.row()
cache_section_row.label(text="Clear memory filecache:")
cache_section_row.prop(settings, "clear_memory_filecache", text="")
cache_section_row = cache_section.row()
cache_section_row.operator('session.clear_cache', text=f"Clear cache ({get_folder_size(settings.cache_directory)})")
log_section = layout.row().box()
log_section.prop(
settings,
"sidebar_advanced_log_expanded",
text="Logging",
icon=get_expanded_icon(settings.sidebar_advanced_log_expanded),
emboss=False)
if settings.sidebar_advanced_log_expanded:
log_section_row = log_section.row()
log_section_row.label(text="Log level:")
log_section_row.prop(settings, 'logging_level', text="")
class SESSION_PT_user(bpy.types.Panel):
bl_idname = "MULTIUSER_USER_PT_panel"
bl_label = "Online users"
@ -331,7 +349,7 @@ class SESSION_PT_user(bpy.types.Panel):
@classmethod
def poll(cls, context):
return operators.client and operators.client.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]
return session and session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]
def draw_header(self, context):
self.layout.label(text="", icon='USER')
@ -340,7 +358,7 @@ class SESSION_PT_user(bpy.types.Panel):
layout = self.layout
online_users = context.window_manager.online_users
selected_user = context.window_manager.user_index
settings = utils.get_preferences()
settings = get_preferences()
active_user = online_users[selected_user] if len(
online_users)-1 >= selected_user else 0
runtime_settings = context.window_manager.session
@ -362,7 +380,7 @@ class SESSION_PT_user(bpy.types.Panel):
if active_user != 0 and active_user.username != settings.username:
row = layout.row()
user_operations = row.split()
if operators.client.state['STATE'] == STATE_ACTIVE:
if session.state['STATE'] == STATE_ACTIVE:
user_operations.alert = context.window_manager.session.time_snap_running
user_operations.operator(
@ -376,7 +394,7 @@ class SESSION_PT_user(bpy.types.Panel):
text="",
icon='TIME').target_client = active_user.username
if operators.client.online_users[settings.username]['admin']:
if session.online_users[settings.username]['admin']:
user_operations.operator(
"session.kick",
text="",
@ -385,8 +403,7 @@ class SESSION_PT_user(bpy.types.Panel):
class SESSION_UL_users(bpy.types.UIList):
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index, flt_flag):
session = operators.client
settings = utils.get_preferences()
settings = get_preferences()
is_local_user = item.username == settings.username
ping = '-'
frame_current = '-'
@ -398,8 +415,8 @@ class SESSION_UL_users(bpy.types.UIList):
ping = str(user['latency'])
metadata = user.get('metadata')
if metadata and 'frame_current' in metadata:
frame_current = str(metadata['frame_current'])
scene_current = metadata['scene_current']
frame_current = str(metadata.get('frame_current','-'))
scene_current = metadata.get('scene_current','-')
if user['admin']:
status_icon = 'FAKE_USER_ON'
split = layout.split(factor=0.35)
@ -420,8 +437,8 @@ class SESSION_PT_presence(bpy.types.Panel):
@classmethod
def poll(cls, context):
return not operators.client \
or (operators.client and operators.client.state['STATE'] in [STATE_INITIAL, STATE_ACTIVE])
return not session \
or (session and session.state['STATE'] in [STATE_INITIAL, STATE_ACTIVE])
def draw_header(self, context):
self.layout.prop(context.window_manager.session,
@ -431,56 +448,35 @@ class SESSION_PT_presence(bpy.types.Panel):
layout = self.layout
settings = context.window_manager.session
pref = get_preferences()
layout.active = settings.enable_presence
col = layout.column()
col.prop(settings, "presence_show_session_status")
row = col.column()
row.active = settings.presence_show_session_status
row.prop(pref, "presence_hud_scale", expand=True)
row = col.column(align=True)
row.active = settings.presence_show_session_status
row.prop(pref, "presence_hud_hpos", expand=True)
row.prop(pref, "presence_hud_vpos", expand=True)
col.prop(settings, "presence_show_selected")
col.prop(settings, "presence_show_user")
row = layout.column()
row.active = settings.presence_show_user
row.prop(settings, "presence_show_far_user")
class SESSION_PT_services(bpy.types.Panel):
bl_idname = "MULTIUSER_SERVICE_PT_panel"
bl_label = "Services"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
return operators.client and operators.client.state['STATE'] == 2
def draw_header(self, context):
self.layout.label(text="", icon='FILE_CACHE')
def draw(self, context):
layout = self.layout
online_users = context.window_manager.online_users
selected_user = context.window_manager.user_index
settings = context.window_manager.session
active_user = online_users[selected_user] if len(online_users)-1 >= selected_user else 0
# Create a simple row.
for name, state in operators.client.services_state.items():
row = layout.row()
row.label(text=name)
row.label(text=get_state_str(state))
def draw_property(context, parent, property_uuid, level=0):
settings = utils.get_preferences()
settings = get_preferences()
runtime_settings = context.window_manager.session
item = operators.client.get(uuid=property_uuid)
if item.state == ERROR:
return
item = session.get(uuid=property_uuid)
area_msg = parent.row(align=True)
if level > 0:
for i in range(level):
area_msg.label(text="")
if item.state == ERROR:
area_msg.alert=True
else:
area_msg.alert=False
line = area_msg.box()
name = item.data['name'] if item.data else item.uuid
@ -493,8 +489,8 @@ def draw_property(context, parent, property_uuid, level=0):
# Operations
have_right_to_modify = item.owner == settings.username or \
item.owner == RP_COMMON
have_right_to_modify = (item.owner == settings.username or \
item.owner == RP_COMMON) and item.state != ERROR
if have_right_to_modify:
detail_item_box.operator(
@ -504,10 +500,12 @@ def draw_property(context, parent, property_uuid, level=0):
detail_item_box.separator()
if item.state in [FETCHED, UP]:
detail_item_box.operator(
apply = detail_item_box.operator(
"session.apply",
text="",
icon=ICONS_PROP_STATES[item.state]).target = item.uuid
icon=ICONS_PROP_STATES[item.state])
apply.target = item.uuid
apply.reset_dependencies = True
elif item.state in [MODIFIED, ADDED]:
detail_item_box.operator(
"session.commit",
@ -530,7 +528,6 @@ def draw_property(context, parent, property_uuid, level=0):
else:
detail_item_box.label(text="", icon="DECORATE_LOCKED")
class SESSION_PT_repository(bpy.types.Panel):
bl_idname = "MULTIUSER_PROPERTIES_PT_panel"
bl_label = "Repository"
@ -540,8 +537,7 @@ class SESSION_PT_repository(bpy.types.Panel):
@classmethod
def poll(cls, context):
session = operators.client
settings = utils.get_preferences()
settings = get_preferences()
admin = False
if session and hasattr(session,'online_users'):
@ -549,9 +545,9 @@ class SESSION_PT_repository(bpy.types.Panel):
if usr:
admin = usr['admin']
return hasattr(context.window_manager, 'session') and \
operators.client and \
(operators.client.state['STATE'] == STATE_ACTIVE or \
operators.client.state['STATE'] == STATE_LOBBY and admin)
session and \
(session.state['STATE'] == STATE_ACTIVE or \
session.state['STATE'] == STATE_LOBBY and admin)
def draw_header(self, context):
self.layout.label(text="", icon='OUTLINER_OB_GROUP_INSTANCE')
@ -560,10 +556,9 @@ class SESSION_PT_repository(bpy.types.Panel):
layout = self.layout
# Filters
settings = utils.get_preferences()
settings = get_preferences()
runtime_settings = context.window_manager.session
session = operators.client
usr = session.online_users.get(settings.username)
row = layout.row()
@ -589,11 +584,11 @@ class SESSION_PT_repository(bpy.types.Panel):
types_filter = [t.type_name for t in settings.supported_datablocks
if t.use_as_filter]
key_to_filter = operators.client.list(
filter_owner=settings.username) if runtime_settings.filter_owned else operators.client.list()
key_to_filter = session.list(
filter_owner=settings.username) if runtime_settings.filter_owned else session.list()
client_keys = [key for key in key_to_filter
if operators.client.get(uuid=key).str_type
if session.get(uuid=key).str_type
in types_filter]
if client_keys:
@ -609,6 +604,36 @@ class SESSION_PT_repository(bpy.types.Panel):
else:
row.label(text="Waiting to start")
class VIEW3D_PT_overlay_session(bpy.types.Panel):
    """3D-viewport overlay sub-panel exposing multi-user presence toggles."""
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'HEADER'
    bl_parent_id = 'VIEW3D_PT_overlay'
    bl_label = "Multi-user"

    @classmethod
    def poll(cls, context):
        # Always shown: presence overlay settings apply to every 3D view.
        return True

    def draw(self, context):
        layout = self.layout

        settings = context.window_manager.session
        # Grey the whole panel out while presence drawing is disabled.
        layout.active = settings.enable_presence

        col = layout.column()
        col.prop(settings, "presence_show_session_status")
        col.prop(settings, "presence_show_selected")
        col.prop(settings, "presence_show_user")

        # The "far user" toggle only makes sense when users are drawn at all.
        row = layout.column()
        row.active = settings.presence_show_user
        row.prop(settings, "presence_show_far_user")
classes = (
SESSION_UL_users,
@ -618,9 +643,8 @@ classes = (
SESSION_PT_presence,
SESSION_PT_advanced_settings,
SESSION_PT_user,
SESSION_PT_services,
SESSION_PT_repository,
VIEW3D_PT_overlay_session,
)

View File

@ -21,13 +21,22 @@ import logging
import os
import sys
import time
from uuid import uuid4
from collections.abc import Iterable
from pathlib import Path
from uuid import uuid4
import math
import bpy
import mathutils
from . import environment, presence
from . import environment
from replication.constants import (STATE_ACTIVE, STATE_AUTH,
STATE_CONFIG, STATE_SYNCING,
STATE_INITIAL, STATE_SRV_SYNC,
STATE_WAITING, STATE_QUITTING,
STATE_LOBBY,
STATE_LAUNCHING_SERVICES)
def find_from_attr(attr_name, attr_value, list):
@ -39,7 +48,7 @@ def find_from_attr(attr_name, attr_value, list):
def get_datablock_users(datablock):
users = []
supported_types = get_preferences().supported_datablocks
supported_types = get_preferences().supported_datablocks
if hasattr(datablock, 'users_collection') and datablock.users_collection:
users.extend(list(datablock.users_collection))
if hasattr(datablock, 'users_scene') and datablock.users_scene:
@ -47,7 +56,7 @@ def get_datablock_users(datablock):
if hasattr(datablock, 'users_group') and datablock.users_scene:
users.extend(list(datablock.users_scene))
for datatype in supported_types:
if datatype.bl_name != 'users':
if datatype.bl_name != 'users' and hasattr(bpy.data, datatype.bl_name):
root = getattr(bpy.data, datatype.bl_name)
for item in root:
if hasattr(item, 'data') and datablock == item.data or \
@ -56,6 +65,32 @@ def get_datablock_users(datablock):
return users
def get_state_str(state):
    """Return a human-readable label for a replication session state.

    :param state: one of the replication ``STATE_*`` constants
    :return: display string; ``'UNKNOWN'`` for unrecognized values
    """
    # Dict dispatch replaces the original if/elif ladder; also fixes the
    # 'UNKOWN' typo in the fallback label.
    state_labels = {
        STATE_WAITING: 'WARMING UP DATA',
        STATE_SYNCING: 'FETCHING',
        STATE_AUTH: 'AUTHENTICATION',
        STATE_CONFIG: 'CONFIGURATION',
        STATE_ACTIVE: 'ONLINE',
        STATE_SRV_SYNC: 'PUSHING',
        STATE_INITIAL: 'OFFLINE',
        STATE_QUITTING: 'QUITTING',
        STATE_LAUNCHING_SERVICES: 'LAUNCHING SERVICES',
        STATE_LOBBY: 'LOBBY',
    }
    return state_labels.get(state, 'UNKNOWN')
def clean_scene():
for type_name in dir(bpy.data):
try:
@ -77,10 +112,76 @@ def resolve_from_id(id, optionnal_type=None):
if id in root and ((optionnal_type is None) or (optionnal_type.lower() in root[id].__class__.__name__.lower())):
return root[id]
return None
def get_preferences():
    """Return this add-on's preferences instance."""
    addons = bpy.context.preferences.addons
    return addons[__package__].preferences
def current_milli_time():
    """Return the current UNIX time, rounded to whole milliseconds."""
    # round() on a float already yields an int in Python 3, matching the
    # original int(round(...)) result exactly.
    return round(time.time() * 1000)
def get_expanded_icon(prop: bpy.types.BoolProperty) -> str:
    """Return the disclosure-triangle icon name matching an expanded state."""
    return 'DISCLOSURE_TRI_DOWN' if prop else 'DISCLOSURE_TRI_RIGHT'
# Adapted from https://stackoverflow.com/a/55659577
def get_folder_size(folder):
    """Return the cumulative size of every entry under *folder* as a ByteSize."""
    total = sum(entry.stat().st_size for entry in Path(folder).rglob('*'))
    return ByteSize(total)
class ByteSize(int):
    """An ``int`` subclass holding a byte count with human-readable formatting.

    Adapted from https://stackoverflow.com/a/55659577 with three fixes:
    the suffix table was missing 'TB' (so kB**4 was mislabeled 'PB'),
    ``__rsub__`` computed ``self - other`` instead of ``other - self``,
    and exact unit boundaries (1 B, 1 kB, ...) fell through to the last
    suffix because of a strict ``<`` comparison.
    """

    _kB = 1024
    _suffixes = 'B', 'kB', 'MB', 'GB', 'TB', 'PB'

    def __new__(cls, *args, **kwargs):
        return super().__new__(cls, *args, **kwargs)

    def __init__(self, *args, **kwargs):
        # Pre-compute the value expressed in each supported unit.
        self.bytes = self.B = int(self)
        self.kilobytes = self.kB = self / self._kB**1
        self.megabytes = self.MB = self / self._kB**2
        self.gigabytes = self.GB = self / self._kB**3
        self.terabytes = self.TB = self / self._kB**4
        self.petabytes = self.PB = self / self._kB**5
        # Pick the largest unit whose value lies in [1, 1024); fall back to
        # the last suffix for values outside every bracket (e.g. 0 or >= 1 PB).
        *suffixes, last = self._suffixes
        suffix = next((
            suffix
            for suffix in suffixes
            if 1 <= getattr(self, suffix) < self._kB
            ), last)
        self.readable = suffix, getattr(self, suffix)

        super().__init__()

    def __str__(self):
        return self.__format__('.2f')

    def __repr__(self):
        return '{}({})'.format(self.__class__.__name__, super().__repr__())

    def __format__(self, format_spec):
        suffix, val = self.readable
        # NOTE: the value is ceiled before formatting, so '.2f' renders
        # whole numbers (e.g. '2.00 kB' for 1.5 kB).
        return '{val:{fmt}} {suf}'.format(val=math.ceil(val), fmt=format_spec, suf=suffix)

    def __sub__(self, other):
        return self.__class__(super().__sub__(other))

    def __add__(self, other):
        return self.__class__(super().__add__(other))

    def __mul__(self, other):
        return self.__class__(super().__mul__(other))

    def __rsub__(self, other):
        # Fixed: reflected subtraction must yield other - self.
        return self.__class__(super().__rsub__(other))

    def __radd__(self, other):
        # Addition is commutative, so delegating to __add__ is correct.
        return self.__class__(super().__add__(other))

    def __rmul__(self, other):
        return self.__class__(super().__rmul__(other))

View File

@ -0,0 +1,24 @@
# Base image: slim official Python image (Debian-based).
FROM python:slim

# Build-time arguments: replication package version to install and the
# image version recorded in the labels below.
ARG replication_version=0.0.21
ARG version=0.1.1

# Image metadata
LABEL maintainer="Swann Martinez"
LABEL version=$version
LABEL description="Blender multi-user addon \
dedicated server image."

# Runtime configuration, overridable with `docker run -e NAME=value`.
ENV password='admin'
ENV port=5555
ENV timeout=3000
ENV log_level=INFO
ENV log_file="multiuser_server.log"

# Install the replication server package at the pinned version.
RUN pip install replication==$replication_version

# Launch the server, forwarding the environment configuration as CLI flags.
CMD replication.serve -pwd ${password} -p ${port} -t ${timeout} -l ${log_level} -lf ${log_file}

View File

@ -0,0 +1,6 @@
import re
from pathlib import Path

# Print the add-on version in dotted form (e.g. "0.1.1"), extracted from the
# "(major, minor, patch)" tuple inside the package __init__.py.
# read_text() closes the file (the original left the handle open) and the
# raw string avoids invalid-escape-sequence warnings on modern Python.
init_py = Path("multi_user/__init__.py").read_text()
version = re.search(r"\d+, \d+, \d+", init_py).group(0)
digits = version.split(',')
print('.'.join(digits).replace(" ", ""))

View File

@ -0,0 +1,4 @@
import re
from pathlib import Path

# Print the full add-on version string (e.g. "0.1.1" or "0.1.1a15") extracted
# from the package __init__.py. read_text() closes the file (the original left
# the handle open) and the raw string avoids invalid-escape warnings.
init_py = Path("multi_user/__init__.py").read_text()
print(re.search(r"\d+\.\d+\.\d+\w\d+|\d+\.\d+\.\d+", init_py).group(0))

View File

@ -0,0 +1,10 @@
#! /bin/bash

# Start the multi-user server in a Docker container, from the image hosted on
# the multi-user GitLab container registry.
# Fixed: the log level must be passed as an env assignment matching the
# Dockerfile variable name (`-e log_level=DEBUG`, not `-e log-level DEBUG`,
# which was parsed as an empty variable plus a bogus image argument).
docker run -d \
-p 5555-5560:5555-5560 \
-e port=5555 \
-e log_level=DEBUG \
-e password=admin \
-e timeout=1000 \
registry.gitlab.com/slumber/multi-user/multi-user-server:0.1.0

View File

@ -0,0 +1,5 @@
#! /bin/bash
# Start replication server locally, and include logging (requires replication_version=0.0.21a15)
clear
replication.serve -p 5555 -pwd admin -t 1000 -l DEBUG -lf server.log

View File

@ -2,7 +2,7 @@ import os
import pytest
from deepdiff import DeepDiff
from uuid import uuid4
import bpy
import random
from multi_user.bl_types.bl_collection import BlCollection
@ -10,8 +10,13 @@ from multi_user.bl_types.bl_collection import BlCollection
def test_collection(clear_blend):
# Generate a collection with childrens and a cube
datablock = bpy.data.collections.new("root")
datablock.children.link(bpy.data.collections.new("child"))
datablock.children.link(bpy.data.collections.new("child2"))
datablock.uuid = str(uuid4())
s1 = bpy.data.collections.new("child")
s1.uuid = str(uuid4())
s2 = bpy.data.collections.new("child2")
s2.uuid = str(uuid4())
datablock.children.link(s1)
datablock.children.link(s2)
bpy.ops.mesh.primitive_cube_add()
datablock.objects.link(bpy.data.objects[0])

View File

@ -1,21 +0,0 @@
import os
import pytest
from deepdiff import DeepDiff
import bpy
import random
from multi_user.bl_types.bl_image import BlImage
def test_image(clear_blend):
datablock = bpy.data.images.new('asd',2000,2000)
implementation = BlImage()
expected = implementation._dump(datablock)
bpy.data.images.remove(datablock)
test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
assert not DeepDiff(expected, result)

View File

@ -7,13 +7,12 @@ import bpy
from multi_user.bl_types.bl_material import BlMaterial
def test_material(clear_blend):
def test_material_nodes(clear_blend):
nodes_types = [node.bl_rna.identifier for node in bpy.types.ShaderNode.__subclasses__()]
datablock = bpy.data.materials.new("test")
datablock.use_nodes = True
bpy.data.materials.create_gpencil_data(datablock)
for ntype in nodes_types:
datablock.node_tree.nodes.new(ntype)
@ -26,3 +25,18 @@ def test_material(clear_blend):
result = implementation._dump(test)
assert not DeepDiff(expected, result)
def test_material_gpencil(clear_blend):
    """Round-trip a grease-pencil material through BlMaterial dump/load."""
    datablock = bpy.data.materials.new("test")
    bpy.data.materials.create_gpencil_data(datablock)

    implementation = BlMaterial()
    dumped = implementation._dump(datablock)
    bpy.data.materials.remove(datablock)

    rebuilt = implementation._construct(dumped)
    implementation._load(dumped, rebuilt)
    round_trip = implementation._dump(rebuilt)

    assert not DeepDiff(dumped, round_trip)

View File

@ -6,8 +6,11 @@ from deepdiff import DeepDiff
import bpy
import random
from multi_user.bl_types.bl_scene import BlScene
from multi_user.utils import get_preferences
def test_scene(clear_blend):
get_preferences().sync_flags.sync_render_settings = True
datablock = bpy.data.scenes.new("toto")
datablock.view_settings.use_curve_mapping = True
# Test