Compare commits
656 Commits
SHA1 | Author | Date | |
---|---|---|---|
e0b56d8990 | |||
0687090f05 | |||
920744334c | |||
dfa7f98126 | |||
ea530f0f96 | |||
c3546ff74f | |||
83aa9b57ec | |||
28a265be68 | |||
7dfabb16c7 | |||
ea5d9371ca | |||
3df73a0716 | |||
ae3c994ff1 | |||
bd73b385b6 | |||
f054b1c5f2 | |||
d083100a2a | |||
b813b8df9e | |||
d0e966ff1a | |||
56cbf14fe1 | |||
8bf55ebd46 | |||
edbc5ee343 | |||
4a92511582 | |||
b42df2cf4a | |||
7549466824 | |||
423e71476d | |||
3bc4b20035 | |||
9966a24b5e | |||
577c01a594 | |||
3d72796c10 | |||
edcbd7b02a | |||
b368c985b8 | |||
cab1a71eaa | |||
33cb188509 | |||
0a3dd9b5b8 | |||
7fbdbdcc21 | |||
8f9d5aabf9 | |||
824d4d6a83 | |||
5f4bccbcd9 | |||
8e8e54fe7d | |||
04b13cc0b7 | |||
ba98875560 | |||
a9fb84a5c6 | |||
2f139178d3 | |||
e466f81600 | |||
cb836e30f5 | |||
152e356dad | |||
7b13e8978b | |||
e0839fe1fb | |||
aec3e8b8bf | |||
a89564de6b | |||
e301a10456 | |||
cfc6ce91bc | |||
4f731c6640 | |||
9b1b8f11fd | |||
e742c824fc | |||
6757bbbd30 | |||
f6a39e4290 | |||
410d8d2f1a | |||
bd64c17f05 | |||
dc063b5954 | |||
0ae34d5702 | |||
167b39f15e | |||
9adc0d7d6e | |||
fb622fa098 | |||
c533d4b86a | |||
6c47e095be | |||
f992d06b03 | |||
af3afc1124 | |||
b77ab2dd05 | |||
150054d19c | |||
8d2b9e5580 | |||
6870331c34 | |||
6f73b7fc29 | |||
6385830f53 | |||
b705228f4a | |||
73d2da4c47 | |||
b28e7c2149 | |||
38f06683be | |||
62221c9e49 | |||
e9f416f682 | |||
3108a06e89 | |||
470df50dc2 | |||
d8a94e3f5e | |||
47a0efef27 | |||
ca5aebfeff | |||
fe6ffd19b4 | |||
b9a6ddafe9 | |||
ae71d7757e | |||
34ed5da6f0 | |||
2c16f07ae7 | |||
60f25359d1 | |||
975b50a988 | |||
66417dc84a | |||
514f90d602 | |||
086876ad2e | |||
71c179f32f | |||
2399096b07 | |||
0c4d1aaa5f | |||
de8fbb0629 | |||
d7396e578c | |||
7f5b5866f2 | |||
3eb1af406b | |||
79ccac915f | |||
f5232ccea0 | |||
c599a4e6ea | |||
b3230177d8 | |||
f2da4cb8e9 | |||
605bcc7581 | |||
e31d76a641 | |||
97c2118b7e | |||
352977e442 | |||
a46d5fa227 | |||
ade736d8a5 | |||
d7f7e86015 | |||
5e7d1e1dda | |||
fa5f0c7296 | |||
f14d0915c8 | |||
d1e088d229 | |||
aa35da9c56 | |||
f26c3b2606 | |||
00d60be75b | |||
bb5b9fe4c8 | |||
c6af49492e | |||
6158ef5171 | |||
6475b4fc08 | |||
e4e09d63ff | |||
4b07ae0cc3 | |||
49a419cbe2 | |||
5d52fb2460 | |||
f1e09c1507 | |||
f915c52bd0 | |||
dee2e77552 | |||
7953a2a177 | |||
3f0082927e | |||
07ffe05a84 | |||
09ee1cf826 | |||
61bcec98c3 | |||
1c85d436fd | |||
03318026d4 | |||
7a0b142d69 | |||
eb874110f8 | |||
6e0c7bc332 | |||
ee83e61b09 | |||
99b2dc0539 | |||
53f1118181 | |||
2791264a92 | |||
6c2ee0cad3 | |||
20f8c25f55 | |||
0224f55104 | |||
644702ebdf | |||
9377b2be9b | |||
29cbf23142 | |||
a645f71d19 | |||
909d92a7a1 | |||
7ee9089087 | |||
6201c82392 | |||
0faf7d9436 | |||
e69e61117a | |||
25e988d423 | |||
8a3ab895e0 | |||
06a8e3c0ab | |||
c1c1628a38 | |||
022e3354d9 | |||
211cb848b9 | |||
25e233f328 | |||
9bc3d9b29d | |||
15debf339d | |||
56df7d182d | |||
26e1579e35 | |||
a0e290ad6d | |||
092384b2e4 | |||
2dc3654e6c | |||
f37a9efc60 | |||
0c5d323063 | |||
b9f1b8a871 | |||
2f6d8e1701 | |||
9e64584f2d | |||
154aaf71c8 | |||
ac24ab69ff | |||
ad431378f8 | |||
784506cd95 | |||
eb7542b1dd | |||
2bc0d18120 | |||
27f9b8c659 | |||
ae08b40e8b | |||
6ce4cc2d47 | |||
f96d5e0e2f | |||
8ec80b5150 | |||
691c45b2c2 | |||
25f3e27b7f | |||
e2cdd26b7c | |||
eb52deade6 | |||
0e8bdf7fe5 | |||
b2d1cec7f4 | |||
29e19c7e05 | |||
dda252729d | |||
4e4d366a57 | |||
fca53a6725 | |||
80f37583cc | |||
5f763906c3 | |||
f64d36d7ed | |||
dc6975171c | |||
e5d3c664a7 | |||
d11035fd6c | |||
406039aa21 | |||
5e8c4c1455 | |||
92efa89e35 | |||
d806570ee0 | |||
b414ca31b1 | |||
59bfcede97 | |||
9d8e3a3e7b | |||
788477502d | |||
226c01df8b | |||
c57fb0ca67 | |||
745c7ca04a | |||
8ddb86556d | |||
b1ccbf72f3 | |||
f85001cb1a | |||
a7371c0566 | |||
3d9c20cc03 | |||
661065e51f | |||
c1fe033ff3 | |||
3ea45b3cf6 | |||
b385a821d4 | |||
ac1e1f39b7 | |||
41140286e1 | |||
c50313f8b1 | |||
2ad626197f | |||
e927676e3e | |||
4531b7fd6d | |||
5199a810cd | |||
2bdbfb082b | |||
9e6b1a141d | |||
9c3afdbd81 | |||
32669cef22 | |||
cc6d1a35bc | |||
63a36ad5d4 | |||
bcdefca32c | |||
88e69711ba | |||
1ccfd59e65 | |||
a201ae4ea6 | |||
e9029b1414 | |||
19946794f6 | |||
3f15092b3a | |||
838df92217 | |||
54b01e4513 | |||
c065b198d4 | |||
12c0dab881 | |||
7759234ea3 | |||
ad99a349f7 | |||
fdc7e4678c | |||
8d040cc304 | |||
f1c95d03f8 | |||
b55faf2d1c | |||
258f27d96e | |||
8027e541c3 | |||
fc1108ab61 | |||
54bcd41267 | |||
25c19471bb | |||
9e4e646bb1 | |||
95524fa3e9 | |||
07a646aa18 | |||
67fc19dae1 | |||
1070cabf7a | |||
fcc9292e02 | |||
f3d8f15ab1 | |||
4c44c2f1a0 | |||
e7d948049d | |||
0ad0f4d62d | |||
7df6ab1852 | |||
b4d1e04b87 | |||
2e60bb985f | |||
f8fa407a45 | |||
b2085e80f8 | |||
95241d4148 | |||
03490af042 | |||
94aeff7b35 | |||
f5452b8aee | |||
4f02134b6c | |||
52b2c25014 | |||
d8f68640b5 | |||
bb2fc2c32b | |||
582c908dc4 | |||
506284ad1f | |||
e7e8782c28 | |||
3f614cdcef | |||
5f1853bbe3 | |||
99d64cd411 | |||
607a1f25d3 | |||
b047b48738 | |||
e1688f7f12 | |||
5aa7a1f140 | |||
ca141f9860 | |||
d4d14d57ff | |||
1789ae20cf | |||
2e0414f9d5 | |||
52cbf79f39 | |||
e9e06bbf8f | |||
1f16e07a3c | |||
e476d44527 | |||
7a09026b6c | |||
09c724ac53 | |||
7152bb825d | |||
e8d23426d7 | |||
dcb593db07 | |||
3b9a722812 | |||
7cc0c0ccf8 | |||
1304489748 | |||
673a9dd669 | |||
fc15478dfa | |||
c23b5923ab | |||
2d700e83bb | |||
271f210591 | |||
e65dd1939a | |||
76be9092c8 | |||
3394299e8b | |||
5e6f6ac460 | |||
49d676f4f2 | |||
4dd4cea3ed | |||
408a16064c | |||
cfd80dd426 | |||
0fde356f4b | |||
427b36ddaf | |||
1b94e017e6 | |||
b3b2296b06 | |||
9c897745fd | |||
0783c625d0 | |||
a1036956c3 | |||
bfbf2727ea | |||
39766739d2 | |||
776664149e | |||
fef088e39b | |||
31feb2439d | |||
e041b2cb91 | |||
379e7cdf71 | |||
928eccfa23 | |||
9c9d7a31bf | |||
d46ea3a117 | |||
820c6dad7e | |||
acbf897f5d | |||
981ac03855 | |||
c20777f860 | |||
219973930b | |||
79ba63ce85 | |||
922538dc3a | |||
5574059b46 | |||
289a49251e | |||
ef9e9dbae8 | |||
98d86c050b | |||
3f0c31d771 | |||
d7e47e5c14 | |||
cab4a8876b | |||
d19932cc3b | |||
ea9ee4ead1 | |||
667c3cd04d | |||
6334bfdc01 | |||
2016af33b7 | |||
f0a2659b43 | |||
489502a783 | |||
cb6e26513c | |||
3680a751aa | |||
4413785903 | |||
25825f7aeb | |||
73019fc0b0 | |||
6a98e749f9 | |||
a84fccb3ce | |||
f9222d84ea | |||
d7964b645a | |||
c3ae56abd2 | |||
daff548010 | |||
757dbfd6ab | |||
01fdf7b35b | |||
90d4bb0e47 | |||
01faa94a9a | |||
b55700862f | |||
90a44eb5db | |||
fb0760928e | |||
8ce53b8413 | |||
2484028b5a | |||
2fcb4615be | |||
653cf7e25a | |||
aa0b54a054 | |||
8d2755060e | |||
ba9b4ebe70 | |||
b8f46c2523 | |||
153ff5b129 | |||
931301074e | |||
56e5709a35 | |||
7fa97704bd | |||
85b3f6e246 | |||
a0676f4e37 | |||
5a0be0f6f9 | |||
717a2da3de | |||
4a127e617c | |||
3f7cb65393 | |||
cd00813aed | |||
511983c7ff | |||
1e580dbcd6 | |||
824040660b | |||
931c683030 | |||
ff5e56e36c | |||
1c6e88ce61 | |||
08b9a35981 | |||
7db7382c68 | |||
1e81a2de16 | |||
624f67a621 | |||
ffb6c397b8 | |||
dbaff5df85 | |||
75839e60f0 | |||
cd10dbb04d | |||
7c3ac6aeed | |||
d30f4452be | |||
61a05dc347 | |||
9f59e7b6e8 | |||
30f787f507 | |||
a8da01c8ff | |||
9df7cd4659 | |||
c281ac4397 | |||
250cf91032 | |||
fe9a096ab2 | |||
a6e1566f89 | |||
adeb694b2d | |||
50d14e663e | |||
9b8d69042d | |||
b2475081b6 | |||
aef1d8987c | |||
d8f49ff298 | |||
efa243211b | |||
f03a3aadff | |||
16147ae2ba | |||
8e600778ab | |||
292f76aea5 | |||
28c4ccf1f3 | |||
a7641d6fc9 | |||
c7584964fe | |||
02938d1e40 | |||
2baddd1fc8 | |||
2703e8c15e | |||
8468be2690 | |||
d22586cd6e | |||
99feeb9b5e | |||
d3bb1f043d | |||
8d609a82ff | |||
0cbdc265a3 | |||
57a243798b | |||
a811211061 | |||
3d2ff10f80 | |||
ded2d3490f | |||
450b18cf16 | |||
414af6388c | |||
549b0b3784 | |||
fc9ab1a7e6 | |||
44bffc1850 | |||
e9f556f690 | |||
d15566daf7 | |||
badd4cc247 | |||
0a52b3cf80 | |||
26cec517c0 | |||
d546bc257a | |||
e71893c5b6 | |||
77979acc42 | |||
a141e9bfe7 | |||
5d576d6f25 | |||
6142a09283 | |||
7d989faae6 | |||
56a625ae48 | |||
07ce5ddb83 | |||
93472db01b | |||
9db7bcb73c | |||
536cf4cf7b | |||
e21995a36d | |||
67f1149f65 | |||
699bdf5fe0 | |||
e3d76c37db | |||
3d9a320612 | |||
2d352ec14a | |||
386af902a4 | |||
b9d144f24a | |||
32b24164cf | |||
5f2dca4032 | |||
6a0705a73e | |||
f1020fb072 | |||
65525ca2e1 | |||
af53e54aa8 | |||
6975edfb66 | |||
c42c0cb017 | |||
378f52a1ab | |||
b182632723 | |||
8a2d178a4f | |||
81752e9a23 | |||
22eb65358b | |||
2034773df6 | |||
5422518a47 | |||
210e8aa0fa | |||
b0c195c34f | |||
0753882015 | |||
cbdf8c94d7 | |||
6c47e72766 | |||
63294d2fd8 | |||
4825e6876c | |||
d1501da2f1 | |||
3b459d5387 | |||
6a667b0745 | |||
4828a91bd5 | |||
b6614768c4 | |||
30278126f8 | |||
fcf4604153 | |||
332287415e | |||
8e5c4b73a8 | |||
ff12746fe8 | |||
356b61c9ff | |||
a5e949d52e | |||
6191e4ac58 | |||
e7744708d9 | |||
e878bc62ba | |||
e4b5fd3170 | |||
da76386dd4 | |||
3181be0887 | |||
e9141c4970 | |||
ece88f3dda | |||
c8827c5e4b | |||
8800053fa4 | |||
87fa6e02e6 | |||
8b7716ac3c | |||
44e525aec7 | |||
76a51032b3 | |||
e22b036cc9 | |||
e253ccd52d | |||
da5406952c | |||
73a2bb56e2 | |||
011eba8cc9 | |||
07df42d087 | |||
ff932b0cdf | |||
fdb7995cbf | |||
e99e6965ec | |||
65fd73d6c0 | |||
0cb75af583 | |||
cb031bab55 | |||
f8bb544005 | |||
95768f4ea1 | |||
278aa6607d | |||
b6f923399f | |||
c2fe5c456b | |||
5008212739 | |||
aa3d24f991 | |||
3c176d25ed | |||
2eec228c3e | |||
f55319c453 | |||
28ca489fc3 | |||
ea3c9d701b | |||
a8d97aa43b | |||
c56b52d069 | |||
d12fe9dd70 | |||
315c2903c0 | |||
a12d32341c | |||
67b9e1a8cf | |||
dedc5a04e0 | |||
efa2d0eeef | |||
b1f5ca5660 | |||
2666444007 | |||
89035ebb85 | |||
c43207e9bf | |||
0b74f54268 | |||
af923feecd | |||
12ba867ee1 | |||
805dd4f319 | |||
f405662fb8 | |||
43eeb8299e | |||
ebe6af8082 | |||
27e310bfbf | |||
8c340d83ac | |||
8e2a831015 | |||
9e19b3ca91 | |||
5d3f81081a | |||
88e7fde848 | |||
07b6b552be | |||
e54ce5962e | |||
8e42b729e9 | |||
b0ea3f1acd | |||
7bf676020d | |||
99649f588c | |||
1b3c655829 | |||
cfadc91299 | |||
345a5d55e7 | |||
8f97091118 | |||
7e4d3e6d3f | |||
bd27da7aa6 | |||
0abd81f9d1 | |||
98c65e80fe | |||
7336819268 | |||
89ba3bcf56 | |||
9d0350ae22 | |||
4aac68e746 | |||
db5bcfdd11 | |||
25fa05ab20 | |||
6c3d6a5dd9 | |||
5bae286774 | |||
2f5869ea9c | |||
f577878fff | |||
320e824dbf | |||
fbf62244fb | |||
58962f755c | |||
ca0fb7c76f | |||
485e2527fe | |||
d322531dc0 | |||
6969e386e2 | |||
c9c359e226 | |||
add01961f2 | |||
f84b441321 | |||
0f863749f5 | |||
68d28e4a66 | |||
7423bd95c2 | |||
f5e81b2d92 | |||
9915fa83e1 | |||
e98ff91a5b | |||
9b79b441cf | |||
727394de5d | |||
1d041dcbdd | |||
b3ba76614a | |||
d293b36f1e | |||
960b699e3d | |||
2f002be7a8 | |||
f2b0e35dcf | |||
b5910ca5b4 | |||
5e1f019f24 | |||
e047874dbd | |||
688b57578c | |||
411d77643e | |||
5867d39a52 | |||
7eccef5c82 | |||
7afb80864e | |||
2e9c3a48a2 | |||
117928de48 | |||
16974ac0de | |||
8155836a12 | |||
2fb0069214 | |||
3d69e96c9f | |||
7c9decdded | |||
98999f09bb | |||
5819a40a30 | |||
93be6ae6cc | |||
14ccb68cf7 | |||
0ec7da0210 | |||
49552bbdb4 | |||
2aef9a3e63 | |||
11eb31af98 | |||
207199adc9 | |||
92a5a875a4 | |||
e47dba5775 | |||
4dccf8d319 | |||
0919579dd1 | |||
b2d3f5de6a | |||
c39e741618 | |||
f60c3937e5 | |||
5d3aa19170 | |||
b8eb3680c2 | |||
07ae67edaf |
.gitignore (vendored, 9 lines changed)
@@ -6,4 +6,11 @@ __pycache__/
.vscode
cache
config
*.code-workspace
*.code-workspace
multi_user_updater/

# sphinx build folder
_build

# ignore generated zip generated from blender_addon_tester
*.zip
.gitlab-ci.yml (new file, 9 lines changed)
@@ -0,0 +1,9 @@
stages:
  - test
  - build
  - deploy

include:
  - local: .gitlab/ci/test.gitlab-ci.yml
  - local: .gitlab/ci/build.gitlab-ci.yml
  - local: .gitlab/ci/deploy.gitlab-ci.yml
.gitlab/ci/build.gitlab-ci.yml (new file, 10 lines changed)
@@ -0,0 +1,10 @@
build:
  stage: build
  image: debian:stable-slim
  script:
    - rm -rf tests .git .gitignore script
  artifacts:
    name: multi_user
    paths:
      - multi_user
.gitlab/ci/deploy.gitlab-ci.yml (new file, 18 lines changed)
@@ -0,0 +1,18 @@
deploy:
  stage: deploy
  image: slumber/docker-python
  variables:
    DOCKER_DRIVER: overlay2
    DOCKER_TLS_CERTDIR: "/certs"

  services:
    - docker:19.03.12-dind

  script:
    - RP_VERSION="$(python scripts/get_replication_version.py)"
    - VERSION="$(python scripts/get_addon_version.py)"
    - echo "Building docker image with replication ${RP_VERSION}"
    - docker build --build-arg replication_version=${RP_VERSION} --build-arg version={VERSION} -t registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} ./scripts/docker_server
    - echo "Pushing to gitlab registry ${VERSION}"
    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
    - docker push registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION}
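The deploy job derives its image tags from two helper scripts, `scripts/get_replication_version.py` and `scripts/get_addon_version.py`, which are not included in this comparison. A plausible sketch of the addon-version helper follows, assuming the version is stored in the `bl_info` dict of `multi_user/__init__.py`; the parsing approach and paths are assumptions, not the repository's actual script.

```python
# scripts/get_addon_version.py, illustrative sketch only (not the repository's script).
# Assumption: the addon keeps its version in the bl_info dict of multi_user/__init__.py,
# e.g. "version": (0, 1, 0). The file is parsed statically so Blender is not required.
import ast
import pathlib

ADDON_INIT = pathlib.Path(__file__).parent.parent / "multi_user" / "__init__.py"


def get_addon_version() -> str:
    tree = ast.parse(ADDON_INIT.read_text())
    for node in ast.walk(tree):
        # Look for the top-level assignment `bl_info = {...}`.
        if isinstance(node, ast.Assign) and getattr(node.targets[0], "id", None) == "bl_info":
            bl_info = ast.literal_eval(node.value)
            return ".".join(str(part) for part in bl_info["version"])
    raise RuntimeError("bl_info not found in {}".format(ADDON_INIT))


if __name__ == "__main__":
    # CI captures stdout: VERSION="$(python scripts/get_addon_version.py)"
    print(get_addon_version())
```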
.gitlab/ci/test.gitlab-ci.yml (new file, 5 lines changed)
@@ -0,0 +1,5 @@
test:
  stage: test
  image: slumber/blender-addon-testing:latest
  script:
    - python3 scripts/test_addon.py
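The test stage simply runs `scripts/test_addon.py` inside the `slumber/blender-addon-testing` image; the script itself is not part of this comparison. Below is a minimal sketch of such a driver, assuming the `blender_addon_tester` package (also referenced by the new `.gitignore` comment) exposes a `test_blender_addon(addon_path, blender_revision)` entry point; treat that signature and the chosen paths as assumptions.

```python
# scripts/test_addon.py, illustrative sketch only; the real script is not shown here.
# Assumption: blender_addon_tester provides test_blender_addon(addon_path, blender_revision)
# and returns a non-zero value when the addon's test suite fails.
import sys

try:
    import blender_addon_tester
except ImportError as err:
    sys.exit(f"blender_addon_tester is required inside the CI image: {err}")


def main() -> int:
    addon = "multi_user"           # assumed addon directory at the repository root
    blender_revision = "2.83"      # assumed Blender version to test against
    return blender_addon_tester.test_blender_addon(
        addon_path=addon,
        blender_revision=blender_revision,
    )


if __name__ == "__main__":
    sys.exit(main())
```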
.gitlab/issue_templates/Bug.md (new file, 46 lines changed)
@@ -0,0 +1,46 @@
<!---
Please read this!

Before opening a new issue, make sure to search for keywords in the issues
filtered by the "bug" label:

- https://gitlab.com/slumber/multi-user/-/issues?scope=all&utf8=✓&label_name[]=bug

and verify the issue you're about to submit isn't a duplicate.
--->

### Summary

(Summarize the bug encountered concisely)

* Addon version: (your addon-version)
* Blender version: (your blender version)
* OS: (your os windows/linux/mac)


### Steps to reproduce

(How one can reproduce the issue - this is very important)

### Example Project [optionnal]
(If possible, please create an example project that exhibits the problematic behavior, and link to it here in the bug report)


### What is the current *bug* behavior?

(What actually happens)


### Relevant logs and/or screenshots

(Paste any relevant logs - please use code blocks (```) to format console output,
logs, and code as it's tough to read otherwise.)


### Possible fixes [optionnal]

(If you can, link to the line of code that might be responsible for the problem)


/label ~type::bug
/cc @project-manager
.gitlab/issue_templates/Documentation.md (new file, 30 lines changed)
@@ -0,0 +1,30 @@
### Problem to solve

<!-- Include the following detail as necessary:
* What feature(s) affected?
* What docs or doc section affected? Include links or paths.
* Is there a problem with a specific document, or a feature/process that's not addressed sufficiently in docs?
* Any other ideas or requests?
-->

### Further details

<!--
* Any concepts, procedures, reference info we could add to make it easier to successfully use the multi-user addom?
* Include use cases, benefits, and/or goals for this work.
-->

### Proposal

<!-- Further specifics for how can we solve the problem. -->

### Who can address the issue

<!-- What if any special expertise is required to resolve this issue? -->

### Other links/references

<!-- E.g. related GitLab issues/MRs -->

/label ~type::documentation
/cc @project-manager
.gitlab/issue_templates/Feature Proposal.md (new file, 18 lines changed)
@@ -0,0 +1,18 @@
### Problem to solve

<!-- What problem do we solve? Try to define the who/what/why of the opportunity as a user story. For example, "As a (who), I want (what), so I can (why/value)." -->


### Proposal

<!-- How are we going to solve the problem?-->

### Further details

<!-- Include use cases, benefits, goals, or any other details that will help us understand the problem better. -->


### Links / references

/label ~type::feature request
/cc @project-manager
.gitlab/issue_templates/Refactoring.md (new file, 34 lines changed)
@@ -0,0 +1,34 @@
## Summary

<!--
Please briefly describe what part of the code base needs to be refactored.
-->

## Improvements

<!--
Explain the benefits of refactoring this code.
-->

## Risks

<!--
Please list features that can break because of this refactoring and how you intend to solve that.
-->

## Involved components

<!--
List files or directories that will be changed by the refactoring.
-->

## Optional: Intended side effects

<!--
If the refactoring involves changes apart from the main improvements (such as a better UI), list them here.
It may be a good idea to create separate issues and link them here.
-->


/label ~type::refactoring
/cc @project-manager
.gitmodules (vendored, deleted, 3 lines changed)
@@ -1,3 +0,0 @@
[submodule "libs/replication"]
	path = libs/replication
	url = git@gitlab.com:slumber/replication.git
CHANGELOG (deleted, 26 lines changed)
@@ -1,26 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.

## [v0.0.1]
### Added
- Functionnal chat test

## [v0.0.2]
### Added
- Property map shared between clients
- Callbacks to push modifications to the blender scene
- Observer Pattern for live property monitoring
- ID to identify properties owner (For primitive right managment)

### Removed
- Chat mecanism ( Will come back after the complete modularity refactoring)
- Unused files and sub-modules

## [v0.0.5]
### Added

- queued communication between threads
- various fixes
- multithreaded archiecture
- curve, gpencil support
- rightmanagment
CHANGELOG.md (new file, 98 lines changed)
@@ -0,0 +1,98 @@
# Changelog

All notable changes to this project will be documented in this file.

## [0.0.2] - 2020-02-28

### Added

- Blender animation features support (alpha).
  - Action.
  - Armature (Unstable).
  - Shape key.
  - Drivers.
  - Constraints.
- Snap to user timeline tool.
- Light probes support (only since 2.83).
- Metaballs support.
- Improved modifiers support.
- Online documentation.
- Improved Undo handling.
- Improved overall session handling:
  - Time To Leave : ensure clients/server disconnect automatically on connection lost.
  - Ping: show clients latency.
  - Non-blocking connection.
  - Connection state tracking.
- Service communication layer to manage background daemons.

### Changed

- UI revamp:
  - Show users frame.
  - Expose IPC(inter process communication) port.
  - New user list.
  - Progress bar to track connection status.
- Right management takes view-layer in account for object selection.
- Use a basic BFS approach for replication graph pre-load.
- Serialization is now based on marshal (2x performance improvements).
- Let pip chose python dependencies install path.

## [0.0.3] - 2020-07-29

### Added

- Auto updater support
- Big Performances improvements on Meshes, Gpencils, Actions
- Multi-scene workflow support
- Render setting synchronization
- Kick command
- Dedicated server with a basic command set
- Administrator session status
- Tests
- Blender 2.83-2.90 support

### Changed

- Config is now stored in blender user preference
- Documentation update
- Connection protocol
- UI revamp:
  - user localization
  - repository init

### Removed

- Unused strict right management strategy
- Legacy config management system

## [0.1.0] - preview

### Added

- Dependency graph driven updates [experimental]
- Edit Mode updates
- Late join mechanism
- Sync Axis lock replication
- Sync collection offset
- Sync camera orthographic scale
- Sync custom fonts
- Sync sound files
- Logging configuration (file output and level)
- Object visibility type replication
- Optionnal sync for active camera
- Curve->Mesh conversion
- Mesh->gpencil conversion

### Changed

- Auto updater now handle installation from branches
- Use uuid for collection loading
- Moved session instance to replication package

### Fixed

- Prevent unsupported data types to crash the session
- Modifier vertex group assignation
- World sync
- Snapshot UUID error
- The world is not synchronized
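One of the 0.0.2 changes above is switching serialization to Python's built-in `marshal` module for roughly a 2x speedup. The standalone snippet below only illustrates what a marshal round-trip and timing comparison look like; the addon's actual serializer lives in the external replication package and is not shown in this comparison.

```python
# Generic illustration of a marshal-based round-trip (standard library only); this is
# not the addon's serializer, which belongs to the replication package.
import marshal
import pickle
import time

payload = {"vertices": [(float(i), float(i), 0.0) for i in range(100_000)]}

start = time.perf_counter()
blob = marshal.dumps(payload)          # serialize to bytes
restored = marshal.loads(blob)         # deserialize back to Python objects
marshal_time = time.perf_counter() - start

start = time.perf_counter()
pickle.loads(pickle.dumps(payload))
pickle_time = time.perf_counter() - start

assert restored == payload
# marshal only handles simple built-in types, which is why it can be fast here;
# the measured ratio depends on the machine and the payload.
print(f"marshal: {marshal_time:.4f}s  pickle: {pickle_time:.4f}s")
```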
README.md (130 lines changed)
@@ -1,108 +1,78 @@
# Multi-user blender addon
# MULTI-USER for blender

> Enable real-time collaborative workflow inside blender

<img src="https://i.imgur.com/X0B7O1Q.gif" width=600>

:warning: Under development, use it at your own risks. Currently tested on Windows platform. :warning:

This tool aims to allow multiple users to work on the same scene over the network. Based on a Clients / Server architecture, the data-oriented replication schema replicate blender data-blocks across the wire.

This tool aims to allow multiple users to work on the same scene over the network. Based on a Clients / Server architecture, the data-oriented replication schema replicate blender datablocks across the wire.
## Quick installation

## Installation
1. Download latest release [multi_user.zip](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build).
2. Run blender as administrator (dependencies installation).
3. Install last_version.zip from your addon preferences.

1. Download lastest release here.
2. Install last_version.zip from your addon preferences
[Dependencies](#dependencies) will be automatically added to your blender python during installation.

## Usage

Settings are under: `View3D -> Sidebar -> Multiuser`
See the [documentation](https://multi-user.readthedocs.io/en/latest/) for details.

### Before starting
## Current development status

#### 1. User
Currently, not all data-block are supported for replication over the wire. The following list summarizes the status for each ones.

_All user-related infos fields._
| Name | Status | Comment |
| ----------- | :----: | :--------------------------------------------------------------------------: |
| action | ✔️ | |
| armature | ❗ | Not stable |
| camera | ✔️ | |
| collection | ✔️ | |
| curve | ❗ | Nurbs not supported |
| gpencil | ✔️ | [Airbrush not supported](https://gitlab.com/slumber/multi-user/-/issues/123) |
| image | ✔️ | |
| mesh | ✔️ | |
| material | ✔️ | |
| metaball | ✔️ | |
| object | ✔️ | |
| texts | ✔️ | |
| scene | ✔️ | |
| world | ✔️ | |
| lightprobes | ✔️ | |
| compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
| nla | ❌ | |
| volumes | ❌ | |
| particles | ❌ | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24) |
| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
| vse | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| libraries | ❗ | Partial |

- **name**: username.
- **color**: color used to represent the user into other user workspaces.

#### 2. Network

_Session-related managment tools_

:warning: If you host a session over internet, special network configuration is needed :warning:

| Host | Join |
| :------------------------------------------- | :----------------------------- |
| asd |
|  |  |
| Start empty: Cleanup the file before hosting | IP: host ip |
| | Port: host port |

#### 2.1 Advanced

**Right strategy** (only host) enable you to choose between a strict and a relaxed pattern:

- **Strict**: Host is the king, by default the host own each properties, only him can grant modification rights.
- **Common**: Each properties are under common rights by default, on selection, a property is only modifiable by the owner.

_On each strategy, when a user is the owner he can choose to pass his rights to somemone else._

**Propertie frequency gird** allow to set a custom replication frequency for each type of datablock

### In-session

#### Connected users

This pannel displays all connected users, including you. The **camera button** allow you to focus on a user.

#### Properties outliner

## Current development statut

Actually, not all datablock are supported for replication over the wire. The following list summarizes the status for each ones.

| Name | Statut | Comment |
| ---------- | :----------------: | :--------: |
| action | :x: | WIP |
| armature | :x: | WIP |
| camera | :white_check_mark: | |
| collection | :white_check_mark: | |
| curve | :white_check_mark: | Not tested |
| gpencil | :white_check_mark: | |
| image | :white_check_mark: | Local only |
| mesh | :white_check_mark: | |
| material | :white_check_mark: | |
| metaball | :x: | |
| object | :white_check_mark: | |
| scene | :white_check_mark: | |
| world | :white_check_mark: | |

### Performance issues

Since this addon is writen in pure python for a prototyping purpose, perfomance could be better from all perspective. Soon I will start to port the multi-user addon concept to a blender branch.
Since this addon is written in pure python for a research purpose, performances could be better from all perspective.
I'm working on it.

## Dependencies

| Dependencies | Version | Needed |
| ------------ | :-----: | -----: |
| ZeroMQ | latest | yes |
| msgpack | latest | yes |
| PyYAML | latest | yes |
| Replication | latest | yes |

## Contributing

1. Fork it (<https://gitlab.com/yourname/yourproject/fork>)
2. Create your feature branch (`git checkout -b feature/fooBar`)
3. Commit your changes (`git commit -am 'Add some fooBar'`)
4. Push to the branch (`git push origin feature/fooBar`)
5. Create a new Pull Request
See [contributing section](https://multi-user.readthedocs.io/en/latest/ways_to_contribute.html) of the documentation.

Feel free to [join the discord server](https://discord.gg/aBPvGws) to chat, seek help and contribute.

## Licensing

See [license](LICENSE)
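The README above says the Python dependencies are added to Blender's bundled interpreter automatically at install time, and the removed root `__init__.py` later in this comparison calls `environment.setup(DEPENDENCIES, bpy.app.binary_path_python)` for exactly that. The sketch below illustrates how such a pip bootstrap typically works; `environment.py` itself is not part of this comparison, so the function body and names are assumptions.

```python
# Illustrative sketch of a pip-based dependency bootstrap, similar in spirit to the
# environment.setup(DEPENDENCIES, bpy.app.binary_path_python) call shown later in this
# comparison. The real environment.py is not included here; names below are assumptions.
import importlib
import subprocess
import sys


def setup(dependencies, python_path):
    """Install any missing module into Blender's bundled Python via pip."""
    for module_name, package_name in dependencies:
        try:
            importlib.import_module(module_name)
        except ImportError:
            subprocess.run(
                [python_path, "-m", "pip", "install", package_name],
                check=True,
            )


# DEPENDENCIES uses (import name, pip name) pairs, as in the removed __init__.py.
DEPENDENCIES = {
    ("zmq", "zmq"),
    ("msgpack", "msgpack"),
    ("yaml", "pyyaml"),
    ("jsondiff", "jsondiff"),
}

# Inside Blender this would be: setup(DEPENDENCIES, bpy.app.binary_path_python)
if __name__ == "__main__":
    setup(DEPENDENCIES, sys.executable)
```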
287
__init__.py
@ -1,287 +0,0 @@
|
||||
bl_info = {
|
||||
"name": "Multi-User",
|
||||
"author": "CUBE CREATIVE",
|
||||
"description": "",
|
||||
"blender": (2, 80, 0),
|
||||
"location": "",
|
||||
"warning": "Unstable addon, use it at your own risks",
|
||||
"category": "Collaboration"
|
||||
}
|
||||
|
||||
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import sys
|
||||
|
||||
import bpy
|
||||
from bpy.app.handlers import persistent
|
||||
|
||||
from . import environment, utils, presence
|
||||
from .libs.replication.replication.constants import RP_COMMON
|
||||
|
||||
|
||||
# TODO: remove dependency as soon as replication will be installed as a module
|
||||
DEPENDENCIES = {
|
||||
("zmq","zmq"),
|
||||
("msgpack","msgpack"),
|
||||
("yaml","pyyaml"),
|
||||
("jsondiff","jsondiff")
|
||||
}
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.setLevel(logging.ERROR)
|
||||
|
||||
#TODO: refactor config
|
||||
# UTILITY FUNCTIONS
|
||||
def generate_supported_types():
|
||||
stype_dict = {'supported_types':{}}
|
||||
for type in bl_types.types_to_register():
|
||||
_type = getattr(bl_types, type)
|
||||
props = {}
|
||||
props['bl_delay_refresh']=_type.bl_delay_refresh
|
||||
props['bl_delay_apply']=_type.bl_delay_apply
|
||||
props['use_as_filter'] = False
|
||||
props['icon'] = _type.bl_icon
|
||||
props['auto_push']=_type.bl_automatic_push
|
||||
props['bl_name']=_type.bl_id
|
||||
|
||||
stype_dict['supported_types'][_type.bl_rep_class.__name__] = props
|
||||
|
||||
return stype_dict
|
||||
|
||||
|
||||
def client_list_callback(scene, context):
|
||||
from . import operators
|
||||
from .bl_types.bl_user import BlUser
|
||||
|
||||
items = [(RP_COMMON, RP_COMMON, "")]
|
||||
|
||||
username = bpy.context.window_manager.session.username
|
||||
cli = operators.client
|
||||
if cli:
|
||||
client_keys = cli.list(filter=BlUser)
|
||||
for k in client_keys:
|
||||
name = cli.get(uuid=k).buffer["name"]
|
||||
|
||||
name_desc = name
|
||||
if name == username:
|
||||
name_desc += " (self)"
|
||||
|
||||
items.append((name, name_desc, ""))
|
||||
|
||||
return items
|
||||
|
||||
|
||||
def randomColor():
|
||||
r = random.random()
|
||||
v = random.random()
|
||||
b = random.random()
|
||||
return [r, v, b]
|
||||
|
||||
class ReplicatedDatablock(bpy.types.PropertyGroup):
|
||||
'''name = StringProperty() '''
|
||||
type_name: bpy.props.StringProperty()
|
||||
bl_name: bpy.props.StringProperty()
|
||||
bl_delay_refresh: bpy.props.FloatProperty()
|
||||
bl_delay_apply: bpy.props.FloatProperty()
|
||||
use_as_filter: bpy.props.BoolProperty(default=True)
|
||||
auto_push: bpy.props.BoolProperty(default=True)
|
||||
icon: bpy.props.StringProperty()
|
||||
|
||||
class SessionProps(bpy.types.PropertyGroup):
|
||||
username: bpy.props.StringProperty(
|
||||
name="Username",
|
||||
default="user_{}".format(utils.random_string_digits())
|
||||
)
|
||||
ip: bpy.props.StringProperty(
|
||||
name="ip",
|
||||
description='Distant host ip',
|
||||
default="127.0.0.1"
|
||||
)
|
||||
user_uuid: bpy.props.StringProperty(
|
||||
name="user_uuid",
|
||||
default="None"
|
||||
)
|
||||
port: bpy.props.IntProperty(
|
||||
name="port",
|
||||
description='Distant host port',
|
||||
default=5555
|
||||
)
|
||||
add_property_depth: bpy.props.IntProperty(
|
||||
name="add_property_depth",
|
||||
default=1
|
||||
)
|
||||
outliner_filter: bpy.props.StringProperty(name="None")
|
||||
is_admin: bpy.props.BoolProperty(
|
||||
name="is_admin",
|
||||
default=False
|
||||
)
|
||||
init_scene: bpy.props.BoolProperty(
|
||||
name="init_scene",
|
||||
default=True
|
||||
)
|
||||
start_empty: bpy.props.BoolProperty(
|
||||
name="start_empty",
|
||||
default=True
|
||||
)
|
||||
active_object: bpy.props.PointerProperty(
|
||||
name="active_object", type=bpy.types.Object)
|
||||
session_mode: bpy.props.EnumProperty(
|
||||
name='session_mode',
|
||||
description='session mode',
|
||||
items={
|
||||
('HOST', 'hosting', 'host a session'),
|
||||
('CONNECT', 'connexion', 'connect to a session')},
|
||||
default='HOST')
|
||||
right_strategy: bpy.props.EnumProperty(
|
||||
name='right_strategy',
|
||||
description='right strategy',
|
||||
items={
|
||||
('STRICT', 'strict', 'strict right repartition'),
|
||||
('COMMON', 'common', 'relaxed right repartition')},
|
||||
default='COMMON')
|
||||
client_color: bpy.props.FloatVectorProperty(
|
||||
name="client_instance_color",
|
||||
subtype='COLOR',
|
||||
default=randomColor())
|
||||
clients: bpy.props.EnumProperty(
|
||||
name="clients",
|
||||
description="client enum",
|
||||
items=client_list_callback)
|
||||
enable_presence: bpy.props.BoolProperty(
|
||||
name="Presence overlay",
|
||||
description='Enable overlay drawing module',
|
||||
default=True,
|
||||
update=presence.update_presence
|
||||
)
|
||||
presence_show_selected: bpy.props.BoolProperty(
|
||||
name="Show selected objects",
|
||||
description='Enable selection overlay ',
|
||||
default=True,
|
||||
update=presence.update_overlay_settings
|
||||
)
|
||||
presence_show_user: bpy.props.BoolProperty(
|
||||
name="Show users",
|
||||
description='Enable user overlay ',
|
||||
default=True,
|
||||
update=presence.update_overlay_settings
|
||||
)
|
||||
supported_datablock: bpy.props.CollectionProperty(
|
||||
type=ReplicatedDatablock,
|
||||
)
|
||||
session_filter: bpy.props.CollectionProperty(
|
||||
type=ReplicatedDatablock,
|
||||
)
|
||||
filter_owned: bpy.props.BoolProperty(
|
||||
name="filter_owned",
|
||||
description='Show only owned datablocks',
|
||||
default=True
|
||||
)
|
||||
use_select_right: bpy.props.BoolProperty(
|
||||
name="Selection right",
|
||||
description='Change right on selection',
|
||||
default=True
|
||||
)
|
||||
|
||||
def load(self):
|
||||
config = environment.load_config()
|
||||
if "username" in config.keys():
|
||||
self.username = config["username"]
|
||||
self.ip = config["ip"]
|
||||
self.port = config["port"]
|
||||
self.start_empty = config["start_empty"]
|
||||
self.enable_presence = config["enable_presence"]
|
||||
self.client_color = config["client_color"]
|
||||
else:
|
||||
logger.error("Fail to read user config")
|
||||
|
||||
if len(self.supported_datablock)>0:
|
||||
self.supported_datablock.clear()
|
||||
if "supported_types" not in config:
|
||||
config = generate_supported_types()
|
||||
for datablock in config["supported_types"].keys():
|
||||
rep_value = self.supported_datablock.add()
|
||||
rep_value.name = datablock
|
||||
rep_value.type_name = datablock
|
||||
|
||||
config_block = config["supported_types"][datablock]
|
||||
rep_value.bl_delay_refresh = config_block['bl_delay_refresh']
|
||||
rep_value.bl_delay_apply = config_block['bl_delay_apply']
|
||||
rep_value.icon = config_block['icon']
|
||||
rep_value.auto_push = config_block['auto_push']
|
||||
rep_value.bl_name = config_block['bl_name']
|
||||
|
||||
def save(self,context):
|
||||
config = environment.load_config()
|
||||
|
||||
config["username"] = self.username
|
||||
config["ip"] = self.ip
|
||||
config["port"] = self.port
|
||||
config["start_empty"] = self.start_empty
|
||||
config["enable_presence"] = self.enable_presence
|
||||
config["client_color"] = [self.client_color.r,self.client_color.g,self.client_color.b]
|
||||
|
||||
|
||||
for bloc in self.supported_datablock:
|
||||
config_block = config["supported_types"][bloc.type_name]
|
||||
config_block['bl_delay_refresh'] = bloc.bl_delay_refresh
|
||||
config_block['bl_delay_apply'] = bloc.bl_delay_apply
|
||||
config_block['use_as_filter'] = bloc.use_as_filter
|
||||
config_block['icon'] = bloc.icon
|
||||
config_block['auto_push'] = bloc.auto_push
|
||||
config_block['bl_name'] = bloc.bl_name
|
||||
environment.save_config(config)
|
||||
|
||||
|
||||
classes = (
|
||||
ReplicatedDatablock,
|
||||
SessionProps,
|
||||
|
||||
)
|
||||
|
||||
libs = os.path.dirname(os.path.abspath(__file__))+"\\libs\\replication"
|
||||
|
||||
@persistent
|
||||
def load_handler(dummy):
|
||||
import bpy
|
||||
bpy.context.window_manager.session.load()
|
||||
|
||||
def register():
|
||||
if libs not in sys.path:
|
||||
sys.path.append(libs)
|
||||
|
||||
environment.setup(DEPENDENCIES,bpy.app.binary_path_python)
|
||||
|
||||
from . import presence
|
||||
from . import operators
|
||||
from . import ui
|
||||
|
||||
for cls in classes:
|
||||
bpy.utils.register_class(cls)
|
||||
|
||||
bpy.types.WindowManager.session = bpy.props.PointerProperty(
|
||||
type=SessionProps)
|
||||
bpy.types.ID.uuid = bpy.props.StringProperty(default="")
|
||||
|
||||
bpy.context.window_manager.session.load()
|
||||
|
||||
presence.register()
|
||||
operators.register()
|
||||
ui.register()
|
||||
bpy.app.handlers.load_post.append(load_handler)
|
||||
|
||||
def unregister():
|
||||
from . import presence
|
||||
from . import operators
|
||||
from . import ui
|
||||
|
||||
presence.unregister()
|
||||
ui.unregister()
|
||||
operators.unregister()
|
||||
|
||||
del bpy.types.WindowManager.session
|
||||
|
||||
for cls in reversed(classes):
|
||||
bpy.utils.unregister_class(cls)
|
@@ -1,25 +0,0 @@
__all__ = [
    'bl_user',
    'bl_object',
    'bl_mesh',
    'bl_camera',
    'bl_collection',
    'bl_curve',
    'bl_gpencil',
    'bl_image',
    'bl_light',
    'bl_scene',
    'bl_material',
    'bl_library',
    'bl_armature',
    'bl_action',
    'bl_world',
    'bl_metaball'
]  # Order here defines execution order

from . import *
from ..libs.replication.replication.data import ReplicatedDataFactory

def types_to_register():
    return __all__
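`types_to_register()` just returns `__all__`, whose order defines the execution order, and the removed root `__init__.py` earlier in this comparison walks those modules with `getattr(bl_types, type)` to read their `bl_*` attributes. The sketch below shows that consumption pattern in isolation; the registry layout is illustrative, it has to run inside Blender (every `bl_*` module imports `bpy`), and the import path is an assumption.

```python
# Illustrative consumer of types_to_register(); it mirrors how the removed __init__.py
# walks the bl_types modules. The `registry` shape is an assumption, not project code,
# and this only runs inside Blender because each bl_* module imports bpy.
import importlib


def build_registry():
    bl_types = importlib.import_module("bl_types")       # assumed importable path
    registry = {}
    # __all__ order matters: dependencies such as bl_user are processed first.
    for module_name in bl_types.types_to_register():
        module = importlib.import_module(f"bl_types.{module_name}")
        registry[module.bl_id] = {
            "bl_class": module.bl_class,           # native Blender type
            "bl_rep_class": module.bl_rep_class,   # replication wrapper
            "refresh": module.bl_delay_refresh,
            "apply": module.bl_delay_apply,
            "auto_push": module.bl_automatic_push,
            "icon": module.bl_icon,
        }
    return registry
```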
@ -1,69 +0,0 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
from jsondiff import diff
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
# WIP
|
||||
|
||||
class BlAction(BlDatablock):
|
||||
def load(self, data, target):
|
||||
utils.dump_anything.load(target, data)
|
||||
|
||||
def construct(self, data):
|
||||
return bpy.data.actions.new(data["name"])
|
||||
|
||||
def load(self, data, target):
|
||||
pass
|
||||
# # find target object
|
||||
# object_ = bpy.context.scene.objects.active
|
||||
# if object_ is None:
|
||||
# raise RuntimeError("Nothing is selected.")
|
||||
# if object_.mode != 'POSE': # object must be in pose mode
|
||||
# raise RuntimeError("Object must be in pose mode.")
|
||||
# if object_.animation_data.action is None:
|
||||
# raise RuntimeError("Object needs an active action.")
|
||||
|
||||
def dump(self, pointer=None):
|
||||
assert(pointer)
|
||||
data = utils.dump_datablock(pointer, 1)
|
||||
|
||||
dumper = utils.dump_anything.Dumper()
|
||||
dumper.depth = 2
|
||||
|
||||
|
||||
data["fcurves"] = []
|
||||
for fcurve in self.pointer.fcurves:
|
||||
fc = {
|
||||
"data_path": fcurve.data_path,
|
||||
"dumped_array_index": fcurve.array_index,
|
||||
"keyframe_points": []
|
||||
}
|
||||
|
||||
for k in fcurve.keyframe_points:
|
||||
fc["keyframe_points"].append(
|
||||
dumper.dump(k)
|
||||
)
|
||||
|
||||
data["fcurves"].append(fc)
|
||||
|
||||
return data
|
||||
|
||||
def resolve(self):
|
||||
assert(self.buffer)
|
||||
self.pointer = bpy.data.actions.get(self.buffer['name'])
|
||||
|
||||
def diff(self):
|
||||
return False
|
||||
|
||||
def is_valid(self):
|
||||
return bpy.data.actions.get(self.buffer['name'])
|
||||
|
||||
bl_id = "actions"
|
||||
bl_class = bpy.types.Action
|
||||
bl_rep_class = BlAction
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_icon = 'ACTION_TWEAK'
|
@ -1,107 +0,0 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
from jsondiff import diff
|
||||
|
||||
from ..libs.overrider import Overrider
|
||||
from .. import utils
|
||||
from .. import presence
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
# WIP
|
||||
class BlArmature(BlDatablock):
|
||||
def construct(self, data):
|
||||
return bpy.data.armatures.new(data["name"])
|
||||
|
||||
def load(self, data, target):
|
||||
# Load parent object
|
||||
if data['user'] not in bpy.data.objects.keys():
|
||||
parent_object = bpy.data.objects.new(data['user'], self.pointer)
|
||||
else:
|
||||
parent_object = bpy.data.objects[data['user']]
|
||||
|
||||
is_object_in_master = (data['user_collection'][0] == "Master Collection")
|
||||
#TODO: recursive parent collection loading
|
||||
# Link parent object to the collection
|
||||
if is_object_in_master:
|
||||
parent_collection = bpy.data.scenes[data['user_scene'][0]].collection
|
||||
elif data['user_collection'][0] not in bpy.data.collections.keys():
|
||||
parent_collection = bpy.data.collections.new(data['user_collection'][0])
|
||||
else:
|
||||
parent_collection = bpy.data.collections[data['user_collection'][0]]
|
||||
|
||||
if parent_object.name not in parent_collection.objects:
|
||||
parent_collection.objects.link(parent_object)
|
||||
|
||||
# Link parent collection to the scene master collection
|
||||
if not is_object_in_master and parent_collection.name not in bpy.data.scenes[data['user_scene'][0]].collection.children:
|
||||
bpy.data.scenes[data['user_scene'][0]].collection. children.link(parent_collection)
|
||||
|
||||
|
||||
# utils.dump_anything.load(target, data)
|
||||
# with Overrider(name="bpy_",parent=bpy.context) as bpy_:
|
||||
area, region, rv3d = presence.view3d_find()
|
||||
|
||||
|
||||
|
||||
bpy.context.view_layer.objects.active = parent_object
|
||||
# override = bpy.context.copy()
|
||||
# override['window'] = bpy.data.window_managers[0].windows[0]
|
||||
# override['mode'] = 'EDIT_ARMATURE'
|
||||
# override['window_manager'] = bpy.data.window_managers[0]
|
||||
# override['area'] = area
|
||||
# override['region'] = region
|
||||
# override['screen'] = bpy.data.window_managers[0].windows[0].screen
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
for bone in data['bones']:
|
||||
if bone not in self.pointer.edit_bones:
|
||||
new_bone = self.pointer.edit_bones.new(bone)
|
||||
else:
|
||||
new_bone = self.pointer.edit_bones[bone]
|
||||
|
||||
new_bone.tail = data['bones'][bone]['tail_local']
|
||||
new_bone.head = data['bones'][bone]['head_local']
|
||||
new_bone.tail_radius = data['bones'][bone]['tail_radius']
|
||||
new_bone.head_radius = data['bones'][bone]['head_radius']
|
||||
|
||||
if 'parent' in data['bones'][bone]:
|
||||
new_bone.parent = self.pointer.edit_bones[data['bones'][bone]['parent']['name']]
|
||||
new_bone.use_connect = data['bones'][bone]['use_connect']
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# bpy_.mode = 'EDIT_ARMATURE'
|
||||
|
||||
# bpy_.active_object = armature
|
||||
# bpy_.selected_objects = [armature]
|
||||
|
||||
def dump(self, pointer=None):
|
||||
assert(pointer)
|
||||
data = utils.dump_datablock(pointer, 4)
|
||||
|
||||
#get the parent Object
|
||||
object_users = utils.get_datablock_users(pointer)[0]
|
||||
data['user'] = object_users.name
|
||||
|
||||
#get parent collection
|
||||
container_users = utils.get_datablock_users(object_users)
|
||||
data['user_collection'] = [item.name for item in container_users if isinstance(item,bpy.types.Collection)]
|
||||
data['user_scene'] = [item.name for item in container_users if isinstance(item,bpy.types.Scene)]
|
||||
return data
|
||||
|
||||
def resolve(self):
|
||||
assert(self.buffer)
|
||||
self.pointer = bpy.data.armatures.get(self.buffer['name'])
|
||||
|
||||
def diff(self):
|
||||
False
|
||||
|
||||
def is_valid(self):
|
||||
return bpy.data.armatures.get(self.buffer['name'])
|
||||
|
||||
bl_id = "armatures"
|
||||
bl_class = bpy.types.Armature
|
||||
bl_rep_class = BlArmature
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 0
|
||||
bl_automatic_push = True
|
||||
bl_icon = 'ARMATURE_DATA'
|
@ -1,68 +0,0 @@
|
||||
from jsondiff import diff
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
|
||||
class BlCamera(BlDatablock):
|
||||
def load(self, data, target):
|
||||
utils.dump_anything.load(target, data)
|
||||
|
||||
dof_settings = data.get('dof')
|
||||
|
||||
# DOF settings
|
||||
if dof_settings:
|
||||
utils.dump_anything.load(target.dof, dof_settings)
|
||||
|
||||
def construct(self, data):
|
||||
return bpy.data.cameras.new(data["name"])
|
||||
|
||||
def dump(self, pointer=None):
|
||||
assert(pointer)
|
||||
|
||||
dumper = utils.dump_anything.Dumper()
|
||||
dumper.depth = 2
|
||||
dumper.include_filter = [
|
||||
"name",
|
||||
'type',
|
||||
'lens',
|
||||
'lens_unit',
|
||||
'shift_x',
|
||||
'shift_y',
|
||||
'clip_start',
|
||||
'clip_end',
|
||||
'dof',
|
||||
'use_dof',
|
||||
'sensor_fit',
|
||||
'sensor_width',
|
||||
'focus_object',
|
||||
'focus_distance',
|
||||
'aperture_fstop',
|
||||
'aperture_blades',
|
||||
'aperture_rotation',
|
||||
'aperture_ratio',
|
||||
]
|
||||
return dumper.dump(pointer)
|
||||
|
||||
def resolve(self):
|
||||
assert(self.buffer)
|
||||
self.pointer = bpy.data.cameras.get(self.buffer['name'])
|
||||
|
||||
def diff(self):
|
||||
return (self.bl_diff() or
|
||||
len(diff(self.dump(pointer=self.pointer), self.buffer)))
|
||||
|
||||
def is_valid(self):
|
||||
return bpy.data.cameras.get(self.buffer['name'])
|
||||
|
||||
|
||||
bl_id = "cameras"
|
||||
bl_class = bpy.types.Camera
|
||||
bl_rep_class = BlCamera
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_icon = 'CAMERA_DATA'
|
@ -1,71 +0,0 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
|
||||
class BlCollection(BlDatablock):
|
||||
def construct(self,data):
|
||||
return bpy.data.collections.new(data["name"])
|
||||
|
||||
def load(self, data, target):
|
||||
# Load other meshes metadata
|
||||
# dump_anything.load(target, data)
|
||||
|
||||
# link objects
|
||||
for object in data["objects"]:
|
||||
if object not in target.objects.keys():
|
||||
target.objects.link(bpy.data.objects[object])
|
||||
|
||||
for object in target.objects.keys():
|
||||
if object not in data["objects"]:
|
||||
target.objects.unlink(bpy.data.objects[object])
|
||||
|
||||
# Link childrens
|
||||
for collection in data["children"]:
|
||||
if collection not in target.children.keys():
|
||||
# if bpy.data.collections.find(collection) == -1:
|
||||
target.children.link(
|
||||
bpy.data.collections[collection])
|
||||
|
||||
for collection in target.children.keys():
|
||||
if collection not in data["children"]:
|
||||
target.collection.children.unlink(
|
||||
bpy.data.collections[collection])
|
||||
|
||||
utils.dump_anything.load(target, data)
|
||||
|
||||
def dump(self, pointer=None):
|
||||
assert(pointer)
|
||||
return utils.dump_datablock(pointer, 4)
|
||||
|
||||
def resolve(self):
|
||||
assert(self.buffer)
|
||||
self.pointer = bpy.data.collections.get(self.buffer['name'])
|
||||
|
||||
def diff(self):
|
||||
return (self.bl_diff() or
|
||||
len(self.pointer.objects) != len(self.buffer['objects']) or
|
||||
len(self.pointer.children) != len(self.buffer['children']))
|
||||
|
||||
def resolve_dependencies(self):
|
||||
deps = []
|
||||
|
||||
for child in self.pointer.children:
|
||||
deps.append(child)
|
||||
for object in self.pointer.objects:
|
||||
deps.append(object)
|
||||
|
||||
return deps
|
||||
|
||||
def is_valid(self):
|
||||
return bpy.data.collections.get(self.buffer['name'])
|
||||
|
||||
bl_id = "collections"
|
||||
bl_icon = 'FILE_FOLDER'
|
||||
bl_class = bpy.types.Collection
|
||||
bl_rep_class = BlCollection
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
@ -1,65 +0,0 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
from jsondiff import diff
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
class BlCurve(BlDatablock):
|
||||
def construct(self, data):
|
||||
return bpy.data.curves.new(data["name"], 'CURVE')
|
||||
|
||||
def load(self, data, target):
|
||||
utils.dump_anything.load(target, data)
|
||||
|
||||
target.splines.clear()
|
||||
# load splines
|
||||
for spline in data['splines']:
|
||||
new_spline = target.splines.new(data['splines'][spline]['type'])
|
||||
utils.dump_anything.load(new_spline, data['splines'][spline])
|
||||
|
||||
# Load curve geometry data
|
||||
for bezier_point_index in data['splines'][spline]["bezier_points"]:
|
||||
if bezier_point_index != 0:
|
||||
new_spline.bezier_points.add(1)
|
||||
utils.dump_anything.load(
|
||||
new_spline.bezier_points[bezier_point_index], data['splines'][spline]["bezier_points"][bezier_point_index])
|
||||
|
||||
for point_index in data['splines'][spline]["points"]:
|
||||
new_spline.points.add(1)
|
||||
utils.dump_anything.load(
|
||||
new_spline.points[point_index], data['splines'][spline]["points"][point_index])
|
||||
|
||||
def dump(self, pointer=None):
|
||||
assert(pointer)
|
||||
data = utils.dump_datablock(pointer, 1)
|
||||
data['splines'] = {}
|
||||
|
||||
dumper = utils.dump_anything.Dumper()
|
||||
dumper.depth = 3
|
||||
|
||||
for index,spline in enumerate(pointer.splines):
|
||||
spline_data = {}
|
||||
spline_data['points'] = dumper.dump(spline.points)
|
||||
spline_data['bezier_points'] = dumper.dump(spline.bezier_points)
|
||||
spline_data['type'] = dumper.dump(spline.type)
|
||||
data['splines'][index] = spline_data
|
||||
return data
|
||||
|
||||
def resolve(self):
|
||||
assert(self.buffer)
|
||||
self.pointer = bpy.data.curves.get(self.buffer['name'])
|
||||
|
||||
def diff(self):
|
||||
return (self.bl_diff() or
|
||||
len(diff(self.dump(pointer=self.pointer), self.buffer)) > 1)
|
||||
|
||||
def is_valid(self):
|
||||
return bpy.data.curves.get(self.buffer['name'])
|
||||
bl_id = "curves"
|
||||
bl_class = bpy.types.Curve
|
||||
bl_rep_class = BlCurve
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_icon = 'CURVE_DATA'
|
@@ -1,62 +0,0 @@
import bpy
import mathutils

from .. import utils
from ..libs.replication.replication.data import ReplicatedDatablock
from ..libs.replication.replication.constants import UP


class BlDatablock(ReplicatedDatablock):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        pointer = kwargs.get('pointer', None)
        buffer = self.buffer

        # TODO: use is_library_indirect
        self.is_library = (pointer and hasattr(pointer, 'library') and
                           pointer.library) or \
                          (buffer and 'library' in buffer)

        if self.is_library:
            self.load = self.load_library
            self.dump = self.dump_library
            self.diff = self.diff_library

        if self.pointer and hasattr(self.pointer, 'uuid'):
            self.pointer.uuid = self.uuid

    def library_apply(self):
        """Apply stored data
        """
        # UP in case we want to reset our pointer data
        self.state = UP

    def bl_diff(self):
        """Generic datablock diff"""
        return self.pointer.name != self.buffer['name']

    def construct_library(self, data):
        return None

    def load_library(self, data, target):
        pass

    def dump_library(self, pointer=None):
        return utils.dump_datablock(pointer, 1)

    def diff_library(self):
        return False

    def resolve_dependencies_library(self):
        return [self.pointer.library]

    def resolve_dependencies(self):
        dependencies = []

        if hasattr(self.pointer, 'animation_data') and self.pointer.animation_data:
            dependencies.append(self.pointer.animation_data.action)

        return dependencies

    def is_valid(self):
        raise NotImplementedError
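`BlDatablock` above defines the contract every replicated type implements (`construct`, `load`, `dump`, `resolve`, `diff`, `is_valid`, plus the `bl_*` module attributes). The following is a hypothetical example of a new type written against that contract; a `BlText` wrapper like this is not part of this comparison, it only illustrates how the pattern extends to another Blender datablock.

```python
# Illustrative sketch: how an additional replicated type would plug into the BlDatablock
# contract above. BlText is hypothetical and does not appear in this comparison.
import bpy

from .bl_datablock import BlDatablock


class BlText(BlDatablock):
    def construct(self, data):
        return bpy.data.texts.new(data["name"])

    def load(self, data, target):
        target.clear()
        target.write(data["body"])

    def dump(self, pointer=None):
        assert(pointer)
        return {
            "name": pointer.name,
            # as_string() flattens the whole text datablock into one replicable payload
            "body": pointer.as_string(),
        }

    def resolve(self):
        assert(self.buffer)
        self.pointer = bpy.data.texts.get(self.buffer['name'])

    def diff(self):
        return (self.bl_diff() or
                self.pointer.as_string() != self.buffer['body'])

    def is_valid(self):
        return bpy.data.texts.get(self.buffer['name'])


bl_id = "texts"
bl_class = bpy.types.Text
bl_rep_class = BlText
bl_delay_refresh = 1
bl_delay_apply = 1
bl_automatic_push = True
bl_icon = 'TEXT'
```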
@ -1,91 +0,0 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
from jsondiff import diff
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
|
||||
def load_gpencil_layer(target=None, data=None, create=False):
|
||||
|
||||
utils.dump_anything.load(target, data)
|
||||
|
||||
for frame in data["frames"]:
|
||||
try:
|
||||
tframe = target.frames[frame]
|
||||
except:
|
||||
tframe = target.frames.new(frame)
|
||||
utils.dump_anything.load(tframe, data["frames"][frame])
|
||||
for stroke in data["frames"][frame]["strokes"]:
|
||||
try:
|
||||
tstroke = tframe.strokes[stroke]
|
||||
except:
|
||||
tstroke = tframe.strokes.new()
|
||||
utils.dump_anything.load(
|
||||
tstroke, data["frames"][frame]["strokes"][stroke])
|
||||
|
||||
for point in data["frames"][frame]["strokes"][stroke]["points"]:
|
||||
p = data["frames"][frame]["strokes"][stroke]["points"][point]
|
||||
|
||||
tstroke.points.add(1)
|
||||
tpoint = tstroke.points[len(tstroke.points)-1]
|
||||
|
||||
utils.dump_anything.load(tpoint, p)
|
||||
|
||||
|
||||
class BlGpencil(BlDatablock):
|
||||
def construct(self, data):
|
||||
return bpy.data.grease_pencils.new(data["name"])
|
||||
|
||||
def load(self, data, target):
|
||||
for layer in target.layers:
|
||||
target.layers.remove(layer)
|
||||
|
||||
if "layers" in data.keys():
|
||||
for layer in data["layers"]:
|
||||
if layer not in target.layers.keys():
|
||||
gp_layer = target.layers.new(data["layers"][layer]["info"])
|
||||
else:
|
||||
gp_layer = target.layers[layer]
|
||||
load_gpencil_layer(
|
||||
target=gp_layer, data=data["layers"][layer], create=True)
|
||||
|
||||
utils.dump_anything.load(target, data)
|
||||
|
||||
target.materials.clear()
|
||||
if "materials" in data.keys():
|
||||
for mat in data['materials']:
|
||||
target.materials.append(bpy.data.materials[mat])
|
||||
|
||||
def diff(self):
|
||||
return (self.bl_diff())
|
||||
|
||||
def dump(self, pointer=None):
|
||||
assert(pointer)
|
||||
data = utils.dump_datablock(pointer, 2)
|
||||
utils.dump_datablock_attibutes(
|
||||
pointer, ['layers'], 9, data)
|
||||
return data
|
||||
|
||||
def resolve(self):
|
||||
assert(self.buffer)
|
||||
self.pointer = bpy.data.grease_pencils.get(self.buffer['name'])
|
||||
|
||||
def resolve_dependencies(self):
|
||||
deps = []
|
||||
|
||||
for material in self.pointer.materials:
|
||||
deps.append(material)
|
||||
|
||||
return deps
|
||||
|
||||
def is_valid(self):
|
||||
return bpy.data.grease_pencils.get(self.buffer['name'])
|
||||
|
||||
bl_id = "grease_pencils"
|
||||
bl_class = bpy.types.GreasePencil
|
||||
bl_rep_class = BlGpencil
|
||||
bl_delay_refresh = 5
|
||||
bl_delay_apply = 5
|
||||
bl_automatic_push = True
|
||||
bl_icon = 'GREASEPENCIL'
|
@ -1,76 +0,0 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
import os
|
||||
|
||||
from .. import utils, environment
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
def dump_image(image):
|
||||
pixels = None
|
||||
if image.source == "GENERATED":
|
||||
img_name = "{}.png".format(image.name)
|
||||
|
||||
image.filepath_raw = os.path.join(environment.CACHE_DIR, img_name)
|
||||
image.file_format = "PNG"
|
||||
image.save()
|
||||
|
||||
if image.source == "FILE":
|
||||
image.save()
|
||||
file = open(image.filepath_raw, "rb")
|
||||
pixels = file.read()
|
||||
file.close()
|
||||
else:
|
||||
raise ValueError()
|
||||
return pixels
|
||||
|
||||
class BlImage(BlDatablock):
|
||||
def construct(self, data):
|
||||
return bpy.data.images.new(
|
||||
name=data['name'],
|
||||
width=data['size'][0],
|
||||
height=data['size'][1]
|
||||
)
|
||||
|
||||
def load(self, data, target):
|
||||
image = target
|
||||
|
||||
img_name = "{}.png".format(image.name)
|
||||
|
||||
img_path = os.path.join(environment.CACHE_DIR, img_name)
|
||||
|
||||
file = open(img_path, 'wb')
|
||||
file.write(data["pixels"])
|
||||
file.close()
|
||||
|
||||
image.source = 'FILE'
|
||||
image.filepath = img_path
|
||||
|
||||
|
||||
def dump(self, pointer=None):
|
||||
assert(pointer)
|
||||
data = {}
|
||||
data['pixels'] = dump_image(pointer)
|
||||
utils.dump_datablock_attibutes(pointer, [], 2, data)
|
||||
data = utils.dump_datablock_attibutes(
|
||||
pointer,
|
||||
["name", 'size', 'height', 'alpha', 'float_buffer', 'filepath', 'source'],
|
||||
2,
|
||||
data)
|
||||
return data
|
||||
|
||||
def resolve(self):
|
||||
assert(self.buffer)
|
||||
self.pointer = bpy.data.images.get(self.buffer['name'])
|
||||
|
||||
def diff(self):
|
||||
return False
|
||||
|
||||
def is_valid(self):
|
||||
return bpy.data.images.get(self.buffer['name'])
|
||||
bl_id = "images"
|
||||
bl_class = bpy.types.Image
|
||||
bl_rep_class = BlImage
|
||||
bl_delay_refresh = 0
|
||||
bl_delay_apply = 0
|
||||
bl_automatic_push = False
|
||||
bl_icon = 'IMAGE_DATA'
|
@ -1,37 +0,0 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
from jsondiff import diff
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
|
||||
class BlLibrary(BlDatablock):
|
||||
def construct(self, data):
|
||||
with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData):
|
||||
targetData = sourceData
|
||||
return sourceData
|
||||
def load(self, data, target):
|
||||
pass
|
||||
|
||||
def dump(self, pointer=None):
|
||||
assert(pointer)
|
||||
return utils.dump_datablock(pointer, 1)
|
||||
|
||||
def resolve(self):
|
||||
assert(self.buffer)
|
||||
self.pointer = bpy.data.libraries.get(self.buffer['name'])
|
||||
|
||||
def diff(self):
|
||||
return (self.bl_diff())
|
||||
|
||||
def is_valid(self):
|
||||
return bpy.data.libraries.get(self.buffer['name'])
|
||||
|
||||
bl_id = "libraries"
|
||||
bl_class = bpy.types.Library
|
||||
bl_rep_class = BlLibrary
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_icon = 'LIBRARY_DATA_DIRECT'
|
@ -1,60 +0,0 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
from jsondiff import diff
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
|
||||
class BlLight(BlDatablock):
|
||||
def construct(self, data):
|
||||
return bpy.data.lights.new(data["name"], data["type"])
|
||||
|
||||
def load(self, data, target):
|
||||
utils.dump_anything.load(target, data)
|
||||
|
||||
def dump(self, pointer=None):
|
||||
assert(pointer)
|
||||
dumper = utils.dump_anything.Dumper()
|
||||
dumper.depth = 3
|
||||
dumper.include_filter = [
|
||||
"name",
|
||||
"type",
|
||||
"color",
|
||||
"energy",
|
||||
"specular_factor",
|
||||
"uuid",
|
||||
"shadow_soft_size",
|
||||
"use_custom_distance",
|
||||
"cutoff_distance",
|
||||
"use_shadow",
|
||||
"shadow_buffer_clip_start",
|
||||
"shadow_buffer_soft",
|
||||
"shadow_buffer_bias",
|
||||
"shadow_buffer_bleed_bias",
|
||||
"contact_shadow_distance",
|
||||
"contact_shadow_soft_size",
|
||||
"contact_shadow_bias",
|
||||
"contact_shadow_thickness"
|
||||
]
|
||||
data = dumper.dump(pointer)
|
||||
return data
|
||||
|
||||
def resolve(self):
|
||||
assert(self.buffer)
|
||||
self.pointer = bpy.data.lights.get(self.buffer['name'])
|
||||
|
||||
def diff(self):
|
||||
return (self.bl_diff() or
|
||||
len(diff(self.dump(pointer=self.pointer), self.buffer)) > 1)
|
||||
|
||||
def is_valid(self):
|
||||
return bpy.data.lights.get(self.buffer['name'])
|
||||
|
||||
bl_id = "lights"
|
||||
bl_class = bpy.types.Light
|
||||
bl_rep_class = BlLight
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_icon = 'LIGHT_DATA'
|
@ -1,158 +0,0 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
from jsondiff import diff
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
|
||||
def load_node(target_node_tree, source):
|
||||
target_node = target_node_tree.nodes.get(source["name"])
|
||||
|
||||
if target_node is None:
|
||||
node_type = source["bl_idname"]
|
||||
|
||||
target_node = target_node_tree.nodes.new(type=node_type)
|
||||
|
||||
utils.dump_anything.load(
|
||||
target_node, source)
|
||||
|
||||
if source['type'] == 'TEX_IMAGE':
|
||||
target_node.image = bpy.data.images[source['image']['name']]
|
||||
|
||||
for input in source["inputs"]:
|
||||
if hasattr(target_node.inputs[input], "default_value"):
|
||||
target_node.inputs[input].default_value = source["inputs"][input]["default_value"]
|
||||
|
||||
|
||||
def load_link(target_node_tree, source):
|
||||
input_socket = target_node_tree.nodes[source['to_node']
|
||||
['name']].inputs[source['to_socket']['name']]
|
||||
output_socket = target_node_tree.nodes[source['from_node']
|
||||
['name']].outputs[source['from_socket']['name']]
|
||||
|
||||
target_node_tree.links.new(input_socket, output_socket)
|
||||
|
||||
|
||||
class BlMaterial(BlDatablock):
|
||||
def construct(self, data):
|
||||
return bpy.data.materials.new(data["name"])
|
||||
|
||||
def load(self, data, target):
|
||||
if data['is_grease_pencil']:
|
||||
if not target.is_grease_pencil:
|
||||
bpy.data.materials.create_gpencil_data(target)
|
||||
|
||||
utils.dump_anything.load(
|
||||
target.grease_pencil, data['grease_pencil'])
|
||||
|
||||
utils.load_dict(data['grease_pencil'], target.grease_pencil)
|
||||
|
||||
elif data["use_nodes"]:
|
||||
if target.node_tree is None:
|
||||
target.use_nodes = True
|
||||
|
||||
target.node_tree.nodes.clear()
|
||||
|
||||
# Load nodes
|
||||
for node in data["node_tree"]["nodes"]:
|
||||
load_node(target.node_tree, data["node_tree"]["nodes"][node])
|
||||
|
||||
# Load nodes links
|
||||
target.node_tree.links.clear()
|
||||
|
||||
for link in data["node_tree"]["links"]:
|
||||
load_link(target.node_tree, data["node_tree"]["links"][link])
|
||||
|
||||
def dump(self, pointer=None):
|
||||
assert(pointer)
|
||||
mat_dumper = utils.dump_anything.Dumper()
|
||||
mat_dumper.depth = 2
|
||||
mat_dumper.exclude_filter = [
|
||||
"preview",
|
||||
"original",
|
||||
"uuid",
|
||||
"users",
|
||||
"alpha_threshold",
|
||||
"line_color",
|
||||
"view_center",
|
||||
]
|
||||
node_dumper = utils.dump_anything.Dumper()
|
||||
node_dumper.depth = 1
|
||||
node_dumper.exclude_filter = [
|
||||
"dimensions",
|
||||
"select",
|
||||
"bl_height_min",
|
||||
"bl_height_max",
|
||||
"bl_width_min",
|
||||
"bl_width_max",
|
||||
"bl_width_default",
|
||||
"hide",
|
||||
"show_options",
|
||||
"show_tetxures",
|
||||
"show_preview",
|
||||
"outputs",
|
||||
"width_hidden"
|
||||
]
|
||||
input_dumper = utils.dump_anything.Dumper()
|
||||
input_dumper.depth = 2
|
||||
input_dumper.include_filter = ["default_value"]
|
||||
links_dumper = utils.dump_anything.Dumper()
|
||||
links_dumper.depth = 3
|
||||
links_dumper.exclude_filter = ["dimensions"]
|
||||
data = mat_dumper.dump(pointer)
|
||||
|
||||
if pointer.use_nodes:
|
||||
nodes = {}
|
||||
|
||||
for node in pointer.node_tree.nodes:
|
||||
nodes[node.name] = node_dumper.dump(node)
|
||||
|
||||
if hasattr(node, 'inputs'):
|
||||
nodes[node.name]['inputs'] = {}
|
||||
|
||||
for i in node.inputs:
|
||||
|
||||
if hasattr(i, 'default_value'):
|
||||
nodes[node.name]['inputs'][i.name] = input_dumper.dump(
|
||||
i)
|
||||
data["node_tree"]['nodes'] = nodes
|
||||
data["node_tree"]["links"] = links_dumper.dump(pointer.node_tree.links)
|
||||
|
||||
elif pointer.is_grease_pencil:
|
||||
utils.dump_datablock_attibutes(pointer, ["grease_pencil"], 3, data)
|
||||
return data
|
||||
|
||||
def resolve(self):
|
||||
assert(self.buffer)
|
||||
self.pointer = bpy.data.materials.get(self.buffer['name'])
|
||||
|
||||
def diff(self):
|
||||
diff_rev = diff(self.dump(pointer=self.pointer), self.buffer)
|
||||
return (self.bl_diff() or
|
||||
len(diff_rev.keys()) > 0)
|
||||
|
||||
def resolve_dependencies(self):
|
||||
# TODO: resolve node group deps
|
||||
deps = []
|
||||
|
||||
if self.pointer.use_nodes:
|
||||
for node in self.pointer.node_tree.nodes:
|
||||
if node.type == 'TEX_IMAGE':
|
||||
deps.append(node.image)
|
||||
if self.is_library:
|
||||
deps.append(self.pointer.library)
|
||||
|
||||
return deps
|
||||
|
||||
def is_valid(self):
|
||||
return bpy.data.materials.get(self.buffer['name'])
|
||||
|
||||
|
||||
bl_id = "materials"
|
||||
bl_class = bpy.types.Material
|
||||
bl_rep_class = BlMaterial
|
||||
bl_delay_refresh = 10
|
||||
bl_delay_apply = 10
|
||||
bl_automatic_push = True
|
||||
bl_icon = 'MATERIAL_DATA'
|
@ -1,181 +0,0 @@
|
||||
import bpy
|
||||
import bmesh
|
||||
import mathutils
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
def dump_mesh(mesh, data={}):
|
||||
import bmesh
|
||||
|
||||
mesh_data = data
|
||||
mesh_buffer = bmesh.new()
|
||||
|
||||
mesh_buffer.from_mesh(mesh)
|
||||
|
||||
uv_layer = mesh_buffer.loops.layers.uv.verify()
|
||||
bevel_layer = mesh_buffer.verts.layers.bevel_weight.verify()
|
||||
skin_layer = mesh_buffer.verts.layers.skin.verify()
|
||||
|
||||
verts = {}
|
||||
for vert in mesh_buffer.verts:
|
||||
v = {}
|
||||
v["co"] = list(vert.co)
|
||||
|
||||
# vert metadata
|
||||
v['bevel'] = vert[bevel_layer]
|
||||
v['normal'] = list(vert.normal)
|
||||
# v['skin'] = list(vert[skin_layer])
|
||||
|
||||
verts[str(vert.index)] = v
|
||||
|
||||
mesh_data["verts"] = verts
|
||||
|
||||
edges = {}
|
||||
for edge in mesh_buffer.edges:
|
||||
e = {}
|
||||
e["verts"] = [edge.verts[0].index, edge.verts[1].index]
|
||||
|
||||
# Edge metadata
|
||||
e["smooth"] = edge.smooth
|
||||
|
||||
edges[edge.index] = e
|
||||
mesh_data["edges"] = edges
|
||||
|
||||
faces = {}
|
||||
for face in mesh_buffer.faces:
|
||||
f = {}
|
||||
fverts = []
|
||||
for vert in face.verts:
|
||||
fverts.append(vert.index)
|
||||
|
||||
f["verts"] = fverts
|
||||
f["material_index"] = face.material_index
|
||||
f["smooth"] = face.smooth
|
||||
f["normal"] = list(face.normal)
|
||||
f["index"] = face.index
|
||||
|
||||
uvs = []
|
||||
# Face metadata
|
||||
for loop in face.loops:
|
||||
loop_uv = loop[uv_layer]
|
||||
|
||||
uvs.append(list(loop_uv.uv))
|
||||
|
||||
f["uv"] = uvs
|
||||
faces[face.index] = f
|
||||
|
||||
mesh_data["faces"] = faces
|
||||
|
||||
uv_layers = []
|
||||
for uv_layer in mesh.uv_layers:
|
||||
uv_layers.append(uv_layer.name)
|
||||
|
||||
mesh_data["uv_layers"] = uv_layers
|
||||
return mesh_data
|
||||
|
||||
class BlMesh(BlDatablock):
|
||||
def construct(self, data):
|
||||
return bpy.data.meshes.new(data["name"])
|
||||
|
||||
def load(self, data, target):
|
||||
if not target or not target.is_editmode:
|
||||
# 1 - LOAD MATERIAL SLOTS
|
||||
material_to_load = []
|
||||
material_to_load = utils.revers(data["materials"])
|
||||
target.materials.clear()
|
||||
# SLots
|
||||
i = 0
|
||||
|
||||
for m in data["material_list"]:
|
||||
target.materials.append(bpy.data.materials[m])
|
||||
|
||||
# 2 - LOAD GEOMETRY
|
||||
mesh_buffer = bmesh.new()
|
||||
|
||||
for i in data["verts"]:
|
||||
v = mesh_buffer.verts.new(data["verts"][i]["co"])
|
||||
v.normal = data["verts"][i]["normal"]
|
||||
mesh_buffer.verts.ensure_lookup_table()
|
||||
|
||||
for i in data["edges"]:
|
||||
verts = mesh_buffer.verts
|
||||
v1 = data["edges"][i]["verts"][0]
|
||||
v2 = data["edges"][i]["verts"][1]
|
||||
edge = mesh_buffer.edges.new([verts[v1], verts[v2]])
|
||||
edge.smooth = data["edges"][i]["smooth"]
|
||||
for p in data["faces"]:
|
||||
verts = []
|
||||
for v in data["faces"][p]["verts"]:
|
||||
verts.append(mesh_buffer.verts[v])
|
||||
|
||||
if len(verts) > 0:
|
||||
f = mesh_buffer.faces.new(verts)
|
||||
|
||||
uv_layer = mesh_buffer.loops.layers.uv.verify()
|
||||
|
||||
f.smooth = data["faces"][p]["smooth"]
|
||||
f.normal = data["faces"][p]["normal"]
|
||||
f.index = data["faces"][p]["index"]
|
||||
f.material_index = data["faces"][p]['material_index']
|
||||
# UV loading
|
||||
for i, loop in enumerate(f.loops):
|
||||
loop_uv = loop[uv_layer]
|
||||
loop_uv.uv = data["faces"][p]["uv"][i]
|
||||
mesh_buffer.faces.ensure_lookup_table()
|
||||
mesh_buffer.to_mesh(target)
|
||||
|
||||
# 3 - LOAD METADATA
|
||||
# uv's
|
||||
for uv_layer in data['uv_layers']:
|
||||
target.uv_layers.new(name=uv_layer)
|
||||
|
||||
bevel_layer = mesh_buffer.verts.layers.bevel_weight.verify()
|
||||
skin_layer = mesh_buffer.verts.layers.skin.verify()
|
||||
|
||||
utils.dump_anything.load(target, data)
|
||||
|
||||
|
||||
|
||||
def dump(self, pointer=None):
|
||||
assert(pointer)
|
||||
|
||||
data = utils.dump_datablock(pointer, 2)
|
||||
data = dump_mesh(pointer, data)
|
||||
# Fix material index
|
||||
m_list = []
|
||||
for material in pointer.materials:
|
||||
if material:
|
||||
m_list.append(material.name)
|
||||
|
||||
data['material_list'] = m_list
|
||||
|
||||
return data
|
||||
|
||||
def resolve(self):
|
||||
assert(self.buffer)
|
||||
self.pointer = bpy.data.meshes.get(self.buffer['name'])
|
||||
|
||||
def diff(self):
|
||||
return (self.bl_diff() or
|
||||
len(self.pointer.vertices) != len(self.buffer['verts']) or
|
||||
len(self.pointer.materials) != len(self.buffer['materials']))
|
||||
|
||||
def resolve_dependencies(self):
|
||||
deps = []
|
||||
|
||||
for material in self.pointer.materials:
|
||||
deps.append(material)
|
||||
|
||||
return deps
|
||||
|
||||
def is_valid(self):
|
||||
return bpy.data.meshes.get(self.buffer['name'])
|
||||
|
||||
bl_id = "meshes"
|
||||
bl_class = bpy.types.Mesh
|
||||
bl_rep_class = BlMesh
|
||||
bl_delay_refresh = 10
|
||||
bl_delay_apply = 10
|
||||
bl_automatic_push = True
|
||||
bl_icon = 'MESH_DATA'
|
@ -1,48 +0,0 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
from jsondiff import diff
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
|
||||
class BlMetaball(BlDatablock):
|
||||
def construct(self, data):
|
||||
return bpy.data.metaballs.new(data["name"])
|
||||
|
||||
def load(self, data, target):
|
||||
utils.dump_anything.load(target, data)
|
||||
|
||||
target.elements.clear()
|
||||
for element in data["elements"]:
|
||||
new_element = target.elements.new(type=data["elements"][element]['type'])
|
||||
utils.dump_anything.load(new_element, data["elements"][element])
|
||||
|
||||
def dump(self, pointer=None):
|
||||
assert(pointer)
|
||||
dumper = utils.dump_anything.Dumper()
|
||||
dumper.depth = 3
|
||||
dumper.exclude_filter = ["is_editmode"]
|
||||
|
||||
data = dumper.dump(pointer)
|
||||
return data
|
||||
|
||||
def resolve(self):
|
||||
assert(self.buffer)
|
||||
self.pointer = bpy.data.metaballs.get(self.buffer['name'])
|
||||
|
||||
def diff(self):
|
||||
rev = diff(self.dump(pointer=self.pointer), self.buffer)
|
||||
return (self.bl_diff() or
|
||||
len(rev) > 0)
|
||||
|
||||
def is_valid(self):
|
||||
return bpy.data.metaballs.get(self.buffer['name'])
|
||||
|
||||
bl_id = "metaballs"
|
||||
bl_class = bpy.types.MetaBall
|
||||
bl_rep_class = BlMetaball
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_icon = 'META_BALL'
|
@ -1,135 +0,0 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
from jsondiff import diff
|
||||
|
||||
|
||||
class BlObject(BlDatablock):
|
||||
def construct(self, data):
|
||||
pointer = None
|
||||
|
||||
if self.is_library:
|
||||
with bpy.data.libraries.load(filepath=bpy.data.libraries[self.buffer['library']].filepath, link=True) as (sourceData, targetData):
|
||||
targetData.objects = [
|
||||
name for name in sourceData.objects if name == self.buffer['name']]
|
||||
|
||||
return targetData.objects[self.buffer['name']]
|
||||
|
||||
# Object specific constructor...
|
||||
if "data" not in data:
|
||||
pass
|
||||
elif data["data"] in bpy.data.meshes.keys():
|
||||
pointer = bpy.data.meshes[data["data"]]
|
||||
elif data["data"] in bpy.data.lights.keys():
|
||||
pointer = bpy.data.lights[data["data"]]
|
||||
elif data["data"] in bpy.data.cameras.keys():
|
||||
pointer = bpy.data.cameras[data["data"]]
|
||||
elif data["data"] in bpy.data.curves.keys():
|
||||
pointer = bpy.data.curves[data["data"]]
|
||||
elif data["data"] in bpy.data.metaballs.keys():
|
||||
pointer = bpy.data.metaballs[data["data"]]
|
||||
elif data["data"] in bpy.data.armatures.keys():
|
||||
pointer = bpy.data.armatures[data["data"]]
|
||||
elif data["data"] in bpy.data.grease_pencils.keys():
|
||||
pointer = bpy.data.grease_pencils[data["data"]]
|
||||
elif data["data"] in bpy.data.curves.keys():
|
||||
pointer = bpy.data.curves[data["data"]]
|
||||
|
||||
return bpy.data.objects.new(data["name"], pointer)
|
||||
|
||||
def load(self, data, target):
|
||||
target.uuid = data['uuid']
|
||||
target.matrix_world = mathutils.Matrix(data["matrix_world"])
|
||||
|
||||
# Load modifiers
|
||||
if hasattr(target, 'modifiers'):
|
||||
for local_modifier in target.modifiers:
|
||||
if local_modifier.name not in data['modifiers']:
|
||||
target.modifiers.remove(local_modifier)
|
||||
for modifier in data['modifiers']:
|
||||
target_modifier = target.modifiers.get(modifier)
|
||||
|
||||
if not target_modifier:
|
||||
target_modifier = target.modifiers.new(
|
||||
data['modifiers'][modifier]['name'], data['modifiers'][modifier]['type'])
|
||||
|
||||
utils.dump_anything.load(
|
||||
target_modifier, data['modifiers'][modifier])
|
||||
|
||||
if 'children' in data.keys():
|
||||
for child in data['children']:
|
||||
bpy.data.objects[child].parent = self.pointer
|
||||
|
||||
def dump(self, pointer=None):
|
||||
assert(pointer)
|
||||
dumper = utils.dump_anything.Dumper()
|
||||
dumper.depth = 1
|
||||
dumper.include_filter = [
|
||||
"name",
|
||||
"matrix_world",
|
||||
"rotation_mode",
|
||||
"parent",
|
||||
"data",
|
||||
"uuid",
|
||||
"children",
|
||||
"library"
|
||||
]
|
||||
data = dumper.dump(pointer)
|
||||
|
||||
if self.is_library:
|
||||
return data
|
||||
|
||||
if hasattr(pointer, 'modifiers'):
|
||||
dumper.include_filter = None
|
||||
dumper.depth = 3
|
||||
data["modifiers"] = dumper.dump(pointer.modifiers)
|
||||
|
||||
if len(pointer.children) > 0:
|
||||
childs = []
|
||||
for child in pointer.children:
|
||||
childs.append(child.name)
|
||||
|
||||
data["children"] = childs
|
||||
# deps.extend(list(self.pointer.children))
|
||||
return data
|
||||
|
||||
def resolve(self):
|
||||
assert(self.buffer)
|
||||
object_name = self.buffer['name']
|
||||
try:
|
||||
self.pointer = bpy.data.objects[object_name]
|
||||
except:
|
||||
pass
|
||||
|
||||
def diff(self):
|
||||
diff_rev = diff(self.dump(pointer=self.pointer), self.buffer)
|
||||
return (self.bl_diff() or
|
||||
len(diff_rev.keys()))
|
||||
|
||||
def resolve_dependencies(self):
|
||||
deps = super().resolve_dependencies()
|
||||
|
||||
# Avoid Empty case
|
||||
if self.pointer.data:
|
||||
deps.append(self.pointer.data)
|
||||
if len(self.pointer.children) > 0:
|
||||
deps.extend(list(self.pointer.children))
|
||||
|
||||
if self.is_library:
|
||||
deps.append(self.pointer.library)
|
||||
|
||||
return deps
|
||||
|
||||
def is_valid(self):
|
||||
return bpy.data.objects.get(self.buffer['name'])
|
||||
|
||||
|
||||
bl_id = "objects"
|
||||
bl_class = bpy.types.Object
|
||||
bl_rep_class = BlObject
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_icon = 'OBJECT_DATA'
|
@ -1,92 +0,0 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
from jsondiff import diff
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
class BlScene(BlDatablock):
|
||||
def construct(self, data):
|
||||
return bpy.data.scenes.new(data["name"])
|
||||
|
||||
def load(self, data, target):
|
||||
target = self.pointer
|
||||
# Load other meshes metadata
|
||||
utils.dump_anything.load(target, data)
|
||||
|
||||
# Load master collection
|
||||
for object in data["collection"]["objects"]:
|
||||
if object not in target.collection.objects.keys():
|
||||
target.collection.objects.link(bpy.data.objects[object])
|
||||
|
||||
for object in target.collection.objects.keys():
|
||||
if object not in data["collection"]["objects"]:
|
||||
target.collection.objects.unlink(bpy.data.objects[object])
|
||||
|
||||
# load collections
|
||||
for collection in data["collection"]["children"]:
|
||||
if collection not in target.collection.children.keys():
|
||||
target.collection.children.link(
|
||||
bpy.data.collections[collection])
|
||||
|
||||
for collection in target.collection.children.keys():
|
||||
if collection not in data["collection"]["children"]:
|
||||
target.collection.children.unlink(
|
||||
bpy.data.collections[collection])
|
||||
|
||||
if 'world' in data.keys():
|
||||
target.world = bpy.data.worlds[data['world']]
|
||||
|
||||
def dump(self, pointer=None):
|
||||
assert(pointer)
|
||||
data = {}
|
||||
|
||||
scene_dumper = utils.dump_anything.Dumper()
|
||||
scene_dumper.depth = 1
|
||||
scene_dumper.include_filter = ['name','world', 'id', 'camera', 'grease_pencil']
|
||||
data = scene_dumper.dump(pointer)
|
||||
|
||||
scene_dumper.depth = 3
|
||||
scene_dumper.include_filter = ['children','objects','name']
|
||||
data['collection'] = scene_dumper.dump(pointer.collection)
|
||||
|
||||
|
||||
return data
|
||||
|
||||
def resolve(self):
|
||||
assert(self.buffer)
|
||||
scene_name = self.buffer['name']
|
||||
|
||||
self.pointer = bpy.data.scenes.get(scene_name)
|
||||
|
||||
def diff(self):
|
||||
return (self.bl_diff() or
|
||||
len(self.pointer.collection.objects) != len(self.buffer['collection']['objects']) or
|
||||
len(self.pointer.collection.children) != len(self.buffer['collection']['children']))
|
||||
|
||||
def resolve_dependencies(self):
|
||||
deps = []
|
||||
|
||||
# child collections
|
||||
for child in self.pointer.collection.children:
|
||||
deps.append(child)
|
||||
|
||||
# childs objects
|
||||
for object in self.pointer.objects:
|
||||
deps.append(object)
|
||||
|
||||
# world
|
||||
if self.pointer.world:
|
||||
deps.append(self.pointer.world)
|
||||
|
||||
return deps
|
||||
|
||||
def is_valid(self):
|
||||
return bpy.data.scenes.get(self.buffer['name'])
|
||||
bl_id = "scenes"
|
||||
bl_class = bpy.types.Scene
|
||||
bl_rep_class = BlScene
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_icon = 'SCENE_DATA'
|
@ -1,66 +0,0 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
from .. import utils
|
||||
from .. import presence
|
||||
from .bl_datablock import BlDatablock
|
||||
from ..libs.replication.replication.constants import UP
|
||||
|
||||
class BlUser(BlDatablock):
|
||||
def construct(self, name):
|
||||
return presence.User()
|
||||
|
||||
def load(self, data, target):
|
||||
target.name = data['name']
|
||||
target.location = data['location']
|
||||
target.selected_objects = data['selected_objects']
|
||||
utils.dump_anything.load(target, data)
|
||||
|
||||
def apply(self):
|
||||
if self.pointer is None:
|
||||
self.pointer = self.construct(self.buffer)
|
||||
|
||||
if self.pointer:
|
||||
self.load(data=self.buffer, target=self.pointer)
|
||||
|
||||
settings = bpy.context.window_manager.session
|
||||
self.state = UP
|
||||
#TODO: refactor in order to redraw in cleaner ways
|
||||
area, region, rv3d = presence.view3d_find()
|
||||
if presence.renderer and area and region and rv3d and settings.enable_presence:
|
||||
if settings.presence_show_user:
|
||||
presence.renderer.draw_client_camera(self.buffer['name'], self.buffer['location'],self.buffer['color'])
|
||||
if settings.presence_show_selected:
|
||||
presence.renderer.draw_client_selection(self.buffer['name'], self.buffer['color'],self.buffer['selected_objects'])
|
||||
presence.refresh_3d_view()
|
||||
|
||||
|
||||
def dump(self,pointer=None):
|
||||
data = utils.dump_anything.dump(pointer)
|
||||
data['location'] = pointer.location
|
||||
data['color'] = pointer.color
|
||||
data['selected_objects'] = pointer.selected_objects
|
||||
return data
|
||||
|
||||
|
||||
def diff(self):
|
||||
if not self.pointer:
|
||||
return False
|
||||
if self.pointer.is_dirty:
|
||||
self.pointer.is_dirty = False
|
||||
return True
|
||||
|
||||
for i,coord in enumerate(self.pointer.location):
|
||||
if coord != self.buffer['location'][i]:
|
||||
return True
|
||||
return False
|
||||
|
||||
def is_valid(self):
|
||||
return True
|
||||
bl_id = "users"
|
||||
bl_class = presence.User
|
||||
bl_rep_class = BlUser
|
||||
bl_delay_refresh = .2
|
||||
bl_delay_apply = .2
|
||||
bl_automatic_push = True
|
||||
bl_icon = 'CON_ARMATURE'
|
@ -1,114 +0,0 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
from jsondiff import diff
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
from .bl_material import load_link, load_node
|
||||
|
||||
|
||||
class BlWorld(BlDatablock):
|
||||
def construct(self, data):
|
||||
return bpy.data.worlds.new(data["name"])
|
||||
|
||||
def load(self, data, target):
|
||||
if data["use_nodes"]:
|
||||
if target.node_tree is None:
|
||||
target.use_nodes = True
|
||||
|
||||
target.node_tree.nodes.clear()
|
||||
|
||||
for node in data["node_tree"]["nodes"]:
|
||||
load_node(target.node_tree, data["node_tree"]["nodes"][node])
|
||||
|
||||
# Load nodes links
|
||||
target.node_tree.links.clear()
|
||||
|
||||
for link in data["node_tree"]["links"]:
|
||||
load_link(target.node_tree, data["node_tree"]["links"][link])
|
||||
|
||||
def dump(self, pointer=None):
|
||||
assert(pointer)
|
||||
|
||||
world_dumper = utils.dump_anything.Dumper()
|
||||
world_dumper.depth = 2
|
||||
world_dumper.exclude_filter = [
|
||||
"preview",
|
||||
"original",
|
||||
"uuid",
|
||||
"color",
|
||||
"cycles",
|
||||
"light_settings"
|
||||
]
|
||||
data = world_dumper.dump(pointer)
|
||||
if pointer.use_nodes:
|
||||
nodes = {}
|
||||
dumper = utils.dump_anything.Dumper()
|
||||
dumper.depth = 2
|
||||
dumper.exclude_filter = [
|
||||
"dimensions",
|
||||
"select",
|
||||
"bl_height_min",
|
||||
"bl_height_max",
|
||||
"bl_width_min",
|
||||
"bl_width_max",
|
||||
"bl_width_default",
|
||||
"hide",
|
||||
"show_options",
|
||||
"show_tetxures",
|
||||
"show_preview",
|
||||
"outputs",
|
||||
"preview",
|
||||
"original",
|
||||
"width_hidden"
|
||||
]
|
||||
|
||||
for node in pointer.node_tree.nodes:
|
||||
nodes[node.name] = dumper.dump(node)
|
||||
|
||||
if hasattr(node, 'inputs'):
|
||||
nodes[node.name]['inputs'] = {}
|
||||
|
||||
for i in node.inputs:
|
||||
input_dumper = utils.dump_anything.Dumper()
|
||||
input_dumper.depth = 2
|
||||
input_dumper.include_filter = ["default_value"]
|
||||
if hasattr(i, 'default_value'):
|
||||
nodes[node.name]['inputs'][i.name] = input_dumper.dump(
|
||||
i)
|
||||
data["node_tree"]['nodes'] = nodes
|
||||
utils.dump_datablock_attibutes(
|
||||
pointer.node_tree, ["links"], 3, data['node_tree'])
|
||||
return data
|
||||
|
||||
def resolve(self):
|
||||
assert(self.buffer)
|
||||
self.pointer = bpy.data.worlds.get(self.buffer['name'])
|
||||
|
||||
def diff(self):
|
||||
diff_rev = diff(self.dump(pointer=self.pointer), self.buffer)
|
||||
return (self.bl_diff() or
|
||||
len(diff_rev.keys()) > 0)
|
||||
|
||||
def resolve_dependencies(self):
|
||||
deps = []
|
||||
|
||||
if self.pointer.use_nodes:
|
||||
for node in self.pointer.node_tree.nodes:
|
||||
if node.type == 'TEX_IMAGE':
|
||||
deps.append(node.image)
|
||||
if self.is_library:
|
||||
deps.append(self.pointer.library)
|
||||
return deps
|
||||
|
||||
def is_valid(self):
|
||||
return bpy.data.worlds.get(self.buffer['name'])
|
||||
|
||||
|
||||
bl_id = "worlds"
|
||||
bl_class = bpy.types.World
|
||||
bl_rep_class = BlWorld
|
||||
bl_delay_refresh = 4
|
||||
bl_delay_apply = 4
|
||||
bl_automatic_push = True
|
||||
bl_icon = 'WORLD_DATA'
|
169
delayable.py
@ -1,169 +0,0 @@
|
||||
import logging
|
||||
|
||||
import bpy
|
||||
|
||||
from . import operators, presence, utils
|
||||
from .bl_types.bl_user import BlUser
|
||||
from .libs.replication.replication.constants import FETCHED, RP_COMMON
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Delayable():
|
||||
"""Delayable task interface
|
||||
"""
|
||||
|
||||
def register(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def execute(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def unregister(self):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class Timer(Delayable):
|
||||
"""Timer binder interface for blender
|
||||
|
||||
Run a bpy.app.Timer in the background looping at the given rate
|
||||
"""
|
||||
|
||||
def __init__(self, duration=1):
|
||||
self._timeout = duration
|
||||
self._running = True
|
||||
|
||||
def register(self):
|
||||
"""Register the timer into the blender timer system
|
||||
"""
|
||||
bpy.app.timers.register(self.main)
|
||||
|
||||
def main(self):
|
||||
self.execute()
|
||||
|
||||
if self._running:
|
||||
return self._timeout
|
||||
|
||||
def execute(self):
|
||||
"""Main timer loop
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def unregister(self):
|
||||
"""Unnegister the timer of the blender timer system
|
||||
"""
|
||||
if bpy.app.timers.is_registered(self.main):
|
||||
bpy.app.timers.unregister(self.main)
|
||||
|
||||
self._running = False
|
||||
|
||||
|
||||
class ApplyTimer(Timer):
|
||||
def __init__(self, timout=1, target_type=None):
|
||||
self._type = target_type
|
||||
super().__init__(timout)
|
||||
|
||||
def execute(self):
|
||||
if operators.client:
|
||||
nodes = operators.client.list(filter=self._type)
|
||||
|
||||
for node in nodes:
|
||||
node_ref = operators.client.get(uuid=node)
|
||||
|
||||
if node_ref.state == FETCHED:
|
||||
operators.client.apply(uuid=node)
|
||||
|
||||
|
||||
class DynamicRightSelectTimer(Timer):
|
||||
def __init__(self, timout=1):
|
||||
super().__init__(timout)
|
||||
self.last_selection = []
|
||||
|
||||
def execute(self):
|
||||
if operators.client:
|
||||
users = operators.client.list(filter=BlUser)
|
||||
|
||||
for user in users:
|
||||
user_ref = operators.client.get(uuid=user)
|
||||
settings = bpy.context.window_manager.session
|
||||
|
||||
# Other user
|
||||
if user_ref.buffer['name'] != settings.username:
|
||||
user_selection = user_ref.buffer['selected_objects']
|
||||
for obj in bpy.data.objects:
|
||||
obj.hide_select = obj.name in user_selection
|
||||
# Local user
|
||||
elif user_ref.pointer:
|
||||
current_selection = utils.get_selected_objects(
|
||||
bpy.context.scene)
|
||||
if current_selection != self.last_selection:
|
||||
user_ref.pointer.update_selected_objects(bpy.context)
|
||||
right_strategy = operators.client.get_config()[
|
||||
'right_strategy']
|
||||
if right_strategy == RP_COMMON:
|
||||
obj_common = [
|
||||
o for o in self.last_selection if o not in current_selection]
|
||||
obj_ours = [
|
||||
o for o in current_selection if o not in self.last_selection]
|
||||
|
||||
# change old selection right to common
|
||||
for obj in obj_common:
|
||||
_object = bpy.data.objects.get(obj)
|
||||
|
||||
node = operators.client.get(reference=_object)
|
||||
if node and node.owner == settings.username:
|
||||
operators.client.change_owner(
|
||||
node.uuid, RP_COMMON)
|
||||
|
||||
# change new selection to our
|
||||
for obj in obj_ours:
|
||||
node = operators.client.get(
|
||||
reference=bpy.data.objects[obj])
|
||||
if node and node.owner == RP_COMMON:
|
||||
operators.client.change_owner(
|
||||
node.uuid, settings.username)
|
||||
|
||||
self.last_selection = current_selection
|
||||
|
||||
|
||||
class RedrawTimer(Timer):
|
||||
def __init__(self, timout=1, target_type=None):
|
||||
self._type = target_type
|
||||
super().__init__(timout)
|
||||
|
||||
def execute(self):
|
||||
if presence.renderer:
|
||||
presence.refresh_3d_view()
|
||||
|
||||
|
||||
class Draw(Delayable):
|
||||
def __init__(self):
|
||||
self._handler = None
|
||||
|
||||
def register(self):
|
||||
self._handler = bpy.types.SpaceView3D.draw_handler_add(
|
||||
self.execute, (), 'WINDOW', 'POST_VIEW')
|
||||
|
||||
def execute(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def unregister(self):
|
||||
try:
|
||||
bpy.types.SpaceView3D.draw_handler_remove(
|
||||
self._handler, "WINDOW")
|
||||
except:
|
||||
logger.error("draw already unregistered")
|
||||
|
||||
|
||||
class ClientUpdate(Draw):
|
||||
def __init__(self, client_uuid=None):
|
||||
assert(client_uuid)
|
||||
self._client_uuid = client_uuid
|
||||
super().__init__()
|
||||
|
||||
def execute(self):
|
||||
if self._client_uuid and operators.client:
|
||||
client = operators.client.get(uuid=self._client_uuid)
|
||||
|
||||
if client:
|
||||
client.pointer.update_location()
|
20
docs/Makefile
Normal file
@ -0,0 +1,20 @@
|
||||
# Minimal makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line, and also
|
||||
# from the environment for the first two.
|
||||
SPHINXOPTS ?=
|
||||
SPHINXBUILD ?= sphinx-build
|
||||
SOURCEDIR = .
|
||||
BUILDDIR = _build
|
||||
|
||||
# Put it first so that "make" without argument is like "make help".
|
||||
help:
|
||||
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||
|
||||
.PHONY: help Makefile
|
||||
|
||||
# Catch-all target: route all unknown targets to Sphinx using the new
|
||||
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||
%: Makefile
|
||||
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
BIN
docs/about/img/about_chain.gif
Normal file
After Width: | Height: | Size: 5.8 MiB |
8
docs/about/index.rst
Normal file
@ -0,0 +1,8 @@
|
||||
About
|
||||
=====
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:name: toc-about
|
||||
|
||||
introduction
|
15
docs/about/introduction.rst
Normal file
@ -0,0 +1,15 @@
|
||||
============
|
||||
Introduction
|
||||
============
|
||||
|
||||
|
||||
A film is an idea carved along the whole production process by many different peoples. A traditional animation pipeline involve a linear succession of tasks. From storyboard to compositing by passing upon different step, its fundamental work flow is similar to an industrial assembly line. Since each step is almost a department, its common that one person on department B doesn't know what another person did on a previous step in a department A. This lack of visibility/communication could be a source of problems which could produce a bad impact on the final production result.
|
||||
|
||||
.. figure:: img/about_chain.gif
|
||||
:align: center
|
||||
|
||||
The linear workflow problems
|
||||
|
||||
Nowadays it's a known fact that real-time rendering technologies allows to speedup traditional linear production by reducing drastically the iteration time across different steps. All majors industrial CG solutions are moving toward real-time horizons to bring innovative interactive workflows. But this is a microscopic, per-task/solution vision of real-time rendering benefits for the animation production. What if we step-back, get a macroscopic picture of an animation movie pipeline and ask ourself how real-time could change our global workflow ? Could-it bring better ways of working together by giving more visibility between departments during the whole production ?
|
||||
|
||||
The multi-user addon is an attempt to experiment real-time parallelism between different production stage. By replicating blender data blocks over the networks, it allows different artists to collaborate on a same scene in real-time.
|
98
docs/conf.py
Normal file
@ -0,0 +1,98 @@
|
||||
# Configuration file for the Sphinx documentation builder.
|
||||
#
|
||||
# This file only contains a selection of the most common options. For a full
|
||||
# list see the documentation:
|
||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html
|
||||
|
||||
# -- Path setup --------------------------------------------------------------
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#
|
||||
import os
|
||||
import sys
|
||||
# sys.path.insert(0, os.path.abspath('.'))
|
||||
|
||||
|
||||
# -- Project information -----------------------------------------------------
|
||||
|
||||
project = 'multi-user'
|
||||
copyright = '2020, Swann Martinez'
|
||||
author = 'Swann Martinez'
|
||||
|
||||
# The full version, including alpha/beta/rc tags
|
||||
release = '0.0.2'
|
||||
|
||||
|
||||
# -- General configuration ---------------------------------------------------
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = [
|
||||
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = ".rst"
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = "index"
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#
|
||||
# This is also used if you do content translation via gettext catalogs.
|
||||
# Usually you set "language" from the command line for these cases.
|
||||
language = 'python'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
# This pattern also affects html_static_path and html_extra_path.
|
||||
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
|
||||
|
||||
|
||||
# -- Options for HTML output -------------------------------------------------
|
||||
|
||||
# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org
|
||||
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
|
||||
|
||||
import sphinx_rtd_theme
|
||||
html_theme = 'sphinx_rtd_theme'
|
||||
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||
if on_rtd:
|
||||
using_rtd_theme = True
|
||||
|
||||
# Theme options
|
||||
html_theme_options = {
|
||||
# 'typekit_id': 'hiw1hhg',
|
||||
# 'analytics_id': '',
|
||||
# 'sticky_navigation': True # Set to False to disable the sticky nav while scrolling.
|
||||
# 'logo_only': True, # if we have a html_logo below, this shows /only/ the logo with no title text
|
||||
'collapse_navigation': False, # Collapse navigation (False makes it tree-like)
|
||||
# 'display_version': True, # Display the docs version
|
||||
# 'navigation_depth': 4, # Depth of the headers shown in the navigation bar
|
||||
}
|
||||
|
||||
|
||||
# -- Options for HTMLHelp output ------------------------------------------
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'multiusrdoc'
|
||||
|
||||
# sphinx-notfound-page
|
||||
# https://github.com/readthedocs/sphinx-notfound-page
|
||||
notfound_context = {
|
||||
'title': 'Page Not Found',
|
||||
'body': '''
|
||||
<h1>Page Not Found</h1>
|
||||
<p>Sorry, we couldn't find that page.</p>
|
||||
<p>Try using the search box or go to the homepage.</p>
|
||||
''',
|
||||
}
|
||||
|
||||
# Enable directives that insert the contents of external files
|
||||
file_insertion_enabled = False
|
59
docs/getting_started/glossary.rst
Normal file
@ -0,0 +1,59 @@
|
||||
========
|
||||
Glossary
|
||||
========
|
||||
|
||||
|
||||
.. glossary::
|
||||
|
||||
.. _admin:
|
||||
|
||||
administrator
|
||||
|
||||
*A session administrator can manage users (kick) and have a write access on
|
||||
each datablock. He could also init a dedicated server repository.*
|
||||
|
||||
.. _session-status:
|
||||
|
||||
session status
|
||||
|
||||
*Located in the title of the multi-user panel, the session status show
|
||||
you the connection state.*
|
||||
|
||||
.. figure:: img/quickstart_session_status.png
|
||||
:align: center
|
||||
|
||||
Session status in panel title bar
|
||||
|
||||
All possible state are listed here with their meaning:*
|
||||
|
||||
+--------------------+---------------------------------------------------------------------------------------------+
|
||||
| State | Description |
|
||||
+--------------------+---------------------------------------------------------------------------------------------+
|
||||
| WARMING UP DATA | Commiting local data |
|
||||
+--------------------+---------------------------------------------------------------------------------------------+
|
||||
| FETCHING | Dowloading snapshot from the server |
|
||||
+--------------------+---------------------------------------------------------------------------------------------+
|
||||
| AUTHENTIFICATION | Initial server authentication |
|
||||
+--------------------+---------------------------------------------------------------------------------------------+
|
||||
| ONLINE | Connected to the session |
|
||||
+--------------------+---------------------------------------------------------------------------------------------+
|
||||
| PUSHING | Init the server repository by pushing ours |
|
||||
+--------------------+---------------------------------------------------------------------------------------------+
|
||||
| INIT | Initial state |
|
||||
+--------------------+---------------------------------------------------------------------------------------------+
|
||||
| QUITTING | Exiting the session |
|
||||
+--------------------+---------------------------------------------------------------------------------------------+
|
||||
| LAUNCHING SERVICES | Launching local services. Services are spetialized daemons running in the background. ) |
|
||||
+--------------------+---------------------------------------------------------------------------------------------+
|
||||
| LOBBY | The lobby is a waiting state triggered when the server repository hasn't been initiated yet |
|
||||
| | |
|
||||
| | Once initialized, the server will automatically launch all client in the **LOBBY**. |
|
||||
+--------------------+---------------------------------------------------------------------------------------------+
|
||||
|
||||
|
||||
.. _common-right:
|
||||
|
||||
common right
|
||||
|
||||
When a data block is under common right, it is available for everyone to modification.
|
||||
The rights will be given to the user that select it first.
|
BIN
docs/getting_started/img/quickstart_advanced.png
Normal file
After Width: | Height: | Size: 8.4 KiB |
BIN
docs/getting_started/img/quickstart_advanced_cache.png
Normal file
After Width: | Height: | Size: 7.6 KiB |
BIN
docs/getting_started/img/quickstart_advanced_logging.png
Normal file
After Width: | Height: | Size: 2.9 KiB |
BIN
docs/getting_started/img/quickstart_advanced_network.png
Normal file
After Width: | Height: | Size: 4.1 KiB |
BIN
docs/getting_started/img/quickstart_advanced_replication.png
Normal file
After Width: | Height: | Size: 18 KiB |
BIN
docs/getting_started/img/quickstart_host.png
Normal file
After Width: | Height: | Size: 13 KiB |
BIN
docs/getting_started/img/quickstart_join.png
Normal file
After Width: | Height: | Size: 12 KiB |
BIN
docs/getting_started/img/quickstart_presence.png
Normal file
After Width: | Height: | Size: 9.7 KiB |
BIN
docs/getting_started/img/quickstart_properties.png
Normal file
After Width: | Height: | Size: 22 KiB |
BIN
docs/getting_started/img/quickstart_pull.png
Normal file
After Width: | Height: | Size: 3.0 KiB |
BIN
docs/getting_started/img/quickstart_push.png
Normal file
After Width: | Height: | Size: 2.9 KiB |
BIN
docs/getting_started/img/quickstart_refresh.png
Normal file
After Width: | Height: | Size: 7.1 KiB |
BIN
docs/getting_started/img/quickstart_remove.png
Normal file
After Width: | Height: | Size: 1.7 KiB |
BIN
docs/getting_started/img/quickstart_replication.png
Normal file
After Width: | Height: | Size: 15 KiB |
BIN
docs/getting_started/img/quickstart_session_init.png
Normal file
After Width: | Height: | Size: 19 KiB |
BIN
docs/getting_started/img/quickstart_session_status.png
Normal file
After Width: | Height: | Size: 2.5 KiB |
BIN
docs/getting_started/img/quickstart_snap_time.gif
Normal file
After Width: | Height: | Size: 1.5 MiB |
BIN
docs/getting_started/img/quickstart_snap_view.gif
Normal file
After Width: | Height: | Size: 5.2 MiB |
BIN
docs/getting_started/img/quickstart_unlock.png
Normal file
After Width: | Height: | Size: 3.8 KiB |
BIN
docs/getting_started/img/quickstart_user_info.png
Normal file
After Width: | Height: | Size: 5.4 KiB |
BIN
docs/getting_started/img/quickstart_user_representation.png
Normal file
After Width: | Height: | Size: 32 KiB |
BIN
docs/getting_started/img/quickstart_users.png
Normal file
After Width: | Height: | Size: 18 KiB |
11
docs/getting_started/index.rst
Normal file
@ -0,0 +1,11 @@
|
||||
===============
|
||||
Getting started
|
||||
===============
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:name: toc-getting-started
|
||||
|
||||
install
|
||||
quickstart
|
||||
glossary
|
13
docs/getting_started/install.rst
Normal file
@ -0,0 +1,13 @@
|
||||
============
|
||||
Installation
|
||||
============
|
||||
|
||||
.. hint::
|
||||
The process is the same for linux, mac and windows.
|
||||
|
||||
1. Download latest `release <https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build>`_ or `develop (unstable !) <https://gitlab.com/slumber/multi-user/-/jobs/artifacts/develop/download?job=build>`_ build.
|
||||
2. Run blender as administrator (to allow python dependencies auto-installation).
|
||||
3. Install **multi-user.zip** from your addon preferences.
|
||||
|
||||
Once the addon is succesfully installed, I strongly recommend you to follow the :ref:`quickstart`
|
||||
tutorial.
|
418
docs/getting_started/quickstart.rst
Normal file
@ -0,0 +1,418 @@
|
||||
.. _quickstart:
|
||||
|
||||
===========
|
||||
Quick start
|
||||
===========
|
||||
|
||||
.. hint::
|
||||
*All session related settings are located under: `View3D -> Sidebar -> Multiuser panel`*
|
||||
|
||||
The multi-user is based on a session management system.
|
||||
In this this guide you will quickly learn how to use the collaborative session system in three part:
|
||||
|
||||
- :ref:`how-to-host`
|
||||
- :ref:`how-to-join`
|
||||
- :ref:`how-to-manage`
|
||||
|
||||
.. _how-to-host:
|
||||
|
||||
How to host a session
|
||||
=====================
|
||||
|
||||
The multi-user add-on rely on a Client-Server architecture.
|
||||
The server is the heart of the collaborative session,
|
||||
it will allow each users to communicate with each others.
|
||||
In simple terms, *Hosting a session* means *run a local server and connect the local client to it*.
|
||||
When I said **local server** I mean accessible from the LAN (Local Area Network).
|
||||
|
||||
However sometime you will need to host a session over the internet,
|
||||
in this case I strongly recommand you to read the :ref:`internet-guide` tutorial.
|
||||
|
||||
.. _user-info:
|
||||
|
||||
-----------------------------
|
||||
1. Fill your user information
|
||||
-----------------------------
|
||||
|
||||
The **User Info** panel (See image below) allow you to constomize your online identity.
|
||||
|
||||
.. figure:: img/quickstart_user_info.png
|
||||
:align: center
|
||||
|
||||
User info panel
|
||||
|
||||
|
||||
Let's fill those tow field:
|
||||
|
||||
- **name**: your online name.
|
||||
- **color**: a color used to represent you into other user workspace(see image below).
|
||||
|
||||
|
||||
During online sessions, other users will see your selected object and camera hilghlited in your profile color.
|
||||
|
||||
.. _user-representation:
|
||||
|
||||
.. figure:: img/quickstart_user_representation.png
|
||||
:align: center
|
||||
|
||||
User viewport representation
|
||||
|
||||
--------------------
|
||||
2. Setup the network
|
||||
--------------------
|
||||
|
||||
When the hosting process will start, the multi-user addon will lauch a local server instance.
|
||||
In the nerwork panel select **HOST**.
|
||||
The **Host sub-panel** (see image below) allow you to configure the server according to:
|
||||
|
||||
* **Port**: Port on wich the server is listening.
|
||||
* **Start from**: The session initialisation method.
|
||||
|
||||
* **current scenes**: Start with the current blendfile datas.
|
||||
* **an empty scene**: Clear a data and start over.
|
||||
|
||||
.. danger::
|
||||
By starting from an empty, all of the blend data will be removed !
|
||||
Ensure to save your existing work before launching the session.
|
||||
|
||||
* **Admin password**: The session administration password.
|
||||
|
||||
.. figure:: img/quickstart_host.png
|
||||
:align: center
|
||||
:alt: host menu
|
||||
|
||||
Host network panel
|
||||
|
||||
|
||||
.. note:: Additionnal configuration setting can be found in the :ref:`advanced` section.
|
||||
|
||||
Once everything is setup you can hit the **HOST** button to launch the session !
|
||||
|
||||
It will do two things:
|
||||
|
||||
* Start a local server
|
||||
* Connect you to it as an :ref:`admin`
|
||||
|
||||
During online session, various actions are available to you, go to :ref:`how-to-manage` section to
|
||||
learn more about them.
|
||||
|
||||
.. _how-to-join:
|
||||
|
||||
How to join a session
|
||||
=====================
|
||||
|
||||
This section describe how join a launched session.
|
||||
Before starting make sure that you have access to the session ip and port.
|
||||
|
||||
-----------------------------
|
||||
1. Fill your user information
|
||||
-----------------------------
|
||||
|
||||
Follow the user-info_ section for this step.
|
||||
|
||||
----------------
|
||||
2. Network setup
|
||||
----------------
|
||||
|
||||
In the nerwork panel select **JOIN**.
|
||||
The **join sub-panel** (see image below) allow you configure the client to join a
|
||||
collaborative session.
|
||||
|
||||
.. figure:: img/quickstart_join.png
|
||||
:align: center
|
||||
:alt: Connect menu
|
||||
|
||||
Connection panel
|
||||
|
||||
Fill those field with your information:
|
||||
|
||||
- **IP**: the host ip.
|
||||
- **Port**: the host port.
|
||||
- **Connect as admin**: connect you with **admin rights** (see :ref:`admin` ) to the session.
|
||||
|
||||
.. Maybe something more explicit here
|
||||
|
||||
.. note::
|
||||
Additionnal configuration setting can be found in the :ref:`advanced` section.
|
||||
|
||||
Once you've set every field, hit the button **CONNECT** to join the session !
|
||||
When the :ref:`session-status` is **ONLINE** you are online and ready to start to collaborate.
|
||||
|
||||
.. note::
|
||||
On the **dedicated server** startup, the session status will get you to the **LOBBY** waiting a admin to start it.
|
||||
|
||||
If the session status is set to **LOBBY** and you are a regular user, you need to wait that an admin launch it.
|
||||
If you are the admin, you just need to init the repository to start the session (see image below).
|
||||
|
||||
.. figure:: img/quickstart_session_init.png
|
||||
:align: center
|
||||
|
||||
Session initialisation for dedicated server
|
||||
|
||||
During online session, various actions are available to you, go to :ref:`how-to-manage` section to
|
||||
learn more about them.
|
||||
|
||||
.. _how-to-manage:
|
||||
|
||||
How to manage a session
|
||||
=======================
|
||||
|
||||
The collaboration quality directly depend on the communication quality. This section describes
|
||||
various tools made in an effort to ease the communication between the different session users.
|
||||
Feel free to suggest any idea for communication tools `here <https://gitlab.com/slumber/multi-user/-/issues/75>`_ .
|
||||
|
||||
---------------------------
|
||||
Change replication behavior
|
||||
---------------------------
|
||||
|
||||
During a session, the multi-user will replicate your modifications to other instances.
|
||||
In order to avoid annoying other users when you are experimenting, some of those modifications can be ignored via
|
||||
various flags present at the top of the panel (see red area in the image bellow). Those flags are explained in the :ref:`replication` section.
|
||||
|
||||
.. figure:: img/quickstart_replication.png
|
||||
:align: center
|
||||
|
||||
Session replication flags
|
||||
|
||||
--------------------
|
||||
Monitor online users
|
||||
--------------------
|
||||
|
||||
One of the most vital tool is the **Online user panel**. It list all connected
|
||||
users information's including yours such as :
|
||||
|
||||
* **Role** : if user is an admin or a regular user.
|
||||
* **Location**: Where the user is actually working.
|
||||
* **Frame**: When (in frame) the user working.
|
||||
* **Ping**: user connection delay in milliseconds
|
||||
|
||||
.. figure:: img/quickstart_users.png
|
||||
:align: center
|
||||
|
||||
Online user panel
|
||||
|
||||
By selecting a user in the list you'll have access to different user related **actions**.
|
||||
Those operators allow you reach the selected user state in tow different dimensions: **SPACE** and **TIME**.
|
||||
|
||||
Snapping in space
|
||||
----------------
|
||||
|
||||
The **CAMERA button** (Also called **snap view** operator) allow you to snap on
|
||||
the user viewpoint. To disable the snap, click back on the button. This action
|
||||
served different purposes such as easing the review process, working together on
|
||||
wide world.
|
||||
|
||||
.. hint::
|
||||
If the target user is localized on another scene, the **snap view** operator will send you to his scene.
|
||||
|
||||
.. figure:: img/quickstart_snap_view.gif
|
||||
:align: center
|
||||
|
||||
Snap view in action
|
||||
|
||||
Snapping in time
|
||||
---------------
|
||||
|
||||
The **CLOCK button** (Also called **snap time** operator) allow you to snap on
|
||||
the user time (current frame). To disable the snap, click back on the button.
|
||||
This action is built to help various actors to work on the same temporality
|
||||
(for instance multiple animators).
|
||||
|
||||
.. figure:: img/quickstart_snap_time.gif
|
||||
:align: center
|
||||
|
||||
Snap time in action
|
||||
|
||||
|
||||
Kick a user
|
||||
-----------
|
||||
|
||||
.. warning:: Only available for :ref:`admin` !
|
||||
|
||||
|
||||
The **CROSS button** (Also called **kick** operator) allow the admin to kick the selected user. On the target user side, the session will properly disconnect.
|
||||
|
||||
|
||||
Change users display
|
||||
--------------------
|
||||
|
||||
Presence is the multi-user module responsible for users display. During the session,
|
||||
it draw users related information in your viewport such as:
|
||||
|
||||
* Username
|
||||
* User point of view
|
||||
* User selection
|
||||
|
||||
.. figure:: img/quickstart_presence.png
|
||||
:align: center
|
||||
|
||||
Presence show flags
|
||||
|
||||
The presence overlay panel (see image above) allow you to enable/disable
|
||||
various drawn parts via the following flags:
|
||||
|
||||
- **Show selected objects**: display other users current selection
|
||||
- **Show users**: display users current viewpoint
|
||||
- **Show different scenes**: display users working on other scenes
|
||||
|
||||
|
||||
|
||||
-----------
|
||||
Manage data
|
||||
-----------
|
||||
|
||||
In order to understand replication data managment, a quick introduction to the multi-user data workflow is required.
|
||||
First thing to know: until now, the addon rely on a data-based replication. In simple words, it means that it replicate
|
||||
user's action results.
|
||||
To replicate datablocks between clients the multi-user rely on what tends to be a distributed architecture:
|
||||
|
||||
- The server store the "master" version of the work.
|
||||
- Each client have a local version of the work.
|
||||
|
||||
When an artist modified something in the scene, here is what is happening in the background:
|
||||
|
||||
1. Modified data are **COMMITTED** to the local repository.
|
||||
2. Once committed locally, they are **PUSHED** to the server
|
||||
3. As soon as the server is getting updates, they are stored locally and pushed to every other clients
|
||||
|
||||
At the top of this data management system, a right management system prevent
|
||||
multiple users from modifying same data at same time. A datablock may belong to
|
||||
a connected user or be under :ref:`common-right<**COMMON**>` rights.
|
||||
|
||||
.. note::
|
||||
In a near future, the right management system will support roles to allow multiple users to
|
||||
work on different aspect of the same datablock.
|
||||
|
||||
The Repository panel (see image below) allow you to monitor, change datablock states and right manually.
|
||||
|
||||
.. figure:: img/quickstart_properties.png
|
||||
:align: center
|
||||
|
||||
Repository panel
|
||||
|
||||
The **show only owned** flag allow you to see which datablocks you are currently modifying.
|
||||
|
||||
.. warning::
|
||||
If you are editing a datablock not listed with this fag enabled, it means that you do
|
||||
not have right granted to modify it. So it won't be updated to other client !
|
||||
|
||||
Here is a quick list of available actions:
|
||||
|
||||
+---------------------------------------+-------------------+------------------------------------------------------------------------------------+
|
||||
| icon | Action | Description |
|
||||
+=======================================+===================+====================================================================================+
|
||||
| .. image:: img/quickstart_push.png | **Push** | push data-block to other clients |
|
||||
+---------------------------------------+-------------------+------------------------------------------------------------------------------------+
|
||||
| .. image:: img/quickstart_pull.png | **Pull** | pull last version into blender |
|
||||
+---------------------------------------+-------------------+------------------------------------------------------------------------------------+
|
||||
| .. image:: img/quickstart_refresh.png | **Reset** | Reset local change to the server version |
|
||||
+---------------------------------------+-------------------+------------------------------------------------------------------------------------+
|
||||
| .. image:: img/quickstart_unlock.png | **Lock/Unlock** | If locked, does nothing. If unlocked, grant modification rights to another user. |
|
||||
+---------------------------------------+-------------------+------------------------------------------------------------------------------------+
|
||||
| .. image:: img/quickstart_remove.png | **Delete** | Remove the data-block from network replication |
|
||||
+---------------------------------------+-------------------+------------------------------------------------------------------------------------+
|
||||
|
||||
.. _advanced:
|
||||
|
||||
Advanced settings
|
||||
=================
|
||||
|
||||
This section contains optional settings to configure the session behavior.
|
||||
|
||||
.. figure:: img/quickstart_advanced.png
|
||||
:align: center
|
||||
|
||||
Advanced configuration panel
|
||||
|
||||
-------
|
||||
Network
|
||||
-------
|
||||
|
||||
.. figure:: img/quickstart_advanced_network.png
|
||||
:align: center
|
||||
|
||||
Advanced network settings
|
||||
|
||||
**IPC Port** is the port used for Inter Process Communication. This port is used
|
||||
by the multi-user subprocesses to communicate with each other. If different instances
|
||||
of the multi-user addon use the same IPC port, it will create conflicts!
|
||||
|
||||
.. note::
|
||||
You only need to modify it if you need to launch multiple clients from the same
|
||||
computer (or if you try to host and join on the same computer). You should just enter a different
|
||||
**IPC port** for each blender instance.
|
||||
|
||||
**Timeout (in milliseconds)** is the maximum ping allowed before a client is automatically disconnected.
|
||||
You should only increase it if you have a bad connection.
|
||||
|
||||
.. _replication:
|
||||
|
||||
-----------
|
||||
Replication
|
||||
-----------
|
||||
|
||||
.. figure:: img/quickstart_advanced_replication.png
|
||||
:align: center
|
||||
|
||||
Advanced replication settings
|
||||
|
||||
**Synchronize render settings** (host only) enables replication of EEVEE and Cycles render settings so renders match between clients.
|
||||
|
||||
**Synchronize active camera** syncs the scene's active camera.
|
||||
|
||||
**Edit Mode Updates** enables object updates while you are in Edit Mode.
|
||||
|
||||
.. warning:: Edit Mode Updates kills performance with complex objects (heavy meshes, grease pencil, etc.).
|
||||
|
||||
**Update method** allows you to change how replication updates are triggered. For now, two update methods are implemented:
|
||||
|
||||
- **Default**: uses external threads to monitor datablock changes; slower and less accurate.
|
||||
- **Depsgraph ⚠️**: uses the Blender dependency graph to trigger updates; faster but experimental and unstable!
|
||||
|
||||
**Properties frequency grid** allows you to set a custom replication frequency for each type of data-block (see the sketch after this list):
|
||||
|
||||
- **Refresh**: pushed data update rate (in seconds)
|
||||
- **Apply**: pulled data update rate (in seconds)
|
||||
|
||||
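As a purely illustrative sketch, the grid can be thought of as a mapping from data-block type to a pair of rates in seconds. The two example entries below mirror the ``bl_delay_refresh`` / ``bl_delay_apply`` defaults of the bl_types classes that appear later in this diff; the helper itself is an assumption, not the add-on's real data structure.

.. code-block:: python

    # Illustrative only: not the add-on's real data structure.
    FREQUENCY_GRID = {
        "Action":   {"refresh": 1, "apply": 1},   # matches bl_action.py below
        "Armature": {"refresh": 1, "apply": 0},   # matches bl_armature.py below
    }

    def rates_for(datablock_type):
        """Return the (refresh, apply) rates for a data-block type."""
        entry = FREQUENCY_GRID.get(datablock_type, {"refresh": 1, "apply": 1})
        return entry["refresh"], entry["apply"]

    print(rates_for("Armature"))  # (1, 0)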
-----
|
||||
Cache
|
||||
-----
|
||||
|
||||
The multi-user addon can replicate external blend dependencies such as images, movies and sounds.
|
||||
On each client, those files are stored in the cache folder.
|
||||
|
||||
.. figure:: img/quickstart_advanced_cache.png
|
||||
:align: center
|
||||
|
||||
Advanced cache settings
|
||||
|
||||
**cache_directory** lets you choose where cached files (images, sounds, movies) will be saved.
|
||||
|
||||
**Clear memory filecache** will save memory space at runtime by removing the file content from memory as soon as it has been written to disk.
|
||||
|
||||
**Clear cache** will remove all files from the cache folder.
|
||||
|
||||
.. warning:: Clearing the cache could break your scene's images/movies/sounds if they are still used in the blend!
|
||||
|
||||
---
|
||||
Log
|
||||
---
|
||||
|
||||
.. figure:: img/quickstart_advanced_logging.png
|
||||
:align: center
|
||||
|
||||
Advanced log settings
|
||||
|
||||
**log level** sets the logging level of detail. Here is the meaning of each value (a small mapping sketch follows the table):
|
||||
|
||||
+-----------+-----------------------------------------------+
|
||||
| Log level | Description |
|
||||
+===========+===============================================+
|
||||
| ERROR | Shows only critical error |
|
||||
+-----------+-----------------------------------------------+
|
||||
| WARNING | Shows only errors (all kind) |
|
||||
+-----------+-----------------------------------------------+
|
||||
| INFO | Shows only status related messages and errors |
|
||||
+-----------+-----------------------------------------------+
|
||||
| DEBUG | Shows every possible information. |
|
||||
+-----------+-----------------------------------------------+
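As a small, hedged sketch, these values can be mapped onto Python's standard ``logging`` levels; the snippet below only assumes the standard library and is not the add-on's actual configuration code.

.. code-block:: python

    import logging

    # Assumed mapping onto the standard logging module; illustrative only.
    LOG_LEVELS = {
        "ERROR": logging.ERROR,      # critical errors only
        "WARNING": logging.WARNING,  # warnings and errors
        "INFO": logging.INFO,        # status messages, warnings and errors
        "DEBUG": logging.DEBUG,      # every possible information
    }

    logging.basicConfig(level=LOG_LEVELS["INFO"])
    logging.info("visible at INFO and DEBUG")
    logging.debug("visible at DEBUG only")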
|
BIN
docs/img/homepage_ban.png
Normal file
After Width: | Height: | Size: 409 KiB |
BIN
docs/img/homepage_roadmap.png
Normal file
After Width: | Height: | Size: 274 KiB |
65
docs/index.rst
Normal file
@ -0,0 +1,65 @@
|
||||
=====================================
|
||||
Welcome to Multi-user's documentation
|
||||
=====================================
|
||||
|
||||
.. image:: img/homepage_ban.png
|
||||
|
||||
The multi-user addon is a free and open source Blender plugin. It aims to let multiple users work on the same .blend file over the network.
|
||||
|
||||
.. warning:: Under development, use it at your own risk.
|
||||
|
||||
|
||||
|
||||
Main Features
|
||||
=============
|
||||
|
||||
- Collaborative workflow in blender
|
||||
- Viewport users presence (active selection, POV)
|
||||
- Datablock rights management
|
||||
- Tested under Windows
|
||||
|
||||
Community
|
||||
=========
|
||||
|
||||
A `discord server <https://discord.gg/aBPvGws>`_ has been created to provide help for new users and
|
||||
organize collaborative creation sessions.
|
||||
|
||||
Status
|
||||
======
|
||||
|
||||
.. image:: img/homepage_roadmap.png
|
||||
|
||||
|
||||
Follow the `roadmap <https://gitlab.com/slumber/multi-user/-/boards/929107>`_ to keep up with the latest news.
|
||||
|
||||
Documentation is organized into the following sections:
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:caption: About
|
||||
:name: sec-about
|
||||
|
||||
about/introduction
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:caption: Getting started
|
||||
:name: sec-learn
|
||||
|
||||
getting_started/install
|
||||
getting_started/quickstart
|
||||
getting_started/glossary
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:caption: Tutorials
|
||||
:name: sec-tutorials
|
||||
|
||||
tutorials/hosting_guide
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:caption: Community
|
||||
:name: sec-community
|
||||
|
||||
ways_to_contribute
|
35
docs/make.bat
Normal file
@ -0,0 +1,35 @@
|
||||
@ECHO OFF
|
||||
|
||||
pushd %~dp0
|
||||
|
||||
REM Command file for Sphinx documentation
|
||||
|
||||
if "%SPHINXBUILD%" == "" (
|
||||
set SPHINXBUILD=sphinx-build
|
||||
)
|
||||
set SOURCEDIR=.
|
||||
set BUILDDIR=_build
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
%SPHINXBUILD% >NUL 2>NUL
|
||||
if errorlevel 9009 (
|
||||
echo.
|
||||
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||
echo.may add the Sphinx directory to PATH.
|
||||
echo.
|
||||
echo.If you don't have Sphinx installed, grab it from
|
||||
echo.http://sphinx-doc.org/
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
goto end
|
||||
|
||||
:help
|
||||
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
|
||||
:end
|
||||
popd
|
278
docs/tutorials/hosting_guide.rst
Normal file
@ -0,0 +1,278 @@
|
||||
.. _internet-guide:
|
||||
|
||||
===================
|
||||
Hosting on internet
|
||||
===================
|
||||
|
||||
.. warning::
|
||||
For now, these communications are not encrypted, but encryption is planned for the mid-term future (`Status <https://gitlab.com/slumber/multi-user/issues/62>`_).
|
||||
|
||||
This tutorial aims to guide you through hosting a collaborative session over the internet.
|
||||
Hosting a session can be done in several ways:
|
||||
|
||||
- :ref:`host-blender`: hosting a session directly from the blender add-on panel.
|
||||
- :ref:`host-dedicated`: hosting a session directly from the command line interface on a computer without blender.
|
||||
|
||||
.. _host-blender:
|
||||
|
||||
-------------
|
||||
From blender
|
||||
-------------
|
||||
By default, your router doesn't allow anyone from the internet to reach your computer.
|
||||
In order to grant server access to people over the internet, you have two main options:
|
||||
|
||||
* The :ref:`connection-sharing`: the easiest way.
|
||||
* The :ref:`port-forwarding`: this one is less secure; if you have no networking knowledge, you should definitely go with :ref:`connection-sharing`.
|
||||
|
||||
.. _connection-sharing:
|
||||
|
||||
Using a connection sharing solution
|
||||
-----------------------------------
|
||||
|
||||
Many third-party tools like `ZEROTIER <https://www.zerotier.com/download/>`_ (free) or `HAMACHI <https://vpn.net/>`_ (free for up to 5 users) allow you to share your private network with other people.
|
||||
For this example I'm going to use ZeroTier because it's free and open source.
|
||||
|
||||
1. Installation
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
Let's start by downloading and installing ZeroTier:
|
||||
https://www.zerotier.com/download/
|
||||
|
||||
Once installed, launch it.
|
||||
|
||||
2. Network creation
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To create a ZeroTier private network, you need to register a ZeroTier account `on my.zerotier.com <https://my.zerotier.com/login>`_
|
||||
(click on **login**, then register at the bottom).
|
||||
|
||||
Once your account is activated, you can log in to `my.zerotier.com <https://my.zerotier.com/login>`_.
|
||||
Head to the **Network** section (highlighted in red in the image below).
|
||||
|
||||
.. figure:: img/hosting_guide_head_network.png
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
ZeroTier user homepage
|
||||
|
||||
Hit 'Create a network' (see image below) and go to the network settings.
|
||||
|
||||
.. figure:: img/hosting_guide_create_network.png
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Network page
|
||||
|
||||
Now that the network is created, let's configure it.
|
||||
|
||||
In the Settings section (see image below), you can change the network name to whatever you want.
|
||||
Make sure that the field **Access Control** is set to **PRIVATE**.
|
||||
|
||||
.. hint::
|
||||
If you set the Access Control to PUBLIC, anyone will be able to join without
|
||||
your confirmation. It is easier to set up but less secure.
|
||||
|
||||
.. figure:: img/hosting_guide_network_settings.png
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Network settings
|
||||
|
||||
That's all for the network setup !
|
||||
Now let's connect everyone.
|
||||
|
||||
.. _network-authorization:
|
||||
|
||||
3. Network authorization
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Since your ZeroTier network is private, you will need to authorize each new user
|
||||
to connect to it.
|
||||
For each user you want to add, follow these steps:
|
||||
|
||||
1. Get the client's **ZeroTier ID** by right-clicking on the ZeroTier tray icon and clicking on the `Node ID`; it will be copied to the clipboard.
|
||||
|
||||
.. figure:: img/hosting_guide_get_node.png
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Get the ZeroTier client id
|
||||
|
||||
2. Go to the network settings in the Member section and paste the Node ID into the Manually Add Member field.
|
||||
|
||||
.. figure:: img/hosting_guide_add_node.png
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Add the client to network authorized users
|
||||
|
||||
4. Network connection
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To connect to the ZeroTier network, get the network id from the network settings (see image).
|
||||
|
||||
.. figure:: img/hosting_guide_get_id.png
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Now we are ready to join the network !
|
||||
Right click on the ZeroTier tray icon and select **Join Network** !
|
||||
|
||||
.. figure:: img/hosting_guide_join_network.png
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
.. figure:: img/hosting_guide_join.png
|
||||
:align: center
|
||||
|
||||
Joining the network
|
||||
|
||||
Paste the network ID, check ``Allow Managed``, then click on Join!
|
||||
You should be connected to the network.
|
||||
|
||||
Let's check the connection status. Right-click on the tray icon and click on **Show Networks...**.
|
||||
|
||||
.. figure:: img/hosting_guide_show_network.png
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Show network status
|
||||
|
||||
.. figure:: img/hosting_guide_network_status.png
|
||||
:align: center
|
||||
|
||||
Network status.
|
||||
|
||||
The network status must be **OK** for each user (like in the picture above); otherwise you are not connected to the network.
|
||||
If you see something like **ACCESS_DENIED**, it means that you were not authorized to join the network. Please check the :ref:`network-authorization` section.
|
||||
|
||||
That's it for the ZeroTier network setup. Everything should now be set up to use the multi-user add-on over the internet! You can follow the :ref:`quickstart` guide to start using it.
|
||||
|
||||
.. _port-forwarding:
|
||||
|
||||
Using port-forwarding
|
||||
---------------------
|
||||
|
||||
The port-forwarding method consists of configuring your network router to allow internet traffic through specific ports.
|
||||
|
||||
To know which ports are used by the add-on, check the :ref:`port-setup` section.
|
||||
To set up port forwarding for each port, you can follow this `guide <https://www.wikihow.com/Set-Up-Port-Forwarding-on-a-Router>`_, for example.
|
||||
|
||||
Once you have set up the network, you can follow the :ref:`quickstart` guide to start using the multi-user add-on!
|
||||
|
||||
.. _host-dedicated:
|
||||
|
||||
--------------------------
|
||||
From the dedicated server
|
||||
--------------------------
|
||||
|
||||
.. warning::
|
||||
The dedicated server is designed to run directly on an internet server (like a VPS). You can also
|
||||
run it at home on a LAN, but for internet hosting you need to follow the :ref:`port-forwarding` setup first.
|
||||
|
||||
The dedicated server allows you to easily host a session from any location.
|
||||
It was developed to improve internet hosting performance.
|
||||
|
||||
The dedicated server can be run in two ways:
|
||||
|
||||
- :ref:`cmd-line`
|
||||
- :ref:`docker`
|
||||
|
||||
.. _cmd-line:
|
||||
|
||||
Using a regular command line
|
||||
----------------------------
|
||||
|
||||
You can run the dedicated server on any platform by following these steps:
|
||||
|
||||
1. First, download and install Python 3 (3.6 or above).
|
||||
2. Install the replication library:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
python -m pip install replication
|
||||
|
||||
3. Launch the server with:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
replication.serve
|
||||
|
||||
.. hint::
|
||||
You can also specify a custom **port** (-p), **timeout** (-t), **admin password** (-pwd), **log level (ERROR, WARNING, INFO or DEBUG)** (-l) and **log file** (-lf) with the following optional arguments:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
replication.serve -p 5555 -pwd toto -t 1000 -l INFO -lf server.log
|
||||
|
||||
As soon as the dedicated server is running, you can connect to it from blender (follow :ref:`how-to-join`).
|
||||
|
||||
|
||||
.. hint::
|
||||
Some commands are available to manage the session. Check :ref:`dedicated-management` to learn more.
|
||||
|
||||
|
||||
.. _docker:
|
||||
|
||||
Using a pre-configured image on docker engine
|
||||
---------------------------------------------
|
||||
|
||||
Launching the dedicated server from a docker engine is as simple as:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
docker run -d \
|
||||
-p 5555-5560:5555-5560 \
|
||||
-e port=5555 \
|
||||
-e password=admin \
|
||||
-e timeout=1000 \
|
||||
registry.gitlab.com/slumber/multi-user/multi-user-server:0.0.3
|
||||
|
||||
As soon as the dedicated server is running, you can connect to it from blender.
|
||||
You can check the :ref:`how-to-join` section.
|
||||
|
||||
.. hint::
|
||||
Some commands are available to manage the session. Check :ref:`dedicated-management` to learn more.
|
||||
|
||||
.. _dedicated-management:
|
||||
|
||||
Dedicated server management
|
||||
---------------------------
|
||||
|
||||
Here is the list of available commands from the dedicated server:
|
||||
|
||||
- ``help``: Show all commands.
|
||||
- ``exit`` or ``Ctrl+C`` : Stop the server.
|
||||
- ``kick username``: kick the provided user.
|
||||
- ``users``: list all online users.
|
||||
|
||||
|
||||
.. _port-setup:
|
||||
|
||||
----------
|
||||
Port setup
|
||||
----------
|
||||
|
||||
The multi-user network architecture is based on a client-server model. The communication protocol uses four ports to communicate with clients:
|
||||
|
||||
* Commands: command transmission (such as **snapshots**, **change_rights**, etc.) [given port]
|
||||
* Subscriber : pull data [Commands port + 1]
|
||||
* Publisher : push data [Commands port + 2]
|
||||
* TTL (time to live) : used to ping each client [Commands port + 3]
|
||||
|
||||
To know which ports will be used, you just have to read the Commands port in your preferences.
|
||||
|
||||
.. figure:: img/hosting_guide_port.png
|
||||
:align: center
|
||||
:alt: Port
|
||||
:width: 200px
|
||||
|
||||
Port in host settings
|
||||
In the picture above we have set our port to **5555**, so the ports will be:
|
||||
|
||||
* Commands: 5555 (**5555**)
|
||||
* Subscriber: 5556 (**5555** +1)
|
||||
* Publisher: 5557 (**5555** +2)
|
||||
* TTL: 5558 (**5555** +3)
|
||||
|
||||
Those four ports need to be accessible from the clients, otherwise it won't work at all (see the sketch below)!
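A quick Python sketch of that layout (the helper name is hypothetical; only the +1/+2/+3 offsets come from the list above):

.. code-block:: python

    # Hypothetical helper: the offsets match the list above.
    def session_ports(command_port=5555):
        return {
            "Commands":   command_port,      # snapshots, change_rights, ...
            "Subscriber": command_port + 1,  # pull data
            "Publisher":  command_port + 2,  # push data
            "TTL":        command_port + 3,  # client ping
        }

    print(session_ports(5555))
    # {'Commands': 5555, 'Subscriber': 5556, 'Publisher': 5557, 'TTL': 5558}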
|
BIN
docs/tutorials/img/hosting_guide_add_node.png
Normal file
After Width: | Height: | Size: 38 KiB |
BIN
docs/tutorials/img/hosting_guide_create_network.png
Normal file
After Width: | Height: | Size: 9.8 KiB |
BIN
docs/tutorials/img/hosting_guide_get_id.png
Normal file
After Width: | Height: | Size: 27 KiB |
BIN
docs/tutorials/img/hosting_guide_get_node.png
Normal file
After Width: | Height: | Size: 20 KiB |
BIN
docs/tutorials/img/hosting_guide_head_network.png
Normal file
After Width: | Height: | Size: 28 KiB |
BIN
docs/tutorials/img/hosting_guide_join.png
Normal file
After Width: | Height: | Size: 3.4 KiB |
BIN
docs/tutorials/img/hosting_guide_join_network.png
Normal file
After Width: | Height: | Size: 22 KiB |
BIN
docs/tutorials/img/hosting_guide_network_settings.png
Normal file
After Width: | Height: | Size: 37 KiB |
BIN
docs/tutorials/img/hosting_guide_network_status.png
Normal file
After Width: | Height: | Size: 12 KiB |
BIN
docs/tutorials/img/hosting_guide_port.png
Normal file
After Width: | Height: | Size: 20 KiB |
BIN
docs/tutorials/img/hosting_guide_show_network.png
Normal file
After Width: | Height: | Size: 19 KiB |
8
docs/tutorials/index.rst
Normal file
@ -0,0 +1,8 @@
|
||||
Tutorials
|
||||
=========
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:name: toc-tutorial
|
||||
|
||||
hosting_guide
|
42
docs/ways_to_contribute.rst
Normal file
@ -0,0 +1,42 @@
|
||||
==================
|
||||
Ways to contribute
|
||||
==================
|
||||
|
||||
.. Note:: Work in progress
|
||||
|
||||
Testing and reporting issues
|
||||
============================
|
||||
|
||||
A great way of contributing to the multi-user addon is to test the development branch and to report issues.
|
||||
It is also helpful to report issues discovered in releases, so that they can be fixed in the development branch and in future releases.
|
||||
|
||||
|
||||
----------------------------
|
||||
Testing development versions
|
||||
----------------------------
|
||||
|
||||
In order to help with the testing, you have several possibilities:
|
||||
|
||||
- Test `latest release <https://gitlab.com/slumber/multi-user/-/tags>`_
|
||||
- Test `development branch <https://gitlab.com/slumber/multi-user/-/branches>`_
|
||||
|
||||
--------------------------
|
||||
Filing an issue on Gitlab
|
||||
--------------------------
|
||||
|
||||
The `gitlab issue tracker <https://gitlab.com/slumber/multi-user/issues>`_ is used for bug reports and enhancement suggestions.
|
||||
You will need a Gitlab account to open a new issue there by clicking the "New issue" button.
|
||||
|
||||
Here is some useful information you should provide in a bug report:
|
||||
|
||||
- **Multi-user version** such as *latest*, *commit-hash*, *branch*. This is a must-have. Some issues might be present in the current stable release but already fixed in the development branch.
|
||||
- **How to reproduce the bug**. In the majority of cases, bugs are reproducible, i.e. it is possible to trigger them reliably by following some steps. Please always describe those steps as clearly as possible, so that everyone can try to reproduce the issue and confirm it. It could also take the form of a screen capture.
|
||||
|
||||
Contributing code
|
||||
=================
|
||||
|
||||
1. Fork it (https://gitlab.com/yourname/yourproject/fork)
|
||||
2. Create your feature branch (git checkout -b feature/fooBar)
|
||||
3. Commit your changes (git commit -am 'Add some fooBar')
|
||||
4. Push to the branch (git push origin feature/fooBar)
|
||||
5. Create a new Pull Request
|
@ -1,80 +0,0 @@
|
||||
import collections
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.setLevel(logging.ERROR)
|
||||
|
||||
CONFIG_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "config")
|
||||
CONFIG = os.path.join(CONFIG_DIR, "app.yaml")
|
||||
|
||||
THIRD_PARTY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
|
||||
CACHE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "cache")
|
||||
PYTHON_PATH = None
|
||||
SUBPROCESS_DIR = None
|
||||
|
||||
|
||||
rtypes = []
|
||||
|
||||
|
||||
def load_config():
|
||||
import yaml
|
||||
|
||||
try:
|
||||
with open(CONFIG, 'r') as config_file:
|
||||
return yaml.safe_load(config_file)
|
||||
except FileNotFoundError:
|
||||
logger.info("no config")
|
||||
return {}
|
||||
|
||||
|
||||
def save_config(config):
|
||||
import yaml
|
||||
|
||||
logger.info("saving config")
|
||||
with open(CONFIG, 'w') as outfile:
|
||||
yaml.dump(config, outfile, default_flow_style=False)
|
||||
|
||||
|
||||
def module_can_be_imported(name):
|
||||
try:
|
||||
__import__(name)
|
||||
return True
|
||||
except ModuleNotFoundError:
|
||||
return False
|
||||
|
||||
|
||||
def get_package_install_directory():
|
||||
for path in sys.path:
|
||||
if os.path.basename(path) in ("dist-packages", "site-packages"):
|
||||
return path
|
||||
|
||||
|
||||
def install_pip():
|
||||
# pip can not necessarily be imported into Blender after this
|
||||
get_pip_path = Path(__file__).parent / "libs" / "get-pip.py"
|
||||
subprocess.run([str(PYTHON_PATH), str(get_pip_path)], cwd=SUBPROCESS_DIR)
|
||||
|
||||
|
||||
def install_package(name):
|
||||
target = get_package_install_directory()
|
||||
|
||||
subprocess.run([str(PYTHON_PATH), "-m", "pip", "install",
|
||||
name, '--target', target], cwd=SUBPROCESS_DIR)
|
||||
|
||||
|
||||
def setup(dependencies, python_path):
|
||||
global PYTHON_PATH, SUBPROCESS_DIR
|
||||
|
||||
PYTHON_PATH = Path(python_path)
|
||||
SUBPROCESS_DIR = PYTHON_PATH.parent
|
||||
|
||||
if not module_can_be_imported("pip"):
|
||||
install_pip()
|
||||
|
||||
for module_name, package_name in dependencies:
|
||||
if not module_can_be_imported(module_name):
|
||||
install_package(package_name)
|
@ -1,373 +0,0 @@
|
||||
import bpy
|
||||
import bpy.types as T
|
||||
import mathutils
|
||||
|
||||
|
||||
def remove_items_from_dict(d, keys, recursive=False):
|
||||
copy = dict(d)
|
||||
for k in keys:
|
||||
copy.pop(k, None)
|
||||
if recursive:
|
||||
for k in [k for k in copy.keys() if isinstance(copy[k], dict)]:
|
||||
copy[k] = remove_items_from_dict(copy[k], keys, recursive)
|
||||
return copy
|
||||
|
||||
|
||||
def _is_dictionnary(v):
|
||||
return hasattr(v, "items") and callable(v.items)
|
||||
|
||||
|
||||
def _dump_filter_type(t):
|
||||
return lambda x: isinstance(x, t)
|
||||
|
||||
def _dump_filter_type_by_name(t_name):
|
||||
return lambda x: t_name == x.__class__.__name__
|
||||
|
||||
def _dump_filter_array(array):
|
||||
# only primitive type array
|
||||
if not isinstance(array, T.bpy_prop_array):
|
||||
return False
|
||||
if len(array) > 0 and type(array[0]) not in [bool, float, int]:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def _dump_filter_default(default):
|
||||
if default is None:
|
||||
return False
|
||||
if type(default) is list:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def _load_filter_type(t, use_bl_rna=True):
|
||||
def filter_function(x):
|
||||
if use_bl_rna and x.bl_rna_property:
|
||||
return isinstance(x.bl_rna_property, t)
|
||||
else:
|
||||
isinstance(x.read(), t)
|
||||
return filter_function
|
||||
|
||||
|
||||
def _load_filter_array(array):
|
||||
# only primitive type array
|
||||
if not isinstance(array.read(), T.bpy_prop_array):
|
||||
return False
|
||||
if len(array.read()) > 0 and type(array.read()[0]) not in [bool, float, int]:
|
||||
return False
|
||||
return True
|
||||
|
||||
def _load_filter_color(color):
|
||||
return color.__class__.__name__ == 'Color'
|
||||
|
||||
def _load_filter_default(default):
|
||||
if default.read() is None:
|
||||
return False
|
||||
if type(default.read()) is list:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class Dumper:
|
||||
|
||||
def __init__(self):
|
||||
self.verbose = False
|
||||
self.depth = 1
|
||||
self.keep_compounds_as_leaves = False
|
||||
self.accept_read_only = True
|
||||
self._build_inline_dump_functions()
|
||||
self._build_match_elements()
|
||||
self.type_subset = self.match_subset_all
|
||||
self.include_filter = []
|
||||
self.exclude_filter = []
|
||||
# self._atomic_types = [] # TODO future option?
|
||||
|
||||
def dump(self, any):
|
||||
return self._dump_any(any, 0)
|
||||
|
||||
def _dump_any(self, any, depth):
|
||||
for filter_function, dump_function in self.type_subset:
|
||||
if filter_function(any):
|
||||
return dump_function[not (depth >= self.depth)](any, depth + 1)
|
||||
|
||||
def _build_inline_dump_functions(self):
|
||||
self._dump_identity = (lambda x, depth: x, lambda x, depth: x)
|
||||
self._dump_ID = (lambda x, depth: x.name, self._dump_default_as_branch)
|
||||
self._dump_collection = (self._dump_default_as_leaf, self._dump_collection_as_branch)
|
||||
self._dump_array = (self._dump_default_as_leaf, self._dump_array_as_branch)
|
||||
self._dump_matrix = (self._dump_matrix_as_leaf, self._dump_matrix_as_leaf)
|
||||
self._dump_vector = (self._dump_vector_as_leaf, self._dump_vector_as_leaf)
|
||||
self._dump_default = (self._dump_default_as_leaf, self._dump_default_as_branch)
|
||||
self._dump_color = (self._dump_color_as_leaf, self._dump_color_as_leaf)
|
||||
|
||||
def _build_match_elements(self):
|
||||
self._match_type_bool = (_dump_filter_type(bool), self._dump_identity)
|
||||
self._match_type_int = (_dump_filter_type(int), self._dump_identity)
|
||||
self._match_type_float = (_dump_filter_type(float), self._dump_identity)
|
||||
self._match_type_string = (_dump_filter_type(str), self._dump_identity)
|
||||
self._match_type_ID = (_dump_filter_type(T.ID), self._dump_ID)
|
||||
self._match_type_bpy_prop_collection = (_dump_filter_type(T.bpy_prop_collection), self._dump_collection)
|
||||
self._match_type_array = (_dump_filter_array, self._dump_array)
|
||||
self._match_type_matrix = (_dump_filter_type(mathutils.Matrix), self._dump_matrix)
|
||||
self._match_type_vector = (_dump_filter_type(mathutils.Vector), self._dump_vector)
|
||||
self._match_type_color = (_dump_filter_type_by_name("Color"), self._dump_color)
|
||||
self._match_default = (_dump_filter_default, self._dump_default)
|
||||
|
||||
def _dump_collection_as_branch(self, collection, depth):
|
||||
dump = {}
|
||||
for i in collection.items():
|
||||
dv = self._dump_any(i[1], depth)
|
||||
if not (dv is None):
|
||||
dump[i[0]] = dv
|
||||
return dump
|
||||
|
||||
def _dump_default_as_leaf(self, default, depth):
|
||||
if self.keep_compounds_as_leaves:
|
||||
return str(type(default))
|
||||
else:
|
||||
return None
|
||||
|
||||
def _dump_array_as_branch(self, array, depth):
|
||||
return [i for i in array]
|
||||
|
||||
def _dump_matrix_as_leaf(self, matrix, depth):
|
||||
return [list(v) for v in matrix]
|
||||
|
||||
def _dump_vector_as_leaf(self, vector, depth):
|
||||
return list(vector)
|
||||
|
||||
def _dump_color_as_leaf(self, color, depth):
|
||||
return list(color)
|
||||
|
||||
def _dump_default_as_branch(self, default, depth):
|
||||
def is_valid_property(p):
|
||||
try:
|
||||
if (self.include_filter and p not in self.include_filter):
|
||||
return False
|
||||
getattr(default, p)
|
||||
except AttributeError:
|
||||
return False
|
||||
if p.startswith("__"):
|
||||
return False
|
||||
if callable(getattr(default, p)):
|
||||
return False
|
||||
if p in ["bl_rna", "rna_type"]:
|
||||
return False
|
||||
return True
|
||||
|
||||
all_property_names = [p for p in dir(default) if is_valid_property(p) and p != '' and p not in self.exclude_filter]
|
||||
dump = {}
|
||||
for p in all_property_names:
|
||||
if (self.exclude_filter and p in self.exclude_filter) or\
|
||||
(self.include_filter and p not in self.include_filter):
|
||||
return False
|
||||
dp = self._dump_any(getattr(default, p), depth)
|
||||
if not (dp is None):
|
||||
dump[p] = dp
|
||||
return dump
|
||||
|
||||
@property
|
||||
def match_subset_all(self):
|
||||
return [
|
||||
self._match_type_bool,
|
||||
self._match_type_int,
|
||||
self._match_type_float,
|
||||
self._match_type_string,
|
||||
self._match_type_ID,
|
||||
self._match_type_bpy_prop_collection,
|
||||
self._match_type_array,
|
||||
self._match_type_matrix,
|
||||
self._match_type_vector,
|
||||
self._match_type_color,
|
||||
self._match_type_color,
|
||||
self._match_default
|
||||
]
|
||||
|
||||
@property
|
||||
def match_subset_primitives(self):
|
||||
return [
|
||||
self._match_type_bool,
|
||||
self._match_type_int,
|
||||
self._match_type_float,
|
||||
self._match_type_string,
|
||||
self._match_default
|
||||
]
|
||||
|
||||
|
||||
class BlenderAPIElement:
|
||||
def __init__(self, api_element, sub_element_name="", occlude_read_only=True):
|
||||
self.api_element = api_element
|
||||
self.sub_element_name = sub_element_name
|
||||
self.occlude_read_only = occlude_read_only
|
||||
|
||||
def read(self):
|
||||
return getattr(self.api_element, self.sub_element_name) if self.sub_element_name else self.api_element
|
||||
|
||||
def write(self, value):
|
||||
# take precaution if property is read-only
|
||||
try:
|
||||
if self.sub_element_name:
|
||||
setattr(self.api_element, self.sub_element_name, value)
|
||||
else:
|
||||
self.api_element = value
|
||||
except AttributeError as err:
|
||||
if not self.occlude_read_only:
|
||||
raise err
|
||||
|
||||
def extend(self, element_name):
|
||||
return BlenderAPIElement(self.read(), element_name)
|
||||
|
||||
@property
|
||||
def bl_rna_property(self):
|
||||
if not hasattr(self.api_element, "bl_rna"):
|
||||
return False
|
||||
if not self.sub_element_name:
|
||||
return False
|
||||
return self.api_element.bl_rna.properties[self.sub_element_name]
|
||||
|
||||
|
||||
class Loader:
|
||||
def __init__(self):
|
||||
self.type_subset = self.match_subset_all
|
||||
self.occlude_read_only = True
|
||||
self.order = ['*']
|
||||
|
||||
def load(self, dst_data, src_dumped_data):
|
||||
self._load_any(
|
||||
BlenderAPIElement(dst_data, occlude_read_only=self.occlude_read_only),
|
||||
src_dumped_data
|
||||
)
|
||||
|
||||
def _load_any(self, any, dump):
|
||||
for filter_function, load_function in self.type_subset:
|
||||
if filter_function(any):
|
||||
load_function(any, dump)
|
||||
return
|
||||
|
||||
|
||||
def _load_identity(self, element, dump):
|
||||
element.write(dump)
|
||||
|
||||
def _load_array(self, element, dump):
|
||||
# supports only primitive types currently
|
||||
try:
|
||||
for i in range(len(dump)):
|
||||
element.read()[i] = dump[i]
|
||||
except AttributeError as err:
|
||||
if not self.occlude_read_only:
|
||||
raise err
|
||||
|
||||
def _load_collection(self, element, dump):
|
||||
if not element.bl_rna_property:
|
||||
return
|
||||
# local enum
|
||||
CONSTRUCTOR_NEW = "new"
|
||||
CONSTRUCTOR_ADD = "add"
|
||||
|
||||
constructors = {
|
||||
T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]),
|
||||
T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, [])
|
||||
}
|
||||
element_type = element.bl_rna_property.fixed_type
|
||||
constructor = constructors.get(type(element_type))
|
||||
if constructor is None: # collection type not supported
|
||||
return
|
||||
for dumped_element in dump.values():
|
||||
try:
|
||||
constructor_parameters = [dumped_element[name] for name in constructor[1]]
|
||||
except KeyError:
|
||||
print("Collection load error, missing parameters.")
|
||||
continue # TODO handle error
|
||||
new_element = getattr(element.read(), constructor[0])(*constructor_parameters)
|
||||
self._load_any(
|
||||
BlenderAPIElement(new_element, occlude_read_only=self.occlude_read_only),
|
||||
dumped_element
|
||||
)
|
||||
|
||||
def _load_pointer(self, pointer, dump):
|
||||
rna_property_type = pointer.bl_rna_property.fixed_type
|
||||
if not rna_property_type:
|
||||
return
|
||||
if isinstance(rna_property_type, T.Image):
|
||||
pointer.write(bpy.data.images.get(dump))
|
||||
elif isinstance(rna_property_type, T.Texture):
|
||||
pointer.write(bpy.data.textures.get(dump))
|
||||
elif isinstance(rna_property_type, T.ColorRamp):
|
||||
self._load_default(pointer, dump)
|
||||
elif isinstance(rna_property_type, T.Object):
|
||||
pointer.write(bpy.data.objects.get(dump))
|
||||
elif isinstance(rna_property_type, T.Mesh):
|
||||
pointer.write(bpy.data.meshes.get(dump))
|
||||
elif isinstance(rna_property_type, T.Material):
|
||||
pointer.write(bpy.data.materials.get(dump))
|
||||
|
||||
def _load_matrix(self, matrix, dump):
|
||||
matrix.write(mathutils.Matrix(dump))
|
||||
|
||||
def _load_vector(self, vector, dump):
|
||||
vector.write(mathutils.Vector(dump))
|
||||
|
||||
def _ordered_keys(self, keys):
|
||||
ordered_keys = []
|
||||
for order_element in self.order:
|
||||
if order_element == '*':
|
||||
ordered_keys += [k for k in keys if not k in self.order]
|
||||
else:
|
||||
if order_element in keys:
|
||||
ordered_keys.append(order_element)
|
||||
return ordered_keys
|
||||
|
||||
def _load_default(self, default, dump):
|
||||
if not _is_dictionnary(dump):
|
||||
return # TODO error handling
|
||||
for k in self._ordered_keys(dump.keys()):
|
||||
v = dump[k]
|
||||
if not hasattr(default.read(), k):
|
||||
continue # TODO error handling
|
||||
try:
|
||||
self._load_any(default.extend(k), v)
|
||||
except:
|
||||
pass
|
||||
|
||||
@property
|
||||
def match_subset_all(self):
|
||||
return [
|
||||
(_load_filter_type(T.BoolProperty), self._load_identity),
|
||||
(_load_filter_type(T.IntProperty), self._load_identity),
|
||||
(_load_filter_type(mathutils.Matrix, use_bl_rna=False), self._load_matrix), # before float because bl_rna type of matrix if FloatProperty
|
||||
(_load_filter_type(mathutils.Vector, use_bl_rna=False), self._load_vector), # before float because bl_rna type of vector if FloatProperty
|
||||
(_load_filter_type(T.FloatProperty), self._load_identity),
|
||||
(_load_filter_type(T.StringProperty), self._load_identity),
|
||||
(_load_filter_type(T.EnumProperty), self._load_identity),
|
||||
(_load_filter_type(T.PointerProperty), self._load_pointer),
|
||||
(_load_filter_array, self._load_array),
|
||||
(_load_filter_type(T.CollectionProperty), self._load_collection),
|
||||
(_load_filter_default, self._load_default),
|
||||
(_load_filter_color, self._load_identity),
|
||||
]
|
||||
|
||||
|
||||
# Utility functions
|
||||
|
||||
|
||||
def dump(any, depth=1):
|
||||
dumper = Dumper()
|
||||
dumper.depath = depth
|
||||
return dumper.dump(any)
|
||||
|
||||
def dump_datablock(datablock, depth):
|
||||
if datablock:
|
||||
dumper = Dumper()
|
||||
dumper.type_subset = dumper.match_subset_all
|
||||
dumper.depth = depth
|
||||
|
||||
datablock_type = datablock.bl_rna.name
|
||||
key = "{}/{}".format(datablock_type, datablock.name)
|
||||
data = dumper.dump(datablock)
|
||||
|
||||
return data
|
||||
|
||||
def load(dst, src):
|
||||
loader = Loader()
|
||||
# loader.match_subset_all = loader.match_subset_all
|
||||
loader.load(dst, src)
|
20890
libs/get-pip.py
@ -1,219 +0,0 @@
|
||||
"""
|
||||
Context Manager allowing temporary override of attributes
|
||||
|
||||
````python
|
||||
import bpy
|
||||
from overrider import Overrider
|
||||
|
||||
with Overrider(name='bpy_', parent=bpy) as bpy_:
|
||||
# set preview render settings
|
||||
bpy_.context.scene.render.use_file_extension = False
|
||||
bpy_.context.scene.render.resolution_x = 512
|
||||
bpy_.context.scene.render.resolution_y = 512
|
||||
bpy_.context.scene.render.use_file_extension = False
|
||||
bpy_.context.scene.render.image_settings.file_format = "JPEG"
|
||||
bpy_.context.scene.layers[10] = False
|
||||
|
||||
frame_start = action.frame_range[0]
|
||||
frame_end = action.frame_range[1]
|
||||
if begin_frame is not None:
|
||||
frame_start = begin_frame
|
||||
if end_frame is not None:
|
||||
frame_end = end_frame
|
||||
|
||||
# render
|
||||
window = bpy_.data.window_managers[0].windows[0]
|
||||
screen = bpy_.data.window_managers[0].windows[0].screen
|
||||
area = next(area for area in screen.areas if area.type == 'VIEW_3D')
|
||||
space = next(space for space in area.spaces if space.type == 'VIEW_3D')
|
||||
|
||||
space.viewport_shade = 'MATERIAL'
|
||||
space.region_3d.view_perspective = 'CAMERA'
|
||||
|
||||
override_context = {
|
||||
"window": window._real_value_(),
|
||||
"screen": screen._real_value_()
|
||||
}
|
||||
|
||||
if frame_start == frame_end:
|
||||
bpy.context.scene.frame_set(int(frame_start))
|
||||
bpy_.context.scene.render.filepath = os.path.join(directory, "icon.jpg")
|
||||
bpy.ops.render.opengl(override_context, write_still=True)
|
||||
|
||||
else:
|
||||
for icon_index, frame_number in enumerate(range(int(frame_start), int(frame_end) + 1)):
|
||||
bpy.context.scene.frame_set(frame_number)
|
||||
bpy.context.scene.render.filepath = os.path.join(directory, "icon", "{:04d}.jpg".format(icon_index))
|
||||
bpy.ops.render.opengl(override_context, write_still=True)
|
||||
````
|
||||
"""
|
||||
from collections import OrderedDict
|
||||
|
||||
|
||||
class OverrideIter:
|
||||
|
||||
def __init__(self, parent):
|
||||
self.parent = parent
|
||||
self.index = -1
|
||||
|
||||
def __next__(self):
|
||||
self.index += 1
|
||||
try:
|
||||
return self.parent[self.index]
|
||||
except IndexError as e:
|
||||
raise StopIteration
|
||||
|
||||
|
||||
class OverrideBase:
|
||||
|
||||
def __init__(self, context_manager, name=None, parent=None):
|
||||
self._name__ = name
|
||||
self._context_manager_ = context_manager
|
||||
self._parent_ = parent
|
||||
self._changed_attributes_ = OrderedDict()
|
||||
self._changed_items_ = OrderedDict()
|
||||
self._children_ = list()
|
||||
self._original_value_ = self._real_value_()
|
||||
|
||||
def __repr__(self):
|
||||
return "<{}({})>".format(self.__class__.__name__, self._path_)
|
||||
|
||||
@property
|
||||
def _name_(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def _path_(self):
|
||||
if isinstance(self._parent_, OverrideBase):
|
||||
return self._parent_._path_ + self._name_
|
||||
|
||||
return self._name_
|
||||
|
||||
def _real_value_(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _restore_(self):
|
||||
for attribute, original_value in reversed(self._changed_attributes_.items()):
|
||||
setattr(self._real_value_(), attribute, original_value)
|
||||
|
||||
for item, original_value in reversed(self._changed_items_.items()):
|
||||
self._real_value_()[item] = original_value
|
||||
|
||||
def __getattr__(self, attr):
|
||||
new_attribute = OverrideAttribute(self._context_manager_, name=attr, parent=self)
|
||||
self._children_.append(new_attribute)
|
||||
return new_attribute
|
||||
|
||||
def __getitem__(self, item):
|
||||
new_item = OverrideItem(self._context_manager_, name=item, parent=self)
|
||||
self._children_.append(new_item)
|
||||
return new_item
|
||||
|
||||
def __iter__(self):
|
||||
return OverrideIter(self)
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
if attr in (
|
||||
'_name__',
|
||||
'_context_manager_',
|
||||
'_parent_',
|
||||
'_children_',
|
||||
'_original_value_',
|
||||
'_changed_attributes_',
|
||||
'_changed_items_'
|
||||
):
|
||||
self.__dict__[attr] = value
|
||||
return
|
||||
|
||||
if attr not in self._changed_attributes_.keys():
|
||||
self._changed_attributes_[attr] = getattr(self._real_value_(), attr)
|
||||
self._context_manager_.register_as_changed(self)
|
||||
|
||||
setattr(self._real_value_(), attr, value)
|
||||
|
||||
def __setitem__(self, item, value):
|
||||
if item not in self._changed_items_.keys():
|
||||
self._changed_items_[item] = self._real_value_()[item]
|
||||
self._context_manager_.register_as_changed(self)
|
||||
|
||||
self._real_value_()[item] = value
|
||||
|
||||
def __eq__(self, other):
|
||||
return self._real_value_() == other
|
||||
|
||||
def __gt__(self, other):
|
||||
return self._real_value_() > other
|
||||
|
||||
def __lt__(self, other):
|
||||
return self._real_value_() < other
|
||||
|
||||
def __ge__(self, other):
|
||||
return self._real_value_() >= other
|
||||
|
||||
def __le__(self, other):
|
||||
return self._real_value_() <= other
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
# TODO : surround str value with quotes
|
||||
arguments = list([str(arg) for arg in args]) + ['{}={}'.format(key, value) for key, value in kwargs.items()]
|
||||
arguments = ', '.join(arguments)
|
||||
raise RuntimeError('Overrider does not allow call to {}({})'.format(self._path_, arguments))
|
||||
|
||||
|
||||
class OverrideRoot(OverrideBase):
|
||||
|
||||
@property
|
||||
def _name_(self):
|
||||
return self._name__
|
||||
|
||||
def _real_value_(self):
|
||||
return self._parent_
|
||||
|
||||
|
||||
class OverrideAttribute(OverrideBase):
|
||||
|
||||
@property
|
||||
def _name_(self):
|
||||
return '.{}'.format(self._name__)
|
||||
|
||||
def _real_value_(self):
|
||||
return getattr(self._parent_._real_value_(), self._name__)
|
||||
|
||||
|
||||
class OverrideItem(OverrideBase):
|
||||
|
||||
@property
|
||||
def _name_(self):
|
||||
if isinstance(self._name__, str):
|
||||
return '["{}"]'.format(self._name__)
|
||||
|
||||
return '[{}]'.format(self._name__)
|
||||
|
||||
def _real_value_(self):
|
||||
return self._parent_._real_value_()[self._name__]
|
||||
|
||||
|
||||
class Overrider:
|
||||
def __init__(self, name, parent):
|
||||
self.name = name
|
||||
self.parent = parent
|
||||
self.override = None
|
||||
self.registered_overrides = list()
|
||||
|
||||
def __enter__(self):
|
||||
self.override = OverrideRoot(
|
||||
context_manager=self,
|
||||
parent=self.parent,
|
||||
name=self.name
|
||||
)
|
||||
return self.override
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self.restore()
|
||||
|
||||
def register_as_changed(self, override):
|
||||
self.registered_overrides.append(override)
|
||||
|
||||
def restore(self):
|
||||
for override in reversed(self.registered_overrides):
|
||||
override._restore_()
|
BIN
medias/host.png
Before Width: | Height: | Size: 8.9 KiB |
BIN
medias/join.png
Before Width: | Height: | Size: 8.5 KiB |
Before Width: | Height: | Size: 12 KiB |
Before Width: | Height: | Size: 16 KiB |
BIN
medias/user.png
Before Width: | Height: | Size: 4.6 KiB |
BIN
medias/users.png
Before Width: | Height: | Size: 5.0 KiB |
104
multi_user/__init__.py
Normal file
@ -0,0 +1,104 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
|
||||
bl_info = {
|
||||
"name": "Multi-User",
|
||||
"author": "Swann Martinez",
|
||||
"version": (0, 1, 0),
|
||||
"description": "Enable real-time collaborative workflow inside blender",
|
||||
"blender": (2, 82, 0),
|
||||
"location": "3D View > Sidebar > Multi-User tab",
|
||||
"warning": "Unstable addon, use it at your own risks",
|
||||
"category": "Collaboration",
|
||||
"doc_url": "https://multi-user.readthedocs.io/en/develop/index.html",
|
||||
"wiki_url": "https://multi-user.readthedocs.io/en/develop/index.html",
|
||||
"tracker_url": "https://gitlab.com/slumber/multi-user/issues",
|
||||
"support": "COMMUNITY"
|
||||
}
|
||||
|
||||
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import sys
|
||||
|
||||
import bpy
|
||||
from bpy.app.handlers import persistent
|
||||
|
||||
from . import environment, utils
|
||||
|
||||
|
||||
DEPENDENCIES = {
|
||||
("replication", '0.0.21a15'),
|
||||
}
|
||||
|
||||
|
||||
module_error_msg = "Insufficient rights to install the multi-user \
|
||||
dependencies, launch blender with administrator rights."
|
||||
def register():
|
||||
# Setup logging policy
|
||||
logging.basicConfig(
|
||||
format='%(asctime)s CLIENT %(levelname)-8s %(message)s',
|
||||
datefmt='%H:%M:%S',
|
||||
level=logging.INFO)
|
||||
|
||||
try:
|
||||
environment.setup(DEPENDENCIES, bpy.app.binary_path_python)
|
||||
|
||||
from . import presence
|
||||
from . import operators
|
||||
from . import ui
|
||||
from . import preferences
|
||||
from . import addon_updater_ops
|
||||
|
||||
preferences.register()
|
||||
addon_updater_ops.register(bl_info)
|
||||
presence.register()
|
||||
operators.register()
|
||||
ui.register()
|
||||
except ModuleNotFoundError as e:
|
||||
logging.error(module_error_msg)
|
||||
raise Exception(module_error_msg)
|
||||
|
||||
bpy.types.WindowManager.session = bpy.props.PointerProperty(
|
||||
type=preferences.SessionProps)
|
||||
bpy.types.ID.uuid = bpy.props.StringProperty(
|
||||
default="",
|
||||
options={'HIDDEN', 'SKIP_SAVE'})
|
||||
bpy.types.WindowManager.online_users = bpy.props.CollectionProperty(
|
||||
type=preferences.SessionUser
|
||||
)
|
||||
bpy.types.WindowManager.user_index = bpy.props.IntProperty()
|
||||
|
||||
def unregister():
|
||||
from . import presence
|
||||
from . import operators
|
||||
from . import ui
|
||||
from . import preferences
|
||||
from . import addon_updater_ops
|
||||
|
||||
presence.unregister()
|
||||
addon_updater_ops.unregister()
|
||||
ui.unregister()
|
||||
operators.unregister()
|
||||
preferences.unregister()
|
||||
|
||||
del bpy.types.WindowManager.session
|
||||
del bpy.types.ID.uuid
|
||||
del bpy.types.WindowManager.online_users
|
||||
del bpy.types.WindowManager.user_index
|
1715
multi_user/addon_updater.py
Normal file
1511
multi_user/addon_updater_ops.py
Normal file
48
multi_user/bl_types/__init__.py
Normal file
@ -0,0 +1,48 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
|
||||
__all__ = [
|
||||
'bl_object',
|
||||
'bl_mesh',
|
||||
'bl_camera',
|
||||
'bl_collection',
|
||||
'bl_curve',
|
||||
'bl_gpencil',
|
||||
'bl_image',
|
||||
'bl_light',
|
||||
'bl_scene',
|
||||
'bl_material',
|
||||
'bl_library',
|
||||
'bl_armature',
|
||||
'bl_action',
|
||||
'bl_world',
|
||||
'bl_metaball',
|
||||
'bl_lattice',
|
||||
'bl_lightprobe',
|
||||
'bl_speaker',
|
||||
'bl_font',
|
||||
'bl_sound',
|
||||
'bl_file'
|
||||
] # Order here defines execution order
|
||||
|
||||
from . import *
|
||||
from replication.data import ReplicatedDataFactory
|
||||
|
||||
def types_to_register():
|
||||
return __all__
|
||||
|
180
multi_user/bl_types/bl_action.py
Normal file
@ -0,0 +1,180 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
import copy
|
||||
import numpy as np
|
||||
from enum import Enum
|
||||
|
||||
from .. import utils
|
||||
from .dump_anything import (
|
||||
Dumper, Loader, np_dump_collection, np_load_collection, remove_items_from_dict)
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
|
||||
KEYFRAME = [
|
||||
'amplitude',
|
||||
'co',
|
||||
'back',
|
||||
'handle_left',
|
||||
'handle_right',
|
||||
'easing',
|
||||
'handle_left_type',
|
||||
'handle_right_type',
|
||||
'type',
|
||||
'interpolation',
|
||||
]
|
||||
|
||||
|
||||
def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy:bool =True) -> dict:
|
||||
""" Dump a sigle curve to a dict
|
||||
|
||||
:arg fcurve: fcurve to dump
|
||||
:type fcurve: bpy.types.FCurve
|
||||
:arg use_numpy: use numpy to accelerate the dump
|
||||
:type use_numpy: bool
|
||||
:return: dict
|
||||
"""
|
||||
fcurve_data = {
|
||||
"data_path": fcurve.data_path,
|
||||
"dumped_array_index": fcurve.array_index,
|
||||
"use_numpy": use_numpy
|
||||
}
|
||||
|
||||
if use_numpy:
|
||||
points = fcurve.keyframe_points
|
||||
fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)
|
||||
fcurve_data['keyframe_points'] = np_dump_collection(points, KEYFRAME)
|
||||
|
||||
else: # Legacy method
|
||||
dumper = Dumper()
|
||||
fcurve_data["keyframe_points"] = []
|
||||
|
||||
for k in fcurve.keyframe_points:
|
||||
fcurve_data["keyframe_points"].append(
|
||||
dumper.dump(k)
|
||||
)
|
||||
|
||||
return fcurve_data
|
||||
|
||||
|
||||
def load_fcurve(fcurve_data, fcurve):
|
||||
""" Load a dumped fcurve
|
||||
|
||||
:arg fcurve_data: a dumped fcurve
|
||||
:type fcurve_data: dict
|
||||
:arg fcurve: fcurve to dump
|
||||
:type fcurve: bpy.types.FCurve
|
||||
"""
|
||||
use_numpy = fcurve_data.get('use_numpy')
|
||||
|
||||
keyframe_points = fcurve.keyframe_points
|
||||
|
||||
# Remove all keyframe points
|
||||
for i in range(len(keyframe_points)):
|
||||
keyframe_points.remove(keyframe_points[0], fast=True)
|
||||
|
||||
if use_numpy:
|
||||
keyframe_points.add(fcurve_data['keyframes_count'])
|
||||
np_load_collection(fcurve_data["keyframe_points"], keyframe_points, KEYFRAME)
|
||||
|
||||
else:
|
||||
# paste dumped keyframes
|
||||
for dumped_keyframe_point in fcurve_data["keyframe_points"]:
|
||||
if dumped_keyframe_point['type'] == '':
|
||||
dumped_keyframe_point['type'] = 'KEYFRAME'
|
||||
|
||||
new_kf = keyframe_points.insert(
|
||||
dumped_keyframe_point["co"][0],
|
||||
dumped_keyframe_point["co"][1],
|
||||
options={'FAST', 'REPLACE'}
|
||||
)
|
||||
|
||||
keycache = copy.copy(dumped_keyframe_point)
|
||||
keycache = remove_items_from_dict(
|
||||
keycache,
|
||||
["co", "handle_left", "handle_right", 'type']
|
||||
)
|
||||
|
||||
loader = Loader()
|
||||
loader.load(new_kf, keycache)
|
||||
|
||||
new_kf.type = dumped_keyframe_point['type']
|
||||
new_kf.handle_left = [
|
||||
dumped_keyframe_point["handle_left"][0],
|
||||
dumped_keyframe_point["handle_left"][1]
|
||||
]
|
||||
new_kf.handle_right = [
|
||||
dumped_keyframe_point["handle_right"][0],
|
||||
dumped_keyframe_point["handle_right"][1]
|
||||
]
|
||||
|
||||
fcurve.update()
|
||||
|
||||
|
||||
class BlAction(BlDatablock):
|
||||
bl_id = "actions"
|
||||
bl_class = bpy.types.Action
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'ACTION_TWEAK'
|
||||
|
||||
def _construct(self, data):
|
||||
return bpy.data.actions.new(data["name"])
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
for dumped_fcurve in data["fcurves"]:
|
||||
dumped_data_path = dumped_fcurve["data_path"]
|
||||
dumped_array_index = dumped_fcurve["dumped_array_index"]
|
||||
|
||||
# create fcurve if needed
|
||||
fcurve = target.fcurves.find(
|
||||
dumped_data_path, index=dumped_array_index)
|
||||
if fcurve is None:
|
||||
fcurve = target.fcurves.new(
|
||||
dumped_data_path, index=dumped_array_index)
|
||||
|
||||
load_fcurve(dumped_fcurve, fcurve)
|
||||
target.id_root = data['id_root']
|
||||
|
||||
def _dump_implementation(self, data, instance=None):
|
||||
dumper = Dumper()
|
||||
dumper.exclude_filter = [
|
||||
'name_full',
|
||||
'original',
|
||||
'use_fake_user',
|
||||
'user',
|
||||
'is_library_indirect',
|
||||
'select_control_point',
|
||||
'select_right_handle',
|
||||
'select_left_handle',
|
||||
'uuid',
|
||||
'users'
|
||||
]
|
||||
dumper.depth = 1
|
||||
data = dumper.dump(instance)
|
||||
|
||||
data["fcurves"] = []
|
||||
|
||||
for fcurve in instance.fcurves:
|
||||
data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True))
|
||||
|
||||
return data
|
150
multi_user/bl_types/bl_armature.py
Normal file
@ -0,0 +1,150 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####


import bpy
import mathutils

from .dump_anything import Loader, Dumper
from .. import presence, operators, utils
from .bl_datablock import BlDatablock


class BlArmature(BlDatablock):
    bl_id = "armatures"
    bl_class = bpy.types.Armature
    bl_delay_refresh = 1
    bl_delay_apply = 0
    bl_automatic_push = True
    bl_check_common = False
    bl_icon = 'ARMATURE_DATA'

    def _construct(self, data):
        return bpy.data.armatures.new(data["name"])

    def _load_implementation(self, data, target):
        # Load parent object
        parent_object = utils.find_from_attr(
            'uuid',
            data['user'],
            bpy.data.objects
        )

        if parent_object is None:
            parent_object = bpy.data.objects.new(
                data['user_name'], target)
            parent_object.uuid = data['user']

        is_object_in_master = (
            data['user_collection'][0] == "Master Collection")
        # TODO: recursive parent collection loading
        # Link parent object to the collection
        if is_object_in_master:
            parent_collection = bpy.data.scenes[data['user_scene'][0]].collection
        elif data['user_collection'][0] not in bpy.data.collections.keys():
            parent_collection = bpy.data.collections.new(
                data['user_collection'][0])
        else:
            parent_collection = bpy.data.collections[data['user_collection'][0]]

        if parent_object.name not in parent_collection.objects:
            parent_collection.objects.link(parent_object)

        # Link parent collection to the scene master collection
        if not is_object_in_master and parent_collection.name not in bpy.data.scenes[data['user_scene'][0]].collection.children:
            bpy.data.scenes[data['user_scene'][0]].collection.children.link(parent_collection)

        current_mode = bpy.context.mode
        current_active_object = bpy.context.view_layer.objects.active

        # LOAD ARMATURE BONES
        if bpy.context.mode != 'OBJECT':
            bpy.ops.object.mode_set(mode='OBJECT')
        bpy.context.view_layer.objects.active = parent_object

        bpy.ops.object.mode_set(mode='EDIT')

        for bone in data['bones']:
            if bone not in target.edit_bones:
                new_bone = target.edit_bones.new(bone)
            else:
                new_bone = target.edit_bones[bone]

            bone_data = data['bones'].get(bone)

            new_bone.tail = bone_data['tail_local']
            new_bone.head = bone_data['head_local']
            new_bone.tail_radius = bone_data['tail_radius']
            new_bone.head_radius = bone_data['head_radius']
            # new_bone.roll = bone_data['roll']

            if 'parent' in bone_data:
                new_bone.parent = target.edit_bones[data['bones'][bone]['parent']]
            new_bone.use_connect = bone_data['use_connect']

            loader = Loader()
            loader.load(new_bone, bone_data)

        if bpy.context.mode != 'OBJECT':
            bpy.ops.object.mode_set(mode='OBJECT')
        bpy.context.view_layer.objects.active = current_active_object

        # TODO: clean way to restore previous context
        if 'EDIT' in current_mode:
            bpy.ops.object.mode_set(mode='EDIT')

    def _dump_implementation(self, data, instance=None):
        assert(instance)

        dumper = Dumper()
        dumper.depth = 4
        dumper.include_filter = [
            'bones',
            'tail_local',
            'head_local',
            'tail_radius',
            'head_radius',
            'use_connect',
            'parent',
            'name',
            'layers',
            # 'roll',
        ]
        data = dumper.dump(instance)

        for bone in instance.bones:
            if bone.parent:
                data['bones'][bone.name]['parent'] = bone.parent.name
        # get the parent Object
        object_users = utils.get_datablock_users(instance)[0]
        data['user'] = object_users.uuid
        data['user_name'] = object_users.name

        # get parent collection
        container_users = utils.get_datablock_users(object_users)
        data['user_collection'] = [
            item.name for item in container_users if isinstance(item, bpy.types.Collection)]
        data['user_scene'] = [
            item.name for item in container_users if isinstance(item, bpy.types.Scene)]
        return data
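The _load_implementation above switches into edit mode by hand and restores the previous active object and mode afterwards (note the "# TODO: clean way to restore previous context" comment). One possible clean-up, sketched here purely as a suggestion and not part of this patch, is a small context manager that performs the save/switch/restore dance; the helper name and its usage are hypothetical.

# Hypothetical helper, not part of the patch: wraps the mode save/switch/restore
# pattern used while loading bones into target.edit_bones.
import bpy
from contextlib import contextmanager

@contextmanager
def object_mode(obj, mode='EDIT'):
    prev_active = bpy.context.view_layer.objects.active
    prev_mode = bpy.context.mode
    if prev_mode != 'OBJECT':
        bpy.ops.object.mode_set(mode='OBJECT')
    bpy.context.view_layer.objects.active = obj
    bpy.ops.object.mode_set(mode=mode)
    try:
        yield obj
    finally:
        # Restore object mode, the previous active object and, like the patch,
        # only re-enter edit mode if that is where the user started.
        bpy.ops.object.mode_set(mode='OBJECT')
        bpy.context.view_layer.objects.active = prev_active
        if 'EDIT' in prev_mode:
            bpy.ops.object.mode_set(mode='EDIT')

# Usage sketch:
# with object_mode(parent_object, mode='EDIT'):
#     ...  # create and configure target.edit_bones here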