Compare commits
580 Commits
v0.1.0...arm64-dock
.gitignore (vendored, 3 changes)
@@ -13,4 +13,5 @@ multi_user_updater/
_build

# ignore generated zip generated from blender_addon_tester
*.zip
*.zip
libs
@@ -1,9 +1,43 @@
stages:
  - test
  - build
  - deploy
  # - test
  # - build-addon-zip
  # - build-amd64
  - build-arm64
  # - doc

include:
  - local: .gitlab/ci/test.gitlab-ci.yml
  - local: .gitlab/ci/build.gitlab-ci.yml
  - local: .gitlab/ci/deploy.gitlab-ci.yml


# include:
#   # - local: .gitlab/ci/test.gitlab-ci.yml
#   # - local: .gitlab/ci/build-addon.gitlab-ci.yml
#   - local: .gitlab/ci/build-image.gitlab-ci.yml
#   # - local: .gitlab/ci/doc.gitlab-ci.yml

build-arm64:
  stage: build-arm64
  # needs: ["build-addon-zip"]
  image: slumber/docker-python
  variables:
    DOCKER_DRIVER: overlay2
    DOCKER_TLS_CERTDIR: "/certs"
    GIT_SUBMODULE_STRATEGY: recursive

  services:
    - docker:19.03.12-dind
  before_script:
    - apk add curl
    - mkdir -p ~/.docker/cli-plugins
    - curl -sSLo ~/.docker/cli-plugins/docker-buildx https://github.com/docker/buildx/releases/download/$BUILDX_VERSION/buildx-$BUILDX_VERSION.linux-amd64
    - chmod +x ~/.docker/cli-plugins/docker-buildx
    - docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
    - docker info
  script:
    - RP_VERSION="$(python scripts/get_replication_version.py)"
    - VERSION="$(python scripts/get_addon_version.py)"
    - echo "Building docker image with replication ${RP_VERSION}"
    - docker buildx create arm64 --use
    - docker buildx build --platform linux/arm64 --build-arg version={VERSION} --tag registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION}-arm64 ./scripts/docker_server
    - echo "Pushing to gitlab registry ${VERSION}-arm64"
    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
    - docker tag registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION}-arm64 registry.gitlab.com/slumber/multi-user/multi-user-server:${CI_COMMIT_REF_NAME}-arm64
    - docker push registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION}-arm64
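The job above derives its image tag from two helper scripts, scripts/get_addon_version.py and scripts/get_replication_version.py. As a rough, hypothetical sketch of what such a helper can look like (the repository's real scripts may be implemented differently, and the bl_info location is an assumption):

# Hypothetical sketch of a version helper such as scripts/get_addon_version.py:
# print the addon version so a CI job can reuse it as a docker image tag.
import ast
import pathlib

init_py = pathlib.Path(__file__).resolve().parent.parent / "multi_user" / "__init__.py"
module = ast.parse(init_py.read_text())

for node in ast.walk(module):
    if isinstance(node, ast.Assign) and any(
        isinstance(target, ast.Name) and target.id == "bl_info" for target in node.targets
    ):
        bl_info = ast.literal_eval(node.value)  # e.g. {"version": (0, 5, 0), ...}
        print(".".join(str(part) for part in bl_info["version"]))
        break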
@@ -1,5 +1,6 @@
build:
  stage: build
build-addon-zip:
  stage: build-addon-zip
  needs: ["test"]
  image: debian:stable-slim
  script:
    - rm -rf tests .git .gitignore script
@@ -7,4 +8,5 @@ build:
    name: multi_user
    paths:
      - multi_user

  variables:
    GIT_SUBMODULE_STRATEGY: recursive
.gitlab/ci/build-image.gitlab-ci.yml (new file, 50 lines)
@@ -0,0 +1,50 @@
build-amd64:
  stage: build-amd64
  needs: ["build-addon-zip"]
  image: slumber/docker-python
  variables:
    DOCKER_DRIVER: overlay2
    DOCKER_TLS_CERTDIR: "/certs"
    GIT_SUBMODULE_STRATEGY: recursive

  services:
    - docker:19.03.12-dind

  script:
    - RP_VERSION="$(python scripts/get_replication_version.py)"
    - VERSION="$(python scripts/get_addon_version.py)"
    - echo "Building docker image with replication ${RP_VERSION}"
    - docker build --build-arg replication_version=${RP_VERSION} --build-arg version={VERSION} -t registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} ./scripts/docker_server
    - echo "Pushing to gitlab registry ${VERSION}"
    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
    - docker tag registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} registry.gitlab.com/slumber/multi-user/multi-user-server:${CI_COMMIT_REF_NAME}
    - docker push registry.gitlab.com/slumber/multi-user/multi-user-server

build-arm64:
  stage: build-arm64
  # needs: ["build-addon-zip"]
  image: slumber/docker-python
  variables:
    DOCKER_DRIVER: overlay2
    DOCKER_TLS_CERTDIR: "/certs"
    GIT_SUBMODULE_STRATEGY: recursive

  services:
    - docker:19.03.12-dind
  before_script:
    - apk add curl
    - mkdir -p ~/.docker/cli-plugins
    - curl -sSLo ~/.docker/cli-plugins/docker-buildx https://github.com/docker/buildx/releases/download/$BUILDX_VERSION/buildx-$BUILDX_VERSION.linux-amd64
    - chmod +x ~/.docker/cli-plugins/docker-buildx
    - docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
    - docker info
  script:
    - RP_VERSION="$(python scripts/get_replication_version.py)"
    - VERSION="$(python scripts/get_addon_version.py)"
    - echo "Building docker image with replication ${RP_VERSION}"
    - docker buildx create arm64 --use
    - docker buildx build --platform linux/arm64 --build-arg version={VERSION} --tag registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION}-arm64 ./scripts/docker_server
    - echo "Pushing to gitlab registry ${VERSION}-arm64"
    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
    - docker tag registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION}-arm64 registry.gitlab.com/slumber/multi-user/multi-user-server:${CI_COMMIT_REF_NAME}-arm64
    - docker push registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION}-arm64
@@ -1,18 +0,0 @@
deploy:
  stage: deploy
  image: slumber/docker-python
  variables:
    DOCKER_DRIVER: overlay2
    DOCKER_TLS_CERTDIR: "/certs"

  services:
    - docker:19.03.12-dind

  script:
    - RP_VERSION="$(python scripts/get_replication_version.py)"
    - VERSION="$(python scripts/get_addon_version.py)"
    - echo "Building docker image with replication ${RP_VERSION}"
    - docker build --build-arg replication_version=${RP_VERSION} --build-arg version={VERSION} -t registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} ./scripts/docker_server
    - echo "Pushing to gitlab registry ${VERSION}"
    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
    - docker push registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION}
.gitlab/ci/doc.gitlab-ci.yml (new file, 16 lines)
@@ -0,0 +1,16 @@
pages:
  stage: doc
  needs: ["build-arm64","build-amd64"]
  image: python
  script:
    - pip install -U sphinx sphinx_rtd_theme sphinx-material
    - sphinx-build -b html ./docs public
  artifacts:
    paths:
      - public
  only:
    refs:
      - master
      - develop
@@ -3,3 +3,5 @@ test:
  image: slumber/blender-addon-testing:latest
  script:
    - python3 scripts/test_addon.py
  variables:
    GIT_SUBMODULE_STRATEGY: recursive
.gitmodules (vendored, 3 changes)
@@ -0,0 +1,3 @@
[submodule "multi_user/libs/replication"]
  path = multi_user/libs/replication
  url = https://gitlab.com/slumber/replication.git
CHANGELOG.md (125 changes)
@@ -65,7 +65,7 @@ All notable changes to this project will be documented in this file.
- Unused strict right management strategy
- Legacy config management system

## [0.1.0] - preview
## [0.1.0] - 2020-10-05

### Added

@@ -95,4 +95,125 @@ All notable changes to this project will be documented in this file.
- Modifier vertex group assignation
- World sync
- Snapshot UUID error
- The world is not synchronized
- The world is not synchronized

## [0.1.1] - 2020-10-16

### Added

- Session status widget
- Affect dependencies during change owner
- Dedicated server managment scripts(@brybalicious)

### Changed

- Refactored presence.py
- Reset button UI icon
- Documentation `How to contribute` improvements (@brybalicious)
- Documentation `Hosting guide` improvements (@brybalicious)
- Show flags are now available from the viewport overlay

### Fixed

- Render sync race condition (causing scene errors)
- Binary differentials
- Hybrid session crashes between Linux/Windows
- Materials node default output value
- Right selection
- Client node rights changed to COMMON after disconnecting from the server
- Collection instances selection draw
- Packed image save error
- Material replication
- UI spelling errors (@brybalicious)


## [0.2.0] - 2020-12-17

### Added

- Documentation `Troubleshouting` section (@brybalicious)
- Documentation `Update` section (@brybalicious)
- Documentation `Cloud Hosting Walkthrough` (@brybalicious)
- Support DNS name
- Sync annotations
- Sync volume objects
- Sync material node_goups
- Sync VSE
- Sync grease pencil modifiers
- Sync textures (modifier only)
- Session status widget
- Disconnection popup
- Popup with disconnection reason


### Changed

- Improved GPencil performances

### Fixed

- Texture paint update
- Various documentation fixes section (@brybalicious)
- Empty and Light object selection highlights
- Material renaming
- Default material nodes input parameters
- blender 2.91 python api compatibility

## [0.3.0] - 2021-04-14

### Added

- Curve material support
- Cycle visibility settings
- Session save/load operator
- Add new scene support
- Physic initial support
- Geometry node initial support
- Blender 2.93 compatibility
### Changed

- Host documentation on Gitlab Page
- Event driven update (from the blender deps graph)

### Fixed

- Vertex group assignation
- Parent relation can't be removed
- Separate object
- Delete animation
- Sync missing holdout option for grease pencil material
- Sync missing `skin_vertices`
- Exception access violation during Undo/Redo
- Sync missing armature bone Roll
- Sync missing driver data_path
- Constraint replication

## [0.4.0] - 2021-07-20

### Added

- Connection preset system (@Kysios)
- Display connected users active mode (users pannel and viewport) (@Kysios)
- Delta-based replication
- Sync timeline marker
- Sync images settings (@Kysios)
- Sync parent relation type (@Kysios)
- Sync uv project modifier
- Sync FCurves modifiers

### Changed

- User selection optimizations (draw and sync) (@Kysios)
- Improved shapekey syncing performances
- Improved gpencil syncing performances
- Integrate replication as a submodule
- The dependencies are now installed in a folder(blender addon folder) that no longer requires administrative rights
- Presence overlay UI optimization (@Kysios)

### Fixed

- User selection bounding box glitches for non-mesh objects (@Kysios)
- Transforms replication for animated objects
- GPencil fill stroke
- Sculpt and GPencil brushes deleted when joining a session (@Kysios)
- Auto-updater doesn't work for master and develop builds
README.md (69 changes)
@@ -11,46 +11,53 @@ This tool aims to allow multiple users to work on the same scene over the networ

## Quick installation

1. Download latest release [multi_user.zip](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build).
2. Run blender as administrator (dependencies installation).
3. Install last_version.zip from your addon preferences.
1. Download [latest build](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/develop/download?job=build) or [stable build](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build).
2. Install last_version.zip from your addon preferences.

[Dependencies](#dependencies) will be automatically added to your blender python during installation.
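How an addon typically pulls packages into Blender's bundled Python is worth a note here. The snippet below is a generic illustration of that mechanism, not this addon's actual code; it assumes a recent Blender where sys.executable points at the bundled interpreter, and the dependency names are placeholders.

# Generic illustration: install a missing module with Blender's own pip at runtime.
import importlib
import subprocess
import sys

def ensure_module(module_name, pip_name=None):
    """Import module_name, installing it with the running interpreter's pip if missing."""
    try:
        importlib.import_module(module_name)
    except ImportError:
        subprocess.run(
            [sys.executable, "-m", "pip", "install", pip_name or module_name],
            check=True,
        )

ensure_module("zmq", pip_name="pyzmq")  # placeholder dependency names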
## Usage

See the [documentation](https://multi-user.readthedocs.io/en/latest/) for details.
See the [documentation](https://slumber.gitlab.io/multi-user/index.html) for details.

## Troubleshooting

See the [troubleshooting guide](https://slumber.gitlab.io/multi-user/getting_started/troubleshooting.html) for tips on the most common issues.

## Current development status

Currently, not all data-block are supported for replication over the wire. The following list summarizes the status for each ones.

| Name | Status | Comment |
| ----------- | :----: | :--------------------------------------------------------------------------: |
| action | ✔️ | |
| armature | ❗ | Not stable |
| camera | ✔️ | |
| collection | ✔️ | |
| curve | ❗ | Nurbs not supported |
| gpencil | ✔️ | [Airbrush not supported](https://gitlab.com/slumber/multi-user/-/issues/123) |
| image | ✔️ | |
| mesh | ✔️ | |
| material | ✔️ | |
| metaball | ✔️ | |
| object | ✔️ | |
| texts | ✔️ | |
| scene | ✔️ | |
| world | ✔️ | |
| lightprobes | ✔️ | |
| compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
| nla | ❌ | |
| volumes | ❌ | |
| particles | ❌ | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24) |
| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
| vse | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
| libraries | ❗ | Partial |
| Name | Status | Comment |
| -------------- | :----: | :---------------------------------------------------------------------: |
| action | ✔️ | |
| camera | ✔️ | |
| collection | ✔️ | |
| gpencil | ✔️ | |
| image | ✔️ | |
| mesh | ✔️ | |
| material | ✔️ | |
| node_groups | ✔️ | Material & Geometry only |
| geometry nodes | ✔️ | |
| metaball | ✔️ | |
| object | ✔️ | |
| texts | ✔️ | |
| scene | ✔️ | |
| world | ✔️ | |
| volumes | ✔️ | |
| lightprobes | ✔️ | |
| physics | ✔️ | |
| textures | ✔️ | |
| curve | ❗ | Nurbs surfaces not supported |
| armature | ❗ | Only for Mesh. [Planned for GPencil](https://gitlab.com/slumber/multi-user/-/issues/161). Not stable yet |
| particles | ❗ | The cache isn't syncing. |
| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
| vse | ❗ | Mask and Clip not supported yet |
| libraries | ❌ | |
| nla | ❌ | |
| texts | ❌ | [Planned for v0.5.0](https://gitlab.com/slumber/multi-user/-/issues/81) |
| compositing | ❌ | [Planned for v0.5.0](https://gitlab.com/slumber/multi-user/-/issues/46) |



### Performance issues

@@ -68,7 +75,7 @@ I'm working on it.

## Contributing

See [contributing section](https://multi-user.readthedocs.io/en/latest/ways_to_contribute.html) of the documentation.
See [contributing section](https://slumber.gitlab.io/multi-user/ways_to_contribute.html) of the documentation.

Feel free to [join the discord server](https://discord.gg/aBPvGws) to chat, seek help and contribute.
@@ -19,10 +19,10 @@ import sys

project = 'multi-user'
copyright = '2020, Swann Martinez'
author = 'Swann Martinez'
author = 'Swann Martinez, Poochy, Fabian'

# The full version, including alpha/beta/rc tags
release = '0.0.2'
release = '0.5.0-develop'


# -- General configuration ---------------------------------------------------
@@ -9,14 +9,14 @@ Glossary

administrator

*A session administrator can manage users (kick) and have a write access on
each datablock. He could also init a dedicated server repository.*
*A session administrator can manage users (kick) and hold write access on
each datablock. They can also init a dedicated server repository.*

.. _session-status:

session status

*Located in the title of the multi-user panel, the session status show
*Located in the title of the multi-user panel, the session status shows
you the connection state.*

.. figure:: img/quickstart_session_status.png
@@ -24,7 +24,7 @@ Glossary

   Session status in panel title bar

All possible state are listed here with their meaning:*
All possible connection states are listed here with their meaning:*

+--------------------+---------------------------------------------------------------------------------------------+
| State | Description |
@@ -33,7 +33,7 @@ Glossary
+--------------------+---------------------------------------------------------------------------------------------+
| FETCHING | Dowloading snapshot from the server |
+--------------------+---------------------------------------------------------------------------------------------+
| AUTHENTIFICATION | Initial server authentication |
| AUTHENTICATION | Initial server authentication |
+--------------------+---------------------------------------------------------------------------------------------+
| ONLINE | Connected to the session |
+--------------------+---------------------------------------------------------------------------------------------+
@@ -55,5 +55,5 @@ Glossary

common right

When a data block is under common right, it is available for everyone to modification.
The rights will be given to the user that select it first.
When a data block is under common right, it is available to everyone for modification.
The rights will be given to the user that selects it first.
BIN docs/getting_started/img/quickstart_cancel_save_session_data.png (new file, 14 KiB)
BIN docs/getting_started/img/quickstart_import_session_data.png (new file, 106 KiB)
BIN (unnamed modified image): 9.7 KiB -> 15 KiB
BIN (unnamed modified image): 22 KiB -> 13 KiB
BIN (unnamed modified image): 7.1 KiB -> 559 B
BIN (unnamed modified image): 15 KiB -> 22 KiB
BIN docs/getting_started/img/quickstart_save_session_data.png (new file, 18 KiB)
BIN docs/getting_started/img/quickstart_save_session_data_cancel.png (new file, 20 KiB)
BIN docs/getting_started/img/quickstart_save_session_data_dialog.png (new file, 80 KiB)
BIN docs/getting_started/img/quickstart_status.png (new file, 365 KiB)
BIN (unnamed modified image): 18 KiB -> 26 KiB
BIN docs/getting_started/img/server_preset_exemple.gif (new file, 320 KiB)
BIN docs/getting_started/img/server_preset_image_add.png (new file, 7.3 KiB)
BIN docs/getting_started/img/server_preset_image_admin.png (new file, 4.2 KiB)
BIN docs/getting_started/img/server_preset_image_normal_server.png (new file, 9.0 KiB)
BIN docs/getting_started/img/server_preset_image_report.png (new file, 3.2 KiB)
BIN docs/getting_started/img/update_1.jpg (new file, 223 KiB)
BIN docs/getting_started/img/update_2.jpg (new file, 209 KiB)
BIN docs/getting_started/img/update_3.jpg (new file, 217 KiB)
BIN docs/getting_started/img/update_4.jpg (new file, 226 KiB)
@@ -8,4 +8,5 @@ Getting started

   install
   quickstart
   troubleshooting
   glossary
@@ -5,9 +5,54 @@ Installation
.. hint::
   The process is the same for linux, mac and windows.

1. Download latest `release <https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build>`_ or `develop (unstable !) <https://gitlab.com/slumber/multi-user/-/jobs/artifacts/develop/download?job=build>`_ build.
1. Download `LATEST build <https://gitlab.com/slumber/multi-user/-/jobs/artifacts/develop/download?job=build>`_ or `STABLE build <https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build>`_.
2. Run blender as administrator (to allow python dependencies auto-installation).
3. Install **multi-user.zip** from your addon preferences.

Once the addon is succesfully installed, I strongly recommend you to follow the :ref:`quickstart`
tutorial.
tutorial.

.. _update-version:

Updates
=======

Multi-User has a built-in auto-update function.

1. Navigate to Edit >> Preferences pane in Blender, and go to the 'Add-ons' section.
2. Search 'multi-user', select the 'Update' tab, click 'Auto-check for Update' and choose the frequency you'd like.
3. Make sure to click the three bars in the bottom-left, and save this to your preferences (userpref.blend).

Sometimes you'd like to perform manual update, or even side-grade or rollback your multi-user version. Perhaps you are trying out new features from the 'develop' branch in a test session.

1. Click on 'Check now for multiuser update'. Multi-user will now find new versions

.. figure:: img/update_1.jpg
   :align: center
   :width: 300px

   Check for updates

2. Select 'Install latest master / old version'

.. figure:: img/update_2.jpg
   :align: center
   :width: 300px

   Install

3. In most cases, select 'master' branch for the latest stable release. The unstable 'develop' branch and older releases are available

.. figure:: img/update_3.jpg
   :align: center
   :width: 300px

   Select version

4. Finally, restart blender to use the updated version

.. figure:: img/update_4.jpg
   :align: center
   :width: 300px

   Restart blender
@@ -5,10 +5,10 @@
===========

.. hint::
   *All session related settings are located under: `View3D -> Sidebar -> Multiuser panel`*
   *All session-related settings are located under: `View3D -> Sidebar -> Multiuser panel`*

The multi-user is based on a session management system.
In this this guide you will quickly learn how to use the collaborative session system in three part:
The multi-user addon provides a session management system.
In this guide, you will quickly learn how to use the collaborative session management system in three parts:

- :ref:`how-to-host`
- :ref:`how-to-join`
@@ -19,22 +19,22 @@ In this this guide you will quickly learn how to use the collaborative session s
How to host a session
=====================

The multi-user add-on rely on a Client-Server architecture.
The server is the heart of the collaborative session,
it will allow each users to communicate with each others.
The multi-user add-on relies on a Client-Server architecture.
The server is the heart of the collaborative session.
It is what allows user's blender instances to communicate with each other.
In simple terms, *Hosting a session* means *run a local server and connect the local client to it*.
When I said **local server** I mean accessible from the LAN (Local Area Network).
When I say **local server** I mean a server which is accessible from the LAN (Local Area Network) without requiring an internet connection.

However sometime you will need to host a session over the internet,
in this case I strongly recommand you to read the :ref:`internet-guide` tutorial.
However, there are times when you will need to host a session over the internet.
In this case, I strongly recommend that you read the :ref:`internet-guide` tutorial.

.. _user-info:

-----------------------------
1. Fill your user information
-----------------------------
--------------------------------
1. Fill in your user information
--------------------------------

The **User Info** panel (See image below) allow you to constomize your online identity.
The **User Info** panel (See image below) allows you to customise your online identity.

.. figure:: img/quickstart_user_info.png
   :align: center
@@ -42,38 +42,38 @@ The **User Info** panel (See image below) allow you to constomize your online id
   User info panel


Let's fill those tow field:
Let's fill in those two fields:

- **name**: your online name.
- **color**: a color used to represent you into other user workspace(see image below).
- **color**: a color used to represent you in other users' workspaces (see image below).


During online sessions, other users will see your selected object and camera hilghlited in your profile color.
During online sessions, other users will see your selected object and camera highlighted in your profile color.

.. _user-representation:

.. figure:: img/quickstart_user_representation.png
   :align: center

   User viewport representation
   User viewport representation aka 'User Presence'

--------------------
2. Setup the network
--------------------
---------------------
2. Set up the network
---------------------

When the hosting process will start, the multi-user addon will lauch a local server instance.
In the nerwork panel select **HOST**.
The **Host sub-panel** (see image below) allow you to configure the server according to:
When the hosting process starts, the multi-user addon will launch a local server instance.
In the network panel, select **HOST**.
The **Host sub-panel** (see image below) allows you to configure the server according to:

* **Port**: Port on wich the server is listening.
* **Port**: Port on which the server is listening.
* **Start from**: The session initialisation method.

  * **current scenes**: Start with the current blendfile datas.
  * **an empty scene**: Clear a data and start over.
  * **current scenes**: Start with the data loaded in the current blend file.
  * **an empty scene**: Clear the blend file's data and start over.

.. danger::
   By starting from an empty, all of the blend data will be removed !
   Ensure to save your existing work before launching the session.
   By starting from an empty scene, all of the blend data will be removed!
   Be sure to save your existing work before launching the session.

* **Admin password**: The session administration password.

@@ -84,16 +84,16 @@ The **Host sub-panel** (see image below) allow you to configure the server accor
   Host network panel


.. note:: Additionnal configuration setting can be found in the :ref:`advanced` section.
.. note:: Additional configuration setting can be found in the :ref:`advanced` section.

Once everything is setup you can hit the **HOST** button to launch the session !
Once everything is set up, you can hit the **HOST** button to launch the session!

It will do two things:
This will do two things:

* Start a local server
* Connect you to it as an :ref:`admin`

During online session, various actions are available to you, go to :ref:`how-to-manage` section to
During an online session, various actions are available to you, go to :ref:`how-to-manage` section to
learn more about them.

.. _how-to-join:
@@ -101,55 +101,88 @@ learn more about them.
How to join a session
=====================

This section describe how join a launched session.
Before starting make sure that you have access to the session ip and port.
This section describes how join a launched session.
Before starting make sure that you have access to the session IP address and port number.

-----------------------------
1. Fill your user information
-----------------------------
--------------------------------
1. Fill in your user information
--------------------------------

Follow the user-info_ section for this step.
Joining a server
=======================

----------------
2. Network setup
----------------
--------------
Network setup
--------------

In the nerwork panel select **JOIN**.
The **join sub-panel** (see image below) allow you configure the client to join a
collaborative session.
In the network panel, select **JOIN**.
The **join sub-panel** (see image below) allows you to configure your client to join a
collaborative session which is already hosted.

.. figure:: img/quickstart_join.png
   :align: center
   :alt: Connect menu
.. figure:: img/server_preset_image_normal_server.png
   :align: center
   :width: 200px

   Connection panel
   Connection pannel

Fill those field with your information:
Fill in the fields with your information:

- **IP**: the host ip.
- **Port**: the host port.
- **Connect as admin**: connect you with **admin rights** (see :ref:`admin` ) to the session.
- **IP**: the host's IP address.
- **Port**: the host's port number.

.. Maybe something more explicit here
Once you've configured every field, hit the button **CONNECT** to join the session !
When the :ref:`session-status` is **ONLINE** you are online and ready to start co-creating.

.. note::
   Additionnal configuration setting can be found in the :ref:`advanced` section.

If you want to have **administrator rights** (see :ref:`admin` ) on the server, just enter the password created by the host in the **Connect as admin** section

Once you've set every field, hit the button **CONNECT** to join the session !
When the :ref:`session-status` is **ONLINE** you are online and ready to start to collaborate.
.. figure:: img/server_preset_image_admin.png
   :align: center
   :width: 200px

   Admin password

---------------
Server presets
---------------

You can save your server presets in a preset list below the 'JOIN' and 'HOST' buttons. This allows you to quickly access and manage your servers.

To add a server, first enter the ip address and the port (plus the password if needed), then click on the + icon to add a name to your preset. To remove a server from the list, select it and click on the - icon.

.. figure:: img/server_preset_exemple.gif
   :align: center
   :width: 200px

.. warning:: Be careful, if you don't rename your new preset, or if it has the same name as an existing preset, the old preset will be overwritten.

.. figure:: img/server_preset_image_report.png
   :align: center
   :width: 200px

.. note::

   Two presets are already present when the addon is launched:

   - The 'localhost' preset, to host and join a local session quickly
   - The 'public session' preset, to join the public sessions of the multi-user server (official discord to participate : https://discord.gg/aBPvGws)

.. note::
   On the **dedicated server** startup, the session status will get you to the **LOBBY** waiting a admin to start it.
   Additional configuration settings can be found in the :ref:`advanced` section.

If the session status is set to **LOBBY** and you are a regular user, you need to wait that an admin launch it.
If you are the admin, you just need to init the repository to start the session (see image below).
.. note::
   When starting a **dedicated server**, the session status screen will take you to the **LOBBY**, awaiting an admin to start the session.

If the session status is set to **LOBBY** and you are a regular user, you need to wait for the admin to launch the scene.
If you are the admin, you just need to initialise the repository to start the session (see image below).

.. figure:: img/quickstart_session_init.png
   :align: center

   Session initialisation for dedicated server

During online session, various actions are available to you, go to :ref:`how-to-manage` section to
During an online session, various actions are available to you. Go to :ref:`how-to-manage` to
learn more about them.

.. _how-to-manage:
@@ -157,17 +190,17 @@ learn more about them.
How to manage a session
=======================

The collaboration quality directly depend on the communication quality. This section describes
various tools made in an effort to ease the communication between the different session users.
Feel free to suggest any idea for communication tools `here <https://gitlab.com/slumber/multi-user/-/issues/75>`_ .
The quality of a collaborative session directly depends on the quality of the network connection, and the communication between the users. This section describes
various tools which have been made in an effort to ease the communication between your fellow creators.
Feel free to suggest any ideas for communication tools `here <https://gitlab.com/slumber/multi-user/-/issues/75>`_ .

---------------------------
Change replication behavior
---------------------------

During a session, the multi-user will replicate your modifications to other instances.
In order to avoid annoying other users when you are experimenting, some of those modifications can be ignored via
various flags present at the top of the panel (see red area in the image bellow). Those flags are explained in the :ref:`replication` section.
During a session, multi-user will replicate all of your local modifications to the scene, to all other users' blender instances.
In order to avoid annoying other users when you are experimenting, you can flag some of your local modifications to be ignored via
various flags present at the top of the panel (see red area in the image below). Those flags are explained in the :ref:`replication` section.

.. figure:: img/quickstart_replication.png
   :align: center
@@ -178,32 +211,33 @@ various flags present at the top of the panel (see red area in the image bellow)
Monitor online users
--------------------

One of the most vital tool is the **Online user panel**. It list all connected
users information's including yours such as :
One of the most vital tools is the **Online user panel**. It lists all connected
users' information including your own:

* **Role** : if user is an admin or a regular user.
* **Role** : if a user is an admin or a regular user.
* **Username** : Name of the user.
* **Mode** : User's active editing mode (edit_mesh, paint,etc.).
* **Frame**: When (on which frame) the user is working.
* **Location**: Where the user is actually working.
* **Frame**: When (in frame) the user working.
* **Ping**: user connection delay in milliseconds
* **Ping**: user's connection delay in milliseconds

.. figure:: img/quickstart_users.png
   :align: center

   Online user panel

By selecting a user in the list you'll have access to different user related **actions**.
Those operators allow you reach the selected user state in tow different dimensions: **SPACE** and **TIME**.
By selecting a user in the list you'll have access to different users' related **actions**.
Those operators allow you to experience the selected user's state in two different dimensions: **SPACE** and **TIME**.

Snapping in space
----------------
-----------------

The **CAMERA button** (Also called **snap view** operator) allow you to snap on
the user viewpoint. To disable the snap, click back on the button. This action
served different purposes such as easing the review process, working together on
wide world.
The **CAMERA button** (Also called **snap view** operator) allow you to snap to
the user's viewpoint. To disable the snap, click on the button once again. This action
serves different purposes such as easing the review process, and working together on a large or populated world.

.. hint::
   If the target user is localized on another scene, the **snap view** operator will send you to his scene.
   If the target user is located in another scene, the **snap view** operator will send you to their scene.

.. figure:: img/quickstart_snap_view.gif
   :align: center
@@ -211,11 +245,11 @@ wide world.
   Snap view in action

Snapping in time
---------------
----------------

The **CLOCK button** (Also called **snap time** operator) allow you to snap on
the user time (current frame). To disable the snap, click back on the button.
This action is built to help various actors to work on the same temporality
The **CLOCK button** (Also called **snap time** operator) allows you to snap to
the user's time (current frame). To disable the snap, click on the button once again.
This action helps various multiple creators to work in the same time-frame
(for instance multiple animators).

.. figure:: img/quickstart_snap_time.gif
@@ -230,17 +264,18 @@ Kick a user
.. warning:: Only available for :ref:`admin` !


The **CROSS button** (Also called **kick** operator) allow the admin to kick the selected user. On the target user side, the session will properly disconnect.
The **CROSS button** (Also called **kick** operator) allows the administrator to kick the selected user. This can be helpful if a user is acting unruly, but more importantly, if they are experiencing a high ping which is slowing down the scene. Meanwhile, in the target user's world, the session will properly disconnect.


Change users display
--------------------

Presence is the multi-user module responsible for users display. During the session,
it draw users related information in your viewport such as:
Presence is the multi-user module responsible for displaying user presence. During the session,
it draw users' related information in your viewport such as:

* Username
* User point of view
* User active mode
* User selection

.. figure:: img/quickstart_presence.png
@@ -248,11 +283,19 @@ it draw users related information in your viewport such as:

   Presence show flags

The presence overlay panel (see image above) allow you to enable/disable
The presence overlay panel (see image above) allows you to enable/disable
various drawn parts via the following flags:

- **Show selected objects**: display other users current selection
- **Show users**: display users current viewpoint
- **Show session status**: display the session status in the viewport

.. figure:: img/quickstart_status.png
   :align: center

  - **Text scale**: session status text size
  - **Vertical/Horizontal position**: session position in the viewport

- **Show selected objects**: display other users' current selections
- **Show users**: display users' current viewpoint
- **Show different scenes**: display users working on other scenes

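For context, overlays like this are drawn through Blender's viewport draw-handler API. The sketch below only illustrates that general mechanism with made-up label text; the addon's presence module is far more involved.

# Minimal, self-contained example of a viewport overlay draw handler.
import blf
import bpy

def draw_session_overlay():
    font_id = 0
    blf.position(font_id, 15, 30, 0)   # pixel position in the viewport
    blf.size(font_id, 16, 72)          # font size and dpi (pre-4.0 signature)
    blf.draw(font_id, "multi-user: ONLINE")

handle = bpy.types.SpaceView3D.draw_handler_add(
    draw_session_overlay, (), 'WINDOW', 'POST_PIXEL')
# Remove it later with: bpy.types.SpaceView3D.draw_handler_remove(handle, 'WINDOW')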
@@ -261,40 +304,40 @@
Manage data
-----------

In order to understand replication data managment, a quick introduction to the multi-user data workflow is required.
First thing to know: until now, the addon rely on a data-based replication. In simple words, it means that it replicate
user's action results.
To replicate datablocks between clients the multi-user rely on what tends to be a distributed architecture:
In order to understand replication data managment, a quick introduction to the multi-user data workflow is in order.
The first thing to know: until now, the addon relies on data-based replication. In simple words, it means that it replicates
the resultant output of a user's actions.
To replicate datablocks between clients, multi-user relies on a standard distributed architecture:

- The server store the "master" version of the work.
- Each client have a local version of the work.
- The server stores the "master" version of the work.
- Each client has a local version of the work.

When an artist modified something in the scene, here is what is happening in the background:
When an artist modifies something in the scene, here is what is happening in the background:

1. Modified data are **COMMITTED** to the local repository.
2. Once committed locally, they are **PUSHED** to the server
3. As soon as the server is getting updates, they are stored locally and pushed to every other clients
3. As soon as the server receives updates, they are stored locally and pushed to every other client
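To make those three steps concrete, here is a tiny, self-contained toy model of the flow (commit locally, push to the server, server fans out to the other clients). It is purely illustrative; none of these class or method names come from the replication library.

# Toy model of the commit/push/fan-out cycle described above.
class Server:
    def __init__(self):
        self.master = {}                      # the "master" version of the work
        self.clients = []

    def receive(self, sender, key, value):
        self.master[key] = value              # 3. store the update on the server...
        for client in self.clients:
            if client is not sender:
                client.repo[key] = value      # ...and push it to every other client

class Client:
    def __init__(self, server):
        self.repo = {}                        # local version of the work
        self.staged = {}
        self.server = server
        server.clients.append(self)

    def commit(self, key, value):
        self.staged[key] = value              # 1. commit the modification locally

    def push(self):
        for key, value in self.staged.items():
            self.repo[key] = value
            self.server.receive(self, key, value)   # 2. push it to the server
        self.staged.clear()

server = Server()
alice, bob = Client(server), Client(server)
alice.commit("Cube", {"location": (1.0, 0.0, 0.0)})
alice.push()
assert bob.repo["Cube"]["location"] == (1.0, 0.0, 0.0)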
At the top of this data management system, a right management system prevent
|
||||
multiple users from modifying same data at same time. A datablock may belong to
|
||||
At the top of this data management system, a rights management system prevents
|
||||
multiple users from modifying the same data at the same time. A datablock may belong to
|
||||
a connected user or be under :ref:`common-right<**COMMON**>` rights.
|
||||
|
||||
.. note::
|
||||
In a near future, the right management system will support roles to allow multiple users to
|
||||
work on different aspect of the same datablock.
|
||||
In a near future, the rights management system will support roles to allow multiple users to
|
||||
work on different aspects of the same datablock.
|
||||
|
||||
The Repository panel (see image below) allow you to monitor, change datablock states and right manually.
|
||||
The Repository panel (see image below) allows you to monitor, change datablock states and rights manually.
|
||||
|
||||
.. figure:: img/quickstart_properties.png
|
||||
.. figure:: img/quickstart_save_session_data.png
|
||||
:align: center
|
||||
|
||||
Repository panel
|
||||
|
||||
The **show only owned** flag allow you to see which datablocks you are currently modifying.
|
||||
The **show only owned** flag allows you to see which datablocks you are currently modifying.
|
||||
|
||||
.. warning::
|
||||
If you are editing a datablock not listed with this fag enabled, it means that you do
|
||||
not have right granted to modify it. So it won't be updated to other client !
|
||||
If you are editing a datablock not listed with this flag enabled, it means that you have not been granted the rights to modify it.
|
||||
So, it won't be updated to other clients!
|
||||
|
||||
Here is a quick list of available actions:
|
||||
|
||||
@ -312,6 +355,40 @@ Here is a quick list of available actions:
|
||||
| .. image:: img/quickstart_remove.png | **Delete** | Remove the data-block from network replication |
|
||||
+---------------------------------------+-------------------+------------------------------------------------------------------------------------+
|
||||
|
||||
Save session data
|
||||
-----------------
|
||||
|
||||
.. danger::
|
||||
This is an experimental feature, until the stable release it is highly recommended to use regular .blend save.
|
||||
|
||||
The save session data allows you to create a backup of the session data.
|
||||
|
||||
When you hit the **save session data** button, the following popup dialog will appear.
|
||||
It allows you to choose the destination folder and if you want to run an auto-save.
|
||||
|
||||
.. figure:: img/quickstart_save_session_data_dialog.png
|
||||
:align: center
|
||||
|
||||
Save session data dialog.
|
||||
|
||||
If you enabled the auto-save option, you can cancel it from the **Cancel auto-save** button.
|
||||
|
||||
.. figure:: img/quickstart_save_session_data_cancel.png
|
||||
:align: center
|
||||
|
||||
Cancel session autosave.
|
||||
|
||||
|
||||
To import session data backups, use the following **Multiuser session snapshot** import dialog
|
||||
|
||||
.. figure:: img/quickstart_import_session_data.png
|
||||
:align: center
|
||||
|
||||
Import session data dialog.
|
||||
|
||||
.. note::
|
||||
It is not yet possible to start a session directly from a backup.
|
||||
|
||||
.. _advanced:
|
||||
|
||||
Advanced settings
|
||||
@ -333,15 +410,6 @@ Network
|
||||
|
||||
Advanced network settings
|
||||
|
||||
**IPC Port** is the port used for Inter Process Communication. This port is used
|
||||
by the multi-users subprocesses to communicate with each others. If different instances
|
||||
of multi-user are using the same IPC port, it will create conflicts!
|
||||
|
||||
.. note::
|
||||
You only need to modify it if you need to launch multiple clients from the same
|
||||
computer (or if you try to host and join on the same computer). You should just enter a different
|
||||
**IPC port** for each blender instance.
|
||||
|
||||
**Timeout (in milliseconds)** is the maximum ping authorized before auto-disconnecting.
|
||||
You should only increase it if you have a bad connection.
|
||||
|
||||
@ -356,20 +424,20 @@ Replication
|
||||
|
||||
Advanced replication settings
|
||||
|
||||
**Synchronize render settings** (only host) enable replication of EEVEE and CYCLES render settings to match render between clients.
|
||||
**Synchronize render settings** (only host) enables replication of EEVEE and CYCLES render settings to match renders between clients.
|
||||
|
||||
**Synchronize active camera** sync the scene active camera.
|
||||
**Synchronize active camera** syncs the scene's active camera.
|
||||
|
||||
**Edit Mode Updates** enable objects update while you are in Edit_Mode.
|
||||
**Edit Mode Updates** enables objects to update while you are in Edit Mode.
|
||||
|
||||
.. warning:: Edit Mode Updates kill performances with complex objects (heavy meshes, gpencil, etc...).
|
||||
.. warning:: Edit Mode Updates kills the session's performance with complex objects (heavy meshes, gpencil, etc...).
|
||||
|
||||
**Update method** allow you to change how replication update are triggered. Until now two update methode are implemented:
|
||||
**Update method** allows you to change how replication updates are triggered. Currently, two update methods are implemented:
|
||||
|
||||
- **Default**: Use external threads to monitor datablocks changes, slower and less accurate.
|
||||
- **Default**: Use external threads to monitor datablock changes. Slower and less accurate.
|
||||
- **Depsgraph ⚠️**: Use the Blender dependency graph to trigger updates. Faster, but experimental and unstable!
|
||||
|
||||
**Properties frequency gird** allow to set a custom replication frequency for each type of data-block:
|
||||
**Properties frequency grid** sets a custom replication frequency for each type of data-block:
|
||||
|
||||
- **Refresh**: pushed data update rate (in seconds)
|
||||
- **Apply**: pulled data update rate (in seconds)
|
||||
@ -378,21 +446,21 @@ Replication
|
||||
Cache
|
||||
-----
|
||||
|
||||
The multi-user allows to replicate external blend dependencies such as images, movies sounds.
|
||||
On each client, those files are stored into the cache folder.
|
||||
Multi-user allows you to replicate external dependencies such as images (textures, hdris, etc...), movies, and sounds.
|
||||
On each client, the files will be stored in the multi-user cache folder.
|
||||
|
||||
.. figure:: img/quickstart_advanced_cache.png
|
||||
:align: center
|
||||
|
||||
Advanced cache settings
|
||||
|
||||
**cache_directory** allows to choose where cached files (images, sound, movies) will be saved.
|
||||
**cache_directory** chooses where cached files (images, sounds, movies) will be saved.
|
||||
|
||||
**Clear memory filecache** will save memory space at runtime by removing the file content from memory as soon as it have been written to the disk.
|
||||
**Clear memory filecache** will save memory space at runtime by removing the file content from memory as soon as it has been written to the disk.
|
||||
|
||||
**Clear cache** will remove all file from the cache folder.
|
||||
**Clear cache** will remove all files from the cache folder.
|
||||
|
||||
.. warning:: Clear cash could break your scene image/movie/sound if they are used into the blend !
|
||||
.. warning:: Clearing the cache could break your scene images/movies/sounds if they are used in a blend file! Try saving the blend file and choosing 'Pack all into blend' before clearing the cache.
|
||||
|
||||
---
|
||||
Log
|
||||
@ -403,16 +471,16 @@ Log
|
||||
|
||||
Advanced log settings
|
||||
|
||||
**log level** allow to set the logging level of detail. Here is the detail for each values:
|
||||
**log level** allows you to set the level of detail captured in multi-user's logging output. Here is a brief description on the level of detail for each value of the logging parameter:
|
||||
|
||||
+-----------+-----------------------------------------------+
|
||||
| Log level | Description |
|
||||
+===========+===============================================+
|
||||
| ERROR | Shows only critical error |
|
||||
| ERROR | Shows only critical errors |
|
||||
+-----------+-----------------------------------------------+
|
||||
| WARNING | Shows only errors (all kind) |
|
||||
| WARNING | Shows only errors (of all kinds) |
|
||||
+-----------+-----------------------------------------------+
|
||||
| INFO | Shows only status related messages and errors |
|
||||
| INFO | Shows only status-related messages and errors |
|
||||
+-----------+-----------------------------------------------+
|
||||
| DEBUG | Shows every possible information. |
|
||||
| DEBUG | Shows all possible information |
|
||||
+-----------+-----------------------------------------------+
|
19
docs/getting_started/troubleshooting.rst
Normal file
@ -0,0 +1,19 @@
|
||||
.. _troubleshooting:
|
||||
|
||||
===============
|
||||
Troubleshooting
|
||||
===============
|
||||
|
||||
The majority of issues new users experience when first using Multi-User can be solved with a few quick checks.
|
||||
|
||||
- Run Blender in Administrator mode
|
||||
- Update the multi-user addon to the latest version
|
||||
- Make sure to allow Blender through your firewall
|
||||
|
||||
.. hint:: Your firewall may have additional settings like ransomware protection, or you may need to enable both Blender and Python on private and/or public networks; a command-line sketch is shown after this list.
|
||||
|
||||
- Solve problems with your connection quality
|
||||
- Minimise the use of large textures or file sizes
|
||||
- Avoid using 'Undo'. Use 'delete' instead
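As a hedged sketch for Windows users, Blender can also be allowed through the built-in firewall from an administrator command prompt. The rule name and Blender path below are assumptions, adjust them to your own installation:

.. code-block:: bash

    # hypothetical example - adjust the program path to your Blender installation
    netsh advfirewall firewall add rule name="Blender" dir=in action=allow program="C:\Program Files\Blender Foundation\Blender\blender.exe" enable=yes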
|
||||
|
||||
Use the #support channel on the multi-user `discord server <https://discord.gg/aBPvGws>`_ to chat, seek help and contribute.
|
@ -49,6 +49,7 @@ Documentation is organized into the following sections:
|
||||
getting_started/install
|
||||
getting_started/quickstart
|
||||
getting_started/glossary
|
||||
getting_started/troubleshooting
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
@ -1,36 +1,51 @@
|
||||
.. _internet-guide:
|
||||
|
||||
===================
|
||||
Hosting on internet
|
||||
===================
|
||||
=======================
|
||||
Hosting on the internet
|
||||
=======================
|
||||
|
||||
.. warning::
|
||||
Until now, those communications are not encrypted but are planned to be in a mid-term future (`Status <https://gitlab.com/slumber/multi-user/issues/62>`_).
|
||||
Currently, these communications are not encrypted, but encryption is planned for the mid-term future (`status <https://gitlab.com/slumber/multi-user/issues/62>`_).
|
||||
|
||||
This tutorial aims to guide you to host a collaborative Session on internet.
|
||||
Hosting a session can be done is several ways:
|
||||
This tutorial aims to guide you toward hosting a collaborative multi-user session on the internet.
|
||||
Hosting a session can be achieved in several ways:
|
||||
|
||||
- :ref:`host-blender`: hosting a session directly from the blender add-on panel.
|
||||
- :ref:`host-dedicated`: hosting a session directly from the command line interface on a computer without blender.
|
||||
- :ref:`host-cloud`: hosting a session on a dedicated cloud server such as Google Cloud's free tier.
|
||||
|
||||
|
||||
.. _host-blender:
|
||||
|
||||
-------------
|
||||
--------------------
|
||||
From blender
|
||||
-------------
|
||||
--------------------
|
||||
By default, your router doesn't allow anyone to share your connection.
|
||||
In order grant server access to people from internet you have tow main option:
|
||||
In order to grant server access to people over the internet, you have two main options:
|
||||
|
||||
* The :ref:`connection-sharing`: the easiest way.
|
||||
* The :ref:`port-forwarding`: this one is the most unsecure, if you have no networking knowledge, you should definitively go to :ref:`connection-sharing`.
|
||||
* The :ref:`port-forwarding`: this way is the least secure. If you have no networking knowledge, you should definitely follow :ref:`connection-sharing`.
|
||||
|
||||
.. _connection-sharing:
|
||||
|
||||
Using a connection sharing solution
|
||||
-----------------------------------
|
||||
|
||||
You can either follow `Pierre Schiller's <https://www.youtube.com/c/activemotionpictures/featured>`_ excellent video tutorial or jump to the `text tutorial <zt-installation_>`_.
|
||||
|
||||
.. raw:: html
|
||||
|
||||
<p>
|
||||
<iframe width="560" height="315" src="https://www.youtube.com/embed/xV4R5AukkVw" frameborder="0" allow="accelerometer; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
|
||||
</p>
|
||||
|
||||
|
||||
|
||||
|
||||
Many third-party tools, like `ZEROTIER <https://www.zerotier.com/download/>`_ (free) or `HAMACHI <https://vpn.net/>`_ (free for up to 5 users), allow you to share your private network with other people.
|
||||
For the example I'm gonna use ZeroTier because its free and open source.
|
||||
For this example I'll use ZeroTier because it's free and open source.
|
||||
|
||||
.. _zt-installation:
|
||||
|
||||
1. Installation
|
||||
^^^^^^^^^^^^^^^
|
||||
@ -47,7 +62,7 @@ To create a ZeroTier private network you need to register a ZeroTier account `on
|
||||
(click on **login**, then register at the bottom)
|
||||
|
||||
Once your account is activated, you can connect to `my.zerotier.com <https://my.zerotier.com/login>`_.
|
||||
Head up to the **Network** section(highlighted in red in the image below).
|
||||
Head up to the **Network** section (highlighted in red in the image below).
|
||||
|
||||
.. figure:: img/hosting_guide_head_network.png
|
||||
:align: center
|
||||
@ -61,7 +76,7 @@ Hit 'Create a network'(see image below) and go to the network settings.
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Network page
|
||||
Admin password
|
||||
|
||||
Now that the network is created, let's configure it.
|
||||
|
||||
@ -86,7 +101,7 @@ Now let's connect everyone.
|
||||
3. Network authorization
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Since your ZeroTier network is Private, you will need to authorize each new users
|
||||
Since your ZeroTier network is Private, you will need to authorize each new user
|
||||
to connect to it.
|
||||
For each user you want to add, do the following step:
|
||||
|
||||
@ -104,7 +119,7 @@ For each user you want to add, do the following step:
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Add the client to network authorized users
|
||||
Add the client to network-authorized users
|
||||
|
||||
4. Network connection
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
@ -144,7 +159,7 @@ Let's check the connection status. Right click on the tray icon and click on **S
|
||||
Network status.
|
||||
|
||||
The network status must be **OK** for each user (like in the picture above), otherwise it means that you are not connected to the network.
|
||||
If you see something like **ACCESS_DENIED**, it means that you were not authorized to join the network. Please check the :ref:`network-authorization` section.
|
||||
If you see something like **ACCESS_DENIED**, it means that you were not authorized to join the network. Please check the section :ref:`network-authorization`.
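On clients without the tray icon (for example a Linux machine), you can also check the connection from a terminal. This is a sketch assuming the standard ZeroTier CLI is installed:

.. code-block:: bash

    # show the node status (it should report ONLINE)
    sudo zerotier-cli status
    # list joined networks and check that the multi-user network reports OK
    sudo zerotier-cli listnetworks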
|
||||
|
||||
This is it for the ZeroTier network setup. Now everything should be set up to use the multi-user add-on over the internet! You can now follow the :ref:`quickstart` guide to start using the multi-user add-on!
|
||||
|
||||
@ -153,12 +168,12 @@ This is it for the ZeroTier network setup. Now everything should be setup to use
|
||||
Using port-forwarding
|
||||
---------------------
|
||||
|
||||
The port forwarding method consist to configure you Network route to allow internet trafic throught specific ports.
|
||||
The port forwarding method consists of configuring your network router to deny most traffic with a firewall, but to then allow particular internet traffic (like a multiuser connection) through the firewall on specified ports.
|
||||
|
||||
In order to know which port are used by the add-on, check the :ref:`port-setup` section.
|
||||
In order to know which ports are used by the add-on, please check the :ref:`port-setup` section.
|
||||
To set up port forwarding for each port, you can follow this `guide <https://www.wikihow.com/Set-Up-Port-Forwarding-on-a-Router>`_, for example.
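Once the forwarding rules are in place, a quick hedged way to verify from a client machine that the ports are reachable (assuming ``netcat`` is installed, the default Commands port of 5555, and ``your.server.ip`` as a placeholder for the server address) is:

.. code-block:: bash

    # test each of the four multi-user ports on the server
    for port in 5555 5556 5557 5558; do
        nc -zv your.server.ip $port
    done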
|
||||
|
||||
Once you have set up the network you can follow the :ref:`quickstart` guide to start using the multi-user add-on !
|
||||
Once you have set up the network you can follow the :ref:`quickstart` guide to begin using the multi-user add-on !
|
||||
|
||||
.. _host-dedicated:
|
||||
|
||||
@ -167,30 +182,31 @@ From the dedicated server
|
||||
--------------------------
|
||||
|
||||
.. warning::
|
||||
The dedicated server is developed to run directly on internet server (like VPS). You can also
|
||||
run it at home for LAN but for internet hosting you need to follow the :ref:`port-forwarding` setup first.
|
||||
The dedicated server is developed to run directly on an internet server (like a VPS (Virtual Private Server)). You can also run it at home on a LAN but for internet hosting you need to follow the :ref:`port-forwarding` setup first. Please see :ref:`host-cloud` for a detailed walkthrough of cloud hosting using Google Cloud.
|
||||
|
||||
The dedicated server allow you to host a session with simplicity from any location.
|
||||
It was developed to improve intaernet hosting performance.
|
||||
The dedicated server allows you to host a session with simplicity from any location.
|
||||
It was developed to improve internet hosting performance (for example poor latency).
|
||||
|
||||
The dedicated server can be run in tow ways:
|
||||
The dedicated server can be run in two ways:
|
||||
|
||||
- :ref:`cmd-line`
|
||||
- :ref:`docker`
|
||||
|
||||
.. Note:: There are shell scripts to conveniently start a dedicated server via either of these approaches available in the gitlab repository. See section: :ref:`serverstartscripts`
|
||||
|
||||
.. _cmd-line:
|
||||
|
||||
Using a regular command line
|
||||
----------------------------
|
||||
|
||||
You can run the dedicated server on any platform by following those steps:
|
||||
You can run the dedicated server on any platform by following these steps:
|
||||
|
||||
1. Firstly, download and install Python 3 (3.6 or above).
|
||||
2. Install the replication library:
|
||||
2. Install the latest version of the replication library:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
python -m pip install replication
|
||||
python -m pip install replication==0.1.13
|
||||
|
||||
3. Launch the server with:
|
||||
|
||||
@ -199,17 +215,19 @@ You can run the dedicated server on any platform by following those steps:
|
||||
replication.serve
|
||||
|
||||
.. hint::
|
||||
You can also specify a custom **port** (-p), **timeout** (-t), **admin password** (-pwd), **log level(ERROR, WARNING, INFO or DEBUG)** (-l) and **log file** (-lf) with the following optionnal argument
|
||||
You can also specify a custom **port** (-p), **timeout** (-t), **admin password** (-pwd), **log level (ERROR, WARNING, INFO or DEBUG)** (-l) and **log file** (-lf) with the following optional arguments
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
replication.serve -p 5555 -pwd toto -t 1000 -l INFO -lf server.log
|
||||
replication.serve -p 5555 -pwd admin -t 5000 -l INFO -lf server.log
|
||||
|
||||
As soon as the dedicated server is running, you can connect to it from blender (follow :ref:`how-to-join`).
|
||||
Here, for example, a server is instantiated on port 5555, with password 'admin', a 5 second timeout, and logging enabled.
|
||||
|
||||
As soon as the dedicated server is running, you can connect to it from blender by following :ref:`how-to-join`.
|
||||
|
||||
|
||||
.. hint::
|
||||
Some commands are available to manage the session. Check :ref:`dedicated-management` to learn more.
|
||||
Some server commands are available to enable administrators to manage a multi-user session. Check :ref:`dedicated-management` to learn more.
|
||||
|
||||
|
||||
.. _docker:
|
||||
@ -217,22 +235,134 @@ As soon as the dedicated server is running, you can connect to it from blender (
|
||||
Using a pre-configured image on docker engine
|
||||
---------------------------------------------
|
||||
|
||||
Launching the dedicated server from a docker server is simple as:
|
||||
Launching the dedicated server from a docker server is as simple as running:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
docker run -d \
|
||||
-p 5555-5560:5555-5560 \
|
||||
-e port=5555 \
|
||||
-e log_level=DEBUG \
|
||||
-e password=admin \
|
||||
-e timeout=1000 \
|
||||
registry.gitlab.com/slumber/multi-user/multi-user-server:0.0.3
|
||||
-e timeout=5000 \
|
||||
registry.gitlab.com/slumber/multi-user/multi-user-server:latest
|
||||
|
||||
As soon as the dedicated server is running, you can connect to it from blender.
|
||||
You can check the :ref:`how-to-join` section.
|
||||
Please use the :latest tag, or otherwise use the URL of the most recent container available in the `multi-user container registry <https://gitlab.com/slumber/multi-user/container_registry/1174180>`_. As soon as the dedicated server is running, you can connect to it from blender by following :ref:`how-to-join`.
|
||||
|
||||
You can check that your container is running, and find its ID and name with:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
docker ps
|
||||
|
||||
.. _docker-logs:
|
||||
|
||||
Viewing logs in a docker container
|
||||
----------------------------------
|
||||
|
||||
Logs for the server running in a docker container can be accessed by outputting the container logs to a log file. First, you'll need to know your container ID, which you can find by running:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
docker ps
|
||||
|
||||
Then, output the container logs to a file:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
docker logs your-container-id >& dockerserver.log
|
||||
|
||||
.. Note:: If using WSL2 on Windows 10 (Windows Subsystem for Linux), it is preferable to run a dedicated server via regular command line approach (or the associated startup script) from within Windows - docker desktop for windows 10 usually uses the WSL2 backend where it is available.
|
||||
|
||||
.. This may not be true. Need to write up how to locally start a docker container from WSL2
|
||||
|
||||
|
||||
Downloading logs from a docker container on a cloud-hosted server
|
||||
-----------------------------------------------------------------
|
||||
|
||||
If you'd like to pull the log files from a cloud-hosted server to submit to a developer for review, a simple process using SSH and SCP is as follows:
|
||||
|
||||
First SSH into your instance. You can either open the `VM Instances console <https://console.cloud.google.com/compute/instances>`_ and use the browser terminal provided by Google Cloud (I had the best luck using the Google Chrome browser)... or you can see `here <https://cloud.google.com/compute/docs/instances/connecting-advanced#thirdpartytools>`_ for how to set up your instance for SSH access from your local terminal.
|
||||
|
||||
If using SSH from your terminal, first generate SSH keys (setting their access permissions to e.g. chmod 400 level whereby only the user has permissions) and submit the public key to the cloud-hosted VM instance, storing the private key on your local machine.
|
||||
Then, SSH into your cloud server from your local terminal, with the following command:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
ssh -i PATH_TO_PRIVATE_KEY USERNAME@EXTERNAL_IP_ADDRESS
|
||||
|
||||
Use the private key which corresponds to the public key you uploaded, and the username associated with that key (visible in the Google Cloud console for your VM Instance). Use the external IP address for the server, available from the `VM Instances console <https://console.cloud.google.com/compute/instances>`_
|
||||
e.g.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
ssh -i ~/.ssh/id_rsa user@xxx.xxx.xxx.xxx
|
||||
|
||||
Once you've connected to the server's secure shell, you can generate a log file from the docker container running the replication server. First, you'll need to know your container ID, which you can find by running:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
docker ps
|
||||
|
||||
If you're cloud-hosting with e.g. Google Cloud, your container will be the one associated with the `registry address <https://gitlab.com/slumber/multi-user/container_registry/1174180>`_ where your Docker image was located. e.g. registry.gitlab.com/slumber/multi-user/multi-user-server:latest
|
||||
|
||||
To view the docker container logs, run:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
docker logs your-container-name
|
||||
|
||||
OR
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
docker logs your-container-id
|
||||
|
||||
To save the output to a file, run:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
docker logs your-container-id >& dockerserver.log
|
||||
|
||||
Now that the server logs are available in a file, we can disconnect from the secure shell (SSH), and then copy the file to the local machine using SCP. In your local terminal, execute the following:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
scp -i PATH_TO_PRIVATE_KEY USERNAME@EXTERNAL_IP_ADDRESS:"dockerserver.log" LOCAL_PATH_TO_COPY_FILE_TO
|
||||
|
||||
e.g.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
scp -i ~/.ssh/id_rsa user@xxx.xxx.xxx.xxx:"dockerserver.log" .
|
||||
|
||||
This copies the file dockerserver.log generated in the previous step to the current directory on the local machine. From there, you can send it to the multi-user maintainers for review.
|
||||
|
||||
|
||||
.. Note:: See these `notes <https://cloud.google.com/compute/docs/containers/deploying-containers?_ga=2.113663175.-1396941296.1606125558#viewing_container_logs>`_ for how to check server logs on Google Cloud using other tools.
|
||||
|
||||
|
||||
.. _serverstartscripts:
|
||||
|
||||
Server startup scripts
|
||||
----------------------
|
||||
|
||||
Convenient scripts are available in the Gitlab repository: https://gitlab.com/slumber/multi-user/scripts/startup_scripts/
|
||||
|
||||
Simply run the relevant script in a shell on the host machine to start a server with one line of code via replication directly or via a docker container. Choose between the two methods:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
./start-server.sh
|
||||
|
||||
or
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
./run-dockerfile.sh
|
||||
|
||||
.. hint::
|
||||
Some commands are available to manage the session. Check :ref:`dedicated-management` to learn more.
|
||||
Once your server is up and running, some commands are available to manage the session. See :ref:`dedicated-management`.
|
||||
|
||||
.. _dedicated-management:
|
||||
|
||||
@ -241,11 +371,30 @@ Dedicated server management
|
||||
|
||||
Here is the list of available commands from the dedicated server:
|
||||
|
||||
- ``help``: Show all commands.
|
||||
- ``help`` or ``?``: Show all commands. Or, use ``help <command>`` to learn about another command
|
||||
- ``exit`` or ``Ctrl+C`` : Stop the server.
|
||||
- ``kick username``: kick the provided user.
|
||||
- ``users``: list all online users.
|
||||
|
||||
Also, see :ref:`how-to-manage` for more details on managing a server.
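For illustration, a management session might look like the following sketch (the user name is illustrative only):

.. code-block:: bash

    # list connected users
    users
    # disconnect the user named 'guest'
    kick guest
    # show the built-in help for a specific command
    help kick
    # stop the server
    exit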
|
||||
|
||||
.. _cloud-dockermanage:
|
||||
|
||||
Managing a docker server from the command line
|
||||
----------------------------------------------
|
||||
If you want to be able to manage a server running within a docker container, open the terminal on the host machine (or SSH in, if you are using cloud hosting), and then run
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
docker ps
|
||||
|
||||
to find your container id, and then
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
docker attach your-container-id
|
||||
|
||||
to attach to the STDOUT from the container. There, you can issue the server management commands detailed in :ref:`dedicated-management`. Type ``?`` and hit return/enter to see the available commands. Also, see :ref:`how-to-manage` for more details on managing a server.
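.. hint:: To detach from the container again without stopping the server, use Docker's default detach sequence: press ``Ctrl+P`` followed by ``Ctrl+Q``.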
|
||||
|
||||
.. _port-setup:
|
||||
|
||||
@ -253,14 +402,14 @@ Here is the list of available commands from the dedicated server:
|
||||
Port setup
|
||||
----------
|
||||
|
||||
The multi-user network architecture is based on a clients-server model. The communication protocol use four ports to communicate with client:
|
||||
The multi-user network architecture is based on a client-server model. The communication protocol uses four ports to communicate with clients:
|
||||
|
||||
* Commands: command transmission (such as **snapshots**, **change_rights**, etc.) [given port]
|
||||
* Commands: command transmission (such as **snapshots**, **change_rights**, etc.) [user-nominated port]
|
||||
* Subscriber: pull data [Commands port + 1]
|
||||
* Publisher: push data [Commands port + 2]
|
||||
* TTL (time to live): used to ping each client [Commands port + 3]
|
||||
|
||||
To know which ports will be used, you just have to read the port in your preference.
|
||||
To know which ports will be used, you just have to read the port in your preferences.
|
||||
|
||||
.. figure:: img/hosting_guide_port.png
|
||||
:align: center
|
||||
@ -268,11 +417,315 @@ To know which ports will be used, you just have to read the port in your prefere
|
||||
:width: 200px
|
||||
|
||||
Port in host settings
|
||||
In the picture below we have setup our port to **5555** so it will be:
|
||||
|
||||
* Commands: 5555 (**5555**)
|
||||
* Subscriber: 5556 (**5555** +1)
|
||||
* Publisher: 5557 (**5555** +2)
|
||||
* TTL: 5558 (**5555** +3)
|
||||
In the picture below we have set up our port to **5555**, so the four ports will be:
|
||||
|
||||
Those four ports need to be accessible from the client otherwise it won't work at all !
|
||||
* Commands: **5555** (5555)
|
||||
* Subscriber: **5556** (5555 +1)
|
||||
* Publisher: **5557** (5555 +2)
|
||||
* TTL: **5558** (5555 +3)
|
||||
|
||||
Those four ports need to be accessible from the client, otherwise multi-user won't work at all!
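As a minimal sketch, assuming a Linux host protected by ``ufw`` and the default Commands port of 5555, the four ports could be opened with:

.. code-block:: bash

    # open the Commands, Subscriber, Publisher and TTL ports (5555-5558)
    sudo ufw allow 5555:5558/tcp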
|
||||
|
||||
.. _host-cloud:
|
||||
|
||||
-------------------------
|
||||
Cloud Hosting Walkthrough
|
||||
-------------------------
|
||||
|
||||
The following is a walkthrough for how to set up a multi-user dedicated server instance on a cloud hosting provider - in this case, `Google Cloud <https://www.cloud.google.com>`_. Google Cloud is a powerful hosting service with a worldwide network of servers. It offers a free trial which provides free cloud hosting for 90 days, and then a free tier which runs indefinitely thereafter, so long as you stay within the `usage limits <https://cloud.google.com/free/docs/gcp-free-tier#free-tier-usage-limits>`_. Thanks to community member @NotFood for the tip!
|
||||
|
||||
Cloud hosting is a little more complicated to set up, but it can be valuable if you are trying to host a session with multiple friends scattered about planet earth. This can resolve issues with data replication or slowdowns due to poor latency of some users (high ping). This guide may seem technical, but if you follow the steps, you should be able to succeed in hosting an internet server to co-create with other multi-user creators around the world.
|
||||
|
||||
Setup Process
|
||||
-------------
|
||||
|
||||
1. Sign Up for Google Cloud
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Let's start by activating an account with Google Cloud. Go to https://www.cloud.google.com and click 'Get Started For Free'.
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_1.jpg
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Google will ask you to login/signup, and to set up a billing account (Don't worry. It will not be charged unless you explicitly enable billing and then run over your `free credit allowance <https://cloud.google.com/free/docs/gcp-free-tier>`_). You will need to choose a billing country (relevant for `tax purposes <https://cloud.google.com/billing/docs/resources/vat-overview>`_). You will choose your server location at a later step.
|
||||
|
||||
2. Enable Billing and Compute Engine API
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
From here on, we will mostly stick to the instructions provided `here <https://cloud.google.com/compute/docs/quickstart-linux>`_. Nevertheless, the instructions for multi-user specifically are as follows.
|
||||
|
||||
In order to set up a Virtual Machine (VM) to host your server, you will need to enable the billing account which was created during your signup process. From your `console <https://console.cloud.google.com/getting-started>`_, click on 'Go to Checklist' and then 'Create a Billing Account', following the prompts to choose the billing account that was created for you upon signup.
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_2.jpg
|
||||
:align: center
|
||||
:width: 300px
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_3.jpg
|
||||
:align: center
|
||||
:width: 300px
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_4.jpg
|
||||
:align: center
|
||||
:width: 300px
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_5.jpg
|
||||
:align: center
|
||||
:width: 300px
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_6.jpg
|
||||
:align: center
|
||||
:width: 300px
|
||||
|
||||
Now hit 'Set Account', and go back to your `console <https://console.cloud.google.com/getting-started>`_.
|
||||
|
||||
Now enable the Compute Engine API. Click `here <https://console.cloud.google.com/apis/api/compute.googleapis.com/overview>`_ to enable.
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_7.jpg
|
||||
:align: center
|
||||
:width: 300px
|
||||
|
||||
3. Create a Linux Virtual Machine Instance
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Continue following the `instructions <https://cloud.google.com/compute/docs/quickstart-linux#create_a_virtual_machine_instance>`_ to create a VM instance. However, once you've finished step 2 of 'Create a virtual machine instance', use the settings and steps for multi-user as follows.
|
||||
|
||||
.. _server-location:
|
||||
|
||||
3.1 Choose a Server Location
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The most important settings which you will need to choose for your specific case are the server Region and Zone. You must choose a location which will provide the best ping for all of your fellow creators.
|
||||
|
||||
All you need to know is that you'll probably want to choose a location near to where most of your collaborators are located. If your friends are spread out, somewhere in the middle which distributes the ping evenly to all users is best.
|
||||
|
||||
You can use `this map <https://cloud.google.com/about/locations/>`_ to make a rough guess of the best server location, if you know your friends' locations.
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_9.jpg
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
A much better approach is to have your users run a ping test for Google Cloud's servers at https://www.gcping.com/
|
||||
|
||||
Have your collaborators open this webpage from their fastest browser, and press the play button. The play button turns to a stop icon while the ping test is running. When it is complete, the play button returns. You may need to refresh your browser to get this to work. You can replay the test to add more server locations to the scan, and stop when you are satisfied that the results are consistent.
|
||||
|
||||
Now, gather your friends' data, and work down each user's list from the top, until you find the first location which gives roughly the same ping for all users.
|
||||
|
||||
In general, global (using load balancing) will provide the best results but, beyond that, the US Central servers (e.g. Iowa) generally turn out best for a globally distributed group of creators. When in doubt, choose between the servers offered under the `free tier <https://cloud.google.com/free/docs/gcp-free-tier>`_:
|
||||
|
||||
- Oregon: *us-west1*
|
||||
|
||||
- Iowa: *us-central1*
|
||||
|
||||
- South Carolina: *us-east1*
|
||||
|
||||
For the following example, the server which gave the most balanced, and lowest average ping between two friends based in Europe and Australia was in Iowa. Salt Lake City would also be an excellent choice.
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_10.jpg
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Left - European User | Right - Australian User
|
||||
|
||||
Now, input this server location in the 'Region' field for your instance, and leave the default zone which is then populated.
|
||||
|
||||
.. Note:: You can read `here <https://cloud.google.com/solutions/best-practices-compute-engine-region-selection>`_ for a deeper understanding about how to choose a good server location.
|
||||
|
||||
3.2 Configure the VM
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
You can deploy the replication server to your VM in either of the ways mentioned at :ref:`host-dedicated`. That is, you can set it up :ref:`cmd-line` or :ref:`docker`. We will go through both options in this walkthrough. See :ref:`container_v_direct` for more details on how to choose. Deploying a container is the recommended approach.
|
||||
|
||||
.. _cloud-container:
|
||||
|
||||
Option 1 - Deploy a container
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you are familiar with Docker, you'll appreciate that it makes life a little simpler for us. While configuring your instance, you can check **Deploy a container to this VM instance** and copy in the URL of the latest docker image available from the `multi-user container registry <https://gitlab.com/slumber/multi-user/container_registry/1174180>`_ to the *Container image* field, or use the tag ``:latest``
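For example, assuming you want the most recent image, the *Container image* field would simply contain:

.. code-block:: bash

    registry.gitlab.com/slumber/multi-user/multi-user-server:latest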
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_8b.jpg
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Your configuration with Docker should look like this
|
||||
|
||||
Make sure to choose the amount of memory you'd like your server to be able to handle (how much memory does your blender scene require?). In this example, I've chosen 4GB of RAM.
|
||||
|
||||
Click on **Advanced container options** and turn on *Allocate a buffer for STDIN* and *Allocate a pseudo-TTY* just in case you want to run an interactive shell in your container.
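For reference, these two options roughly correspond to the ``-i`` and ``-t`` flags of a plain ``docker run``. A local sketch, reusing the port mapping shown in :ref:`docker`, would be:

.. code-block:: bash

    docker run -it -p 5555-5560:5555-5560 registry.gitlab.com/slumber/multi-user/multi-user-server:latest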
|
||||
|
||||
.. _cloud-optional-parameters:
|
||||
|
||||
Optional server parameters
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The default Docker image essentially runs the equivalent of:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
replication.serve -pwd admin -p 5555 -t 5000 -l DEBUG -lf multiuser_server.log
|
||||
|
||||
This means the server will be launched with 'admin' as the administrator password, run on ports 5555:5558, use a timeout of 5 seconds, verbose 'DEBUG' log level, and with log files written to 'multiuser_server.log'. See :ref:`cmd-line` for a description of optional parameters.
|
||||
|
||||
.. Note:: If you'd like to configure different server options from the default docker configuration, you can insert your options here by expanding 'Advanced container options'
|
||||
|
||||
For example, I would like to launch my server with a different administrator password than the default, my own log filename, and a shorter 3-second (3000ms) timeout. I'll click *Add argument* under **Command arguments** and paste the following command with options into the "command arguments" field:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
replication.serve -pwd supersecretpassword -p 5555 -t 3000 -l DEBUG -lf logname.log
|
||||
|
||||
Now, my configuration should look like this:
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_8c.jpg
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
The rest of the settings are now complete. Hit **Create** and your instance will go live. If you've taken this approach, you're already almost there! Skip to :ref:`cloud-firewall`.
|
||||
|
||||
.. hint:: You can find further information on configuration options `here <https://cloud.google.com/compute/docs/containers/configuring-options-to-run-containers>`_. Also, see these `notes <https://cloud.google.com/compute/docs/containers/deploying-containers?_ga=2.113663175.-1396941296.1606125558#viewing_container_logs>`_ for other options when deploying your server inside a container, including how to access the server's logs.
|
||||
|
||||
.. _cloud-direct:
|
||||
|
||||
Option 2 - Over SSH
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Otherwise, we can run the dedicated server ourselves from the command-line over SSH.
|
||||
|
||||
While creating your instance, keep the default settings mentioned in the `guide <https://cloud.google.com/compute/docs/quickstart-linux#create_a_virtual_machine_instance>`_, however at step 4, choose Debian version 10. Also, there is no need to enable HTTP so skip step 6.
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_8a.jpg
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Your configuration should look like this
|
||||
|
||||
Make sure to choose the amount of memory you'd like your server to be able to handle (how much memory does your blender scene require?). In this example, I've chosen 4GB of RAM.
|
||||
|
||||
Now, finally, click 'Create' to generate your Virtual Machine Instance.
|
||||
|
||||
.. _cloud-firewall:
|
||||
|
||||
4. Setting up Firewall and opening Ports
|
||||
----------------------------------------
|
||||
|
||||
Now that your VM is instanced, you'll need to set up firewall rules, and open the ports required by multi-user. The documentation for VM firewalls on google cloud is `here <https://cloud.google.com/vpc/docs/using-firewalls#listing-rules-vm>`_.
|
||||
|
||||
First, go to the dashboard showing your `VM instances <https://console.cloud.google.com/compute/instances>`_ and note the 'External IP' address for later. This is the address of your server. Then, click 'Set up Firewall Rules'.
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_11.jpg
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Note down your External IP
|
||||
|
||||
Now you will need to create two rules. One to enable communication inbound to your server (ingress), and another to enable outbound communication from your server (egress). Click 'Create Firewall'
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_12.jpg
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Now create a rule exactly as in the image below for the outbound communication (egress).
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_13.jpg
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Egress
|
||||
|
||||
.. Note:: If you set a different port number in :ref:`cloud-optional-parameters`, then use the ports indicated in :ref:`port-setup`
|
||||
|
||||
And another rule exactly as in the image below for the inbound communication (ingress).
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_14.jpg
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Ingress
|
||||
|
||||
Finally, your firewall configuration should look like this.
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_15.jpg
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Final Firewall Configuration
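If you prefer working from the command line, a hedged equivalent using the ``gcloud`` CLI (the rule names are placeholders, and the ports assume the defaults from :ref:`port-setup`) might look like:

.. code-block:: bash

    # allow inbound traffic on the four multi-user ports
    gcloud compute firewall-rules create multiuser-ingress \
        --direction=INGRESS --action=ALLOW \
        --rules=tcp:5555-5558 --source-ranges=0.0.0.0/0

    # allow outbound traffic on the same ports
    gcloud compute firewall-rules create multiuser-egress \
        --direction=EGRESS --action=ALLOW \
        --rules=tcp:5555-5558 --destination-ranges=0.0.0.0/0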
|
||||
|
||||
|
||||
5. Install Replication Server into Virtual Machine
|
||||
--------------------------------------------------
|
||||
|
||||
.. Note:: Skip to :ref:`initialise-server` if you've opted to launch the server by deploying a container. Your server is already live!
|
||||
|
||||
Now that we have set up our Virtual Machine instance, we can SSH into it, and install the Replication Server. Open the `VM Instances console <https://console.cloud.google.com/compute/instances>`_ once more, and SSH into your instance. It's easiest to use the browser terminal provided by Google Cloud (I had the best luck using the Google Chrome browser), but you can also see `here <https://cloud.google.com/compute/docs/instances/connecting-advanced#thirdpartytools>`_ for how to set up your instance for SSH access from your terminal.
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_16.jpg
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Now, a terminal window should pop up in a new browser window looking something like this:
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_17.jpg
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
Remember, you had set up the VM with Debian 10. This comes with Python 3.7.3 already installed. The only dependency missing is to set up pip3. So, run:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
sudo apt install python3-pip
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_18.jpg
|
||||
:align: center
|
||||
:width: 450px
|
||||
|
||||
And now let's install the latest version of replication:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
sudo pip3 install replication==0.1.13
|
||||
|
||||
6. Launch Replication Server on VM Instance
|
||||
-------------------------------------------
|
||||
|
||||
We're finally ready to launch the server. Simply run:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
replication.serve -p 5555 -pwd admin -t 5000 -l INFO -lf server.log
|
||||
|
||||
See :ref:`cmd-line` for a description of optional parameters.
|
||||
|
||||
And your replication server is live! It should stay running in the terminal window until you close it. Copy the external IP that you noted down earlier, available `here <https://console.cloud.google.com/networking/addresses/list>`_ and now you can open Blender and connect to your server!
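Note that the server stops when its terminal closes. If you would like it to keep running after you disconnect from SSH, one hedged option is to launch it with ``nohup`` (or inside a terminal multiplexer such as ``tmux``):

.. code-block:: bash

    nohup replication.serve -p 5555 -pwd admin -t 5000 -l INFO -lf server.log &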
|
||||
|
||||
.. _initialise-server:
|
||||
|
||||
7. Initialise your Server in Blender
|
||||
------------------------------------
|
||||
|
||||
Once in Blender, make sure your multi-user addon is updated to the latest version (see :ref:`update-version`). Then, follow the instructions from :ref:`how-to-join` and connect as an admin user, using the password you launched the server with. Input your external IP, and make sure you're set to JOIN the server. Then, click CONNECT.
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_19.jpg
|
||||
:align: center
|
||||
:width: 200px
|
||||
|
||||
Now as the admin user, you can choose whether to initialise the server with a preloaded scene, or an empty scene
|
||||
|
||||
.. figure:: img/hosting_guide_gcloud_20.jpg
|
||||
:align: center
|
||||
:width: 200px
|
||||
|
||||
Press okay, and now your session is live!
|
||||
|
||||
If you made it this far, congratulations! You can now go ahead and share the external IP address with your friends and co-creators and have fun with real-time collaboration in Blender!
|
||||
|
||||
Hopefully, your cloud server setup has improved your group's overall ping readings, and you're in for a smooth and trouble-free co-creation session.
|
||||
|
||||
.. Note:: If you should so desire, pay attention to your credit and follow the steps `here <https://cloud.google.com/compute/docs/quickstart-linux#clean-up>`_ to close your instance at your discretion.
|
||||
|
||||
.. _container_v_direct:
|
||||
|
||||
Should I deploy a Docker Container or launch a server from Linux VM command-line?
|
||||
-------------------------------------------------------------------------------------
|
||||
|
||||
- Directly from Linux VM - This approach gives you easier control over your session. However, your server may time out once your SSH link to the server is interrupted (for example, if the admin's computer goes to sleep).
|
||||
- Deploy a Docker Container - This is the recommended approach. This approach is better for leaving a session running without supervision. It can however be more complicated to manage. Use this approach if you'd like a consistent experience with others in the multi-user community, pulling from the most up-to-date docker image maintained by @swann in the multi-user container registry.
|
BIN
docs/tutorials/img/hosting_guide_gcloud_1.jpg
Normal file
After Width: | Height: | Size: 757 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_10.jpg
Normal file
After Width: | Height: | Size: 214 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_11.jpg
Normal file
After Width: | Height: | Size: 249 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_12.jpg
Normal file
After Width: | Height: | Size: 88 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_13.jpg
Normal file
After Width: | Height: | Size: 116 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_14.jpg
Normal file
After Width: | Height: | Size: 97 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_15.jpg
Normal file
After Width: | Height: | Size: 230 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_16.jpg
Normal file
After Width: | Height: | Size: 136 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_17.jpg
Normal file
After Width: | Height: | Size: 687 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_18.jpg
Normal file
After Width: | Height: | Size: 635 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_19.jpg
Normal file
After Width: | Height: | Size: 51 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_2.jpg
Normal file
After Width: | Height: | Size: 204 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_20.jpg
Normal file
After Width: | Height: | Size: 60 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_3.jpg
Normal file
After Width: | Height: | Size: 153 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_4.jpg
Normal file
After Width: | Height: | Size: 67 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_5.jpg
Normal file
After Width: | Height: | Size: 104 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_6.jpg
Normal file
After Width: | Height: | Size: 65 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_7.jpg
Normal file
After Width: | Height: | Size: 59 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_8a.jpg
Normal file
After Width: | Height: | Size: 109 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_8b.jpg
Normal file
After Width: | Height: | Size: 252 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_8c.jpg
Normal file
After Width: | Height: | Size: 262 KiB |
BIN
docs/tutorials/img/hosting_guide_gcloud_9.jpg
Normal file
After Width: | Height: | Size: 233 KiB |
@ -21,11 +21,11 @@ In order to help with the testing, you have several possibilities:
|
||||
- Test `development branch <https://gitlab.com/slumber/multi-user/-/branches>`_
|
||||
|
||||
--------------------------
|
||||
Filling an issue on Gitlab
|
||||
Filing an issue on Gitlab
|
||||
--------------------------
|
||||
|
||||
The `gitlab issue tracker <https://gitlab.com/slumber/multi-user/issues>`_ is used for bug reports and enhancement suggestions.
|
||||
You will need a Gitlab account to be able to open a new issue there and click on "New issue" button.
|
||||
You will need a Gitlab account to open a new issue there by clicking the "New issue" button in the main multi-user project.
|
||||
|
||||
Here is some useful information you should provide in a bug report:
|
||||
|
||||
@ -35,8 +35,77 @@ Here are some useful information you should provide in a bug report:
|
||||
Contributing code
|
||||
=================
|
||||
|
||||
1. Fork it (https://gitlab.com/yourname/yourproject/fork)
|
||||
2. Create your feature branch (git checkout -b feature/fooBar)
|
||||
3. Commit your changes (git commit -am 'Add some fooBar')
|
||||
4. Push to the branch (git push origin feature/fooBar)
|
||||
5. Create a new Pull Request
|
||||
In general, this project follows the `Gitflow Workflow <https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow>`_. It may help to understand that there are three different repositories - the upstream (main multi-user project repository, designated in git by 'upstream'), remote (forked repository, designated in git by 'origin'), and the local repository on your machine.
|
||||
The following example suggests how to contribute a feature.
|
||||
|
||||
1. Fork the project into a new repository:
|
||||
https://gitlab.com/yourname/multi-user
|
||||
|
||||
2. Clone the new repository locally:
|
||||
.. code-block:: bash
|
||||
|
||||
git clone https://gitlab.com/yourname/multi-user.git
|
||||
|
||||
3. Keep your fork in sync with the main repository by setting up the upstream pointer once. cd into your git repo and then run:
|
||||
.. code-block:: bash
|
||||
|
||||
git remote add upstream https://gitlab.com/slumber/multi-user.git
|
||||
|
||||
4. Now, locally check out the develop branch, upon which to base your new feature branch:
|
||||
.. code-block:: bash
|
||||
|
||||
git checkout develop
|
||||
|
||||
5. Fetch any changes from the main upstream repository into your fork (especially if some time has passed since forking):
|
||||
.. code-block:: bash
|
||||
|
||||
git fetch upstream
|
||||
|
||||
'Fetch' downloads objects and refs from the repository, but doesn’t apply them to the branch we are working on. We want to apply the updates to the branch we will work from, which we checked out in step 4.
|
||||
|
||||
6. Let's merge any recent changes from the remote upstream (original repository's) 'develop' branch into our local 'develop' branch:
|
||||
.. code-block:: bash
|
||||
|
||||
git merge upstream/develop
|
||||
|
||||
7. Update your forked repository's remote 'develop' branch with the fetched changes, just to keep things tidy. Make sure you haven't committed any local changes in the interim:
|
||||
.. code-block:: bash
|
||||
|
||||
git push origin develop
|
||||
|
||||
8. Locally create your own new feature branch from the develop branch, using the syntax:
|
||||
.. code-block:: bash
|
||||
|
||||
git checkout -b feature/yourfeaturename
|
||||
|
||||
...where 'feature/' designates a feature branch, and 'yourfeaturename' is a name of your choosing
|
||||
|
||||
9. Add and commit your changes, including a commit message:
|
||||
.. code-block:: bash
|
||||
|
||||
git commit -am 'Add fooBar'
|
||||
|
||||
10. Push committed changes to the remote copy of your new feature branch which will be created in this step:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
git push -u origin feature/yourfeaturename
|
||||
|
||||
If it's been some time since performing steps 4 through 7, make sure to checkout 'develop' again and pull the latest changes from upstream before checking out and creating feature/yourfeaturename and pushing changes. Alternatively, checkout 'feature/yourfeaturename' and simply run:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
git rebase upstream/develop
|
||||
|
||||
and your staged commits will be merged along with the changes. More information on `rebasing here <https://git-scm.com/book/en/v2/Git-Branching-Rebasing>`_
|
||||
|
||||
.. Hint:: The ``-u`` option sets up your locally created new branch to follow a remote branch, which is now created with the same name on your remote repository.
|
||||
|
||||
11. Finally, create a new Pull/Merge Request on Gitlab to merge the remote version of this new branch with committed updates, back into the upstream 'develop' branch, finalising the integration of the new feature.
|
||||
Make sure to set the target branch to 'develop' for features and 'master' for hotfixes. Also, include any milestones or labels, and assignees that may be relevant. By default, the Merge option to 'delete source branch when merge request is activated' will be checked.
|
||||
|
||||
12. Thanks for contributing!
|
||||
|
||||
.. Note:: For hotfixes, replace 'feature/' with 'hotfix/' and base the new branch off the parent 'master' branch instead of 'develop' branch. Make sure to checkout 'master' before running step 8
|
||||
.. Note:: Let's follow the Atlassian `Gitflow Workflow <https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow>`_, except for one main difference - submitting a pull request rather than merging by ourselves.
|
||||
.. Note:: See `here <https://philna.sh/blog/2018/08/21/git-commands-to-keep-a-fork-up-to-date/>`_ or `here <https://stefanbauer.me/articles/how-to-keep-your-git-fork-up-to-date>`_ for instructions on how to keep a fork up to date.
|
@ -19,7 +19,7 @@
|
||||
bl_info = {
|
||||
"name": "Multi-User",
|
||||
"author": "Swann Martinez",
|
||||
"version": (0, 1, 0),
|
||||
"version": (0, 4, 0),
|
||||
"description": "Enable real-time collaborative workflow inside blender",
|
||||
"blender": (2, 82, 0),
|
||||
"location": "3D View > Sidebar > Multi-User tab",
|
||||
@ -40,16 +40,13 @@ import sys
|
||||
import bpy
|
||||
from bpy.app.handlers import persistent
|
||||
|
||||
from . import environment, utils
|
||||
|
||||
|
||||
DEPENDENCIES = {
|
||||
("replication", '0.0.21a15'),
|
||||
}
|
||||
from . import environment
|
||||
|
||||
|
||||
module_error_msg = "Insufficient rights to install the multi-user \
|
||||
dependencies, launch blender with administrator rights."
|
||||
|
||||
|
||||
def register():
|
||||
# Setup logging policy
|
||||
logging.basicConfig(
|
||||
@ -58,11 +55,13 @@ def register():
|
||||
level=logging.INFO)
|
||||
|
||||
try:
|
||||
environment.setup(DEPENDENCIES, bpy.app.binary_path_python)
|
||||
environment.register()
|
||||
|
||||
from . import presence
|
||||
from . import operators
|
||||
from . import handlers
|
||||
from . import ui
|
||||
from . import icons
|
||||
from . import preferences
|
||||
from . import addon_updater_ops
|
||||
|
||||
@ -70,7 +69,9 @@ def register():
|
||||
addon_updater_ops.register(bl_info)
|
||||
presence.register()
|
||||
operators.register()
|
||||
handlers.register()
|
||||
ui.register()
|
||||
icons.register()
|
||||
except ModuleNotFoundError as e:
|
||||
raise Exception(module_error_msg)
|
||||
logging.error(module_error_msg)
|
||||
@ -84,17 +85,28 @@ def register():
|
||||
type=preferences.SessionUser
|
||||
)
|
||||
bpy.types.WindowManager.user_index = bpy.props.IntProperty()
|
||||
bpy.types.WindowManager.server_index = bpy.props.IntProperty()
|
||||
bpy.types.TOPBAR_MT_file_import.append(operators.menu_func_import)
|
||||
bpy.types.TOPBAR_MT_file_export.append(operators.menu_func_export)
|
||||
|
||||
|
||||
def unregister():
|
||||
from . import presence
|
||||
from . import operators
|
||||
from . import handlers
|
||||
from . import ui
|
||||
from . import icons
|
||||
from . import preferences
|
||||
from . import addon_updater_ops
|
||||
|
||||
bpy.types.TOPBAR_MT_file_import.remove(operators.menu_func_import)
|
||||
bpy.types.TOPBAR_MT_file_export.remove(operators.menu_func_export)
|
||||
|
||||
presence.unregister()
|
||||
addon_updater_ops.unregister()
|
||||
ui.unregister()
|
||||
icons.unregister()
|
||||
handlers.unregister()
|
||||
operators.unregister()
|
||||
preferences.unregister()
|
||||
|
||||
@ -102,3 +114,6 @@ def unregister():
|
||||
del bpy.types.ID.uuid
|
||||
del bpy.types.WindowManager.online_users
|
||||
del bpy.types.WindowManager.user_index
|
||||
del bpy.types.WindowManager.server_index
|
||||
|
||||
environment.unregister()
|
||||
|
@ -1688,10 +1688,7 @@ class GitlabEngine(object):
|
||||
# Could clash with tag names and if it does, it will
|
||||
# download TAG zip instead of branch zip to get
|
||||
# direct path, would need.
|
||||
return "{}{}{}".format(
|
||||
self.form_repo_url(updater),
|
||||
"/repository/archive.zip?sha=",
|
||||
branch)
|
||||
return f"https://gitlab.com/slumber/multi-user/-/jobs/artifacts/{branch}/download?job=build"
|
||||
|
||||
def get_zip_url(self, sha, updater):
|
||||
return "{base}/repository/archive.zip?sha={sha}".format(
|
||||
|
@ -122,13 +122,13 @@ class addon_updater_install_popup(bpy.types.Operator):
|
||||
# if true, run clean install - ie remove all files before adding new
|
||||
# equivalent to deleting the addon and reinstalling, except the
|
||||
# updater folder/backup folder remains
|
||||
clean_install = bpy.props.BoolProperty(
|
||||
clean_install: bpy.props.BoolProperty(
|
||||
name="Clean install",
|
||||
description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
|
||||
default=False,
|
||||
options={'HIDDEN'}
|
||||
)
|
||||
ignore_enum = bpy.props.EnumProperty(
|
||||
ignore_enum: bpy.props.EnumProperty(
|
||||
name="Process update",
|
||||
description="Decide to install, ignore, or defer new addon update",
|
||||
items=[
|
||||
@ -264,7 +264,7 @@ class addon_updater_update_now(bpy.types.Operator):
|
||||
# if true, run clean install - ie remove all files before adding new
|
||||
# equivalent to deleting the addon and reinstalling, except the
|
||||
# updater folder/backup folder remains
|
||||
clean_install = bpy.props.BoolProperty(
|
||||
clean_install: bpy.props.BoolProperty(
|
||||
name="Clean install",
|
||||
description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
|
||||
default=False,
|
||||
@ -332,7 +332,7 @@ class addon_updater_update_target(bpy.types.Operator):
|
||||
i+=1
|
||||
return ret
|
||||
|
||||
target = bpy.props.EnumProperty(
|
||||
target: bpy.props.EnumProperty(
|
||||
name="Target version to install",
|
||||
description="Select the version to install",
|
||||
items=target_version
|
||||
@ -341,7 +341,7 @@ class addon_updater_update_target(bpy.types.Operator):
|
||||
# if true, run clean install - ie remove all files before adding new
|
||||
# equivalent to deleting the addon and reinstalling, except the
|
||||
# updater folder/backup folder remains
|
||||
clean_install = bpy.props.BoolProperty(
|
||||
clean_install: bpy.props.BoolProperty(
|
||||
name="Clean install",
|
||||
description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
|
||||
default=False,
|
||||
@ -399,7 +399,7 @@ class addon_updater_install_manually(bpy.types.Operator):
|
||||
bl_description = "Proceed to manually install update"
|
||||
bl_options = {'REGISTER', 'INTERNAL'}
|
||||
|
||||
error = bpy.props.StringProperty(
|
||||
error: bpy.props.StringProperty(
|
||||
name="Error Occurred",
|
||||
default="",
|
||||
options={'HIDDEN'}
|
||||
@ -461,7 +461,7 @@ class addon_updater_updated_successful(bpy.types.Operator):
|
||||
bl_description = "Update installation response"
|
||||
bl_options = {'REGISTER', 'INTERNAL', 'UNDO'}
|
||||
|
||||
error = bpy.props.StringProperty(
|
||||
error: bpy.props.StringProperty(
|
||||
name="Error Occurred",
|
||||
default="",
|
||||
options={'HIDDEN'}
|
||||
|
@@ -15,6 +15,7 @@
#
# ##### END GPL LICENSE BLOCK #####

import bpy

__all__ = [
'bl_object',
@@ -27,7 +28,6 @@ __all__ = [
'bl_light',
'bl_scene',
'bl_material',
'bl_library',
'bl_armature',
'bl_action',
'bl_world',
@@ -37,12 +37,28 @@ __all__ = [
'bl_speaker',
'bl_font',
'bl_sound',
'bl_file'
'bl_file',
'bl_node_group',
'bl_texture',
"bl_particle",
] # Order here defines execution order

if bpy.app.version >= (2,91,0):
    __all__.append('bl_volume')

from . import *
from replication.data import ReplicatedDataFactory

def types_to_register():
    return __all__

from replication.protocol import DataTranslationProtocol

def get_data_translation_protocol() -> DataTranslationProtocol:
    """ Return a data translation protocol from implemented bpy types
    """
    bpy_protocol = DataTranslationProtocol()
    for module_name in __all__:
        impl = globals().get(module_name)
        if impl and hasattr(impl, "_type") and hasattr(impl, "_class"):
            bpy_protocol.register_implementation(impl._type, impl._class)
    return bpy_protocol
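For context, the loop above expects every module listed in `__all__` to expose a module-level `_type` (a single Blender type, or a list of types, as the BlCurve and BlFile diffs further down show) and `_class` (its `ReplicatedDatablock` implementation). A minimal sketch of that registry pattern, independent of the real `replication` API — the dict and helpers below are illustrative assumptions, not library code:

```python
# Illustrative registry, not the replication library's implementation.
implementations = {}

def register_implementation(bl_type, impl_class):
    # _type may be a single type or a list of types (see BlFile / BlCurve below).
    types = bl_type if isinstance(bl_type, (list, tuple)) else [bl_type]
    for t in types:
        implementations[t] = impl_class

def find_implementation(datablock):
    # Exact type match first, then an isinstance fallback for subclasses.
    impl = implementations.get(type(datablock))
    if impl is None:
        for t, candidate in implementations.items():
            if isinstance(datablock, t):
                return candidate
    return impl
```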
|
||||
|
@@ -25,8 +25,8 @@ from enum import Enum
from .. import utils
from .dump_anything import (
    Dumper, Loader, np_dump_collection, np_load_collection, remove_items_from_dict)
from .bl_datablock import BlDatablock

from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid

KEYFRAME = [
'amplitude',
@@ -41,8 +41,68 @@ KEYFRAME = [
'interpolation',
]

def has_action(datablock):
    """ Check if the datablock has actions
    """
    return (hasattr(datablock, 'animation_data')
            and datablock.animation_data
            and datablock.animation_data.action)

def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy:bool =True) -> dict:

def has_driver(datablock):
    """ Check if the datablock is driven
    """
    return (hasattr(datablock, 'animation_data')
            and datablock.animation_data
            and datablock.animation_data.drivers)


def dump_driver(driver):
    dumper = Dumper()
    dumper.depth = 6
    data = dumper.dump(driver)

    return data
def load_driver(target_datablock, src_driver):
|
||||
loader = Loader()
|
||||
drivers = target_datablock.animation_data.drivers
|
||||
src_driver_data = src_driver['driver']
|
||||
new_driver = drivers.new(src_driver['data_path'], index=src_driver['array_index'])
|
||||
|
||||
# Settings
|
||||
new_driver.driver.type = src_driver_data['type']
|
||||
new_driver.driver.expression = src_driver_data['expression']
|
||||
loader.load(new_driver, src_driver)
|
||||
|
||||
# Variables
|
||||
for src_variable in src_driver_data['variables']:
|
||||
src_var_data = src_driver_data['variables'][src_variable]
|
||||
new_var = new_driver.driver.variables.new()
|
||||
new_var.name = src_var_data['name']
|
||||
new_var.type = src_var_data['type']
|
||||
|
||||
for src_target in src_var_data['targets']:
|
||||
src_target_data = src_var_data['targets'][src_target]
|
||||
src_id = src_target_data.get('id')
|
||||
if src_id:
|
||||
new_var.targets[src_target].id = utils.resolve_from_id(src_target_data['id'], src_target_data['id_type'])
|
||||
loader.load(new_var.targets[src_target], src_target_data)
|
||||
|
||||
# Fcurve
|
||||
new_fcurve = new_driver.keyframe_points
|
||||
for p in reversed(new_fcurve):
|
||||
new_fcurve.remove(p, fast=True)
|
||||
|
||||
new_fcurve.add(len(src_driver['keyframe_points']))
|
||||
|
||||
for index, src_point in enumerate(src_driver['keyframe_points']):
|
||||
new_point = new_fcurve[index]
|
||||
loader.load(new_point, src_driver['keyframe_points'][src_point])
|
||||
|
||||
|
||||
def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
    """ Dump a single curve to a dict
|
||||
|
||||
:arg fcurve: fcurve to dump
|
||||
@ -59,9 +119,8 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy:bool =True) -> dict:
|
||||
|
||||
if use_numpy:
|
||||
points = fcurve.keyframe_points
|
||||
fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)
|
||||
fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)
|
||||
fcurve_data['keyframe_points'] = np_dump_collection(points, KEYFRAME)
|
||||
|
||||
else: # Legacy method
|
||||
dumper = Dumper()
|
||||
fcurve_data["keyframe_points"] = []
|
||||
@ -71,6 +130,18 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy:bool =True) -> dict:
|
||||
dumper.dump(k)
|
||||
)
|
||||
|
||||
if fcurve.modifiers:
    dumper = Dumper()
    dumper.exclude_filter = [
        'is_valid',
        'active'
    ]
    dumped_modifiers = []
    for modifier in fcurve.modifiers:
        dumped_modifiers.append(dumper.dump(modifier))

    fcurve_data['modifiers'] = dumped_modifiers

return fcurve_data
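`np_dump_collection` and `np_load_collection` (imported from `dump_anything` at the top of this file) bulk-copy the listed attributes of a property collection, which is why `KEYFRAME` is just a list of attribute names. A rough sketch of the underlying idea for a single scalar float attribute, assuming the real helpers also handle vector attributes and other dtypes:

```python
import numpy as np

def np_dump_scalar(collection, attr):
    buf = np.empty(len(collection), dtype=np.float64)
    collection.foreach_get(attr, buf)   # bulk read instead of a Python loop
    return buf

def np_load_scalar(values, collection, attr):
    collection.foreach_set(attr, np.asarray(values, dtype=np.float64))  # bulk write back
```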
|
||||
|
||||
|
||||
@ -83,7 +154,7 @@ def load_fcurve(fcurve_data, fcurve):
|
||||
:type fcurve: bpy.types.FCurve
|
||||
"""
|
||||
use_numpy = fcurve_data.get('use_numpy')
|
||||
|
||||
loader = Loader()
|
||||
keyframe_points = fcurve.keyframe_points
|
||||
|
||||
# Remove all keyframe points
|
||||
@ -92,7 +163,8 @@ def load_fcurve(fcurve_data, fcurve):
|
||||
|
||||
if use_numpy:
|
||||
keyframe_points.add(fcurve_data['keyframes_count'])
|
||||
np_load_collection(fcurve_data["keyframe_points"], keyframe_points, KEYFRAME)
|
||||
np_load_collection(
|
||||
fcurve_data["keyframe_points"], keyframe_points, KEYFRAME)
|
||||
|
||||
else:
|
||||
# paste dumped keyframes
|
||||
@ -127,35 +199,102 @@ def load_fcurve(fcurve_data, fcurve):
|
||||
|
||||
fcurve.update()
|
||||
|
||||
dumped_fcurve_modifiers = fcurve_data.get('modifiers', None)
|
||||
|
||||
if dumped_fcurve_modifiers:
|
||||
# clear modifiers
|
||||
for fmod in fcurve.modifiers:
|
||||
fcurve.modifiers.remove(fmod)
|
||||
|
||||
# Load each modifier in order
|
||||
for modifier_data in dumped_fcurve_modifiers:
|
||||
modifier = fcurve.modifiers.new(modifier_data['type'])
|
||||
|
||||
loader.load(modifier, modifier_data)
|
||||
elif fcurve.modifiers:
|
||||
for fmod in fcurve.modifiers:
|
||||
fcurve.modifiers.remove(fmod)
|
||||
|
||||
|
||||
def dump_animation_data(datablock):
|
||||
animation_data = {}
|
||||
if has_action(datablock):
|
||||
animation_data['action'] = datablock.animation_data.action.uuid
|
||||
if has_driver(datablock):
|
||||
animation_data['drivers'] = []
|
||||
for driver in datablock.animation_data.drivers:
|
||||
animation_data['drivers'].append(dump_driver(driver))
|
||||
|
||||
return animation_data
|
||||
|
||||
|
||||
def load_animation_data(animation_data, datablock):
|
||||
# Load animation data
|
||||
if animation_data:
|
||||
if datablock.animation_data is None:
|
||||
datablock.animation_data_create()
|
||||
|
||||
for d in datablock.animation_data.drivers:
|
||||
datablock.animation_data.drivers.remove(d)
|
||||
|
||||
if 'drivers' in animation_data:
|
||||
for driver in animation_data['drivers']:
|
||||
load_driver(datablock, driver)
|
||||
|
||||
action = animation_data.get('action')
|
||||
if action:
|
||||
action = resolve_datablock_from_uuid(action, bpy.data.actions)
|
||||
datablock.animation_data.action = action
|
||||
elif datablock.animation_data.action:
|
||||
datablock.animation_data.action = None
|
||||
|
||||
# Remove existing animation data if there is nothing more to load
|
||||
elif hasattr(datablock, 'animation_data') and datablock.animation_data:
|
||||
datablock.animation_data_clear()
|
||||
|
||||
|
||||
def resolve_animation_dependencies(datablock):
    if has_action(datablock):
        return [datablock.animation_data.action]
    else:
        return []
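Taken together, `dump_animation_data`, `load_animation_data` and `resolve_animation_dependencies` reduce a datablock's animation state to a plain dict (the action's `uuid` plus a list of dumped drivers). A hedged usage sketch, assuming two datablocks whose actions carry the add-on's custom `uuid` property so the lookup can succeed:

```python
# Illustrative round-trip of the helpers above (not project code).
anim = dump_animation_data(src)              # e.g. {'action': '<uuid>', 'drivers': [...]}
load_animation_data(anim, dst)               # recreates drivers and relinks the action by uuid
deps = resolve_animation_dependencies(src)   # [bpy.types.Action] when an action is assigned, else []
```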
|
||||
|
||||
|
||||
class BlAction(ReplicatedDatablock):
|
||||
use_delta = True
|
||||
|
||||
class BlAction(BlDatablock):
|
||||
bl_id = "actions"
|
||||
bl_class = bpy.types.Action
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'ACTION_TWEAK'
|
||||
bl_reload_parent = False
|
||||
|
||||
def _construct(self, data):
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.actions.new(data["name"])
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
for dumped_fcurve in data["fcurves"]:
|
||||
dumped_data_path = dumped_fcurve["data_path"]
|
||||
dumped_array_index = dumped_fcurve["dumped_array_index"]
|
||||
|
||||
# create fcurve if needed
|
||||
fcurve = target.fcurves.find(
|
||||
fcurve = datablock.fcurves.find(
|
||||
dumped_data_path, index=dumped_array_index)
|
||||
if fcurve is None:
|
||||
fcurve = target.fcurves.new(
|
||||
fcurve = datablock.fcurves.new(
|
||||
dumped_data_path, index=dumped_array_index)
|
||||
|
||||
load_fcurve(dumped_fcurve, fcurve)
|
||||
target.id_root = data['id_root']
|
||||
|
||||
def _dump_implementation(self, data, instance=None):
|
||||
id_root = data.get('id_root')
|
||||
|
||||
if id_root:
|
||||
datablock.id_root = id_root
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
dumper = Dumper()
|
||||
dumper.exclude_filter = [
|
||||
'name_full',
|
||||
@ -170,11 +309,23 @@ class BlAction(BlDatablock):
|
||||
'users'
|
||||
]
|
||||
dumper.depth = 1
|
||||
data = dumper.dump(instance)
|
||||
data = dumper.dump(datablock)
|
||||
|
||||
data["fcurves"] = []
|
||||
|
||||
for fcurve in instance.fcurves:
|
||||
for fcurve in datablock.fcurves:
|
||||
data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True))
|
||||
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.actions)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
return []
|
||||
|
||||
_type = bpy.types.Action
|
||||
_class = BlAction
|
||||
|
@ -22,32 +22,45 @@ import mathutils
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .. import presence, operators, utils
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
def get_roll(bone: bpy.types.Bone) -> float:
    """ Compute the actual roll of a bone

    :arg bone: target bone
    :type bone: bpy.types.Bone
    :return: float
    """
    return bone.AxisRollFromMatrix(bone.matrix_local.to_3x3())[1]
|
||||
|
||||
|
||||
class BlArmature(BlDatablock):
|
||||
class BlArmature(ReplicatedDatablock):
|
||||
use_delta = True
|
||||
|
||||
bl_id = "armatures"
|
||||
bl_class = bpy.types.Armature
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 0
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'ARMATURE_DATA'
|
||||
|
||||
def _construct(self, data):
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.armatures.new(data["name"])
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
# Load parent object
|
||||
parent_object = utils.find_from_attr(
|
||||
'uuid',
|
||||
data['user'],
|
||||
bpy.data.objects
|
||||
)
|
||||
|
||||
)
|
||||
|
||||
if parent_object is None:
|
||||
parent_object = bpy.data.objects.new(
|
||||
data['user_name'], target)
|
||||
data['user_name'], datablock)
|
||||
parent_object.uuid = data['user']
|
||||
|
||||
is_object_in_master = (
|
||||
@ -82,10 +95,10 @@ class BlArmature(BlDatablock):
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
for bone in data['bones']:
|
||||
if bone not in target.edit_bones:
|
||||
new_bone = target.edit_bones.new(bone)
|
||||
if bone not in datablock.edit_bones:
|
||||
new_bone = datablock.edit_bones.new(bone)
|
||||
else:
|
||||
new_bone = target.edit_bones[bone]
|
||||
new_bone = datablock.edit_bones[bone]
|
||||
|
||||
bone_data = data['bones'].get(bone)
|
||||
|
||||
@ -93,16 +106,16 @@ class BlArmature(BlDatablock):
|
||||
new_bone.head = bone_data['head_local']
|
||||
new_bone.tail_radius = bone_data['tail_radius']
|
||||
new_bone.head_radius = bone_data['head_radius']
|
||||
# new_bone.roll = bone_data['roll']
|
||||
|
||||
new_bone.roll = bone_data['roll']
|
||||
|
||||
if 'parent' in bone_data:
|
||||
new_bone.parent = target.edit_bones[data['bones']
|
||||
[bone]['parent']]
|
||||
new_bone.parent = datablock.edit_bones[data['bones']
|
||||
[bone]['parent']]
|
||||
new_bone.use_connect = bone_data['use_connect']
|
||||
|
||||
loader = Loader()
|
||||
loader.load(new_bone, bone_data)
|
||||
|
||||
|
||||
if bpy.context.mode != 'OBJECT':
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
bpy.context.view_layer.objects.active = current_active_object
|
||||
@ -111,9 +124,10 @@ class BlArmature(BlDatablock):
|
||||
if 'EDIT' in current_mode:
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
def _dump_implementation(self, data, instance=None):
|
||||
assert(instance)
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
dumper = Dumper()
|
||||
dumper.depth = 4
|
||||
dumper.include_filter = [
|
||||
@ -126,16 +140,15 @@ class BlArmature(BlDatablock):
|
||||
'parent',
|
||||
'name',
|
||||
'layers',
|
||||
# 'roll',
|
||||
|
||||
]
|
||||
data = dumper.dump(instance)
|
||||
data = dumper.dump(datablock)
|
||||
|
||||
for bone in instance.bones:
|
||||
for bone in datablock.bones:
|
||||
if bone.parent:
|
||||
data['bones'][bone.name]['parent'] = bone.parent.name
|
||||
# get the parent Object
|
||||
object_users = utils.get_datablock_users(instance)[0]
|
||||
# TODO: Use id_data instead
|
||||
object_users = utils.get_datablock_users(datablock)[0]
|
||||
data['user'] = object_users.uuid
|
||||
data['user_name'] = object_users.name
|
||||
|
||||
@ -145,6 +158,26 @@ class BlArmature(BlDatablock):
|
||||
item.name for item in container_users if isinstance(item, bpy.types.Collection)]
|
||||
data['user_scene'] = [
|
||||
item.name for item in container_users if isinstance(item, bpy.types.Scene)]
|
||||
|
||||
for bone in datablock.bones:
|
||||
data['bones'][bone.name]['roll'] = get_roll(bone)
|
||||
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
name = data.get('name')
|
||||
datablock = resolve_datablock_from_uuid(uuid, bpy.data.armatures)
|
||||
if datablock is None:
|
||||
datablock = bpy.data.armatures.get(name)
|
||||
|
||||
return datablock
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
return resolve_animation_dependencies(datablock)
|
||||
|
||||
_type = bpy.types.Armature
|
||||
_class = BlArmature
|
@ -20,46 +20,58 @@ import bpy
|
||||
import mathutils
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
|
||||
class BlCamera(BlDatablock):
|
||||
class BlCamera(ReplicatedDatablock):
|
||||
use_delta = True
|
||||
|
||||
bl_id = "cameras"
|
||||
bl_class = bpy.types.Camera
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'CAMERA_DATA'
|
||||
bl_reload_parent = False
|
||||
|
||||
def _construct(self, data):
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.cameras.new(data["name"])
|
||||
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
|
||||
dof_settings = data.get('dof')
|
||||
|
||||
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
# DOF settings
|
||||
if dof_settings:
|
||||
loader.load(target.dof, dof_settings)
|
||||
loader.load(datablock.dof, dof_settings)
|
||||
|
||||
background_images = data.get('background_images')
|
||||
|
||||
datablock.background_images.clear()
|
||||
# TODO: Use image uuid
|
||||
if background_images:
|
||||
target.background_images.clear()
|
||||
for img_name, img_data in background_images.items():
|
||||
target_img = target.background_images.new()
|
||||
target_img.image = bpy.data.images[img_name]
|
||||
loader.load(target_img, img_data)
|
||||
img_id = img_data.get('image')
|
||||
if img_id:
|
||||
target_img = datablock.background_images.new()
|
||||
target_img.image = bpy.data.images[img_id]
|
||||
loader.load(target_img, img_data)
|
||||
|
||||
def _dump_implementation(self, data, instance=None):
|
||||
assert(instance)
|
||||
img_user = img_data.get('image_user')
|
||||
if img_user:
|
||||
loader.load(target_img.image_user, img_user)
|
||||
|
||||
# TODO: background image support
|
||||
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
dumper = Dumper()
|
||||
dumper.depth = 3
|
||||
dumper.include_filter = [
|
||||
@ -100,14 +112,37 @@ class BlCamera(BlDatablock):
|
||||
'scale',
|
||||
'use_flip_x',
|
||||
'use_flip_y',
|
||||
'image'
|
||||
'image_user',
|
||||
'image',
|
||||
'frame_duration',
|
||||
'frame_start',
|
||||
'frame_offset',
|
||||
'use_cyclic',
|
||||
'use_auto_refresh'
|
||||
]
|
||||
return dumper.dump(instance)
|
||||
|
||||
def _resolve_deps_implementation(self):
|
||||
data = dumper.dump(datablock)
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
|
||||
for index, image in enumerate(datablock.background_images):
|
||||
if image.image_user:
|
||||
data['background_images'][index]['image_user'] = dumper.dump(image.image_user)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.cameras)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
for background in self.instance.background_images:
|
||||
for background in datablock.background_images:
|
||||
if background.image:
|
||||
deps.append(background.image)
|
||||
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
_type = bpy.types.Camera
|
||||
_class = BlCamera
|
||||
|
@ -19,10 +19,12 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
from .dump_anything import Loader, Dumper
|
||||
from deepdiff import DeepDiff, Delta
|
||||
|
||||
from .. import utils
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
|
||||
def dump_collection_children(collection):
|
||||
collection_children = []
|
||||
@ -71,64 +73,92 @@ def load_collection_childrens(dumped_childrens, collection):
|
||||
if child_collection.uuid not in dumped_childrens:
|
||||
collection.children.unlink(child_collection)
|
||||
|
||||
def resolve_collection_dependencies(collection):
|
||||
deps = []
|
||||
|
||||
class BlCollection(BlDatablock):
|
||||
for child in collection.children:
|
||||
deps.append(child)
|
||||
for object in collection.objects:
|
||||
deps.append(object)
|
||||
|
||||
return deps
|
||||
|
||||
class BlCollection(ReplicatedDatablock):
|
||||
bl_id = "collections"
|
||||
bl_icon = 'FILE_FOLDER'
|
||||
bl_class = bpy.types.Collection
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = True
|
||||
|
||||
def _construct(self, data):
|
||||
if self.is_library:
|
||||
with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
|
||||
targetData.collections = [
|
||||
name for name in sourceData.collections if name == self.data['name']]
|
||||
bl_reload_parent = False
|
||||
|
||||
instance = bpy.data.collections[self.data['name']]
|
||||
|
||||
return instance
|
||||
use_delta = True
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
instance = bpy.data.collections.new(data["name"])
|
||||
return instance
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
|
||||
# Objects
|
||||
load_collection_objects(data['objects'], target)
|
||||
load_collection_objects(data['objects'], datablock)
|
||||
|
||||
# Link childrens
|
||||
load_collection_childrens(data['children'], target)
|
||||
load_collection_childrens(data['children'], datablock)
|
||||
|
||||
def _dump_implementation(self, data, instance=None):
|
||||
assert(instance)
|
||||
# FIXME: Find a better way after the big replication refactoring
|
||||
# Keep other user from deleting collection object by flushing their history
|
||||
utils.flush_history()
|
||||
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
dumper = Dumper()
|
||||
dumper.depth = 1
|
||||
dumper.include_filter = [
|
||||
"name",
|
||||
"instance_offset"
|
||||
]
|
||||
data = dumper.dump(instance)
|
||||
data = dumper.dump(datablock)
|
||||
|
||||
# dump objects
|
||||
data['objects'] = dump_collection_objects(instance)
|
||||
data['objects'] = dump_collection_objects(datablock)
|
||||
|
||||
# dump children collections
|
||||
data['children'] = dump_collection_children(instance)
|
||||
data['children'] = dump_collection_children(datablock)
|
||||
|
||||
return data
|
||||
|
||||
def _resolve_deps_implementation(self):
|
||||
deps = []
|
||||
|
||||
for child in self.instance.children:
|
||||
deps.append(child)
|
||||
for object in self.instance.objects:
|
||||
deps.append(object)
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.collections)
|
||||
|
||||
return deps
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
return resolve_collection_dependencies(datablock)
|
||||
|
||||
@staticmethod
def compute_delta(last_data: dict, current_data: dict) -> Delta:
    diff_params = {
        'ignore_order': True,
        'report_repetition': True
    }
    delta_params = {
        # 'mutate': True
    }

    return Delta(
        DeepDiff(last_data,
                 current_data,
                 cache_size=5000,
                 **diff_params),
        **delta_params)

_type = bpy.types.Collection
_class = BlCollection
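`compute_delta` wraps a `DeepDiff` of the last and current dumps in a `Delta`, which the replication layer can later apply to rebuild the new state instead of resending the whole dump. A small self-contained sketch of that round-trip on plain dicts (illustrative data, not Blender datablocks):

```python
from deepdiff import DeepDiff, Delta

last = {'name': 'Collection', 'objects': ['Cube']}
curr = {'name': 'Collection', 'objects': ['Cube', 'Light']}

delta = Delta(DeepDiff(last, curr, ignore_order=True, report_repetition=True))
print(last + delta == curr)   # True: applying the delta reproduces the new dump
```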
|
@ -21,11 +21,14 @@ import bpy.types as T
|
||||
import mathutils
|
||||
import logging
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
from ..utils import get_preferences
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .dump_anything import (Dumper, Loader,
|
||||
np_load_collection,
|
||||
np_dump_collection)
|
||||
np_load_collection,
|
||||
np_dump_collection)
|
||||
from .bl_material import dump_materials_slots, load_materials_slots
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
|
||||
SPLINE_BEZIER_POINT = [
|
||||
@ -68,8 +71,6 @@ CURVE_METADATA = [
|
||||
'font_bold',
|
||||
'font_bold_italic',
|
||||
'font_italic',
|
||||
'make_local',
|
||||
'materials',
|
||||
'name',
|
||||
'offset',
|
||||
'offset_x',
|
||||
@ -79,7 +80,6 @@ CURVE_METADATA = [
|
||||
'override_create',
|
||||
'override_library',
|
||||
'path_duration',
|
||||
'preview',
|
||||
'render_resolution_u',
|
||||
'render_resolution_v',
|
||||
'resolution_u',
|
||||
@ -113,8 +113,6 @@ CURVE_METADATA = [
|
||||
]
|
||||
|
||||
|
||||
|
||||
|
||||
SPLINE_METADATA = [
|
||||
'hide',
|
||||
'material_index',
|
||||
@ -138,57 +136,59 @@ SPLINE_METADATA = [
|
||||
]
|
||||
|
||||
|
||||
class BlCurve(BlDatablock):
|
||||
class BlCurve(ReplicatedDatablock):
|
||||
use_delta = True
|
||||
|
||||
bl_id = "curves"
|
||||
bl_class = bpy.types.Curve
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'CURVE_DATA'
|
||||
bl_reload_parent = False
|
||||
|
||||
def _construct(self, data):
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.curves.new(data["name"], data["type"])
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
target.splines.clear()
|
||||
loader = Loader()
|
||||
loader.load(datablock, data)
|
||||
|
||||
datablock.splines.clear()
|
||||
|
||||
# load splines
|
||||
for spline in data['splines'].values():
|
||||
new_spline = target.splines.new(spline['type'])
|
||||
|
||||
new_spline = datablock.splines.new(spline['type'])
|
||||
|
||||
# Load curve geometry data
|
||||
if new_spline.type == 'BEZIER':
|
||||
bezier_points = new_spline.bezier_points
|
||||
bezier_points = new_spline.bezier_points
|
||||
bezier_points.add(spline['bezier_points_count'])
|
||||
np_load_collection(spline['bezier_points'], bezier_points, SPLINE_BEZIER_POINT)
|
||||
|
||||
if new_spline.type == 'POLY':
|
||||
points = new_spline.points
|
||||
np_load_collection(
|
||||
spline['bezier_points'],
|
||||
bezier_points,
|
||||
SPLINE_BEZIER_POINT)
|
||||
|
||||
if new_spline.type in ['POLY', 'NURBS']:
|
||||
points = new_spline.points
|
||||
points.add(spline['points_count'])
|
||||
np_load_collection(spline['points'], points, SPLINE_POINT)
|
||||
# Not working for now...
|
||||
# See https://blender.stackexchange.com/questions/7020/create-nurbs-surface-with-python
|
||||
if new_spline.type == 'NURBS':
|
||||
logging.error("NURBS not supported.")
|
||||
# new_spline.points.add(len(data['splines'][spline]["points"])-1)
|
||||
# for point_index in data['splines'][spline]["points"]:
|
||||
# loader.load(
|
||||
# new_spline.points[point_index], data['splines'][spline]["points"][point_index])
|
||||
|
||||
loader.load(new_spline, spline)
|
||||
|
||||
def _dump_implementation(self, data, instance=None):
|
||||
assert(instance)
|
||||
|
||||
# MATERIAL SLOTS
|
||||
src_materials = data.get('materials', None)
|
||||
if src_materials:
|
||||
load_materials_slots(src_materials, datablock.materials)
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
dumper = Dumper()
|
||||
# Conflicting attributes
|
||||
# TODO: remove them with the NURBS support
|
||||
dumper.include_filter = CURVE_METADATA
|
||||
|
||||
dumper.exclude_filter = [
|
||||
'users',
|
||||
'order_u',
|
||||
@ -197,38 +197,50 @@ class BlCurve(BlDatablock):
|
||||
'point_count_u',
|
||||
'active_textbox'
|
||||
]
|
||||
if instance.use_auto_texspace:
|
||||
if datablock.use_auto_texspace:
|
||||
dumper.exclude_filter.extend([
|
||||
'texspace_location',
|
||||
'texspace_size'])
|
||||
data = dumper.dump(instance)
|
||||
data = dumper.dump(datablock)
|
||||
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
data['splines'] = {}
|
||||
|
||||
for index, spline in enumerate(instance.splines):
|
||||
for index, spline in enumerate(datablock.splines):
|
||||
dumper.depth = 2
|
||||
dumper.include_filter = SPLINE_METADATA
|
||||
spline_data = dumper.dump(spline)
|
||||
|
||||
if spline.type == 'POLY':
|
||||
spline_data['points_count'] = len(spline.points)-1
|
||||
spline_data['points'] = np_dump_collection(spline.points, SPLINE_POINT)
|
||||
spline_data['points_count'] = len(spline.points)-1
|
||||
spline_data['points'] = np_dump_collection(
|
||||
spline.points, SPLINE_POINT)
|
||||
|
||||
spline_data['bezier_points_count'] = len(spline.bezier_points)-1
|
||||
spline_data['bezier_points'] = np_dump_collection(spline.bezier_points, SPLINE_BEZIER_POINT)
|
||||
spline_data['bezier_points'] = np_dump_collection(
|
||||
spline.bezier_points, SPLINE_BEZIER_POINT)
|
||||
data['splines'][index] = spline_data
|
||||
|
||||
if isinstance(instance, T.SurfaceCurve):
|
||||
if isinstance(datablock, T.SurfaceCurve):
|
||||
data['type'] = 'SURFACE'
|
||||
elif isinstance(instance, T.TextCurve):
|
||||
elif isinstance(datablock, T.TextCurve):
|
||||
data['type'] = 'FONT'
|
||||
elif isinstance(instance, T.Curve):
|
||||
elif isinstance(datablock, T.Curve):
|
||||
data['type'] = 'CURVE'
|
||||
|
||||
data['materials'] = dump_materials_slots(datablock.materials)
|
||||
|
||||
return data
|
||||
|
||||
def _resolve_deps_implementation(self):
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.curves)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
# TODO: resolve material
|
||||
deps = []
|
||||
curve = self.instance
|
||||
curve = datablock
|
||||
|
||||
if isinstance(curve, T.TextCurve):
|
||||
deps.extend([
|
||||
@ -236,5 +248,20 @@ class BlCurve(BlDatablock):
|
||||
curve.font_bold,
|
||||
curve.font_bold_italic,
|
||||
curve.font_italic])
|
||||
|
||||
return deps
|
||||
|
||||
for material in datablock.materials:
|
||||
if material:
|
||||
deps.append(material)
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
@staticmethod
|
||||
def needs_update(datablock: object, data: dict) -> bool:
|
||||
return 'EDIT' not in bpy.context.mode \
|
||||
or get_preferences().sync_flags.sync_during_editmode
|
||||
|
||||
|
||||
_type = [bpy.types.Curve, bpy.types.TextCurve]
|
||||
_class = BlCurve
|
||||
|
@ -21,78 +21,15 @@ from collections.abc import Iterable
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
from replication.constants import DIFF_BINARY, UP
|
||||
from replication.data import ReplicatedDatablock
|
||||
from replication.constants import DIFF_BINARY, DIFF_JSON, UP
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
|
||||
from .. import utils
|
||||
from .dump_anything import Dumper, Loader
|
||||
|
||||
|
||||
def has_action(target):
|
||||
""" Check if the target datablock has actions
|
||||
"""
|
||||
return (hasattr(target, 'animation_data')
|
||||
and target.animation_data
|
||||
and target.animation_data.action)
|
||||
|
||||
|
||||
def has_driver(target):
|
||||
""" Check if the target datablock is driven
|
||||
"""
|
||||
return (hasattr(target, 'animation_data')
|
||||
and target.animation_data
|
||||
and target.animation_data.drivers)
|
||||
|
||||
|
||||
def dump_driver(driver):
|
||||
dumper = Dumper()
|
||||
dumper.depth = 6
|
||||
data = dumper.dump(driver)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def load_driver(target_datablock, src_driver):
|
||||
loader = Loader()
|
||||
drivers = target_datablock.animation_data.drivers
|
||||
src_driver_data = src_driver['driver']
|
||||
new_driver = drivers.new(src_driver['data_path'])
|
||||
|
||||
# Settings
|
||||
new_driver.driver.type = src_driver_data['type']
|
||||
new_driver.driver.expression = src_driver_data['expression']
|
||||
loader.load(new_driver, src_driver)
|
||||
|
||||
# Variables
|
||||
for src_variable in src_driver_data['variables']:
|
||||
src_var_data = src_driver_data['variables'][src_variable]
|
||||
new_var = new_driver.driver.variables.new()
|
||||
new_var.name = src_var_data['name']
|
||||
new_var.type = src_var_data['type']
|
||||
|
||||
for src_target in src_var_data['targets']:
|
||||
src_target_data = src_var_data['targets'][src_target]
|
||||
new_var.targets[src_target].id = utils.resolve_from_id(
|
||||
src_target_data['id'], src_target_data['id_type'])
|
||||
loader.load(
|
||||
new_var.targets[src_target], src_target_data)
|
||||
|
||||
# Fcurve
|
||||
new_fcurve = new_driver.keyframe_points
|
||||
for p in reversed(new_fcurve):
|
||||
new_fcurve.remove(p, fast=True)
|
||||
|
||||
new_fcurve.add(len(src_driver['keyframe_points']))
|
||||
|
||||
for index, src_point in enumerate(src_driver['keyframe_points']):
|
||||
new_point = new_fcurve[index]
|
||||
loader.load(new_point, src_driver['keyframe_points'][src_point])
|
||||
|
||||
|
||||
def get_datablock_from_uuid(uuid, default, ignore=[]):
|
||||
if not uuid:
|
||||
return default
|
||||
|
||||
for category in dir(bpy.data):
|
||||
root = getattr(bpy.data, category)
|
||||
if isinstance(root, Iterable) and category not in ignore:
|
||||
@ -101,127 +38,8 @@ def get_datablock_from_uuid(uuid, default, ignore=[]):
|
||||
return item
|
||||
return default
|
||||
|
||||
|
||||
class BlDatablock(ReplicatedDatablock):
|
||||
"""BlDatablock
|
||||
|
||||
bl_id : blender internal storage identifier
|
||||
bl_class : blender internal type
|
||||
bl_delay_refresh : refresh rate in second for observers
|
||||
bl_delay_apply : refresh rate in sec for apply
|
||||
bl_automatic_push : boolean
|
||||
bl_icon : type icon (blender icon name)
|
||||
bl_check_common: enable check even in common rights
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
instance = kwargs.get('instance', None)
|
||||
|
||||
self.preferences = utils.get_preferences()
|
||||
|
||||
# TODO: use is_library_indirect
|
||||
self.is_library = (instance and hasattr(instance, 'library') and
|
||||
instance.library) or \
|
||||
(self.data and 'library' in self.data)
|
||||
|
||||
if instance and hasattr(instance, 'uuid'):
|
||||
instance.uuid = self.uuid
|
||||
|
||||
self.diff_method = DIFF_BINARY
|
||||
|
||||
def resolve(self):
|
||||
datablock_ref = None
|
||||
datablock_root = getattr(bpy.data, self.bl_id)
|
||||
datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root)
|
||||
|
||||
if not datablock_ref:
|
||||
try:
|
||||
datablock_ref = datablock_root[self.data['name']]
|
||||
except Exception:
|
||||
name = self.data.get('name')
|
||||
logging.debug(f"Constructing {name}")
|
||||
datablock_ref = self._construct(data=self.data)
|
||||
|
||||
if datablock_ref:
|
||||
setattr(datablock_ref, 'uuid', self.uuid)
|
||||
|
||||
self.instance = datablock_ref
|
||||
|
||||
def remove_instance(self):
|
||||
"""
|
||||
Remove instance from blender data
|
||||
"""
|
||||
assert(self.instance)
|
||||
|
||||
datablock_root = getattr(bpy.data, self.bl_id)
|
||||
datablock_root.remove(self.instance)
|
||||
|
||||
def _dump(self, instance=None):
|
||||
dumper = Dumper()
|
||||
data = {}
|
||||
# Dump animation data
|
||||
if has_action(instance):
|
||||
dumper = Dumper()
|
||||
dumper.include_filter = ['action']
|
||||
data['animation_data'] = dumper.dump(instance.animation_data)
|
||||
|
||||
if has_driver(instance):
|
||||
dumped_drivers = {'animation_data': {'drivers': []}}
|
||||
for driver in instance.animation_data.drivers:
|
||||
dumped_drivers['animation_data']['drivers'].append(
|
||||
dump_driver(driver))
|
||||
|
||||
data.update(dumped_drivers)
|
||||
|
||||
if self.is_library:
|
||||
data.update(dumper.dump(instance))
|
||||
else:
|
||||
data.update(self._dump_implementation(data, instance=instance))
|
||||
|
||||
return data
|
||||
|
||||
def _dump_implementation(self, data, target):
|
||||
raise NotImplementedError
|
||||
|
||||
def _load(self, data, target):
|
||||
# Load animation data
|
||||
if 'animation_data' in data.keys():
|
||||
if target.animation_data is None:
|
||||
target.animation_data_create()
|
||||
|
||||
for d in target.animation_data.drivers:
|
||||
target.animation_data.drivers.remove(d)
|
||||
|
||||
if 'drivers' in data['animation_data']:
|
||||
for driver in data['animation_data']['drivers']:
|
||||
load_driver(target, driver)
|
||||
|
||||
if 'action' in data['animation_data']:
|
||||
target.animation_data.action = bpy.data.actions[data['animation_data']['action']]
|
||||
|
||||
if self.is_library:
|
||||
return
|
||||
else:
|
||||
self._load_implementation(data, target)
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
raise NotImplementedError
|
||||
|
||||
def resolve_deps(self):
|
||||
dependencies = []
|
||||
|
||||
if has_action(self.instance):
|
||||
dependencies.append(self.instance.animation_data.action)
|
||||
|
||||
if not self.is_library:
|
||||
dependencies.extend(self._resolve_deps_implementation())
|
||||
|
||||
logging.debug(f"{self.instance.name} dependencies: {dependencies}")
|
||||
return dependencies
|
||||
|
||||
def _resolve_deps_implementation(self):
|
||||
return []
|
||||
|
||||
def is_valid(self):
|
||||
return getattr(bpy.data, self.bl_id).get(self.data['name'])
|
||||
def resolve_datablock_from_uuid(uuid, bpy_collection):
    for item in bpy_collection:
        if getattr(item, 'uuid', None) == uuid:
            return item
    return None
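Both `get_datablock_from_uuid` and the slimmer `resolve_datablock_from_uuid` above assume datablocks were tagged through the custom `uuid` property registered on `bpy.types.ID` (see the `__init__` diff at the top of this range). A hedged sketch of the tag-then-resolve flow, with the helper name being an illustrative assumption:

```python
import uuid as uuid_lib
import bpy

def ensure_uuid(datablock):
    # Assumes the add-on registered bpy.types.ID.uuid as a StringProperty.
    if not datablock.uuid:
        datablock.uuid = str(uuid_lib.uuid4())
    return datablock.uuid

cam = bpy.data.cameras.new("replicated_cam")
key = ensure_uuid(cam)
assert resolve_datablock_from_uuid(key, bpy.data.cameras) is cam
```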
|
||||
|
@ -19,14 +19,15 @@
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from pathlib import Path, WindowsPath, PosixPath
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
from replication.constants import DIFF_BINARY, UP
|
||||
from replication.data import ReplicatedDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
|
||||
from .. import utils
|
||||
from ..utils import get_preferences
|
||||
from .dump_anything import Dumper, Loader
|
||||
|
||||
|
||||
@ -54,37 +55,20 @@ class BlFile(ReplicatedDatablock):
|
||||
bl_id = 'file'
|
||||
bl_name = "file"
|
||||
bl_class = Path
|
||||
bl_delay_refresh = 0
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'FILE'
|
||||
bl_reload_parent = True
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.instance = kwargs.get('instance', None)
|
||||
|
||||
if self.instance and not self.instance.exists():
|
||||
raise FileNotFoundError(self.instance)
|
||||
|
||||
self.preferences = utils.get_preferences()
|
||||
self.diff_method = DIFF_BINARY
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return Path(get_filepath(data['name']))
|
||||
|
||||
def resolve(self):
|
||||
if self.data:
|
||||
self.instance = Path(get_filepath(self.data['name']))
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
return Path(get_filepath(data['name']))
|
||||
|
||||
if not self.instance.exists():
|
||||
logging.debug("File doesn't exist, loading it.")
|
||||
self._load(self.data, self.instance)
|
||||
|
||||
def push(self, socket, identity=None):
|
||||
super().push(socket, identity=None)
|
||||
|
||||
if self.preferences.clear_memory_filecache:
|
||||
del self.data['file']
|
||||
|
||||
def _dump(self, instance=None):
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
"""
|
||||
Read the file and return a dict as:
|
||||
{
|
||||
@ -96,45 +80,62 @@ class BlFile(ReplicatedDatablock):
|
||||
logging.info(f"Extracting file metadata")
|
||||
|
||||
data = {
|
||||
'name': self.instance.name,
|
||||
'name': datablock.name,
|
||||
}
|
||||
|
||||
logging.info(
|
||||
f"Reading {self.instance.name} content: {self.instance.stat().st_size} bytes")
|
||||
logging.info(f"Reading {datablock.name} content: {datablock.stat().st_size} bytes")
|
||||
|
||||
try:
|
||||
file = open(self.instance, "rb")
|
||||
file = open(datablock, "rb")
|
||||
data['file'] = file.read()
|
||||
|
||||
file.close()
|
||||
except IOError:
|
||||
logging.warning(f"{self.instance} doesn't exist, skipping")
|
||||
logging.warning(f"{datablock} doesn't exist, skipping")
|
||||
else:
|
||||
file.close()
|
||||
|
||||
return data
|
||||
|
||||
def _load(self, data, target):
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
"""
|
||||
Writing the file
|
||||
"""
|
||||
# TODO: check for empty data
|
||||
|
||||
if target.exists() and not self.diff():
|
||||
logging.info(f"{data['name']} already on the disk, skipping.")
|
||||
return
|
||||
try:
|
||||
file = open(target, "wb")
|
||||
file = open(datablock, "wb")
|
||||
file.write(data['file'])
|
||||
|
||||
if self.preferences.clear_memory_filecache:
|
||||
del self.data['file']
|
||||
if get_preferences().clear_memory_filecache:
|
||||
del data['file']
|
||||
except IOError:
|
||||
logging.warning(f"{target} doesn't exist, skipping")
|
||||
logging.warning(f"{datablock} doesn't exist, skipping")
|
||||
else:
|
||||
file.close()
|
||||
|
||||
def diff(self):
|
||||
memory_size = sys.getsizeof(self.data['file'])-33
|
||||
disk_size = self.instance.stat().st_size
|
||||
return memory_size == disk_size
|
||||
@staticmethod
def resolve_deps(datablock: object) -> [object]:
    return []

@staticmethod
def needs_update(datablock: object, data: dict) -> bool:
    if get_preferences().clear_memory_filecache:
        return False
    else:
        if not datablock:
            return None

        if not data:
            return True

        memory_size = sys.getsizeof(data['file']) - 33
        disk_size = datablock.stat().st_size

        if memory_size != disk_size:
            return True
        else:
            return False

_type = [WindowsPath, PosixPath]
_class = BlFile
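`needs_update` above decides whether to re-push a file by comparing sizes only: `sys.getsizeof(data['file']) - 33` subtracts CPython's `bytes`-object overhead (33 bytes on a 64-bit build) to approximate the payload length, then compares it to the size on disk. A sketch of the same check without the magic number, using `len()` on the payload directly (an assumption about intent, not the project's code):

```python
import sys
from pathlib import Path

def file_matches_disk(raw: bytes, path: Path) -> bool:
    # len(raw) is the payload length; sys.getsizeof(raw) would also count the
    # bytes-object header, which is what the -33 above compensates for.
    return path.exists() and len(raw) == path.stat().st_size
```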
|
@ -22,21 +22,20 @@ from pathlib import Path
|
||||
|
||||
import bpy
|
||||
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_file import get_filepath, ensure_unpacked
|
||||
from .dump_anything import Dumper, Loader
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
|
||||
|
||||
class BlFont(BlDatablock):
|
||||
class BlFont(ReplicatedDatablock):
|
||||
bl_id = "fonts"
|
||||
bl_class = bpy.types.VectorFont
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'FILE_FONT'
|
||||
bl_reload_parent = False
|
||||
|
||||
def _construct(self, data):
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
filename = data.get('filename')
|
||||
|
||||
if filename == '<builtin>':
|
||||
@ -44,31 +43,43 @@ class BlFont(BlDatablock):
|
||||
else:
|
||||
return bpy.data.fonts.load(get_filepath(filename))
|
||||
|
||||
def _load(self, data, target):
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
pass
|
||||
|
||||
def _dump(self, instance=None):
|
||||
if instance.filepath == '<builtin>':
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
if datablock.filepath == '<builtin>':
|
||||
filename = '<builtin>'
|
||||
else:
|
||||
filename = Path(instance.filepath).name
|
||||
filename = Path(datablock.filepath).name
|
||||
|
||||
if not filename:
|
||||
raise FileExistsError(instance.filepath)
|
||||
raise FileExistsError(datablock.filepath)
|
||||
|
||||
return {
|
||||
'filename': filename,
|
||||
'name': instance.name
|
||||
'name': datablock.name
|
||||
}
|
||||
|
||||
def diff(self):
|
||||
return False
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.fonts)
|
||||
|
||||
def _resolve_deps_implementation(self):
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
if self.instance.filepath and self.instance.filepath != '<builtin>':
|
||||
ensure_unpacked(self.instance)
|
||||
if datablock.filepath and datablock.filepath != '<builtin>':
|
||||
ensure_unpacked(datablock)
|
||||
|
||||
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
|
||||
deps.append(Path(bpy.path.abspath(datablock.filepath)))
|
||||
|
||||
return deps
|
||||
|
||||
@staticmethod
|
||||
def needs_update(datablock: object, data:dict)-> bool:
|
||||
return False
|
||||
|
||||
_type = bpy.types.VectorFont
|
||||
_class = BlFont
|
@ -24,10 +24,12 @@ from .dump_anything import (Dumper,
|
||||
Loader,
|
||||
np_dump_collection,
|
||||
np_load_collection)
|
||||
from .bl_datablock import BlDatablock
|
||||
|
||||
# GPencil data api is structured as it follow:
|
||||
# GP-Object --> GP-Layers --> GP-Frames --> GP-Strokes --> GP-Stroke-Points
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
from ..utils import get_preferences
|
||||
from ..timers import is_annotating
|
||||
from .bl_material import load_materials_slots, dump_materials_slots
|
||||
|
||||
STROKE_POINT = [
|
||||
'co',
|
||||
@ -38,7 +40,25 @@ STROKE_POINT = [
|
||||
|
||||
]
|
||||
|
||||
if bpy.app.version[1] >= 83:
STROKE = [
    "aspect",
    "display_mode",
    "end_cap_mode",
    "hardness",
    "line_width",
    "material_index",
    "start_cap_mode",
    "uv_rotation",
    "uv_scale",
    "uv_translation",
    "vertex_color_fill",
]
if bpy.app.version >= (2,91,0):
    STROKE.append('use_cyclic')
else:
    STROKE.append('draw_cyclic')

if bpy.app.version >= (2,83,0):
    STROKE_POINT.append('vertex_color')
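One remark on the two styles of version gate above: `bpy.app.version[1] >= 83` inspects only the minor number, so it would misbehave once the major version changes, while the tuple comparisons such as `bpy.app.version >= (2, 91, 0)` stay correct. A tiny illustration with assumed values:

```python
# bpy.app.version is a (major, minor, patch) tuple, e.g. (2, 91, 0).
version = (3, 0, 0)
print(version >= (2, 83, 0))   # True  - tuple comparison handles a major bump
print(version[1] >= 83)        # False - minor-only check breaks for 3.x
```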
|
||||
|
||||
def dump_stroke(stroke):
|
||||
@ -46,36 +66,9 @@ def dump_stroke(stroke):
|
||||
|
||||
:param stroke: target grease pencil stroke
|
||||
:type stroke: bpy.types.GPencilStroke
|
||||
:return: dict
|
||||
:return: (p_count, p_data)
|
||||
"""
|
||||
|
||||
assert(stroke)
|
||||
|
||||
dumper = Dumper()
|
||||
dumper.include_filter = [
|
||||
"aspect",
|
||||
"display_mode",
|
||||
"draw_cyclic",
|
||||
"end_cap_mode",
|
||||
"hardeness",
|
||||
"line_width",
|
||||
"material_index",
|
||||
"start_cap_mode",
|
||||
"uv_rotation",
|
||||
"uv_scale",
|
||||
"uv_translation",
|
||||
"vertex_color_fill",
|
||||
]
|
||||
dumped_stroke = dumper.dump(stroke)
|
||||
|
||||
# Stroke points
|
||||
p_count = len(stroke.points)
|
||||
dumped_stroke['p_count'] = p_count
|
||||
dumped_stroke['points'] = np_dump_collection(stroke.points, STROKE_POINT)
|
||||
|
||||
# TODO: uv_factor, uv_rotation
|
||||
|
||||
return dumped_stroke
|
||||
return (len(stroke.points), np_dump_collection(stroke.points, STROKE_POINT))
|
||||
|
||||
|
||||
def load_stroke(stroke_data, stroke):
|
||||
@ -88,12 +81,12 @@ def load_stroke(stroke_data, stroke):
|
||||
"""
|
||||
assert(stroke and stroke_data)
|
||||
|
||||
loader = Loader()
|
||||
loader.load(stroke, stroke_data)
|
||||
stroke.points.add(stroke_data[0])
|
||||
np_load_collection(stroke_data[1], stroke.points, STROKE_POINT)
|
||||
|
||||
stroke.points.add(stroke_data["p_count"])
|
||||
|
||||
np_load_collection(stroke_data['points'], stroke.points, STROKE_POINT)
|
||||
# HACK: Temporary fix to trigger a BKE_gpencil_stroke_geometry_update to
|
||||
# fix fill issues
|
||||
stroke.uv_scale = 1.0
|
||||
|
||||
|
||||
def dump_frame(frame):
|
||||
@ -108,11 +101,11 @@ def dump_frame(frame):
|
||||
|
||||
dumped_frame = dict()
|
||||
dumped_frame['frame_number'] = frame.frame_number
|
||||
dumped_frame['strokes'] = []
|
||||
|
||||
# TODO: take existing strokes into account
|
||||
dumped_frame['strokes'] = np_dump_collection(frame.strokes, STROKE)
|
||||
dumped_frame['strokes_points'] = []
|
||||
|
||||
for stroke in frame.strokes:
|
||||
dumped_frame['strokes'].append(dump_stroke(stroke))
|
||||
dumped_frame['strokes_points'].append(dump_stroke(stroke))
|
||||
|
||||
return dumped_frame
|
||||
|
||||
@ -128,14 +121,14 @@ def load_frame(frame_data, frame):
|
||||
|
||||
assert(frame and frame_data)
|
||||
|
||||
# frame.frame_number = frame_data['frame_number']
|
||||
|
||||
# TODO: take existing strokes into account
|
||||
|
||||
for stroke_data in frame_data['strokes']:
|
||||
# Load stroke points
|
||||
for stroke_data in frame_data['strokes_points']:
|
||||
target_stroke = frame.strokes.new()
|
||||
load_stroke(stroke_data, target_stroke)
|
||||
|
||||
# Load stroke metadata
|
||||
np_load_collection(frame_data['strokes'], frame.strokes, STROKE)
|
||||
|
||||
|
||||
def dump_layer(layer):
|
||||
""" Dump a grease pencil layer
|
||||
@ -153,7 +146,6 @@ def dump_layer(layer):
|
||||
'opacity',
|
||||
'channel_color',
|
||||
'color',
|
||||
# 'thickness', #TODO: enabling only for annotation
|
||||
'tint_color',
|
||||
'tint_factor',
|
||||
'vertex_paint_opacity',
|
||||
@ -170,7 +162,7 @@ def dump_layer(layer):
|
||||
'hide',
|
||||
'annotation_hide',
|
||||
'lock',
|
||||
# 'lock_frame',
|
||||
'lock_frame',
|
||||
# 'lock_material',
|
||||
# 'use_mask_layer',
|
||||
'use_lights',
|
||||
@ -178,18 +170,22 @@ def dump_layer(layer):
|
||||
'select',
|
||||
'show_points',
|
||||
'show_in_front',
|
||||
# 'thickness'
|
||||
# 'parent',
|
||||
# 'parent_type',
|
||||
# 'parent_bone',
|
||||
# 'matrix_inverse',
|
||||
]
|
||||
if layer.thickness != 0:
|
||||
dumper.include_filter.append('thickness')
|
||||
|
||||
dumped_layer = dumper.dump(layer)
|
||||
|
||||
dumped_layer['frames'] = []
|
||||
|
||||
for frame in layer.frames:
|
||||
dumped_layer['frames'].append(dump_frame(frame))
|
||||
|
||||
|
||||
return dumped_layer
|
||||
|
||||
|
||||
@ -211,73 +207,99 @@ def load_layer(layer_data, layer):
|
||||
load_frame(frame_data, target_frame)
|
||||
|
||||
|
||||
def layer_changed(datablock: object, data: dict) -> bool:
|
||||
if datablock.layers.active and \
|
||||
datablock.layers.active.info != data["active_layers"]:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
class BlGpencil(BlDatablock):
|
||||
|
||||
def frame_changed(data: dict) -> bool:
|
||||
return bpy.context.scene.frame_current != data["eval_frame"]
|
||||
|
||||
class BlGpencil(ReplicatedDatablock):
|
||||
bl_id = "grease_pencils"
|
||||
bl_class = bpy.types.GreasePencil
|
||||
bl_delay_refresh = 2
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'GREASEPENCIL'
|
||||
bl_reload_parent = False
|
||||
|
||||
def _construct(self, data):
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.grease_pencils.new(data["name"])
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
target.materials.clear()
|
||||
if "materials" in data.keys():
|
||||
for mat in data['materials']:
|
||||
target.materials.append(bpy.data.materials[mat])
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
# MATERIAL SLOTS
|
||||
src_materials = data.get('materials', None)
|
||||
if src_materials:
|
||||
load_materials_slots(src_materials, datablock.materials)
|
||||
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
|
||||
loader.load(datablock, data)
|
||||
|
||||
# TODO: reuse existing layer
|
||||
for layer in target.layers:
|
||||
target.layers.remove(layer)
|
||||
for layer in datablock.layers:
|
||||
datablock.layers.remove(layer)
|
||||
|
||||
if "layers" in data.keys():
|
||||
for layer in data["layers"]:
|
||||
layer_data = data["layers"].get(layer)
|
||||
|
||||
# if layer not in target.layers.keys():
|
||||
target_layer = target.layers.new(data["layers"][layer]["info"])
|
||||
# if layer not in datablock.layers.keys():
|
||||
target_layer = datablock.layers.new(data["layers"][layer]["info"])
|
||||
# else:
|
||||
# target_layer = target.layers[layer]
|
||||
# target_layer.clear()
|
||||
|
||||
load_layer(layer_data, target_layer)
|
||||
|
||||
|
||||
|
||||
datablock.layers.update()
|
||||
|
||||
|
||||
def _dump_implementation(self, data, instance=None):
|
||||
assert(instance)
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
dumper = Dumper()
|
||||
dumper.depth = 2
|
||||
dumper.include_filter = [
|
||||
'materials',
|
||||
'name',
|
||||
'zdepth_offset',
|
||||
'stroke_thickness_space',
|
||||
'pixel_factor',
|
||||
'stroke_depth_order'
|
||||
]
|
||||
data = dumper.dump(instance)
|
||||
|
||||
data = dumper.dump(datablock)
|
||||
data['materials'] = dump_materials_slots(datablock.materials)
|
||||
data['layers'] = {}
|
||||
|
||||
for layer in instance.layers:
|
||||
|
||||
for layer in datablock.layers:
|
||||
data['layers'][layer.info] = dump_layer(layer)
|
||||
|
||||
data["active_layers"] = datablock.layers.active.info if datablock.layers.active else "None"
|
||||
data["eval_frame"] = bpy.context.scene.frame_current
|
||||
return data
|
||||
|
||||
def _resolve_deps_implementation(self):
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.grease_pencils)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
|
||||
for material in self.instance.materials:
|
||||
for material in datablock.materials:
|
||||
deps.append(material)
|
||||
|
||||
return deps
|
||||
|
||||
@staticmethod
|
||||
def needs_update(datablock: object, data: dict) -> bool:
|
||||
return bpy.context.mode == 'OBJECT' \
|
||||
or layer_changed(datablock, data) \
|
||||
or frame_changed(data) \
|
||||
or get_preferences().sync_flags.sync_during_editmode \
|
||||
or is_annotating(bpy.context)
|
||||
|
||||
_type = bpy.types.GreasePencil
|
||||
_class = BlGpencil
|
||||
|
@ -24,9 +24,12 @@ import bpy
|
||||
import mathutils
|
||||
|
||||
from .. import utils
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .dump_anything import Dumper, Loader
|
||||
from .bl_file import get_filepath, ensure_unpacked
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
|
||||
format_to_ext = {
|
||||
'BMP': 'bmp',
|
||||
@ -48,35 +51,37 @@ format_to_ext = {
|
||||
}
|
||||
|
||||
|
||||
class BlImage(BlDatablock):
|
||||
class BlImage(ReplicatedDatablock):
|
||||
bl_id = "images"
|
||||
bl_class = bpy.types.Image
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'IMAGE_DATA'
|
||||
bl_reload_parent = False
|
||||
|
||||
def _construct(self, data):
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.images.new(
|
||||
name=data['name'],
|
||||
width=data['size'][0],
|
||||
height=data['size'][1]
|
||||
)
|
||||
|
||||
def _load(self, data, target):
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(data, target)
|
||||
loader.load(datablock, data)
|
||||
|
||||
target.source = 'FILE'
|
||||
target.filepath_raw = get_filepath(data['filename'])
|
||||
target.colorspace_settings.name = data["colorspace_settings"]["name"]
|
||||
|
||||
# datablock.name = data.get('name')
|
||||
datablock.source = 'FILE'
|
||||
datablock.filepath_raw = get_filepath(data['filename'])
|
||||
color_space_name = data.get("colorspace")
|
||||
|
||||
def _dump(self, instance=None):
|
||||
assert(instance)
|
||||
if color_space_name:
|
||||
datablock.colorspace_settings.name = color_space_name
|
||||
|
||||
filename = Path(instance.filepath).name
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
filename = Path(datablock.filepath).name
|
||||
|
||||
data = {
|
||||
"filename": filename
|
||||
@ -86,38 +91,47 @@ class BlImage(BlDatablock):
|
||||
dumper.depth = 2
|
||||
dumper.include_filter = [
|
||||
"name",
|
||||
# 'source',
|
||||
'size',
|
||||
'height',
|
||||
'alpha',
|
||||
'float_buffer',
|
||||
'alpha_mode',
|
||||
'colorspace_settings']
|
||||
data.update(dumper.dump(instance))
|
||||
'alpha_mode']
|
||||
data.update(dumper.dump(datablock))
|
||||
data['colorspace'] = datablock.colorspace_settings.name
|
||||
|
||||
return data
|
||||
|
||||
def diff(self):
|
||||
if self.instance and (self.instance.name != self.data['name']):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.images)
|
||||
|
||||
def _resolve_deps_implementation(self):
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
if self.instance.filepath:
|
||||
|
||||
if self.instance.packed_file:
|
||||
filename = Path(bpy.path.abspath(self.instance.filepath)).name
|
||||
self.instance.filepath = get_filepath(filename)
|
||||
self.instance.save()
|
||||
# An image can't be unpacked to the modified path
|
||||
# TODO: make a bug report
|
||||
self.instance.unpack(method="REMOVE")
|
||||
if datablock.packed_file:
|
||||
filename = Path(bpy.path.abspath(datablock.filepath)).name
|
||||
datablock.filepath_raw = get_filepath(filename)
|
||||
datablock.save()
|
||||
# An image can't be unpacked to the modified path
|
||||
# TODO: make a bug report
|
||||
datablock.unpack(method="REMOVE")
|
||||
|
||||
elif self.instance.source == "GENERATED":
|
||||
filename = f"{self.instance.name}.png"
|
||||
self.instance.filepath = get_filepath(filename)
|
||||
self.instance.save()
|
||||
elif datablock.source == "GENERATED":
|
||||
filename = f"{datablock.name}.png"
|
||||
datablock.filepath = get_filepath(filename)
|
||||
datablock.save()
|
||||
|
||||
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
|
||||
if datablock.filepath:
|
||||
deps.append(Path(bpy.path.abspath(datablock.filepath)))
|
||||
|
||||
return deps
|
||||
|
||||
@staticmethod
|
||||
def needs_update(datablock: object, data: dict) -> bool:
|
||||
if datablock.is_dirty:
|
||||
datablock.save()
|
||||
|
||||
return True
|
||||
|
||||
_type = bpy.types.Image
|
||||
_class = BlImage
|
||||
|
@ -20,35 +20,41 @@ import bpy
|
||||
import mathutils
|
||||
|
||||
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from replication.exception import ContextError
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
POINT = ['co', 'weight_softbody', 'co_deform']
|
||||
|
||||
|
||||
class BlLattice(BlDatablock):
|
||||
class BlLattice(ReplicatedDatablock):
|
||||
use_delta = True
|
||||
|
||||
bl_id = "lattices"
|
||||
bl_class = bpy.types.Lattice
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'LATTICE_DATA'
|
||||
bl_reload_parent = False
|
||||
|
||||
def _construct(self, data):
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.lattices.new(data["name"])
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
if target.is_editmode:
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
if datablock.is_editmode:
|
||||
raise ContextError("lattice is in edit mode")
|
||||
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
|
||||
np_load_collection(data['points'], target.points, POINT)
|
||||
np_load_collection(data['points'], datablock.points, POINT)
|
||||
|
||||
def _dump_implementation(self, data, instance=None):
|
||||
if instance.is_editmode:
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
if datablock.is_editmode:
|
||||
raise ContextError("lattice is in edit mode")
|
||||
|
||||
dumper = Dumper()
|
||||
@ -64,9 +70,20 @@ class BlLattice(BlDatablock):
|
||||
'interpolation_type_w',
|
||||
'use_outside'
|
||||
]
|
||||
data = dumper.dump(instance)
|
||||
|
||||
data['points'] = np_dump_collection(instance.points, POINT)
|
||||
data = dumper.dump(datablock)
|
||||
|
||||
data['points'] = np_dump_collection(datablock.points, POINT)
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.lattices)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
return resolve_animation_dependencies(datablock)
|
||||
|
||||
_type = bpy.types.Lattice
|
||||
_class = BlLattice
|
@ -20,27 +20,34 @@ import bpy
|
||||
import mathutils
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
|
||||
class BlLight(BlDatablock):
|
||||
class BlLight(ReplicatedDatablock):
|
||||
use_delta = True
|
||||
|
||||
bl_id = "lights"
|
||||
bl_class = bpy.types.Light
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'LIGHT_DATA'
|
||||
bl_reload_parent = False
|
||||
|
||||
def _construct(self, data):
|
||||
return bpy.data.lights.new(data["name"], data["type"])
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
instance = bpy.data.lights.new(data["name"], data["type"])
|
||||
instance.uuid = data.get("uuid")
|
||||
return instance
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
def _dump_implementation(self, data, instance=None):
|
||||
assert(instance)
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
dumper = Dumper()
|
||||
dumper.depth = 3
|
||||
dumper.include_filter = [
|
||||
@ -69,9 +76,23 @@ class BlLight(BlDatablock):
|
||||
'spot_size',
|
||||
'spot_blend'
|
||||
]
|
||||
data = dumper.dump(instance)
|
||||
data = dumper.dump(datablock)
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.lights)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
|
||||
|
||||
_type = [bpy.types.SpotLight, bpy.types.PointLight, bpy.types.AreaLight, bpy.types.SunLight]
|
||||
_class = BlLight
|
||||
|
@ -21,33 +21,35 @@ import mathutils
|
||||
import logging
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
|
||||
class BlLightprobe(ReplicatedDatablock):
|
||||
use_delta = True
|
||||
|
||||
class BlLightprobe(BlDatablock):
|
||||
bl_id = "lightprobes"
|
||||
bl_class = bpy.types.LightProbe
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'LIGHTPROBE_GRID'
|
||||
bl_reload_parent = False
|
||||
|
||||
def _construct(self, data):
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type']
|
||||
# See https://developer.blender.org/D6396
|
||||
if bpy.app.version[1] >= 83:
|
||||
if bpy.app.version >= (2,83,0):
|
||||
return bpy.data.lightprobes.new(data["name"], type)
|
||||
else:
|
||||
logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
|
||||
def _dump_implementation(self, data, instance=None):
|
||||
assert(instance)
|
||||
if bpy.app.version[1] < 83:
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
if bpy.app.version < (2,83,0):
|
||||
logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
|
||||
|
||||
dumper = Dumper()
|
||||
@ -73,7 +75,16 @@ class BlLightprobe(BlDatablock):
|
||||
'visibility_blur'
|
||||
]
|
||||
|
||||
return dumper.dump(instance)
|
||||
return dumper.dump(datablock)
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.lightprobes)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
return []
|
||||
|
||||
_type = bpy.types.LightProbe
|
||||
_class = BlLightprobe
|
@ -21,12 +21,18 @@ import mathutils
|
||||
import logging
|
||||
import re
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import BlDatablock, get_datablock_from_uuid
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
|
||||
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
|
||||
IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM']
|
||||
|
||||
def load_node(node_data, node_tree):
|
||||
def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
|
||||
""" Load a node into a node_tree from a dict
|
||||
|
||||
:arg node_data: dumped node data
|
||||
@ -36,61 +42,57 @@ def load_node(node_data, node_tree):
|
||||
"""
|
||||
loader = Loader()
|
||||
target_node = node_tree.nodes.new(type=node_data["bl_idname"])
|
||||
|
||||
target_node.select = False
|
||||
loader.load(target_node, node_data)
|
||||
image_uuid = node_data.get('image_uuid', None)
|
||||
node_tree_uuid = node_data.get('node_tree_uuid', None)
|
||||
|
||||
if image_uuid and not target_node.image:
|
||||
target_node.image = get_datablock_from_uuid(image_uuid,None)
|
||||
image = resolve_datablock_from_uuid(image_uuid, bpy.data.images)
|
||||
if image is None:
|
||||
logging.error(f"Failed to find material image from uuid {image_uuid}")
|
||||
else:
|
||||
target_node.image = image
|
||||
|
||||
for input in node_data["inputs"]:
|
||||
if hasattr(target_node.inputs[input], "default_value"):
|
||||
try:
|
||||
target_node.inputs[input].default_value = node_data["inputs"][input]["default_value"]
|
||||
except:
|
||||
logging.error(
|
||||
f"Material {input} parameter not supported, skipping")
|
||||
if node_tree_uuid:
|
||||
target_node.node_tree = get_datablock_from_uuid(node_tree_uuid, None)
|
||||
|
||||
inputs_data = node_data.get('inputs')
|
||||
if inputs_data:
|
||||
inputs = [i for i in target_node.inputs if i.type not in IGNORED_SOCKETS]
|
||||
for idx, inpt in enumerate(inputs):
|
||||
if idx < len(inputs_data) and hasattr(inpt, "default_value"):
|
||||
loaded_input = inputs_data[idx]
|
||||
try:
|
||||
if inpt.type in ['OBJECT', 'COLLECTION']:
|
||||
inpt.default_value = get_datablock_from_uuid(loaded_input, None)
|
||||
else:
|
||||
inpt.default_value = loaded_input
|
||||
except Exception as e:
|
||||
logging.warning(f"Node {target_node.name} input {inpt.name} parameter not supported, skipping ({e})")
|
||||
else:
|
||||
logging.warning(f"Node {target_node.name} input length mismatch.")
|
||||
|
||||
outputs_data = node_data.get('outputs')
|
||||
if outputs_data:
|
||||
outputs = [o for o in target_node.outputs if o.type not in IGNORED_SOCKETS]
|
||||
for idx, output in enumerate(outputs):
|
||||
if idx < len(outputs_data) and hasattr(output, "default_value"):
|
||||
loaded_output = outputs_data[idx]
|
||||
try:
|
||||
if output.type in ['OBJECT', 'COLLECTION']:
|
||||
output.default_value = get_datablock_from_uuid(loaded_output, None)
|
||||
else:
|
||||
output.default_value = loaded_output
|
||||
except Exception as e:
|
||||
logging.warning(
|
||||
f"Node {target_node.name} output {output.name} parameter not supported, skipping ({e})")
|
||||
else:
|
||||
logging.warning(
|
||||
f"Node {target_node.name} output length mismatch.")
|
||||
|
||||
|
||||
def load_links(links_data, node_tree):
|
||||
""" Load node_tree links from a list
|
||||
|
||||
:arg links_data: dumped node links
|
||||
:type links_data: list
|
||||
:arg node_tree: node links collection
|
||||
:type node_tree: bpy.types.NodeTree
|
||||
"""
|
||||
|
||||
for link in links_data:
|
||||
input_socket = node_tree.nodes[link['to_node']].inputs[int(link['to_socket'])]
|
||||
output_socket = node_tree.nodes[link['from_node']].outputs[int(link['from_socket'])]
|
||||
node_tree.links.new(input_socket, output_socket)
|
||||
|
||||
|
||||
def dump_links(links):
|
||||
""" Dump node_tree links collection to a list
|
||||
|
||||
:arg links: node links collection
|
||||
:type links: bpy.types.NodeLinks
|
||||
:return: list
|
||||
"""
|
||||
|
||||
links_data = []
|
||||
|
||||
for link in links:
|
||||
to_socket = NODE_SOCKET_INDEX.search(link.to_socket.path_from_id()).group(1)
|
||||
from_socket = NODE_SOCKET_INDEX.search(link.from_socket.path_from_id()).group(1)
|
||||
links_data.append({
|
||||
'to_node': link.to_node.name,
|
||||
'to_socket': to_socket,
|
||||
'from_node': link.from_node.name,
|
||||
'from_socket': from_socket,
|
||||
})
|
||||
|
||||
return links_data
|
||||
|
||||
|
||||
def dump_node(node):
|
||||
def dump_node(node: bpy.types.ShaderNode) -> dict:
|
||||
""" Dump a single node to a dict
|
||||
|
||||
:arg node: target node
|
||||
@ -105,6 +107,7 @@ def dump_node(node):
|
||||
"show_expanded",
|
||||
"name_full",
|
||||
"select",
|
||||
"bl_label",
|
||||
"bl_height_min",
|
||||
"bl_height_max",
|
||||
"bl_height_default",
|
||||
@ -121,23 +124,41 @@ def dump_node(node):
|
||||
"show_preview",
|
||||
"show_texture",
|
||||
"outputs",
|
||||
"width_hidden",
|
||||
"image"
|
||||
"width_hidden"
|
||||
]
|
||||
|
||||
dumped_node = node_dumper.dump(node)
|
||||
|
||||
if hasattr(node, 'inputs'):
|
||||
dumped_node['inputs'] = {}
|
||||
if node.parent:
|
||||
dumped_node['parent'] = node.parent.name
|
||||
|
||||
for i in node.inputs:
|
||||
input_dumper = Dumper()
|
||||
input_dumper.depth = 2
|
||||
input_dumper.include_filter = ["default_value"]
|
||||
dump_io_needed = (node.type not in ['REROUTE', 'OUTPUT_MATERIAL'])
|
||||
|
||||
if dump_io_needed:
|
||||
io_dumper = Dumper()
|
||||
io_dumper.depth = 2
|
||||
io_dumper.include_filter = ["default_value"]
|
||||
|
||||
if hasattr(node, 'inputs'):
|
||||
dumped_node['inputs'] = []
|
||||
inputs = [i for i in node.inputs if i.type not in IGNORED_SOCKETS]
|
||||
for idx, inpt in enumerate(inputs):
|
||||
if hasattr(inpt, 'default_value'):
|
||||
if isinstance(inpt.default_value, bpy.types.ID):
|
||||
dumped_input = inpt.default_value.uuid
|
||||
else:
|
||||
dumped_input = io_dumper.dump(inpt.default_value)
|
||||
|
||||
dumped_node['inputs'].append(dumped_input)
|
||||
|
||||
if hasattr(node, 'outputs'):
|
||||
dumped_node['outputs'] = []
|
||||
for idx, output in enumerate(node.outputs):
|
||||
if output.type not in IGNORED_SOCKETS:
|
||||
if hasattr(output, 'default_value'):
|
||||
dumped_node['outputs'].append(
|
||||
io_dumper.dump(output.default_value))
|
||||
|
||||
if hasattr(i, 'default_value'):
|
||||
dumped_node['inputs'][i.name] = input_dumper.dump(
|
||||
i)
|
||||
if hasattr(node, 'color_ramp'):
|
||||
ramp_dumper = Dumper()
|
||||
ramp_dumper.depth = 4
|
||||
@ -145,7 +166,10 @@ def dump_node(node):
|
||||
'elements',
|
||||
'alpha',
|
||||
'color',
|
||||
'position'
|
||||
'position',
|
||||
'interpolation',
|
||||
'hue_interpolation',
|
||||
'color_mode'
|
||||
]
|
||||
dumped_node['color_ramp'] = ramp_dumper.dump(node.color_ramp)
|
||||
if hasattr(node, 'mapping'):
|
||||
@ -159,116 +183,339 @@ def dump_node(node):
|
||||
dumped_node['mapping'] = curve_dumper.dump(node.mapping)
|
||||
if hasattr(node, 'image') and getattr(node, 'image'):
|
||||
dumped_node['image_uuid'] = node.image.uuid
|
||||
if hasattr(node, 'node_tree') and getattr(node, 'node_tree'):
|
||||
dumped_node['node_tree_uuid'] = node.node_tree.uuid
|
||||
return dumped_node
|
||||
|
||||
|
||||
class BlMaterial(BlDatablock):
|
||||
|
||||
def load_links(links_data, node_tree):
|
||||
""" Load node_tree links from a list
|
||||
|
||||
:arg links_data: dumped node links
|
||||
:type links_data: list
|
||||
:arg node_tree: node links collection
|
||||
:type node_tree: bpy.types.NodeTree
|
||||
"""
|
||||
|
||||
for link in links_data:
|
||||
input_socket = node_tree.nodes[link['to_node']
|
||||
].inputs[int(link['to_socket'])]
|
||||
output_socket = node_tree.nodes[link['from_node']].outputs[int(
|
||||
link['from_socket'])]
|
||||
node_tree.links.new(input_socket, output_socket)
|
||||
|
||||
|
||||
def dump_links(links):
|
||||
""" Dump node_tree links collection to a list
|
||||
|
||||
:arg links: node links collection
|
||||
:type links: bpy.types.NodeLinks
|
||||
:return: list
|
||||
"""
|
||||
|
||||
links_data = []
|
||||
|
||||
for link in links:
|
||||
to_socket = NODE_SOCKET_INDEX.search(
|
||||
link.to_socket.path_from_id()).group(1)
|
||||
from_socket = NODE_SOCKET_INDEX.search(
|
||||
link.from_socket.path_from_id()).group(1)
|
||||
links_data.append({
|
||||
'to_node': link.to_node.name,
|
||||
'to_socket': to_socket,
|
||||
'from_node': link.from_node.name,
|
||||
'from_socket': from_socket,
|
||||
})
|
||||
|
||||
return links_data
|
||||
|
||||
|
||||
def dump_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict:
|
||||
""" Dump a shader node_tree to a dict including links and nodes
|
||||
|
||||
:arg node_tree: dumped shader node tree
|
||||
:type node_tree: bpy.types.ShaderNodeTree
|
||||
:return: dict
|
||||
"""
|
||||
node_tree_data = {
|
||||
'nodes': {node.name: dump_node(node) for node in node_tree.nodes},
|
||||
'links': dump_links(node_tree.links),
|
||||
'name': node_tree.name,
|
||||
'type': type(node_tree).__name__
|
||||
}
|
||||
|
||||
for socket_id in ['inputs', 'outputs']:
|
||||
socket_collection = getattr(node_tree, socket_id)
|
||||
node_tree_data[socket_id] = dump_node_tree_sockets(socket_collection)
|
||||
|
||||
return node_tree_data
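For reference, an illustrative sketch (not part of the changed files) of the dictionary shape dump_node_tree produces; the keys follow the function above, the values are made up:
# {
#     'nodes': {'Principled BSDF': {...}, 'Material Output': {...}},
#     'links': [{'to_node': 'Material Output', 'to_socket': '0',
#                'from_node': 'Principled BSDF', 'from_socket': '0'}],
#     'name': 'Material.001',
#     'type': 'ShaderNodeTree',
#     'inputs': [],    # (name, bl_socket_idname, uuid) tuples from dump_node_tree_sockets
#     'outputs': []
# }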
|
||||
|
||||
|
||||
def dump_node_tree_sockets(sockets: bpy.types.Collection) -> dict:
|
||||
""" dump sockets of a shader_node_tree
|
||||
|
||||
:arg target_node_tree: target node_tree
|
||||
:type target_node_tree: bpy.types.NodeTree
|
||||
:arg socket_id: socket identifier
|
||||
:type socket_id: str
|
||||
:return: dict
|
||||
"""
|
||||
sockets_data = []
|
||||
for socket in sockets:
|
||||
try:
|
||||
socket_uuid = socket['uuid']
|
||||
except Exception:
|
||||
socket_uuid = str(uuid4())
|
||||
socket['uuid'] = socket_uuid
|
||||
|
||||
sockets_data.append((socket.name, socket.bl_socket_idname, socket_uuid))
|
||||
|
||||
return sockets_data
|
||||
|
||||
|
||||
def load_node_tree_sockets(sockets: bpy.types.Collection,
|
||||
sockets_data: dict):
|
||||
""" load sockets of a shader_node_tree
|
||||
|
||||
:arg target_node_tree: target node_tree
|
||||
:type target_node_tree: bpy.types.NodeTree
|
||||
:arg socket_id: socket identifier
|
||||
:type socket_id: str
|
||||
:arg socket_data: dumped socket data
|
||||
:type socket_data: dict
|
||||
"""
|
||||
# Check for removed sockets
|
||||
for socket in sockets:
|
||||
if not [s for s in sockets_data if 'uuid' in socket and socket['uuid'] == s[2]]:
|
||||
sockets.remove(socket)
|
||||
|
||||
# Check for new sockets
|
||||
for idx, socket_data in enumerate(sockets_data):
|
||||
try:
|
||||
checked_socket = sockets[idx]
|
||||
if checked_socket.name != socket_data[0]:
|
||||
checked_socket.name = socket_data[0]
|
||||
except Exception:
|
||||
s = sockets.new(socket_data[1], socket_data[0])
|
||||
s['uuid'] = socket_data[2]
|
||||
|
||||
|
||||
def load_node_tree(node_tree_data: dict, target_node_tree: bpy.types.ShaderNodeTree) -> dict:
|
||||
"""Load a shader node_tree from dumped data
|
||||
|
||||
:arg node_tree_data: dumped node data
|
||||
:type node_tree_data: dict
|
||||
:arg target_node_tree: target node_tree
|
||||
:type target_node_tree: bpy.types.NodeTree
|
||||
"""
|
||||
# TODO: load only required nodes
|
||||
target_node_tree.nodes.clear()
|
||||
|
||||
if not target_node_tree.is_property_readonly('name'):
|
||||
target_node_tree.name = node_tree_data['name']
|
||||
|
||||
if 'inputs' in node_tree_data:
|
||||
socket_collection = getattr(target_node_tree, 'inputs')
|
||||
load_node_tree_sockets(socket_collection, node_tree_data['inputs'])
|
||||
|
||||
if 'outputs' in node_tree_data:
|
||||
socket_collection = getattr(target_node_tree, 'outputs')
|
||||
load_node_tree_sockets(socket_collection, node_tree_data['outputs'])
|
||||
|
||||
# Load nodes
|
||||
for node in node_tree_data["nodes"]:
|
||||
load_node(node_tree_data["nodes"][node], target_node_tree)
|
||||
|
||||
for node_id, node_data in node_tree_data["nodes"].items():
|
||||
target_node = target_node_tree.nodes.get(node_id, None)
|
||||
if target_node is None:
|
||||
continue
|
||||
elif 'parent' in node_data:
|
||||
target_node.parent = target_node_tree.nodes[node_data['parent']]
|
||||
else:
|
||||
target_node.parent = None
|
||||
# TODO: load only required nodes links
|
||||
# Load nodes links
|
||||
target_node_tree.links.clear()
|
||||
|
||||
load_links(node_tree_data["links"], target_node_tree)
|
||||
|
||||
|
||||
def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
|
||||
def has_image(node): return (
|
||||
node.type in ['TEX_IMAGE', 'TEX_ENVIRONMENT'] and node.image)
|
||||
|
||||
def has_node_group(node): return (
|
||||
hasattr(node, 'node_tree') and node.node_tree)
|
||||
|
||||
def has_texture(node): return (
|
||||
node.type in ['ATTRIBUTE_SAMPLE_TEXTURE','TEXTURE'] and node.texture)
|
||||
deps = []
|
||||
|
||||
for node in node_tree.nodes:
|
||||
if has_image(node):
|
||||
deps.append(node.image)
|
||||
elif has_node_group(node):
|
||||
deps.append(node.node_tree)
|
||||
elif has_texture(node):
|
||||
deps.append(node.texture)
|
||||
|
||||
return deps
|
||||
|
||||
|
||||
def dump_materials_slots(materials: bpy.types.bpy_prop_collection) -> list:
|
||||
""" Dump material slots collection
|
||||
|
||||
:arg materials: material slots collection to dump
|
||||
:type materials: bpy.types.bpy_prop_collection
|
||||
:return: list of tuples (mat_uuid, mat_name)
|
||||
"""
|
||||
return [(m.uuid, m.name) for m in materials if m]
|
||||
|
||||
|
||||
def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_collection):
|
||||
""" Load material slots
|
||||
|
||||
:arg src_materials: dumped material collection (ex: object.materials)
|
||||
:type src_materials: list of tuples (uuid, name)
|
||||
:arg dst_materials: target material collection pointer
|
||||
:type dst_materials: bpy.types.bpy_prop_collection
|
||||
"""
|
||||
# MATERIAL SLOTS
|
||||
dst_materials.clear()
|
||||
|
||||
for mat_uuid, mat_name in src_materials:
|
||||
mat_ref = None
|
||||
if mat_uuid:
|
||||
mat_ref = get_datablock_from_uuid(mat_uuid, None)
|
||||
else:
|
||||
mat_ref = bpy.data.materials[mat_name]
|
||||
dst_materials.append(mat_ref)
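Illustrative sketch (not part of the changed files) of the material-slot round trip handled by the two helpers above; the object name is hypothetical:
import bpy

mesh = bpy.data.objects['Cube'].data            # hypothetical mesh datablock
slots = dump_materials_slots(mesh.materials)    # e.g. [('<material uuid>', 'Material')]
load_materials_slots(slots, mesh.materials)     # clears the slots and re-appends them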
|
||||
|
||||
|
||||
class BlMaterial(ReplicatedDatablock):
|
||||
use_delta = True
|
||||
|
||||
bl_id = "materials"
|
||||
bl_class = bpy.types.Material
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'MATERIAL_DATA'
|
||||
bl_reload_parent = False
|
||||
bl_reload_child = True
|
||||
|
||||
def _construct(self, data):
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.materials.new(data["name"])
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
target.name = data['name']
|
||||
if data['is_grease_pencil']:
|
||||
if not target.is_grease_pencil:
|
||||
bpy.data.materials.create_gpencil_data(target)
|
||||
|
||||
loader.load(
|
||||
target.grease_pencil, data['grease_pencil'])
|
||||
is_grease_pencil = data.get('is_grease_pencil')
|
||||
use_nodes = data.get('use_nodes')
|
||||
|
||||
if data["use_nodes"]:
|
||||
if target.node_tree is None:
|
||||
target.use_nodes = True
|
||||
loader.load(datablock, data)
|
||||
|
||||
target.node_tree.nodes.clear()
|
||||
if is_grease_pencil:
|
||||
if not datablock.is_grease_pencil:
|
||||
bpy.data.materials.create_gpencil_data(datablock)
|
||||
loader.load(datablock.grease_pencil, data['grease_pencil'])
|
||||
elif use_nodes:
|
||||
if datablock.node_tree is None:
|
||||
datablock.use_nodes = True
|
||||
|
||||
loader.load(target, data)
|
||||
load_node_tree(data['node_tree'], datablock.node_tree)
|
||||
load_animation_data(data.get('nodes_animation_data'), datablock.node_tree)
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
# Load nodes
|
||||
for node in data["node_tree"]["nodes"]:
|
||||
load_node(data["node_tree"]["nodes"][node], target.node_tree)
|
||||
|
||||
# Load nodes links
|
||||
target.node_tree.links.clear()
|
||||
|
||||
load_links(data["node_tree"]["links"], target.node_tree)
|
||||
|
||||
def _dump_implementation(self, data, instance=None):
|
||||
assert(instance)
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
mat_dumper = Dumper()
|
||||
mat_dumper.depth = 2
|
||||
mat_dumper.exclude_filter = [
|
||||
"is_embed_data",
|
||||
"is_evaluated",
|
||||
"name_full",
|
||||
"bl_description",
|
||||
"bl_icon",
|
||||
"bl_idname",
|
||||
"bl_label",
|
||||
"preview",
|
||||
"original",
|
||||
"uuid",
|
||||
"users",
|
||||
"alpha_threshold",
|
||||
"line_color",
|
||||
"view_center",
|
||||
mat_dumper.include_filter = [
|
||||
'name',
|
||||
'blend_method',
|
||||
'shadow_method',
|
||||
'alpha_threshold',
|
||||
'show_transparent_back',
|
||||
'use_backface_culling',
|
||||
'use_screen_refraction',
|
||||
'use_sss_translucency',
|
||||
'refraction_depth',
|
||||
'preview_render_type',
|
||||
'use_preview_world',
|
||||
'pass_index',
|
||||
'use_nodes',
|
||||
'diffuse_color',
|
||||
'specular_color',
|
||||
'roughness',
|
||||
'specular_intensity',
|
||||
'metallic',
|
||||
'line_color',
|
||||
'line_priority',
|
||||
'is_grease_pencil'
|
||||
]
|
||||
data = mat_dumper.dump(instance)
|
||||
data = mat_dumper.dump(datablock)
|
||||
|
||||
if instance.use_nodes:
|
||||
nodes = {}
|
||||
for node in instance.node_tree.nodes:
|
||||
nodes[node.name] = dump_node(node)
|
||||
data["node_tree"]['nodes'] = nodes
|
||||
|
||||
data["node_tree"]["links"] = dump_links(instance.node_tree.links)
|
||||
|
||||
if instance.is_grease_pencil:
|
||||
if datablock.is_grease_pencil:
|
||||
gp_mat_dumper = Dumper()
|
||||
gp_mat_dumper.depth = 3
|
||||
|
||||
gp_mat_dumper.include_filter = [
|
||||
'color',
|
||||
'fill_color',
|
||||
'mix_color',
|
||||
'mix_factor',
|
||||
'mix_stroke_factor',
|
||||
# 'texture_angle',
|
||||
# 'texture_scale',
|
||||
# 'texture_offset',
|
||||
'pixel_size',
|
||||
'hide',
|
||||
'lock',
|
||||
'ghost',
|
||||
# 'texture_clamp',
|
||||
'flip',
|
||||
'use_overlap_strokes',
|
||||
'show_stroke',
|
||||
'show_fill',
|
||||
'alignment_mode',
|
||||
'pass_index',
|
||||
'mode',
|
||||
'stroke_style',
|
||||
'color',
|
||||
'use_overlap_strokes',
|
||||
'show_fill',
|
||||
# 'stroke_image',
|
||||
'fill_style',
|
||||
'fill_color',
|
||||
'pass_index',
|
||||
'alignment_mode',
|
||||
# 'fill_image',
|
||||
'texture_opacity',
|
||||
'mix_factor',
|
||||
'texture_offset',
|
||||
'texture_angle',
|
||||
'texture_scale',
|
||||
'texture_clamp',
|
||||
'gradient_type',
|
||||
'mix_color',
|
||||
'flip'
|
||||
# 'fill_image',
|
||||
'use_stroke_holdout',
|
||||
'use_overlap_strokes',
|
||||
'use_fill_holdout',
|
||||
]
|
||||
data['grease_pencil'] = gp_mat_dumper.dump(instance.grease_pencil)
|
||||
data['grease_pencil'] = gp_mat_dumper.dump(datablock.grease_pencil)
|
||||
elif datablock.use_nodes:
|
||||
data['node_tree'] = dump_node_tree(datablock.node_tree)
|
||||
data['nodes_animation_data'] = dump_animation_data(datablock.node_tree)
|
||||
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
|
||||
return data
|
||||
|
||||
def _resolve_deps_implementation(self):
|
||||
# TODO: resolve node group deps
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.materials)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
|
||||
if self.instance.use_nodes:
|
||||
for node in self.instance.node_tree.nodes:
|
||||
if node.type in ['TEX_IMAGE','TEX_ENVIRONMENT']:
|
||||
deps.append(node.image)
|
||||
if self.is_library:
|
||||
deps.append(self.instance.library)
|
||||
if datablock.use_nodes:
|
||||
deps.extend(get_node_tree_dependencies(datablock.node_tree))
|
||||
deps.extend(resolve_animation_dependencies(datablock.node_tree))
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
_type = bpy.types.Material
|
||||
_class = BlMaterial
|
@ -25,7 +25,13 @@ import numpy as np
|
||||
from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
|
||||
from replication.constants import DIFF_BINARY
|
||||
from replication.exception import ContextError
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
|
||||
from .bl_datablock import get_datablock_from_uuid
|
||||
from .bl_material import dump_materials_slots, load_materials_slots
|
||||
from ..utils import get_preferences
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
VERTICE = ['co']
|
||||
|
||||
@ -33,6 +39,8 @@ EDGE = [
|
||||
'vertices',
|
||||
'crease',
|
||||
'bevel_weight',
|
||||
'use_seam',
|
||||
'use_edge_sharp',
|
||||
]
|
||||
LOOP = [
|
||||
'vertex_index',
|
||||
@ -46,79 +54,79 @@ POLYGON = [
|
||||
'material_index',
|
||||
]
|
||||
|
||||
class BlMesh(BlDatablock):
|
||||
class BlMesh(ReplicatedDatablock):
|
||||
use_delta = True
|
||||
|
||||
bl_id = "meshes"
|
||||
bl_class = bpy.types.Mesh
|
||||
bl_delay_refresh = 2
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'MESH_DATA'
|
||||
bl_reload_parent = True
|
||||
|
||||
def _construct(self, data):
|
||||
instance = bpy.data.meshes.new(data["name"])
|
||||
instance.uuid = self.uuid
|
||||
return instance
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.meshes.new(data.get("name"))
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
if not target or target.is_editmode:
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
if not datablock or datablock.is_editmode:
|
||||
raise ContextError
|
||||
else:
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
|
||||
# MATERIAL SLOTS
|
||||
target.materials.clear()
|
||||
|
||||
for m in data["material_list"]:
|
||||
target.materials.append(bpy.data.materials[m])
|
||||
src_materials = data.get('materials', None)
|
||||
if src_materials:
|
||||
load_materials_slots(src_materials, datablock.materials)
|
||||
|
||||
# CLEAR GEOMETRY
|
||||
if target.vertices:
|
||||
target.clear_geometry()
|
||||
if datablock.vertices:
|
||||
datablock.clear_geometry()
|
||||
|
||||
target.vertices.add(data["vertex_count"])
|
||||
target.edges.add(data["egdes_count"])
|
||||
target.loops.add(data["loop_count"])
|
||||
target.polygons.add(data["poly_count"])
|
||||
datablock.vertices.add(data["vertex_count"])
|
||||
datablock.edges.add(data["egdes_count"])
|
||||
datablock.loops.add(data["loop_count"])
|
||||
datablock.polygons.add(data["poly_count"])
|
||||
|
||||
# LOADING
|
||||
np_load_collection(data['vertices'], target.vertices, VERTICE)
|
||||
np_load_collection(data['edges'], target.edges, EDGE)
|
||||
np_load_collection(data['loops'], target.loops, LOOP)
|
||||
np_load_collection(data["polygons"],target.polygons, POLYGON)
|
||||
np_load_collection(data['vertices'], datablock.vertices, VERTICE)
|
||||
np_load_collection(data['edges'], datablock.edges, EDGE)
|
||||
np_load_collection(data['loops'], datablock.loops, LOOP)
|
||||
np_load_collection(data["polygons"],datablock.polygons, POLYGON)
|
||||
|
||||
# UV Layers
|
||||
if 'uv_layers' in data.keys():
|
||||
for layer in data['uv_layers']:
|
||||
if layer not in target.uv_layers:
|
||||
target.uv_layers.new(name=layer)
|
||||
if layer not in datablock.uv_layers:
|
||||
datablock.uv_layers.new(name=layer)
|
||||
|
||||
np_load_collection_primitives(
|
||||
target.uv_layers[layer].data,
|
||||
datablock.uv_layers[layer].data,
|
||||
'uv',
|
||||
data["uv_layers"][layer]['data'])
|
||||
|
||||
# Vertex color
|
||||
if 'vertex_colors' in data.keys():
|
||||
for color_layer in data['vertex_colors']:
|
||||
if color_layer not in target.vertex_colors:
|
||||
target.vertex_colors.new(name=color_layer)
|
||||
if color_layer not in datablock.vertex_colors:
|
||||
datablock.vertex_colors.new(name=color_layer)
|
||||
|
||||
np_load_collection_primitives(
|
||||
target.vertex_colors[color_layer].data,
|
||||
datablock.vertex_colors[color_layer].data,
|
||||
'color',
|
||||
data["vertex_colors"][color_layer]['data'])
|
||||
|
||||
target.validate()
|
||||
target.update()
|
||||
datablock.validate()
|
||||
datablock.update()
|
||||
|
||||
def _dump_implementation(self, data, instance=None):
|
||||
assert(instance)
|
||||
|
||||
if instance.is_editmode and not self.preferences.sync_flags.sync_during_editmode:
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
if (datablock.is_editmode or bpy.context.mode == "SCULPT") and not get_preferences().sync_flags.sync_during_editmode:
|
||||
raise ContextError("Mesh is in edit mode")
|
||||
mesh = instance
|
||||
mesh = datablock
|
||||
|
||||
dumper = Dumper()
|
||||
dumper.depth = 1
|
||||
@ -132,6 +140,8 @@ class BlMesh(BlDatablock):
|
||||
|
||||
data = dumper.dump(mesh)
|
||||
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
|
||||
# VERTICES
|
||||
data["vertex_count"] = len(mesh.vertices)
|
||||
data["vertices"] = np_dump_collection(mesh.vertices, VERTICE)
|
||||
@ -162,21 +172,31 @@ class BlMesh(BlDatablock):
|
||||
data['vertex_colors'][color_map.name] = {}
|
||||
data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')
|
||||
|
||||
# Fix material index
|
||||
m_list = []
|
||||
for material in instance.materials:
|
||||
if material:
|
||||
m_list.append(material.name)
|
||||
|
||||
data['material_list'] = m_list
|
||||
|
||||
# Materials
|
||||
data['materials'] = dump_materials_slots(datablock.materials)
|
||||
return data
|
||||
|
||||
def _resolve_deps_implementation(self):
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
|
||||
for material in self.instance.materials:
|
||||
for material in datablock.materials:
|
||||
if material:
|
||||
deps.append(material)
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.meshes)
|
||||
|
||||
@staticmethod
|
||||
def needs_update(datablock: object, data: dict) -> bool:
|
||||
return ('EDIT' not in bpy.context.mode and bpy.context.mode != 'SCULPT') \
|
||||
or get_preferences().sync_flags.sync_during_editmode
|
||||
|
||||
_type = bpy.types.Mesh
|
||||
_class = BlMesh
|
||||
|
@ -23,7 +23,9 @@ from .dump_anything import (
|
||||
Dumper, Loader, np_dump_collection_primitive, np_load_collection_primitives,
|
||||
np_dump_collection, np_load_collection)
|
||||
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
|
||||
ELEMENT = [
|
||||
@ -62,31 +64,35 @@ def load_metaball_elements(elements_data, elements):
|
||||
np_load_collection(elements_data, elements, ELEMENT)
|
||||
|
||||
|
||||
class BlMetaball(BlDatablock):
|
||||
class BlMetaball(ReplicatedDatablock):
|
||||
use_delta = True
|
||||
|
||||
bl_id = "metaballs"
|
||||
bl_class = bpy.types.MetaBall
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'META_BALL'
|
||||
bl_reload_parent = False
|
||||
|
||||
def _construct(self, data):
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.metaballs.new(data["name"])
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
target.elements.clear()
|
||||
loader = Loader()
|
||||
loader.load(datablock, data)
|
||||
|
||||
datablock.elements.clear()
|
||||
|
||||
for mtype in data["elements"]['type']:
|
||||
new_element = target.elements.new()
|
||||
new_element = datablock.elements.new()
|
||||
|
||||
load_metaball_elements(data['elements'], target.elements)
|
||||
load_metaball_elements(data['elements'], datablock.elements)
|
||||
|
||||
def _dump_implementation(self, data, instance=None):
|
||||
assert(instance)
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
dumper = Dumper()
|
||||
dumper.depth = 1
|
||||
dumper.include_filter = [
|
||||
@ -100,7 +106,24 @@ class BlMetaball(BlDatablock):
|
||||
'texspace_size'
|
||||
]
|
||||
|
||||
data = dumper.dump(instance)
|
||||
data['elements'] = dump_metaball_elements(instance.elements)
|
||||
data = dumper.dump(datablock)
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
data['elements'] = dump_metaball_elements(datablock.elements)
|
||||
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.metaballs)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
_type = bpy.types.MetaBall
|
||||
_class = BlMetaball
|
multi_user/bl_types/bl_node_group.py (new file, 64 lines)
@ -0,0 +1,64 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_material import (dump_node_tree,
|
||||
load_node_tree,
|
||||
get_node_tree_dependencies)
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
class BlNodeGroup(ReplicatedDatablock):
|
||||
use_delta = True
|
||||
|
||||
bl_id = "node_groups"
|
||||
bl_class = bpy.types.NodeTree
|
||||
bl_check_common = False
|
||||
bl_icon = 'NODETREE'
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.node_groups.new(data["name"], data["type"])
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
load_node_tree(data, datablock)
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
return dump_node_tree(datablock)
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.node_groups)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
deps.extend(get_node_tree_dependencies(datablock))
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
return deps
|
||||
|
||||
_type = [bpy.types.ShaderNodeTree, bpy.types.GeometryNodeTree]
|
||||
_class = BlNodeGroup
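Illustrative sketch (not part of the changed files): BlNodeGroup delegates to the node-tree helpers from bl_material, so a dumped node group has the same dict shape as a material's node_tree entry. A hypothetical round trip:
import bpy

group = bpy.data.node_groups['Geometry Nodes']        # hypothetical node group
data = BlNodeGroup.dump(group)                        # same shape as dump_node_tree()
BlNodeGroup.load(data, BlNodeGroup.construct(data))   # rebuild it from the dump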
|
@ -17,14 +17,169 @@
|
||||
|
||||
|
||||
import logging
|
||||
|
||||
import re
|
||||
import bpy
|
||||
import mathutils
|
||||
from replication.exception import ContextError
|
||||
|
||||
from .bl_datablock import BlDatablock, get_datablock_from_uuid
|
||||
from .dump_anything import Dumper, Loader
|
||||
from replication.exception import ReparentException
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
|
||||
from .bl_material import IGNORED_SOCKETS
|
||||
from ..utils import get_preferences
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
from .dump_anything import (
|
||||
Dumper,
|
||||
Loader,
|
||||
np_load_collection,
|
||||
np_dump_collection)
|
||||
|
||||
|
||||
SKIN_DATA = [
|
||||
'radius',
|
||||
'use_loose',
|
||||
'use_root'
|
||||
]
|
||||
|
||||
SHAPEKEY_BLOCK_ATTR = [
|
||||
'mute',
|
||||
'value',
|
||||
'slider_min',
|
||||
'slider_max',
|
||||
]
|
||||
|
||||
|
||||
if bpy.app.version >= (2,93,0):
|
||||
SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
|
||||
else:
|
||||
SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str)
|
||||
logging.warning("Geometry node Float parameter not supported in \
|
||||
blender 2.92.")
|
||||
|
||||
|
||||
def get_node_group_properties_identifiers(node_group):
|
||||
props_ids = []
|
||||
# Inputs
|
||||
for inpt in node_group.inputs:
|
||||
if inpt.type in IGNORED_SOCKETS:
|
||||
continue
|
||||
else:
|
||||
props_ids.append((inpt.identifier, inpt.type))
|
||||
|
||||
if inpt.type in ['INT', 'VALUE', 'BOOLEAN', 'RGBA', 'VECTOR']:
|
||||
props_ids.append((f"{inpt.identifier}_attribute_name",'STR'))
|
||||
props_ids.append((f"{inpt.identifier}_use_attribute", 'BOOL'))
|
||||
|
||||
for outpt in node_group.outputs:
|
||||
if outpt.type not in IGNORED_SOCKETS and outpt.type in ['INT', 'VALUE', 'BOOLEAN', 'RGBA', 'VECTOR']:
|
||||
props_ids.append((f"{outpt.identifier}_attribute_name", 'STR'))
|
||||
|
||||
return props_ids
|
||||
# return [inpt.identifer for inpt in node_group.inputs if inpt.type not in IGNORED_SOCKETS]
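An illustrative sketch (not part of the changed files) of the (identifier, type) pairs the helper above returns for a geometry nodes group with one float input and one float output; the identifiers are made up:
# [('Input_2', 'VALUE'),
#  ('Input_2_attribute_name', 'STR'),
#  ('Input_2_use_attribute', 'BOOL'),
#  ('Output_3_attribute_name', 'STR')]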
|
||||
|
||||
|
||||
def dump_physics(target: bpy.types.Object)->dict:
|
||||
"""
|
||||
Dump all physics settings from a given object excluding modifier
|
||||
related physics settings (such as softbody, cloth, dynapaint and fluid)
|
||||
"""
|
||||
dumper = Dumper()
|
||||
dumper.depth = 1
|
||||
physics_data = {}
|
||||
|
||||
# Collisions (collision)
|
||||
if target.collision and target.collision.use:
|
||||
physics_data['collision'] = dumper.dump(target.collision)
|
||||
|
||||
# Field (field)
|
||||
if target.field and target.field.type != "NONE":
|
||||
physics_data['field'] = dumper.dump(target.field)
|
||||
|
||||
# Rigid Body (rigid_body)
|
||||
if target.rigid_body:
|
||||
physics_data['rigid_body'] = dumper.dump(target.rigid_body)
|
||||
|
||||
# Rigid Body constraint (rigid_body_constraint)
|
||||
if target.rigid_body_constraint:
|
||||
physics_data['rigid_body_constraint'] = dumper.dump(target.rigid_body_constraint)
|
||||
|
||||
return physics_data
|
||||
|
||||
|
||||
def load_physics(dumped_settings: dict, target: bpy.types.Object):
|
||||
""" Load all physics settings from a given object excluding modifier
|
||||
related physics settings (such as softbody, cloth, dynapaint and fluid)
|
||||
"""
|
||||
loader = Loader()
|
||||
|
||||
if 'collision' in dumped_settings:
|
||||
loader.load(target.collision, dumped_settings['collision'])
|
||||
|
||||
if 'field' in dumped_settings:
|
||||
loader.load(target.field, dumped_settings['field'])
|
||||
|
||||
if 'rigid_body' in dumped_settings:
|
||||
if not target.rigid_body:
|
||||
bpy.ops.rigidbody.object_add({"object": target})
|
||||
loader.load(target.rigid_body, dumped_settings['rigid_body'])
|
||||
elif target.rigid_body:
|
||||
bpy.ops.rigidbody.object_remove({"object": target})
|
||||
|
||||
if 'rigid_body_constraint' in dumped_settings:
|
||||
if not target.rigid_body_constraint:
|
||||
bpy.ops.rigidbody.constraint_add({"object": target})
|
||||
loader.load(target.rigid_body_constraint, dumped_settings['rigid_body_constraint'])
|
||||
elif target.rigid_body_constraint:
|
||||
bpy.ops.rigidbody.constraint_remove({"object": target})
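Illustrative sketch (not part of the changed files) of a round trip through the two physics helpers above; the object names are hypothetical:
import bpy

src = bpy.data.objects['Cube']
dst = bpy.data.objects['Cube.001']
settings = dump_physics(src)   # e.g. {'rigid_body': {...}, 'field': {...}}
load_physics(settings, dst)    # adds or removes rigid body data on dst as needed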
|
||||
|
||||
|
||||
def dump_modifier_geometry_node_props(modifier: bpy.types.Modifier) -> list:
|
||||
""" Dump geometry node modifier input properties
|
||||
|
||||
:arg modifier: geometry node modifier to dump
|
||||
:type modifier: bpy.types.Modifier
|
||||
"""
|
||||
dumped_props = []
|
||||
|
||||
for prop_value, prop_type in get_node_group_properties_identifiers(modifier.node_group):
|
||||
try:
|
||||
prop_value = modifier[prop_value]
|
||||
except KeyError as e:
|
||||
logging.error(f"Failed to dump geometry node modifier property: {prop_value} ({e})")
|
||||
else:
|
||||
dump = None
|
||||
if isinstance(prop_value, bpy.types.ID):
|
||||
dump = prop_value.uuid
|
||||
elif isinstance(prop_value, SUPPORTED_GEOMETRY_NODE_PARAMETERS):
|
||||
dump = prop_value
|
||||
elif hasattr(prop_value, 'to_list'):
|
||||
dump = prop_value.to_list()
|
||||
|
||||
dumped_props.append((dump, prop_type))
|
||||
# logging.info(prop_value)
|
||||
|
||||
return dumped_props
|
||||
|
||||
|
||||
def load_modifier_geometry_node_props(dumped_modifier: dict, target_modifier: bpy.types.Modifier):
|
||||
""" Load geometry node modifier inputs
|
||||
|
||||
:arg dumped_modifier: source dumped modifier to load
|
||||
:type dumped_modifier: dict
|
||||
:arg target_modifier: target geometry node modifier
|
||||
:type target_modifier: bpy.types.Modifier
|
||||
"""
|
||||
|
||||
for input_index, inpt in enumerate(get_node_group_properties_identifiers(target_modifier.node_group)):
|
||||
dumped_value, dumped_type = dumped_modifier['props'][input_index]
|
||||
input_value = target_modifier[inpt[0]]
|
||||
if dumped_type in ['INT', 'VALUE', 'STR']:
|
||||
logging.info(f"{inpt[0]}/{dumped_value}")
|
||||
target_modifier[inpt[0]] = dumped_value
|
||||
elif dumped_type in ['RGBA', 'VECTOR']:
|
||||
for index in range(len(input_value)):
|
||||
input_value[index] = dumped_value[index]
|
||||
elif dumped_type in ['COLLECTION', 'OBJECT', 'IMAGE', 'TEXTURE', 'MATERIAL']:
|
||||
target_modifier[inpt[0]] = get_datablock_from_uuid(dumped_value, None)
|
||||
|
||||
|
||||
def load_pose(target_bone, data):
|
||||
@ -36,7 +191,7 @@ def load_pose(target_bone, data):
|
||||
def find_data_from_name(name=None):
|
||||
instance = None
|
||||
if not name:
|
||||
pass
|
||||
pass
|
||||
elif name in bpy.data.meshes.keys():
|
||||
instance = bpy.data.meshes[name]
|
||||
elif name in bpy.data.lights.keys():
|
||||
@ -59,11 +214,14 @@ def find_data_from_name(name=None):
|
||||
instance = bpy.data.speakers[name]
|
||||
elif name in bpy.data.lightprobes.keys():
|
||||
# Only supported since 2.83
|
||||
if bpy.app.version[1] >= 83:
|
||||
if bpy.app.version >= (2,83,0):
|
||||
instance = bpy.data.lightprobes[name]
|
||||
else:
|
||||
logging.warning(
|
||||
"Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
|
||||
elif bpy.app.version >= (2,91,0) and name in bpy.data.volumes.keys():
|
||||
# Only supported since 2.91
|
||||
instance = bpy.data.volumes[name]
|
||||
return instance
|
||||
|
||||
|
||||
@ -79,107 +237,362 @@ def _is_editmode(object: bpy.types.Object) -> bool:
|
||||
child_data.is_editmode)
|
||||
|
||||
|
||||
class BlObject(BlDatablock):
|
||||
def find_textures_dependencies(modifiers: bpy.types.bpy_prop_collection) -> [bpy.types.Texture]:
|
||||
""" Find textures lying in a modifier stack
|
||||
|
||||
:arg modifiers: modifiers collection
|
||||
:type modifiers: bpy.types.bpy_prop_collection
|
||||
:return: list of bpy.types.Texture pointers
|
||||
"""
|
||||
textures = []
|
||||
for mod in modifiers:
|
||||
modifier_attributes = [getattr(mod, attr_name)
|
||||
for attr_name in mod.bl_rna.properties.keys()]
|
||||
for attr in modifier_attributes:
|
||||
if issubclass(type(attr), bpy.types.Texture) and attr is not None:
|
||||
textures.append(attr)
|
||||
|
||||
return textures
|
||||
|
||||
|
||||
def find_geometry_nodes_dependencies(modifiers: bpy.types.bpy_prop_collection) -> [bpy.types.NodeTree]:
|
||||
""" Find geometry nodes dependencies from a modifier stack
|
||||
|
||||
:arg modifiers: modifiers collection
|
||||
:type modifiers: bpy.types.bpy_prop_collection
|
||||
:return: list of bpy.types.NodeTree pointers
|
||||
"""
|
||||
dependencies = []
|
||||
for mod in modifiers:
|
||||
if mod.type == 'NODES' and mod.node_group:
|
||||
dependencies.append(mod.node_group)
|
||||
for inpt, inpt_type in get_node_group_properties_identifiers(mod.node_group):
|
||||
inpt_value = mod.get(inpt)
|
||||
# Skip 'COLLECTION' and 'OBJECT' inputs to avoid circular dependencies
|
||||
if inpt_type in ['IMAGE', 'TEXTURE', 'MATERIAL'] and inpt_value:
|
||||
dependencies.append(inpt_value)
|
||||
|
||||
return dependencies
|
||||
|
||||
|
||||
def dump_vertex_groups(src_object: bpy.types.Object) -> dict:
|
||||
""" Dump object's vertex groups
|
||||
|
||||
:param src_object: dump vertex groups of this object
|
||||
:type src_object: bpy.types.Object
|
||||
"""
|
||||
if isinstance(src_object.data, bpy.types.GreasePencil):
|
||||
logging.warning(
|
||||
"Grease pencil vertex groups are not supported yet. More info: https://gitlab.com/slumber/multi-user/-/issues/161")
|
||||
else:
|
||||
points_attr = 'vertices' if isinstance(
|
||||
src_object.data, bpy.types.Mesh) else 'points'
|
||||
dumped_vertex_groups = {}
|
||||
|
||||
# Vertex group metadata
|
||||
for vg in src_object.vertex_groups:
|
||||
dumped_vertex_groups[vg.index] = {
|
||||
'name': vg.name,
|
||||
'vertices': []
|
||||
}
|
||||
|
||||
# Vertex group assignation
|
||||
for vert in getattr(src_object.data, points_attr):
|
||||
for vg in vert.groups:
|
||||
vertices = dumped_vertex_groups.get(vg.group)['vertices']
|
||||
vertices.append((vert.index, vg.weight))
|
||||
|
||||
return dumped_vertex_groups
|
||||
|
||||
|
||||
def load_vertex_groups(dumped_vertex_groups: dict, target_object: bpy.types.Object):
|
||||
""" Load object vertex groups
|
||||
|
||||
:param dumped_vertex_groups: vertex_groups to load
|
||||
:type dumped_vertex_groups: dict
|
||||
:param target_object: object to load the vertex groups into
|
||||
:type target_object: bpy.types.Object
|
||||
"""
|
||||
target_object.vertex_groups.clear()
|
||||
for vg in dumped_vertex_groups.values():
|
||||
vertex_group = target_object.vertex_groups.new(name=vg['name'])
|
||||
for index, weight in vg['vertices']:
|
||||
vertex_group.add([index], weight, 'REPLACE')
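Illustrative sketch (not part of the changed files) of the structure dump_vertex_groups produces and load_vertex_groups consumes; indices and weights are made up:
# {
#     0: {'name': 'Group',     'vertices': [(0, 1.0), (2, 0.25)]},
#     1: {'name': 'Group.001', 'vertices': [(5, 0.8)]},
# }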
|
||||
|
||||
|
||||
def dump_shape_keys(target_key: bpy.types.Key)->dict:
|
||||
""" Dump the target shape_keys datablock to a dict using numpy
|
||||
|
||||
:param target_key: target key datablock
|
||||
:type target_key: bpy.types.Key
|
||||
:return: dict
|
||||
"""
|
||||
|
||||
dumped_key_blocks = []
|
||||
dumper = Dumper()
|
||||
dumper.include_filter = [
|
||||
'name',
|
||||
'mute',
|
||||
'value',
|
||||
'slider_min',
|
||||
'slider_max',
|
||||
]
|
||||
for key in target_key.key_blocks:
|
||||
dumped_key_block = dumper.dump(key)
|
||||
dumped_key_block['data'] = np_dump_collection(key.data, ['co'])
|
||||
dumped_key_block['relative_key'] = key.relative_key.name
|
||||
dumped_key_blocks.append(dumped_key_block)
|
||||
|
||||
return {
|
||||
'reference_key': target_key.reference_key.name,
|
||||
'use_relative': target_key.use_relative,
|
||||
'key_blocks': dumped_key_blocks,
|
||||
'animation_data': dump_animation_data(target_key)
|
||||
}
|
||||
|
||||
|
||||
def load_shape_keys(dumped_shape_keys: dict, target_object: bpy.types.Object):
|
||||
""" Load dumped shape keys onto an object using numpy
|
||||
|
||||
:param dumped_shape_keys: dumped shape keys data
|
||||
:type dumped_shape_keys: dict
|
||||
:param target_object: object used to load the shapekeys data onto
|
||||
:type target_object: bpy.types.Object
|
||||
"""
|
||||
loader = Loader()
|
||||
# Remove existing ones
|
||||
target_object.shape_key_clear()
|
||||
|
||||
# Create keys and load vertices coords
|
||||
dumped_key_blocks = dumped_shape_keys.get('key_blocks')
|
||||
for dumped_key_block in dumped_key_blocks:
|
||||
key_block = target_object.shape_key_add(name=dumped_key_block['name'])
|
||||
|
||||
loader.load(key_block, dumped_key_block)
|
||||
np_load_collection(dumped_key_block['data'], key_block.data, ['co'])
|
||||
|
||||
# Load relative key after all
|
||||
for dumped_key_block in dumped_key_blocks:
|
||||
relative_key_name = dumped_key_block.get('relative_key')
|
||||
key_name = dumped_key_block.get('name')
|
||||
|
||||
target_keyblock = target_object.data.shape_keys.key_blocks[key_name]
|
||||
relative_key = target_object.data.shape_keys.key_blocks[relative_key_name]
|
||||
|
||||
target_keyblock.relative_key = relative_key
|
||||
|
||||
# Shape keys animation data
|
||||
anim_data = dumped_shape_keys.get('animation_data')
|
||||
|
||||
if anim_data:
|
||||
load_animation_data(anim_data, target_object.data.shape_keys)
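Illustrative sketch (not part of the changed files) of the dump_shape_keys output consumed above; values are made up and 'data' holds the numpy dump of each key block's 'co' attribute:
# {
#     'reference_key': 'Basis',
#     'use_relative': True,
#     'key_blocks': [
#         {'name': 'Basis', 'mute': False, 'value': 0.0, 'slider_min': 0.0,
#          'slider_max': 1.0, 'relative_key': 'Basis', 'data': ...},
#         {'name': 'Key 1', 'mute': False, 'value': 0.5, 'slider_min': 0.0,
#          'slider_max': 1.0, 'relative_key': 'Basis', 'data': ...},
#     ],
#     'animation_data': None,
# }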
|
||||
|
||||
|
||||
def dump_modifiers(modifiers: bpy.types.bpy_prop_collection) -> list:
    """ Dump all modifiers of a modifier collection into a list

        :param modifiers: modifiers
        :type modifiers: bpy.types.bpy_prop_collection
        :return: list
    """
    dumped_modifiers = []
    dumper = Dumper()
    dumper.depth = 1
    dumper.exclude_filter = ['is_active']

    for modifier in modifiers:
        dumped_modifier = dumper.dump(modifier)
        # hack to dump geometry nodes inputs
        if modifier.type == 'NODES':
            dumped_modifier['props'] = dump_modifier_geometry_node_props(modifier)
        elif modifier.type == 'PARTICLE_SYSTEM':
            dumper.exclude_filter = [
                "is_edited",
                "is_editable",
                "is_global_hair"
            ]
            dumped_modifier['particle_system'] = dumper.dump(modifier.particle_system)
            dumped_modifier['particle_system']['settings_uuid'] = modifier.particle_system.settings.uuid

        elif modifier.type in ['SOFT_BODY', 'CLOTH']:
            dumped_modifier['settings'] = dumper.dump(modifier.settings)
        elif modifier.type == 'UV_PROJECT':
            dumped_modifier['projectors'] = [p.object.name for p in modifier.projectors if p and p.object]

        dumped_modifiers.append(dumped_modifier)
    return dumped_modifiers

def dump_constraints(constraints: bpy.types.bpy_prop_collection) -> list:
    """ Dump all constraints to a list

        :param constraints: constraints
        :type constraints: bpy.types.bpy_prop_collection
        :return: list
    """
    dumper = Dumper()
    dumper.depth = 2
    dumper.include_filter = None
    dumped_constraints = []
    for constraint in constraints:
        dumped_constraints.append(dumper.dump(constraint))
    return dumped_constraints


def load_constraints(dumped_constraints: list, constraints: bpy.types.bpy_prop_collection):
    """ Load dumped constraints

        :param dumped_constraints: list of constraints to load
        :type dumped_constraints: list
        :param constraints: constraints
        :type constraints: bpy.types.bpy_prop_collection
    """
    loader = Loader()
    constraints.clear()
    for dumped_constraint in dumped_constraints:
        constraint_type = dumped_constraint.get('type')
        new_constraint = constraints.new(constraint_type)
        loader.load(new_constraint, dumped_constraint)

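# Illustrative sketch: duplicate one object's constraint stack onto another.
# Both object names are placeholders.
def _constraints_copy_example():
    src = bpy.data.objects.get('Rig.A')
    dst = bpy.data.objects.get('Rig.B')
    if src and dst:
        load_constraints(dump_constraints(src.constraints), dst.constraints)
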
def load_modifiers(dumped_modifiers: list, modifiers: bpy.types.bpy_prop_collection):
    """ Load dumped modifiers into a modifier collection

        :param dumped_modifiers: list of modifiers to load
        :type dumped_modifiers: list
        :param modifiers: modifiers
        :type modifiers: bpy.types.bpy_prop_collection
    """
    loader = Loader()
    modifiers.clear()
    for dumped_modifier in dumped_modifiers:
        name = dumped_modifier.get('name')
        mtype = dumped_modifier.get('type')
        loaded_modifier = modifiers.new(name, mtype)
        loader.load(loaded_modifier, dumped_modifier)

        if loaded_modifier.type == 'NODES':
            load_modifier_geometry_node_props(dumped_modifier, loaded_modifier)
        elif loaded_modifier.type == 'PARTICLE_SYSTEM':
            default = loaded_modifier.particle_system.settings
            dumped_particles = dumped_modifier['particle_system']
            loader.load(loaded_modifier.particle_system, dumped_particles)

            settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
            if settings:
                loaded_modifier.particle_system.settings = settings
                # Hack to remove the default generated particle settings
                if not default.uuid:
                    bpy.data.particles.remove(default)
        elif loaded_modifier.type in ['SOFT_BODY', 'CLOTH']:
            loader.load(loaded_modifier.settings, dumped_modifier['settings'])
        elif loaded_modifier.type == 'UV_PROJECT':
            for projector_index, projector_object in enumerate(dumped_modifier['projectors']):
                target_object = bpy.data.objects.get(projector_object)
                if target_object:
                    loaded_modifier.projectors[projector_index].object = target_object
                else:
                    logging.error(f"Couldn't load projector target object {projector_object}")

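# Illustrative sketch: serialize an object's modifier stack and rebuild it on
# another object. Object names are placeholders; the particle-system and
# geometry-nodes branches additionally rely on the uuid lookups used above.
def _modifiers_copy_example():
    src = bpy.data.objects.get('Source')
    dst = bpy.data.objects.get('Target')
    if src and dst:
        load_modifiers(dump_modifiers(src.modifiers), dst.modifiers)
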
def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_prop_collection):
    """ Load modifiers custom data not managed by the dump_anything loader

        :param dumped_modifiers: modifiers to load
        :type dumped_modifiers: dict
        :param modifiers: target modifiers collection
        :type modifiers: bpy.types.bpy_prop_collection
    """
    loader = Loader()

    for modifier in modifiers:
        dumped_modifier = dumped_modifiers.get(modifier.name)


class BlObject(ReplicatedDatablock):
|
||||
use_delta = True
|
||||
|
||||
bl_id = "objects"
|
||||
bl_class = bpy.types.Object
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'OBJECT_DATA'
|
||||
bl_reload_parent = False
|
||||
|
||||
def _construct(self, data):
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
instance = None
|
||||
|
||||
if self.is_library:
|
||||
with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
|
||||
targetData.objects = [
|
||||
name for name in sourceData.objects if name == self.data['name']]
|
||||
|
||||
instance = bpy.data.objects[self.data['name']]
|
||||
instance.uuid = self.uuid
|
||||
return instance
|
||||
|
||||
# TODO: refactoring
|
||||
object_name = data.get("name")
|
||||
data_uuid = data.get("data_uuid")
|
||||
data_id = data.get("data")
|
||||
data_type = data.get("type")
|
||||
|
||||
object_data = get_datablock_from_uuid(
|
||||
data_uuid,
|
||||
data_uuid,
|
||||
find_data_from_name(data_id),
|
||||
ignore=['images']) #TODO: use resolve_from_id
|
||||
instance = bpy.data.objects.new(object_name, object_data)
|
||||
instance.uuid = self.uuid
|
||||
ignore=['images']) # TODO: use resolve_from_id
|
||||
|
||||
return instance
|
||||
if data_type != 'EMPTY' and object_data is None:
|
||||
raise Exception(f"Fail to load object {data['name']})")
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
return bpy.data.objects.new(object_name, object_data)
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
data_uuid = data.get("data_uuid")
|
||||
data_id = data.get("data")
|
||||
|
||||
if target.type != data['type']:
|
||||
raise ReparentException()
|
||||
elif target.data and (target.data.name != data_id):
|
||||
target.data = get_datablock_from_uuid(data_uuid, find_data_from_name(data_id), ignore=['images'])
|
||||
if datablock.data and (datablock.data.name != data_id):
|
||||
datablock.data = get_datablock_from_uuid(
|
||||
data_uuid, find_data_from_name(data_id), ignore=['images'])
|
||||
|
||||
# vertex groups
|
||||
if 'vertex_groups' in data:
|
||||
target.vertex_groups.clear()
|
||||
for vg in data['vertex_groups']:
|
||||
vertex_group=target.vertex_groups.new(name = vg['name'])
|
||||
point_attr='vertices' if 'vertices' in vg else 'points'
|
||||
for vert in vg[point_attr]:
|
||||
vertex_group.add(
|
||||
[vert['index']], vert['weight'], 'REPLACE')
|
||||
vertex_groups = data.get('vertex_groups', None)
|
||||
if vertex_groups:
|
||||
load_vertex_groups(vertex_groups, datablock)
|
||||
|
||||
object_data = datablock.data
|
||||
|
||||
# SHAPE KEYS
|
||||
if 'shape_keys' in data:
|
||||
target.shape_key_clear()
|
||||
|
||||
object_data=target.data
|
||||
|
||||
# Create keys and load vertices coords
|
||||
for key_block in data['shape_keys']['key_blocks']:
|
||||
key_data=data['shape_keys']['key_blocks'][key_block]
|
||||
target.shape_key_add(name = key_block)
|
||||
|
||||
loader.load(
|
||||
target.data.shape_keys.key_blocks[key_block], key_data)
|
||||
for vert in key_data['data']:
|
||||
target.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
|
||||
|
||||
# Load relative key after all
|
||||
for key_block in data['shape_keys']['key_blocks']:
|
||||
reference = data['shape_keys']['key_blocks'][key_block]['relative_key']
|
||||
|
||||
target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference]
|
||||
shape_keys = data.get('shape_keys')
|
||||
if shape_keys:
|
||||
load_shape_keys(shape_keys, datablock)
|
||||
|
||||
# Load transformation data
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
|
||||
loader.load(target.display, data['display'])
|
||||
# Object display fields
|
||||
if 'display' in data:
|
||||
loader.load(datablock.display, data['display'])
|
||||
|
||||
# Parenting
|
||||
parent_id = data.get('parent_uid')
|
||||
if parent_id:
|
||||
parent = get_datablock_from_uuid(parent_id[0], bpy.data.objects[parent_id[1]])
|
||||
# Avoid reloading
|
||||
if datablock.parent != parent and parent is not None:
|
||||
datablock.parent = parent
|
||||
elif datablock.parent:
|
||||
datablock.parent = None
|
||||
|
||||
# Pose
|
||||
if 'pose' in data:
|
||||
if not target.pose:
|
||||
if not datablock.pose:
|
||||
raise Exception('No pose data yet (fixed in the near future)')
|
||||
# Bone groups
|
||||
for bg_name in data['pose']['bone_groups']:
|
||||
bg_data = data['pose']['bone_groups'].get(bg_name)
|
||||
bg_target = target.pose.bone_groups.get(bg_name)
|
||||
bg_target = datablock.pose.bone_groups.get(bg_name)
|
||||
|
||||
if not bg_target:
|
||||
bg_target = target.pose.bone_groups.new(name=bg_name)
|
||||
bg_target = datablock.pose.bone_groups.new(name=bg_name)
|
||||
|
||||
loader.load(bg_target, bg_data)
|
||||
# target.pose.bone_groups.get
|
||||
# datablock.pose.bone_groups.get
|
||||
|
||||
# Bones
|
||||
for bone in data['pose']['bones']:
|
||||
target_bone = target.pose.bones.get(bone)
|
||||
target_bone = datablock.pose.bones.get(bone)
|
||||
bone_data = data['pose']['bones'].get(bone)
|
||||
|
||||
if 'constraints' in bone_data.keys():
|
||||
@ -188,20 +601,48 @@ class BlObject(BlDatablock):
|
||||
load_pose(target_bone, bone_data)
|
||||
|
||||
if 'bone_group_index' in bone_data.keys():
|
||||
target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']]
|
||||
target_bone.bone_group = datablock.pose.bone_groups[bone_data['bone_group_index']]
|
||||
|
||||
# TODO: find another way...
|
||||
if target.type == 'EMPTY':
|
||||
if datablock.empty_display_type == "IMAGE":
|
||||
img_uuid = data.get('data_uuid')
|
||||
if target.data is None and img_uuid:
|
||||
target.data = get_datablock_from_uuid(img_uuid, None)#bpy.data.images.get(img_key, None)
|
||||
if datablock.data is None and img_uuid:
|
||||
datablock.data = get_datablock_from_uuid(img_uuid, None)
|
||||
|
||||
def _dump_implementation(self, data, instance=None):
|
||||
assert(instance)
|
||||
if hasattr(object_data, 'skin_vertices') \
|
||||
and object_data.skin_vertices\
|
||||
and 'skin_vertices' in data:
|
||||
for index, skin_data in enumerate(object_data.skin_vertices):
|
||||
np_load_collection(
|
||||
data['skin_vertices'][index],
|
||||
skin_data.data,
|
||||
SKIN_DATA)
|
||||
|
||||
if _is_editmode(instance):
|
||||
if self.preferences.sync_flags.sync_during_editmode:
|
||||
instance.update_from_editmode()
|
||||
if hasattr(datablock, 'cycles_visibility') \
|
||||
and 'cycles_visibility' in data:
|
||||
loader.load(datablock.cycles_visibility, data['cycles_visibility'])
|
||||
|
||||
if hasattr(datablock, 'modifiers'):
|
||||
load_modifiers(data['modifiers'], datablock.modifiers)
|
||||
|
||||
constraints = data.get('constraints')
|
||||
if constraints:
|
||||
load_constraints(constraints, datablock.constraints)
|
||||
|
||||
# PHYSICS
|
||||
load_physics(data, datablock)
|
||||
|
||||
transform = data.get('transforms', None)
|
||||
if transform:
|
||||
datablock.matrix_parent_inverse = mathutils.Matrix(transform['matrix_parent_inverse'])
|
||||
datablock.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
|
||||
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
if _is_editmode(datablock):
|
||||
if get_preferences().sync_flags.sync_during_editmode:
|
||||
datablock.update_from_editmode()
|
||||
else:
|
||||
raise ContextError("Object is in edit-mode.")
|
||||
|
||||
@ -210,9 +651,7 @@ class BlObject(BlDatablock):
|
||||
dumper.include_filter = [
|
||||
"name",
|
||||
"rotation_mode",
|
||||
"parent",
|
||||
"data",
|
||||
"children",
|
||||
"library",
|
||||
"empty_display_type",
|
||||
"empty_display_size",
|
||||
@ -226,8 +665,6 @@ class BlObject(BlDatablock):
|
||||
"color",
|
||||
"instance_collection",
|
||||
"instance_type",
|
||||
"location",
|
||||
"scale",
|
||||
'lock_location',
|
||||
'lock_rotation',
|
||||
'lock_scale',
|
||||
@ -242,38 +679,66 @@ class BlObject(BlDatablock):
|
||||
'show_texture_space',
|
||||
'show_in_front',
|
||||
'type',
|
||||
'rotation_quaternion' if instance.rotation_mode == 'QUATERNION' else 'rotation_euler',
|
||||
'parent_type',
|
||||
'parent_bone',
|
||||
'track_axis',
|
||||
'up_axis',
|
||||
]
|
||||
|
||||
data = dumper.dump(instance)
|
||||
|
||||
data = dumper.dump(datablock)
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
dumper.include_filter = [
|
||||
'matrix_parent_inverse',
|
||||
'matrix_local',
|
||||
'matrix_basis']
|
||||
data['transforms'] = dumper.dump(datablock)
|
||||
dumper.include_filter = [
|
||||
'show_shadows',
|
||||
]
|
||||
data['display'] = dumper.dump(instance.display)
|
||||
data['display'] = dumper.dump(datablock.display)
|
||||
|
||||
data['data_uuid'] = getattr(instance.data, 'uuid', None)
|
||||
if self.is_library:
|
||||
return data
|
||||
data['data_uuid'] = getattr(datablock.data, 'uuid', None)
|
||||
|
||||
# PARENTING
|
||||
if datablock.parent:
|
||||
data['parent_uid'] = (datablock.parent.uuid, datablock.parent.name)
|
||||
|
||||
# MODIFIERS
|
||||
if hasattr(instance, 'modifiers'):
|
||||
modifiers = getattr(datablock, 'modifiers', None)
|
||||
if hasattr(datablock, 'modifiers'):
|
||||
data['modifiers'] = dump_modifiers(modifiers)
|
||||
|
||||
gp_modifiers = getattr(datablock, 'grease_pencil_modifiers', None)
|
||||
|
||||
if gp_modifiers:
|
||||
dumper.include_filter = None
|
||||
dumper.depth = 1
|
||||
data["modifiers"] = {}
|
||||
for index, modifier in enumerate(instance.modifiers):
|
||||
data["modifiers"][modifier.name] = dumper.dump(modifier)
|
||||
gp_modifiers_data = data["grease_pencil_modifiers"] = {}
|
||||
|
||||
for index, modifier in enumerate(gp_modifiers):
|
||||
gp_mod_data = gp_modifiers_data[modifier.name] = dict()
|
||||
gp_mod_data.update(dumper.dump(modifier))
|
||||
|
||||
if hasattr(modifier, 'use_custom_curve') \
|
||||
and modifier.use_custom_curve:
|
||||
curve_dumper = Dumper()
|
||||
curve_dumper.depth = 5
|
||||
curve_dumper.include_filter = [
|
||||
'curves',
|
||||
'points',
|
||||
'location']
|
||||
gp_mod_data['curve'] = curve_dumper.dump(modifier.curve)
|
||||
|
||||
|
||||
# CONSTRAINTS
|
||||
if hasattr(instance, 'constraints'):
|
||||
dumper.depth = 3
|
||||
data["constraints"] = dumper.dump(instance.constraints)
|
||||
if hasattr(datablock, 'constraints'):
|
||||
data["constraints"] = dump_constraints(datablock.constraints)
|
||||
|
||||
# POSE
|
||||
if hasattr(instance, 'pose') and instance.pose:
|
||||
if hasattr(datablock, 'pose') and datablock.pose:
|
||||
# BONES
|
||||
bones = {}
|
||||
for bone in instance.pose.bones:
|
||||
for bone in datablock.pose.bones:
|
||||
bones[bone.name] = {}
|
||||
dumper.depth = 1
|
||||
rotation = 'rotation_quaternion' if bone.rotation_mode == 'QUATERNION' else 'rotation_euler'
|
||||
@ -298,7 +763,7 @@ class BlObject(BlDatablock):
|
||||
|
||||
# GROUPS
|
||||
bone_groups = {}
|
||||
for group in instance.pose.bone_groups:
|
||||
for group in datablock.pose.bone_groups:
|
||||
dumper.depth = 3
|
||||
dumper.include_filter = [
|
||||
'name',
|
||||
@ -307,84 +772,75 @@ class BlObject(BlDatablock):
|
||||
bone_groups[group.name] = dumper.dump(group)
|
||||
data['pose']['bone_groups'] = bone_groups
|
||||
|
||||
# CHILDS
|
||||
if len(instance.children) > 0:
|
||||
childs = []
|
||||
for child in instance.children:
|
||||
childs.append(child.name)
|
||||
|
||||
data["children"] = childs
|
||||
|
||||
# VERTEX GROUPS
|
||||
if len(instance.vertex_groups) > 0:
|
||||
points_attr = 'vertices' if isinstance(
|
||||
instance.data, bpy.types.Mesh) else 'points'
|
||||
vg_data = []
|
||||
for vg in instance.vertex_groups:
|
||||
vg_idx = vg.index
|
||||
dumped_vg = {}
|
||||
dumped_vg['name'] = vg.name
|
||||
|
||||
vertices = []
|
||||
|
||||
for i, v in enumerate(getattr(instance.data, points_attr)):
|
||||
for vg in v.groups:
|
||||
if vg.group == vg_idx:
|
||||
vertices.append({
|
||||
'index': i,
|
||||
'weight': vg.weight
|
||||
})
|
||||
|
||||
dumped_vg['vertices'] = vertices
|
||||
|
||||
vg_data.append(dumped_vg)
|
||||
|
||||
data['vertex_groups'] = vg_data
|
||||
if len(datablock.vertex_groups) > 0:
|
||||
data['vertex_groups'] = dump_vertex_groups(datablock)
|
||||
|
||||
# SHAPE KEYS
|
||||
object_data = instance.data
|
||||
object_data = datablock.data
|
||||
if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
|
||||
dumper = Dumper()
|
||||
dumper.depth = 2
|
||||
data['shape_keys'] = dump_shape_keys(object_data.shape_keys)
|
||||
|
||||
# SKIN VERTICES
|
||||
if hasattr(object_data, 'skin_vertices') and object_data.skin_vertices:
|
||||
skin_vertices = list()
|
||||
for skin_data in object_data.skin_vertices:
|
||||
skin_vertices.append(
|
||||
np_dump_collection(skin_data.data, SKIN_DATA))
|
||||
data['skin_vertices'] = skin_vertices
|
||||
|
||||
# CYCLE SETTINGS
|
||||
if hasattr(datablock, 'cycles_visibility'):
|
||||
dumper.include_filter = [
|
||||
'reference_key',
|
||||
'use_relative'
|
||||
'camera',
|
||||
'diffuse',
|
||||
'glossy',
|
||||
'transmission',
|
||||
'scatter',
|
||||
'shadow',
|
||||
]
|
||||
data['shape_keys'] = dumper.dump(object_data.shape_keys)
|
||||
data['shape_keys']['reference_key'] = object_data.shape_keys.reference_key.name
|
||||
key_blocks = {}
|
||||
for key in object_data.shape_keys.key_blocks:
|
||||
dumper.depth = 3
|
||||
dumper.include_filter = [
|
||||
'name',
|
||||
'data',
|
||||
'mute',
|
||||
'value',
|
||||
'slider_min',
|
||||
'slider_max',
|
||||
'data',
|
||||
'co'
|
||||
]
|
||||
key_blocks[key.name] = dumper.dump(key)
|
||||
key_blocks[key.name]['relative_key'] = key.relative_key.name
|
||||
data['shape_keys']['key_blocks'] = key_blocks
|
||||
data['cycles_visibility'] = dumper.dump(datablock.cycles_visibility)
|
||||
|
||||
# PHYSICS
|
||||
data.update(dump_physics(datablock))
|
||||
|
||||
return data
|
||||
|
||||
def _resolve_deps_implementation(self):
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
|
||||
# Avoid Empty case
|
||||
if self.instance.data:
|
||||
deps.append(self.instance.data)
|
||||
if len(self.instance.children) > 0:
|
||||
deps.extend(list(self.instance.children))
|
||||
if datablock.data:
|
||||
deps.append(datablock.data)
|
||||
|
||||
if self.is_library:
|
||||
deps.append(self.instance.library)
|
||||
# Particle systems
|
||||
for particle_slot in datablock.particle_systems:
|
||||
deps.append(particle_slot.settings)
|
||||
|
||||
if self.instance.instance_type == 'COLLECTION':
|
||||
if datablock.parent:
|
||||
deps.append(datablock.parent)
|
||||
|
||||
if datablock.instance_type == 'COLLECTION':
|
||||
# TODO: uuid based
|
||||
deps.append(self.instance.instance_collection)
|
||||
deps.append(datablock.instance_collection)
|
||||
|
||||
if datablock.modifiers:
|
||||
deps.extend(find_textures_dependencies(datablock.modifiers))
|
||||
deps.extend(find_geometry_nodes_dependencies(datablock.modifiers))
|
||||
|
||||
if hasattr(datablock.data, 'shape_keys') and datablock.data.shape_keys:
|
||||
deps.extend(resolve_animation_dependencies(datablock.data.shape_keys))
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.objects)
|
||||
|
||||
_type = bpy.types.Object
|
||||
_class = BlObject
|
106
multi_user/bl_types/bl_particle.py
Normal file
@ -0,0 +1,106 @@
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
from . import dump_anything
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import get_datablock_from_uuid
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
|
||||
def dump_textures_slots(texture_slots: bpy.types.bpy_prop_collection) -> list:
|
||||
""" Dump every texture slot collection as the form:
|
||||
[(index, slot_texture_uuid, slot_texture_name), (), ...]
|
||||
"""
|
||||
dumped_slots = []
|
||||
for index, slot in enumerate(texture_slots):
|
||||
if slot and slot.texture:
|
||||
dumped_slots.append((index, slot.texture.uuid, slot.texture.name))
|
||||
|
||||
return dumped_slots
|
||||
|
||||
|
||||
def load_texture_slots(dumped_slots: list, target_slots: bpy.types.bpy_prop_collection):
|
||||
"""
|
||||
"""
|
||||
for index, slot in enumerate(target_slots):
|
||||
if slot:
|
||||
target_slots.clear(index)
|
||||
|
||||
for index, slot_uuid, slot_name in dumped_slots:
|
||||
target_slots.create(index).texture = get_datablock_from_uuid(
|
||||
slot_uuid, slot_name
|
||||
)
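
# Illustrative sketch: copy the texture slots from one particle settings
# datablock to another. Names are placeholders; the uuid lookup assumes the
# referenced textures are already tracked by the session.
def _texture_slots_copy_example():
    src = bpy.data.particles.get('ParticleSettings')
    dst = bpy.data.particles.get('ParticleSettings.001')
    if src and dst:
        load_texture_slots(dump_textures_slots(src.texture_slots), dst.texture_slots)
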
|
||||
|
||||
IGNORED_ATTR = [
|
||||
"is_embedded_data",
|
||||
"is_evaluated",
|
||||
"is_fluid",
|
||||
"is_library_indirect",
|
||||
"users"
|
||||
]
|
||||
|
||||
class BlParticle(ReplicatedDatablock):
|
||||
use_delta = True
|
||||
|
||||
bl_id = "particles"
|
||||
bl_class = bpy.types.ParticleSettings
|
||||
bl_icon = "PARTICLES"
|
||||
bl_check_common = False
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.particles.new(data["name"])
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
dump_anything.load(datablock, data)
|
||||
|
||||
dump_anything.load(datablock.effector_weights, data["effector_weights"])
|
||||
|
||||
# Force field
|
||||
force_field_1 = data.get("force_field_1", None)
|
||||
if force_field_1:
|
||||
dump_anything.load(datablock.force_field_1, force_field_1)
|
||||
|
||||
force_field_2 = data.get("force_field_2", None)
|
||||
if force_field_2:
|
||||
dump_anything.load(datablock.force_field_2, force_field_2)
|
||||
|
||||
# Texture slots
|
||||
load_texture_slots(data["texture_slots"], datablock.texture_slots)
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
dumper = dump_anything.Dumper()
|
||||
dumper.depth = 1
|
||||
dumper.exclude_filter = IGNORED_ATTR
|
||||
data = dumper.dump(datablock)
|
||||
|
||||
# Particle effectors
|
||||
data["effector_weights"] = dumper.dump(datablock.effector_weights)
|
||||
if datablock.force_field_1:
|
||||
data["force_field_1"] = dumper.dump(datablock.force_field_1)
|
||||
if datablock.force_field_2:
|
||||
data["force_field_2"] = dumper.dump(datablock.force_field_2)
|
||||
|
||||
# Texture slots
|
||||
data["texture_slots"] = dump_textures_slots(datablock.texture_slots)
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.particles)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = [t.texture for t in datablock.texture_slots if t and t.texture]
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
return deps
|
||||
|
||||
_type = bpy.types.ParticleSettings
|
||||
_class = BlParticle
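
# Illustrative sketch of the receive-side lifecycle implied by the static API
# above: resolve an existing datablock (or construct a new one), then load the
# dumped state and gather dependencies. The call order is an assumption about
# how the replication protocol drives these methods.
def _bl_particle_apply_example(data: dict):
    datablock = BlParticle.resolve(data) or BlParticle.construct(data)
    BlParticle.load(data, datablock)
    return BlParticle.resolve_deps(datablock)
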
|
@ -16,165 +16,597 @@
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from uuid import uuid4
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
from deepdiff import DeepDiff, Delta
|
||||
from replication.constants import DIFF_JSON, MODIFIED
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import BlDatablock
|
||||
from .bl_collection import dump_collection_children, dump_collection_objects, load_collection_childrens, load_collection_objects
|
||||
from replication.constants import (DIFF_JSON, MODIFIED)
|
||||
from deepdiff import DeepDiff
|
||||
import logging
|
||||
from ..utils import flush_history, get_preferences
|
||||
from .bl_action import (dump_animation_data, load_animation_data,
|
||||
resolve_animation_dependencies)
|
||||
from .bl_collection import (dump_collection_children, dump_collection_objects,
|
||||
load_collection_childrens, load_collection_objects,
|
||||
resolve_collection_dependencies)
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_file import get_filepath
|
||||
from .dump_anything import Dumper, Loader
|
||||
|
||||
RENDER_SETTINGS = [
|
||||
'dither_intensity',
|
||||
'engine',
|
||||
'film_transparent',
|
||||
'filter_size',
|
||||
'fps',
|
||||
'fps_base',
|
||||
'frame_map_new',
|
||||
'frame_map_old',
|
||||
'hair_subdiv',
|
||||
'hair_type',
|
||||
'line_thickness',
|
||||
'line_thickness_mode',
|
||||
'metadata_input',
|
||||
'motion_blur_shutter',
|
||||
'pixel_aspect_x',
|
||||
'pixel_aspect_y',
|
||||
'preview_pixel_size',
|
||||
'preview_start_resolution',
|
||||
'resolution_percentage',
|
||||
'resolution_x',
|
||||
'resolution_y',
|
||||
'sequencer_gl_preview',
|
||||
'use_bake_clear',
|
||||
'use_bake_lores_mesh',
|
||||
'use_bake_multires',
|
||||
'use_bake_selected_to_active',
|
||||
'use_bake_user_scale',
|
||||
'use_border',
|
||||
'use_compositing',
|
||||
'use_crop_to_border',
|
||||
'use_file_extension',
|
||||
'use_freestyle',
|
||||
'use_full_sample',
|
||||
'use_high_quality_normals',
|
||||
'use_lock_interface',
|
||||
'use_motion_blur',
|
||||
'use_multiview',
|
||||
'use_sequencer',
|
||||
'use_sequencer_override_scene_strip',
|
||||
'use_single_layer',
|
||||
'views_format',
|
||||
]
|
||||
|
||||
EVEE_SETTINGS = [
|
||||
'gi_diffuse_bounces',
|
||||
'gi_cubemap_resolution',
|
||||
'gi_visibility_resolution',
|
||||
'gi_irradiance_smoothing',
|
||||
'gi_glossy_clamp',
|
||||
'gi_filter_quality',
|
||||
'gi_show_irradiance',
|
||||
'gi_show_cubemaps',
|
||||
'gi_irradiance_display_size',
|
||||
'gi_cubemap_display_size',
|
||||
'gi_auto_bake',
|
||||
'taa_samples',
|
||||
'taa_render_samples',
|
||||
'use_taa_reprojection',
|
||||
'sss_samples',
|
||||
'sss_jitter_threshold',
|
||||
'use_ssr',
|
||||
'use_ssr_refraction',
|
||||
'use_ssr_halfres',
|
||||
'ssr_quality',
|
||||
'ssr_max_roughness',
|
||||
'ssr_thickness',
|
||||
'ssr_border_fade',
|
||||
'ssr_firefly_fac',
|
||||
'volumetric_start',
|
||||
'volumetric_end',
|
||||
'volumetric_tile_size',
|
||||
'volumetric_samples',
|
||||
'volumetric_sample_distribution',
|
||||
'use_volumetric_lights',
|
||||
'volumetric_light_clamp',
|
||||
'use_volumetric_shadows',
|
||||
'volumetric_shadow_samples',
|
||||
'use_gtao',
|
||||
'use_gtao_bent_normals',
|
||||
'use_gtao_bounce',
|
||||
'gtao_factor',
|
||||
'gtao_quality',
|
||||
'gtao_distance',
|
||||
'bokeh_max_size',
|
||||
'bokeh_threshold',
|
||||
'use_bloom',
|
||||
'bloom_threshold',
|
||||
'bloom_color',
|
||||
'bloom_knee',
|
||||
'bloom_radius',
|
||||
'bloom_clamp',
|
||||
'bloom_intensity',
|
||||
'use_motion_blur',
|
||||
'motion_blur_shutter',
|
||||
'motion_blur_depth_scale',
|
||||
'motion_blur_max',
|
||||
'motion_blur_steps',
|
||||
'shadow_cube_size',
|
||||
'shadow_cascade_size',
|
||||
'use_shadow_high_bitdepth',
|
||||
]
|
||||
|
||||
CYCLES_SETTINGS = [
|
||||
'shading_system',
|
||||
'progressive',
|
||||
'use_denoising',
|
||||
'denoiser',
|
||||
'use_square_samples',
|
||||
'samples',
|
||||
'aa_samples',
|
||||
'diffuse_samples',
|
||||
'glossy_samples',
|
||||
'transmission_samples',
|
||||
'ao_samples',
|
||||
'mesh_light_samples',
|
||||
'subsurface_samples',
|
||||
'volume_samples',
|
||||
'sampling_pattern',
|
||||
'use_layer_samples',
|
||||
'sample_all_lights_direct',
|
||||
'sample_all_lights_indirect',
|
||||
'light_sampling_threshold',
|
||||
'use_adaptive_sampling',
|
||||
'adaptive_threshold',
|
||||
'adaptive_min_samples',
|
||||
'min_light_bounces',
|
||||
'min_transparent_bounces',
|
||||
'caustics_reflective',
|
||||
'caustics_refractive',
|
||||
'blur_glossy',
|
||||
'max_bounces',
|
||||
'diffuse_bounces',
|
||||
'glossy_bounces',
|
||||
'transmission_bounces',
|
||||
'volume_bounces',
|
||||
'transparent_max_bounces',
|
||||
'volume_step_rate',
|
||||
'volume_max_steps',
|
||||
'dicing_rate',
|
||||
'max_subdivisions',
|
||||
'dicing_camera',
|
||||
'offscreen_dicing_scale',
|
||||
'film_exposure',
|
||||
'film_transparent_glass',
|
||||
'film_transparent_roughness',
|
||||
'filter_type',
|
||||
'pixel_filter_type',
|
||||
'filter_width',
|
||||
'seed',
|
||||
'use_animated_seed',
|
||||
'sample_clamp_direct',
|
||||
'sample_clamp_indirect',
|
||||
'tile_order',
|
||||
'use_progressive_refine',
|
||||
'bake_type',
|
||||
'use_camera_cull',
|
||||
'camera_cull_margin',
|
||||
'use_distance_cull',
|
||||
'distance_cull_margin',
|
||||
'motion_blur_position',
|
||||
'rolling_shutter_type',
|
||||
'rolling_shutter_duration',
|
||||
'texture_limit',
|
||||
'texture_limit_render',
|
||||
'ao_bounces',
|
||||
'ao_bounces_render',
|
||||
]
|
||||
|
||||
VIEW_SETTINGS = [
|
||||
'look',
|
||||
'view_transform',
|
||||
'exposure',
|
||||
'gamma',
|
||||
'use_curve_mapping',
|
||||
'white_level',
|
||||
'black_level'
|
||||
]
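
# Illustrative sketch of how the whitelists above are consumed: each list is
# assigned to a Dumper include_filter so only those properties are serialized.
# The scene name 'Scene' is a placeholder.
def _render_settings_dump_example() -> dict:
    scene = bpy.data.scenes.get('Scene')
    if not scene:
        return {}
    dumper = Dumper()
    dumper.depth = 1
    dumper.include_filter = RENDER_SETTINGS
    return dumper.dump(scene.render)
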
|
||||
|
||||
|
||||
def dump_sequence(sequence: bpy.types.Sequence) -> dict:
|
||||
""" Dump a sequence to a dict
|
||||
|
||||
:arg sequence: sequence to dump
|
||||
:type sequence: bpy.types.Sequence
|
||||
:return dict:
|
||||
"""
|
||||
dumper = Dumper()
|
||||
dumper.exclude_filter = [
|
||||
'lock',
|
||||
'select',
|
||||
'select_left_handle',
|
||||
'select_right_handle',
|
||||
'strobe'
|
||||
]
|
||||
dumper.depth = 1
|
||||
data = dumper.dump(sequence)
|
||||
|
||||
# TODO: Support multiple images
|
||||
if sequence.type == 'IMAGE':
|
||||
data['filenames'] = [e.filename for e in sequence.elements]
|
||||
|
||||
# Effect strip inputs
|
||||
input_count = getattr(sequence, 'input_count', None)
|
||||
if input_count:
|
||||
for n in range(input_count):
|
||||
input_name = f"input_{n+1}"
|
||||
data[input_name] = getattr(sequence, input_name).name
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def load_sequence(sequence_data: dict,
|
||||
sequence_editor: bpy.types.SequenceEditor):
|
||||
""" Load sequence from dumped data
|
||||
|
||||
:arg sequence_data: dumped sequence data to load
|
||||
:type sequence_data:dict
|
||||
:arg sequence_editor: root sequence editor
|
||||
:type sequence_editor: bpy.types.SequenceEditor
|
||||
"""
|
||||
strip_type = sequence_data.get('type')
|
||||
strip_name = sequence_data.get('name')
|
||||
strip_channel = sequence_data.get('channel')
|
||||
strip_frame_start = sequence_data.get('frame_start')
|
||||
|
||||
sequence = sequence_editor.sequences_all.get(strip_name, None)
|
||||
|
||||
if sequence is None:
|
||||
if strip_type == 'SCENE':
|
||||
strip_scene = bpy.data.scenes.get(sequence_data.get('scene'))
|
||||
sequence = sequence_editor.sequences.new_scene(strip_name,
|
||||
strip_scene,
|
||||
strip_channel,
|
||||
strip_frame_start)
|
||||
elif strip_type == 'MOVIE':
|
||||
filepath = get_filepath(Path(sequence_data['filepath']).name)
|
||||
sequence = sequence_editor.sequences.new_movie(strip_name,
|
||||
filepath,
|
||||
strip_channel,
|
||||
strip_frame_start)
|
||||
elif strip_type == 'SOUND':
|
||||
filepath = bpy.data.sounds[sequence_data['sound']].filepath
|
||||
sequence = sequence_editor.sequences.new_sound(strip_name,
|
||||
filepath,
|
||||
strip_channel,
|
||||
strip_frame_start)
|
||||
elif strip_type == 'IMAGE':
|
||||
images_name = sequence_data.get('filenames')
|
||||
filepath = get_filepath(images_name[0])
|
||||
sequence = sequence_editor.sequences.new_image(strip_name,
|
||||
filepath,
|
||||
strip_channel,
|
||||
strip_frame_start)
|
||||
# load other images
|
||||
if len(images_name) > 1:
|
||||
for img_idx in range(1, len(images_name)):
|
||||
sequence.elements.append((images_name[img_idx]))
|
||||
else:
|
||||
seq = {}
|
||||
|
||||
for i in range(sequence_data['input_count']):
|
||||
seq[f"seq{i+1}"] = sequence_editor.sequences_all.get(
|
||||
sequence_data.get(f"input_{i+1}", None))
|
||||
|
||||
sequence = sequence_editor.sequences.new_effect(name=strip_name,
|
||||
type=strip_type,
|
||||
channel=strip_channel,
|
||||
frame_start=strip_frame_start,
|
||||
frame_end=sequence_data['frame_final_end'],
|
||||
**seq)
|
||||
|
||||
loader = Loader()
|
||||
|
||||
loader.exclude_filter = ['filepath', 'sound', 'filenames', 'fps']
|
||||
loader.load(sequence, sequence_data)
|
||||
sequence.select = False
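
# Illustrative sketch: rebuild every strip of one scene's sequencer inside
# another scene using the dump/load helpers above. Scene names are placeholders.
def _sequences_copy_example():
    src = bpy.data.scenes.get('Scene')
    dst = bpy.data.scenes.get('Scene.copy')
    if src and dst and src.sequence_editor:
        dst.sequence_editor_create()
        for strip in src.sequence_editor.sequences_all:
            load_sequence(dump_sequence(strip), dst.sequence_editor)
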
|
||||
|
||||
|
||||
class BlScene(ReplicatedDatablock):
|
||||
is_root = True
|
||||
use_delta = True
|
||||
|
||||
class BlScene(BlDatablock):
|
||||
bl_id = "scenes"
|
||||
bl_class = bpy.types.Scene
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = True
|
||||
bl_icon = 'SCENE_DATA'
|
||||
bl_reload_parent = False
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.scenes.new(data["name"])
|
||||
|
||||
self.diff_method = DIFF_JSON
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
def _construct(self, data):
|
||||
instance = bpy.data.scenes.new(data["name"])
|
||||
return instance
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
# Load other meshes metadata
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
|
||||
# Load master collection
|
||||
load_collection_objects(data['collection']['objects'], target.collection)
|
||||
load_collection_childrens(data['collection']['children'], target.collection)
|
||||
load_collection_objects(
|
||||
data['collection']['objects'], datablock.collection)
|
||||
load_collection_childrens(
|
||||
data['collection']['children'], datablock.collection)
|
||||
|
||||
if 'world' in data.keys():
|
||||
target.world = bpy.data.worlds[data['world']]
|
||||
|
||||
# Annotation
|
||||
if 'grease_pencil' in data.keys():
|
||||
target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
|
||||
datablock.world = bpy.data.worlds[data['world']]
|
||||
|
||||
if self.preferences.sync_flags.sync_render_settings:
|
||||
# Annotation
|
||||
gpencil_uid = data.get('grease_pencil')
|
||||
if gpencil_uid:
|
||||
datablock.grease_pencil = resolve_datablock_from_uuid(gpencil_uid, bpy.data.grease_pencils)
|
||||
|
||||
if get_preferences().sync_flags.sync_render_settings:
|
||||
if 'eevee' in data.keys():
|
||||
loader.load(target.eevee, data['eevee'])
|
||||
loader.load(datablock.eevee, data['eevee'])
|
||||
|
||||
if 'cycles' in data.keys():
|
||||
loader.load(target.eevee, data['cycles'])
|
||||
loader.load(datablock.cycles, data['cycles'])
|
||||
|
||||
if 'render' in data.keys():
|
||||
loader.load(target.render, data['render'])
|
||||
loader.load(datablock.render, data['render'])
|
||||
|
||||
if 'view_settings' in data.keys():
|
||||
loader.load(target.view_settings, data['view_settings'])
|
||||
if target.view_settings.use_curve_mapping:
|
||||
#TODO: change this ugly fix
|
||||
target.view_settings.curve_mapping.white_level = data['view_settings']['curve_mapping']['white_level']
|
||||
target.view_settings.curve_mapping.black_level = data['view_settings']['curve_mapping']['black_level']
|
||||
target.view_settings.curve_mapping.update()
|
||||
view_settings = data.get('view_settings')
|
||||
if view_settings:
|
||||
loader.load(datablock.view_settings, view_settings)
|
||||
if datablock.view_settings.use_curve_mapping and \
|
||||
'curve_mapping' in view_settings:
|
||||
# TODO: change this ugly fix
|
||||
datablock.view_settings.curve_mapping.white_level = view_settings['curve_mapping']['white_level']
|
||||
datablock.view_settings.curve_mapping.black_level = view_settings['curve_mapping']['black_level']
|
||||
datablock.view_settings.curve_mapping.update()
|
||||
|
||||
def _dump_implementation(self, data, instance=None):
|
||||
assert(instance)
|
||||
# Sequencer
|
||||
sequences = data.get('sequences')
|
||||
|
||||
if sequences:
|
||||
# Create sequencer data
|
||||
datablock.sequence_editor_create()
|
||||
vse = datablock.sequence_editor
|
||||
|
||||
# Clear removed sequences
|
||||
for seq in vse.sequences_all:
|
||||
if seq.name not in sequences:
|
||||
vse.sequences.remove(seq)
|
||||
# Load existing sequences
|
||||
for seq_data in sequences.values():
|
||||
load_sequence(seq_data, vse)
|
||||
# If the sequence is no longer used, clear it
|
||||
elif datablock.sequence_editor and not sequences:
|
||||
datablock.sequence_editor_clear()
|
||||
|
||||
# Timeline markers
|
||||
markers = data.get('timeline_markers')
|
||||
if markers:
|
||||
datablock.timeline_markers.clear()
|
||||
for name, frame, camera in markers:
|
||||
marker = datablock.timeline_markers.new(name, frame=frame)
|
||||
if camera:
|
||||
marker.camera = resolve_datablock_from_uuid(camera, bpy.data.objects)
|
||||
marker.select = False
|
||||
# FIXME: Find a better way after the replication big refactoring
|
||||
# Keep other user from deleting collection object by flushing their history
|
||||
flush_history()
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
data = {}
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
|
||||
# Metadata
|
||||
scene_dumper = Dumper()
|
||||
scene_dumper.depth = 1
|
||||
scene_dumper.include_filter = [
|
||||
'name',
|
||||
'world',
|
||||
'id',
|
||||
'grease_pencil',
|
||||
'frame_start',
|
||||
'frame_end',
|
||||
'frame_step',
|
||||
]
|
||||
if self.preferences.sync_flags.sync_active_camera:
|
||||
if get_preferences().sync_flags.sync_active_camera:
|
||||
scene_dumper.include_filter.append('camera')
|
||||
|
||||
data = scene_dumper.dump(instance)
|
||||
data.update(scene_dumper.dump(datablock))
|
||||
|
||||
scene_dumper.depth = 3
|
||||
|
||||
scene_dumper.include_filter = ['children','objects','name']
|
||||
# Master collection
|
||||
data['collection'] = {}
|
||||
data['collection']['children'] = dump_collection_children(instance.collection)
|
||||
data['collection']['objects'] = dump_collection_objects(instance.collection)
|
||||
|
||||
data['collection']['children'] = dump_collection_children(
|
||||
datablock.collection)
|
||||
data['collection']['objects'] = dump_collection_objects(
|
||||
datablock.collection)
|
||||
|
||||
scene_dumper.depth = 1
|
||||
scene_dumper.include_filter = None
|
||||
|
||||
if self.preferences.sync_flags.sync_render_settings:
|
||||
scene_dumper.exclude_filter = [
|
||||
'gi_cache_info',
|
||||
'feature_set',
|
||||
'debug_use_hair_bvh',
|
||||
'aa_samples',
|
||||
'blur_glossy',
|
||||
'glossy_bounces',
|
||||
'device',
|
||||
'max_bounces',
|
||||
'preview_aa_samples',
|
||||
'preview_samples',
|
||||
'sample_clamp_indirect',
|
||||
'samples',
|
||||
'volume_bounces',
|
||||
'file_extension',
|
||||
'use_denoising'
|
||||
]
|
||||
data['eevee'] = scene_dumper.dump(instance.eevee)
|
||||
data['cycles'] = scene_dumper.dump(instance.cycles)
|
||||
data['view_settings'] = scene_dumper.dump(instance.view_settings)
|
||||
data['render'] = scene_dumper.dump(instance.render)
|
||||
# Render settings
|
||||
if get_preferences().sync_flags.sync_render_settings:
|
||||
scene_dumper.include_filter = RENDER_SETTINGS
|
||||
|
||||
if instance.view_settings.use_curve_mapping:
|
||||
data['view_settings']['curve_mapping'] = scene_dumper.dump(instance.view_settings.curve_mapping)
|
||||
data['render'] = scene_dumper.dump(datablock.render)
|
||||
|
||||
if datablock.render.engine == 'BLENDER_EEVEE':
|
||||
scene_dumper.include_filter = EVEE_SETTINGS
|
||||
data['eevee'] = scene_dumper.dump(datablock.eevee)
|
||||
elif datablock.render.engine == 'CYCLES':
|
||||
scene_dumper.include_filter = CYCLES_SETTINGS
|
||||
data['cycles'] = scene_dumper.dump(datablock.cycles)
|
||||
|
||||
scene_dumper.include_filter = VIEW_SETTINGS
|
||||
data['view_settings'] = scene_dumper.dump(datablock.view_settings)
|
||||
|
||||
if datablock.view_settings.use_curve_mapping:
|
||||
data['view_settings']['curve_mapping'] = scene_dumper.dump(
|
||||
datablock.view_settings.curve_mapping)
|
||||
scene_dumper.depth = 5
|
||||
scene_dumper.include_filter = [
|
||||
'curves',
|
||||
'points',
|
||||
'location'
|
||||
'location',
|
||||
]
|
||||
data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(instance.view_settings.curve_mapping.curves)
|
||||
|
||||
|
||||
data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(
|
||||
datablock.view_settings.curve_mapping.curves)
|
||||
|
||||
# Sequence
|
||||
vse = datablock.sequence_editor
|
||||
if vse:
|
||||
dumped_sequences = {}
|
||||
for seq in vse.sequences_all:
|
||||
dumped_sequences[seq.name] = dump_sequence(seq)
|
||||
data['sequences'] = dumped_sequences
|
||||
|
||||
# Timeline markers
|
||||
if datablock.timeline_markers:
|
||||
data['timeline_markers'] = [(m.name, m.frame, getattr(m.camera, 'uuid', None)) for m in datablock.timeline_markers]
|
||||
|
||||
if datablock.grease_pencil:
|
||||
data['grease_pencil'] = datablock.grease_pencil.uuid
|
||||
|
||||
return data
|
||||
|
||||
def _resolve_deps_implementation(self):
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
|
||||
# child collections
|
||||
for child in self.instance.collection.children:
|
||||
deps.append(child)
|
||||
|
||||
# childs objects
|
||||
for object in self.instance.objects:
|
||||
deps.append(object)
|
||||
|
||||
# Master Collection
|
||||
deps.extend(resolve_collection_dependencies(datablock.collection))
|
||||
|
||||
# world
|
||||
if self.instance.world:
|
||||
deps.append(self.instance.world)
|
||||
|
||||
if datablock.world:
|
||||
deps.append(datablock.world)
|
||||
|
||||
# annotations
|
||||
if self.instance.grease_pencil:
|
||||
deps.append(self.instance.grease_pencil)
|
||||
if datablock.grease_pencil:
|
||||
deps.append(datablock.grease_pencil)
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
# Sequences
|
||||
vse = datablock.sequence_editor
|
||||
if vse:
|
||||
for sequence in vse.sequences_all:
|
||||
if sequence.type == 'MOVIE' and sequence.filepath:
|
||||
deps.append(Path(bpy.path.abspath(sequence.filepath)))
|
||||
elif sequence.type == 'SOUND' and sequence.sound:
|
||||
deps.append(sequence.sound)
|
||||
elif sequence.type == 'IMAGE':
|
||||
for elem in sequence.elements:
|
||||
deps.append(
|
||||
Path(bpy.path.abspath(sequence.directory),
|
||||
elem.filename))
|
||||
|
||||
return deps
|
||||
|
||||
def diff(self):
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
name = data.get('name')
|
||||
datablock = resolve_datablock_from_uuid(uuid, bpy.data.scenes)
|
||||
if datablock is None:
|
||||
datablock = bpy.data.scenes.get(name)
|
||||
|
||||
return datablock
|
||||
|
||||
@staticmethod
|
||||
def compute_delta(last_data: dict, current_data: dict) -> Delta:
|
||||
exclude_path = []
|
||||
|
||||
if not self.preferences.sync_flags.sync_render_settings:
|
||||
if not get_preferences().sync_flags.sync_render_settings:
|
||||
exclude_path.append("root['eevee']")
|
||||
exclude_path.append("root['cycles']")
|
||||
exclude_path.append("root['view_settings']")
|
||||
exclude_path.append("root['render']")
|
||||
|
||||
if not self.preferences.sync_flags.sync_active_camera:
|
||||
if not get_preferences().sync_flags.sync_active_camera:
|
||||
exclude_path.append("root['camera']")
|
||||
|
||||
return DeepDiff(self.data, self._dump(instance=self.instance),exclude_paths=exclude_path, cache_size=5000)
|
||||
diff_params = {
|
||||
'exclude_paths': exclude_path,
|
||||
'ignore_order': True,
|
||||
'report_repetition': True
|
||||
}
|
||||
delta_params = {
|
||||
# 'mutate': True
|
||||
}
|
||||
|
||||
return Delta(
|
||||
DeepDiff(last_data,
|
||||
current_data,
|
||||
cache_size=5000,
|
||||
**diff_params),
|
||||
**delta_params)
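
# Illustrative sketch of how the Delta returned above can be consumed: deepdiff
# deltas can be added to the previous dumped state to reproduce the new one,
# assuming both peers share the same dump layout.
def _apply_scene_delta_example(last_data: dict, delta: Delta) -> dict:
    return last_data + delta
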
|
||||
|
||||
|
||||
_type = bpy.types.Scene
|
||||
_class = BlScene
|
||||
|
@ -23,47 +23,59 @@ from pathlib import Path
|
||||
import bpy
|
||||
|
||||
from .bl_file import get_filepath, ensure_unpacked
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .dump_anything import Dumper, Loader
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
|
||||
|
||||
class BlSound(BlDatablock):
|
||||
class BlSound(ReplicatedDatablock):
|
||||
bl_id = "sounds"
|
||||
bl_class = bpy.types.Sound
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'SOUND'
|
||||
bl_reload_parent = False
|
||||
|
||||
def _construct(self, data):
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
filename = data.get('filename')
|
||||
|
||||
return bpy.data.sounds.load(get_filepath(filename))
|
||||
|
||||
def _load(self, data, target):
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
|
||||
def diff(self):
|
||||
return False
|
||||
|
||||
def _dump(self, instance=None):
|
||||
filename = Path(instance.filepath).name
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
filename = Path(datablock.filepath).name
|
||||
|
||||
if not filename:
|
||||
raise FileExistsError(instance.filepath)
|
||||
|
||||
raise FileExistsError(datablock.filepath)
|
||||
|
||||
return {
|
||||
'filename': filename,
|
||||
'name': instance.name
|
||||
'name': datablock.name
|
||||
}
|
||||
|
||||
def _resolve_deps_implementation(self):
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
if self.instance.filepath and self.instance.filepath != '<builtin>':
|
||||
ensure_unpacked(self.instance)
|
||||
|
||||
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
|
||||
if datablock.filepath and datablock.filepath != '<builtin>':
|
||||
ensure_unpacked(datablock)
|
||||
|
||||
deps.append(Path(bpy.path.abspath(datablock.filepath)))
|
||||
|
||||
return deps
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.sounds)
|
||||
|
||||
@staticmethod
|
||||
def needs_update(datablock: object, data: dict) -> bool:
|
||||
return False
|
||||
|
||||
_type = bpy.types.Sound
|
||||
_class = BlSound
|
@ -20,28 +20,31 @@ import bpy
|
||||
import mathutils
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from .bl_datablock import BlDatablock
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
|
||||
class BlSpeaker(ReplicatedDatablock):
|
||||
use_delta = True
|
||||
|
||||
class BlSpeaker(BlDatablock):
|
||||
bl_id = "speakers"
|
||||
bl_class = bpy.types.Speaker
|
||||
bl_delay_refresh = 1
|
||||
bl_delay_apply = 1
|
||||
bl_automatic_push = True
|
||||
bl_check_common = False
|
||||
bl_icon = 'SPEAKER'
|
||||
bl_reload_parent = False
|
||||
|
||||
def _load_implementation(self, data, target):
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(target, data)
|
||||
loader.load(datablock, data)
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
def _construct(self, data):
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.speakers.new(data["name"])
|
||||
|
||||
def _dump_implementation(self, data, instance=None):
|
||||
assert(instance)
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
dumper = Dumper()
|
||||
dumper.depth = 1
|
||||
dumper.include_filter = [
|
||||
@ -60,17 +63,27 @@ class BlSpeaker(BlDatablock):
|
||||
'cone_volume_outer'
|
||||
]
|
||||
|
||||
return dumper.dump(instance)
|
||||
data = dumper.dump(datablock)
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
return data
|
||||
|
||||
def _resolve_deps_implementation(self):
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.speakers)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
# TODO: resolve material
|
||||
deps = []
|
||||
|
||||
sound = self.instance.sound
|
||||
sound = datablock.sound
|
||||
|
||||
if sound:
|
||||
deps.append(sound)
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
return deps
|
||||
|
||||
|
||||
_type = bpy.types.Speaker
|
||||
_class = BlSpeaker
|
||||
|
97
multi_user/bl_types/bl_texture.py
Normal file
@ -0,0 +1,97 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
from .dump_anything import Loader, Dumper
|
||||
from replication.protocol import ReplicatedDatablock
|
||||
from .bl_datablock import resolve_datablock_from_uuid
|
||||
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
|
||||
import bpy.types as T
|
||||
|
||||
class BlTexture(ReplicatedDatablock):
|
||||
use_delta = True
|
||||
|
||||
bl_id = "textures"
|
||||
bl_class = bpy.types.Texture
|
||||
bl_check_common = False
|
||||
bl_icon = 'TEXTURE'
|
||||
bl_reload_parent = False
|
||||
|
||||
@staticmethod
|
||||
def load(data: dict, datablock: object):
|
||||
loader = Loader()
|
||||
loader.load(datablock, data)
|
||||
load_animation_data(data.get('animation_data'), datablock)
|
||||
|
||||
@staticmethod
|
||||
def construct(data: dict) -> object:
|
||||
return bpy.data.textures.new(data["name"], data["type"])
|
||||
|
||||
@staticmethod
|
||||
def dump(datablock: object) -> dict:
|
||||
|
||||
dumper = Dumper()
|
||||
dumper.depth = 1
|
||||
dumper.exclude_filter = [
|
||||
'tag',
|
||||
'original',
|
||||
'users',
|
||||
'uuid',
|
||||
'is_embedded_data',
|
||||
'is_evaluated',
|
||||
'name_full'
|
||||
]
|
||||
|
||||
data = dumper.dump(datablock)
|
||||
|
||||
color_ramp = getattr(datablock, 'color_ramp', None)
|
||||
|
||||
if color_ramp:
|
||||
dumper.depth = 4
|
||||
data['color_ramp'] = dumper.dump(color_ramp)
|
||||
|
||||
data['animation_data'] = dump_animation_data(datablock)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def resolve(data: dict) -> object:
|
||||
uuid = data.get('uuid')
|
||||
return resolve_datablock_from_uuid(uuid, bpy.data.textures)
|
||||
|
||||
@staticmethod
|
||||
def resolve_deps(datablock: object) -> [object]:
|
||||
deps = []
|
||||
|
||||
image = getattr(datablock,"image", None)
|
||||
|
||||
if image:
|
||||
deps.append(image)
|
||||
|
||||
deps.extend(resolve_animation_dependencies(datablock))
|
||||
|
||||
return deps
|
||||
|
||||
_type = [T.WoodTexture, T.VoronoiTexture,
|
||||
T.StucciTexture, T.NoiseTexture,
|
||||
T.MusgraveTexture, T.MarbleTexture,
|
||||
T.MagicTexture, T.ImageTexture,
|
||||
T.DistortedNoiseTexture, T.CloudsTexture,
|
||||
T.BlendTexture]
|
||||
_class = BlTexture
|
101
multi_user/bl_types/bl_volume.py
Normal file
@ -0,0 +1,101 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####


import bpy
import mathutils
from pathlib import Path

from .dump_anything import Loader, Dumper
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_material import dump_materials_slots, load_materials_slots
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

class BlVolume(ReplicatedDatablock):
    use_delta = True

    bl_id = "volumes"
    bl_class = bpy.types.Volume
    bl_check_common = False
    bl_icon = 'VOLUME_DATA'
    bl_reload_parent = False

    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.volumes.new(data["name"])

    @staticmethod
    def dump(datablock: object) -> dict:
        dumper = Dumper()
        dumper.depth = 1
        dumper.exclude_filter = [
            'tag',
            'original',
            'users',
            'uuid',
            'is_embedded_data',
            'is_evaluated',
            'name_full',
            'use_fake_user'
        ]

        data = dumper.dump(datablock)

        data['display'] = dumper.dump(datablock.display)

        # Fix material index
        data['materials'] = dump_materials_slots(datablock.materials)
        data['animation_data'] = dump_animation_data(datablock)
        return data

    @staticmethod
    def load(data: dict, datablock: object):
        load_animation_data(data.get('animation_data'), datablock)
        loader = Loader()
        loader.load(datablock, data)
        loader.load(datablock.display, data['display'])

        # MATERIAL SLOTS
        src_materials = data.get('materials', None)
        if src_materials:
            load_materials_slots(src_materials, datablock.materials)

    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.volumes)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        # TODO: resolve material
        deps = []

        external_vdb = Path(bpy.path.abspath(datablock.filepath))
        if external_vdb.exists() and not external_vdb.is_dir():
            deps.append(external_vdb)

        for material in datablock.materials:
            if material:
                deps.append(material)

        deps.extend(resolve_animation_dependencies(datablock))

        return deps

_type = bpy.types.Volume
_class = BlVolume
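For reference, a minimal sketch of how a ReplicatedDatablock implementation such as the BlVolume class above could be exercised round-trip from a script. This assumes a running Blender with the add-on importable; the import path and variable names are illustrative only, not part of the diff:

    import bpy
    from multi_user.bl_types.bl_volume import BlVolume  # import path assumed

    src = bpy.data.volumes.new("smoke")        # datablock to replicate
    snapshot = BlVolume.dump(src)              # serialize to a plain dict

    dst = BlVolume.construct(snapshot)         # rebuild an empty datablock...
    BlVolume.load(snapshot, dst)               # ...and fill it from the dict

    deps = BlVolume.resolve_deps(src)          # external .vdb path + materials, if any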
@@ -20,44 +20,42 @@ import bpy
import mathutils

from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from .bl_material import load_links, load_node, dump_node, dump_links
from replication.protocol import ReplicatedDatablock
from .bl_material import (load_node_tree,
                          dump_node_tree,
                          get_node_tree_dependencies)

from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


class BlWorld(BlDatablock):
class BlWorld(ReplicatedDatablock):
    use_delta = True

    bl_id = "worlds"
    bl_class = bpy.types.World
    bl_delay_refresh = 1
    bl_delay_apply = 1
    bl_automatic_push = True
    bl_check_common = True
    bl_icon = 'WORLD_DATA'
    bl_reload_parent = False

    def _construct(self, data):
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.worlds.new(data["name"])

    def _load_implementation(self, data, target):
    @staticmethod
    def load(data: dict, datablock: object):
        load_animation_data(data.get('animation_data'), datablock)
        loader = Loader()
        loader.load(target, data)

        loader.load(datablock, data)

        if data["use_nodes"]:
            if target.node_tree is None:
                target.use_nodes = True
            if datablock.node_tree is None:
                datablock.use_nodes = True

            target.node_tree.nodes.clear()

            for node in data["node_tree"]["nodes"]:
                load_node(data["node_tree"]["nodes"][node], target.node_tree)

            # Load nodes links
            target.node_tree.links.clear()


            load_links(data["node_tree"]["links"], target.node_tree)

    def _dump_implementation(self, data, instance=None):
        assert(instance)
            load_node_tree(data['node_tree'], datablock.node_tree)

    @staticmethod
    def dump(datablock: object) -> dict:
        world_dumper = Dumper()
        world_dumper.depth = 1
        world_dumper.include_filter = [
@@ -65,28 +63,27 @@ class BlWorld(BlDatablock):
            "name",
            "color"
        ]
        data = world_dumper.dump(instance)
        if instance.use_nodes:
            data['node_tree'] = {}
            nodes = {}

            for node in instance.node_tree.nodes:
                nodes[node.name] = dump_node(node)

            data["node_tree"]['nodes'] = nodes

            data["node_tree"]['links'] = dump_links(instance.node_tree.links)
        data = world_dumper.dump(datablock)
        if datablock.use_nodes:
            data['node_tree'] = dump_node_tree(datablock.node_tree)

        data['animation_data'] = dump_animation_data(datablock)
        return data

    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.worlds)

    def _resolve_deps_implementation(self):
    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        deps = []

        if self.instance.use_nodes:
            for node in self.instance.node_tree.nodes:
                if node.type in ['TEX_IMAGE','TEX_ENVIRONMENT']:
                    deps.append(node.image)
        if self.is_library:
            deps.append(self.instance.library)
        if datablock.use_nodes:
            deps.extend(get_node_tree_dependencies(datablock.node_tree))

        deps.extend(resolve_animation_dependencies(datablock))
        return deps

_type = bpy.types.World
_class = BlWorld
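The BlWorld refactor above replaces the per-node dump/load calls with the shared node-tree helpers from bl_material. A minimal sketch of the intended round trip, assuming dump_node_tree/load_node_tree serialize a whole node tree to a dict and back as the new BlWorld code uses them (the import path and world name are illustrative):

    import bpy
    from multi_user.bl_types.bl_material import dump_node_tree, load_node_tree  # path assumed

    world = bpy.data.worlds["World"]
    world.use_nodes = True

    tree_data = dump_node_tree(world.node_tree)   # nodes + links captured as one dict
    load_node_tree(tree_data, world.node_tree)    # rebuild the tree in place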
@@ -24,8 +24,8 @@ import numpy as np


BPY_TO_NUMPY_TYPES = {
    'FLOAT': np.float,
    'INT': np.int,
    'FLOAT': np.float32,
    'INT': np.int32,
    'BOOL': np.bool}

PRIMITIVE_TYPES = ['FLOAT', 'INT', 'BOOLEAN']
@@ -47,7 +47,7 @@ def np_load_collection(dikt: dict, collection: bpy.types.CollectionProperty, att
    :type attributes: list
    """
    if not dikt or len(collection) == 0:
        logging.warning(f'Skipping collection')
        logging.debug(f'Skipping collection {collection}')
        return

    if attributes is None:
@@ -465,6 +465,7 @@ class Loader:
        self.type_subset = self.match_subset_all
        self.occlude_read_only = False
        self.order = ['*']
        self.exclure_filter = []

    def load(self, dst_data, src_dumped_data):
        self._load_any(
@@ -475,7 +476,8 @@ class Loader:

    def _load_any(self, any, dump):
        for filter_function, load_function in self.type_subset:
            if filter_function(any):
            if filter_function(any) and \
                    any.sub_element_name not in self.exclure_filter:
                load_function(any, dump)
                return

@@ -505,14 +507,12 @@ class Loader:
        _constructors = {
            T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]),
            T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []),
            T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"]),
            T.Constraint: (CONSTRUCTOR_NEW, ["type"]),
            T.GpencilModifier: (CONSTRUCTOR_NEW, ["name", "type"]),
        }

        destructors = {
            T.ColorRampElement: DESTRUCTOR_REMOVE,
            T.Modifier: DESTRUCTOR_CLEAR,
            T.Constraint: CONSTRUCTOR_NEW,
            T.GpencilModifier: DESTRUCTOR_CLEAR,
        }
        element_type = element.bl_rna_property.fixed_type

@@ -527,7 +527,13 @@ class Loader:
        if destructor:
            if destructor == DESTRUCTOR_REMOVE:
                collection = element.read()
                for i in range(len(collection)-1):
                elems_to_remove = len(collection)

                # Color ramp doesn't allow to remove all elements
                if type(element_type) == T.ColorRampElement:
                    elems_to_remove -= 1

                for i in range(elems_to_remove):
                    collection.remove(collection[0])
            else:
                getattr(element.read(), DESTRUCTOR_CLEAR)()
@@ -574,6 +580,7 @@ class Loader:
                dst_curve.points[int(point_idx)].location = pos
            else:
                dst_curve.points.new(pos[0], pos[1])
        curves.update()

    def _load_pointer(self, instance, dump):
        rna_property_type = instance.bl_rna_property.fixed_type
@@ -585,6 +592,8 @@ class Loader:
            instance.write(bpy.data.textures.get(dump))
        elif isinstance(rna_property_type, T.ColorRamp):
            self._load_default(instance, dump)
        elif isinstance(rna_property_type, T.NodeTree):
            instance.write(bpy.data.node_groups.get(dump))
        elif isinstance(rna_property_type, T.Object):
            instance.write(bpy.data.objects.get(dump))
        elif isinstance(rna_property_type, T.Mesh):
@@ -597,6 +606,8 @@ class Loader:
            instance.write(bpy.data.fonts.get(dump))
        elif isinstance(rna_property_type, T.Sound):
            instance.write(bpy.data.sounds.get(dump))
        # elif isinstance(rna_property_type, T.ParticleSettings):
        #     instance.write(bpy.data.particles.get(dump))

    def _load_matrix(self, matrix, dump):
        matrix.write(mathutils.Matrix(dump))
@@ -626,11 +637,11 @@ class Loader:
        for k in self._ordered_keys(dump.keys()):
            v = dump[k]
            if not hasattr(default.read(), k):
                logging.debug(f"Load default, skipping {default} : {k}")
                continue
            try:
                self._load_any(default.extend(k), v)
            except Exception as err:
                logging.debug(f"Cannot load {k}: {err}")
                logging.debug(f"Skipping {k}")

    @property
    def match_subset_all(self):
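The destructor change above accounts for the fact that a color ramp refuses to drop its last element, so one element is always kept. A minimal standalone sketch of the same clear-all-but-one pattern on a real color ramp (texture name is illustrative):

    import bpy

    tex = bpy.data.textures.new("ramp_demo", type='BLEND')
    tex.use_color_ramp = True
    ramp = tex.color_ramp

    # Blender raises an error if you try to empty the ramp, so keep one element
    for _ in range(len(ramp.elements) - 1):
        ramp.elements.remove(ramp.elements[0])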
@@ -1,374 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####

import logging

import bpy

from . import presence, utils
from replication.constants import (FETCHED,
                                   UP,
                                   RP_COMMON,
                                   STATE_INITIAL,
                                   STATE_QUITTING,
                                   STATE_ACTIVE,
                                   STATE_SYNCING,
                                   STATE_LOBBY,
                                   STATE_SRV_SYNC,
                                   REPARENT)

from replication.interface import session


class Delayable():
    """Delayable task interface
    """
    def __init__(self):
        self.is_registered = False

    def register(self):
        raise NotImplementedError

    def execute(self):
        raise NotImplementedError

    def unregister(self):
        raise NotImplementedError


class Timer(Delayable):
    """Timer binder interface for blender

    Run a bpy.app.Timer in the background looping at the given rate
    """

    def __init__(self, duration=1):
        super().__init__()
        self._timeout = duration
        self._running = True

    def register(self):
        """Register the timer into the blender timer system
        """

        if not self.is_registered:
            bpy.app.timers.register(self.main)
            self.is_registered = True
            logging.debug(f"Register {self.__class__.__name__}")
        else:
            logging.debug(f"Timer {self.__class__.__name__} already registered")

    def main(self):
        self.execute()

        if self._running:
            return self._timeout

    def execute(self):
        """Main timer loop
        """
        raise NotImplementedError

    def unregister(self):
        """Unnegister the timer of the blender timer system
        """
        if bpy.app.timers.is_registered(self.main):
            bpy.app.timers.unregister(self.main)

        self._running = False


class ApplyTimer(Timer):
    def __init__(self, timout=1, target_type=None):
        self._type = target_type
        super().__init__(timout)

    def execute(self):
        if session and session.state['STATE'] == STATE_ACTIVE:
            if self._type:
                nodes = session.list(filter=self._type)
            else:
                nodes = session.list()

            for node in nodes:
                node_ref = session.get(uuid=node)

                if node_ref.state == FETCHED:
                    try:
                        session.apply(node, force=True)
                    except Exception as e:
                        logging.error(f"Fail to apply {node_ref.uuid}: {e}")
                elif node_ref.state == REPARENT:
                    # Reload the node
                    node_ref.remove_instance()
                    node_ref.resolve()
                    session.apply(node, force=True)
                    for parent in session._graph.find_parents(node):
                        logging.info(f"Applying parent {parent}")
                        session.apply(parent, force=True)
                    node_ref.state = UP


class DynamicRightSelectTimer(Timer):
    def __init__(self, timout=.1):
        super().__init__(timout)
        self._last_selection = []
        self._user = None
        self._right_strategy = RP_COMMON

    def execute(self):
        settings = utils.get_preferences()

        if session and session.state['STATE'] == STATE_ACTIVE:
            # Find user
            if self._user is None:
                self._user = session.online_users.get(settings.username)

            if self._user:
                current_selection = utils.get_selected_objects(
                    bpy.context.scene,
                    bpy.data.window_managers['WinMan'].windows[0].view_layer
                )
                if current_selection != self._last_selection:
                    obj_common = [
                        o for o in self._last_selection if o not in current_selection]
                    obj_ours = [
                        o for o in current_selection if o not in self._last_selection]

                    # change old selection right to common
                    for obj in obj_common:
                        node = session.get(uuid=obj)

                        if node and (node.owner == settings.username or node.owner == RP_COMMON):
                            recursive = True
                            if node.data and 'instance_type' in node.data.keys():
                                recursive = node.data['instance_type'] != 'COLLECTION'
                            session.change_owner(
                                node.uuid,
                                RP_COMMON,
                                recursive=recursive)

                    # change new selection to our
                    for obj in obj_ours:
                        node = session.get(uuid=obj)

                        if node and node.owner == RP_COMMON:
                            recursive = True
                            if node.data and 'instance_type' in node.data.keys():
                                recursive = node.data['instance_type'] != 'COLLECTION'

                            session.change_owner(
                                node.uuid,
                                settings.username,
                                recursive=recursive)
                        else:
                            return

                    self._last_selection = current_selection

                    user_metadata = {
                        'selected_objects': current_selection
                    }

                    session.update_user_metadata(user_metadata)
                    logging.debug("Update selection")

                    # Fix deselection until right managment refactoring (with Roles concepts)
                    if len(current_selection) == 0 and self._right_strategy == RP_COMMON:
                        owned_keys = session.list(
                            filter_owner=settings.username)
                        for key in owned_keys:
                            node = session.get(uuid=key)

                            session.change_owner(
                                key,
                                RP_COMMON,
                                recursive=recursive)

            for user, user_info in session.online_users.items():
                if user != settings.username:
                    metadata = user_info.get('metadata')

                    if 'selected_objects' in metadata:
                        # Update selectionnable objects
                        for obj in bpy.data.objects:
                            if obj.hide_select and obj.uuid not in metadata['selected_objects']:
                                obj.hide_select = False
                            elif not obj.hide_select and obj.uuid in metadata['selected_objects']:
                                obj.hide_select = True


class Draw(Delayable):
    def __init__(self):
        super().__init__()
        self._handler = None

    def register(self):
        if not self.is_registered:
            self._handler = bpy.types.SpaceView3D.draw_handler_add(
                self.execute, (), 'WINDOW', 'POST_VIEW')
            logging.debug(f"Register {self.__class__.__name__}")
        else:
            logging.debug(f"Drow {self.__class__.__name__} already registered")

    def execute(self):
        raise NotImplementedError()

    def unregister(self):
        try:
            bpy.types.SpaceView3D.draw_handler_remove(
                self._handler, "WINDOW")
        except:
            pass


class DrawClient(Draw):
    def execute(self):
        renderer = getattr(presence, 'renderer', None)
        prefs = utils.get_preferences()

        if session and renderer and session.state['STATE'] == STATE_ACTIVE:
            settings = bpy.context.window_manager.session
            users = session.online_users

            # Update users
            for user in users.values():
                metadata = user.get('metadata')
                color = metadata.get('color')
                scene_current = metadata.get('scene_current')
                user_showable = scene_current == bpy.context.scene.name or settings.presence_show_far_user
                if color and scene_current and user_showable:
                    if settings.presence_show_selected and 'selected_objects' in metadata.keys():
                        renderer.draw_client_selection(
                            user['id'], color, metadata['selected_objects'])
                    if settings.presence_show_user and 'view_corners' in metadata:
                        renderer.draw_client_camera(
                            user['id'], metadata['view_corners'], color)
                if not user_showable:
                    # TODO: remove this when user event drivent update will be
                    # ready
                    renderer.flush_selection()
                    renderer.flush_users()


class ClientUpdate(Timer):
    def __init__(self, timout=.1):
        super().__init__(timout)
        self.handle_quit = False
        self.users_metadata = {}

    def execute(self):
        settings = utils.get_preferences()
        renderer = getattr(presence, 'renderer', None)

        if session and renderer:
            if session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]:
                local_user = session.online_users.get(
                    settings.username)

                if not local_user:
                    return
                else:
                    for username, user_data in session.online_users.items():
                        if username != settings.username:
                            cached_user_data = self.users_metadata.get(
                                username)
                            new_user_data = session.online_users[username]['metadata']

                            if cached_user_data is None:
                                self.users_metadata[username] = user_data['metadata']
                            elif 'view_matrix' in cached_user_data and 'view_matrix' in new_user_data and cached_user_data['view_matrix'] != new_user_data['view_matrix']:
                                presence.refresh_3d_view()
                                self.users_metadata[username] = user_data['metadata']
                                break
                            else:
                                self.users_metadata[username] = user_data['metadata']

                local_user_metadata = local_user.get('metadata')
                scene_current = bpy.context.scene.name
                local_user = session.online_users.get(settings.username)
                current_view_corners = presence.get_view_corners()

                # Init client metadata
                if not local_user_metadata or 'color' not in local_user_metadata.keys():
                    metadata = {
                        'view_corners': presence.get_view_matrix(),
                        'view_matrix': presence.get_view_matrix(),
                        'color': (settings.client_color.r,
                                  settings.client_color.g,
                                  settings.client_color.b,
                                  1),
                        'frame_current': bpy.context.scene.frame_current,
                        'scene_current': scene_current
                    }
                    session.update_user_metadata(metadata)

                # Update client representation
                # Update client current scene
                elif scene_current != local_user_metadata['scene_current']:
                    local_user_metadata['scene_current'] = scene_current
                    session.update_user_metadata(local_user_metadata)
                elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']:
                    local_user_metadata['view_corners'] = current_view_corners
                    local_user_metadata['view_matrix'] = presence.get_view_matrix(
                    )
                    session.update_user_metadata(local_user_metadata)


class SessionStatusUpdate(Timer):
    def __init__(self, timout=1):
        super().__init__(timout)

    def execute(self):
        presence.refresh_sidebar_view()


class SessionUserSync(Timer):
    def __init__(self, timout=1):
        super().__init__(timout)

    def execute(self):
        renderer = getattr(presence, 'renderer', None)

        if session and renderer:
            # sync online users
            session_users = session.online_users
            ui_users = bpy.context.window_manager.online_users

            for index, user in enumerate(ui_users):
                if user.username not in session_users.keys():
                    ui_users.remove(index)
                    renderer.flush_selection()
                    renderer.flush_users()
                    break

            for user in session_users:
                if user not in ui_users:
                    new_key = ui_users.add()
                    new_key.name = user
                    new_key.username = user


class MainThreadExecutor(Timer):
    def __init__(self, timout=1, execution_queue=None):
        super().__init__(timout)
        self.execution_queue = execution_queue

    def execute(self):
        while not self.execution_queue.empty():
            function = self.execution_queue.get()
            logging.debug(f"Executing {function.__name__}")
            function()
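The deleted Timer class above is a thin wrapper over Blender's timer API. For context, a minimal sketch of the underlying pattern it relies on: a registered callback that returns the delay until its next run, or None to stop (the callback name and interval are illustrative):

    import bpy

    def tick():
        print("timer tick")
        return 1.0  # run again in one second; return None to stop the timer

    bpy.app.timers.register(tick)
    # later, if still registered: bpy.app.timers.unregister(tick)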
@@ -24,20 +24,25 @@ import sys
from pathlib import Path
import socket
import re
import bpy

VERSION_EXPR = re.compile('\d+\.\d+\.\d+\w\d+')

THIRD_PARTY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
VERSION_EXPR = re.compile('\d+.\d+.\d+')
DEFAULT_CACHE_DIR = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), "cache")
REPLICATION_DEPENDENCIES = {
    "zmq",
    "deepdiff"
}
LIBS = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
REPLICATION = os.path.join(LIBS,"replication")

PYTHON_PATH = None
SUBPROCESS_DIR = None


rtypes = []


def module_can_be_imported(name):
def module_can_be_imported(name: str) -> bool:
    try:
        __import__(name)
        return True
@@ -50,7 +55,7 @@ def install_pip():
    subprocess.run([str(PYTHON_PATH), "-m", "ensurepip"])


def install_package(name, version):
def install_package(name: str, install_dir: str):
    logging.info(f"installing {name} version...")
    env = os.environ
    if "PIP_REQUIRE_VIRTUALENV" in env:
@@ -60,9 +65,13 @@ def install_package(name, version):
        # env var for the subprocess.
        env = os.environ.copy()
        del env["PIP_REQUIRE_VIRTUALENV"]
    subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}=={version}"], env=env)
    subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}", "-t", install_dir], env=env)

def check_package_version(name, required_version):
    if name in sys.modules:
        del sys.modules[name]


def check_package_version(name: str, required_version: str):
    logging.info(f"Checking {name} version...")
    out = subprocess.run([str(PYTHON_PATH), "-m", "pip", "show", name], capture_output=True)

@@ -74,6 +83,7 @@ def check_package_version(name, required_version):
        logging.info(f"{name} need an update")
        return False


def get_ip():
    """
    Retrieve the main network interface IP.
@@ -91,7 +101,25 @@ def check_dir(dir):
        os.makedirs(dir)


def setup(dependencies, python_path):
def setup_paths(paths: list):
    """ Add missing path to sys.path
    """
    for path in paths:
        if path not in sys.path:
            logging.debug(f"Adding {path} dir to the path.")
            sys.path.insert(0, path)


def remove_paths(paths: list):
    """ Remove list of path from sys.path
    """
    for path in paths:
        if path in sys.path:
            logging.debug(f"Removing {path} dir from the path.")
            sys.path.remove(path)


def install_modules(dependencies: list, python_path: str, install_dir: str):
    global PYTHON_PATH, SUBPROCESS_DIR

    PYTHON_PATH = Path(python_path)
@@ -100,9 +128,23 @@ def setup(dependencies, python_path):
    if not module_can_be_imported("pip"):
        install_pip()

    for package_name, package_version in dependencies:
    for package_name in dependencies:
        if not module_can_be_imported(package_name):
            install_package(package_name, package_version)
            install_package(package_name, install_dir=install_dir)
            module_can_be_imported(package_name)
        elif not check_package_version(package_name, package_version):
            install_package(package_name, package_version)

def register():
    if bpy.app.version >= (2,91,0):
        python_binary_path = sys.executable
    else:
        python_binary_path = bpy.app.binary_path_python

    for module_name in list(sys.modules.keys()):
        if 'replication' in module_name:
            del sys.modules[module_name]

    setup_paths([LIBS, REPLICATION])
    install_modules(REPLICATION_DEPENDENCIES, python_binary_path, install_dir=LIBS)

def unregister():
    remove_paths([REPLICATION, LIBS])
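The new install flow above targets a bundled libs directory instead of Blender's global site-packages, then makes that directory importable. A minimal sketch of the same pattern outside the add-on (the directory and package name are illustrative only):

    import subprocess
    import sys

    libs_dir = "/tmp/addon_libs"  # hypothetical vendored-libs folder
    subprocess.run([sys.executable, "-m", "pip", "install", "deepdiff", "-t", libs_dir], check=True)

    # make the vendored copy importable before anything else on sys.path
    if libs_dir not in sys.path:
        sys.path.insert(0, libs_dir)

    import deepdiff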
155  multi_user/handlers.py  Normal file
@@ -0,0 +1,155 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####

import logging

import bpy
from bpy.app.handlers import persistent
from replication import porcelain
from replication.constants import RP_COMMON, STATE_ACTIVE, STATE_SYNCING, UP
from replication.exception import ContextError, NonAuthorizedOperationError
from replication.interface import session

from . import shared_data, utils


def sanitize_deps_graph(remove_nodes: bool = False):
    """ Cleanup the replication graph
    """
    if session and session.state == STATE_ACTIVE:
        start = utils.current_milli_time()
        rm_cpt = 0
        for node in session.repository.graph.values():
            node.instance = session.repository.rdp.resolve(node.data)
            if node is None \
                    or (node.state == UP and not node.instance):
                if remove_nodes:
                    try:
                        porcelain.rm(session.repository,
                                     node.uuid,
                                     remove_dependencies=False)
                        logging.info(f"Removing {node.uuid}")
                        rm_cpt += 1
                    except NonAuthorizedOperationError:
                        continue
        logging.info(f"Sanitize took { utils.current_milli_time()-start} ms, removed {rm_cpt} nodes")


def update_external_dependencies():
    """Force external dependencies(files such as images) evaluation
    """
    external_types = ['WindowsPath', 'PosixPath', 'Image']
    nodes_ids = [n.uuid for n in session.repository.graph.values() if n.data['type_id'] in external_types]
    for node_id in nodes_ids:
        node = session.repository.graph.get(node_id)
        if node and node.owner in [session.repository.username, RP_COMMON]:
            porcelain.commit(session.repository, node_id)
            porcelain.push(session.repository, 'origin', node_id)


@persistent
def on_scene_update(scene):
    """Forward blender depsgraph update to replication
    """
    if session and session.state == STATE_ACTIVE:
        context = bpy.context
        blender_depsgraph = bpy.context.view_layer.depsgraph
        dependency_updates = [u for u in blender_depsgraph.updates]
        settings = utils.get_preferences()
        incoming_updates = shared_data.session.applied_updates

        distant_update = [getattr(u.id, 'uuid', None) for u in dependency_updates if getattr(u.id, 'uuid', None) in incoming_updates]
        if distant_update:
            for u in distant_update:
                shared_data.session.applied_updates.remove(u)
            logging.debug(f"Ignoring distant update of {dependency_updates[0].id.name}")
            return

        # NOTE: maybe we don't need to check each update but only the first
        for update in reversed(dependency_updates):
            update_uuid = getattr(update.id, 'uuid', None)
            if update_uuid:
                node = session.repository.graph.get(update.id.uuid)
                check_common = session.repository.rdp.get_implementation(update.id).bl_check_common

                if node and (node.owner == session.repository.username or check_common):
                    logging.debug(f"Evaluate {update.id.name}")
                    if node.state == UP:
                        try:
                            porcelain.commit(session.repository, node.uuid)
                            porcelain.push(session.repository,
                                           'origin', node.uuid)
                        except ReferenceError:
                            logging.debug(f"Reference error {node.uuid}")
                        except ContextError as e:
                            logging.debug(e)
                        except Exception as e:
                            logging.error(e)
                else:
                    continue
            elif isinstance(update.id, bpy.types.Scene):
                scene = bpy.data.scenes.get(update.id.name)
                scn_uuid = porcelain.add(session.repository, scene)
                porcelain.commit(session.repository, scn_uuid)
                porcelain.push(session.repository, 'origin', scn_uuid)

        scene_graph_changed = [u for u in reversed(dependency_updates) if getattr(u.id, 'uuid', None) and isinstance(u.id,(bpy.types.Scene,bpy.types.Collection))]
        if scene_graph_changed:
            porcelain.purge_orphan_nodes(session.repository)

        update_external_dependencies()


@persistent
def resolve_deps_graph(dummy):
    """Resolve deps graph

    Temporary solution to resolve each node pointers after a Undo.
    A future solution should be to avoid storing dataclock reference...

    """
    if session and session.state == STATE_ACTIVE:
        sanitize_deps_graph(remove_nodes=True)


@persistent
def load_pre_handler(dummy):
    if session and session.state in [STATE_ACTIVE, STATE_SYNCING]:
        bpy.ops.session.stop()


@persistent
def update_client_frame(scene):
    if session and session.state == STATE_ACTIVE:
        porcelain.update_user_metadata(session.repository, {
            'frame_current': scene.frame_current
        })


def register():
    bpy.app.handlers.undo_post.append(resolve_deps_graph)
    bpy.app.handlers.redo_post.append(resolve_deps_graph)

    bpy.app.handlers.load_pre.append(load_pre_handler)
    bpy.app.handlers.frame_change_pre.append(update_client_frame)


def unregister():
    bpy.app.handlers.undo_post.remove(resolve_deps_graph)
    bpy.app.handlers.redo_post.remove(resolve_deps_graph)

    bpy.app.handlers.load_pre.remove(load_pre_handler)
    bpy.app.handlers.frame_change_pre.remove(update_client_frame)
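The handlers above follow Blender's persistent-handler pattern: a decorated callback appended to one of the bpy.app.handlers lists survives file loads. A minimal standalone sketch of that pattern (the handler name and body are illustrative, not how this add-on wires its handlers):

    import bpy
    from bpy.app.handlers import persistent

    @persistent
    def demo_depsgraph_listener(scene):
        # called after every dependency-graph update
        print(f"depsgraph updated in scene {scene.name}")

    bpy.app.handlers.depsgraph_update_post.append(demo_depsgraph_listener)
    # cleanup: bpy.app.handlers.depsgraph_update_post.remove(demo_depsgraph_listener)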
@@ -15,33 +15,31 @@
#
# ##### END GPL LICENSE BLOCK #####


import bpy
import mathutils
import os

from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from pathlib import Path
import bpy.utils.previews

def register():

    global icons_col

    pcoll = bpy.utils.previews.new()
    icons_dir = os.path.join(os.path.dirname(__file__), ".")
    for png in Path(icons_dir).rglob("*.png"):
        pcoll.load(png.stem, str(png), "IMAGE")

    icons_col = pcoll


class BlLibrary(BlDatablock):
    bl_id = "libraries"
    bl_class = bpy.types.Library
    bl_delay_refresh = 1
    bl_delay_apply = 1
    bl_automatic_push = True
    bl_check_common = False
    bl_icon = 'LIBRARY_DATA_DIRECT'
def unregister():

    def _construct(self, data):
        with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData):
            targetData = sourceData
        return sourceData
    def _load(self, data, target):
    global icons_col

    try:
        bpy.utils.previews.remove(icons_col)
    except Exception:
        pass

    def _dump(self, instance=None):
        assert(instance)
        dumper = Dumper()
        return dumper.dump(instance)


icons_col = None
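The new icons module above relies on bpy.utils.previews. For reference, a minimal sketch of how such a loaded preview is typically consumed in UI code (the collection, icon name, and path are illustrative):

    import os
    import bpy
    import bpy.utils.previews

    pcoll = bpy.utils.previews.new()
    icon_path = os.path.join(os.path.dirname(__file__), "server_online.png")  # hypothetical path
    pcoll.load("server_online", icon_path, 'IMAGE')

    # In a draw() callback, the integer icon_id is what layout functions expect:
    # layout.label(text="Online", icon_value=pcoll["server_online"].icon_id)

    bpy.utils.previews.remove(pcoll)  # release the collection on unregister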
BIN  multi_user/icons/repository_merge.png        Normal file  (6.5 KiB)
BIN  multi_user/icons/repository_push.png         Normal file  (4.2 KiB)
BIN  multi_user/icons/server_offline.png          Normal file  (6.7 KiB)
BIN  multi_user/icons/server_online.png           Normal file  (9.5 KiB)
BIN  multi_user/icons/session_status_offline.png  Normal file  (9.4 KiB)