Compare commits
292 Commits
96-add-a-k
...
132-fix-un
Author | SHA1 | Date | |
---|---|---|---|
45437660ba | |||
ee93a5b209 | |||
f90c12b27f | |||
3573db0969 | |||
92bde00a5a | |||
2c82560d24 | |||
6f364d2b88 | |||
760b52c02b | |||
4dd932fc56 | |||
ba1a03cbfa | |||
18b5fa795c | |||
1a82ec72e4 | |||
804747c73b | |||
7ee705332f | |||
4bd0055056 | |||
716c78e380 | |||
5e4ce4556f | |||
aa9ea08151 | |||
f56890128e | |||
8865556229 | |||
5bc9b10c12 | |||
7db3c18213 | |||
f151c61d7b | |||
ff35e34032 | |||
9f8222afa7 | |||
1828bfac22 | |||
3a1087ecb8 | |||
b398541787 | |||
f0b33d8471 | |||
5a282a3e22 | |||
4283fc0fff | |||
753f4d3f27 | |||
9dd02b2756 | |||
c74d12c843 | |||
e1d9982276 | |||
8861986213 | |||
1cb9fb410c | |||
c4a8cc4606 | |||
187f11071c | |||
530fae8cb4 | |||
6771c371a1 | |||
c844c6e54f | |||
a4d0b1a68b | |||
2fdc11692d | |||
dbfca4568f | |||
069a528276 | |||
030f2661fd | |||
e589e3eec4 | |||
04140ced1b | |||
0d9ce43e74 | |||
d3969b4fd4 | |||
e21f64ac98 | |||
b25b380d21 | |||
1146d9d304 | |||
51b60521e6 | |||
035f8a1dcd | |||
cefaef5c4b | |||
4714e60ff7 | |||
3eca25ae19 | |||
96346f8a25 | |||
a258c2c182 | |||
6862df5331 | |||
f271a9d0e3 | |||
bdff6eb5c9 | |||
b661407952 | |||
d5eb7fda02 | |||
35e8ac9c33 | |||
4453d256b8 | |||
299e330ec6 | |||
34b9f7ae27 | |||
9d100d84ad | |||
2f677c399e | |||
e967b35c38 | |||
7bd0a196b4 | |||
7892b5e9b6 | |||
f779678c0e | |||
629fc2d223 | |||
724c2345df | |||
673c4e69a4 | |||
fbfff6c7ec | |||
f592294335 | |||
8e7be5afde | |||
fc76b2a8e6 | |||
1a8bcddb74 | |||
60fba5b9df | |||
be0eb1fa42 | |||
93d9bea3ae | |||
022b7f7822 | |||
ae34846509 | |||
d328077cb0 | |||
0c4740eef8 | |||
d7b2c7e2f6 | |||
efbb9e7096 | |||
e0b56d8990 | |||
7a94c21187 | |||
0687090f05 | |||
920744334c | |||
dfa7f98126 | |||
ea530f0f96 | |||
c3546ff74f | |||
83aa9b57ec | |||
28a265be68 | |||
7dfabb16c7 | |||
ea5d9371ca | |||
3df73a0716 | |||
ae3c994ff1 | |||
bd73b385b6 | |||
f054b1c5f2 | |||
d083100a2a | |||
b813b8df9e | |||
d0e966ff1a | |||
56cbf14fe1 | |||
8bf55ebd46 | |||
edbc5ee343 | |||
4a92511582 | |||
b42df2cf4a | |||
7549466824 | |||
423e71476d | |||
3bc4b20035 | |||
9966a24b5e | |||
577c01a594 | |||
3d72796c10 | |||
edcbd7b02a | |||
b368c985b8 | |||
cab1a71eaa | |||
33cb188509 | |||
0a3dd9b5b8 | |||
7fbdbdcc21 | |||
8f9d5aabf9 | |||
824d4d6a83 | |||
5f4bccbcd9 | |||
8e8e54fe7d | |||
04b13cc0b7 | |||
ba98875560 | |||
a9fb84a5c6 | |||
2f139178d3 | |||
e466f81600 | |||
cb836e30f5 | |||
152e356dad | |||
7b13e8978b | |||
e0839fe1fb | |||
aec3e8b8bf | |||
a89564de6b | |||
e301a10456 | |||
cfc6ce91bc | |||
4f731c6640 | |||
9b1b8f11fd | |||
e742c824fc | |||
6757bbbd30 | |||
f6a39e4290 | |||
410d8d2f1a | |||
bd64c17f05 | |||
dc063b5954 | |||
0ae34d5702 | |||
167b39f15e | |||
9adc0d7d6e | |||
fb622fa098 | |||
c533d4b86a | |||
6c47e095be | |||
f992d06b03 | |||
af3afc1124 | |||
b77ab2dd05 | |||
150054d19c | |||
8d2b9e5580 | |||
6870331c34 | |||
6f73b7fc29 | |||
6385830f53 | |||
b705228f4a | |||
73d2da4c47 | |||
b28e7c2149 | |||
38f06683be | |||
62221c9e49 | |||
e9f416f682 | |||
3108a06e89 | |||
470df50dc2 | |||
d8a94e3f5e | |||
47a0efef27 | |||
ca5aebfeff | |||
fe6ffd19b4 | |||
b9a6ddafe9 | |||
ae71d7757e | |||
34ed5da6f0 | |||
2c16f07ae7 | |||
60f25359d1 | |||
975b50a988 | |||
66417dc84a | |||
514f90d602 | |||
086876ad2e | |||
71c179f32f | |||
2399096b07 | |||
0c4d1aaa5f | |||
de8fbb0629 | |||
d7396e578c | |||
7f5b5866f2 | |||
3eb1af406b | |||
79ccac915f | |||
f5232ccea0 | |||
c599a4e6ea | |||
b3230177d8 | |||
f2da4cb8e9 | |||
605bcc7581 | |||
e31d76a641 | |||
97c2118b7e | |||
352977e442 | |||
a46d5fa227 | |||
ade736d8a5 | |||
d7f7e86015 | |||
5e7d1e1dda | |||
fa5f0c7296 | |||
f14d0915c8 | |||
d1e088d229 | |||
aa35da9c56 | |||
f26c3b2606 | |||
00d60be75b | |||
bb5b9fe4c8 | |||
c6af49492e | |||
6158ef5171 | |||
6475b4fc08 | |||
e4e09d63ff | |||
4b07ae0cc3 | |||
49a419cbe2 | |||
5d52fb2460 | |||
f1e09c1507 | |||
f915c52bd0 | |||
dee2e77552 | |||
7953a2a177 | |||
3f0082927e | |||
07ffe05a84 | |||
09ee1cf826 | |||
61bcec98c3 | |||
1c85d436fd | |||
03318026d4 | |||
7a0b142d69 | |||
eb874110f8 | |||
6e0c7bc332 | |||
ee83e61b09 | |||
99b2dc0539 | |||
53f1118181 | |||
2791264a92 | |||
6c2ee0cad3 | |||
20f8c25f55 | |||
0224f55104 | |||
644702ebdf | |||
9377b2be9b | |||
29cbf23142 | |||
a645f71d19 | |||
909d92a7a1 | |||
7ee9089087 | |||
6201c82392 | |||
0faf7d9436 | |||
e69e61117a | |||
25e988d423 | |||
8a3ab895e0 | |||
06a8e3c0ab | |||
c1c1628a38 | |||
022e3354d9 | |||
211cb848b9 | |||
25e233f328 | |||
9bc3d9b29d | |||
15debf339d | |||
56df7d182d | |||
26e1579e35 | |||
a0e290ad6d | |||
092384b2e4 | |||
2dc3654e6c | |||
f37a9efc60 | |||
0c5d323063 | |||
b9f1b8a871 | |||
2f6d8e1701 | |||
9e64584f2d | |||
154aaf71c8 | |||
ac24ab69ff | |||
ad431378f8 | |||
784506cd95 | |||
eb7542b1dd | |||
25c19471bb | |||
9e4e646bb1 | |||
f8fa407a45 | |||
a0676f4e37 | |||
61a05dc347 | |||
a6e1566f89 | |||
adeb694b2d | |||
50d14e663e | |||
9b8d69042d | |||
b2475081b6 | |||
aef1d8987c | |||
292f76aea5 | |||
28c4ccf1f3 | |||
549b0b3784 | |||
fc9ab1a7e6 | |||
44bffc1850 | |||
a141e9bfe7 |
@ -1,8 +1,10 @@
|
|||||||
stages:
|
stages:
|
||||||
- test
|
- test
|
||||||
- build
|
- build
|
||||||
|
- deploy
|
||||||
|
|
||||||
|
|
||||||
include:
|
include:
|
||||||
- local: .gitlab/ci/test.gitlab-ci.yml
|
- local: .gitlab/ci/test.gitlab-ci.yml
|
||||||
- local: .gitlab/ci/build.gitlab-ci.yml
|
- local: .gitlab/ci/build.gitlab-ci.yml
|
||||||
|
- local: .gitlab/ci/deploy.gitlab-ci.yml
|
||||||
|
@ -1,14 +1,13 @@
|
|||||||
build:
|
build:
|
||||||
stage: build
|
stage: build
|
||||||
image: python:latest
|
image: debian:stable-slim
|
||||||
script:
|
script:
|
||||||
- git submodule init
|
|
||||||
- git submodule update
|
|
||||||
- cd multi_user/libs/replication
|
|
||||||
- rm -rf tests .git .gitignore script
|
- rm -rf tests .git .gitignore script
|
||||||
|
|
||||||
artifacts:
|
artifacts:
|
||||||
name: multi_user
|
name: multi_user
|
||||||
paths:
|
paths:
|
||||||
- multi_user
|
- multi_user
|
||||||
|
only:
|
||||||
|
refs:
|
||||||
|
- master
|
||||||
|
- develop
|
23
.gitlab/ci/deploy.gitlab-ci.yml
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
deploy:
|
||||||
|
stage: deploy
|
||||||
|
image: slumber/docker-python
|
||||||
|
variables:
|
||||||
|
DOCKER_DRIVER: overlay2
|
||||||
|
DOCKER_TLS_CERTDIR: "/certs"
|
||||||
|
|
||||||
|
services:
|
||||||
|
- docker:19.03.12-dind
|
||||||
|
|
||||||
|
script:
|
||||||
|
- RP_VERSION="$(python scripts/get_replication_version.py)"
|
||||||
|
- VERSION="$(python scripts/get_addon_version.py)"
|
||||||
|
- echo "Building docker image with replication ${RP_VERSION}"
|
||||||
|
- docker build --build-arg replication_version=${RP_VERSION} --build-arg version={VERSION} -t registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION} ./scripts/docker_server
|
||||||
|
- echo "Pushing to gitlab registry ${VERSION}"
|
||||||
|
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
|
||||||
|
- docker push registry.gitlab.com/slumber/multi-user/multi-user-server:${VERSION}
|
||||||
|
|
||||||
|
only:
|
||||||
|
refs:
|
||||||
|
- master
|
||||||
|
- develop
|
@ -1,14 +1,5 @@
|
|||||||
test:
|
test:
|
||||||
stage: test
|
stage: test
|
||||||
image: python:latest
|
image: slumber/blender-addon-testing:latest
|
||||||
script:
|
script:
|
||||||
- git submodule init
|
|
||||||
- git submodule update
|
|
||||||
- apt update
|
|
||||||
# install blender to get all required dependencies
|
|
||||||
# TODO: indtall only dependencies
|
|
||||||
- apt install -f -y gcc python-dev python3.7-dev
|
|
||||||
- apt install -f -y blender
|
|
||||||
- python3 -m pip install blender-addon-tester
|
|
||||||
- python3 scripts/test_addon.py
|
- python3 scripts/test_addon.py
|
||||||
|
|
||||||
|
3
.gitmodules
vendored
@ -1,3 +0,0 @@
|
|||||||
[submodule "multi_user/libs/replication"]
|
|
||||||
path = multi_user/libs/replication
|
|
||||||
url = https://gitlab.com/slumber/replication.git
|
|
||||||
|
64
CHANGELOG.md
@ -37,7 +37,7 @@ All notable changes to this project will be documented in this file.
|
|||||||
- Serialization is now based on marshal (2x performance improvements).
|
- Serialization is now based on marshal (2x performance improvements).
|
||||||
- Let pip chose python dependencies install path.
|
- Let pip chose python dependencies install path.
|
||||||
|
|
||||||
## [0.0.3] - Upcoming
|
## [0.0.3] - 2020-07-29
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
|
|
||||||
@ -60,8 +60,68 @@ All notable changes to this project will be documented in this file.
|
|||||||
- user localization
|
- user localization
|
||||||
- repository init
|
- repository init
|
||||||
|
|
||||||
|
|
||||||
### Removed
|
### Removed
|
||||||
|
|
||||||
- Unused strict right management strategy
|
- Unused strict right management strategy
|
||||||
- Legacy config management system
|
- Legacy config management system
|
||||||
|
|
||||||
|
## [0.1.0] - 2020-10-05
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- Dependency graph driven updates [experimental]
|
||||||
|
- Edit Mode updates
|
||||||
|
- Late join mechanism
|
||||||
|
- Sync Axis lock replication
|
||||||
|
- Sync collection offset
|
||||||
|
- Sync camera orthographic scale
|
||||||
|
- Sync custom fonts
|
||||||
|
- Sync sound files
|
||||||
|
- Logging configuration (file output and level)
|
||||||
|
- Object visibility type replication
|
||||||
|
- Optionnal sync for active camera
|
||||||
|
- Curve->Mesh conversion
|
||||||
|
- Mesh->gpencil conversion
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Auto updater now handle installation from branches
|
||||||
|
- Use uuid for collection loading
|
||||||
|
- Moved session instance to replication package
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Prevent unsupported data types to crash the session
|
||||||
|
- Modifier vertex group assignation
|
||||||
|
- World sync
|
||||||
|
- Snapshot UUID error
|
||||||
|
- The world is not synchronized
|
||||||
|
|
||||||
|
## [0.1.1] - 2020-10-16
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- Session status widget
|
||||||
|
- Affect dependencies during change owner
|
||||||
|
- Dedicated server managment scripts(@brybalicious)
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Refactored presence.py
|
||||||
|
- Reset button UI icon
|
||||||
|
- Documentation `How to contribute` improvements (@brybalicious)
|
||||||
|
- Documentation `Hosting guide` improvements (@brybalicious)
|
||||||
|
- Show flags are now available from the viewport overlay
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Render sync race condition (causing scene errors)
|
||||||
|
- Binary differentials
|
||||||
|
- Hybrid session crashes between Linux/Windows
|
||||||
|
- Materials node default output value
|
||||||
|
- Right selection
|
||||||
|
- Client node rights changed to COMMON after disconnecting from the server
|
||||||
|
- Collection instances selection draw
|
||||||
|
- Packed image save error
|
||||||
|
- Material replication
|
||||||
|
- UI spelling errors (@brybalicious)
|
||||||
|
25
README.md
@ -11,7 +11,7 @@ This tool aims to allow multiple users to work on the same scene over the networ
|
|||||||
|
|
||||||
## Quick installation
|
## Quick installation
|
||||||
|
|
||||||
1. Download latest release [multi_user.zip](/uploads/8aef79c7cf5b1d9606dc58307fd9ad8b/multi_user.zip).
|
1. Download latest release [multi_user.zip](https://gitlab.com/slumber/multi-user/-/jobs/artifacts/master/download?job=build).
|
||||||
2. Run blender as administrator (dependencies installation).
|
2. Run blender as administrator (dependencies installation).
|
||||||
3. Install last_version.zip from your addon preferences.
|
3. Install last_version.zip from your addon preferences.
|
||||||
|
|
||||||
@ -26,23 +26,28 @@ See the [documentation](https://multi-user.readthedocs.io/en/latest/) for detail
|
|||||||
Currently, not all data-block are supported for replication over the wire. The following list summarizes the status for each ones.
|
Currently, not all data-block are supported for replication over the wire. The following list summarizes the status for each ones.
|
||||||
|
|
||||||
| Name | Status | Comment |
|
| Name | Status | Comment |
|
||||||
| ----------- | :----: | :-----------------------------------------------------------: |
|
| ----------- | :----: | :--------------------------------------------------------------------------: |
|
||||||
| action | ❗ | Not stable |
|
| action | ✔️ | |
|
||||||
| armature | ❗ | Not stable |
|
| armature | ❗ | Not stable |
|
||||||
| camera | ✔️ | |
|
| camera | ✔️ | |
|
||||||
| collection | ✔️ | |
|
| collection | ✔️ | |
|
||||||
| curve | ✔️ | Nurbs surface don't load correctly |
|
| curve | ❗ | Nurbs not supported |
|
||||||
| gpencil | ✔️ | |
|
| gpencil | ✔️ | [Airbrush not supported](https://gitlab.com/slumber/multi-user/-/issues/123) |
|
||||||
| image | ❗ | Not stable yet |
|
| image | ✔️ | |
|
||||||
| mesh | ✔️ | |
|
| mesh | ✔️ | |
|
||||||
| material | ✔️ | |
|
| material | ✔️ | |
|
||||||
| metaball | ✔️ | |
|
| metaball | ✔️ | |
|
||||||
| object | ✔️ | |
|
| object | ✔️ | |
|
||||||
|
| texts | ✔️ | |
|
||||||
| scene | ✔️ | |
|
| scene | ✔️ | |
|
||||||
| world | ✔️ | |
|
| world | ✔️ | |
|
||||||
| lightprobes | ✔️ | |
|
| lightprobes | ✔️ | |
|
||||||
|
| compositing | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46) |
|
||||||
|
| texts | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81) |
|
||||||
|
| nla | ❌ | |
|
||||||
|
| volumes | ❌ | |
|
||||||
| particles | ❌ | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24) |
|
| particles | ❌ | [On-going](https://gitlab.com/slumber/multi-user/-/issues/24) |
|
||||||
| speakers | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/65) |
|
| speakers | ❗ | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65) |
|
||||||
| vse | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
|
| vse | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
|
||||||
| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
|
| physics | ❌ | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45) |
|
||||||
| libraries | ❗ | Partial |
|
| libraries | ❗ | Partial |
|
||||||
@ -57,14 +62,16 @@ I'm working on it.
|
|||||||
|
|
||||||
| Dependencies | Version | Needed |
|
| Dependencies | Version | Needed |
|
||||||
| ------------ | :-----: | -----: |
|
| ------------ | :-----: | -----: |
|
||||||
| ZeroMQ | latest | yes |
|
| Replication | latest | yes |
|
||||||
| JsonDiff | latest | yes |
|
|
||||||
|
|
||||||
|
|
||||||
## Contributing
|
## Contributing
|
||||||
|
|
||||||
See [contributing section](https://multi-user.readthedocs.io/en/latest/ways_to_contribute.html) of the documentation.
|
See [contributing section](https://multi-user.readthedocs.io/en/latest/ways_to_contribute.html) of the documentation.
|
||||||
|
|
||||||
|
Feel free to [join the discord server](https://discord.gg/aBPvGws) to chat, seek help and contribute.
|
||||||
|
|
||||||
## Licensing
|
## Licensing
|
||||||
|
|
||||||
See [license](LICENSE)
|
See [license](LICENSE)
|
||||||
|
@ -22,7 +22,7 @@ copyright = '2020, Swann Martinez'
|
|||||||
author = 'Swann Martinez'
|
author = 'Swann Martinez'
|
||||||
|
|
||||||
# The full version, including alpha/beta/rc tags
|
# The full version, including alpha/beta/rc tags
|
||||||
release = '0.0.2'
|
release = '0.1.0'
|
||||||
|
|
||||||
|
|
||||||
# -- General configuration ---------------------------------------------------
|
# -- General configuration ---------------------------------------------------
|
||||||
|
Before Width: | Height: | Size: 21 KiB After Width: | Height: | Size: 8.4 KiB |
BIN
docs/getting_started/img/quickstart_advanced_cache.png
Normal file
After Width: | Height: | Size: 7.6 KiB |
BIN
docs/getting_started/img/quickstart_advanced_logging.png
Normal file
After Width: | Height: | Size: 2.9 KiB |
BIN
docs/getting_started/img/quickstart_advanced_network.png
Normal file
After Width: | Height: | Size: 4.1 KiB |
BIN
docs/getting_started/img/quickstart_advanced_replication.png
Normal file
After Width: | Height: | Size: 18 KiB |
Before Width: | Height: | Size: 9.7 KiB After Width: | Height: | Size: 12 KiB |
Before Width: | Height: | Size: 22 KiB After Width: | Height: | Size: 13 KiB |
Before Width: | Height: | Size: 7.1 KiB After Width: | Height: | Size: 559 B |
BIN
docs/getting_started/img/quickstart_replication.png
Normal file
After Width: | Height: | Size: 15 KiB |
BIN
docs/getting_started/img/quickstart_status.png
Normal file
After Width: | Height: | Size: 70 KiB |
@ -8,5 +8,4 @@ Getting started
|
|||||||
|
|
||||||
install
|
install
|
||||||
quickstart
|
quickstart
|
||||||
known_problems
|
|
||||||
glossary
|
glossary
|
||||||
|
@ -1,46 +0,0 @@
|
|||||||
.. _known-problems:
|
|
||||||
|
|
||||||
==============
|
|
||||||
Known problems
|
|
||||||
==============
|
|
||||||
|
|
||||||
.. rubric:: What do you need to do in order to use Multi-User through internet?
|
|
||||||
|
|
||||||
1. Use Hamachi or ZeroTier (I prefer Hamachi) and create a network.
|
|
||||||
2. All participants need to join this network.
|
|
||||||
3. Go to Blender and install Multi-User in the preferneces.
|
|
||||||
4. Setup and start the session:
|
|
||||||
|
|
||||||
* **Host**: After activating Multi-User as an Add-On, press N and go on Multi-User.
|
|
||||||
Then, put the IP of your network where IP is asked for.
|
|
||||||
Leave Port and IPC Port on default(5555 and 5561). Increase the Timeout(ms) if the connection is not stable.
|
|
||||||
Then press on "host".
|
|
||||||
|
|
||||||
* **Guest**: After activating Multi-User as an Add-On, press N and go to Multi-User
|
|
||||||
Then, put the IP of your network where IP is asked for.
|
|
||||||
Leave Port and IPC Port on default(5555 and 5561)(Simpler, put the same information that the host is using.
|
|
||||||
BUT,it needs 4 ports for communication. Therefore, you need to put 5555+count of guests [up to 4]. ).
|
|
||||||
Increase the Timeout(ms) if the connection is not stable. Then press on "connexion".
|
|
||||||
|
|
||||||
.. rubric:: What do you need to check if you can't host?
|
|
||||||
|
|
||||||
You need to check, if the IP and all ports are correct. If it's not loading, because you laoded a project before hosting, it's not your fault.
|
|
||||||
Then the version is not sable yet (the project contains data, that is not made stable yet).
|
|
||||||
|
|
||||||
.. rubric:: What do you need to check if you can't connect?
|
|
||||||
|
|
||||||
Check, if you are connected to the network (VPN) of the host. Also, check if you have all of the information like the host has.
|
|
||||||
Maybe you have different versions (which shouldn't be the case after Auto-Updater is introduced).
|
|
||||||
|
|
||||||
.. rubric:: You are connected, but you dont see anything?
|
|
||||||
|
|
||||||
After pressing N, go presence overlay and check the box.
|
|
||||||
Also, go down and uncheck the box "Show only owned"(unless you need privacy ( ͡° ͜ʖ ͡°) ).
|
|
||||||
|
|
||||||
If it's still not working, hit the support channel on the discord channel "multi-user". This little helping text is produced by my own experience
|
|
||||||
(Ultr-X).
|
|
||||||
In order to bring attention to other problems, please @ me on the support channel. Every problem brought to me will be documentated to optimize and update this text.
|
|
||||||
Thank you and have fun with Multi-User, brought to you by "swann".
|
|
||||||
|
|
||||||
Here the discord server: https://discord.gg/v5eKgm
|
|
||||||
|
|
@ -161,6 +161,19 @@ The collaboration quality directly depend on the communication quality. This sec
|
|||||||
various tools made in an effort to ease the communication between the different session users.
|
various tools made in an effort to ease the communication between the different session users.
|
||||||
Feel free to suggest any idea for communication tools `here <https://gitlab.com/slumber/multi-user/-/issues/75>`_ .
|
Feel free to suggest any idea for communication tools `here <https://gitlab.com/slumber/multi-user/-/issues/75>`_ .
|
||||||
|
|
||||||
|
---------------------------
|
||||||
|
Change replication behavior
|
||||||
|
---------------------------
|
||||||
|
|
||||||
|
During a session, the multi-user will replicate your modifications to other instances.
|
||||||
|
In order to avoid annoying other users when you are experimenting, some of those modifications can be ignored via
|
||||||
|
various flags present at the top of the panel (see red area in the image bellow). Those flags are explained in the :ref:`replication` section.
|
||||||
|
|
||||||
|
.. figure:: img/quickstart_replication.png
|
||||||
|
:align: center
|
||||||
|
|
||||||
|
Session replication flags
|
||||||
|
|
||||||
--------------------
|
--------------------
|
||||||
Monitor online users
|
Monitor online users
|
||||||
--------------------
|
--------------------
|
||||||
@ -238,10 +251,20 @@ it draw users related information in your viewport such as:
|
|||||||
The presence overlay panel (see image above) allow you to enable/disable
|
The presence overlay panel (see image above) allow you to enable/disable
|
||||||
various drawn parts via the following flags:
|
various drawn parts via the following flags:
|
||||||
|
|
||||||
|
- **Show session statut**: display the session status in the viewport
|
||||||
|
|
||||||
|
.. figure:: img/quickstart_status.png
|
||||||
|
:align: center
|
||||||
|
|
||||||
|
- **Text scale**: session status text size
|
||||||
|
- **Vertical/Horizontal position**: session position in the viewport
|
||||||
|
|
||||||
- **Show selected objects**: display other users current selection
|
- **Show selected objects**: display other users current selection
|
||||||
- **Show users**: display users current viewpoint
|
- **Show users**: display users current viewpoint
|
||||||
- **Show different scenes**: display users working on other scenes
|
- **Show different scenes**: display users working on other scenes
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
-----------
|
-----------
|
||||||
Manage data
|
Manage data
|
||||||
-----------
|
-----------
|
||||||
@ -299,37 +322,105 @@ Here is a quick list of available actions:
|
|||||||
|
|
||||||
.. _advanced:
|
.. _advanced:
|
||||||
|
|
||||||
Advanced configuration
|
Advanced settings
|
||||||
======================
|
=================
|
||||||
|
|
||||||
This section contains optional settings to configure the session behavior.
|
This section contains optional settings to configure the session behavior.
|
||||||
|
|
||||||
.. figure:: img/quickstart_advanced.png
|
.. figure:: img/quickstart_advanced.png
|
||||||
:align: center
|
:align: center
|
||||||
|
|
||||||
Repository panel
|
Advanced configuration panel
|
||||||
|
|
||||||
.. rubric:: Network
|
-------
|
||||||
|
Network
|
||||||
|
-------
|
||||||
|
|
||||||
|
.. figure:: img/quickstart_advanced_network.png
|
||||||
|
:align: center
|
||||||
|
|
||||||
|
Advanced network settings
|
||||||
|
|
||||||
**IPC Port** is the port used for Inter Process Communication. This port is used
|
**IPC Port** is the port used for Inter Process Communication. This port is used
|
||||||
by the multi-users subprocesses to communicate with each others. If different instances
|
by the multi-users subprocesses to communicate with each others. If different instances
|
||||||
of the multi-user are using the same IPC port it will create conflict !
|
of the multi-user are using the same IPC port it will create conflict !
|
||||||
|
|
||||||
You only need to modify it if you need to launch multiple clients from the same
|
.. note::
|
||||||
computer(or if you try to host and join on the same computer). You should just enter a different
|
You only need to modify it if you need to launch multiple clients from the same
|
||||||
**IPC port** for each blender instance.
|
computer(or if you try to host and join on the same computer). You should just enter a different
|
||||||
|
**IPC port** for each blender instance.
|
||||||
|
|
||||||
**Timeout (in milliseconds)** is the maximum ping authorized before auto-disconnecting.
|
**Timeout (in milliseconds)** is the maximum ping authorized before auto-disconnecting.
|
||||||
You should only increase it if you have a bad connection.
|
You should only increase it if you have a bad connection.
|
||||||
|
|
||||||
.. rubric:: Replication
|
.. _replication:
|
||||||
|
|
||||||
|
-----------
|
||||||
|
Replication
|
||||||
|
-----------
|
||||||
|
|
||||||
|
.. figure:: img/quickstart_advanced_replication.png
|
||||||
|
:align: center
|
||||||
|
|
||||||
|
Advanced replication settings
|
||||||
|
|
||||||
**Synchronize render settings** (only host) enable replication of EEVEE and CYCLES render settings to match render between clients.
|
**Synchronize render settings** (only host) enable replication of EEVEE and CYCLES render settings to match render between clients.
|
||||||
|
|
||||||
|
**Synchronize active camera** sync the scene active camera.
|
||||||
|
|
||||||
|
**Edit Mode Updates** enable objects update while you are in Edit_Mode.
|
||||||
|
|
||||||
|
.. warning:: Edit Mode Updates kill performances with complex objects (heavy meshes, gpencil, etc...).
|
||||||
|
|
||||||
|
**Update method** allow you to change how replication update are triggered. Until now two update methode are implemented:
|
||||||
|
|
||||||
|
- **Default**: Use external threads to monitor datablocks changes, slower and less accurate.
|
||||||
|
- **Despgraph ⚠️**: Use the blender dependency graph to trigger updates. Faster but experimental and unstable !
|
||||||
|
|
||||||
**Properties frequency gird** allow to set a custom replication frequency for each type of data-block:
|
**Properties frequency gird** allow to set a custom replication frequency for each type of data-block:
|
||||||
|
|
||||||
- **Refresh**: pushed data update rate (in second)
|
- **Refresh**: pushed data update rate (in second)
|
||||||
- **Apply**: pulled data update rate (in second)
|
- **Apply**: pulled data update rate (in second)
|
||||||
|
|
||||||
.. note:: Per-data type settings will soon be revamped for simplification purposes
|
-----
|
||||||
|
Cache
|
||||||
|
-----
|
||||||
|
|
||||||
|
The multi-user allows to replicate external blend dependencies such as images, movies sounds.
|
||||||
|
On each client, those files are stored into the cache folder.
|
||||||
|
|
||||||
|
.. figure:: img/quickstart_advanced_cache.png
|
||||||
|
:align: center
|
||||||
|
|
||||||
|
Advanced cache settings
|
||||||
|
|
||||||
|
**cache_directory** allows to choose where cached files (images, sound, movies) will be saved.
|
||||||
|
|
||||||
|
**Clear memory filecache** will save memory space at runtime by removing the file content from memory as soon as it have been written to the disk.
|
||||||
|
|
||||||
|
**Clear cache** will remove all file from the cache folder.
|
||||||
|
|
||||||
|
.. warning:: Clear cash could break your scene image/movie/sound if they are used into the blend !
|
||||||
|
|
||||||
|
---
|
||||||
|
Log
|
||||||
|
---
|
||||||
|
|
||||||
|
.. figure:: img/quickstart_advanced_logging.png
|
||||||
|
:align: center
|
||||||
|
|
||||||
|
Advanced log settings
|
||||||
|
|
||||||
|
**log level** allow to set the logging level of detail. Here is the detail for each values:
|
||||||
|
|
||||||
|
+-----------+-----------------------------------------------+
|
||||||
|
| Log level | Description |
|
||||||
|
+===========+===============================================+
|
||||||
|
| ERROR | Shows only critical error |
|
||||||
|
+-----------+-----------------------------------------------+
|
||||||
|
| WARNING | Shows only errors (all kind) |
|
||||||
|
+-----------+-----------------------------------------------+
|
||||||
|
| INFO | Shows only status related messages and errors |
|
||||||
|
+-----------+-----------------------------------------------+
|
||||||
|
| DEBUG | Shows every possible information. |
|
||||||
|
+-----------+-----------------------------------------------+
|
@ -48,7 +48,6 @@ Documentation is organized into the following sections:
|
|||||||
|
|
||||||
getting_started/install
|
getting_started/install
|
||||||
getting_started/quickstart
|
getting_started/quickstart
|
||||||
getting_started/known_problems
|
|
||||||
getting_started/glossary
|
getting_started/glossary
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
|
@ -144,7 +144,7 @@ Let's check the connection status. Right click on the tray icon and click on **S
|
|||||||
Network status.
|
Network status.
|
||||||
|
|
||||||
The network status must be **OK** for each user(like in the picture above) otherwise it means that you are not connected to the network.
|
The network status must be **OK** for each user(like in the picture above) otherwise it means that you are not connected to the network.
|
||||||
If you see something like **ACCESS_DENIED**, it means that you were not authorized to join the network. Please check the :ref:`network-authorization` section.
|
If you see something like **ACCESS_DENIED**, it means that you were not authorized to join the network. Please check the section :ref:`network-authorization`
|
||||||
|
|
||||||
This is it for the ZeroTier network setup. Now everything should be setup to use the multi-user add-on over internet ! You can now follow the :ref:`quickstart` guide to start using the multi-user add-on !
|
This is it for the ZeroTier network setup. Now everything should be setup to use the multi-user add-on over internet ! You can now follow the :ref:`quickstart` guide to start using the multi-user add-on !
|
||||||
|
|
||||||
@ -171,46 +171,50 @@ From the dedicated server
|
|||||||
run it at home for LAN but for internet hosting you need to follow the :ref:`port-forwarding` setup first.
|
run it at home for LAN but for internet hosting you need to follow the :ref:`port-forwarding` setup first.
|
||||||
|
|
||||||
The dedicated server allow you to host a session with simplicity from any location.
|
The dedicated server allow you to host a session with simplicity from any location.
|
||||||
It was developed to improve intaernet hosting performance.
|
It was developed to improve internet hosting performance.
|
||||||
|
|
||||||
The dedicated server can be run in tow ways:
|
The dedicated server can be run in two ways:
|
||||||
|
|
||||||
- :ref:`cmd-line`
|
- :ref:`cmd-line`
|
||||||
- :ref:`docker`
|
- :ref:`docker`
|
||||||
|
|
||||||
|
.. Note:: There are shell scripts to conveniently start a dedicated server via either of these approaches available in the gitlab repository. See section: :ref:`serverstartscripts`
|
||||||
|
|
||||||
.. _cmd-line:
|
.. _cmd-line:
|
||||||
|
|
||||||
Using a regular command line
|
Using a regular command line
|
||||||
----------------------------
|
----------------------------
|
||||||
|
|
||||||
You can run the dedicated server on any platform by following those steps:
|
You can run the dedicated server on any platform by following these steps:
|
||||||
|
|
||||||
1. Firstly, download and intall python 3 (3.6 or above).
|
1. Firstly, download and intall python 3 (3.6 or above).
|
||||||
2. Download and extract the dedicated server from `here <https://gitlab.com/slumber/replication/-/archive/develop/replication-develop.zip>`_
|
2. Install the latest version of the replication library:
|
||||||
3. Open a terminal in the extracted folder and install python dependencies by running:
|
|
||||||
|
|
||||||
.. code-block:: bash
|
.. code-block:: bash
|
||||||
|
|
||||||
python -m pip install -r requirements.txt
|
python -m pip install replication==0.0.21a15
|
||||||
|
|
||||||
4. Launch the server from the same terminal with:
|
4. Launch the server with:
|
||||||
|
|
||||||
.. code-block:: bash
|
.. code-block:: bash
|
||||||
|
|
||||||
python scripts/server.py
|
replication.serve
|
||||||
|
|
||||||
.. hint::
|
.. hint::
|
||||||
You can also specify a custom **port** (-p), **timeout** (-t) and **admin password** (-pwd) with the following optionnal argument
|
You can also specify a custom **port** (-p), **timeout** (-t), **admin password** (-pwd), **log level (ERROR, WARNING, INFO or DEBUG)** (-l) and **log file** (-lf) with the following optional arguments
|
||||||
|
|
||||||
.. code-block:: bash
|
.. code-block:: bash
|
||||||
|
|
||||||
python scripts/server.py -p 5555 -pwd toto -t 1000
|
replication.serve -p 5555 -pwd admin -t 1000 -l INFO -lf server.log
|
||||||
|
|
||||||
|
Here, for example, a server is instantiated on port 5555, with password 'admin', a 1 second timeout, and logging enabled.
|
||||||
|
|
||||||
|
As soon as the dedicated server is running, you can connect to it from blender by following :ref:`how-to-join`.
|
||||||
|
|
||||||
As soon as the dedicated server is running, you can connect to it from blender (follow :ref:`how-to-join`).
|
|
||||||
|
|
||||||
|
|
||||||
.. hint::
|
.. hint::
|
||||||
Some commands are available to manage the session. Check :ref:`dedicated-management` to learn more.
|
Some commands are available to enable an administrator to manage the session. Check :ref:`dedicated-management` to learn more.
|
||||||
|
|
||||||
|
|
||||||
.. _docker:
|
.. _docker:
|
||||||
@ -218,22 +222,56 @@ As soon as the dedicated server is running, you can connect to it from blender (
|
|||||||
Using a pre-configured image on docker engine
|
Using a pre-configured image on docker engine
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
|
|
||||||
Launching the dedicated server from a docker server is simple as:
|
Launching the dedicated server from a docker server is simple as running:
|
||||||
|
|
||||||
.. code-block:: bash
|
.. code-block:: bash
|
||||||
|
|
||||||
docker run -d \
|
docker run -d \
|
||||||
-p 5555-5560:5555-5560 \
|
-p 5555-5560:5555-5560 \
|
||||||
-e port=5555 \
|
-e port=5555 \
|
||||||
|
-e log_level=DEBUG \
|
||||||
-e password=admin \
|
-e password=admin \
|
||||||
-e timeout=1000 \
|
-e timeout=1000 \
|
||||||
registry.gitlab.com/slumber/multi-user/multi-user-server:0.0.3
|
registry.gitlab.com/slumber/multi-user/multi-user-server:0.1.0
|
||||||
|
|
||||||
As soon as the dedicated server is running, you can connect to it from blender.
|
As soon as the dedicated server is running, you can connect to it from blender by following :ref:`how-to-join`.
|
||||||
You can check the :ref:`how-to-join` section.
|
|
||||||
|
You can check your container is running, and find its ID with:
|
||||||
|
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
docker ps
|
||||||
|
|
||||||
|
Logs for the server running in the docker container can be accessed by outputting the following to a log file:
|
||||||
|
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
docker log your-container-id >& dockerserver.log
|
||||||
|
|
||||||
|
.. Note:: If using WSL2 on Windows 10 (Windows Subsystem for Linux), it is preferable to run a dedicated server via regular command line approach (or the associated startup script) from within Windows - docker desktop for windows 10 usually uses the WSL2 backend where it is available.
|
||||||
|
|
||||||
|
|
||||||
|
.. _serverstartscripts:
|
||||||
|
|
||||||
|
Server startup scripts
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
Convenient scripts are available in the Gitlab repository: https://gitlab.com/slumber/multi-user/scripts/startup_scripts/
|
||||||
|
|
||||||
|
Simply run the relevant script in a shell on the host machine to start a server with one line of code via replication directly or via a docker container. Choose between the two methods:
|
||||||
|
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
./start-server.sh
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
./run-dockerfile.sh
|
||||||
|
|
||||||
.. hint::
|
.. hint::
|
||||||
Some commands are available to manage the session. Check :ref:`dedicated-management` to learn more.
|
Once your server is up and running, some commands are available to manage the session :ref:`dedicated-management`
|
||||||
|
|
||||||
.. _dedicated-management:
|
.. _dedicated-management:
|
||||||
|
|
||||||
|
@ -21,11 +21,11 @@ In order to help with the testing, you have several possibilities:
|
|||||||
- Test `development branch <https://gitlab.com/slumber/multi-user/-/branches>`_
|
- Test `development branch <https://gitlab.com/slumber/multi-user/-/branches>`_
|
||||||
|
|
||||||
--------------------------
|
--------------------------
|
||||||
Filling an issue on Gitlab
|
Filing an issue on Gitlab
|
||||||
--------------------------
|
--------------------------
|
||||||
|
|
||||||
The `gitlab issue tracker <https://gitlab.com/slumber/multi-user/issues>`_ is used for bug report and enhancement suggestion.
|
The `gitlab issue tracker <https://gitlab.com/slumber/multi-user/issues>`_ is used for bug report and enhancement suggestion.
|
||||||
You will need a Gitlab account to be able to open a new issue there and click on "New issue" button.
|
You will need a Gitlab account to be able to open a new issue there and click on "New issue" button in the main multi-user project.
|
||||||
|
|
||||||
Here are some useful information you should provide in a bug report:
|
Here are some useful information you should provide in a bug report:
|
||||||
|
|
||||||
@ -35,8 +35,75 @@ Here are some useful information you should provide in a bug report:
|
|||||||
Contributing code
|
Contributing code
|
||||||
=================
|
=================
|
||||||
|
|
||||||
1. Fork it (https://gitlab.com/yourname/yourproject/fork)
|
In general, this project follows the `Gitflow Workflow <https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow>`_. It may help to understand that there are three different repositories - the upstream (main multi-user project repository, designated in git by 'upstream'), remote (forked repository, designated in git by 'origin'), and the local repository on your machine.
|
||||||
2. Create your feature branch (git checkout -b feature/fooBar)
|
The following example suggests how to contribute a feature.
|
||||||
3. Commit your changes (git commit -am 'Add some fooBar')
|
|
||||||
4. Push to the branch (git push origin feature/fooBar)
|
1. Fork the project into a new repository:
|
||||||
5. Create a new Pull Request
|
https://gitlab.com/yourname/multi-user
|
||||||
|
|
||||||
|
2. Clone the new repository locally:
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
git clone https://gitlab.com/yourname/multi-user.git
|
||||||
|
|
||||||
|
3. Keep your fork in sync with the main repository by setting up the upstream pointer once. cd into your git repo and then run:
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
git remote add upstream https://gitlab.com/slumber/multi-user.git
|
||||||
|
|
||||||
|
4. Now, locally check out the develop branch, upon which to base your new feature branch:
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
git checkout develop
|
||||||
|
|
||||||
|
5. Fetch any changes from the main upstream repository into your fork (especially if some time has passed since forking):
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
git fetch upstream
|
||||||
|
|
||||||
|
'Fetch' downloads objects and refs from the repository, but doesn’t apply them to the branch we are working on. We want to apply the updates to the branch we will work from, which we checked out in step 4.
|
||||||
|
|
||||||
|
6. Let's merge any recent changes from the remote upstream (original repository's) 'develop' branch into our local 'develop' branch:
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
git merge upstream/develop
|
||||||
|
|
||||||
|
7. Update your forked repository's remote 'develop' branch with the fetched changes, just to keep things tidy. Make sure you haven't committed any local changes in the interim:
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
git push origin develop
|
||||||
|
|
||||||
|
8. Locally create your own new feature branch from the develop branch, using the syntax:
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
git checkout -b feature/yourfeaturename
|
||||||
|
...where 'feature/' designates a feature branch, and 'yourfeaturename' is a name of your choosing
|
||||||
|
|
||||||
|
9. Add and commit your changes, including a commit message:
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
git commit -am 'Add fooBar'
|
||||||
|
|
||||||
|
10. Push committed changes to the remote copy of your new feature branch which will be created in this step:
|
||||||
|
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
git push -u origin feature/yourfeaturename
|
||||||
|
|
||||||
|
If it's been some time since performing steps 4 through 7, make sure to checkout 'develop' again and pull the latest changes from upstream before checking out and creating feature/yourfeaturename and pushing changes. Alternatively, checkout 'feature/yourfeaturename' and simply run:
|
||||||
|
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
git rebase upstream/develop
|
||||||
|
|
||||||
|
and your staged commits will be merged along with the changes. More information on `rebasing here <https://git-scm.com/book/en/v2/Git-Branching-Rebasing>`_
|
||||||
|
|
||||||
|
.. Hint:: -u option sets up your locally created new branch to follow a remote branch which is now created with the same name on your remote repository.
|
||||||
|
|
||||||
|
11. Finally, create a new Pull/Merge Request on Gitlab to merge the remote version of this new branch with commited updates, back into the upstream develop branch, finalising the integration of the new feature.
|
||||||
|
|
||||||
|
12. Thanks for contributing!
|
||||||
|
|
||||||
|
.. Note:: For hotfixes, replace 'feature/' with 'hotfix/' and base the new branch off the parent 'master' branch instead of 'develop' branch. Make sure to checkout 'master' before running step 8
|
||||||
|
.. Note:: Let's follow the Atlassian `Gitflow Workflow <https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow>`_, except for one main difference - submitting a pull request rather than merging by ourselves.
|
||||||
|
.. Note:: See `here <https://philna.sh/blog/2018/08/21/git-commands-to-keep-a-fork-up-to-date/>`_ or `here <https://stefanbauer.me/articles/how-to-keep-your-git-fork-up-to-date>`_ for instructions on how to keep a fork up to date.
|
@ -19,9 +19,9 @@
|
|||||||
bl_info = {
|
bl_info = {
|
||||||
"name": "Multi-User",
|
"name": "Multi-User",
|
||||||
"author": "Swann Martinez",
|
"author": "Swann Martinez",
|
||||||
"version": (0, 0, 3),
|
"version": (0, 2, 0),
|
||||||
"description": "Enable real-time collaborative workflow inside blender",
|
"description": "Enable real-time collaborative workflow inside blender",
|
||||||
"blender": (2, 80, 0),
|
"blender": (2, 82, 0),
|
||||||
"location": "3D View > Sidebar > Multi-User tab",
|
"location": "3D View > Sidebar > Multi-User tab",
|
||||||
"warning": "Unstable addon, use it at your own risks",
|
"warning": "Unstable addon, use it at your own risks",
|
||||||
"category": "Collaboration",
|
"category": "Collaboration",
|
||||||
@ -40,32 +40,25 @@ import sys
|
|||||||
import bpy
|
import bpy
|
||||||
from bpy.app.handlers import persistent
|
from bpy.app.handlers import persistent
|
||||||
|
|
||||||
from . import environment, utils
|
from . import environment
|
||||||
|
|
||||||
|
|
||||||
# TODO: remove dependency as soon as replication will be installed as a module
|
|
||||||
DEPENDENCIES = {
|
DEPENDENCIES = {
|
||||||
("zmq","zmq"),
|
("replication", '0.2.0'),
|
||||||
("jsondiff","jsondiff"),
|
|
||||||
("deepdiff", "deepdiff"),
|
|
||||||
("psutil","psutil")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
libs = os.path.dirname(os.path.abspath(__file__))+"\\libs\\replication\\replication"
|
module_error_msg = "Insufficient rights to install the multi-user \
|
||||||
|
dependencies, aunch blender with administrator rights."
|
||||||
def register():
|
def register():
|
||||||
# Setup logging policy
|
# Setup logging policy
|
||||||
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
|
logging.basicConfig(
|
||||||
|
format='%(asctime)s CLIENT %(levelname)-8s %(message)s',
|
||||||
if libs not in sys.path:
|
datefmt='%H:%M:%S',
|
||||||
sys.path.append(libs)
|
level=logging.INFO)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
environment.setup(DEPENDENCIES, bpy.app.binary_path_python)
|
environment.setup(DEPENDENCIES, bpy.app.binary_path_python)
|
||||||
except ModuleNotFoundError:
|
|
||||||
logging.fatal("Fail to install multi-user dependencies, try to execute blender with admin rights.")
|
|
||||||
return
|
|
||||||
|
|
||||||
from . import presence
|
from . import presence
|
||||||
from . import operators
|
from . import operators
|
||||||
@ -78,6 +71,9 @@ def register():
|
|||||||
presence.register()
|
presence.register()
|
||||||
operators.register()
|
operators.register()
|
||||||
ui.register()
|
ui.register()
|
||||||
|
except ModuleNotFoundError as e:
|
||||||
|
raise Exception(module_error_msg)
|
||||||
|
logging.error(module_error_msg)
|
||||||
|
|
||||||
bpy.types.WindowManager.session = bpy.props.PointerProperty(
|
bpy.types.WindowManager.session = bpy.props.PointerProperty(
|
||||||
type=preferences.SessionProps)
|
type=preferences.SessionProps)
|
||||||
|
@ -23,7 +23,11 @@ https://github.com/CGCookie/blender-addon-updater
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
__version__ = "1.0.8"
|
||||||
|
|
||||||
import errno
|
import errno
|
||||||
|
import traceback
|
||||||
|
import platform
|
||||||
import ssl
|
import ssl
|
||||||
import urllib.request
|
import urllib.request
|
||||||
import urllib
|
import urllib
|
||||||
@ -98,6 +102,7 @@ class Singleton_updater(object):
|
|||||||
|
|
||||||
# runtime variables, initial conditions
|
# runtime variables, initial conditions
|
||||||
self._verbose = False
|
self._verbose = False
|
||||||
|
self._use_print_traces = True
|
||||||
self._fake_install = False
|
self._fake_install = False
|
||||||
self._async_checking = False # only true when async daemon started
|
self._async_checking = False # only true when async daemon started
|
||||||
self._update_ready = None
|
self._update_ready = None
|
||||||
@ -133,6 +138,13 @@ class Singleton_updater(object):
|
|||||||
self._select_link = select_link_function
|
self._select_link = select_link_function
|
||||||
|
|
||||||
|
|
||||||
|
# called from except blocks, to print the exception details,
|
||||||
|
# according to the use_print_traces option
|
||||||
|
def print_trace():
|
||||||
|
if self._use_print_traces:
|
||||||
|
traceback.print_exc()
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------------------------
|
# -------------------------------------------------------------------------
|
||||||
# Getters and setters
|
# Getters and setters
|
||||||
# -------------------------------------------------------------------------
|
# -------------------------------------------------------------------------
|
||||||
@ -166,7 +178,7 @@ class Singleton_updater(object):
|
|||||||
try:
|
try:
|
||||||
self._auto_reload_post_update = bool(value)
|
self._auto_reload_post_update = bool(value)
|
||||||
except:
|
except:
|
||||||
raise ValueError("Must be a boolean value")
|
raise ValueError("auto_reload_post_update must be a boolean value")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def backup_current(self):
|
def backup_current(self):
|
||||||
@ -351,7 +363,7 @@ class Singleton_updater(object):
|
|||||||
try:
|
try:
|
||||||
self._repo = str(value)
|
self._repo = str(value)
|
||||||
except:
|
except:
|
||||||
raise ValueError("User must be a string")
|
raise ValueError("repo must be a string value")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def select_link(self):
|
def select_link(self):
|
||||||
@ -377,6 +389,7 @@ class Singleton_updater(object):
|
|||||||
os.makedirs(value)
|
os.makedirs(value)
|
||||||
except:
|
except:
|
||||||
if self._verbose: print("Error trying to staging path")
|
if self._verbose: print("Error trying to staging path")
|
||||||
|
self.print_trace()
|
||||||
return
|
return
|
||||||
self._updater_path = value
|
self._updater_path = value
|
||||||
|
|
||||||
@ -446,6 +459,16 @@ class Singleton_updater(object):
|
|||||||
except:
|
except:
|
||||||
raise ValueError("Verbose must be a boolean value")
|
raise ValueError("Verbose must be a boolean value")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def use_print_traces(self):
|
||||||
|
return self._use_print_traces
|
||||||
|
@use_print_traces.setter
|
||||||
|
def use_print_traces(self, value):
|
||||||
|
try:
|
||||||
|
self._use_print_traces = bool(value)
|
||||||
|
except:
|
||||||
|
raise ValueError("use_print_traces must be a boolean value")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def version_max_update(self):
|
def version_max_update(self):
|
||||||
return self._version_max_update
|
return self._version_max_update
|
||||||
@ -637,6 +660,9 @@ class Singleton_updater(object):
|
|||||||
else:
|
else:
|
||||||
if self._verbose: print("Tokens not setup for engine yet")
|
if self._verbose: print("Tokens not setup for engine yet")
|
||||||
|
|
||||||
|
# Always set user agent
|
||||||
|
request.add_header('User-Agent', "Python/"+str(platform.python_version()))
|
||||||
|
|
||||||
# run the request
|
# run the request
|
||||||
try:
|
try:
|
||||||
if context:
|
if context:
|
||||||
@ -652,6 +678,7 @@ class Singleton_updater(object):
|
|||||||
self._error = "HTTP error"
|
self._error = "HTTP error"
|
||||||
self._error_msg = str(e.code)
|
self._error_msg = str(e.code)
|
||||||
print(self._error, self._error_msg)
|
print(self._error, self._error_msg)
|
||||||
|
self.print_trace()
|
||||||
self._update_ready = None
|
self._update_ready = None
|
||||||
except urllib.error.URLError as e:
|
except urllib.error.URLError as e:
|
||||||
reason = str(e.reason)
|
reason = str(e.reason)
|
||||||
@ -663,6 +690,7 @@ class Singleton_updater(object):
|
|||||||
self._error = "URL error, check internet connection"
|
self._error = "URL error, check internet connection"
|
||||||
self._error_msg = reason
|
self._error_msg = reason
|
||||||
print(self._error, self._error_msg)
|
print(self._error, self._error_msg)
|
||||||
|
self.print_trace()
|
||||||
self._update_ready = None
|
self._update_ready = None
|
||||||
return None
|
return None
|
||||||
else:
|
else:
|
||||||
@ -684,6 +712,7 @@ class Singleton_updater(object):
|
|||||||
self._error_msg = str(e.reason)
|
self._error_msg = str(e.reason)
|
||||||
self._update_ready = None
|
self._update_ready = None
|
||||||
print(self._error, self._error_msg)
|
print(self._error, self._error_msg)
|
||||||
|
self.print_trace()
|
||||||
return None
|
return None
|
||||||
else:
|
else:
|
||||||
return None
|
return None
|
||||||
@ -700,15 +729,17 @@ class Singleton_updater(object):
|
|||||||
if self._verbose: print("Preparing staging folder for download:\n",local)
|
if self._verbose: print("Preparing staging folder for download:\n",local)
|
||||||
if os.path.isdir(local) == True:
|
if os.path.isdir(local) == True:
|
||||||
try:
|
try:
|
||||||
shutil.rmtree(local)
|
shutil.rmtree(local, ignore_errors=True)
|
||||||
os.makedirs(local)
|
os.makedirs(local)
|
||||||
except:
|
except:
|
||||||
error = "failed to remove existing staging directory"
|
error = "failed to remove existing staging directory"
|
||||||
|
self.print_trace()
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
os.makedirs(local)
|
os.makedirs(local)
|
||||||
except:
|
except:
|
||||||
error = "failed to create staging directory"
|
error = "failed to create staging directory"
|
||||||
|
self.print_trace()
|
||||||
|
|
||||||
if error != None:
|
if error != None:
|
||||||
if self._verbose: print("Error: Aborting update, "+error)
|
if self._verbose: print("Error: Aborting update, "+error)
|
||||||
@ -733,6 +764,10 @@ class Singleton_updater(object):
|
|||||||
request.add_header('PRIVATE-TOKEN',self._engine.token)
|
request.add_header('PRIVATE-TOKEN',self._engine.token)
|
||||||
else:
|
else:
|
||||||
if self._verbose: print("Tokens not setup for selected engine yet")
|
if self._verbose: print("Tokens not setup for selected engine yet")
|
||||||
|
|
||||||
|
# Always set user agent
|
||||||
|
request.add_header('User-Agent', "Python/"+str(platform.python_version()))
|
||||||
|
|
||||||
self.urlretrieve(urllib.request.urlopen(request,context=context), self._source_zip)
|
self.urlretrieve(urllib.request.urlopen(request,context=context), self._source_zip)
|
||||||
# add additional checks on file size being non-zero
|
# add additional checks on file size being non-zero
|
||||||
if self._verbose: print("Successfully downloaded update zip")
|
if self._verbose: print("Successfully downloaded update zip")
|
||||||
@ -743,6 +778,7 @@ class Singleton_updater(object):
|
|||||||
if self._verbose:
|
if self._verbose:
|
||||||
print("Error retrieving download, bad link?")
|
print("Error retrieving download, bad link?")
|
||||||
print("Error: {}".format(e))
|
print("Error: {}".format(e))
|
||||||
|
self.print_trace()
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
@ -757,16 +793,18 @@ class Singleton_updater(object):
|
|||||||
|
|
||||||
if os.path.isdir(local):
|
if os.path.isdir(local):
|
||||||
try:
|
try:
|
||||||
shutil.rmtree(local)
|
shutil.rmtree(local, ignore_errors=True)
|
||||||
except:
|
except:
|
||||||
if self._verbose:print("Failed to removed previous backup folder, contininuing")
|
if self._verbose:print("Failed to removed previous backup folder, contininuing")
|
||||||
|
self.print_trace()
|
||||||
|
|
||||||
# remove the temp folder; shouldn't exist but could if previously interrupted
|
# remove the temp folder; shouldn't exist but could if previously interrupted
|
||||||
if os.path.isdir(tempdest):
|
if os.path.isdir(tempdest):
|
||||||
try:
|
try:
|
||||||
shutil.rmtree(tempdest)
|
shutil.rmtree(tempdest, ignore_errors=True)
|
||||||
except:
|
except:
|
||||||
if self._verbose:print("Failed to remove existing temp folder, contininuing")
|
if self._verbose:print("Failed to remove existing temp folder, contininuing")
|
||||||
|
self.print_trace()
|
||||||
# make the full addon copy, which temporarily places outside the addon folder
|
# make the full addon copy, which temporarily places outside the addon folder
|
||||||
if self._backup_ignore_patterns != None:
|
if self._backup_ignore_patterns != None:
|
||||||
shutil.copytree(
|
shutil.copytree(
|
||||||
@ -794,7 +832,7 @@ class Singleton_updater(object):
|
|||||||
|
|
||||||
# make the copy
|
# make the copy
|
||||||
shutil.move(backuploc,tempdest)
|
shutil.move(backuploc,tempdest)
|
||||||
shutil.rmtree(self._addon_root)
|
shutil.rmtree(self._addon_root, ignore_errors=True)
|
||||||
os.rename(tempdest,self._addon_root)
|
os.rename(tempdest,self._addon_root)
|
||||||
|
|
||||||
self._json["backup_date"] = ""
|
self._json["backup_date"] = ""
|
||||||
@ -815,7 +853,7 @@ class Singleton_updater(object):
|
|||||||
# clear the existing source folder in case previous files remain
|
# clear the existing source folder in case previous files remain
|
||||||
outdir = os.path.join(self._updater_path, "source")
|
outdir = os.path.join(self._updater_path, "source")
|
||||||
try:
|
try:
|
||||||
shutil.rmtree(outdir)
|
shutil.rmtree(outdir, ignore_errors=True)
|
||||||
if self._verbose:
|
if self._verbose:
|
||||||
print("Source folder cleared")
|
print("Source folder cleared")
|
||||||
except:
|
except:
|
||||||
@ -828,6 +866,7 @@ class Singleton_updater(object):
|
|||||||
except Exception as err:
|
except Exception as err:
|
||||||
print("Error occurred while making extract dir:")
|
print("Error occurred while making extract dir:")
|
||||||
print(str(err))
|
print(str(err))
|
||||||
|
self.print_trace()
|
||||||
self._error = "Install failed"
|
self._error = "Install failed"
|
||||||
self._error_msg = "Failed to make extract directory"
|
self._error_msg = "Failed to make extract directory"
|
||||||
return -1
|
return -1
|
||||||
@ -869,6 +908,7 @@ class Singleton_updater(object):
|
|||||||
if exc.errno != errno.EEXIST:
|
if exc.errno != errno.EEXIST:
|
||||||
self._error = "Install failed"
|
self._error = "Install failed"
|
||||||
self._error_msg = "Could not create folder from zip"
|
self._error_msg = "Could not create folder from zip"
|
||||||
|
self.print_trace()
|
||||||
return -1
|
return -1
|
||||||
else:
|
else:
|
||||||
with open(os.path.join(outdir, subpath), "wb") as outfile:
|
with open(os.path.join(outdir, subpath), "wb") as outfile:
|
||||||
@ -962,12 +1002,13 @@ class Singleton_updater(object):
|
|||||||
print("Clean removing file {}".format(os.path.join(base,f)))
|
print("Clean removing file {}".format(os.path.join(base,f)))
|
||||||
for f in folders:
|
for f in folders:
|
||||||
if os.path.join(base,f)==self._updater_path: continue
|
if os.path.join(base,f)==self._updater_path: continue
|
||||||
shutil.rmtree(os.path.join(base,f))
|
shutil.rmtree(os.path.join(base,f), ignore_errors=True)
|
||||||
print("Clean removing folder and contents {}".format(os.path.join(base,f)))
|
print("Clean removing folder and contents {}".format(os.path.join(base,f)))
|
||||||
|
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
error = "failed to create clean existing addon folder"
|
error = "failed to create clean existing addon folder"
|
||||||
print(error, str(err))
|
print(error, str(err))
|
||||||
|
self.print_trace()
|
||||||
|
|
||||||
# Walk through the base addon folder for rules on pre-removing
|
# Walk through the base addon folder for rules on pre-removing
|
||||||
# but avoid removing/altering backup and updater file
|
# but avoid removing/altering backup and updater file
|
||||||
@ -983,6 +1024,7 @@ class Singleton_updater(object):
|
|||||||
if self._verbose: print("Pre-removed file "+file)
|
if self._verbose: print("Pre-removed file "+file)
|
||||||
except OSError:
|
except OSError:
|
||||||
print("Failed to pre-remove "+file)
|
print("Failed to pre-remove "+file)
|
||||||
|
self.print_trace()
|
||||||
|
|
||||||
# Walk through the temp addon sub folder for replacements
|
# Walk through the temp addon sub folder for replacements
|
||||||
# this implements the overwrite rules, which apply after
|
# this implements the overwrite rules, which apply after
|
||||||
@ -1006,7 +1048,7 @@ class Singleton_updater(object):
|
|||||||
# otherwise, check each file to see if matches an overwrite pattern
|
# otherwise, check each file to see if matches an overwrite pattern
|
||||||
replaced=False
|
replaced=False
|
||||||
for ptrn in self._overwrite_patterns:
|
for ptrn in self._overwrite_patterns:
|
||||||
if fnmatch.filter([destFile],ptrn):
|
if fnmatch.filter([file],ptrn):
|
||||||
replaced=True
|
replaced=True
|
||||||
break
|
break
|
||||||
if replaced:
|
if replaced:
|
||||||
@ -1022,10 +1064,11 @@ class Singleton_updater(object):
|
|||||||
|
|
||||||
# now remove the temp staging folder and downloaded zip
|
# now remove the temp staging folder and downloaded zip
|
||||||
try:
|
try:
|
||||||
shutil.rmtree(staging_path)
|
shutil.rmtree(staging_path, ignore_errors=True)
|
||||||
except:
|
except:
|
||||||
error = "Error: Failed to remove existing staging directory, consider manually removing "+staging_path
|
error = "Error: Failed to remove existing staging directory, consider manually removing "+staging_path
|
||||||
if self._verbose: print(error)
|
if self._verbose: print(error)
|
||||||
|
self.print_trace()
|
||||||
|
|
||||||
|
|
||||||
def reload_addon(self):
|
def reload_addon(self):
|
||||||
@ -1041,9 +1084,16 @@ class Singleton_updater(object):
|
|||||||
|
|
||||||
# not allowed in restricted context, such as register module
|
# not allowed in restricted context, such as register module
|
||||||
# toggle to refresh
|
# toggle to refresh
|
||||||
|
if "addon_disable" in dir(bpy.ops.wm): # 2.7
|
||||||
bpy.ops.wm.addon_disable(module=self._addon_package)
|
bpy.ops.wm.addon_disable(module=self._addon_package)
|
||||||
bpy.ops.wm.addon_refresh()
|
bpy.ops.wm.addon_refresh()
|
||||||
bpy.ops.wm.addon_enable(module=self._addon_package)
|
bpy.ops.wm.addon_enable(module=self._addon_package)
|
||||||
|
print("2.7 reload complete")
|
||||||
|
else: # 2.8
|
||||||
|
bpy.ops.preferences.addon_disable(module=self._addon_package)
|
||||||
|
bpy.ops.preferences.addon_refresh()
|
||||||
|
bpy.ops.preferences.addon_enable(module=self._addon_package)
|
||||||
|
print("2.8 reload complete")
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------------------------
|
# -------------------------------------------------------------------------
|
||||||
@ -1375,7 +1425,7 @@ class Singleton_updater(object):
|
|||||||
|
|
||||||
if "last_check" not in self._json or self._json["last_check"] == "":
|
if "last_check" not in self._json or self._json["last_check"] == "":
|
||||||
return True
|
return True
|
||||||
else:
|
|
||||||
now = datetime.now()
|
now = datetime.now()
|
||||||
last_check = datetime.strptime(self._json["last_check"],
|
last_check = datetime.strptime(self._json["last_check"],
|
||||||
"%Y-%m-%d %H:%M:%S.%f")
|
"%Y-%m-%d %H:%M:%S.%f")
|
||||||
@ -1391,7 +1441,7 @@ class Singleton_updater(object):
|
|||||||
if self._verbose:
|
if self._verbose:
|
||||||
print("{} Updater: Time to check for updates!".format(self._addon))
|
print("{} Updater: Time to check for updates!".format(self._addon))
|
||||||
return True
|
return True
|
||||||
else:
|
|
||||||
if self._verbose:
|
if self._verbose:
|
||||||
print("{} Updater: Determined it's not yet time to check for updates".format(self._addon))
|
print("{} Updater: Determined it's not yet time to check for updates".format(self._addon))
|
||||||
return False
|
return False
|
||||||
@ -1413,6 +1463,7 @@ class Singleton_updater(object):
|
|||||||
except Exception as err:
|
except Exception as err:
|
||||||
print("Other OS error occurred while trying to rename old JSON")
|
print("Other OS error occurred while trying to rename old JSON")
|
||||||
print(err)
|
print(err)
|
||||||
|
self.print_trace()
|
||||||
return json_path
|
return json_path
|
||||||
|
|
||||||
def set_updater_json(self):
|
def set_updater_json(self):
|
||||||
@ -1513,6 +1564,7 @@ class Singleton_updater(object):
|
|||||||
except Exception as exception:
|
except Exception as exception:
|
||||||
print("Checking for update error:")
|
print("Checking for update error:")
|
||||||
print(exception)
|
print(exception)
|
||||||
|
self.print_trace()
|
||||||
if not self._error:
|
if not self._error:
|
||||||
self._update_ready = False
|
self._update_ready = False
|
||||||
self._update_version = None
|
self._update_version = None
|
||||||
@ -1624,9 +1676,6 @@ class GitlabEngine(object):
|
|||||||
return "{}{}{}".format(self.api_url,"/api/v4/projects/",updater.repo)
|
return "{}{}{}".format(self.api_url,"/api/v4/projects/",updater.repo)
|
||||||
|
|
||||||
def form_tags_url(self, updater):
|
def form_tags_url(self, updater):
|
||||||
if updater.use_releases:
|
|
||||||
return "{}{}".format(self.form_repo_url(updater),"/releases")
|
|
||||||
else:
|
|
||||||
return "{}{}".format(self.form_repo_url(updater),"/repository/tags")
|
return "{}{}".format(self.form_repo_url(updater),"/repository/tags")
|
||||||
|
|
||||||
def form_branch_list_url(self, updater):
|
def form_branch_list_url(self, updater):
|
||||||
@ -1655,14 +1704,9 @@ class GitlabEngine(object):
|
|||||||
def parse_tags(self, response, updater):
|
def parse_tags(self, response, updater):
|
||||||
if response == None:
|
if response == None:
|
||||||
return []
|
return []
|
||||||
# Return asset links from release
|
|
||||||
if updater.use_releases:
|
|
||||||
return [{"name": release["name"], "zipball_url": release["assets"]["links"][0]["url"]} for release in response]
|
|
||||||
else:
|
|
||||||
return [{"name": tag["name"], "zipball_url": self.get_zip_url(tag["commit"]["id"], updater)} for tag in response]
|
return [{"name": tag["name"], "zipball_url": self.get_zip_url(tag["commit"]["id"], updater)} for tag in response]
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# -----------------------------------------------------------------------------
|
# -----------------------------------------------------------------------------
|
||||||
# The module-shared class instance,
|
# The module-shared class instance,
|
||||||
# should be what's imported to other files
|
# should be what's imported to other files
|
||||||
|
@ -16,7 +16,13 @@
|
|||||||
#
|
#
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
"""Blender UI integrations for the addon updater.
|
||||||
|
|
||||||
|
Implements draw calls, popups, and operators that use the addon_updater.
|
||||||
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
import traceback
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
from bpy.app.handlers import persistent
|
from bpy.app.handlers import persistent
|
||||||
@ -28,16 +34,16 @@ try:
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
print("ERROR INITIALIZING UPDATER")
|
print("ERROR INITIALIZING UPDATER")
|
||||||
print(str(e))
|
print(str(e))
|
||||||
|
traceback.print_exc()
|
||||||
class Singleton_updater_none(object):
|
class Singleton_updater_none(object):
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.addon = None
|
self.addon = None
|
||||||
self.verbose = False
|
self.verbose = False
|
||||||
|
self.use_print_traces = True
|
||||||
self.invalidupdater = True # used to distinguish bad install
|
self.invalidupdater = True # used to distinguish bad install
|
||||||
self.error = None
|
self.error = None
|
||||||
self.error_msg = None
|
self.error_msg = None
|
||||||
self.async_checking = None
|
self.async_checking = None
|
||||||
|
|
||||||
def clear_state(self):
|
def clear_state(self):
|
||||||
self.addon = None
|
self.addon = None
|
||||||
self.verbose = False
|
self.verbose = False
|
||||||
@ -45,7 +51,6 @@ except Exception as e:
|
|||||||
self.error = None
|
self.error = None
|
||||||
self.error_msg = None
|
self.error_msg = None
|
||||||
self.async_checking = None
|
self.async_checking = None
|
||||||
|
|
||||||
def run_update(self): pass
|
def run_update(self): pass
|
||||||
def check_for_update(self): pass
|
def check_for_update(self): pass
|
||||||
updater = Singleton_updater_none()
|
updater = Singleton_updater_none()
|
||||||
@ -127,14 +132,14 @@ class addon_updater_install_popup(bpy.types.Operator):
|
|||||||
name="Process update",
|
name="Process update",
|
||||||
description="Decide to install, ignore, or defer new addon update",
|
description="Decide to install, ignore, or defer new addon update",
|
||||||
items=[
|
items=[
|
||||||
("install", "Update Now", "Install update now"),
|
("install","Update Now","Install update now"),
|
||||||
("ignore", "Ignore", "Ignore this update to prevent future popups"),
|
("ignore","Ignore", "Ignore this update to prevent future popups"),
|
||||||
("defer", "Defer", "Defer choice till next blender session")
|
("defer","Defer","Defer choice till next blender session")
|
||||||
],
|
],
|
||||||
options={'HIDDEN'}
|
options={'HIDDEN'}
|
||||||
)
|
)
|
||||||
|
|
||||||
def check(self, context):
|
def check (self, context):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def invoke(self, context, event):
|
def invoke(self, context, event):
|
||||||
@ -150,11 +155,10 @@ class addon_updater_install_popup(bpy.types.Operator):
|
|||||||
col.scale_y = 0.7
|
col.scale_y = 0.7
|
||||||
col.label(text="Update {} ready!".format(str(updater.update_version)),
|
col.label(text="Update {} ready!".format(str(updater.update_version)),
|
||||||
icon="LOOP_FORWARDS")
|
icon="LOOP_FORWARDS")
|
||||||
col.label(
|
col.label(text="Choose 'Update Now' & press OK to install, ",icon="BLANK1")
|
||||||
text="Choose 'Update Now' & press OK to install, ", icon="BLANK1")
|
col.label(text="or click outside window to defer",icon="BLANK1")
|
||||||
col.label(text="or click outside window to defer", icon="BLANK1")
|
|
||||||
row = col.row()
|
row = col.row()
|
||||||
row.prop(self, "ignore_enum", expand=True)
|
row.prop(self,"ignore_enum",expand=True)
|
||||||
col.split()
|
col.split()
|
||||||
elif updater.update_ready == False:
|
elif updater.update_ready == False:
|
||||||
col = layout.column()
|
col = layout.column()
|
||||||
@ -170,23 +174,23 @@ class addon_updater_install_popup(bpy.types.Operator):
|
|||||||
# potentially in future, could have UI for 'check to select old version'
|
# potentially in future, could have UI for 'check to select old version'
|
||||||
# to revert back to.
|
# to revert back to.
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self,context):
|
||||||
|
|
||||||
# in case of error importing updater
|
# in case of error importing updater
|
||||||
if updater.invalidupdater == True:
|
if updater.invalidupdater == True:
|
||||||
return {'CANCELLED'}
|
return {'CANCELLED'}
|
||||||
|
|
||||||
if updater.manual_only == True:
|
if updater.manual_only==True:
|
||||||
bpy.ops.wm.url_open(url=updater.website)
|
bpy.ops.wm.url_open(url=updater.website)
|
||||||
elif updater.update_ready == True:
|
elif updater.update_ready == True:
|
||||||
|
|
||||||
# action based on enum selection
|
# action based on enum selection
|
||||||
if self.ignore_enum == 'defer':
|
if self.ignore_enum=='defer':
|
||||||
return {'FINISHED'}
|
return {'FINISHED'}
|
||||||
elif self.ignore_enum == 'ignore':
|
elif self.ignore_enum=='ignore':
|
||||||
updater.ignore_update()
|
updater.ignore_update()
|
||||||
return {'FINISHED'}
|
return {'FINISHED'}
|
||||||
# else: "install update now!"
|
#else: "install update now!"
|
||||||
|
|
||||||
res = updater.run_update(
|
res = updater.run_update(
|
||||||
force=False,
|
force=False,
|
||||||
@ -194,7 +198,7 @@ class addon_updater_install_popup(bpy.types.Operator):
|
|||||||
clean=self.clean_install)
|
clean=self.clean_install)
|
||||||
# should return 0, if not something happened
|
# should return 0, if not something happened
|
||||||
if updater.verbose:
|
if updater.verbose:
|
||||||
if res == 0:
|
if res==0:
|
||||||
print("Updater returned successful")
|
print("Updater returned successful")
|
||||||
else:
|
else:
|
||||||
print("Updater returned {}, error occurred".format(res))
|
print("Updater returned {}, error occurred".format(res))
|
||||||
@ -203,7 +207,7 @@ class addon_updater_install_popup(bpy.types.Operator):
|
|||||||
|
|
||||||
# re-launch this dialog
|
# re-launch this dialog
|
||||||
atr = addon_updater_install_popup.bl_idname.split(".")
|
atr = addon_updater_install_popup.bl_idname.split(".")
|
||||||
getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
|
getattr(getattr(bpy.ops, atr[0]),atr[1])('INVOKE_DEFAULT')
|
||||||
else:
|
else:
|
||||||
if updater.verbose:
|
if updater.verbose:
|
||||||
print("Doing nothing, not ready for update")
|
print("Doing nothing, not ready for update")
|
||||||
@ -218,7 +222,7 @@ class addon_updater_check_now(bpy.types.Operator):
|
|||||||
x=updater.addon)
|
x=updater.addon)
|
||||||
bl_options = {'REGISTER', 'INTERNAL'}
|
bl_options = {'REGISTER', 'INTERNAL'}
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self,context):
|
||||||
if updater.invalidupdater == True:
|
if updater.invalidupdater == True:
|
||||||
return {'CANCELLED'}
|
return {'CANCELLED'}
|
||||||
|
|
||||||
@ -267,7 +271,7 @@ class addon_updater_update_now(bpy.types.Operator):
|
|||||||
options={'HIDDEN'}
|
options={'HIDDEN'}
|
||||||
)
|
)
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self,context):
|
||||||
|
|
||||||
# in case of error importing updater
|
# in case of error importing updater
|
||||||
if updater.invalidupdater == True:
|
if updater.invalidupdater == True:
|
||||||
@ -285,26 +289,26 @@ class addon_updater_update_now(bpy.types.Operator):
|
|||||||
|
|
||||||
# should return 0, if not something happened
|
# should return 0, if not something happened
|
||||||
if updater.verbose:
|
if updater.verbose:
|
||||||
if res == 0:
|
if res==0: print("Updater returned successful")
|
||||||
print("Updater returned successful")
|
else: print("Updater returned "+str(res)+", error occurred")
|
||||||
else:
|
|
||||||
print("Updater returned "+str(res)+", error occurred")
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
updater._error = "Error trying to run update"
|
updater._error = "Error trying to run update"
|
||||||
updater._error_msg = str(e)
|
updater._error_msg = str(e)
|
||||||
|
updater.print_trace()
|
||||||
atr = addon_updater_install_manually.bl_idname.split(".")
|
atr = addon_updater_install_manually.bl_idname.split(".")
|
||||||
getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
|
getattr(getattr(bpy.ops, atr[0]),atr[1])('INVOKE_DEFAULT')
|
||||||
elif updater.update_ready == None:
|
elif updater.update_ready == None:
|
||||||
(update_ready, version, link) = updater.check_for_update(now=True)
|
(update_ready, version, link) = updater.check_for_update(now=True)
|
||||||
# re-launch this dialog
|
# re-launch this dialog
|
||||||
atr = addon_updater_install_popup.bl_idname.split(".")
|
atr = addon_updater_install_popup.bl_idname.split(".")
|
||||||
getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
|
getattr(getattr(bpy.ops, atr[0]),atr[1])('INVOKE_DEFAULT')
|
||||||
|
|
||||||
elif updater.update_ready == False:
|
elif updater.update_ready == False:
|
||||||
self.report({'INFO'}, "Nothing to update")
|
self.report({'INFO'}, "Nothing to update")
|
||||||
|
return {'CANCELLED'}
|
||||||
else:
|
else:
|
||||||
self.report(
|
self.report({'ERROR'}, "Encountered problem while trying to update")
|
||||||
{'ERROR'}, "Encountered problem while trying to update")
|
return {'CANCELLED'}
|
||||||
|
|
||||||
return {'FINISHED'}
|
return {'FINISHED'}
|
||||||
|
|
||||||
@ -322,10 +326,10 @@ class addon_updater_update_target(bpy.types.Operator):
|
|||||||
ret = []
|
ret = []
|
||||||
|
|
||||||
ret = []
|
ret = []
|
||||||
i = 0
|
i=0
|
||||||
for tag in updater.tags:
|
for tag in updater.tags:
|
||||||
ret.append((tag, tag, "Select to install "+tag))
|
ret.append( (tag,tag,"Select to install "+tag) )
|
||||||
i += 1
|
i+=1
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
target = bpy.props.EnumProperty(
|
target = bpy.props.EnumProperty(
|
||||||
@ -346,9 +350,8 @@ class addon_updater_update_target(bpy.types.Operator):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def poll(cls, context):
|
def poll(cls, context):
|
||||||
if updater.invalidupdater == True:
|
if updater.invalidupdater == True: return False
|
||||||
return False
|
return updater.update_ready != None and len(updater.tags)>0
|
||||||
return updater.update_ready != None and len(updater.tags) > 0
|
|
||||||
|
|
||||||
def invoke(self, context, event):
|
def invoke(self, context, event):
|
||||||
return context.window_manager.invoke_props_dialog(self)
|
return context.window_manager.invoke_props_dialog(self)
|
||||||
@ -364,7 +367,8 @@ class addon_updater_update_target(bpy.types.Operator):
|
|||||||
subcol = split.column()
|
subcol = split.column()
|
||||||
subcol.prop(self, "target", text="")
|
subcol.prop(self, "target", text="")
|
||||||
|
|
||||||
def execute(self, context):
|
|
||||||
|
def execute(self,context):
|
||||||
|
|
||||||
# in case of error importing updater
|
# in case of error importing updater
|
||||||
if updater.invalidupdater == True:
|
if updater.invalidupdater == True:
|
||||||
@ -377,7 +381,7 @@ class addon_updater_update_target(bpy.types.Operator):
|
|||||||
clean=self.clean_install)
|
clean=self.clean_install)
|
||||||
|
|
||||||
# should return 0, if not something happened
|
# should return 0, if not something happened
|
||||||
if res == 0:
|
if res==0:
|
||||||
if updater.verbose:
|
if updater.verbose:
|
||||||
print("Updater returned successful")
|
print("Updater returned successful")
|
||||||
else:
|
else:
|
||||||
@ -412,14 +416,12 @@ class addon_updater_install_manually(bpy.types.Operator):
|
|||||||
return
|
return
|
||||||
|
|
||||||
# use a "failed flag"? it shows this label if the case failed.
|
# use a "failed flag"? it shows this label if the case failed.
|
||||||
if self.error != "":
|
if self.error!="":
|
||||||
col = layout.column()
|
col = layout.column()
|
||||||
col.scale_y = 0.7
|
col.scale_y = 0.7
|
||||||
col.label(
|
col.label(text="There was an issue trying to auto-install",icon="ERROR")
|
||||||
text="There was an issue trying to auto-install", icon="ERROR")
|
col.label(text="Press the download button below and install",icon="BLANK1")
|
||||||
col.label(
|
col.label(text="the zip file like a normal addon.",icon="BLANK1")
|
||||||
text="Press the download button below and install", icon="BLANK1")
|
|
||||||
col.label(text="the zip file like a normal addon.", icon="BLANK1")
|
|
||||||
else:
|
else:
|
||||||
col = layout.column()
|
col = layout.column()
|
||||||
col.scale_y = 0.7
|
col.scale_y = 0.7
|
||||||
@ -434,7 +436,7 @@ class addon_updater_install_manually(bpy.types.Operator):
|
|||||||
|
|
||||||
if updater.update_link != None:
|
if updater.update_link != None:
|
||||||
row.operator("wm.url_open",
|
row.operator("wm.url_open",
|
||||||
text="Direct download").url = updater.update_link
|
text="Direct download").url=updater.update_link
|
||||||
else:
|
else:
|
||||||
row.operator("wm.url_open",
|
row.operator("wm.url_open",
|
||||||
text="(failed to retrieve direct download)")
|
text="(failed to retrieve direct download)")
|
||||||
@ -442,14 +444,13 @@ class addon_updater_install_manually(bpy.types.Operator):
|
|||||||
|
|
||||||
if updater.website != None:
|
if updater.website != None:
|
||||||
row = layout.row()
|
row = layout.row()
|
||||||
row.operator("wm.url_open", text="Open website").url =\
|
row.operator("wm.url_open",text="Open website").url=\
|
||||||
updater.website
|
updater.website
|
||||||
else:
|
else:
|
||||||
row = layout.row()
|
row = layout.row()
|
||||||
row.label(text="See source website to download the update")
|
row.label(text="See source website to download the update")
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self,context):
|
||||||
|
|
||||||
return {'FINISHED'}
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
@ -491,22 +492,29 @@ class addon_updater_updated_successful(bpy.types.Operator):
|
|||||||
rw.operator("wm.url_open",
|
rw.operator("wm.url_open",
|
||||||
text="Click for manual download.",
|
text="Click for manual download.",
|
||||||
icon="BLANK1"
|
icon="BLANK1"
|
||||||
).url = updater.website
|
).url=updater.website
|
||||||
# manual download button here
|
# manual download button here
|
||||||
elif updater.auto_reload_post_update == False:
|
elif updater.auto_reload_post_update == False:
|
||||||
# tell user to restart blender
|
# tell user to restart blender
|
||||||
if "just_restored" in saved and saved["just_restored"] == True:
|
if "just_restored" in saved and saved["just_restored"] == True:
|
||||||
col = layout.column()
|
col = layout.column()
|
||||||
col.scale_y = 0.7
|
|
||||||
col.label(text="Addon restored", icon="RECOVER_LAST")
|
col.label(text="Addon restored", icon="RECOVER_LAST")
|
||||||
col.label(text="Restart blender to reload.", icon="BLANK1")
|
alert_row = col.row()
|
||||||
|
alert_row.alert = True
|
||||||
|
alert_row.operator(
|
||||||
|
"wm.quit_blender",
|
||||||
|
text="Restart blender to reload",
|
||||||
|
icon="BLANK1")
|
||||||
updater.json_reset_restore()
|
updater.json_reset_restore()
|
||||||
else:
|
else:
|
||||||
col = layout.column()
|
col = layout.column()
|
||||||
col.scale_y = 0.7
|
col.label(text="Addon successfully installed", icon="FILE_TICK")
|
||||||
col.label(text="Addon successfully installed",
|
alert_row = col.row()
|
||||||
icon="FILE_TICK")
|
alert_row.alert = True
|
||||||
col.label(text="Restart blender to reload.", icon="BLANK1")
|
alert_row.operator(
|
||||||
|
"wm.quit_blender",
|
||||||
|
text="Restart blender to reload",
|
||||||
|
icon="BLANK1")
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# reload addon, but still recommend they restart blender
|
# reload addon, but still recommend they restart blender
|
||||||
@ -520,8 +528,7 @@ class addon_updater_updated_successful(bpy.types.Operator):
|
|||||||
else:
|
else:
|
||||||
col = layout.column()
|
col = layout.column()
|
||||||
col.scale_y = 0.7
|
col.scale_y = 0.7
|
||||||
col.label(text="Addon successfully installed",
|
col.label(text="Addon successfully installed", icon="FILE_TICK")
|
||||||
icon="FILE_TICK")
|
|
||||||
col.label(text="Consider restarting blender to fully reload.",
|
col.label(text="Consider restarting blender to fully reload.",
|
||||||
icon="BLANK1")
|
icon="BLANK1")
|
||||||
|
|
||||||
@ -539,7 +546,7 @@ class addon_updater_restore_backup(bpy.types.Operator):
|
|||||||
@classmethod
|
@classmethod
|
||||||
def poll(cls, context):
|
def poll(cls, context):
|
||||||
try:
|
try:
|
||||||
return os.path.isdir(os.path.join(updater.stage_path, "backup"))
|
return os.path.isdir(os.path.join(updater.stage_path,"backup"))
|
||||||
except:
|
except:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@ -572,7 +579,7 @@ class addon_updater_ignore(bpy.types.Operator):
|
|||||||
if updater.invalidupdater == True:
|
if updater.invalidupdater == True:
|
||||||
return {'CANCELLED'}
|
return {'CANCELLED'}
|
||||||
updater.ignore_update()
|
updater.ignore_update()
|
||||||
self.report({"INFO"}, "Open addon preferences for updater options")
|
self.report({"INFO"},"Open addon preferences for updater options")
|
||||||
return {'FINISHED'}
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
@ -610,7 +617,6 @@ ran_update_sucess_popup = False
|
|||||||
# global var for preventing successive calls
|
# global var for preventing successive calls
|
||||||
ran_background_check = False
|
ran_background_check = False
|
||||||
|
|
||||||
|
|
||||||
@persistent
|
@persistent
|
||||||
def updater_run_success_popup_handler(scene):
|
def updater_run_success_popup_handler(scene):
|
||||||
global ran_update_sucess_popup
|
global ran_update_sucess_popup
|
||||||
@ -621,13 +627,17 @@ def updater_run_success_popup_handler(scene):
|
|||||||
return
|
return
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if "scene_update_post" in dir(bpy.app.handlers):
|
||||||
bpy.app.handlers.scene_update_post.remove(
|
bpy.app.handlers.scene_update_post.remove(
|
||||||
updater_run_success_popup_handler)
|
updater_run_success_popup_handler)
|
||||||
|
else:
|
||||||
|
bpy.app.handlers.depsgraph_update_post.remove(
|
||||||
|
updater_run_success_popup_handler)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
atr = addon_updater_updated_successful.bl_idname.split(".")
|
atr = addon_updater_updated_successful.bl_idname.split(".")
|
||||||
getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
|
getattr(getattr(bpy.ops, atr[0]),atr[1])('INVOKE_DEFAULT')
|
||||||
|
|
||||||
|
|
||||||
@persistent
|
@persistent
|
||||||
@ -640,8 +650,12 @@ def updater_run_install_popup_handler(scene):
|
|||||||
return
|
return
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if "scene_update_post" in dir(bpy.app.handlers):
|
||||||
bpy.app.handlers.scene_update_post.remove(
|
bpy.app.handlers.scene_update_post.remove(
|
||||||
updater_run_install_popup_handler)
|
updater_run_install_popup_handler)
|
||||||
|
else:
|
||||||
|
bpy.app.handlers.depsgraph_update_post.remove(
|
||||||
|
updater_run_install_popup_handler)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@ -659,12 +673,12 @@ def updater_run_install_popup_handler(scene):
|
|||||||
# user probably manually installed to get the up to date addon
|
# user probably manually installed to get the up to date addon
|
||||||
# in here. Clear out the update flag using this function
|
# in here. Clear out the update flag using this function
|
||||||
if updater.verbose:
|
if updater.verbose:
|
||||||
print("{} updater: appears user updated, clearing flag".format(
|
print("{} updater: appears user updated, clearing flag".format(\
|
||||||
updater.addon))
|
updater.addon))
|
||||||
updater.json_reset_restore()
|
updater.json_reset_restore()
|
||||||
return
|
return
|
||||||
atr = addon_updater_install_popup.bl_idname.split(".")
|
atr = addon_updater_install_popup.bl_idname.split(".")
|
||||||
getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
|
getattr(getattr(bpy.ops, atr[0]),atr[1])('INVOKE_DEFAULT')
|
||||||
|
|
||||||
|
|
||||||
def background_update_callback(update_ready):
|
def background_update_callback(update_ready):
|
||||||
@ -678,11 +692,24 @@ def background_update_callback(update_ready):
|
|||||||
return
|
return
|
||||||
if update_ready != True:
|
if update_ready != True:
|
||||||
return
|
return
|
||||||
if updater_run_install_popup_handler not in \
|
|
||||||
bpy.app.handlers.scene_update_post and \
|
# see if we need add to the update handler to trigger the popup
|
||||||
ran_autocheck_install_popup == False:
|
handlers = []
|
||||||
|
if "scene_update_post" in dir(bpy.app.handlers): # 2.7x
|
||||||
|
handlers = bpy.app.handlers.scene_update_post
|
||||||
|
else: # 2.8x
|
||||||
|
handlers = bpy.app.handlers.depsgraph_update_post
|
||||||
|
in_handles = updater_run_install_popup_handler in handlers
|
||||||
|
|
||||||
|
if in_handles or ran_autocheck_install_popup:
|
||||||
|
return
|
||||||
|
|
||||||
|
if "scene_update_post" in dir(bpy.app.handlers): # 2.7x
|
||||||
bpy.app.handlers.scene_update_post.append(
|
bpy.app.handlers.scene_update_post.append(
|
||||||
updater_run_install_popup_handler)
|
updater_run_install_popup_handler)
|
||||||
|
else: # 2.8x
|
||||||
|
bpy.app.handlers.depsgraph_update_post.append(
|
||||||
|
updater_run_install_popup_handler)
|
||||||
ran_autocheck_install_popup = True
|
ran_autocheck_install_popup = True
|
||||||
|
|
||||||
|
|
||||||
@ -701,22 +728,21 @@ def post_update_callback(module_name, res=None):
|
|||||||
if updater.invalidupdater == True:
|
if updater.invalidupdater == True:
|
||||||
return
|
return
|
||||||
|
|
||||||
if res == None:
|
if res==None:
|
||||||
# this is the same code as in conditional at the end of the register function
|
# this is the same code as in conditional at the end of the register function
|
||||||
# ie if "auto_reload_post_update" == True, comment out this code
|
# ie if "auto_reload_post_update" == True, comment out this code
|
||||||
if updater.verbose:
|
if updater.verbose:
|
||||||
print("{} updater: Running post update callback".format(updater.addon))
|
print("{} updater: Running post update callback".format(updater.addon))
|
||||||
# bpy.app.handlers.scene_update_post.append(updater_run_success_popup_handler)
|
|
||||||
|
|
||||||
atr = addon_updater_updated_successful.bl_idname.split(".")
|
atr = addon_updater_updated_successful.bl_idname.split(".")
|
||||||
getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
|
getattr(getattr(bpy.ops, atr[0]),atr[1])('INVOKE_DEFAULT')
|
||||||
global ran_update_sucess_popup
|
global ran_update_sucess_popup
|
||||||
ran_update_sucess_popup = True
|
ran_update_sucess_popup = True
|
||||||
else:
|
else:
|
||||||
# some kind of error occurred and it was unable to install,
|
# some kind of error occurred and it was unable to install,
|
||||||
# offer manual download instead
|
# offer manual download instead
|
||||||
atr = addon_updater_updated_successful.bl_idname.split(".")
|
atr = addon_updater_updated_successful.bl_idname.split(".")
|
||||||
getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT', error=res)
|
getattr(getattr(bpy.ops, atr[0]),atr[1])('INVOKE_DEFAULT',error=res)
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
||||||
@ -760,7 +786,7 @@ def check_for_update_background():
|
|||||||
# this function should take a bool input, if true: update ready
|
# this function should take a bool input, if true: update ready
|
||||||
# if false, no update ready
|
# if false, no update ready
|
||||||
if updater.verbose:
|
if updater.verbose:
|
||||||
print("{} updater: Running background check for update".format(
|
print("{} updater: Running background check for update".format(\
|
||||||
updater.addon))
|
updater.addon))
|
||||||
updater.check_for_update_async(background_update_callback)
|
updater.check_for_update_async(background_update_callback)
|
||||||
ran_background_check = True
|
ran_background_check = True
|
||||||
@ -789,10 +815,9 @@ def check_for_update_nonthreaded(self, context):
|
|||||||
(update_ready, version, link) = updater.check_for_update(now=False)
|
(update_ready, version, link) = updater.check_for_update(now=False)
|
||||||
if update_ready == True:
|
if update_ready == True:
|
||||||
atr = addon_updater_install_popup.bl_idname.split(".")
|
atr = addon_updater_install_popup.bl_idname.split(".")
|
||||||
getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
|
getattr(getattr(bpy.ops, atr[0]),atr[1])('INVOKE_DEFAULT')
|
||||||
else:
|
else:
|
||||||
if updater.verbose:
|
if updater.verbose: print("No update ready")
|
||||||
print("No update ready")
|
|
||||||
self.report({'INFO'}, "No update ready")
|
self.report({'INFO'}, "No update ready")
|
||||||
|
|
||||||
|
|
||||||
@ -806,22 +831,36 @@ def showReloadPopup():
|
|||||||
saved_state = updater.json
|
saved_state = updater.json
|
||||||
global ran_update_sucess_popup
|
global ran_update_sucess_popup
|
||||||
|
|
||||||
a = saved_state != None
|
has_state = saved_state != None
|
||||||
b = "just_updated" in saved_state
|
just_updated = "just_updated" in saved_state
|
||||||
c = saved_state["just_updated"]
|
updated_info = saved_state["just_updated"]
|
||||||
|
|
||||||
|
if not (has_state and just_updated and updated_info):
|
||||||
|
return
|
||||||
|
|
||||||
if a and b and c:
|
|
||||||
updater.json_reset_postupdate() # so this only runs once
|
updater.json_reset_postupdate() # so this only runs once
|
||||||
|
|
||||||
# no handlers in this case
|
# no handlers in this case
|
||||||
if updater.auto_reload_post_update == False:
|
if updater.auto_reload_post_update == False:
|
||||||
return
|
return
|
||||||
|
|
||||||
if updater_run_success_popup_handler not in \
|
# see if we need add to the update handler to trigger the popup
|
||||||
bpy.app.handlers.scene_update_post \
|
handlers = []
|
||||||
and ran_update_sucess_popup == False:
|
if "scene_update_post" in dir(bpy.app.handlers): # 2.7x
|
||||||
|
handlers = bpy.app.handlers.scene_update_post
|
||||||
|
else: # 2.8x
|
||||||
|
handlers = bpy.app.handlers.depsgraph_update_post
|
||||||
|
in_handles = updater_run_success_popup_handler in handlers
|
||||||
|
|
||||||
|
if in_handles or ran_update_sucess_popup is True:
|
||||||
|
return
|
||||||
|
|
||||||
|
if "scene_update_post" in dir(bpy.app.handlers): # 2.7x
|
||||||
bpy.app.handlers.scene_update_post.append(
|
bpy.app.handlers.scene_update_post.append(
|
||||||
updater_run_success_popup_handler)
|
updater_run_success_popup_handler)
|
||||||
|
else: # 2.8x
|
||||||
|
bpy.app.handlers.depsgraph_update_post.append(
|
||||||
|
updater_run_success_popup_handler)
|
||||||
ran_update_sucess_popup = True
|
ran_update_sucess_popup = True
|
||||||
|
|
||||||
|
|
||||||
@ -847,9 +886,14 @@ def update_notice_box_ui(self, context):
|
|||||||
layout = self.layout
|
layout = self.layout
|
||||||
box = layout.box()
|
box = layout.box()
|
||||||
col = box.column()
|
col = box.column()
|
||||||
col.scale_y = 0.7
|
alert_row = col.row()
|
||||||
col.label(text="Restart blender", icon="ERROR")
|
alert_row.alert = True
|
||||||
|
alert_row.operator(
|
||||||
|
"wm.quit_blender",
|
||||||
|
text="Restart blender",
|
||||||
|
icon="ERROR")
|
||||||
col.label(text="to complete update")
|
col.label(text="to complete update")
|
||||||
|
|
||||||
return
|
return
|
||||||
|
|
||||||
# if user pressed ignore, don't draw the box
|
# if user pressed ignore, don't draw the box
|
||||||
@ -861,16 +905,16 @@ def update_notice_box_ui(self, context):
|
|||||||
layout = self.layout
|
layout = self.layout
|
||||||
box = layout.box()
|
box = layout.box()
|
||||||
col = box.column(align=True)
|
col = box.column(align=True)
|
||||||
col.label(text="Update ready!", icon="ERROR")
|
col.label(text="Update ready!",icon="ERROR")
|
||||||
col.separator()
|
col.separator()
|
||||||
row = col.row(align=True)
|
row = col.row(align=True)
|
||||||
split = row.split(align=True)
|
split = row.split(align=True)
|
||||||
colL = split.column(align=True)
|
colL = split.column(align=True)
|
||||||
colL.scale_y = 1.5
|
colL.scale_y = 1.5
|
||||||
colL.operator(addon_updater_ignore.bl_idname, icon="X", text="Ignore")
|
colL.operator(addon_updater_ignore.bl_idname,icon="X",text="Ignore")
|
||||||
colR = split.column(align=True)
|
colR = split.column(align=True)
|
||||||
colR.scale_y = 1.5
|
colR.scale_y = 1.5
|
||||||
if updater.manual_only == False:
|
if updater.manual_only==False:
|
||||||
colR.operator(addon_updater_update_now.bl_idname,
|
colR.operator(addon_updater_update_now.bl_idname,
|
||||||
text="Update", icon="LOOP_FORWARDS")
|
text="Update", icon="LOOP_FORWARDS")
|
||||||
col.operator("wm.url_open", text="Open website").url = updater.website
|
col.operator("wm.url_open", text="Open website").url = updater.website
|
||||||
@ -891,7 +935,7 @@ def update_settings_ui(self, context, element=None):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
# element is a UI element, such as layout, a row, column, or box
|
# element is a UI element, such as layout, a row, column, or box
|
||||||
if element == None:
|
if element==None:
|
||||||
element = self.layout
|
element = self.layout
|
||||||
box = element.box()
|
box = element.box()
|
||||||
|
|
||||||
@ -913,27 +957,33 @@ def update_settings_ui(self, context, element=None):
|
|||||||
if updater.auto_reload_post_update == False:
|
if updater.auto_reload_post_update == False:
|
||||||
saved_state = updater.json
|
saved_state = updater.json
|
||||||
if "just_updated" in saved_state and saved_state["just_updated"] == True:
|
if "just_updated" in saved_state and saved_state["just_updated"] == True:
|
||||||
row.label(text="Restart blender to complete update", icon="ERROR")
|
row.alert = True
|
||||||
|
row.operator(
|
||||||
|
"wm.quit_blender",
|
||||||
|
text="Restart blender to complete update",
|
||||||
|
icon="ERROR")
|
||||||
return
|
return
|
||||||
|
|
||||||
split = layout_split(row, factor=0.3)
|
split = layout_split(row, factor=0.4)
|
||||||
subcol = split.column()
|
subcol = split.column()
|
||||||
subcol.prop(settings, "auto_check_update")
|
subcol.prop(settings, "auto_check_update")
|
||||||
subcol = split.column()
|
subcol = split.column()
|
||||||
|
|
||||||
if settings.auto_check_update == False:
|
if settings.auto_check_update==False:
|
||||||
subcol.enabled = False
|
subcol.enabled = False
|
||||||
subrow = subcol.row()
|
subrow = subcol.row()
|
||||||
subrow.label(text="Interval between checks")
|
subrow.label(text="Interval between checks")
|
||||||
subrow = subcol.row(align=True)
|
subrow = subcol.row(align=True)
|
||||||
checkcol = subrow.column(align=True)
|
checkcol = subrow.column(align=True)
|
||||||
checkcol.prop(settings, "updater_intrval_months")
|
checkcol.prop(settings,"updater_intrval_months")
|
||||||
checkcol = subrow.column(align=True)
|
checkcol = subrow.column(align=True)
|
||||||
checkcol.prop(settings, "updater_intrval_days")
|
checkcol.prop(settings,"updater_intrval_days")
|
||||||
checkcol = subrow.column(align=True)
|
checkcol = subrow.column(align=True)
|
||||||
checkcol.prop(settings, "updater_intrval_hours")
|
|
||||||
checkcol = subrow.column(align=True)
|
# Consider un-commenting for local dev (e.g. to set shorter intervals)
|
||||||
checkcol.prop(settings, "updater_intrval_minutes")
|
# checkcol.prop(settings,"updater_intrval_hours")
|
||||||
|
# checkcol = subrow.column(align=True)
|
||||||
|
# checkcol.prop(settings,"updater_intrval_minutes")
|
||||||
|
|
||||||
# checking / managing updates
|
# checking / managing updates
|
||||||
row = box.row()
|
row = box.row()
|
||||||
@ -954,7 +1004,7 @@ def update_settings_ui(self, context, element=None):
|
|||||||
split = subcol.split(align=True)
|
split = subcol.split(align=True)
|
||||||
split.scale_y = 2
|
split.scale_y = 2
|
||||||
split.operator(addon_updater_check_now.bl_idname,
|
split.operator(addon_updater_check_now.bl_idname,
|
||||||
text="", icon="FILE_REFRESH")
|
text = "", icon="FILE_REFRESH")
|
||||||
|
|
||||||
elif updater.update_ready == None and updater.async_checking == False:
|
elif updater.update_ready == None and updater.async_checking == False:
|
||||||
col.scale_y = 2
|
col.scale_y = 2
|
||||||
@ -970,11 +1020,11 @@ def update_settings_ui(self, context, element=None):
|
|||||||
split = subcol.split(align=True)
|
split = subcol.split(align=True)
|
||||||
split.scale_y = 2
|
split.scale_y = 2
|
||||||
split.operator(addon_updater_end_background.bl_idname,
|
split.operator(addon_updater_end_background.bl_idname,
|
||||||
text="", icon="X")
|
text = "", icon="X")
|
||||||
|
|
||||||
elif updater.include_branches == True and \
|
elif updater.include_branches==True and \
|
||||||
len(updater.tags) == len(updater.include_branch_list) and \
|
len(updater.tags)==len(updater.include_branch_list) and \
|
||||||
updater.manual_only == False:
|
updater.manual_only==False:
|
||||||
# no releases found, but still show the appropriate branch
|
# no releases found, but still show the appropriate branch
|
||||||
subcol = col.row(align=True)
|
subcol = col.row(align=True)
|
||||||
subcol.scale_y = 1
|
subcol.scale_y = 1
|
||||||
@ -985,9 +1035,9 @@ def update_settings_ui(self, context, element=None):
|
|||||||
split = subcol.split(align=True)
|
split = subcol.split(align=True)
|
||||||
split.scale_y = 2
|
split.scale_y = 2
|
||||||
split.operator(addon_updater_check_now.bl_idname,
|
split.operator(addon_updater_check_now.bl_idname,
|
||||||
text="", icon="FILE_REFRESH")
|
text = "", icon="FILE_REFRESH")
|
||||||
|
|
||||||
elif updater.update_ready == True and updater.manual_only == False:
|
elif updater.update_ready==True and updater.manual_only==False:
|
||||||
subcol = col.row(align=True)
|
subcol = col.row(align=True)
|
||||||
subcol.scale_y = 1
|
subcol.scale_y = 1
|
||||||
split = subcol.split(align=True)
|
split = subcol.split(align=True)
|
||||||
@ -997,12 +1047,12 @@ def update_settings_ui(self, context, element=None):
|
|||||||
split = subcol.split(align=True)
|
split = subcol.split(align=True)
|
||||||
split.scale_y = 2
|
split.scale_y = 2
|
||||||
split.operator(addon_updater_check_now.bl_idname,
|
split.operator(addon_updater_check_now.bl_idname,
|
||||||
text="", icon="FILE_REFRESH")
|
text = "", icon="FILE_REFRESH")
|
||||||
|
|
||||||
elif updater.update_ready == True and updater.manual_only == True:
|
elif updater.update_ready==True and updater.manual_only==True:
|
||||||
col.scale_y = 2
|
col.scale_y = 2
|
||||||
col.operator("wm.url_open",
|
col.operator("wm.url_open",
|
||||||
text="Download "+str(updater.update_version)).url = updater.website
|
text="Download "+str(updater.update_version)).url=updater.website
|
||||||
else: # i.e. that updater.update_ready == False
|
else: # i.e. that updater.update_ready == False
|
||||||
subcol = col.row(align=True)
|
subcol = col.row(align=True)
|
||||||
subcol.scale_y = 1
|
subcol.scale_y = 1
|
||||||
@ -1014,12 +1064,12 @@ def update_settings_ui(self, context, element=None):
|
|||||||
split = subcol.split(align=True)
|
split = subcol.split(align=True)
|
||||||
split.scale_y = 2
|
split.scale_y = 2
|
||||||
split.operator(addon_updater_check_now.bl_idname,
|
split.operator(addon_updater_check_now.bl_idname,
|
||||||
text="", icon="FILE_REFRESH")
|
text = "", icon="FILE_REFRESH")
|
||||||
|
|
||||||
if updater.manual_only == False:
|
if updater.manual_only == False:
|
||||||
col = row.column(align=True)
|
col = row.column(align=True)
|
||||||
# col.operator(addon_updater_update_target.bl_idname,
|
#col.operator(addon_updater_update_target.bl_idname,
|
||||||
if updater.include_branches == True and len(updater.include_branch_list) > 0:
|
if updater.include_branches == True and len(updater.include_branch_list)>0:
|
||||||
branch = updater.include_branch_list[0]
|
branch = updater.include_branch_list[0]
|
||||||
col.operator(addon_updater_update_target.bl_idname,
|
col.operator(addon_updater_update_target.bl_idname,
|
||||||
text="Install latest {} / old version".format(branch))
|
text="Install latest {} / old version".format(branch))
|
||||||
@ -1027,7 +1077,7 @@ def update_settings_ui(self, context, element=None):
|
|||||||
col.operator(addon_updater_update_target.bl_idname,
|
col.operator(addon_updater_update_target.bl_idname,
|
||||||
text="Reinstall / install old version")
|
text="Reinstall / install old version")
|
||||||
lastdate = "none found"
|
lastdate = "none found"
|
||||||
backuppath = os.path.join(updater.stage_path, "backup")
|
backuppath = os.path.join(updater.stage_path,"backup")
|
||||||
if "backup_date" in updater.json and os.path.isdir(backuppath):
|
if "backup_date" in updater.json and os.path.isdir(backuppath):
|
||||||
if updater.json["backup_date"] == "":
|
if updater.json["backup_date"] == "":
|
||||||
lastdate = "Date not found"
|
lastdate = "Date not found"
|
||||||
@ -1042,7 +1092,7 @@ def update_settings_ui(self, context, element=None):
|
|||||||
if updater.error != None and updater.error_msg != None:
|
if updater.error != None and updater.error_msg != None:
|
||||||
row.label(text=updater.error_msg)
|
row.label(text=updater.error_msg)
|
||||||
elif lastcheck != "" and lastcheck != None:
|
elif lastcheck != "" and lastcheck != None:
|
||||||
lastcheck = lastcheck[0: lastcheck.index(".")]
|
lastcheck = lastcheck[0: lastcheck.index(".") ]
|
||||||
row.label(text="Last update check: " + lastcheck)
|
row.label(text="Last update check: " + lastcheck)
|
||||||
else:
|
else:
|
||||||
row.label(text="Last update check: Never")
|
row.label(text="Last update check: Never")
|
||||||
@ -1055,7 +1105,7 @@ def update_settings_ui_condensed(self, context, element=None):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
# element is a UI element, such as layout, a row, column, or box
|
# element is a UI element, such as layout, a row, column, or box
|
||||||
if element == None:
|
if element==None:
|
||||||
element = self.layout
|
element = self.layout
|
||||||
row = element.row()
|
row = element.row()
|
||||||
|
|
||||||
@ -1073,7 +1123,11 @@ def update_settings_ui_condensed(self, context, element=None):
|
|||||||
if updater.auto_reload_post_update == False:
|
if updater.auto_reload_post_update == False:
|
||||||
saved_state = updater.json
|
saved_state = updater.json
|
||||||
if "just_updated" in saved_state and saved_state["just_updated"] == True:
|
if "just_updated" in saved_state and saved_state["just_updated"] == True:
|
||||||
row.label(text="Restart blender to complete update", icon="ERROR")
|
row.alert = True # mark red
|
||||||
|
row.operator(
|
||||||
|
"wm.quit_blender",
|
||||||
|
text="Restart blender to complete update",
|
||||||
|
icon="ERROR")
|
||||||
return
|
return
|
||||||
|
|
||||||
col = row.column()
|
col = row.column()
|
||||||
@ -1093,7 +1147,7 @@ def update_settings_ui_condensed(self, context, element=None):
|
|||||||
split = subcol.split(align=True)
|
split = subcol.split(align=True)
|
||||||
split.scale_y = 2
|
split.scale_y = 2
|
||||||
split.operator(addon_updater_check_now.bl_idname,
|
split.operator(addon_updater_check_now.bl_idname,
|
||||||
text="", icon="FILE_REFRESH")
|
text = "", icon="FILE_REFRESH")
|
||||||
|
|
||||||
elif updater.update_ready == None and updater.async_checking == False:
|
elif updater.update_ready == None and updater.async_checking == False:
|
||||||
col.scale_y = 2
|
col.scale_y = 2
|
||||||
@ -1109,11 +1163,11 @@ def update_settings_ui_condensed(self, context, element=None):
|
|||||||
split = subcol.split(align=True)
|
split = subcol.split(align=True)
|
||||||
split.scale_y = 2
|
split.scale_y = 2
|
||||||
split.operator(addon_updater_end_background.bl_idname,
|
split.operator(addon_updater_end_background.bl_idname,
|
||||||
text="", icon="X")
|
text = "", icon="X")
|
||||||
|
|
||||||
elif updater.include_branches == True and \
|
elif updater.include_branches==True and \
|
||||||
len(updater.tags) == len(updater.include_branch_list) and \
|
len(updater.tags)==len(updater.include_branch_list) and \
|
||||||
updater.manual_only == False:
|
updater.manual_only==False:
|
||||||
# no releases found, but still show the appropriate branch
|
# no releases found, but still show the appropriate branch
|
||||||
subcol = col.row(align=True)
|
subcol = col.row(align=True)
|
||||||
subcol.scale_y = 1
|
subcol.scale_y = 1
|
||||||
@ -1124,9 +1178,9 @@ def update_settings_ui_condensed(self, context, element=None):
|
|||||||
split = subcol.split(align=True)
|
split = subcol.split(align=True)
|
||||||
split.scale_y = 2
|
split.scale_y = 2
|
||||||
split.operator(addon_updater_check_now.bl_idname,
|
split.operator(addon_updater_check_now.bl_idname,
|
||||||
text="", icon="FILE_REFRESH")
|
text = "", icon="FILE_REFRESH")
|
||||||
|
|
||||||
elif updater.update_ready == True and updater.manual_only == False:
|
elif updater.update_ready==True and updater.manual_only==False:
|
||||||
subcol = col.row(align=True)
|
subcol = col.row(align=True)
|
||||||
subcol.scale_y = 1
|
subcol.scale_y = 1
|
||||||
split = subcol.split(align=True)
|
split = subcol.split(align=True)
|
||||||
@ -1136,12 +1190,12 @@ def update_settings_ui_condensed(self, context, element=None):
|
|||||||
split = subcol.split(align=True)
|
split = subcol.split(align=True)
|
||||||
split.scale_y = 2
|
split.scale_y = 2
|
||||||
split.operator(addon_updater_check_now.bl_idname,
|
split.operator(addon_updater_check_now.bl_idname,
|
||||||
text="", icon="FILE_REFRESH")
|
text = "", icon="FILE_REFRESH")
|
||||||
|
|
||||||
elif updater.update_ready == True and updater.manual_only == True:
|
elif updater.update_ready==True and updater.manual_only==True:
|
||||||
col.scale_y = 2
|
col.scale_y = 2
|
||||||
col.operator("wm.url_open",
|
col.operator("wm.url_open",
|
||||||
text="Download "+str(updater.update_version)).url = updater.website
|
text="Download "+str(updater.update_version)).url=updater.website
|
||||||
else: # i.e. that updater.update_ready == False
|
else: # i.e. that updater.update_ready == False
|
||||||
subcol = col.row(align=True)
|
subcol = col.row(align=True)
|
||||||
subcol.scale_y = 1
|
subcol.scale_y = 1
|
||||||
@ -1153,7 +1207,7 @@ def update_settings_ui_condensed(self, context, element=None):
|
|||||||
split = subcol.split(align=True)
|
split = subcol.split(align=True)
|
||||||
split.scale_y = 2
|
split.scale_y = 2
|
||||||
split.operator(addon_updater_check_now.bl_idname,
|
split.operator(addon_updater_check_now.bl_idname,
|
||||||
text="", icon="FILE_REFRESH")
|
text = "", icon="FILE_REFRESH")
|
||||||
|
|
||||||
row = element.row()
|
row = element.row()
|
||||||
row.prop(settings, "auto_check_update")
|
row.prop(settings, "auto_check_update")
|
||||||
@ -1164,7 +1218,7 @@ def update_settings_ui_condensed(self, context, element=None):
|
|||||||
if updater.error != None and updater.error_msg != None:
|
if updater.error != None and updater.error_msg != None:
|
||||||
row.label(text=updater.error_msg)
|
row.label(text=updater.error_msg)
|
||||||
elif lastcheck != "" and lastcheck != None:
|
elif lastcheck != "" and lastcheck != None:
|
||||||
lastcheck = lastcheck[0: lastcheck.index(".")]
|
lastcheck = lastcheck[0: lastcheck.index(".") ]
|
||||||
row.label(text="Last check: " + lastcheck)
|
row.label(text="Last check: " + lastcheck)
|
||||||
else:
|
else:
|
||||||
row.label(text="Last check: Never")
|
row.label(text="Last check: Never")
|
||||||
@ -1194,13 +1248,11 @@ def skip_tag_function(self, tag):
|
|||||||
|
|
||||||
if self.include_branches == True:
|
if self.include_branches == True:
|
||||||
for branch in self.include_branch_list:
|
for branch in self.include_branch_list:
|
||||||
if tag["name"].lower() == branch:
|
if tag["name"].lower() == branch: return False
|
||||||
return False
|
|
||||||
|
|
||||||
# function converting string to tuple, ignoring e.g. leading 'v'
|
# function converting string to tuple, ignoring e.g. leading 'v'
|
||||||
tupled = self.version_tuple_from_text(tag["name"])
|
tupled = self.version_tuple_from_text(tag["name"])
|
||||||
if type(tupled) != type((1, 2, 3)):
|
if type(tupled) != type( (1,2,3) ): return True
|
||||||
return True
|
|
||||||
|
|
||||||
# select the min tag version - change tuple accordingly
|
# select the min tag version - change tuple accordingly
|
||||||
if self.version_min_update != None:
|
if self.version_min_update != None:
|
||||||
@ -1227,7 +1279,7 @@ def select_link_function(self, tag):
|
|||||||
link = tag["zipball_url"]
|
link = tag["zipball_url"]
|
||||||
|
|
||||||
# -- Example: select the first (or only) asset instead source code --
|
# -- Example: select the first (or only) asset instead source code --
|
||||||
# if "assets" in tag and "browser_download_url" in tag["assets"][0]:
|
#if "assets" in tag and "browser_download_url" in tag["assets"][0]:
|
||||||
# link = tag["assets"][0]["browser_download_url"]
|
# link = tag["assets"][0]["browser_download_url"]
|
||||||
|
|
||||||
# -- Example: select asset based on OS, where multiple builds exist --
|
# -- Example: select asset based on OS, where multiple builds exist --
|
||||||
@ -1272,7 +1324,9 @@ def register(bl_info):
|
|||||||
updater.clear_state() # clear internal vars, avoids reloading oddities
|
updater.clear_state() # clear internal vars, avoids reloading oddities
|
||||||
|
|
||||||
# confirm your updater "engine" (Github is default if not specified)
|
# confirm your updater "engine" (Github is default if not specified)
|
||||||
|
# updater.engine = "Github"
|
||||||
updater.engine = "GitLab"
|
updater.engine = "GitLab"
|
||||||
|
# updater.engine = "Bitbucket"
|
||||||
|
|
||||||
# If using private repository, indicate the token here
|
# If using private repository, indicate the token here
|
||||||
# Must be set after assigning the engine.
|
# Must be set after assigning the engine.
|
||||||
@ -1286,8 +1340,7 @@ def register(bl_info):
|
|||||||
|
|
||||||
# choose your own repository, must match git name
|
# choose your own repository, must match git name
|
||||||
updater.repo = "10515801"
|
updater.repo = "10515801"
|
||||||
|
#updater.addon = # define at top of module, MUST be done first
|
||||||
# updater.addon = # define at top of module, MUST be done first
|
|
||||||
|
|
||||||
# Website for manual addon download, optional but recommended to set
|
# Website for manual addon download, optional but recommended to set
|
||||||
updater.website = "https://gitlab.com/slumber/multi-user/"
|
updater.website = "https://gitlab.com/slumber/multi-user/"
|
||||||
@ -1295,7 +1348,7 @@ def register(bl_info):
|
|||||||
# Addon subfolder path
|
# Addon subfolder path
|
||||||
# "sample/path/to/addon"
|
# "sample/path/to/addon"
|
||||||
# default is "" or None, meaning root
|
# default is "" or None, meaning root
|
||||||
updater.subfolder_path = "multi-user"
|
updater.subfolder_path = "multi_user"
|
||||||
|
|
||||||
# used to check/compare versions
|
# used to check/compare versions
|
||||||
updater.current_version = bl_info["version"]
|
updater.current_version = bl_info["version"]
|
||||||
@ -1307,7 +1360,7 @@ def register(bl_info):
|
|||||||
|
|
||||||
# Optional, consider turning off for production or allow as an option
|
# Optional, consider turning off for production or allow as an option
|
||||||
# This will print out additional debugging info to the console
|
# This will print out additional debugging info to the console
|
||||||
updater.verbose = True # make False for production default
|
updater.verbose = False # make False for production default
|
||||||
|
|
||||||
# Optional, customize where the addon updater processing subfolder is,
|
# Optional, customize where the addon updater processing subfolder is,
|
||||||
# essentially a staging folder used by the updater on its own
|
# essentially a staging folder used by the updater on its own
|
||||||
@ -1334,7 +1387,7 @@ def register(bl_info):
|
|||||||
# as a part of the pattern list below so they will always be overwritten by an
|
# as a part of the pattern list below so they will always be overwritten by an
|
||||||
# update. If a pattern file is not found in new update, no action is taken
|
# update. If a pattern file is not found in new update, no action is taken
|
||||||
# This does NOT detele anything, only defines what is allowed to be overwritten
|
# This does NOT detele anything, only defines what is allowed to be overwritten
|
||||||
updater.overwrite_patterns = ["*.png", "*.jpg", "README.md", "LICENSE.txt"]
|
updater.overwrite_patterns = ["*.png","*.jpg","README.md","LICENSE.txt"]
|
||||||
# updater.overwrite_patterns = []
|
# updater.overwrite_patterns = []
|
||||||
# other examples:
|
# other examples:
|
||||||
# ["*"] means ALL files/folders will be overwritten by update, was the behavior pre updater v1.0.4
|
# ["*"] means ALL files/folders will be overwritten by update, was the behavior pre updater v1.0.4
|
||||||
@ -1368,11 +1421,11 @@ def register(bl_info):
|
|||||||
# the "install {branch}/older version" operator.
|
# the "install {branch}/older version" operator.
|
||||||
updater.include_branches = True
|
updater.include_branches = True
|
||||||
|
|
||||||
# (GitHub/Gitlab only) This options allows the user to use releases over tags for data,
|
# (GitHub only) This options allows the user to use releases over tags for data,
|
||||||
# which enables pulling down release logs/notes, as well as specify installs from
|
# which enables pulling down release logs/notes, as well as specify installs from
|
||||||
# release-attached zips (instead of just the auto-packaged code generated with
|
# release-attached zips (instead of just the auto-packaged code generated with
|
||||||
# a release/tag). Setting has no impact on BitBucket or GitLab repos
|
# a release/tag). Setting has no impact on BitBucket or GitLab repos
|
||||||
updater.use_releases = True
|
updater.use_releases = False
|
||||||
# note: Releases always have a tag, but a tag may not always be a release
|
# note: Releases always have a tag, but a tag may not always be a release
|
||||||
# Therefore, setting True above will filter out any non-annoted tags
|
# Therefore, setting True above will filter out any non-annoted tags
|
||||||
# note 2: Using this option will also display the release name instead of
|
# note 2: Using this option will also display the release name instead of
|
||||||
@ -1382,8 +1435,7 @@ def register(bl_info):
|
|||||||
# updater.include_branch_list defaults to ['master'] branch if set to none
|
# updater.include_branch_list defaults to ['master'] branch if set to none
|
||||||
# example targeting another multiple branches allowed to pull from
|
# example targeting another multiple branches allowed to pull from
|
||||||
# updater.include_branch_list = ['master', 'dev'] # example with two branches
|
# updater.include_branch_list = ['master', 'dev'] # example with two branches
|
||||||
# None is the equivalent to setting ['master']
|
updater.include_branch_list = ['master','develop'] # None is the equivalent to setting ['master']
|
||||||
updater.include_branch_list = None
|
|
||||||
|
|
||||||
# Only allow manual install, thus prompting the user to open
|
# Only allow manual install, thus prompting the user to open
|
||||||
# the addon's web page to download, specifically: updater.website
|
# the addon's web page to download, specifically: updater.website
|
||||||
@ -1408,7 +1460,7 @@ def register(bl_info):
|
|||||||
# Set the min and max versions allowed to install.
|
# Set the min and max versions allowed to install.
|
||||||
# Optional, default None
|
# Optional, default None
|
||||||
# min install (>=) will install this and higher
|
# min install (>=) will install this and higher
|
||||||
updater.version_min_update = (0, 0, 1)
|
updater.version_min_update = (0,0,3)
|
||||||
# updater.version_min_update = None # if not wanting to define a min
|
# updater.version_min_update = None # if not wanting to define a min
|
||||||
|
|
||||||
# max install (<) will install strictly anything lower
|
# max install (<) will install strictly anything lower
|
||||||
@ -1421,6 +1473,11 @@ def register(bl_info):
|
|||||||
# Function defined above, customize as appropriate per repository; not required
|
# Function defined above, customize as appropriate per repository; not required
|
||||||
updater.select_link = select_link_function
|
updater.select_link = select_link_function
|
||||||
|
|
||||||
|
# Recommended false to encourage blender restarts on update completion
|
||||||
|
# Setting this option to True is NOT as stable as false (could cause
|
||||||
|
# blender crashes)
|
||||||
|
updater.auto_reload_post_update = False
|
||||||
|
|
||||||
# The register line items for all operators/panels
|
# The register line items for all operators/panels
|
||||||
# If using bpy.utils.register_module(__name__) to register elsewhere
|
# If using bpy.utils.register_module(__name__) to register elsewhere
|
||||||
# in the addon, delete these lines (also from unregister)
|
# in the addon, delete these lines (also from unregister)
|
||||||
|
@ -34,11 +34,14 @@ __all__ = [
|
|||||||
'bl_metaball',
|
'bl_metaball',
|
||||||
'bl_lattice',
|
'bl_lattice',
|
||||||
'bl_lightprobe',
|
'bl_lightprobe',
|
||||||
'bl_speaker'
|
'bl_speaker',
|
||||||
|
'bl_font',
|
||||||
|
'bl_sound',
|
||||||
|
'bl_file'
|
||||||
] # Order here defines execution order
|
] # Order here defines execution order
|
||||||
|
|
||||||
from . import *
|
from . import *
|
||||||
from ..libs.replication.replication.data import ReplicatedDataFactory
|
from replication.data import ReplicatedDataFactory
|
||||||
|
|
||||||
def types_to_register():
|
def types_to_register():
|
||||||
return __all__
|
return __all__
|
||||||
|
@ -134,6 +134,7 @@ class BlAction(BlDatablock):
|
|||||||
bl_delay_refresh = 1
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
bl_icon = 'ACTION_TWEAK'
|
bl_icon = 'ACTION_TWEAK'
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
|
@ -31,6 +31,7 @@ class BlArmature(BlDatablock):
|
|||||||
bl_delay_refresh = 1
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 0
|
bl_delay_apply = 0
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
bl_icon = 'ARMATURE_DATA'
|
bl_icon = 'ARMATURE_DATA'
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
@ -92,6 +93,7 @@ class BlArmature(BlDatablock):
|
|||||||
new_bone.head = bone_data['head_local']
|
new_bone.head = bone_data['head_local']
|
||||||
new_bone.tail_radius = bone_data['tail_radius']
|
new_bone.tail_radius = bone_data['tail_radius']
|
||||||
new_bone.head_radius = bone_data['head_radius']
|
new_bone.head_radius = bone_data['head_radius']
|
||||||
|
# new_bone.roll = bone_data['roll']
|
||||||
|
|
||||||
if 'parent' in bone_data:
|
if 'parent' in bone_data:
|
||||||
new_bone.parent = target.edit_bones[data['bones']
|
new_bone.parent = target.edit_bones[data['bones']
|
||||||
@ -123,7 +125,8 @@ class BlArmature(BlDatablock):
|
|||||||
'use_connect',
|
'use_connect',
|
||||||
'parent',
|
'parent',
|
||||||
'name',
|
'name',
|
||||||
'layers'
|
'layers',
|
||||||
|
# 'roll',
|
||||||
|
|
||||||
]
|
]
|
||||||
data = dumper.dump(instance)
|
data = dumper.dump(instance)
|
||||||
|
@ -29,6 +29,7 @@ class BlCamera(BlDatablock):
|
|||||||
bl_delay_refresh = 1
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
bl_icon = 'CAMERA_DATA'
|
bl_icon = 'CAMERA_DATA'
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
@ -45,13 +46,22 @@ class BlCamera(BlDatablock):
|
|||||||
if dof_settings:
|
if dof_settings:
|
||||||
loader.load(target.dof, dof_settings)
|
loader.load(target.dof, dof_settings)
|
||||||
|
|
||||||
|
background_images = data.get('background_images')
|
||||||
|
|
||||||
|
if background_images:
|
||||||
|
target.background_images.clear()
|
||||||
|
for img_name, img_data in background_images.items():
|
||||||
|
target_img = target.background_images.new()
|
||||||
|
target_img.image = bpy.data.images[img_name]
|
||||||
|
loader.load(target_img, img_data)
|
||||||
|
|
||||||
def _dump_implementation(self, data, instance=None):
|
def _dump_implementation(self, data, instance=None):
|
||||||
assert(instance)
|
assert(instance)
|
||||||
|
|
||||||
# TODO: background image support
|
# TODO: background image support
|
||||||
|
|
||||||
dumper = Dumper()
|
dumper = Dumper()
|
||||||
dumper.depth = 2
|
dumper.depth = 3
|
||||||
dumper.include_filter = [
|
dumper.include_filter = [
|
||||||
"name",
|
"name",
|
||||||
'type',
|
'type',
|
||||||
@ -70,6 +80,7 @@ class BlCamera(BlDatablock):
|
|||||||
'aperture_fstop',
|
'aperture_fstop',
|
||||||
'aperture_blades',
|
'aperture_blades',
|
||||||
'aperture_rotation',
|
'aperture_rotation',
|
||||||
|
'ortho_scale',
|
||||||
'aperture_ratio',
|
'aperture_ratio',
|
||||||
'display_size',
|
'display_size',
|
||||||
'show_limits',
|
'show_limits',
|
||||||
@ -79,7 +90,24 @@ class BlCamera(BlDatablock):
|
|||||||
'sensor_fit',
|
'sensor_fit',
|
||||||
'sensor_height',
|
'sensor_height',
|
||||||
'sensor_width',
|
'sensor_width',
|
||||||
|
'show_background_images',
|
||||||
|
'background_images',
|
||||||
|
'alpha',
|
||||||
|
'display_depth',
|
||||||
|
'frame_method',
|
||||||
|
'offset',
|
||||||
|
'rotation',
|
||||||
|
'scale',
|
||||||
|
'use_flip_x',
|
||||||
|
'use_flip_y',
|
||||||
|
'image'
|
||||||
]
|
]
|
||||||
return dumper.dump(instance)
|
return dumper.dump(instance)
|
||||||
|
|
||||||
|
def _resolve_deps_implementation(self):
|
||||||
|
deps = []
|
||||||
|
for background in self.instance.background_images:
|
||||||
|
if background.image:
|
||||||
|
deps.append(background.image)
|
||||||
|
|
||||||
|
return deps
|
||||||
|
@ -21,6 +21,55 @@ import mathutils
|
|||||||
|
|
||||||
from .. import utils
|
from .. import utils
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock
|
||||||
|
from .dump_anything import Loader, Dumper
|
||||||
|
|
||||||
|
|
||||||
|
def dump_collection_children(collection):
|
||||||
|
collection_children = []
|
||||||
|
for child in collection.children:
|
||||||
|
if child not in collection_children:
|
||||||
|
collection_children.append(child.uuid)
|
||||||
|
return collection_children
|
||||||
|
|
||||||
|
|
||||||
|
def dump_collection_objects(collection):
|
||||||
|
collection_objects = []
|
||||||
|
for object in collection.objects:
|
||||||
|
if object not in collection_objects:
|
||||||
|
collection_objects.append(object.uuid)
|
||||||
|
|
||||||
|
return collection_objects
|
||||||
|
|
||||||
|
|
||||||
|
def load_collection_objects(dumped_objects, collection):
|
||||||
|
for object in dumped_objects:
|
||||||
|
object_ref = utils.find_from_attr('uuid', object, bpy.data.objects)
|
||||||
|
|
||||||
|
if object_ref is None:
|
||||||
|
continue
|
||||||
|
elif object_ref.name not in collection.objects.keys():
|
||||||
|
collection.objects.link(object_ref)
|
||||||
|
|
||||||
|
for object in collection.objects:
|
||||||
|
if object.uuid not in dumped_objects:
|
||||||
|
collection.objects.unlink(object)
|
||||||
|
|
||||||
|
|
||||||
|
def load_collection_childrens(dumped_childrens, collection):
|
||||||
|
for child_collection in dumped_childrens:
|
||||||
|
collection_ref = utils.find_from_attr(
|
||||||
|
'uuid',
|
||||||
|
child_collection,
|
||||||
|
bpy.data.collections)
|
||||||
|
|
||||||
|
if collection_ref is None:
|
||||||
|
continue
|
||||||
|
if collection_ref.name not in collection.children.keys():
|
||||||
|
collection.children.link(collection_ref)
|
||||||
|
|
||||||
|
for child_collection in collection.children:
|
||||||
|
if child_collection.uuid not in dumped_childrens:
|
||||||
|
collection.children.unlink(child_collection)
|
||||||
|
|
||||||
|
|
||||||
class BlCollection(BlDatablock):
|
class BlCollection(BlDatablock):
|
||||||
@ -30,6 +79,7 @@ class BlCollection(BlDatablock):
|
|||||||
bl_delay_refresh = 1
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = True
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
if self.is_library:
|
if self.is_library:
|
||||||
@ -45,56 +95,31 @@ class BlCollection(BlDatablock):
|
|||||||
return instance
|
return instance
|
||||||
|
|
||||||
def _load_implementation(self, data, target):
|
def _load_implementation(self, data, target):
|
||||||
# Load other meshes metadata
|
loader = Loader()
|
||||||
target.name = data["name"]
|
loader.load(target, data)
|
||||||
|
|
||||||
# Objects
|
# Objects
|
||||||
for object in data["objects"]:
|
load_collection_objects(data['objects'], target)
|
||||||
object_ref = bpy.data.objects.get(object)
|
|
||||||
|
|
||||||
if object_ref is None:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if object not in target.objects.keys():
|
|
||||||
target.objects.link(object_ref)
|
|
||||||
|
|
||||||
for object in target.objects:
|
|
||||||
if object.name not in data["objects"]:
|
|
||||||
target.objects.unlink(object)
|
|
||||||
|
|
||||||
# Link childrens
|
# Link childrens
|
||||||
for collection in data["children"]:
|
load_collection_childrens(data['children'], target)
|
||||||
collection_ref = bpy.data.collections.get(collection)
|
|
||||||
|
|
||||||
if collection_ref is None:
|
|
||||||
continue
|
|
||||||
if collection_ref.name not in target.children.keys():
|
|
||||||
target.children.link(collection_ref)
|
|
||||||
|
|
||||||
for collection in target.children:
|
|
||||||
if collection.name not in data["children"]:
|
|
||||||
target.children.unlink(collection)
|
|
||||||
|
|
||||||
def _dump_implementation(self, data, instance=None):
|
def _dump_implementation(self, data, instance=None):
|
||||||
assert(instance)
|
assert(instance)
|
||||||
data = {}
|
|
||||||
data['name'] = instance.name
|
dumper = Dumper()
|
||||||
|
dumper.depth = 1
|
||||||
|
dumper.include_filter = [
|
||||||
|
"name",
|
||||||
|
"instance_offset"
|
||||||
|
]
|
||||||
|
data = dumper.dump(instance)
|
||||||
|
|
||||||
# dump objects
|
# dump objects
|
||||||
collection_objects = []
|
data['objects'] = dump_collection_objects(instance)
|
||||||
for object in instance.objects:
|
|
||||||
if object not in collection_objects:
|
|
||||||
collection_objects.append(object.name)
|
|
||||||
|
|
||||||
data['objects'] = collection_objects
|
|
||||||
|
|
||||||
# dump children collections
|
# dump children collections
|
||||||
collection_children = []
|
data['children'] = dump_collection_children(instance)
|
||||||
for child in instance.children:
|
|
||||||
if child not in collection_children:
|
|
||||||
collection_children.append(child.name)
|
|
||||||
|
|
||||||
data['children'] = collection_children
|
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
@ -46,12 +46,105 @@ SPLINE_POINT = [
|
|||||||
"radius",
|
"radius",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
CURVE_METADATA = [
|
||||||
|
'align_x',
|
||||||
|
'align_y',
|
||||||
|
'bevel_depth',
|
||||||
|
'bevel_factor_end',
|
||||||
|
'bevel_factor_mapping_end',
|
||||||
|
'bevel_factor_mapping_start',
|
||||||
|
'bevel_factor_start',
|
||||||
|
'bevel_object',
|
||||||
|
'bevel_resolution',
|
||||||
|
'body',
|
||||||
|
'body_format',
|
||||||
|
'dimensions',
|
||||||
|
'eval_time',
|
||||||
|
'extrude',
|
||||||
|
'family',
|
||||||
|
'fill_mode',
|
||||||
|
'follow_curve',
|
||||||
|
'font',
|
||||||
|
'font_bold',
|
||||||
|
'font_bold_italic',
|
||||||
|
'font_italic',
|
||||||
|
'make_local',
|
||||||
|
'materials',
|
||||||
|
'name',
|
||||||
|
'offset',
|
||||||
|
'offset_x',
|
||||||
|
'offset_y',
|
||||||
|
'overflow',
|
||||||
|
'original',
|
||||||
|
'override_create',
|
||||||
|
'override_library',
|
||||||
|
'path_duration',
|
||||||
|
'preview',
|
||||||
|
'render_resolution_u',
|
||||||
|
'render_resolution_v',
|
||||||
|
'resolution_u',
|
||||||
|
'resolution_v',
|
||||||
|
'shape_keys',
|
||||||
|
'shear',
|
||||||
|
'size',
|
||||||
|
'small_caps_scale',
|
||||||
|
'space_character',
|
||||||
|
'space_line',
|
||||||
|
'space_word',
|
||||||
|
'type',
|
||||||
|
'taper_object',
|
||||||
|
'texspace_location',
|
||||||
|
'texspace_size',
|
||||||
|
'transform',
|
||||||
|
'twist_mode',
|
||||||
|
'twist_smooth',
|
||||||
|
'underline_height',
|
||||||
|
'underline_position',
|
||||||
|
'use_auto_texspace',
|
||||||
|
'use_deform_bounds',
|
||||||
|
'use_fake_user',
|
||||||
|
'use_fill_caps',
|
||||||
|
'use_fill_deform',
|
||||||
|
'use_map_taper',
|
||||||
|
'use_path',
|
||||||
|
'use_path_follow',
|
||||||
|
'use_radius',
|
||||||
|
'use_stretch',
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
SPLINE_METADATA = [
|
||||||
|
'hide',
|
||||||
|
'material_index',
|
||||||
|
# 'order_u',
|
||||||
|
# 'order_v',
|
||||||
|
# 'point_count_u',
|
||||||
|
# 'point_count_v',
|
||||||
|
'points',
|
||||||
|
'radius_interpolation',
|
||||||
|
'resolution_u',
|
||||||
|
'resolution_v',
|
||||||
|
'tilt_interpolation',
|
||||||
|
'type',
|
||||||
|
'use_bezier_u',
|
||||||
|
'use_bezier_v',
|
||||||
|
'use_cyclic_u',
|
||||||
|
'use_cyclic_v',
|
||||||
|
'use_endpoint_u',
|
||||||
|
'use_endpoint_v',
|
||||||
|
'use_smooth',
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class BlCurve(BlDatablock):
|
class BlCurve(BlDatablock):
|
||||||
bl_id = "curves"
|
bl_id = "curves"
|
||||||
bl_class = bpy.types.Curve
|
bl_class = bpy.types.Curve
|
||||||
bl_delay_refresh = 1
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
bl_icon = 'CURVE_DATA'
|
bl_icon = 'CURVE_DATA'
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
@ -62,6 +155,7 @@ class BlCurve(BlDatablock):
|
|||||||
loader.load(target, data)
|
loader.load(target, data)
|
||||||
|
|
||||||
target.splines.clear()
|
target.splines.clear()
|
||||||
|
|
||||||
# load splines
|
# load splines
|
||||||
for spline in data['splines'].values():
|
for spline in data['splines'].values():
|
||||||
new_spline = target.splines.new(spline['type'])
|
new_spline = target.splines.new(spline['type'])
|
||||||
@ -73,7 +167,11 @@ class BlCurve(BlDatablock):
|
|||||||
bezier_points.add(spline['bezier_points_count'])
|
bezier_points.add(spline['bezier_points_count'])
|
||||||
np_load_collection(spline['bezier_points'], bezier_points, SPLINE_BEZIER_POINT)
|
np_load_collection(spline['bezier_points'], bezier_points, SPLINE_BEZIER_POINT)
|
||||||
|
|
||||||
# Not really working for now...
|
if new_spline.type == 'POLY':
|
||||||
|
points = new_spline.points
|
||||||
|
points.add(spline['points_count'])
|
||||||
|
np_load_collection(spline['points'], points, SPLINE_POINT)
|
||||||
|
# Not working for now...
|
||||||
# See https://blender.stackexchange.com/questions/7020/create-nurbs-surface-with-python
|
# See https://blender.stackexchange.com/questions/7020/create-nurbs-surface-with-python
|
||||||
if new_spline.type == 'NURBS':
|
if new_spline.type == 'NURBS':
|
||||||
logging.error("NURBS not supported.")
|
logging.error("NURBS not supported.")
|
||||||
@ -83,11 +181,14 @@ class BlCurve(BlDatablock):
|
|||||||
# new_spline.points[point_index], data['splines'][spline]["points"][point_index])
|
# new_spline.points[point_index], data['splines'][spline]["points"][point_index])
|
||||||
|
|
||||||
loader.load(new_spline, spline)
|
loader.load(new_spline, spline)
|
||||||
|
|
||||||
def _dump_implementation(self, data, instance=None):
|
def _dump_implementation(self, data, instance=None):
|
||||||
assert(instance)
|
assert(instance)
|
||||||
dumper = Dumper()
|
dumper = Dumper()
|
||||||
# Conflicting attributes
|
# Conflicting attributes
|
||||||
# TODO: remove them with the NURBS support
|
# TODO: remove them with the NURBS support
|
||||||
|
dumper.include_filter = CURVE_METADATA
|
||||||
|
|
||||||
dumper.exclude_filter = [
|
dumper.exclude_filter = [
|
||||||
'users',
|
'users',
|
||||||
'order_u',
|
'order_u',
|
||||||
@ -105,8 +206,13 @@ class BlCurve(BlDatablock):
|
|||||||
|
|
||||||
for index, spline in enumerate(instance.splines):
|
for index, spline in enumerate(instance.splines):
|
||||||
dumper.depth = 2
|
dumper.depth = 2
|
||||||
|
dumper.include_filter = SPLINE_METADATA
|
||||||
spline_data = dumper.dump(spline)
|
spline_data = dumper.dump(spline)
|
||||||
# spline_data['points'] = np_dump_collection(spline.points, SPLINE_POINT)
|
|
||||||
|
if spline.type == 'POLY':
|
||||||
|
spline_data['points_count'] = len(spline.points)-1
|
||||||
|
spline_data['points'] = np_dump_collection(spline.points, SPLINE_POINT)
|
||||||
|
|
||||||
spline_data['bezier_points_count'] = len(spline.bezier_points)-1
|
spline_data['bezier_points_count'] = len(spline.bezier_points)-1
|
||||||
spline_data['bezier_points'] = np_dump_collection(spline.bezier_points, SPLINE_BEZIER_POINT)
|
spline_data['bezier_points'] = np_dump_collection(spline.bezier_points, SPLINE_BEZIER_POINT)
|
||||||
data['splines'][index] = spline_data
|
data['splines'][index] = spline_data
|
||||||
@ -118,3 +224,17 @@ class BlCurve(BlDatablock):
|
|||||||
elif isinstance(instance, T.Curve):
|
elif isinstance(instance, T.Curve):
|
||||||
data['type'] = 'CURVE'
|
data['type'] = 'CURVE'
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
def _resolve_deps_implementation(self):
|
||||||
|
# TODO: resolve material
|
||||||
|
deps = []
|
||||||
|
curve = self.instance
|
||||||
|
|
||||||
|
if isinstance(curve, T.TextCurve):
|
||||||
|
deps.extend([
|
||||||
|
curve.font,
|
||||||
|
curve.font_bold,
|
||||||
|
curve.font_bold_italic,
|
||||||
|
curve.font_italic])
|
||||||
|
|
||||||
|
return deps
|
@ -16,13 +16,16 @@
|
|||||||
# ##### END GPL LICENSE BLOCK #####
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from collections.abc import Iterable
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
import mathutils
|
import mathutils
|
||||||
|
from replication.constants import DIFF_BINARY, UP
|
||||||
|
from replication.data import ReplicatedDatablock
|
||||||
|
|
||||||
from .. import utils
|
from .. import utils
|
||||||
from .dump_anything import Loader, Dumper
|
from .dump_anything import Dumper, Loader
|
||||||
from ..libs.replication.replication.data import ReplicatedDatablock
|
|
||||||
from ..libs.replication.replication.constants import (UP, DIFF_BINARY)
|
|
||||||
|
|
||||||
|
|
||||||
def has_action(target):
|
def has_action(target):
|
||||||
@ -86,6 +89,18 @@ def load_driver(target_datablock, src_driver):
|
|||||||
loader.load(new_point, src_driver['keyframe_points'][src_point])
|
loader.load(new_point, src_driver['keyframe_points'][src_point])
|
||||||
|
|
||||||
|
|
||||||
|
def get_datablock_from_uuid(uuid, default, ignore=[]):
|
||||||
|
if not uuid:
|
||||||
|
return default
|
||||||
|
for category in dir(bpy.data):
|
||||||
|
root = getattr(bpy.data, category)
|
||||||
|
if isinstance(root, Iterable) and category not in ignore:
|
||||||
|
for item in root:
|
||||||
|
if getattr(item, 'uuid', None) == uuid:
|
||||||
|
return item
|
||||||
|
return default
|
||||||
|
|
||||||
|
|
||||||
class BlDatablock(ReplicatedDatablock):
|
class BlDatablock(ReplicatedDatablock):
|
||||||
"""BlDatablock
|
"""BlDatablock
|
||||||
|
|
||||||
@ -95,36 +110,55 @@ class BlDatablock(ReplicatedDatablock):
|
|||||||
bl_delay_apply : refresh rate in sec for apply
|
bl_delay_apply : refresh rate in sec for apply
|
||||||
bl_automatic_push : boolean
|
bl_automatic_push : boolean
|
||||||
bl_icon : type icon (blender icon name)
|
bl_icon : type icon (blender icon name)
|
||||||
|
bl_check_common: enable check even in common rights
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
instance = kwargs.get('instance', None)
|
instance = kwargs.get('instance', None)
|
||||||
|
|
||||||
|
self.preferences = utils.get_preferences()
|
||||||
|
|
||||||
# TODO: use is_library_indirect
|
# TODO: use is_library_indirect
|
||||||
self.is_library = (instance and hasattr(instance, 'library') and
|
self.is_library = (instance and hasattr(instance, 'library') and
|
||||||
instance.library) or \
|
instance.library) or \
|
||||||
(self.data and 'library' in self.data)
|
(hasattr(self,'data') and self.data and 'library' in self.data)
|
||||||
|
|
||||||
if instance and hasattr(instance, 'uuid'):
|
if instance and hasattr(instance, 'uuid'):
|
||||||
instance.uuid = self.uuid
|
instance.uuid = self.uuid
|
||||||
|
|
||||||
self.diff_method = DIFF_BINARY
|
# self.diff_method = DIFF_BINARY
|
||||||
|
|
||||||
def resolve(self):
|
def resolve(self, construct = True):
|
||||||
datablock_ref = None
|
datablock_ref = None
|
||||||
datablock_root = getattr(bpy.data, self.bl_id)
|
datablock_root = getattr(bpy.data, self.bl_id)
|
||||||
datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root)
|
datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root)
|
||||||
|
|
||||||
if not datablock_ref:
|
if not datablock_ref:
|
||||||
datablock_ref = datablock_root.get(
|
try:
|
||||||
self.data['name'], # Resolve by name
|
datablock_ref = datablock_root[self.data['name']]
|
||||||
self._construct(data=self.data)) # If it doesn't exist create it
|
except Exception:
|
||||||
|
if construct:
|
||||||
|
name = self.data.get('name')
|
||||||
|
logging.debug(f"Constructing {name}")
|
||||||
|
datablock_ref = self._construct(data=self.data)
|
||||||
|
|
||||||
if datablock_ref:
|
if datablock_ref is not None:
|
||||||
setattr(datablock_ref, 'uuid', self.uuid)
|
setattr(datablock_ref, 'uuid', self.uuid)
|
||||||
|
|
||||||
self.instance = datablock_ref
|
self.instance = datablock_ref
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def remove_instance(self):
|
||||||
|
"""
|
||||||
|
Remove instance from blender data
|
||||||
|
"""
|
||||||
|
assert(self.instance)
|
||||||
|
|
||||||
|
datablock_root = getattr(bpy.data, self.bl_id)
|
||||||
|
datablock_root.remove(self.instance)
|
||||||
|
|
||||||
def _dump(self, instance=None):
|
def _dump(self, instance=None):
|
||||||
dumper = Dumper()
|
dumper = Dumper()
|
||||||
@ -186,6 +220,7 @@ class BlDatablock(ReplicatedDatablock):
|
|||||||
if not self.is_library:
|
if not self.is_library:
|
||||||
dependencies.extend(self._resolve_deps_implementation())
|
dependencies.extend(self._resolve_deps_implementation())
|
||||||
|
|
||||||
|
logging.debug(f"{self.instance.name} dependencies: {dependencies}")
|
||||||
return dependencies
|
return dependencies
|
||||||
|
|
||||||
def _resolve_deps_implementation(self):
|
def _resolve_deps_implementation(self):
|
||||||
|
143
multi_user/bl_types/bl_file.py
Normal file
@ -0,0 +1,143 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
import mathutils
|
||||||
|
from replication.constants import DIFF_BINARY, UP
|
||||||
|
from replication.data import ReplicatedDatablock
|
||||||
|
|
||||||
|
from .. import utils
|
||||||
|
from .dump_anything import Dumper, Loader
|
||||||
|
|
||||||
|
|
||||||
|
def get_filepath(filename):
|
||||||
|
"""
|
||||||
|
Construct the local filepath
|
||||||
|
"""
|
||||||
|
return str(Path(
|
||||||
|
utils.get_preferences().cache_directory,
|
||||||
|
filename
|
||||||
|
))
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_unpacked(datablock):
|
||||||
|
if datablock.packed_file:
|
||||||
|
logging.info(f"Unpacking {datablock.name}")
|
||||||
|
|
||||||
|
filename = Path(bpy.path.abspath(datablock.filepath)).name
|
||||||
|
datablock.filepath = get_filepath(filename)
|
||||||
|
|
||||||
|
datablock.unpack(method="WRITE_ORIGINAL")
|
||||||
|
|
||||||
|
|
||||||
|
class BlFile(ReplicatedDatablock):
|
||||||
|
bl_id = 'file'
|
||||||
|
bl_name = "file"
|
||||||
|
bl_class = Path
|
||||||
|
bl_delay_refresh = 0
|
||||||
|
bl_delay_apply = 1
|
||||||
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
|
bl_icon = 'FILE'
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
self.instance = kwargs.get('instance', None)
|
||||||
|
|
||||||
|
if self.instance and not self.instance.exists():
|
||||||
|
raise FileNotFoundError(str(self.instance))
|
||||||
|
|
||||||
|
self.preferences = utils.get_preferences()
|
||||||
|
self.diff_method = DIFF_BINARY
|
||||||
|
|
||||||
|
def resolve(self):
|
||||||
|
if self.data:
|
||||||
|
self.instance = Path(get_filepath(self.data['name']))
|
||||||
|
|
||||||
|
if not self.instance.exists():
|
||||||
|
logging.debug("File don't exist, loading it.")
|
||||||
|
self._load(self.data, self.instance)
|
||||||
|
|
||||||
|
def push(self, socket, identity=None):
|
||||||
|
super().push(socket, identity=None)
|
||||||
|
|
||||||
|
if self.preferences.clear_memory_filecache:
|
||||||
|
del self.data['file']
|
||||||
|
|
||||||
|
def _dump(self, instance=None):
|
||||||
|
"""
|
||||||
|
Read the file and return a dict as:
|
||||||
|
{
|
||||||
|
name : filename
|
||||||
|
extension :
|
||||||
|
file: file content
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
logging.info(f"Extracting file metadata")
|
||||||
|
|
||||||
|
data = {
|
||||||
|
'name': self.instance.name,
|
||||||
|
}
|
||||||
|
|
||||||
|
logging.info(
|
||||||
|
f"Reading {self.instance.name} content: {self.instance.stat().st_size} bytes")
|
||||||
|
|
||||||
|
try:
|
||||||
|
file = open(self.instance, "rb")
|
||||||
|
data['file'] = file.read()
|
||||||
|
|
||||||
|
file.close()
|
||||||
|
except IOError:
|
||||||
|
logging.warning(f"{self.instance} doesn't exist, skipping")
|
||||||
|
else:
|
||||||
|
file.close()
|
||||||
|
|
||||||
|
return data
|
||||||
|
|
||||||
|
def _load(self, data, target):
|
||||||
|
"""
|
||||||
|
Writing the file
|
||||||
|
"""
|
||||||
|
# TODO: check for empty data
|
||||||
|
|
||||||
|
if target.exists() and not self.diff():
|
||||||
|
logging.info(f"{data['name']} already on the disk, skipping.")
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
file = open(target, "wb")
|
||||||
|
file.write(data['file'])
|
||||||
|
|
||||||
|
if self.preferences.clear_memory_filecache:
|
||||||
|
del self.data['file']
|
||||||
|
except IOError:
|
||||||
|
logging.warning(f"{target} doesn't exist, skipping")
|
||||||
|
else:
|
||||||
|
file.close()
|
||||||
|
|
||||||
|
def diff(self):
|
||||||
|
if self.preferences.clear_memory_filecache:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
memory_size = sys.getsizeof(self.data['file'])-33
|
||||||
|
disk_size = self.instance.stat().st_size
|
||||||
|
return memory_size == disk_size
|
74
multi_user/bl_types/bl_font.py
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
|
||||||
|
from .bl_datablock import BlDatablock
|
||||||
|
from .bl_file import get_filepath, ensure_unpacked
|
||||||
|
from .dump_anything import Dumper, Loader
|
||||||
|
|
||||||
|
|
||||||
|
class BlFont(BlDatablock):
|
||||||
|
bl_id = "fonts"
|
||||||
|
bl_class = bpy.types.VectorFont
|
||||||
|
bl_delay_refresh = 1
|
||||||
|
bl_delay_apply = 1
|
||||||
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
|
bl_icon = 'FILE_FONT'
|
||||||
|
|
||||||
|
def _construct(self, data):
|
||||||
|
filename = data.get('filename')
|
||||||
|
|
||||||
|
if filename == '<builtin>':
|
||||||
|
return bpy.data.fonts.load(filename)
|
||||||
|
else:
|
||||||
|
return bpy.data.fonts.load(get_filepath(filename))
|
||||||
|
|
||||||
|
def _load(self, data, target):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def _dump(self, instance=None):
|
||||||
|
if instance.filepath == '<builtin>':
|
||||||
|
filename = '<builtin>'
|
||||||
|
else:
|
||||||
|
filename = Path(instance.filepath).name
|
||||||
|
|
||||||
|
if not filename:
|
||||||
|
raise FileExistsError(instance.filepath)
|
||||||
|
|
||||||
|
return {
|
||||||
|
'filename': filename,
|
||||||
|
'name': instance.name
|
||||||
|
}
|
||||||
|
|
||||||
|
def diff(self):
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _resolve_deps_implementation(self):
|
||||||
|
deps = []
|
||||||
|
if self.instance.filepath and self.instance.filepath != '<builtin>':
|
||||||
|
ensure_unpacked(self.instance)
|
||||||
|
|
||||||
|
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
|
||||||
|
|
||||||
|
return deps
|
@ -218,6 +218,7 @@ class BlGpencil(BlDatablock):
|
|||||||
bl_delay_refresh = 2
|
bl_delay_refresh = 2
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
bl_icon = 'GREASEPENCIL'
|
bl_icon = 'GREASEPENCIL'
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
|
@ -16,44 +16,45 @@
|
|||||||
# ##### END GPL LICENSE BLOCK #####
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
import mathutils
|
import mathutils
|
||||||
import os
|
|
||||||
import logging
|
|
||||||
from .. import utils
|
from .. import utils
|
||||||
from .dump_anything import Loader, Dumper
|
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock
|
||||||
|
from .dump_anything import Dumper, Loader
|
||||||
|
from .bl_file import get_filepath, ensure_unpacked
|
||||||
|
|
||||||
def dump_image(image):
|
format_to_ext = {
|
||||||
pixels = None
|
'BMP': 'bmp',
|
||||||
if image.source == "GENERATED" or image.packed_file is not None:
|
'IRIS': 'sgi',
|
||||||
prefs = utils.get_preferences()
|
'PNG': 'png',
|
||||||
img_name = f"{image.name}.png"
|
'JPEG': 'jpg',
|
||||||
|
'JPEG2000': 'jp2',
|
||||||
|
'TARGA': 'tga',
|
||||||
|
'TARGA_RAW': 'tga',
|
||||||
|
'CINEON': 'cin',
|
||||||
|
'DPX': 'dpx',
|
||||||
|
'OPEN_EXR_MULTILAYER': 'exr',
|
||||||
|
'OPEN_EXR': 'exr',
|
||||||
|
'HDR': 'hdr',
|
||||||
|
'TIFF': 'tiff',
|
||||||
|
'AVI_JPEG': 'avi',
|
||||||
|
'AVI_RAW': 'avi',
|
||||||
|
'FFMPEG': 'mpeg',
|
||||||
|
}
|
||||||
|
|
||||||
# Cache the image on the disk
|
|
||||||
image.filepath_raw = os.path.join(prefs.cache_directory, img_name)
|
|
||||||
os.makedirs(prefs.cache_directory, exist_ok=True)
|
|
||||||
image.file_format = "PNG"
|
|
||||||
image.save()
|
|
||||||
|
|
||||||
if image.source == "FILE":
|
|
||||||
image_path = bpy.path.abspath(image.filepath_raw)
|
|
||||||
image_directory = os.path.dirname(image_path)
|
|
||||||
os.makedirs(image_directory, exist_ok=True)
|
|
||||||
image.save()
|
|
||||||
file = open(image_path, "rb")
|
|
||||||
pixels = file.read()
|
|
||||||
file.close()
|
|
||||||
else:
|
|
||||||
raise ValueError()
|
|
||||||
return pixels
|
|
||||||
|
|
||||||
class BlImage(BlDatablock):
|
class BlImage(BlDatablock):
|
||||||
bl_id = "images"
|
bl_id = "images"
|
||||||
bl_class = bpy.types.Image
|
bl_class = bpy.types.Image
|
||||||
bl_delay_refresh = 0
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = False
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
bl_icon = 'IMAGE_DATA'
|
bl_icon = 'IMAGE_DATA'
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
@ -64,26 +65,23 @@ class BlImage(BlDatablock):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def _load(self, data, target):
|
def _load(self, data, target):
|
||||||
image = target
|
loader = Loader()
|
||||||
prefs = utils.get_preferences()
|
loader.load(data, target)
|
||||||
|
|
||||||
img_name = f"{image.name}.png"
|
target.source = 'FILE'
|
||||||
|
target.filepath_raw = get_filepath(data['filename'])
|
||||||
img_path = os.path.join(prefs.cache_directory,img_name)
|
target.colorspace_settings.name = data["colorspace_settings"]["name"]
|
||||||
os.makedirs(prefs.cache_directory, exist_ok=True)
|
|
||||||
file = open(img_path, 'wb')
|
|
||||||
file.write(data["pixels"])
|
|
||||||
file.close()
|
|
||||||
|
|
||||||
image.source = 'FILE'
|
|
||||||
image.filepath = img_path
|
|
||||||
image.colorspace_settings.name = data["colorspace_settings"]["name"]
|
|
||||||
|
|
||||||
|
|
||||||
def _dump(self, instance=None):
|
def _dump(self, instance=None):
|
||||||
assert(instance)
|
assert(instance)
|
||||||
data = {}
|
|
||||||
data['pixels'] = dump_image(instance)
|
filename = Path(instance.filepath).name
|
||||||
|
|
||||||
|
data = {
|
||||||
|
"filename": filename
|
||||||
|
}
|
||||||
|
|
||||||
dumper = Dumper()
|
dumper = Dumper()
|
||||||
dumper.depth = 2
|
dumper.depth = 2
|
||||||
dumper.include_filter = [
|
dumper.include_filter = [
|
||||||
@ -92,14 +90,34 @@ class BlImage(BlDatablock):
|
|||||||
'height',
|
'height',
|
||||||
'alpha',
|
'alpha',
|
||||||
'float_buffer',
|
'float_buffer',
|
||||||
'filepath',
|
'alpha_mode',
|
||||||
'source',
|
|
||||||
'colorspace_settings']
|
'colorspace_settings']
|
||||||
data.update(dumper.dump(instance))
|
data.update(dumper.dump(instance))
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def diff(self):
|
def diff(self):
|
||||||
|
if self.instance and (self.instance.name != self.data['name']):
|
||||||
|
return True
|
||||||
|
else:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
def _resolve_deps_implementation(self):
|
||||||
|
deps = []
|
||||||
|
if self.instance.filepath:
|
||||||
|
|
||||||
|
if self.instance.packed_file:
|
||||||
|
filename = Path(bpy.path.abspath(self.instance.filepath)).name
|
||||||
|
self.instance.filepath_raw = get_filepath(filename)
|
||||||
|
self.instance.save()
|
||||||
|
# An image can't be unpacked to the modified path
|
||||||
|
# TODO: make a bug report
|
||||||
|
self.instance.unpack(method="REMOVE")
|
||||||
|
|
||||||
|
elif self.instance.source == "GENERATED":
|
||||||
|
filename = f"{self.instance.name}.png"
|
||||||
|
self.instance.filepath = get_filepath(filename)
|
||||||
|
self.instance.save()
|
||||||
|
|
||||||
|
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
|
||||||
|
|
||||||
|
return deps
|
||||||
|
@ -21,7 +21,7 @@ import mathutils
|
|||||||
|
|
||||||
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
|
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock
|
||||||
from ..libs.replication.replication.exception import ContextError
|
from replication.exception import ContextError
|
||||||
|
|
||||||
POINT = ['co', 'weight_softbody', 'co_deform']
|
POINT = ['co', 'weight_softbody', 'co_deform']
|
||||||
|
|
||||||
@ -32,6 +32,7 @@ class BlLattice(BlDatablock):
|
|||||||
bl_delay_refresh = 1
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
bl_icon = 'LATTICE_DATA'
|
bl_icon = 'LATTICE_DATA'
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
|
@ -29,6 +29,7 @@ class BlLibrary(BlDatablock):
|
|||||||
bl_delay_refresh = 1
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
bl_icon = 'LIBRARY_DATA_DIRECT'
|
bl_icon = 'LIBRARY_DATA_DIRECT'
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
|
@ -29,6 +29,7 @@ class BlLight(BlDatablock):
|
|||||||
bl_delay_refresh = 1
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
bl_icon = 'LIGHT_DATA'
|
bl_icon = 'LIGHT_DATA'
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
|
@ -30,6 +30,7 @@ class BlLightprobe(BlDatablock):
|
|||||||
bl_delay_refresh = 1
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
bl_icon = 'LIGHTPROBE_GRID'
|
bl_icon = 'LIGHTPROBE_GRID'
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
|
@ -19,10 +19,12 @@
|
|||||||
import bpy
|
import bpy
|
||||||
import mathutils
|
import mathutils
|
||||||
import logging
|
import logging
|
||||||
|
import re
|
||||||
|
|
||||||
from .. import utils
|
|
||||||
from .dump_anything import Loader, Dumper
|
from .dump_anything import Loader, Dumper
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock, get_datablock_from_uuid
|
||||||
|
|
||||||
|
NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
|
||||||
|
|
||||||
|
|
||||||
def load_node(node_data, node_tree):
|
def load_node(node_data, node_tree):
|
||||||
@ -37,15 +39,26 @@ def load_node(node_data, node_tree):
|
|||||||
target_node = node_tree.nodes.new(type=node_data["bl_idname"])
|
target_node = node_tree.nodes.new(type=node_data["bl_idname"])
|
||||||
|
|
||||||
loader.load(target_node, node_data)
|
loader.load(target_node, node_data)
|
||||||
|
image_uuid = node_data.get('image_uuid', None)
|
||||||
|
|
||||||
|
if image_uuid and not target_node.image:
|
||||||
|
target_node.image = get_datablock_from_uuid(image_uuid, None)
|
||||||
|
|
||||||
for input in node_data["inputs"]:
|
for input in node_data["inputs"]:
|
||||||
if hasattr(target_node.inputs[input], "default_value"):
|
if hasattr(target_node.inputs[input], "default_value"):
|
||||||
try:
|
try:
|
||||||
target_node.inputs[input].default_value = node_data["inputs"][input]["default_value"]
|
target_node.inputs[input].default_value = node_data["inputs"][input]["default_value"]
|
||||||
except:
|
except:
|
||||||
logging.error(f"Material {input} parameter not supported, skipping")
|
logging.error(
|
||||||
|
f"Material {input} parameter not supported, skipping")
|
||||||
|
|
||||||
|
for output in node_data["outputs"]:
|
||||||
|
if hasattr(target_node.outputs[output], "default_value"):
|
||||||
|
try:
|
||||||
|
target_node.outputs[output].default_value = node_data["outputs"][output]["default_value"]
|
||||||
|
except:
|
||||||
|
logging.error(
|
||||||
|
f"Material {output} parameter not supported, skipping")
|
||||||
|
|
||||||
|
|
||||||
def load_links(links_data, node_tree):
|
def load_links(links_data, node_tree):
|
||||||
@ -58,9 +71,10 @@ def load_links(links_data, node_tree):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
for link in links_data:
|
for link in links_data:
|
||||||
input_socket = node_tree.nodes[link['to_node']].inputs[int(link['to_socket'])]
|
input_socket = node_tree.nodes[link['to_node']
|
||||||
output_socket = node_tree.nodes[link['from_node']].outputs[int(link['from_socket'])]
|
].inputs[int(link['to_socket'])]
|
||||||
|
output_socket = node_tree.nodes[link['from_node']].outputs[int(
|
||||||
|
link['from_socket'])]
|
||||||
node_tree.links.new(input_socket, output_socket)
|
node_tree.links.new(input_socket, output_socket)
|
||||||
|
|
||||||
|
|
||||||
@ -75,11 +89,15 @@ def dump_links(links):
|
|||||||
links_data = []
|
links_data = []
|
||||||
|
|
||||||
for link in links:
|
for link in links:
|
||||||
|
to_socket = NODE_SOCKET_INDEX.search(
|
||||||
|
link.to_socket.path_from_id()).group(1)
|
||||||
|
from_socket = NODE_SOCKET_INDEX.search(
|
||||||
|
link.from_socket.path_from_id()).group(1)
|
||||||
links_data.append({
|
links_data.append({
|
||||||
'to_node':link.to_node.name,
|
'to_node': link.to_node.name,
|
||||||
'to_socket':link.to_socket.path_from_id()[-2:-1],
|
'to_socket': to_socket,
|
||||||
'from_node':link.from_node.name,
|
'from_node': link.from_node.name,
|
||||||
'from_socket':link.from_socket.path_from_id()[-2:-1],
|
'from_socket': from_socket,
|
||||||
})
|
})
|
||||||
|
|
||||||
return links_data
|
return links_data
|
||||||
@ -100,6 +118,7 @@ def dump_node(node):
|
|||||||
"show_expanded",
|
"show_expanded",
|
||||||
"name_full",
|
"name_full",
|
||||||
"select",
|
"select",
|
||||||
|
"bl_label",
|
||||||
"bl_height_min",
|
"bl_height_min",
|
||||||
"bl_height_max",
|
"bl_height_max",
|
||||||
"bl_height_default",
|
"bl_height_default",
|
||||||
@ -116,7 +135,8 @@ def dump_node(node):
|
|||||||
"show_preview",
|
"show_preview",
|
||||||
"show_texture",
|
"show_texture",
|
||||||
"outputs",
|
"outputs",
|
||||||
"width_hidden"
|
"width_hidden",
|
||||||
|
"image"
|
||||||
]
|
]
|
||||||
|
|
||||||
dumped_node = node_dumper.dump(node)
|
dumped_node = node_dumper.dump(node)
|
||||||
@ -130,8 +150,17 @@ def dump_node(node):
|
|||||||
input_dumper.include_filter = ["default_value"]
|
input_dumper.include_filter = ["default_value"]
|
||||||
|
|
||||||
if hasattr(i, 'default_value'):
|
if hasattr(i, 'default_value'):
|
||||||
dumped_node['inputs'][i.name] = input_dumper.dump(
|
dumped_node['inputs'][i.name] = input_dumper.dump(i)
|
||||||
i)
|
|
||||||
|
dumped_node['outputs'] = {}
|
||||||
|
for i in node.outputs:
|
||||||
|
output_dumper = Dumper()
|
||||||
|
output_dumper.depth = 2
|
||||||
|
output_dumper.include_filter = ["default_value"]
|
||||||
|
|
||||||
|
if hasattr(i, 'default_value'):
|
||||||
|
dumped_node['outputs'][i.name] = output_dumper.dump(i)
|
||||||
|
|
||||||
if hasattr(node, 'color_ramp'):
|
if hasattr(node, 'color_ramp'):
|
||||||
ramp_dumper = Dumper()
|
ramp_dumper = Dumper()
|
||||||
ramp_dumper.depth = 4
|
ramp_dumper.depth = 4
|
||||||
@ -151,16 +180,24 @@ def dump_node(node):
|
|||||||
'location'
|
'location'
|
||||||
]
|
]
|
||||||
dumped_node['mapping'] = curve_dumper.dump(node.mapping)
|
dumped_node['mapping'] = curve_dumper.dump(node.mapping)
|
||||||
|
if hasattr(node, 'image') and getattr(node, 'image'):
|
||||||
|
dumped_node['image_uuid'] = node.image.uuid
|
||||||
return dumped_node
|
return dumped_node
|
||||||
|
|
||||||
|
|
||||||
|
def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
|
||||||
|
has_image = lambda node : (node.type in ['TEX_IMAGE', 'TEX_ENVIRONMENT'] and node.image)
|
||||||
|
|
||||||
|
return [node.image for node in node_tree.nodes if has_image(node)]
|
||||||
|
|
||||||
|
|
||||||
class BlMaterial(BlDatablock):
|
class BlMaterial(BlDatablock):
|
||||||
bl_id = "materials"
|
bl_id = "materials"
|
||||||
bl_class = bpy.types.Material
|
bl_class = bpy.types.Material
|
||||||
bl_delay_refresh = 1
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
bl_icon = 'MATERIAL_DATA'
|
bl_icon = 'MATERIAL_DATA'
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
@ -168,23 +205,22 @@ class BlMaterial(BlDatablock):
|
|||||||
|
|
||||||
def _load_implementation(self, data, target):
|
def _load_implementation(self, data, target):
|
||||||
loader = Loader()
|
loader = Loader()
|
||||||
target.name = data['name']
|
|
||||||
if data['is_grease_pencil']:
|
is_grease_pencil = data.get('is_grease_pencil')
|
||||||
|
use_nodes = data.get('use_nodes')
|
||||||
|
|
||||||
|
loader.load(target, data)
|
||||||
|
|
||||||
|
if is_grease_pencil:
|
||||||
if not target.is_grease_pencil:
|
if not target.is_grease_pencil:
|
||||||
bpy.data.materials.create_gpencil_data(target)
|
bpy.data.materials.create_gpencil_data(target)
|
||||||
|
loader.load(target.grease_pencil, data['grease_pencil'])
|
||||||
loader.load(
|
elif use_nodes:
|
||||||
target.grease_pencil, data['grease_pencil'])
|
|
||||||
|
|
||||||
|
|
||||||
if data["use_nodes"]:
|
|
||||||
if target.node_tree is None:
|
if target.node_tree is None:
|
||||||
target.use_nodes = True
|
target.use_nodes = True
|
||||||
|
|
||||||
target.node_tree.nodes.clear()
|
target.node_tree.nodes.clear()
|
||||||
|
|
||||||
loader.load(target,data)
|
|
||||||
|
|
||||||
# Load nodes
|
# Load nodes
|
||||||
for node in data["node_tree"]["nodes"]:
|
for node in data["node_tree"]["nodes"]:
|
||||||
load_node(data["node_tree"]["nodes"][node], target.node_tree)
|
load_node(data["node_tree"]["nodes"][node], target.node_tree)
|
||||||
@ -198,59 +234,71 @@ class BlMaterial(BlDatablock):
|
|||||||
assert(instance)
|
assert(instance)
|
||||||
mat_dumper = Dumper()
|
mat_dumper = Dumper()
|
||||||
mat_dumper.depth = 2
|
mat_dumper.depth = 2
|
||||||
mat_dumper.exclude_filter = [
|
mat_dumper.include_filter = [
|
||||||
"is_embed_data",
|
'name',
|
||||||
"is_evaluated",
|
'blend_method',
|
||||||
"name_full",
|
'shadow_method',
|
||||||
"bl_description",
|
'alpha_threshold',
|
||||||
"bl_icon",
|
'show_transparent_back',
|
||||||
"bl_idname",
|
'use_backface_culling',
|
||||||
"bl_label",
|
'use_screen_refraction',
|
||||||
"preview",
|
'use_sss_translucency',
|
||||||
"original",
|
'refraction_depth',
|
||||||
"uuid",
|
'preview_render_type',
|
||||||
"users",
|
'use_preview_world',
|
||||||
"alpha_threshold",
|
'pass_index',
|
||||||
"line_color",
|
'use_nodes',
|
||||||
"view_center",
|
'diffuse_color',
|
||||||
|
'specular_color',
|
||||||
|
'roughness',
|
||||||
|
'specular_intensity',
|
||||||
|
'metallic',
|
||||||
|
'line_color',
|
||||||
|
'line_priority',
|
||||||
|
'is_grease_pencil'
|
||||||
]
|
]
|
||||||
data = mat_dumper.dump(instance)
|
data = mat_dumper.dump(instance)
|
||||||
|
|
||||||
if instance.use_nodes:
|
|
||||||
nodes = {}
|
|
||||||
for node in instance.node_tree.nodes:
|
|
||||||
nodes[node.name] = dump_node(node)
|
|
||||||
data["node_tree"]['nodes'] = nodes
|
|
||||||
|
|
||||||
data["node_tree"]["links"] = dump_links(instance.node_tree.links)
|
|
||||||
|
|
||||||
if instance.is_grease_pencil:
|
if instance.is_grease_pencil:
|
||||||
gp_mat_dumper = Dumper()
|
gp_mat_dumper = Dumper()
|
||||||
gp_mat_dumper.depth = 3
|
gp_mat_dumper.depth = 3
|
||||||
|
|
||||||
gp_mat_dumper.include_filter = [
|
gp_mat_dumper.include_filter = [
|
||||||
|
'color',
|
||||||
|
'fill_color',
|
||||||
|
'mix_color',
|
||||||
|
'mix_factor',
|
||||||
|
'mix_stroke_factor',
|
||||||
|
# 'texture_angle',
|
||||||
|
# 'texture_scale',
|
||||||
|
# 'texture_offset',
|
||||||
|
'pixel_size',
|
||||||
|
'hide',
|
||||||
|
'lock',
|
||||||
|
'ghost',
|
||||||
|
# 'texture_clamp',
|
||||||
|
'flip',
|
||||||
|
'use_overlap_strokes',
|
||||||
'show_stroke',
|
'show_stroke',
|
||||||
|
'show_fill',
|
||||||
|
'alignment_mode',
|
||||||
|
'pass_index',
|
||||||
'mode',
|
'mode',
|
||||||
'stroke_style',
|
'stroke_style',
|
||||||
'color',
|
# 'stroke_image',
|
||||||
'use_overlap_strokes',
|
|
||||||
'show_fill',
|
|
||||||
'fill_style',
|
'fill_style',
|
||||||
'fill_color',
|
|
||||||
'pass_index',
|
|
||||||
'alignment_mode',
|
|
||||||
# 'fill_image',
|
|
||||||
'texture_opacity',
|
|
||||||
'mix_factor',
|
|
||||||
'texture_offset',
|
|
||||||
'texture_angle',
|
|
||||||
'texture_scale',
|
|
||||||
'texture_clamp',
|
|
||||||
'gradient_type',
|
'gradient_type',
|
||||||
'mix_color',
|
# 'fill_image',
|
||||||
'flip'
|
|
||||||
]
|
]
|
||||||
data['grease_pencil'] = gp_mat_dumper.dump(instance.grease_pencil)
|
data['grease_pencil'] = gp_mat_dumper.dump(instance.grease_pencil)
|
||||||
|
elif instance.use_nodes:
|
||||||
|
nodes = {}
|
||||||
|
data["node_tree"] = {}
|
||||||
|
for node in instance.node_tree.nodes:
|
||||||
|
nodes[node.name] = dump_node(node)
|
||||||
|
data["node_tree"]['nodes'] = nodes
|
||||||
|
|
||||||
|
data["node_tree"]["links"] = dump_links(instance.node_tree.links)
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def _resolve_deps_implementation(self):
|
def _resolve_deps_implementation(self):
|
||||||
@ -258,11 +306,8 @@ class BlMaterial(BlDatablock):
|
|||||||
deps = []
|
deps = []
|
||||||
|
|
||||||
if self.instance.use_nodes:
|
if self.instance.use_nodes:
|
||||||
for node in self.instance.node_tree.nodes:
|
deps.extend(get_node_tree_dependencies(self.instance.node_tree))
|
||||||
if node.type == 'TEX_IMAGE':
|
|
||||||
deps.append(node.image)
|
|
||||||
if self.is_library:
|
if self.is_library:
|
||||||
deps.append(self.instance.library)
|
deps.append(self.instance.library)
|
||||||
|
|
||||||
return deps
|
return deps
|
||||||
|
|
||||||
|
@ -23,10 +23,9 @@ import logging
|
|||||||
import numpy as np
|
import numpy as np
|
||||||
|
|
||||||
from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
|
from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
|
||||||
from ..libs.replication.replication.constants import DIFF_BINARY
|
from replication.constants import DIFF_BINARY
|
||||||
from ..libs.replication.replication.exception import ContextError
|
from replication.exception import ContextError
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock, get_datablock_from_uuid
|
||||||
|
|
||||||
|
|
||||||
VERTICE = ['co']
|
VERTICE = ['co']
|
||||||
|
|
||||||
@ -53,6 +52,7 @@ class BlMesh(BlDatablock):
|
|||||||
bl_delay_refresh = 2
|
bl_delay_refresh = 2
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
bl_icon = 'MESH_DATA'
|
bl_icon = 'MESH_DATA'
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
@ -70,8 +70,17 @@ class BlMesh(BlDatablock):
|
|||||||
# MATERIAL SLOTS
|
# MATERIAL SLOTS
|
||||||
target.materials.clear()
|
target.materials.clear()
|
||||||
|
|
||||||
for m in data["material_list"]:
|
for mat_uuid, mat_name in data["material_list"]:
|
||||||
target.materials.append(bpy.data.materials[m])
|
mat_ref = None
|
||||||
|
if mat_uuid is not None:
|
||||||
|
mat_ref = get_datablock_from_uuid(mat_uuid, None)
|
||||||
|
else:
|
||||||
|
mat_ref = bpy.data.materials.get(mat_name, None)
|
||||||
|
|
||||||
|
if mat_ref is None:
|
||||||
|
raise Exception("Material doesn't exist")
|
||||||
|
|
||||||
|
target.materials.append(mat_ref)
|
||||||
|
|
||||||
# CLEAR GEOMETRY
|
# CLEAR GEOMETRY
|
||||||
if target.vertices:
|
if target.vertices:
|
||||||
@ -89,6 +98,7 @@ class BlMesh(BlDatablock):
|
|||||||
np_load_collection(data["polygons"],target.polygons, POLYGON)
|
np_load_collection(data["polygons"],target.polygons, POLYGON)
|
||||||
|
|
||||||
# UV Layers
|
# UV Layers
|
||||||
|
if 'uv_layers' in data.keys():
|
||||||
for layer in data['uv_layers']:
|
for layer in data['uv_layers']:
|
||||||
if layer not in target.uv_layers:
|
if layer not in target.uv_layers:
|
||||||
target.uv_layers.new(name=layer)
|
target.uv_layers.new(name=layer)
|
||||||
@ -99,6 +109,7 @@ class BlMesh(BlDatablock):
|
|||||||
data["uv_layers"][layer]['data'])
|
data["uv_layers"][layer]['data'])
|
||||||
|
|
||||||
# Vertex color
|
# Vertex color
|
||||||
|
if 'vertex_colors' in data.keys():
|
||||||
for color_layer in data['vertex_colors']:
|
for color_layer in data['vertex_colors']:
|
||||||
if color_layer not in target.vertex_colors:
|
if color_layer not in target.vertex_colors:
|
||||||
target.vertex_colors.new(name=color_layer)
|
target.vertex_colors.new(name=color_layer)
|
||||||
@ -114,7 +125,7 @@ class BlMesh(BlDatablock):
|
|||||||
def _dump_implementation(self, data, instance=None):
|
def _dump_implementation(self, data, instance=None):
|
||||||
assert(instance)
|
assert(instance)
|
||||||
|
|
||||||
if instance.is_editmode:
|
if instance.is_editmode and not self.preferences.sync_flags.sync_during_editmode:
|
||||||
raise ContextError("Mesh is in edit mode")
|
raise ContextError("Mesh is in edit mode")
|
||||||
mesh = instance
|
mesh = instance
|
||||||
|
|
||||||
@ -147,12 +158,14 @@ class BlMesh(BlDatablock):
|
|||||||
data["loops"] = np_dump_collection(mesh.loops, LOOP)
|
data["loops"] = np_dump_collection(mesh.loops, LOOP)
|
||||||
|
|
||||||
# UV Layers
|
# UV Layers
|
||||||
|
if mesh.uv_layers:
|
||||||
data['uv_layers'] = {}
|
data['uv_layers'] = {}
|
||||||
for layer in mesh.uv_layers:
|
for layer in mesh.uv_layers:
|
||||||
data['uv_layers'][layer.name] = {}
|
data['uv_layers'][layer.name] = {}
|
||||||
data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive(layer.data, 'uv')
|
data['uv_layers'][layer.name]['data'] = np_dump_collection_primitive(layer.data, 'uv')
|
||||||
|
|
||||||
# Vertex color
|
# Vertex color
|
||||||
|
if mesh.vertex_colors:
|
||||||
data['vertex_colors'] = {}
|
data['vertex_colors'] = {}
|
||||||
for color_map in mesh.vertex_colors:
|
for color_map in mesh.vertex_colors:
|
||||||
data['vertex_colors'][color_map.name] = {}
|
data['vertex_colors'][color_map.name] = {}
|
||||||
@ -162,7 +175,7 @@ class BlMesh(BlDatablock):
|
|||||||
m_list = []
|
m_list = []
|
||||||
for material in instance.materials:
|
for material in instance.materials:
|
||||||
if material:
|
if material:
|
||||||
m_list.append(material.name)
|
m_list.append((material.uuid,material.name))
|
||||||
|
|
||||||
data['material_list'] = m_list
|
data['material_list'] = m_list
|
||||||
|
|
||||||
|
@ -68,6 +68,7 @@ class BlMetaball(BlDatablock):
|
|||||||
bl_delay_refresh = 1
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
bl_icon = 'META_BALL'
|
bl_icon = 'META_BALL'
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
|
@ -16,13 +16,15 @@
|
|||||||
# ##### END GPL LICENSE BLOCK #####
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
import bpy
|
|
||||||
import mathutils
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from .dump_anything import Loader, Dumper
|
import bpy
|
||||||
from .bl_datablock import BlDatablock
|
import mathutils
|
||||||
from ..libs.replication.replication.exception import ContextError
|
from replication.exception import ContextError
|
||||||
|
|
||||||
|
from .bl_datablock import BlDatablock, get_datablock_from_uuid
|
||||||
|
from .dump_anything import Dumper, Loader
|
||||||
|
from replication.exception import ReparentException
|
||||||
|
|
||||||
|
|
||||||
def load_pose(target_bone, data):
|
def load_pose(target_bone, data):
|
||||||
@ -31,12 +33,59 @@ def load_pose(target_bone, data):
|
|||||||
loader.load(target_bone, data)
|
loader.load(target_bone, data)
|
||||||
|
|
||||||
|
|
||||||
|
def find_data_from_name(name=None):
|
||||||
|
instance = None
|
||||||
|
if not name:
|
||||||
|
pass
|
||||||
|
elif name in bpy.data.meshes.keys():
|
||||||
|
instance = bpy.data.meshes[name]
|
||||||
|
elif name in bpy.data.lights.keys():
|
||||||
|
instance = bpy.data.lights[name]
|
||||||
|
elif name in bpy.data.cameras.keys():
|
||||||
|
instance = bpy.data.cameras[name]
|
||||||
|
elif name in bpy.data.curves.keys():
|
||||||
|
instance = bpy.data.curves[name]
|
||||||
|
elif name in bpy.data.metaballs.keys():
|
||||||
|
instance = bpy.data.metaballs[name]
|
||||||
|
elif name in bpy.data.armatures.keys():
|
||||||
|
instance = bpy.data.armatures[name]
|
||||||
|
elif name in bpy.data.grease_pencils.keys():
|
||||||
|
instance = bpy.data.grease_pencils[name]
|
||||||
|
elif name in bpy.data.curves.keys():
|
||||||
|
instance = bpy.data.curves[name]
|
||||||
|
elif name in bpy.data.lattices.keys():
|
||||||
|
instance = bpy.data.lattices[name]
|
||||||
|
elif name in bpy.data.speakers.keys():
|
||||||
|
instance = bpy.data.speakers[name]
|
||||||
|
elif name in bpy.data.lightprobes.keys():
|
||||||
|
# Only supported since 2.83
|
||||||
|
if bpy.app.version[1] >= 83:
|
||||||
|
instance = bpy.data.lightprobes[name]
|
||||||
|
else:
|
||||||
|
logging.warning(
|
||||||
|
"Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
|
||||||
|
return instance
|
||||||
|
|
||||||
|
|
||||||
|
def load_data(object, name):
|
||||||
|
logging.info("loading data")
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def _is_editmode(object: bpy.types.Object) -> bool:
|
||||||
|
child_data = getattr(object, 'data', None)
|
||||||
|
return (child_data and
|
||||||
|
hasattr(child_data, 'is_editmode') and
|
||||||
|
child_data.is_editmode)
|
||||||
|
|
||||||
|
|
||||||
class BlObject(BlDatablock):
|
class BlObject(BlDatablock):
|
||||||
bl_id = "objects"
|
bl_id = "objects"
|
||||||
bl_class = bpy.types.Object
|
bl_class = bpy.types.Object
|
||||||
bl_delay_refresh = 1
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
bl_icon = 'OBJECT_DATA'
|
bl_icon = 'OBJECT_DATA'
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
@ -52,45 +101,67 @@ class BlObject(BlDatablock):
|
|||||||
return instance
|
return instance
|
||||||
|
|
||||||
# TODO: refactoring
|
# TODO: refactoring
|
||||||
if "data" not in data:
|
object_name = data.get("name")
|
||||||
pass
|
data_uuid = data.get("data_uuid")
|
||||||
elif data["data"] in bpy.data.meshes.keys():
|
data_id = data.get("data")
|
||||||
instance = bpy.data.meshes[data["data"]]
|
|
||||||
elif data["data"] in bpy.data.lights.keys():
|
object_data = get_datablock_from_uuid(
|
||||||
instance = bpy.data.lights[data["data"]]
|
data_uuid,
|
||||||
elif data["data"] in bpy.data.cameras.keys():
|
find_data_from_name(data_id),
|
||||||
instance = bpy.data.cameras[data["data"]]
|
ignore=['images']) #TODO: use resolve_from_id
|
||||||
elif data["data"] in bpy.data.curves.keys():
|
instance = bpy.data.objects.new(object_name, object_data)
|
||||||
instance = bpy.data.curves[data["data"]]
|
|
||||||
elif data["data"] in bpy.data.metaballs.keys():
|
|
||||||
instance = bpy.data.metaballs[data["data"]]
|
|
||||||
elif data["data"] in bpy.data.armatures.keys():
|
|
||||||
instance = bpy.data.armatures[data["data"]]
|
|
||||||
elif data["data"] in bpy.data.grease_pencils.keys():
|
|
||||||
instance = bpy.data.grease_pencils[data["data"]]
|
|
||||||
elif data["data"] in bpy.data.curves.keys():
|
|
||||||
instance = bpy.data.curves[data["data"]]
|
|
||||||
elif data["data"] in bpy.data.lattices.keys():
|
|
||||||
instance = bpy.data.lattices[data["data"]]
|
|
||||||
elif data["data"] in bpy.data.speakers.keys():
|
|
||||||
instance = bpy.data.speakers[data["data"]]
|
|
||||||
elif data["data"] in bpy.data.lightprobes.keys():
|
|
||||||
# Only supported since 2.83
|
|
||||||
if bpy.app.version[1] >= 83:
|
|
||||||
instance = bpy.data.lightprobes[data["data"]]
|
|
||||||
else:
|
|
||||||
logging.warning(
|
|
||||||
"Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
|
|
||||||
instance = bpy.data.objects.new(data["name"], instance)
|
|
||||||
instance.uuid = self.uuid
|
instance.uuid = self.uuid
|
||||||
|
|
||||||
return instance
|
return instance
|
||||||
|
|
||||||
def _load_implementation(self, data, target):
|
def _load_implementation(self, data, target):
|
||||||
# Load transformation data
|
|
||||||
loader = Loader()
|
loader = Loader()
|
||||||
|
|
||||||
|
data_uuid = data.get("data_uuid")
|
||||||
|
data_id = data.get("data")
|
||||||
|
|
||||||
|
if target.type != data['type']:
|
||||||
|
raise ReparentException()
|
||||||
|
elif target.data and (target.data.name != data_id):
|
||||||
|
target.data = get_datablock_from_uuid(data_uuid, find_data_from_name(data_id), ignore=['images'])
|
||||||
|
|
||||||
|
# vertex groups
|
||||||
|
if 'vertex_groups' in data:
|
||||||
|
target.vertex_groups.clear()
|
||||||
|
for vg in data['vertex_groups']:
|
||||||
|
vertex_group=target.vertex_groups.new(name = vg['name'])
|
||||||
|
point_attr='vertices' if 'vertices' in vg else 'points'
|
||||||
|
for vert in vg[point_attr]:
|
||||||
|
vertex_group.add(
|
||||||
|
[vert['index']], vert['weight'], 'REPLACE')
|
||||||
|
|
||||||
|
# SHAPE KEYS
|
||||||
|
if 'shape_keys' in data:
|
||||||
|
target.shape_key_clear()
|
||||||
|
|
||||||
|
object_data=target.data
|
||||||
|
|
||||||
|
# Create keys and load vertices coords
|
||||||
|
for key_block in data['shape_keys']['key_blocks']:
|
||||||
|
key_data=data['shape_keys']['key_blocks'][key_block]
|
||||||
|
target.shape_key_add(name = key_block)
|
||||||
|
|
||||||
|
loader.load(
|
||||||
|
target.data.shape_keys.key_blocks[key_block], key_data)
|
||||||
|
for vert in key_data['data']:
|
||||||
|
target.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
|
||||||
|
|
||||||
|
# Load relative key after all
|
||||||
|
for key_block in data['shape_keys']['key_blocks']:
|
||||||
|
reference = data['shape_keys']['key_blocks'][key_block]['relative_key']
|
||||||
|
|
||||||
|
target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference]
|
||||||
|
|
||||||
|
# Load transformation data
|
||||||
loader.load(target, data)
|
loader.load(target, data)
|
||||||
|
|
||||||
|
loader.load(target.display, data['display'])
|
||||||
|
|
||||||
# Pose
|
# Pose
|
||||||
if 'pose' in data:
|
if 'pose' in data:
|
||||||
if not target.pose:
|
if not target.pose:
|
||||||
@ -114,50 +185,24 @@ class BlObject(BlDatablock):
|
|||||||
if 'constraints' in bone_data.keys():
|
if 'constraints' in bone_data.keys():
|
||||||
loader.load(target_bone, bone_data['constraints'])
|
loader.load(target_bone, bone_data['constraints'])
|
||||||
|
|
||||||
|
|
||||||
load_pose(target_bone, bone_data)
|
load_pose(target_bone, bone_data)
|
||||||
|
|
||||||
if 'bone_index' in bone_data.keys():
|
if 'bone_index' in bone_data.keys():
|
||||||
target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']]
|
target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']]
|
||||||
|
|
||||||
# vertex groups
|
# TODO: find another way...
|
||||||
if 'vertex_groups' in data:
|
if target.type == 'EMPTY':
|
||||||
target.vertex_groups.clear()
|
img_uuid = data.get('data_uuid')
|
||||||
for vg in data['vertex_groups']:
|
if target.data is None and img_uuid:
|
||||||
vertex_group = target.vertex_groups.new(name=vg['name'])
|
target.data = get_datablock_from_uuid(img_uuid, None)#bpy.data.images.get(img_key, None)
|
||||||
point_attr = 'vertices' if 'vertices' in vg else 'points'
|
|
||||||
for vert in vg[point_attr]:
|
|
||||||
vertex_group.add(
|
|
||||||
[vert['index']], vert['weight'], 'REPLACE')
|
|
||||||
|
|
||||||
# SHAPE KEYS
|
|
||||||
if 'shape_keys' in data:
|
|
||||||
target.shape_key_clear()
|
|
||||||
|
|
||||||
object_data = target.data
|
|
||||||
|
|
||||||
# Create keys and load vertices coords
|
|
||||||
for key_block in data['shape_keys']['key_blocks']:
|
|
||||||
key_data = data['shape_keys']['key_blocks'][key_block]
|
|
||||||
target.shape_key_add(name=key_block)
|
|
||||||
|
|
||||||
loader.load(
|
|
||||||
target.data.shape_keys.key_blocks[key_block], key_data)
|
|
||||||
for vert in key_data['data']:
|
|
||||||
target.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
|
|
||||||
|
|
||||||
# Load relative key after all
|
|
||||||
for key_block in data['shape_keys']['key_blocks']:
|
|
||||||
reference = data['shape_keys']['key_blocks'][key_block]['relative_key']
|
|
||||||
|
|
||||||
target.data.shape_keys.key_blocks[key_block].relative_key = target.data.shape_keys.key_blocks[reference]
|
|
||||||
|
|
||||||
def _dump_implementation(self, data, instance=None):
|
def _dump_implementation(self, data, instance=None):
|
||||||
assert(instance)
|
assert(instance)
|
||||||
|
|
||||||
child_data = getattr(instance, 'data', None)
|
if _is_editmode(instance):
|
||||||
|
if self.preferences.sync_flags.sync_during_editmode:
|
||||||
if child_data and hasattr(child_data, 'is_editmode') and child_data.is_editmode:
|
instance.update_from_editmode()
|
||||||
|
else:
|
||||||
raise ContextError("Object is in edit-mode.")
|
raise ContextError("Object is in edit-mode.")
|
||||||
|
|
||||||
dumper = Dumper()
|
dumper = Dumper()
|
||||||
@ -171,28 +216,55 @@ class BlObject(BlDatablock):
|
|||||||
"library",
|
"library",
|
||||||
"empty_display_type",
|
"empty_display_type",
|
||||||
"empty_display_size",
|
"empty_display_size",
|
||||||
|
"empty_image_offset",
|
||||||
|
"empty_image_depth",
|
||||||
|
"empty_image_side",
|
||||||
|
"show_empty_image_orthographic",
|
||||||
|
"show_empty_image_perspective",
|
||||||
|
"show_empty_image_only_axis_aligned",
|
||||||
|
"use_empty_image_alpha",
|
||||||
|
"color",
|
||||||
"instance_collection",
|
"instance_collection",
|
||||||
"instance_type",
|
"instance_type",
|
||||||
"location",
|
"location",
|
||||||
"scale",
|
"scale",
|
||||||
|
'lock_location',
|
||||||
|
'lock_rotation',
|
||||||
|
'lock_scale',
|
||||||
|
'hide_render',
|
||||||
|
'display_type',
|
||||||
|
'display_bounds_type',
|
||||||
|
'show_bounds',
|
||||||
|
'show_name',
|
||||||
|
'show_axis',
|
||||||
|
'show_wire',
|
||||||
|
'show_all_edges',
|
||||||
|
'show_texture_space',
|
||||||
|
'show_in_front',
|
||||||
|
'type',
|
||||||
'rotation_quaternion' if instance.rotation_mode == 'QUATERNION' else 'rotation_euler',
|
'rotation_quaternion' if instance.rotation_mode == 'QUATERNION' else 'rotation_euler',
|
||||||
]
|
]
|
||||||
|
|
||||||
data = dumper.dump(instance)
|
data = dumper.dump(instance)
|
||||||
|
|
||||||
|
dumper.include_filter = [
|
||||||
|
'show_shadows',
|
||||||
|
]
|
||||||
|
data['display'] = dumper.dump(instance.display)
|
||||||
|
|
||||||
|
data['data_uuid'] = getattr(instance.data, 'uuid', None)
|
||||||
if self.is_library:
|
if self.is_library:
|
||||||
return data
|
return data
|
||||||
|
|
||||||
# MODIFIERS
|
# MODIFIERS
|
||||||
if hasattr(instance, 'modifiers'):
|
if hasattr(instance, 'modifiers'):
|
||||||
dumper.include_filter = None
|
dumper.include_filter = None
|
||||||
dumper.depth = 2
|
dumper.depth = 1
|
||||||
data["modifiers"] = {}
|
data["modifiers"] = {}
|
||||||
for index, modifier in enumerate(instance.modifiers):
|
for index, modifier in enumerate(instance.modifiers):
|
||||||
data["modifiers"][modifier.name] = dumper.dump(modifier)
|
data["modifiers"][modifier.name] = dumper.dump(modifier)
|
||||||
|
|
||||||
# CONSTRAINTS
|
# CONSTRAINTS
|
||||||
# OBJECT
|
|
||||||
if hasattr(instance, 'constraints'):
|
if hasattr(instance, 'constraints'):
|
||||||
dumper.depth = 3
|
dumper.depth = 3
|
||||||
data["constraints"] = dumper.dump(instance.constraints)
|
data["constraints"] = dumper.dump(instance.constraints)
|
||||||
@ -245,7 +317,8 @@ class BlObject(BlDatablock):
|
|||||||
|
|
||||||
# VERTEx GROUP
|
# VERTEx GROUP
|
||||||
if len(instance.vertex_groups) > 0:
|
if len(instance.vertex_groups) > 0:
|
||||||
points_attr = 'vertices' if isinstance(instance.data, bpy.types.Mesh) else 'points'
|
points_attr = 'vertices' if isinstance(
|
||||||
|
instance.data, bpy.types.Mesh) else 'points'
|
||||||
vg_data = []
|
vg_data = []
|
||||||
for vg in instance.vertex_groups:
|
for vg in instance.vertex_groups:
|
||||||
vg_idx = vg.index
|
vg_idx = vg.index
|
||||||
@ -315,4 +388,3 @@ class BlObject(BlDatablock):
|
|||||||
deps.append(self.instance.instance_collection)
|
deps.append(self.instance.instance_collection)
|
||||||
|
|
||||||
return deps
|
return deps
|
||||||
|
|
||||||
|
@ -21,8 +21,245 @@ import mathutils
|
|||||||
|
|
||||||
from .dump_anything import Loader, Dumper
|
from .dump_anything import Loader, Dumper
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock
|
||||||
|
from .bl_collection import dump_collection_children, dump_collection_objects, load_collection_childrens, load_collection_objects
|
||||||
|
from replication.constants import (DIFF_JSON, MODIFIED)
|
||||||
|
from deepdiff import DeepDiff
|
||||||
|
import logging
|
||||||
|
|
||||||
from ..utils import get_preferences
|
RENDER_SETTINGS = [
|
||||||
|
'dither_intensity',
|
||||||
|
'engine',
|
||||||
|
'film_transparent',
|
||||||
|
'filter_size',
|
||||||
|
'fps',
|
||||||
|
'fps_base',
|
||||||
|
'frame_map_new',
|
||||||
|
'frame_map_old',
|
||||||
|
'hair_subdiv',
|
||||||
|
'hair_type',
|
||||||
|
'line_thickness',
|
||||||
|
'line_thickness_mode',
|
||||||
|
'metadata_input',
|
||||||
|
'motion_blur_shutter',
|
||||||
|
'pixel_aspect_x',
|
||||||
|
'pixel_aspect_y',
|
||||||
|
'preview_pixel_size',
|
||||||
|
'preview_start_resolution',
|
||||||
|
'resolution_percentage',
|
||||||
|
'resolution_x',
|
||||||
|
'resolution_y',
|
||||||
|
'sequencer_gl_preview',
|
||||||
|
'use_bake_clear',
|
||||||
|
'use_bake_lores_mesh',
|
||||||
|
'use_bake_multires',
|
||||||
|
'use_bake_selected_to_active',
|
||||||
|
'use_bake_user_scale',
|
||||||
|
'use_border',
|
||||||
|
'use_compositing',
|
||||||
|
'use_crop_to_border',
|
||||||
|
'use_file_extension',
|
||||||
|
'use_freestyle',
|
||||||
|
'use_full_sample',
|
||||||
|
'use_high_quality_normals',
|
||||||
|
'use_lock_interface',
|
||||||
|
'use_motion_blur',
|
||||||
|
'use_multiview',
|
||||||
|
'use_sequencer',
|
||||||
|
'use_sequencer_override_scene_strip',
|
||||||
|
'use_single_layer',
|
||||||
|
'views_format',
|
||||||
|
]
|
||||||
|
|
||||||
|
EVEE_SETTINGS = [
|
||||||
|
'gi_diffuse_bounces',
|
||||||
|
'gi_cubemap_resolution',
|
||||||
|
'gi_visibility_resolution',
|
||||||
|
'gi_irradiance_smoothing',
|
||||||
|
'gi_glossy_clamp',
|
||||||
|
'gi_filter_quality',
|
||||||
|
'gi_show_irradiance',
|
||||||
|
'gi_show_cubemaps',
|
||||||
|
'gi_irradiance_display_size',
|
||||||
|
'gi_cubemap_display_size',
|
||||||
|
'gi_auto_bake',
|
||||||
|
'taa_samples',
|
||||||
|
'taa_render_samples',
|
||||||
|
'use_taa_reprojection',
|
||||||
|
'sss_samples',
|
||||||
|
'sss_jitter_threshold',
|
||||||
|
'use_ssr',
|
||||||
|
'use_ssr_refraction',
|
||||||
|
'use_ssr_halfres',
|
||||||
|
'ssr_quality',
|
||||||
|
'ssr_max_roughness',
|
||||||
|
'ssr_thickness',
|
||||||
|
'ssr_border_fade',
|
||||||
|
'ssr_firefly_fac',
|
||||||
|
'volumetric_start',
|
||||||
|
'volumetric_end',
|
||||||
|
'volumetric_tile_size',
|
||||||
|
'volumetric_samples',
|
||||||
|
'volumetric_sample_distribution',
|
||||||
|
'use_volumetric_lights',
|
||||||
|
'volumetric_light_clamp',
|
||||||
|
'use_volumetric_shadows',
|
||||||
|
'volumetric_shadow_samples',
|
||||||
|
'use_gtao',
|
||||||
|
'use_gtao_bent_normals',
|
||||||
|
'use_gtao_bounce',
|
||||||
|
'gtao_factor',
|
||||||
|
'gtao_quality',
|
||||||
|
'gtao_distance',
|
||||||
|
'bokeh_max_size',
|
||||||
|
'bokeh_threshold',
|
||||||
|
'use_bloom',
|
||||||
|
'bloom_threshold',
|
||||||
|
'bloom_color',
|
||||||
|
'bloom_knee',
|
||||||
|
'bloom_radius',
|
||||||
|
'bloom_clamp',
|
||||||
|
'bloom_intensity',
|
||||||
|
'use_motion_blur',
|
||||||
|
'motion_blur_shutter',
|
||||||
|
'motion_blur_depth_scale',
|
||||||
|
'motion_blur_max',
|
||||||
|
'motion_blur_steps',
|
||||||
|
'shadow_cube_size',
|
||||||
|
'shadow_cascade_size',
|
||||||
|
'use_shadow_high_bitdepth',
|
||||||
|
'gi_diffuse_bounces',
|
||||||
|
'gi_cubemap_resolution',
|
||||||
|
'gi_visibility_resolution',
|
||||||
|
'gi_irradiance_smoothing',
|
||||||
|
'gi_glossy_clamp',
|
||||||
|
'gi_filter_quality',
|
||||||
|
'gi_show_irradiance',
|
||||||
|
'gi_show_cubemaps',
|
||||||
|
'gi_irradiance_display_size',
|
||||||
|
'gi_cubemap_display_size',
|
||||||
|
'gi_auto_bake',
|
||||||
|
'taa_samples',
|
||||||
|
'taa_render_samples',
|
||||||
|
'use_taa_reprojection',
|
||||||
|
'sss_samples',
|
||||||
|
'sss_jitter_threshold',
|
||||||
|
'use_ssr',
|
||||||
|
'use_ssr_refraction',
|
||||||
|
'use_ssr_halfres',
|
||||||
|
'ssr_quality',
|
||||||
|
'ssr_max_roughness',
|
||||||
|
'ssr_thickness',
|
||||||
|
'ssr_border_fade',
|
||||||
|
'ssr_firefly_fac',
|
||||||
|
'volumetric_start',
|
||||||
|
'volumetric_end',
|
||||||
|
'volumetric_tile_size',
|
||||||
|
'volumetric_samples',
|
||||||
|
'volumetric_sample_distribution',
|
||||||
|
'use_volumetric_lights',
|
||||||
|
'volumetric_light_clamp',
|
||||||
|
'use_volumetric_shadows',
|
||||||
|
'volumetric_shadow_samples',
|
||||||
|
'use_gtao',
|
||||||
|
'use_gtao_bent_normals',
|
||||||
|
'use_gtao_bounce',
|
||||||
|
'gtao_factor',
|
||||||
|
'gtao_quality',
|
||||||
|
'gtao_distance',
|
||||||
|
'bokeh_max_size',
|
||||||
|
'bokeh_threshold',
|
||||||
|
'use_bloom',
|
||||||
|
'bloom_threshold',
|
||||||
|
'bloom_color',
|
||||||
|
'bloom_knee',
|
||||||
|
'bloom_radius',
|
||||||
|
'bloom_clamp',
|
||||||
|
'bloom_intensity',
|
||||||
|
'use_motion_blur',
|
||||||
|
'motion_blur_shutter',
|
||||||
|
'motion_blur_depth_scale',
|
||||||
|
'motion_blur_max',
|
||||||
|
'motion_blur_steps',
|
||||||
|
'shadow_cube_size',
|
||||||
|
'shadow_cascade_size',
|
||||||
|
'use_shadow_high_bitdepth',
|
||||||
|
]
|
||||||
|
|
||||||
|
CYCLES_SETTINGS = [
|
||||||
|
'shading_system',
|
||||||
|
'progressive',
|
||||||
|
'use_denoising',
|
||||||
|
'denoiser',
|
||||||
|
'use_square_samples',
|
||||||
|
'samples',
|
||||||
|
'aa_samples',
|
||||||
|
'diffuse_samples',
|
||||||
|
'glossy_samples',
|
||||||
|
'transmission_samples',
|
||||||
|
'ao_samples',
|
||||||
|
'mesh_light_samples',
|
||||||
|
'subsurface_samples',
|
||||||
|
'volume_samples',
|
||||||
|
'sampling_pattern',
|
||||||
|
'use_layer_samples',
|
||||||
|
'sample_all_lights_direct',
|
||||||
|
'sample_all_lights_indirect',
|
||||||
|
'light_sampling_threshold',
|
||||||
|
'use_adaptive_sampling',
|
||||||
|
'adaptive_threshold',
|
||||||
|
'adaptive_min_samples',
|
||||||
|
'min_light_bounces',
|
||||||
|
'min_transparent_bounces',
|
||||||
|
'caustics_reflective',
|
||||||
|
'caustics_refractive',
|
||||||
|
'blur_glossy',
|
||||||
|
'max_bounces',
|
||||||
|
'diffuse_bounces',
|
||||||
|
'glossy_bounces',
|
||||||
|
'transmission_bounces',
|
||||||
|
'volume_bounces',
|
||||||
|
'transparent_max_bounces',
|
||||||
|
'volume_step_rate',
|
||||||
|
'volume_max_steps',
|
||||||
|
'dicing_rate',
|
||||||
|
'max_subdivisions',
|
||||||
|
'dicing_camera',
|
||||||
|
'offscreen_dicing_scale',
|
||||||
|
'film_exposure',
|
||||||
|
'film_transparent_glass',
|
||||||
|
'film_transparent_roughness',
|
||||||
|
'filter_type',
|
||||||
|
'pixel_filter_type',
|
||||||
|
'filter_width',
|
||||||
|
'seed',
|
||||||
|
'use_animated_seed',
|
||||||
|
'sample_clamp_direct',
|
||||||
|
'sample_clamp_indirect',
|
||||||
|
'tile_order',
|
||||||
|
'use_progressive_refine',
|
||||||
|
'bake_type',
|
||||||
|
'use_camera_cull',
|
||||||
|
'camera_cull_margin',
|
||||||
|
'use_distance_cull',
|
||||||
|
'distance_cull_margin',
|
||||||
|
'motion_blur_position',
|
||||||
|
'rolling_shutter_type',
|
||||||
|
'rolling_shutter_duration',
|
||||||
|
'texture_limit',
|
||||||
|
'texture_limit_render',
|
||||||
|
'ao_bounces',
|
||||||
|
'ao_bounces_render',
|
||||||
|
]
|
||||||
|
|
||||||
|
VIEW_SETTINGS = [
|
||||||
|
'look',
|
||||||
|
'view_transform',
|
||||||
|
'exposure',
|
||||||
|
'gamma',
|
||||||
|
'use_curve_mapping',
|
||||||
|
'white_level',
|
||||||
|
'black_level'
|
||||||
|
]
|
||||||
|
|
||||||
class BlScene(BlDatablock):
|
class BlScene(BlDatablock):
|
||||||
bl_id = "scenes"
|
bl_id = "scenes"
|
||||||
@ -30,8 +267,14 @@ class BlScene(BlDatablock):
|
|||||||
bl_delay_refresh = 1
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = True
|
||||||
bl_icon = 'SCENE_DATA'
|
bl_icon = 'SCENE_DATA'
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
self.diff_method = DIFF_JSON
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
instance = bpy.data.scenes.new(data["name"])
|
instance = bpy.data.scenes.new(data["name"])
|
||||||
return instance
|
return instance
|
||||||
@ -42,24 +285,10 @@ class BlScene(BlDatablock):
|
|||||||
loader.load(target, data)
|
loader.load(target, data)
|
||||||
|
|
||||||
# Load master collection
|
# Load master collection
|
||||||
for object in data["collection"]["objects"]:
|
load_collection_objects(
|
||||||
if object not in target.collection.objects.keys():
|
data['collection']['objects'], target.collection)
|
||||||
target.collection.objects.link(bpy.data.objects[object])
|
load_collection_childrens(
|
||||||
|
data['collection']['children'], target.collection)
|
||||||
for object in target.collection.objects.keys():
|
|
||||||
if object not in data["collection"]["objects"]:
|
|
||||||
target.collection.objects.unlink(bpy.data.objects[object])
|
|
||||||
|
|
||||||
# load collections
|
|
||||||
for collection in data["collection"]["children"]:
|
|
||||||
if collection not in target.collection.children.keys():
|
|
||||||
target.collection.children.link(
|
|
||||||
bpy.data.collections[collection])
|
|
||||||
|
|
||||||
for collection in target.collection.children.keys():
|
|
||||||
if collection not in data["collection"]["children"]:
|
|
||||||
target.collection.children.unlink(
|
|
||||||
bpy.data.collections[collection])
|
|
||||||
|
|
||||||
if 'world' in data.keys():
|
if 'world' in data.keys():
|
||||||
target.world = bpy.data.worlds[data['world']]
|
target.world = bpy.data.worlds[data['world']]
|
||||||
@ -68,18 +297,25 @@ class BlScene(BlDatablock):
|
|||||||
if 'grease_pencil' in data.keys():
|
if 'grease_pencil' in data.keys():
|
||||||
target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
|
target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
|
||||||
|
|
||||||
|
if self.preferences.sync_flags.sync_render_settings:
|
||||||
if 'eevee' in data.keys():
|
if 'eevee' in data.keys():
|
||||||
loader.load(target.eevee, data['eevee'])
|
loader.load(target.eevee, data['eevee'])
|
||||||
|
|
||||||
if 'cycles' in data.keys():
|
if 'cycles' in data.keys():
|
||||||
loader.load(target.eevee, data['cycles'])
|
loader.load(target.cycles, data['cycles'])
|
||||||
|
|
||||||
|
if 'render' in data.keys():
|
||||||
|
loader.load(target.render, data['render'])
|
||||||
|
|
||||||
if 'view_settings' in data.keys():
|
if 'view_settings' in data.keys():
|
||||||
loader.load(target.view_settings, data['view_settings'])
|
loader.load(target.view_settings, data['view_settings'])
|
||||||
if target.view_settings.use_curve_mapping:
|
if target.view_settings.use_curve_mapping and \
|
||||||
#TODO: change this ugly fix
|
'curve_mapping' in data['view_settings']:
|
||||||
target.view_settings.curve_mapping.white_level = data['view_settings']['curve_mapping']['white_level']
|
# TODO: change this ugly fix
|
||||||
target.view_settings.curve_mapping.black_level = data['view_settings']['curve_mapping']['black_level']
|
target.view_settings.curve_mapping.white_level = data[
|
||||||
|
'view_settings']['curve_mapping']['white_level']
|
||||||
|
target.view_settings.curve_mapping.black_level = data[
|
||||||
|
'view_settings']['curve_mapping']['black_level']
|
||||||
target.view_settings.curve_mapping.update()
|
target.view_settings.curve_mapping.update()
|
||||||
|
|
||||||
def _dump_implementation(self, data, instance=None):
|
def _dump_implementation(self, data, instance=None):
|
||||||
@ -92,51 +328,54 @@ class BlScene(BlDatablock):
|
|||||||
'name',
|
'name',
|
||||||
'world',
|
'world',
|
||||||
'id',
|
'id',
|
||||||
'camera',
|
|
||||||
'grease_pencil',
|
'grease_pencil',
|
||||||
|
'frame_start',
|
||||||
|
'frame_end',
|
||||||
|
'frame_step',
|
||||||
]
|
]
|
||||||
|
if self.preferences.sync_flags.sync_active_camera:
|
||||||
|
scene_dumper.include_filter.append('camera')
|
||||||
|
|
||||||
data = scene_dumper.dump(instance)
|
data = scene_dumper.dump(instance)
|
||||||
|
|
||||||
scene_dumper.depth = 3
|
scene_dumper.depth = 3
|
||||||
|
|
||||||
scene_dumper.include_filter = ['children','objects','name']
|
scene_dumper.include_filter = ['children', 'objects', 'name']
|
||||||
data['collection'] = scene_dumper.dump(instance.collection)
|
data['collection'] = {}
|
||||||
|
data['collection']['children'] = dump_collection_children(
|
||||||
|
instance.collection)
|
||||||
|
data['collection']['objects'] = dump_collection_objects(
|
||||||
|
instance.collection)
|
||||||
|
|
||||||
scene_dumper.depth = 1
|
scene_dumper.depth = 1
|
||||||
scene_dumper.include_filter = None
|
scene_dumper.include_filter = None
|
||||||
|
|
||||||
pref = get_preferences()
|
if self.preferences.sync_flags.sync_render_settings:
|
||||||
|
scene_dumper.include_filter = RENDER_SETTINGS
|
||||||
|
|
||||||
if pref.sync_flags.sync_render_settings:
|
data['render'] = scene_dumper.dump(instance.render)
|
||||||
scene_dumper.exclude_filter = [
|
|
||||||
'gi_cache_info',
|
if instance.render.engine == 'BLENDER_EEVEE':
|
||||||
'feature_set',
|
scene_dumper.include_filter = EVEE_SETTINGS
|
||||||
'debug_use_hair_bvh',
|
|
||||||
'aa_samples',
|
|
||||||
'blur_glossy',
|
|
||||||
'glossy_bounces',
|
|
||||||
'device',
|
|
||||||
'max_bounces',
|
|
||||||
'preview_aa_samples',
|
|
||||||
'preview_samples',
|
|
||||||
'sample_clamp_indirect',
|
|
||||||
'samples',
|
|
||||||
'volume_bounces'
|
|
||||||
]
|
|
||||||
data['eevee'] = scene_dumper.dump(instance.eevee)
|
data['eevee'] = scene_dumper.dump(instance.eevee)
|
||||||
|
elif instance.render.engine == 'CYCLES':
|
||||||
|
scene_dumper.include_filter = CYCLES_SETTINGS
|
||||||
data['cycles'] = scene_dumper.dump(instance.cycles)
|
data['cycles'] = scene_dumper.dump(instance.cycles)
|
||||||
|
|
||||||
|
scene_dumper.include_filter = VIEW_SETTINGS
|
||||||
data['view_settings'] = scene_dumper.dump(instance.view_settings)
|
data['view_settings'] = scene_dumper.dump(instance.view_settings)
|
||||||
|
|
||||||
if instance.view_settings.use_curve_mapping:
|
if instance.view_settings.use_curve_mapping:
|
||||||
data['view_settings']['curve_mapping'] = scene_dumper.dump(instance.view_settings.curve_mapping)
|
data['view_settings']['curve_mapping'] = scene_dumper.dump(
|
||||||
|
instance.view_settings.curve_mapping)
|
||||||
scene_dumper.depth = 5
|
scene_dumper.depth = 5
|
||||||
scene_dumper.include_filter = [
|
scene_dumper.include_filter = [
|
||||||
'curves',
|
'curves',
|
||||||
'points',
|
'points',
|
||||||
'location'
|
'location',
|
||||||
]
|
]
|
||||||
data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(instance.view_settings.curve_mapping.curves)
|
data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(
|
||||||
|
instance.view_settings.curve_mapping.curves)
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
@ -148,7 +387,7 @@ class BlScene(BlDatablock):
|
|||||||
deps.append(child)
|
deps.append(child)
|
||||||
|
|
||||||
# childs objects
|
# childs objects
|
||||||
for object in self.instance.objects:
|
for object in self.instance.collection.objects:
|
||||||
deps.append(object)
|
deps.append(object)
|
||||||
|
|
||||||
# world
|
# world
|
||||||
@ -160,3 +399,17 @@ class BlScene(BlDatablock):
|
|||||||
deps.append(self.instance.grease_pencil)
|
deps.append(self.instance.grease_pencil)
|
||||||
|
|
||||||
return deps
|
return deps
|
||||||
|
|
||||||
|
def diff(self):
|
||||||
|
exclude_path = []
|
||||||
|
|
||||||
|
if not self.preferences.sync_flags.sync_render_settings:
|
||||||
|
exclude_path.append("root['eevee']")
|
||||||
|
exclude_path.append("root['cycles']")
|
||||||
|
exclude_path.append("root['view_settings']")
|
||||||
|
exclude_path.append("root['render']")
|
||||||
|
|
||||||
|
if not self.preferences.sync_flags.sync_active_camera:
|
||||||
|
exclude_path.append("root['camera']")
|
||||||
|
|
||||||
|
return DeepDiff(self.data, self._dump(instance=self.instance), exclude_paths=exclude_path)
|
||||||
|
69
multi_user/bl_types/bl_sound.py
Normal file
@ -0,0 +1,69 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
|
||||||
|
from .bl_file import get_filepath, ensure_unpacked
|
||||||
|
from .bl_datablock import BlDatablock
|
||||||
|
from .dump_anything import Dumper, Loader
|
||||||
|
|
||||||
|
|
||||||
|
class BlSound(BlDatablock):
|
||||||
|
bl_id = "sounds"
|
||||||
|
bl_class = bpy.types.Sound
|
||||||
|
bl_delay_refresh = 1
|
||||||
|
bl_delay_apply = 1
|
||||||
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
|
bl_icon = 'SOUND'
|
||||||
|
|
||||||
|
def _construct(self, data):
|
||||||
|
filename = data.get('filename')
|
||||||
|
|
||||||
|
return bpy.data.sounds.load(get_filepath(filename))
|
||||||
|
|
||||||
|
def _load(self, data, target):
|
||||||
|
loader = Loader()
|
||||||
|
loader.load(target, data)
|
||||||
|
|
||||||
|
def diff(self):
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _dump(self, instance=None):
|
||||||
|
filename = Path(instance.filepath).name
|
||||||
|
|
||||||
|
if not filename:
|
||||||
|
raise FileExistsError(instance.filepath)
|
||||||
|
|
||||||
|
return {
|
||||||
|
'filename': filename,
|
||||||
|
'name': instance.name
|
||||||
|
}
|
||||||
|
|
||||||
|
def _resolve_deps_implementation(self):
|
||||||
|
deps = []
|
||||||
|
if self.instance.filepath and self.instance.filepath != '<builtin>':
|
||||||
|
ensure_unpacked(self.instance)
|
||||||
|
|
||||||
|
deps.append(Path(bpy.path.abspath(self.instance.filepath)))
|
||||||
|
|
||||||
|
return deps
|
@ -29,6 +29,7 @@ class BlSpeaker(BlDatablock):
|
|||||||
bl_delay_refresh = 1
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = False
|
||||||
bl_icon = 'SPEAKER'
|
bl_icon = 'SPEAKER'
|
||||||
|
|
||||||
def _load_implementation(self, data, target):
|
def _load_implementation(self, data, target):
|
||||||
@ -48,6 +49,7 @@ class BlSpeaker(BlDatablock):
|
|||||||
'volume',
|
'volume',
|
||||||
'name',
|
'name',
|
||||||
'pitch',
|
'pitch',
|
||||||
|
'sound',
|
||||||
'volume_min',
|
'volume_min',
|
||||||
'volume_max',
|
'volume_max',
|
||||||
'attenuation',
|
'attenuation',
|
||||||
@ -60,6 +62,15 @@ class BlSpeaker(BlDatablock):
|
|||||||
|
|
||||||
return dumper.dump(instance)
|
return dumper.dump(instance)
|
||||||
|
|
||||||
|
def _resolve_deps_implementation(self):
|
||||||
|
# TODO: resolve material
|
||||||
|
deps = []
|
||||||
|
|
||||||
|
sound = self.instance.sound
|
||||||
|
|
||||||
|
if sound:
|
||||||
|
deps.append(sound)
|
||||||
|
|
||||||
|
return deps
|
||||||
|
|
||||||
|
|
||||||
|
@ -21,7 +21,11 @@ import mathutils
|
|||||||
|
|
||||||
from .dump_anything import Loader, Dumper
|
from .dump_anything import Loader, Dumper
|
||||||
from .bl_datablock import BlDatablock
|
from .bl_datablock import BlDatablock
|
||||||
from .bl_material import load_links, load_node, dump_node, dump_links
|
from .bl_material import (load_links,
|
||||||
|
load_node,
|
||||||
|
dump_node,
|
||||||
|
dump_links,
|
||||||
|
get_node_tree_dependencies)
|
||||||
|
|
||||||
|
|
||||||
class BlWorld(BlDatablock):
|
class BlWorld(BlDatablock):
|
||||||
@ -30,12 +34,16 @@ class BlWorld(BlDatablock):
|
|||||||
bl_delay_refresh = 1
|
bl_delay_refresh = 1
|
||||||
bl_delay_apply = 1
|
bl_delay_apply = 1
|
||||||
bl_automatic_push = True
|
bl_automatic_push = True
|
||||||
|
bl_check_common = True
|
||||||
bl_icon = 'WORLD_DATA'
|
bl_icon = 'WORLD_DATA'
|
||||||
|
|
||||||
def _construct(self, data):
|
def _construct(self, data):
|
||||||
return bpy.data.worlds.new(data["name"])
|
return bpy.data.worlds.new(data["name"])
|
||||||
|
|
||||||
def _load_implementation(self, data, target):
|
def _load_implementation(self, data, target):
|
||||||
|
loader = Loader()
|
||||||
|
loader.load(target, data)
|
||||||
|
|
||||||
if data["use_nodes"]:
|
if data["use_nodes"]:
|
||||||
if target.node_tree is None:
|
if target.node_tree is None:
|
||||||
target.use_nodes = True
|
target.use_nodes = True
|
||||||
@ -48,26 +56,21 @@ class BlWorld(BlDatablock):
|
|||||||
# Load nodes links
|
# Load nodes links
|
||||||
target.node_tree.links.clear()
|
target.node_tree.links.clear()
|
||||||
|
|
||||||
|
|
||||||
load_links(data["node_tree"]["links"], target.node_tree)
|
load_links(data["node_tree"]["links"], target.node_tree)
|
||||||
|
|
||||||
def _dump_implementation(self, data, instance=None):
|
def _dump_implementation(self, data, instance=None):
|
||||||
assert(instance)
|
assert(instance)
|
||||||
|
|
||||||
world_dumper = Dumper()
|
world_dumper = Dumper()
|
||||||
world_dumper.depth = 2
|
world_dumper.depth = 1
|
||||||
world_dumper.exclude_filter = [
|
world_dumper.include_filter = [
|
||||||
"preview",
|
"use_nodes",
|
||||||
"original",
|
"name",
|
||||||
"uuid",
|
"color"
|
||||||
"color",
|
|
||||||
"cycles",
|
|
||||||
"light_settings",
|
|
||||||
"users",
|
|
||||||
"view_center"
|
|
||||||
]
|
]
|
||||||
data = world_dumper.dump(instance)
|
data = world_dumper.dump(instance)
|
||||||
if instance.use_nodes:
|
if instance.use_nodes:
|
||||||
|
data['node_tree'] = {}
|
||||||
nodes = {}
|
nodes = {}
|
||||||
|
|
||||||
for node in instance.node_tree.nodes:
|
for node in instance.node_tree.nodes:
|
||||||
@ -83,10 +86,7 @@ class BlWorld(BlDatablock):
|
|||||||
deps = []
|
deps = []
|
||||||
|
|
||||||
if self.instance.use_nodes:
|
if self.instance.use_nodes:
|
||||||
for node in self.instance.node_tree.nodes:
|
deps.extend(get_node_tree_dependencies(self.instance.node_tree))
|
||||||
if node.type == 'TEX_IMAGE':
|
|
||||||
deps.append(node.image)
|
|
||||||
if self.is_library:
|
if self.is_library:
|
||||||
deps.append(self.instance.library)
|
deps.append(self.instance.library)
|
||||||
return deps
|
return deps
|
||||||
|
|
||||||
|
@ -24,8 +24,8 @@ import numpy as np
|
|||||||
|
|
||||||
|
|
||||||
BPY_TO_NUMPY_TYPES = {
|
BPY_TO_NUMPY_TYPES = {
|
||||||
'FLOAT': np.float,
|
'FLOAT': np.float32,
|
||||||
'INT': np.int,
|
'INT': np.int32,
|
||||||
'BOOL': np.bool}
|
'BOOL': np.bool}
|
||||||
|
|
||||||
PRIMITIVE_TYPES = ['FLOAT', 'INT', 'BOOLEAN']
|
PRIMITIVE_TYPES = ['FLOAT', 'INT', 'BOOLEAN']
|
||||||
@ -47,7 +47,7 @@ def np_load_collection(dikt: dict, collection: bpy.types.CollectionProperty, att
|
|||||||
:type attributes: list
|
:type attributes: list
|
||||||
"""
|
"""
|
||||||
if not dikt or len(collection) == 0:
|
if not dikt or len(collection) == 0:
|
||||||
logging.warning(f'Skipping collection')
|
logging.debug(f'Skipping collection {collection}')
|
||||||
return
|
return
|
||||||
|
|
||||||
if attributes is None:
|
if attributes is None:
|
||||||
@ -115,7 +115,7 @@ def np_dump_collection_primitive(collection: bpy.types.CollectionProperty, attri
|
|||||||
:return: numpy byte buffer
|
:return: numpy byte buffer
|
||||||
"""
|
"""
|
||||||
if len(collection) == 0:
|
if len(collection) == 0:
|
||||||
logging.warning(f'Skipping empty {attribute} attribute')
|
logging.debug(f'Skipping empty {attribute} attribute')
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
attr_infos = collection[0].bl_rna.properties.get(attribute)
|
attr_infos = collection[0].bl_rna.properties.get(attribute)
|
||||||
@ -192,7 +192,7 @@ def np_load_collection_primitives(collection: bpy.types.CollectionProperty, attr
|
|||||||
:type sequence: strr
|
:type sequence: strr
|
||||||
"""
|
"""
|
||||||
if len(collection) == 0 or not sequence:
|
if len(collection) == 0 or not sequence:
|
||||||
logging.warning(f"Skipping loadin {attribute}")
|
logging.debug(f"Skipping loading {attribute}")
|
||||||
return
|
return
|
||||||
|
|
||||||
attr_infos = collection[0].bl_rna.properties.get(attribute)
|
attr_infos = collection[0].bl_rna.properties.get(attribute)
|
||||||
@ -301,7 +301,7 @@ class Dumper:
|
|||||||
self._dump_ID = (lambda x, depth: x.name, self._dump_default_as_branch)
|
self._dump_ID = (lambda x, depth: x.name, self._dump_default_as_branch)
|
||||||
self._dump_collection = (
|
self._dump_collection = (
|
||||||
self._dump_default_as_leaf, self._dump_collection_as_branch)
|
self._dump_default_as_leaf, self._dump_collection_as_branch)
|
||||||
self._dump_array = (self._dump_default_as_leaf,
|
self._dump_array = (self._dump_array_as_branch,
|
||||||
self._dump_array_as_branch)
|
self._dump_array_as_branch)
|
||||||
self._dump_matrix = (self._dump_matrix_as_leaf,
|
self._dump_matrix = (self._dump_matrix_as_leaf,
|
||||||
self._dump_matrix_as_leaf)
|
self._dump_matrix_as_leaf)
|
||||||
@ -593,6 +593,10 @@ class Loader:
|
|||||||
instance.write(bpy.data.materials.get(dump))
|
instance.write(bpy.data.materials.get(dump))
|
||||||
elif isinstance(rna_property_type, T.Collection):
|
elif isinstance(rna_property_type, T.Collection):
|
||||||
instance.write(bpy.data.collections.get(dump))
|
instance.write(bpy.data.collections.get(dump))
|
||||||
|
elif isinstance(rna_property_type, T.VectorFont):
|
||||||
|
instance.write(bpy.data.fonts.get(dump))
|
||||||
|
elif isinstance(rna_property_type, T.Sound):
|
||||||
|
instance.write(bpy.data.sounds.get(dump))
|
||||||
|
|
||||||
def _load_matrix(self, matrix, dump):
|
def _load_matrix(self, matrix, dump):
|
||||||
matrix.write(mathutils.Matrix(dump))
|
matrix.write(mathutils.Matrix(dump))
|
||||||
@ -622,11 +626,11 @@ class Loader:
|
|||||||
for k in self._ordered_keys(dump.keys()):
|
for k in self._ordered_keys(dump.keys()):
|
||||||
v = dump[k]
|
v = dump[k]
|
||||||
if not hasattr(default.read(), k):
|
if not hasattr(default.read(), k):
|
||||||
logging.debug(f"Load default, skipping {default} : {k}")
|
continue
|
||||||
try:
|
try:
|
||||||
self._load_any(default.extend(k), v)
|
self._load_any(default.extend(k), v)
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
logging.debug(f"Cannot load {k}: {err}")
|
logging.debug(f"Skipping {k}")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def match_subset_all(self):
|
def match_subset_all(self):
|
||||||
|
@ -19,21 +19,36 @@ import logging
|
|||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
|
|
||||||
from . import operators, presence, utils
|
from . import utils
|
||||||
from .libs.replication.replication.constants import (FETCHED,
|
from .presence import (renderer,
|
||||||
|
UserFrustumWidget,
|
||||||
|
UserNameWidget,
|
||||||
|
UserSelectionWidget,
|
||||||
|
refresh_3d_view,
|
||||||
|
generate_user_camera,
|
||||||
|
get_view_matrix,
|
||||||
|
refresh_sidebar_view)
|
||||||
|
from replication.constants import (FETCHED,
|
||||||
|
UP,
|
||||||
RP_COMMON,
|
RP_COMMON,
|
||||||
STATE_INITIAL,
|
STATE_INITIAL,
|
||||||
STATE_QUITTING,
|
STATE_QUITTING,
|
||||||
STATE_ACTIVE,
|
STATE_ACTIVE,
|
||||||
STATE_SYNCING,
|
STATE_SYNCING,
|
||||||
STATE_LOBBY,
|
STATE_LOBBY,
|
||||||
STATE_SRV_SYNC)
|
STATE_SRV_SYNC,
|
||||||
|
REPARENT)
|
||||||
|
|
||||||
|
from replication.interface import session
|
||||||
|
from replication.exception import NonAuthorizedOperationError
|
||||||
|
|
||||||
class Delayable():
|
class Delayable():
|
||||||
"""Delayable task interface
|
"""Delayable task interface
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.is_registered = False
|
||||||
|
|
||||||
def register(self):
|
def register(self):
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
@ -51,13 +66,21 @@ class Timer(Delayable):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, duration=1):
|
def __init__(self, duration=1):
|
||||||
|
super().__init__()
|
||||||
self._timeout = duration
|
self._timeout = duration
|
||||||
self._running = True
|
self._running = True
|
||||||
|
|
||||||
def register(self):
|
def register(self):
|
||||||
"""Register the timer into the blender timer system
|
"""Register the timer into the blender timer system
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
if not self.is_registered:
|
||||||
bpy.app.timers.register(self.main)
|
bpy.app.timers.register(self.main)
|
||||||
|
self.is_registered = True
|
||||||
|
logging.debug(f"Register {self.__class__.__name__}")
|
||||||
|
else:
|
||||||
|
logging.debug(
|
||||||
|
f"Timer {self.__class__.__name__} already registered")
|
||||||
|
|
||||||
def main(self):
|
def main(self):
|
||||||
self.execute()
|
self.execute()
|
||||||
@ -85,18 +108,29 @@ class ApplyTimer(Timer):
|
|||||||
super().__init__(timout)
|
super().__init__(timout)
|
||||||
|
|
||||||
def execute(self):
|
def execute(self):
|
||||||
client = operators.client
|
if session and session.state['STATE'] == STATE_ACTIVE:
|
||||||
if client and client.state['STATE'] == STATE_ACTIVE:
|
if self._type:
|
||||||
nodes = client.list(filter=self._type)
|
nodes = session.list(filter=self._type)
|
||||||
|
else:
|
||||||
|
nodes = session.list()
|
||||||
|
|
||||||
for node in nodes:
|
for node in nodes:
|
||||||
node_ref = client.get(uuid=node)
|
node_ref = session.get(uuid=node)
|
||||||
|
|
||||||
if node_ref.state == FETCHED:
|
if node_ref.state == FETCHED:
|
||||||
try:
|
try:
|
||||||
client.apply(node)
|
session.apply(node)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logging.error(f"Fail to apply {node_ref.uuid}: {e}")
|
logging.error(f"Fail to apply {node_ref.uuid}: {e}")
|
||||||
|
elif node_ref.state == REPARENT:
|
||||||
|
# Reload the node
|
||||||
|
node_ref.remove_instance()
|
||||||
|
node_ref.resolve()
|
||||||
|
session.apply(node)
|
||||||
|
for parent in session._graph.find_parents(node):
|
||||||
|
logging.info(f"Applying parent {parent}")
|
||||||
|
session.apply(parent, force=True)
|
||||||
|
node_ref.state = UP
|
||||||
|
|
||||||
|
|
||||||
class DynamicRightSelectTimer(Timer):
|
class DynamicRightSelectTimer(Timer):
|
||||||
@ -107,7 +141,6 @@ class DynamicRightSelectTimer(Timer):
|
|||||||
self._right_strategy = RP_COMMON
|
self._right_strategy = RP_COMMON
|
||||||
|
|
||||||
def execute(self):
|
def execute(self):
|
||||||
session = operators.client
|
|
||||||
settings = utils.get_preferences()
|
settings = utils.get_preferences()
|
||||||
|
|
||||||
if session and session.state['STATE'] == STATE_ACTIVE:
|
if session and session.state['STATE'] == STATE_ACTIVE:
|
||||||
@ -134,10 +167,14 @@ class DynamicRightSelectTimer(Timer):
|
|||||||
recursive = True
|
recursive = True
|
||||||
if node.data and 'instance_type' in node.data.keys():
|
if node.data and 'instance_type' in node.data.keys():
|
||||||
recursive = node.data['instance_type'] != 'COLLECTION'
|
recursive = node.data['instance_type'] != 'COLLECTION'
|
||||||
|
try:
|
||||||
session.change_owner(
|
session.change_owner(
|
||||||
node.uuid,
|
node.uuid,
|
||||||
RP_COMMON,
|
RP_COMMON,
|
||||||
recursive=recursive)
|
ignore_warnings=True,
|
||||||
|
affect_dependencies=recursive)
|
||||||
|
except NonAuthorizedOperationError:
|
||||||
|
logging.warning(f"Not authorized to change {node} owner")
|
||||||
|
|
||||||
# change new selection to our
|
# change new selection to our
|
||||||
for obj in obj_ours:
|
for obj in obj_ours:
|
||||||
@ -148,10 +185,14 @@ class DynamicRightSelectTimer(Timer):
|
|||||||
if node.data and 'instance_type' in node.data.keys():
|
if node.data and 'instance_type' in node.data.keys():
|
||||||
recursive = node.data['instance_type'] != 'COLLECTION'
|
recursive = node.data['instance_type'] != 'COLLECTION'
|
||||||
|
|
||||||
|
try:
|
||||||
session.change_owner(
|
session.change_owner(
|
||||||
node.uuid,
|
node.uuid,
|
||||||
settings.username,
|
settings.username,
|
||||||
recursive=recursive)
|
ignore_warnings=True,
|
||||||
|
affect_dependencies=recursive)
|
||||||
|
except NonAuthorizedOperationError:
|
||||||
|
logging.warning(f"Not authorized to change {node} owner")
|
||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
|
|
||||||
@ -170,101 +211,49 @@ class DynamicRightSelectTimer(Timer):
|
|||||||
filter_owner=settings.username)
|
filter_owner=settings.username)
|
||||||
for key in owned_keys:
|
for key in owned_keys:
|
||||||
node = session.get(uuid=key)
|
node = session.get(uuid=key)
|
||||||
|
try:
|
||||||
session.change_owner(
|
session.change_owner(
|
||||||
key,
|
key,
|
||||||
RP_COMMON,
|
RP_COMMON,
|
||||||
recursive=recursive)
|
ignore_warnings=True,
|
||||||
|
affect_dependencies=recursive)
|
||||||
|
except NonAuthorizedOperationError:
|
||||||
|
logging.warning(f"Not authorized to change {key} owner")
|
||||||
|
|
||||||
for user, user_info in session.online_users.items():
|
|
||||||
if user != settings.username:
|
|
||||||
metadata = user_info.get('metadata')
|
|
||||||
|
|
||||||
if 'selected_objects' in metadata:
|
|
||||||
# Update selectionnable objects
|
|
||||||
for obj in bpy.data.objects:
|
for obj in bpy.data.objects:
|
||||||
if obj.hide_select and obj.uuid not in metadata['selected_objects']:
|
object_uuid = getattr(obj, 'uuid', None)
|
||||||
obj.hide_select = False
|
if object_uuid:
|
||||||
elif not obj.hide_select and obj.uuid in metadata['selected_objects']:
|
is_selectable = not session.is_readonly(object_uuid)
|
||||||
obj.hide_select = True
|
if obj.hide_select != is_selectable:
|
||||||
|
obj.hide_select = is_selectable
|
||||||
|
|
||||||
class Draw(Delayable):
|
|
||||||
def __init__(self):
|
|
||||||
self._handler = None
|
|
||||||
|
|
||||||
def register(self):
|
|
||||||
self._handler = bpy.types.SpaceView3D.draw_handler_add(
|
|
||||||
self.execute, (), 'WINDOW', 'POST_VIEW')
|
|
||||||
|
|
||||||
def execute(self):
|
|
||||||
raise NotImplementedError()
|
|
||||||
|
|
||||||
def unregister(self):
|
|
||||||
try:
|
|
||||||
bpy.types.SpaceView3D.draw_handler_remove(
|
|
||||||
self._handler, "WINDOW")
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class DrawClient(Draw):
|
|
||||||
def execute(self):
|
|
||||||
session = getattr(operators, 'client', None)
|
|
||||||
renderer = getattr(presence, 'renderer', None)
|
|
||||||
prefs = utils.get_preferences()
|
|
||||||
|
|
||||||
if session and renderer and session.state['STATE'] == STATE_ACTIVE:
|
|
||||||
settings = bpy.context.window_manager.session
|
|
||||||
users = session.online_users
|
|
||||||
|
|
||||||
# Update users
|
|
||||||
for user in users.values():
|
|
||||||
metadata = user.get('metadata')
|
|
||||||
color = metadata.get('color')
|
|
||||||
scene_current = metadata.get('scene_current')
|
|
||||||
user_showable = scene_current == bpy.context.scene.name or settings.presence_show_far_user
|
|
||||||
if color and scene_current and user_showable:
|
|
||||||
if settings.presence_show_selected and 'selected_objects' in metadata.keys():
|
|
||||||
renderer.draw_client_selection(
|
|
||||||
user['id'], color, metadata['selected_objects'])
|
|
||||||
if settings.presence_show_user and 'view_corners' in metadata:
|
|
||||||
renderer.draw_client_camera(
|
|
||||||
user['id'], metadata['view_corners'], color)
|
|
||||||
if not user_showable:
|
|
||||||
# TODO: remove this when user event drivent update will be
|
|
||||||
# ready
|
|
||||||
renderer.flush_selection()
|
|
||||||
renderer.flush_users()
|
|
||||||
|
|
||||||
|
|
||||||
class ClientUpdate(Timer):
|
class ClientUpdate(Timer):
|
||||||
def __init__(self, timout=.016):
|
def __init__(self, timout=.1):
|
||||||
super().__init__(timout)
|
super().__init__(timout)
|
||||||
self.handle_quit = False
|
self.handle_quit = False
|
||||||
self.users_metadata = {}
|
self.users_metadata = {}
|
||||||
|
|
||||||
def execute(self):
|
def execute(self):
|
||||||
settings = utils.get_preferences()
|
settings = utils.get_preferences()
|
||||||
session = getattr(operators, 'client', None)
|
|
||||||
renderer = getattr(presence, 'renderer', None)
|
|
||||||
|
|
||||||
if session and renderer:
|
if session and renderer:
|
||||||
if session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]:
|
if session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]:
|
||||||
local_user = operators.client.online_users.get(settings.username)
|
local_user = session.online_users.get(
|
||||||
|
settings.username)
|
||||||
|
|
||||||
if not local_user:
|
if not local_user:
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
for username, user_data in operators.client.online_users.items():
|
for username, user_data in session.online_users.items():
|
||||||
if username != settings.username:
|
if username != settings.username:
|
||||||
cached_user_data = self.users_metadata.get(username)
|
cached_user_data = self.users_metadata.get(
|
||||||
new_user_data = operators.client.online_users[username]['metadata']
|
username)
|
||||||
|
new_user_data = session.online_users[username]['metadata']
|
||||||
|
|
||||||
if cached_user_data is None:
|
if cached_user_data is None:
|
||||||
self.users_metadata[username] = user_data['metadata']
|
self.users_metadata[username] = user_data['metadata']
|
||||||
elif 'view_matrix' in cached_user_data and 'view_matrix' in new_user_data and cached_user_data['view_matrix'] != new_user_data['view_matrix']:
|
elif 'view_matrix' in cached_user_data and 'view_matrix' in new_user_data and cached_user_data['view_matrix'] != new_user_data['view_matrix']:
|
||||||
presence.refresh_3d_view()
|
refresh_3d_view()
|
||||||
self.users_metadata[username] = user_data['metadata']
|
self.users_metadata[username] = user_data['metadata']
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
@ -273,13 +262,13 @@ class ClientUpdate(Timer):
|
|||||||
local_user_metadata = local_user.get('metadata')
|
local_user_metadata = local_user.get('metadata')
|
||||||
scene_current = bpy.context.scene.name
|
scene_current = bpy.context.scene.name
|
||||||
local_user = session.online_users.get(settings.username)
|
local_user = session.online_users.get(settings.username)
|
||||||
current_view_corners = presence.get_view_corners()
|
current_view_corners = generate_user_camera()
|
||||||
|
|
||||||
# Init client metadata
|
# Init client metadata
|
||||||
if not local_user_metadata or 'color' not in local_user_metadata.keys():
|
if not local_user_metadata or 'color' not in local_user_metadata.keys():
|
||||||
metadata = {
|
metadata = {
|
||||||
'view_corners': presence.get_view_matrix(),
|
'view_corners': get_view_matrix(),
|
||||||
'view_matrix': presence.get_view_matrix(),
|
'view_matrix': get_view_matrix(),
|
||||||
'color': (settings.client_color.r,
|
'color': (settings.client_color.r,
|
||||||
settings.client_color.g,
|
settings.client_color.g,
|
||||||
settings.client_color.b,
|
settings.client_color.b,
|
||||||
@ -296,17 +285,37 @@ class ClientUpdate(Timer):
|
|||||||
session.update_user_metadata(local_user_metadata)
|
session.update_user_metadata(local_user_metadata)
|
||||||
elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']:
|
elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']:
|
||||||
local_user_metadata['view_corners'] = current_view_corners
|
local_user_metadata['view_corners'] = current_view_corners
|
||||||
local_user_metadata['view_matrix'] = presence.get_view_matrix()
|
local_user_metadata['view_matrix'] = get_view_matrix(
|
||||||
|
)
|
||||||
session.update_user_metadata(local_user_metadata)
|
session.update_user_metadata(local_user_metadata)
|
||||||
|
|
||||||
|
|
||||||
|
class SessionStatusUpdate(Timer):
|
||||||
|
def __init__(self, timout=1):
|
||||||
|
super().__init__(timout)
|
||||||
|
|
||||||
|
def execute(self):
|
||||||
|
refresh_sidebar_view()
|
||||||
|
|
||||||
|
|
||||||
|
class SessionUserSync(Timer):
|
||||||
|
def __init__(self, timout=1):
|
||||||
|
super().__init__(timout)
|
||||||
|
self.settings = utils.get_preferences()
|
||||||
|
|
||||||
|
def execute(self):
|
||||||
|
if session and renderer:
|
||||||
# sync online users
|
# sync online users
|
||||||
session_users = operators.client.online_users
|
session_users = session.online_users
|
||||||
ui_users = bpy.context.window_manager.online_users
|
ui_users = bpy.context.window_manager.online_users
|
||||||
|
|
||||||
for index, user in enumerate(ui_users):
|
for index, user in enumerate(ui_users):
|
||||||
if user.username not in session_users.keys():
|
if user.username not in session_users.keys() and \
|
||||||
|
user.username != self.settings.username:
|
||||||
|
renderer.remove_widget(f"{user.username}_cam")
|
||||||
|
renderer.remove_widget(f"{user.username}_select")
|
||||||
|
renderer.remove_widget(f"{user.username}_name")
|
||||||
ui_users.remove(index)
|
ui_users.remove(index)
|
||||||
renderer.flush_selection()
|
|
||||||
renderer.flush_users()
|
|
||||||
break
|
break
|
||||||
|
|
||||||
for user in session_users:
|
for user in session_users:
|
||||||
@ -314,15 +323,22 @@ class ClientUpdate(Timer):
|
|||||||
new_key = ui_users.add()
|
new_key = ui_users.add()
|
||||||
new_key.name = user
|
new_key.name = user
|
||||||
new_key.username = user
|
new_key.username = user
|
||||||
elif session.state['STATE'] == STATE_QUITTING:
|
if user != self.settings.username:
|
||||||
presence.refresh_sidebar_view()
|
renderer.add_widget(
|
||||||
self.handle_quit = True
|
f"{user}_cam", UserFrustumWidget(user))
|
||||||
elif session.state['STATE'] == STATE_INITIAL and self.handle_quit:
|
renderer.add_widget(
|
||||||
self.handle_quit = False
|
f"{user}_select", UserSelectionWidget(user))
|
||||||
presence.refresh_sidebar_view()
|
renderer.add_widget(
|
||||||
|
f"{user}_name", UserNameWidget(user))
|
||||||
|
|
||||||
operators.unregister_delayables()
|
|
||||||
|
|
||||||
presence.renderer.stop()
|
class MainThreadExecutor(Timer):
|
||||||
|
def __init__(self, timout=1, execution_queue=None):
|
||||||
|
super().__init__(timout)
|
||||||
|
self.execution_queue = execution_queue
|
||||||
|
|
||||||
presence.refresh_sidebar_view()
|
def execute(self):
|
||||||
|
while not self.execution_queue.empty():
|
||||||
|
function = self.execution_queue.get()
|
||||||
|
logging.debug(f"Executing {function.__name__}")
|
||||||
|
function()
|
||||||
|
@ -23,6 +23,9 @@ import subprocess
|
|||||||
import sys
|
import sys
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import socket
|
import socket
|
||||||
|
import re
|
||||||
|
|
||||||
|
VERSION_EXPR = re.compile('\d+\.\d+\.\d+\w\d+')
|
||||||
|
|
||||||
THIRD_PARTY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
|
THIRD_PARTY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
|
||||||
DEFAULT_CACHE_DIR = os.path.join(
|
DEFAULT_CACHE_DIR = os.path.join(
|
||||||
@ -47,10 +50,29 @@ def install_pip():
|
|||||||
subprocess.run([str(PYTHON_PATH), "-m", "ensurepip"])
|
subprocess.run([str(PYTHON_PATH), "-m", "ensurepip"])
|
||||||
|
|
||||||
|
|
||||||
def install_package(name):
|
def install_package(name, version):
|
||||||
logging.debug(f"Using {PYTHON_PATH} for installation")
|
logging.info(f"installing {name} version...")
|
||||||
subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", name])
|
env = os.environ
|
||||||
|
if "PIP_REQUIRE_VIRTUALENV" in env:
|
||||||
|
# PIP_REQUIRE_VIRTUALENV is an env var to ensure pip cannot install packages outside a virtual env
|
||||||
|
# https://docs.python-guide.org/dev/pip-virtualenv/
|
||||||
|
# But since Blender's pip is outside of a virtual env, it can block our packages installation, so we unset the
|
||||||
|
# env var for the subprocess.
|
||||||
|
env = os.environ.copy()
|
||||||
|
del env["PIP_REQUIRE_VIRTUALENV"]
|
||||||
|
subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}=={version}"], env=env)
|
||||||
|
|
||||||
|
def check_package_version(name, required_version):
|
||||||
|
logging.info(f"Checking {name} version...")
|
||||||
|
out = subprocess.run([str(PYTHON_PATH), "-m", "pip", "show", name], capture_output=True)
|
||||||
|
|
||||||
|
version = VERSION_EXPR.search(out.stdout.decode())
|
||||||
|
if version and version.group() == required_version:
|
||||||
|
logging.info(f"{name} is up to date")
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
logging.info(f"{name} need an update")
|
||||||
|
return False
|
||||||
|
|
||||||
def get_ip():
|
def get_ip():
|
||||||
"""
|
"""
|
||||||
@ -78,7 +100,9 @@ def setup(dependencies, python_path):
|
|||||||
if not module_can_be_imported("pip"):
|
if not module_can_be_imported("pip"):
|
||||||
install_pip()
|
install_pip()
|
||||||
|
|
||||||
for module_name, package_name in dependencies:
|
for package_name, package_version in dependencies:
|
||||||
if not module_can_be_imported(module_name):
|
if not module_can_be_imported(package_name):
|
||||||
install_package(package_name)
|
install_package(package_name, package_version)
|
||||||
module_can_be_imported(package_name)
|
module_can_be_imported(package_name)
|
||||||
|
elif not check_package_version(package_name, package_version):
|
||||||
|
install_package(package_name, package_version)
|
||||||
|
@ -21,35 +21,81 @@ import logging
|
|||||||
import os
|
import os
|
||||||
import queue
|
import queue
|
||||||
import random
|
import random
|
||||||
|
import shutil
|
||||||
import string
|
import string
|
||||||
import time
|
import time
|
||||||
from operator import itemgetter
|
from operator import itemgetter
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from subprocess import PIPE, Popen, TimeoutExpired
|
from queue import Queue
|
||||||
import zmq
|
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
import mathutils
|
import mathutils
|
||||||
from bpy.app.handlers import persistent
|
from bpy.app.handlers import persistent
|
||||||
|
from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE,
|
||||||
|
STATE_INITIAL, STATE_SYNCING, UP)
|
||||||
|
from replication.data import ReplicatedDataFactory
|
||||||
|
from replication.exception import NonAuthorizedOperationError
|
||||||
|
from replication.interface import session
|
||||||
|
|
||||||
from . import bl_types, delayable, environment, presence, ui, utils
|
from . import bl_types, delayable, environment, ui, utils
|
||||||
from .libs.replication.replication.constants import (FETCHED, STATE_ACTIVE,
|
from .presence import (SessionStatusWidget, renderer, view3d_find)
|
||||||
STATE_INITIAL,
|
|
||||||
STATE_SYNCING)
|
|
||||||
from .libs.replication.replication.data import ReplicatedDataFactory
|
|
||||||
from .libs.replication.replication.exception import NonAuthorizedOperationError
|
|
||||||
from .libs.replication.replication.interface import Session
|
|
||||||
|
|
||||||
|
background_execution_queue = Queue()
|
||||||
client = None
|
|
||||||
delayables = []
|
delayables = []
|
||||||
stop_modal_executor = False
|
stop_modal_executor = False
|
||||||
modal_executor_queue = None
|
|
||||||
|
|
||||||
|
|
||||||
def unregister_delayables():
|
def session_callback(name):
|
||||||
|
""" Session callback wrapper
|
||||||
|
|
||||||
|
This allow to encapsulate session callbacks to background_execution_queue.
|
||||||
|
By doing this way callback are executed from the main thread.
|
||||||
|
"""
|
||||||
|
def func_wrapper(func):
|
||||||
|
@session.register(name)
|
||||||
|
def add_background_task():
|
||||||
|
background_execution_queue.put(func)
|
||||||
|
return add_background_task
|
||||||
|
return func_wrapper
|
||||||
|
|
||||||
|
|
||||||
|
@session_callback('on_connection')
|
||||||
|
def initialize_session():
|
||||||
|
"""Session connection init hander
|
||||||
|
"""
|
||||||
|
settings = utils.get_preferences()
|
||||||
|
runtime_settings = bpy.context.window_manager.session
|
||||||
|
|
||||||
|
# Step 1: Constrect nodes
|
||||||
|
for node in session._graph.list_ordered():
|
||||||
|
node_ref = session.get(node)
|
||||||
|
if node_ref.state == FETCHED:
|
||||||
|
node_ref.resolve()
|
||||||
|
|
||||||
|
# Step 2: Load nodes
|
||||||
|
for node in session._graph.list_ordered():
|
||||||
|
node_ref = session.get(node)
|
||||||
|
if node_ref.state == FETCHED:
|
||||||
|
node_ref.apply()
|
||||||
|
|
||||||
|
# Step 4: Register blender timers
|
||||||
|
for d in delayables:
|
||||||
|
d.register()
|
||||||
|
|
||||||
|
if settings.update_method == 'DEPSGRAPH':
|
||||||
|
bpy.app.handlers.depsgraph_update_post.append(depsgraph_evaluation)
|
||||||
|
|
||||||
|
bpy.ops.session.apply_armature_operator('INVOKE_DEFAULT')
|
||||||
|
|
||||||
|
|
||||||
|
@session_callback('on_exit')
|
||||||
|
def on_connection_end():
|
||||||
|
"""Session connection finished handler
|
||||||
|
"""
|
||||||
global delayables, stop_modal_executor
|
global delayables, stop_modal_executor
|
||||||
|
settings = utils.get_preferences()
|
||||||
|
|
||||||
|
# Step 1: Unregister blender timers
|
||||||
for d in delayables:
|
for d in delayables:
|
||||||
try:
|
try:
|
||||||
d.unregister()
|
d.unregister()
|
||||||
@ -58,9 +104,18 @@ def unregister_delayables():
|
|||||||
|
|
||||||
stop_modal_executor = True
|
stop_modal_executor = True
|
||||||
|
|
||||||
|
if settings.update_method == 'DEPSGRAPH':
|
||||||
|
bpy.app.handlers.depsgraph_update_post.remove(
|
||||||
|
depsgraph_evaluation)
|
||||||
|
|
||||||
|
# Step 3: remove file handled
|
||||||
|
logger = logging.getLogger()
|
||||||
|
for handler in logger.handlers:
|
||||||
|
if isinstance(handler, logging.FileHandler):
|
||||||
|
logger.removeHandler(handler)
|
||||||
|
|
||||||
|
|
||||||
# OPERATORS
|
# OPERATORS
|
||||||
|
|
||||||
|
|
||||||
class SessionStartOperator(bpy.types.Operator):
|
class SessionStartOperator(bpy.types.Operator):
|
||||||
bl_idname = "session.start"
|
bl_idname = "session.start"
|
||||||
bl_label = "start"
|
bl_label = "start"
|
||||||
@ -73,17 +128,38 @@ class SessionStartOperator(bpy.types.Operator):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self, context):
|
||||||
global client, delayables
|
global delayables
|
||||||
|
|
||||||
settings = utils.get_preferences()
|
settings = utils.get_preferences()
|
||||||
runtime_settings = context.window_manager.session
|
runtime_settings = context.window_manager.session
|
||||||
users = bpy.data.window_managers['WinMan'].online_users
|
users = bpy.data.window_managers['WinMan'].online_users
|
||||||
admin_pass = runtime_settings.password
|
admin_pass = runtime_settings.password
|
||||||
|
use_extern_update = settings.update_method == 'DEPSGRAPH'
|
||||||
unregister_delayables()
|
|
||||||
users.clear()
|
users.clear()
|
||||||
delayables.clear()
|
delayables.clear()
|
||||||
|
|
||||||
|
logger = logging.getLogger()
|
||||||
|
if len(logger.handlers) == 1:
|
||||||
|
formatter = logging.Formatter(
|
||||||
|
fmt='%(asctime)s CLIENT %(levelname)-8s %(message)s',
|
||||||
|
datefmt='%H:%M:%S'
|
||||||
|
)
|
||||||
|
|
||||||
|
log_directory = os.path.join(
|
||||||
|
settings.cache_directory,
|
||||||
|
"multiuser_client.log")
|
||||||
|
|
||||||
|
os.makedirs(settings.cache_directory, exist_ok=True)
|
||||||
|
|
||||||
|
handler = logging.FileHandler(log_directory, mode='w')
|
||||||
|
logger.addHandler(handler)
|
||||||
|
|
||||||
|
for handler in logger.handlers:
|
||||||
|
if isinstance(handler, logging.NullHandler):
|
||||||
|
continue
|
||||||
|
|
||||||
|
handler.setFormatter(formatter)
|
||||||
|
|
||||||
bpy_factory = ReplicatedDataFactory()
|
bpy_factory = ReplicatedDataFactory()
|
||||||
supported_bl_types = []
|
supported_bl_types = []
|
||||||
|
|
||||||
@ -95,24 +171,35 @@ class SessionStartOperator(bpy.types.Operator):
|
|||||||
|
|
||||||
supported_bl_types.append(type_module_class.bl_id)
|
supported_bl_types.append(type_module_class.bl_id)
|
||||||
|
|
||||||
# Retreive local replicated types settings
|
if type_impl_name not in settings.supported_datablocks:
|
||||||
|
logging.info(f"{type_impl_name} not found, \
|
||||||
|
regenerate type settings...")
|
||||||
|
settings.generate_supported_types()
|
||||||
|
|
||||||
type_local_config = settings.supported_datablocks[type_impl_name]
|
type_local_config = settings.supported_datablocks[type_impl_name]
|
||||||
|
|
||||||
bpy_factory.register_type(
|
bpy_factory.register_type(
|
||||||
type_module_class.bl_class,
|
type_module_class.bl_class,
|
||||||
type_module_class,
|
type_module_class,
|
||||||
timer=type_local_config.bl_delay_refresh,
|
timer=type_local_config.bl_delay_refresh*1000,
|
||||||
automatic=type_local_config.auto_push)
|
automatic=type_local_config.auto_push,
|
||||||
|
check_common=type_module_class.bl_check_common)
|
||||||
|
|
||||||
|
if settings.update_method == 'DEFAULT':
|
||||||
if type_local_config.bl_delay_apply > 0:
|
if type_local_config.bl_delay_apply > 0:
|
||||||
delayables.append(
|
delayables.append(
|
||||||
delayable.ApplyTimer(
|
delayable.ApplyTimer(
|
||||||
timout=type_local_config.bl_delay_apply,
|
timout=type_local_config.bl_delay_apply,
|
||||||
target_type=type_module_class))
|
target_type=type_module_class))
|
||||||
|
|
||||||
client = Session(
|
session.configure(
|
||||||
factory=bpy_factory,
|
factory=bpy_factory,
|
||||||
python_path=bpy.app.binary_path_python)
|
python_path=bpy.app.binary_path_python,
|
||||||
|
external_update_handling=use_extern_update)
|
||||||
|
|
||||||
|
if settings.update_method == 'DEPSGRAPH':
|
||||||
|
delayables.append(delayable.ApplyTimer(
|
||||||
|
settings.depsgraph_update_rate/1000))
|
||||||
|
|
||||||
# Host a session
|
# Host a session
|
||||||
if self.host:
|
if self.host:
|
||||||
@ -122,30 +209,34 @@ class SessionStartOperator(bpy.types.Operator):
|
|||||||
runtime_settings.is_host = True
|
runtime_settings.is_host = True
|
||||||
runtime_settings.internet_ip = environment.get_ip()
|
runtime_settings.internet_ip = environment.get_ip()
|
||||||
|
|
||||||
for scene in bpy.data.scenes:
|
|
||||||
client.add(scene)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
client.host(
|
for scene in bpy.data.scenes:
|
||||||
|
session.add(scene)
|
||||||
|
|
||||||
|
session.host(
|
||||||
id=settings.username,
|
id=settings.username,
|
||||||
port=settings.port,
|
port=settings.port,
|
||||||
ipc_port=settings.ipc_port,
|
ipc_port=settings.ipc_port,
|
||||||
timeout=settings.connection_timeout,
|
timeout=settings.connection_timeout,
|
||||||
password=admin_pass
|
password=admin_pass,
|
||||||
|
cache_directory=settings.cache_directory,
|
||||||
|
server_log_level=logging.getLevelName(
|
||||||
|
logging.getLogger().level),
|
||||||
)
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.report({'ERROR'}, repr(e))
|
self.report({'ERROR'}, repr(e))
|
||||||
logging.error(f"Error: {e}")
|
logging.error(f"Error: {e}")
|
||||||
|
import traceback
|
||||||
|
traceback.print_exc()
|
||||||
# Join a session
|
# Join a session
|
||||||
else:
|
else:
|
||||||
if not runtime_settings.admin:
|
if not runtime_settings.admin:
|
||||||
utils.clean_scene()
|
utils.clean_scene()
|
||||||
# regular client, no password needed
|
# regular session, no password needed
|
||||||
admin_pass = None
|
admin_pass = None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
client.connect(
|
session.connect(
|
||||||
id=settings.username,
|
id=settings.username,
|
||||||
address=settings.ip,
|
address=settings.ip,
|
||||||
port=settings.port,
|
port=settings.port,
|
||||||
@ -158,22 +249,23 @@ class SessionStartOperator(bpy.types.Operator):
|
|||||||
logging.error(str(e))
|
logging.error(str(e))
|
||||||
|
|
||||||
# Background client updates service
|
# Background client updates service
|
||||||
#TODO: Refactoring
|
|
||||||
delayables.append(delayable.ClientUpdate())
|
delayables.append(delayable.ClientUpdate())
|
||||||
delayables.append(delayable.DrawClient())
|
|
||||||
delayables.append(delayable.DynamicRightSelectTimer())
|
delayables.append(delayable.DynamicRightSelectTimer())
|
||||||
|
|
||||||
# Launch drawing module
|
session_update = delayable.SessionStatusUpdate()
|
||||||
if runtime_settings.enable_presence:
|
session_user_sync = delayable.SessionUserSync()
|
||||||
presence.renderer.run()
|
session_background_executor = delayable.MainThreadExecutor(
|
||||||
|
execution_queue=background_execution_queue)
|
||||||
|
|
||||||
|
session_update.register()
|
||||||
|
session_user_sync.register()
|
||||||
|
session_background_executor.register()
|
||||||
|
|
||||||
|
delayables.append(session_background_executor)
|
||||||
|
delayables.append(session_update)
|
||||||
|
delayables.append(session_user_sync)
|
||||||
|
|
||||||
# Register blender main thread tools
|
|
||||||
for d in delayables:
|
|
||||||
d.register()
|
|
||||||
|
|
||||||
global modal_executor_queue
|
|
||||||
modal_executor_queue = queue.Queue()
|
|
||||||
bpy.ops.session.apply_armature_operator()
|
|
||||||
|
|
||||||
self.report(
|
self.report(
|
||||||
{'INFO'},
|
{'INFO'},
|
||||||
@ -209,15 +301,13 @@ class SessionInitOperator(bpy.types.Operator):
|
|||||||
return wm.invoke_props_dialog(self)
|
return wm.invoke_props_dialog(self)
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self, context):
|
||||||
global client
|
|
||||||
|
|
||||||
if self.init_method == 'EMPTY':
|
if self.init_method == 'EMPTY':
|
||||||
utils.clean_scene()
|
utils.clean_scene()
|
||||||
|
|
||||||
for scene in bpy.data.scenes:
|
for scene in bpy.data.scenes:
|
||||||
client.add(scene)
|
session.add(scene)
|
||||||
|
|
||||||
client.init()
|
session.init()
|
||||||
|
|
||||||
return {"FINISHED"}
|
return {"FINISHED"}
|
||||||
|
|
||||||
@ -233,11 +323,12 @@ class SessionStopOperator(bpy.types.Operator):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self, context):
|
||||||
global client, delayables, stop_modal_executor
|
global delayables, stop_modal_executor
|
||||||
|
|
||||||
if client:
|
if session:
|
||||||
try:
|
try:
|
||||||
client.disconnect()
|
session.disconnect()
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.report({'ERROR'}, repr(e))
|
self.report({'ERROR'}, repr(e))
|
||||||
else:
|
else:
|
||||||
@ -249,7 +340,7 @@ class SessionStopOperator(bpy.types.Operator):
|
|||||||
class SessionKickOperator(bpy.types.Operator):
|
class SessionKickOperator(bpy.types.Operator):
|
||||||
bl_idname = "session.kick"
|
bl_idname = "session.kick"
|
||||||
bl_label = "Kick"
|
bl_label = "Kick"
|
||||||
bl_description = "Kick the user"
|
bl_description = "Kick the target user"
|
||||||
bl_options = {"REGISTER"}
|
bl_options = {"REGISTER"}
|
||||||
|
|
||||||
user: bpy.props.StringProperty()
|
user: bpy.props.StringProperty()
|
||||||
@ -259,11 +350,11 @@ class SessionKickOperator(bpy.types.Operator):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self, context):
|
||||||
global client, delayables, stop_modal_executor
|
global delayables, stop_modal_executor
|
||||||
assert(client)
|
assert(session)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
client.kick(self.user)
|
session.kick(self.user)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.report({'ERROR'}, repr(e))
|
self.report({'ERROR'}, repr(e))
|
||||||
|
|
||||||
@ -279,8 +370,9 @@ class SessionKickOperator(bpy.types.Operator):
|
|||||||
|
|
||||||
class SessionPropertyRemoveOperator(bpy.types.Operator):
|
class SessionPropertyRemoveOperator(bpy.types.Operator):
|
||||||
bl_idname = "session.remove_prop"
|
bl_idname = "session.remove_prop"
|
||||||
bl_label = "remove"
|
bl_label = "Delete cache"
|
||||||
bl_description = "broadcast a property to connected client_instances"
|
bl_description = "Stop tracking modification on the target datablock." + \
|
||||||
|
"The datablock will no longer be updated for others client. "
|
||||||
bl_options = {"REGISTER"}
|
bl_options = {"REGISTER"}
|
||||||
|
|
||||||
property_path: bpy.props.StringProperty(default="None")
|
property_path: bpy.props.StringProperty(default="None")
|
||||||
@ -290,9 +382,8 @@ class SessionPropertyRemoveOperator(bpy.types.Operator):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self, context):
|
||||||
global client
|
|
||||||
try:
|
try:
|
||||||
client.remove(self.property_path)
|
session.remove(self.property_path)
|
||||||
|
|
||||||
return {"FINISHED"}
|
return {"FINISHED"}
|
||||||
except: # NonAuthorizedOperationError:
|
except: # NonAuthorizedOperationError:
|
||||||
@ -304,11 +395,12 @@ class SessionPropertyRemoveOperator(bpy.types.Operator):
|
|||||||
|
|
||||||
class SessionPropertyRightOperator(bpy.types.Operator):
|
class SessionPropertyRightOperator(bpy.types.Operator):
|
||||||
bl_idname = "session.right"
|
bl_idname = "session.right"
|
||||||
bl_label = "Change owner to"
|
bl_label = "Change modification rights"
|
||||||
bl_description = "Change owner of specified datablock"
|
bl_description = "Modify the owner of the target datablock"
|
||||||
bl_options = {"REGISTER"}
|
bl_options = {"REGISTER"}
|
||||||
|
|
||||||
key: bpy.props.StringProperty(default="None")
|
key: bpy.props.StringProperty(default="None")
|
||||||
|
recursive: bpy.props.BoolProperty(default=True)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def poll(cls, context):
|
def poll(cls, context):
|
||||||
@ -322,15 +414,21 @@ class SessionPropertyRightOperator(bpy.types.Operator):
|
|||||||
layout = self.layout
|
layout = self.layout
|
||||||
runtime_settings = context.window_manager.session
|
runtime_settings = context.window_manager.session
|
||||||
|
|
||||||
col = layout.column()
|
row = layout.row()
|
||||||
col.prop(runtime_settings, "clients")
|
row.label(text="Give the owning rights to:")
|
||||||
|
row.prop(runtime_settings, "clients", text="")
|
||||||
|
row = layout.row()
|
||||||
|
row.label(text="Affect dependencies")
|
||||||
|
row.prop(self, "recursive", text="")
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self, context):
|
||||||
runtime_settings = context.window_manager.session
|
runtime_settings = context.window_manager.session
|
||||||
global client
|
|
||||||
|
|
||||||
if client:
|
if session:
|
||||||
client.change_owner(self.key, runtime_settings.clients)
|
session.change_owner(self.key,
|
||||||
|
runtime_settings.clients,
|
||||||
|
ignore_warnings=True,
|
||||||
|
affect_dependencies=self.recursive)
|
||||||
|
|
||||||
return {"FINISHED"}
|
return {"FINISHED"}
|
||||||
|
|
||||||
@ -376,11 +474,10 @@ class SessionSnapUserOperator(bpy.types.Operator):
|
|||||||
return {'CANCELLED'}
|
return {'CANCELLED'}
|
||||||
|
|
||||||
if event.type == 'TIMER':
|
if event.type == 'TIMER':
|
||||||
area, region, rv3d = presence.view3d_find()
|
area, region, rv3d = view3d_find()
|
||||||
global client
|
|
||||||
|
|
||||||
if client:
|
if session:
|
||||||
target_ref = client.online_users.get(self.target_client)
|
target_ref = session.online_users.get(self.target_client)
|
||||||
|
|
||||||
if target_ref:
|
if target_ref:
|
||||||
target_scene = target_ref['metadata']['scene_current']
|
target_scene = target_ref['metadata']['scene_current']
|
||||||
@ -389,14 +486,16 @@ class SessionSnapUserOperator(bpy.types.Operator):
|
|||||||
if target_scene != context.scene.name:
|
if target_scene != context.scene.name:
|
||||||
blender_scene = bpy.data.scenes.get(target_scene, None)
|
blender_scene = bpy.data.scenes.get(target_scene, None)
|
||||||
if blender_scene is None:
|
if blender_scene is None:
|
||||||
self.report({'ERROR'}, f"Scene {target_scene} doesn't exist on the local client.")
|
self.report(
|
||||||
|
{'ERROR'}, f"Scene {target_scene} doesn't exist on the local client.")
|
||||||
session_sessings.time_snap_running = False
|
session_sessings.time_snap_running = False
|
||||||
return {"CANCELLED"}
|
return {"CANCELLED"}
|
||||||
|
|
||||||
bpy.context.window.scene = blender_scene
|
bpy.context.window.scene = blender_scene
|
||||||
|
|
||||||
# Update client viewmatrix
|
# Update client viewmatrix
|
||||||
client_vmatrix = target_ref['metadata'].get('view_matrix', None)
|
client_vmatrix = target_ref['metadata'].get(
|
||||||
|
'view_matrix', None)
|
||||||
|
|
||||||
if client_vmatrix:
|
if client_vmatrix:
|
||||||
rv3d.view_matrix = mathutils.Matrix(client_vmatrix)
|
rv3d.view_matrix = mathutils.Matrix(client_vmatrix)
|
||||||
@ -449,10 +548,8 @@ class SessionSnapTimeOperator(bpy.types.Operator):
|
|||||||
return {'CANCELLED'}
|
return {'CANCELLED'}
|
||||||
|
|
||||||
if event.type == 'TIMER':
|
if event.type == 'TIMER':
|
||||||
global client
|
if session:
|
||||||
|
target_ref = session.online_users.get(self.target_client)
|
||||||
if client:
|
|
||||||
target_ref = client.online_users.get(self.target_client)
|
|
||||||
|
|
||||||
if target_ref:
|
if target_ref:
|
||||||
context.scene.frame_current = target_ref['metadata']['frame_current']
|
context.scene.frame_current = target_ref['metadata']['frame_current']
|
||||||
@ -464,28 +561,31 @@ class SessionSnapTimeOperator(bpy.types.Operator):
|
|||||||
|
|
||||||
class SessionApply(bpy.types.Operator):
|
class SessionApply(bpy.types.Operator):
|
||||||
bl_idname = "session.apply"
|
bl_idname = "session.apply"
|
||||||
bl_label = "apply selected block into blender"
|
bl_label = "Revert"
|
||||||
bl_description = "Apply selected block into blender"
|
bl_description = "Revert the selected datablock from his cached" + \
|
||||||
|
" version."
|
||||||
bl_options = {"REGISTER"}
|
bl_options = {"REGISTER"}
|
||||||
|
|
||||||
target: bpy.props.StringProperty()
|
target: bpy.props.StringProperty()
|
||||||
|
reset_dependencies: bpy.props.BoolProperty(default=False)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def poll(cls, context):
|
def poll(cls, context):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self, context):
|
||||||
global client
|
logging.debug(f"Running apply on {self.target}")
|
||||||
|
session.apply(self.target,
|
||||||
client.apply(self.target)
|
force=True,
|
||||||
|
force_dependencies=self.reset_dependencies)
|
||||||
|
|
||||||
return {"FINISHED"}
|
return {"FINISHED"}
|
||||||
|
|
||||||
|
|
||||||
class SessionCommit(bpy.types.Operator):
|
class SessionCommit(bpy.types.Operator):
|
||||||
bl_idname = "session.commit"
|
bl_idname = "session.commit"
|
||||||
bl_label = "commit and push selected datablock to server"
|
bl_label = "Force server update"
|
||||||
bl_description = "commit and push selected datablock to server"
|
bl_description = "Commit and push the target datablock to server"
|
||||||
bl_options = {"REGISTER"}
|
bl_options = {"REGISTER"}
|
||||||
|
|
||||||
target: bpy.props.StringProperty()
|
target: bpy.props.StringProperty()
|
||||||
@ -495,10 +595,9 @@ class SessionCommit(bpy.types.Operator):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self, context):
|
||||||
global client
|
# session.get(uuid=target).diff()
|
||||||
# client.get(uuid=target).diff()
|
session.commit(uuid=self.target)
|
||||||
client.commit(uuid=self.target)
|
session.push(self.target)
|
||||||
client.push(self.target)
|
|
||||||
return {"FINISHED"}
|
return {"FINISHED"}
|
||||||
|
|
||||||
|
|
||||||
@ -516,18 +615,17 @@ class ApplyArmatureOperator(bpy.types.Operator):
|
|||||||
return {'CANCELLED'}
|
return {'CANCELLED'}
|
||||||
|
|
||||||
if event.type == 'TIMER':
|
if event.type == 'TIMER':
|
||||||
global client
|
if session and session.state['STATE'] == STATE_ACTIVE:
|
||||||
if client and client.state['STATE'] == STATE_ACTIVE:
|
nodes = session.list(filter=bl_types.bl_armature.BlArmature)
|
||||||
nodes = client.list(filter=bl_types.bl_armature.BlArmature)
|
|
||||||
|
|
||||||
for node in nodes:
|
for node in nodes:
|
||||||
node_ref = client.get(uuid=node)
|
node_ref = session.get(uuid=node)
|
||||||
|
|
||||||
if node_ref.state == FETCHED:
|
if node_ref.state == FETCHED:
|
||||||
try:
|
try:
|
||||||
client.apply(node)
|
session.apply(node)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logging.error("Dail to apply armature: {e}")
|
logging.error("Fail to apply armature: {e}")
|
||||||
|
|
||||||
return {'PASS_THROUGH'}
|
return {'PASS_THROUGH'}
|
||||||
|
|
||||||
@ -546,6 +644,35 @@ class ApplyArmatureOperator(bpy.types.Operator):
|
|||||||
stop_modal_executor = False
|
stop_modal_executor = False
|
||||||
|
|
||||||
|
|
||||||
|
class ClearCache(bpy.types.Operator):
|
||||||
|
"Clear local session cache"
|
||||||
|
bl_idname = "session.clear_cache"
|
||||||
|
bl_label = "Modal Executor Operator"
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
return True
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
cache_dir = utils.get_preferences().cache_directory
|
||||||
|
try:
|
||||||
|
for root, dirs, files in os.walk(cache_dir):
|
||||||
|
for name in files:
|
||||||
|
Path(root, name).unlink()
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.report({'ERROR'}, repr(e))
|
||||||
|
|
||||||
|
return {"FINISHED"}
|
||||||
|
|
||||||
|
def invoke(self, context, event):
|
||||||
|
return context.window_manager.invoke_props_dialog(self)
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
row = self.layout
|
||||||
|
row.label(text=f" Do you really want to remove local cache ? ")
|
||||||
|
|
||||||
|
|
||||||
classes = (
|
classes = (
|
||||||
SessionStartOperator,
|
SessionStartOperator,
|
||||||
SessionStopOperator,
|
SessionStopOperator,
|
||||||
@ -558,7 +685,7 @@ classes = (
|
|||||||
ApplyArmatureOperator,
|
ApplyArmatureOperator,
|
||||||
SessionKickOperator,
|
SessionKickOperator,
|
||||||
SessionInitOperator,
|
SessionInitOperator,
|
||||||
|
ClearCache,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -570,31 +697,65 @@ def sanitize_deps_graph(dummy):
|
|||||||
A future solution should be to avoid storing dataclock reference...
|
A future solution should be to avoid storing dataclock reference...
|
||||||
|
|
||||||
"""
|
"""
|
||||||
global client
|
|
||||||
|
|
||||||
if client and client.state['STATE'] == STATE_ACTIVE:
|
|
||||||
for node_key in client.list():
|
|
||||||
client.get(node_key).resolve()
|
|
||||||
|
|
||||||
|
if session and session.state['STATE'] == STATE_ACTIVE:
|
||||||
|
for node_key in session.list():
|
||||||
|
node = session.get(node_key)
|
||||||
|
if node and not node.resolve(construct=False):
|
||||||
|
session.remove(node_key)
|
||||||
|
|
||||||
@persistent
|
@persistent
|
||||||
def load_pre_handler(dummy):
|
def load_pre_handler(dummy):
|
||||||
global client
|
if session and session.state['STATE'] in [STATE_ACTIVE, STATE_SYNCING]:
|
||||||
|
|
||||||
if client and client.state['STATE'] in [STATE_ACTIVE, STATE_SYNCING]:
|
|
||||||
bpy.ops.session.stop()
|
bpy.ops.session.stop()
|
||||||
|
|
||||||
|
|
||||||
@persistent
|
@persistent
|
||||||
def update_client_frame(scene):
|
def update_client_frame(scene):
|
||||||
if client and client.state['STATE'] == STATE_ACTIVE:
|
if session and session.state['STATE'] == STATE_ACTIVE:
|
||||||
client.update_user_metadata({
|
session.update_user_metadata({
|
||||||
'frame_current': scene.frame_current
|
'frame_current': scene.frame_current
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@persistent
|
||||||
|
def depsgraph_evaluation(scene):
|
||||||
|
if session and session.state['STATE'] == STATE_ACTIVE:
|
||||||
|
context = bpy.context
|
||||||
|
blender_depsgraph = bpy.context.view_layer.depsgraph
|
||||||
|
dependency_updates = [u for u in blender_depsgraph.updates]
|
||||||
|
settings = utils.get_preferences()
|
||||||
|
|
||||||
|
# NOTE: maybe we don't need to check each update but only the first
|
||||||
|
|
||||||
|
for update in reversed(dependency_updates):
|
||||||
|
# Is the object tracked ?
|
||||||
|
if update.id.uuid:
|
||||||
|
# Retrieve local version
|
||||||
|
node = session.get(update.id.uuid)
|
||||||
|
|
||||||
|
# Check our right on this update:
|
||||||
|
# - if its ours or ( under common and diff), launch the
|
||||||
|
# update process
|
||||||
|
# - if its to someone else, ignore the update (go deeper ?)
|
||||||
|
if node and node.owner in [session.id, RP_COMMON] and node.state == UP:
|
||||||
|
# Avoid slow geometry update
|
||||||
|
if 'EDIT' in context.mode and \
|
||||||
|
not settings.sync_during_editmode:
|
||||||
|
break
|
||||||
|
|
||||||
|
session.stash(node.uuid)
|
||||||
|
else:
|
||||||
|
# Distant update
|
||||||
|
continue
|
||||||
|
# else:
|
||||||
|
# # New items !
|
||||||
|
# logger.error("UPDATE: ADD")
|
||||||
|
|
||||||
|
|
||||||
def register():
|
def register():
|
||||||
from bpy.utils import register_class
|
from bpy.utils import register_class
|
||||||
|
|
||||||
for cls in classes:
|
for cls in classes:
|
||||||
register_class(cls)
|
register_class(cls)
|
||||||
|
|
||||||
@ -606,11 +767,8 @@ def register():
|
|||||||
|
|
||||||
|
|
||||||
def unregister():
|
def unregister():
|
||||||
global client
|
if session and session.state['STATE'] == STATE_ACTIVE:
|
||||||
|
session.disconnect()
|
||||||
if client and client.state['STATE'] == 2:
|
|
||||||
client.disconnect()
|
|
||||||
client = None
|
|
||||||
|
|
||||||
from bpy.utils import unregister_class
|
from bpy.utils import unregister_class
|
||||||
for cls in reversed(classes):
|
for cls in reversed(classes):
|
||||||
@ -621,7 +779,3 @@ def unregister():
|
|||||||
|
|
||||||
bpy.app.handlers.load_pre.remove(load_pre_handler)
|
bpy.app.handlers.load_pre.remove(load_pre_handler)
|
||||||
bpy.app.handlers.frame_change_pre.remove(update_client_frame)
|
bpy.app.handlers.frame_change_pre.remove(update_client_frame)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
register()
|
|
||||||
|
@ -20,9 +20,14 @@ import logging
|
|||||||
import bpy
|
import bpy
|
||||||
import string
|
import string
|
||||||
import re
|
import re
|
||||||
|
import os
|
||||||
|
|
||||||
from . import utils, bl_types, environment, addon_updater_ops, presence, ui
|
from pathlib import Path
|
||||||
from .libs.replication.replication.constants import RP_COMMON
|
|
||||||
|
from . import bl_types, environment, addon_updater_ops, presence, ui
|
||||||
|
from .utils import get_preferences, get_expanded_icon
|
||||||
|
from replication.constants import RP_COMMON
|
||||||
|
from replication.interface import session
|
||||||
|
|
||||||
IP_EXPR = re.compile('\d+\.\d+\.\d+\.\d+')
|
IP_EXPR = re.compile('\d+\.\d+\.\d+\.\d+')
|
||||||
|
|
||||||
@ -36,7 +41,7 @@ def randomColor():
|
|||||||
|
|
||||||
|
|
||||||
def random_string_digits(stringLength=6):
|
def random_string_digits(stringLength=6):
|
||||||
"""Generate a random string of letters and digits """
|
"""Generate a random string of letters and digits"""
|
||||||
lettersAndDigits = string.ascii_letters + string.digits
|
lettersAndDigits = string.ascii_letters + string.digits
|
||||||
return ''.join(random.choices(lettersAndDigits, k=stringLength))
|
return ''.join(random.choices(lettersAndDigits, k=stringLength))
|
||||||
|
|
||||||
@ -46,6 +51,7 @@ def update_panel_category(self, context):
|
|||||||
ui.SESSION_PT_settings.bl_category = self.panel_category
|
ui.SESSION_PT_settings.bl_category = self.panel_category
|
||||||
ui.register()
|
ui.register()
|
||||||
|
|
||||||
|
|
||||||
def update_ip(self, context):
|
def update_ip(self, context):
|
||||||
ip = IP_EXPR.search(self.ip)
|
ip = IP_EXPR.search(self.ip)
|
||||||
|
|
||||||
@ -55,14 +61,35 @@ def update_ip(self, context):
|
|||||||
logging.error("Wrong IP format")
|
logging.error("Wrong IP format")
|
||||||
self['ip'] = "127.0.0.1"
|
self['ip'] = "127.0.0.1"
|
||||||
|
|
||||||
|
|
||||||
def update_port(self, context):
|
def update_port(self, context):
|
||||||
max_port = self.port + 3
|
max_port = self.port + 3
|
||||||
|
|
||||||
if self.ipc_port < max_port and \
|
if self.ipc_port < max_port and \
|
||||||
self['ipc_port'] >= self.port:
|
self['ipc_port'] >= self.port:
|
||||||
logging.error("IPC Port in conflic with the port, assigning a random value")
|
logging.error(
|
||||||
|
"IPC Port in conflict with the port, assigning a random value")
|
||||||
self['ipc_port'] = random.randrange(self.port+4, 10000)
|
self['ipc_port'] = random.randrange(self.port+4, 10000)
|
||||||
|
|
||||||
|
|
||||||
|
def update_directory(self, context):
|
||||||
|
new_dir = Path(self.cache_directory)
|
||||||
|
if new_dir.exists() and any(Path(self.cache_directory).iterdir()):
|
||||||
|
logging.error("The folder is not empty, choose another one.")
|
||||||
|
self['cache_directory'] = environment.DEFAULT_CACHE_DIR
|
||||||
|
elif not new_dir.exists():
|
||||||
|
logging.info("Target cache folder doesn't exist, creating it.")
|
||||||
|
os.makedirs(self.cache_directory, exist_ok=True)
|
||||||
|
|
||||||
|
|
||||||
|
def set_log_level(self, value):
|
||||||
|
logging.getLogger().setLevel(value)
|
||||||
|
|
||||||
|
|
||||||
|
def get_log_level(self):
|
||||||
|
return logging.getLogger().level
|
||||||
|
|
||||||
|
|
||||||
class ReplicatedDatablock(bpy.types.PropertyGroup):
|
class ReplicatedDatablock(bpy.types.PropertyGroup):
|
||||||
type_name: bpy.props.StringProperty()
|
type_name: bpy.props.StringProperty()
|
||||||
bl_name: bpy.props.StringProperty()
|
bl_name: bpy.props.StringProperty()
|
||||||
@ -73,11 +100,49 @@ class ReplicatedDatablock(bpy.types.PropertyGroup):
|
|||||||
icon: bpy.props.StringProperty()
|
icon: bpy.props.StringProperty()
|
||||||
|
|
||||||
|
|
||||||
|
def set_sync_render_settings(self, value):
|
||||||
|
self['sync_render_settings'] = value
|
||||||
|
if session and bpy.context.scene.uuid and value:
|
||||||
|
bpy.ops.session.apply('INVOKE_DEFAULT',
|
||||||
|
target=bpy.context.scene.uuid,
|
||||||
|
reset_dependencies=False)
|
||||||
|
|
||||||
|
|
||||||
|
def set_sync_active_camera(self, value):
|
||||||
|
self['sync_active_camera'] = value
|
||||||
|
|
||||||
|
if session and bpy.context.scene.uuid and value:
|
||||||
|
bpy.ops.session.apply('INVOKE_DEFAULT',
|
||||||
|
target=bpy.context.scene.uuid,
|
||||||
|
reset_dependencies=False)
|
||||||
|
|
||||||
|
|
||||||
class ReplicationFlags(bpy.types.PropertyGroup):
|
class ReplicationFlags(bpy.types.PropertyGroup):
|
||||||
|
def get_sync_render_settings(self):
|
||||||
|
return self.get('sync_render_settings', True)
|
||||||
|
|
||||||
|
def get_sync_active_camera(self):
|
||||||
|
return self.get('sync_active_camera', True)
|
||||||
|
|
||||||
sync_render_settings: bpy.props.BoolProperty(
|
sync_render_settings: bpy.props.BoolProperty(
|
||||||
name="Synchronize render settings",
|
name="Synchronize render settings",
|
||||||
description="Synchronize render settings (eevee and cycles only)",
|
description="Synchronize render settings (eevee and cycles only)",
|
||||||
default=True)
|
default=False,
|
||||||
|
set=set_sync_render_settings,
|
||||||
|
get=get_sync_render_settings
|
||||||
|
)
|
||||||
|
sync_during_editmode: bpy.props.BoolProperty(
|
||||||
|
name="Edit mode updates",
|
||||||
|
description="Enable objects update in edit mode (! Impact performances !)",
|
||||||
|
default=False
|
||||||
|
)
|
||||||
|
sync_active_camera: bpy.props.BoolProperty(
|
||||||
|
name="Synchronize active camera",
|
||||||
|
description="Synchronize the active camera",
|
||||||
|
default=True,
|
||||||
|
get=get_sync_active_camera,
|
||||||
|
set=set_sync_active_camera
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class SessionPrefs(bpy.types.AddonPreferences):
|
class SessionPrefs(bpy.types.AddonPreferences):
|
||||||
@ -109,9 +174,9 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
|||||||
)
|
)
|
||||||
ipc_port: bpy.props.IntProperty(
|
ipc_port: bpy.props.IntProperty(
|
||||||
name="ipc_port",
|
name="ipc_port",
|
||||||
description='internal ttl port(only usefull for multiple local instances)',
|
description='internal ttl port(only useful for multiple local instances)',
|
||||||
default=5561,
|
default=random.randrange(5570, 70000),
|
||||||
update=update_port
|
update=update_port,
|
||||||
)
|
)
|
||||||
init_method: bpy.props.EnumProperty(
|
init_method: bpy.props.EnumProperty(
|
||||||
name='init_method',
|
name='init_method',
|
||||||
@ -123,33 +188,80 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
|||||||
cache_directory: bpy.props.StringProperty(
|
cache_directory: bpy.props.StringProperty(
|
||||||
name="cache directory",
|
name="cache directory",
|
||||||
subtype="DIR_PATH",
|
subtype="DIR_PATH",
|
||||||
default=environment.DEFAULT_CACHE_DIR)
|
default=environment.DEFAULT_CACHE_DIR,
|
||||||
|
update=update_directory)
|
||||||
connection_timeout: bpy.props.IntProperty(
|
connection_timeout: bpy.props.IntProperty(
|
||||||
name='connection timeout',
|
name='connection timeout',
|
||||||
description='connection timeout before disconnection',
|
description='connection timeout before disconnection',
|
||||||
default=1000
|
default=1000
|
||||||
)
|
)
|
||||||
|
update_method: bpy.props.EnumProperty(
|
||||||
|
name='update method',
|
||||||
|
description='replication update method',
|
||||||
|
items=[
|
||||||
|
('DEFAULT', "Default", "Default: Use threads to monitor databloc changes"),
|
||||||
|
('DEPSGRAPH', "Depsgraph",
|
||||||
|
"Experimental: Use the blender dependency graph to trigger updates"),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
# Replication update settings
|
||||||
|
depsgraph_update_rate: bpy.props.IntProperty(
|
||||||
|
name='depsgraph update rate',
|
||||||
|
description='Dependency graph uppdate rate (milliseconds)',
|
||||||
|
default=100
|
||||||
|
)
|
||||||
|
clear_memory_filecache: bpy.props.BoolProperty(
|
||||||
|
name="Clear memory filecache",
|
||||||
|
description="Remove filecache from memory",
|
||||||
|
default=False
|
||||||
|
)
|
||||||
# for UI
|
# for UI
|
||||||
category: bpy.props.EnumProperty(
|
category: bpy.props.EnumProperty(
|
||||||
name="Category",
|
name="Category",
|
||||||
description="Preferences Category",
|
description="Preferences Category",
|
||||||
items=[
|
items=[
|
||||||
('CONFIG', "Configuration", "Configuration about this add-on"),
|
('CONFIG', "Configuration", "Configuration of this add-on"),
|
||||||
('UPDATE', "Update", "Update this add-on"),
|
('UPDATE', "Update", "Update this add-on"),
|
||||||
],
|
],
|
||||||
default='CONFIG'
|
default='CONFIG'
|
||||||
)
|
)
|
||||||
# WIP
|
|
||||||
logging_level: bpy.props.EnumProperty(
|
logging_level: bpy.props.EnumProperty(
|
||||||
name="Log level",
|
name="Log level",
|
||||||
description="Log verbosity level",
|
description="Log verbosity level",
|
||||||
items=[
|
items=[
|
||||||
('ERROR', "error", "show only errors"),
|
('ERROR', "error", "show only errors", logging.ERROR),
|
||||||
('WARNING', "warning", "only show warnings and errors"),
|
('WARNING', "warning", "only show warnings and errors", logging.WARNING),
|
||||||
('INFO', "info", "default level"),
|
('INFO', "info", "default level", logging.INFO),
|
||||||
('DEBUG', "debug", "show all logs"),
|
('DEBUG', "debug", "show all logs", logging.DEBUG),
|
||||||
],
|
],
|
||||||
default='INFO'
|
default='INFO',
|
||||||
|
set=set_log_level,
|
||||||
|
get=get_log_level
|
||||||
|
)
|
||||||
|
presence_hud_scale: bpy.props.FloatProperty(
|
||||||
|
name="Text scale",
|
||||||
|
description="Adjust the session widget text scale",
|
||||||
|
min=7,
|
||||||
|
max=90,
|
||||||
|
default=15,
|
||||||
|
)
|
||||||
|
presence_hud_hpos: bpy.props.FloatProperty(
|
||||||
|
name="Horizontal position",
|
||||||
|
description="Adjust the session widget horizontal position",
|
||||||
|
min=1,
|
||||||
|
max=90,
|
||||||
|
default=3,
|
||||||
|
step=1,
|
||||||
|
subtype='PERCENTAGE',
|
||||||
|
)
|
||||||
|
presence_hud_vpos: bpy.props.FloatProperty(
|
||||||
|
name="Vertical position",
|
||||||
|
description="Adjust the session widget vertical position",
|
||||||
|
min=1,
|
||||||
|
max=94,
|
||||||
|
default=1,
|
||||||
|
step=1,
|
||||||
|
subtype='PERCENTAGE',
|
||||||
)
|
)
|
||||||
conf_session_identity_expanded: bpy.props.BoolProperty(
|
conf_session_identity_expanded: bpy.props.BoolProperty(
|
||||||
name="Identity",
|
name="Identity",
|
||||||
@ -181,6 +293,26 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
|||||||
description="Interface",
|
description="Interface",
|
||||||
default=False
|
default=False
|
||||||
)
|
)
|
||||||
|
sidebar_advanced_rep_expanded: bpy.props.BoolProperty(
|
||||||
|
name="sidebar_advanced_rep_expanded",
|
||||||
|
description="sidebar_advanced_rep_expanded",
|
||||||
|
default=False
|
||||||
|
)
|
||||||
|
sidebar_advanced_log_expanded: bpy.props.BoolProperty(
|
||||||
|
name="sidebar_advanced_log_expanded",
|
||||||
|
description="sidebar_advanced_log_expanded",
|
||||||
|
default=False
|
||||||
|
)
|
||||||
|
sidebar_advanced_net_expanded: bpy.props.BoolProperty(
|
||||||
|
name="sidebar_advanced_net_expanded",
|
||||||
|
description="sidebar_advanced_net_expanded",
|
||||||
|
default=False
|
||||||
|
)
|
||||||
|
sidebar_advanced_cache_expanded: bpy.props.BoolProperty(
|
||||||
|
name="sidebar_advanced_cache_expanded",
|
||||||
|
description="sidebar_advanced_cache_expanded",
|
||||||
|
default=False
|
||||||
|
)
|
||||||
|
|
||||||
auto_check_update: bpy.props.BoolProperty(
|
auto_check_update: bpy.props.BoolProperty(
|
||||||
name="Auto-check for Update",
|
name="Auto-check for Update",
|
||||||
@ -232,9 +364,9 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
|||||||
# USER INFORMATIONS
|
# USER INFORMATIONS
|
||||||
box = grid.box()
|
box = grid.box()
|
||||||
box.prop(
|
box.prop(
|
||||||
self, "conf_session_identity_expanded", text="User informations",
|
self, "conf_session_identity_expanded", text="User information",
|
||||||
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_identity_expanded
|
icon=get_expanded_icon(self.conf_session_identity_expanded),
|
||||||
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
|
emboss=False)
|
||||||
if self.conf_session_identity_expanded:
|
if self.conf_session_identity_expanded:
|
||||||
box.row().prop(self, "username", text="name")
|
box.row().prop(self, "username", text="name")
|
||||||
box.row().prop(self, "client_color", text="color")
|
box.row().prop(self, "client_color", text="color")
|
||||||
@ -242,24 +374,27 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
|||||||
# NETWORK SETTINGS
|
# NETWORK SETTINGS
|
||||||
box = grid.box()
|
box = grid.box()
|
||||||
box.prop(
|
box.prop(
|
||||||
self, "conf_session_net_expanded", text="Netorking",
|
self, "conf_session_net_expanded", text="Networking",
|
||||||
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_net_expanded
|
icon=get_expanded_icon(self.conf_session_net_expanded),
|
||||||
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
|
emboss=False)
|
||||||
|
|
||||||
if self.conf_session_net_expanded:
|
if self.conf_session_net_expanded:
|
||||||
box.row().prop(self, "ip", text="Address")
|
box.row().prop(self, "ip", text="Address")
|
||||||
row = box.row()
|
row = box.row()
|
||||||
row.label(text="Port:")
|
row.label(text="Port:")
|
||||||
row.prop(self, "port", text="Address")
|
row.prop(self, "port", text="")
|
||||||
row = box.row()
|
row = box.row()
|
||||||
row.label(text="Init the session from:")
|
row.label(text="Init the session from:")
|
||||||
row.prop(self, "init_method", text="")
|
row.prop(self, "init_method", text="")
|
||||||
|
row = box.row()
|
||||||
|
row.label(text="Update method:")
|
||||||
|
row.prop(self, "update_method", text="")
|
||||||
|
|
||||||
table = box.box()
|
table = box.box()
|
||||||
table.row().prop(
|
table.row().prop(
|
||||||
self, "conf_session_timing_expanded", text="Refresh rates",
|
self, "conf_session_timing_expanded", text="Refresh rates",
|
||||||
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_timing_expanded
|
icon=get_expanded_icon(self.conf_session_timing_expanded),
|
||||||
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
|
emboss=False)
|
||||||
|
|
||||||
if self.conf_session_timing_expanded:
|
if self.conf_session_timing_expanded:
|
||||||
line = table.row()
|
line = table.row()
|
||||||
@ -277,8 +412,8 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
|||||||
box = grid.box()
|
box = grid.box()
|
||||||
box.prop(
|
box.prop(
|
||||||
self, "conf_session_hosting_expanded", text="Hosting",
|
self, "conf_session_hosting_expanded", text="Hosting",
|
||||||
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_hosting_expanded
|
icon=get_expanded_icon(self.conf_session_hosting_expanded),
|
||||||
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
|
emboss=False)
|
||||||
if self.conf_session_hosting_expanded:
|
if self.conf_session_hosting_expanded:
|
||||||
row = box.row()
|
row = box.row()
|
||||||
row.label(text="Init the session from:")
|
row.label(text="Init the session from:")
|
||||||
@ -288,23 +423,33 @@ class SessionPrefs(bpy.types.AddonPreferences):
|
|||||||
box = grid.box()
|
box = grid.box()
|
||||||
box.prop(
|
box.prop(
|
||||||
self, "conf_session_cache_expanded", text="Cache",
|
self, "conf_session_cache_expanded", text="Cache",
|
||||||
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_cache_expanded
|
icon=get_expanded_icon(self.conf_session_cache_expanded),
|
||||||
else 'DISCLOSURE_TRI_RIGHT', emboss=False)
|
emboss=False)
|
||||||
if self.conf_session_cache_expanded:
|
if self.conf_session_cache_expanded:
|
||||||
box.row().prop(self, "cache_directory", text="Cache directory")
|
box.row().prop(self, "cache_directory", text="Cache directory")
|
||||||
|
box.row().prop(self, "clear_memory_filecache", text="Clear memory filecache")
|
||||||
|
|
||||||
# INTERFACE SETTINGS
|
# INTERFACE SETTINGS
|
||||||
box = grid.box()
|
box = grid.box()
|
||||||
box.prop(
|
box.prop(
|
||||||
self, "conf_session_ui_expanded", text="Interface",
|
self, "conf_session_ui_expanded", text="Interface",
|
||||||
icon='DISCLOSURE_TRI_DOWN' if self.conf_session_ui_expanded else 'DISCLOSURE_TRI_RIGHT',
|
icon=get_expanded_icon(self.conf_session_ui_expanded),
|
||||||
emboss=False)
|
emboss=False)
|
||||||
if self.conf_session_ui_expanded:
|
if self.conf_session_ui_expanded:
|
||||||
box.row().prop(self, "panel_category", text="Panel category", expand=True)
|
box.row().prop(self, "panel_category", text="Panel category", expand=True)
|
||||||
|
row = box.row()
|
||||||
|
row.label(text="Session widget:")
|
||||||
|
|
||||||
|
col = box.column(align=True)
|
||||||
|
col.prop(self, "presence_hud_scale", expand=True)
|
||||||
|
|
||||||
|
|
||||||
|
col.prop(self, "presence_hud_hpos", expand=True)
|
||||||
|
col.prop(self, "presence_hud_vpos", expand=True)
|
||||||
|
|
||||||
if self.category == 'UPDATE':
|
if self.category == 'UPDATE':
|
||||||
from . import addon_updater_ops
|
from . import addon_updater_ops
|
||||||
addon_updater_ops.update_settings_ui_condensed(self, context)
|
addon_updater_ops.update_settings_ui(self, context)
|
||||||
|
|
||||||
def generate_supported_types(self):
|
def generate_supported_types(self):
|
||||||
self.supported_datablocks.clear()
|
self.supported_datablocks.clear()
|
||||||
@ -331,10 +476,10 @@ def client_list_callback(scene, context):
|
|||||||
|
|
||||||
items = [(RP_COMMON, RP_COMMON, "")]
|
items = [(RP_COMMON, RP_COMMON, "")]
|
||||||
|
|
||||||
username = utils.get_preferences().username
|
username = get_preferences().username
|
||||||
cli = operators.client
|
|
||||||
if cli:
|
if session:
|
||||||
client_ids = cli.online_users.keys()
|
client_ids = session.online_users.keys()
|
||||||
for id in client_ids:
|
for id in client_ids:
|
||||||
name_desc = id
|
name_desc = id
|
||||||
if id == username:
|
if id == username:
|
||||||
@ -370,25 +515,26 @@ class SessionProps(bpy.types.PropertyGroup):
|
|||||||
name="Presence overlay",
|
name="Presence overlay",
|
||||||
description='Enable overlay drawing module',
|
description='Enable overlay drawing module',
|
||||||
default=True,
|
default=True,
|
||||||
update=presence.update_presence
|
|
||||||
)
|
)
|
||||||
presence_show_selected: bpy.props.BoolProperty(
|
presence_show_selected: bpy.props.BoolProperty(
|
||||||
name="Show selected objects",
|
name="Show selected objects",
|
||||||
description='Enable selection overlay ',
|
description='Enable selection overlay ',
|
||||||
default=True,
|
default=True,
|
||||||
update=presence.update_overlay_settings
|
|
||||||
)
|
)
|
||||||
presence_show_user: bpy.props.BoolProperty(
|
presence_show_user: bpy.props.BoolProperty(
|
||||||
name="Show users",
|
name="Show users",
|
||||||
description='Enable user overlay ',
|
description='Enable user overlay ',
|
||||||
default=True,
|
default=True,
|
||||||
update=presence.update_overlay_settings
|
|
||||||
)
|
)
|
||||||
presence_show_far_user: bpy.props.BoolProperty(
|
presence_show_far_user: bpy.props.BoolProperty(
|
||||||
name="Show users on different scenes",
|
name="Show users on different scenes",
|
||||||
description="Show user on different scenes",
|
description="Show user on different scenes",
|
||||||
default=False,
|
default=False,
|
||||||
update=presence.update_overlay_settings
|
)
|
||||||
|
presence_show_session_status: bpy.props.BoolProperty(
|
||||||
|
name="Show session status ",
|
||||||
|
description="Show session status on the viewport",
|
||||||
|
default=True,
|
||||||
)
|
)
|
||||||
filter_owned: bpy.props.BoolProperty(
|
filter_owned: bpy.props.BoolProperty(
|
||||||
name="filter_owned",
|
name="filter_owned",
|
||||||
|
@ -19,6 +19,8 @@
|
|||||||
import copy
|
import copy
|
||||||
import logging
|
import logging
|
||||||
import math
|
import math
|
||||||
|
import sys
|
||||||
|
import traceback
|
||||||
|
|
||||||
import bgl
|
import bgl
|
||||||
import blf
|
import blf
|
||||||
@ -27,13 +29,17 @@ import gpu
|
|||||||
import mathutils
|
import mathutils
|
||||||
from bpy_extras import view3d_utils
|
from bpy_extras import view3d_utils
|
||||||
from gpu_extras.batch import batch_for_shader
|
from gpu_extras.batch import batch_for_shader
|
||||||
|
from replication.constants import (STATE_ACTIVE, STATE_AUTH, STATE_CONFIG,
|
||||||
|
STATE_INITIAL, STATE_LAUNCHING_SERVICES,
|
||||||
|
STATE_LOBBY, STATE_QUITTING, STATE_SRV_SYNC,
|
||||||
|
STATE_SYNCING, STATE_WAITING)
|
||||||
|
from replication.interface import session
|
||||||
|
|
||||||
from . import utils
|
from .utils import find_from_attr, get_state_str, get_preferences
|
||||||
|
|
||||||
renderer = None
|
# Helper functions
|
||||||
|
|
||||||
|
def view3d_find() -> tuple:
|
||||||
def view3d_find():
|
|
||||||
""" Find the first 'VIEW_3D' windows found in areas
|
""" Find the first 'VIEW_3D' windows found in areas
|
||||||
|
|
||||||
:return: tuple(Area, Region, RegionView3D)
|
:return: tuple(Area, Region, RegionView3D)
|
||||||
@ -55,35 +61,48 @@ def refresh_3d_view():
|
|||||||
if area and region and rv3d:
|
if area and region and rv3d:
|
||||||
area.tag_redraw()
|
area.tag_redraw()
|
||||||
|
|
||||||
|
|
||||||
def refresh_sidebar_view():
|
def refresh_sidebar_view():
|
||||||
""" Refresh the blender sidebar
|
""" Refresh the blender viewport sidebar
|
||||||
"""
|
"""
|
||||||
area, region, rv3d = view3d_find()
|
area, region, rv3d = view3d_find()
|
||||||
|
|
||||||
|
if area:
|
||||||
area.regions[3].tag_redraw()
|
area.regions[3].tag_redraw()
|
||||||
|
|
||||||
def get_target(region, rv3d, coord):
|
|
||||||
|
def project_to_viewport(region: bpy.types.Region, rv3d: bpy.types.RegionView3D, coords: list, distance: float = 1.0) -> list:
|
||||||
|
""" Compute a projection from 2D to 3D viewport coordinate
|
||||||
|
|
||||||
|
:param region: target windows region
|
||||||
|
:type region: bpy.types.Region
|
||||||
|
:param rv3d: view 3D
|
||||||
|
:type rv3d: bpy.types.RegionView3D
|
||||||
|
:param coords: coordinate to project
|
||||||
|
:type coords: list
|
||||||
|
:param distance: distance offset into viewport
|
||||||
|
:type distance: float
|
||||||
|
:return: list of coordinates [x,y,z]
|
||||||
|
"""
|
||||||
target = [0, 0, 0]
|
target = [0, 0, 0]
|
||||||
|
|
||||||
if coord and region and rv3d:
|
if coords and region and rv3d:
|
||||||
view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coord)
|
view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coords)
|
||||||
ray_origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coord)
|
ray_origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coords)
|
||||||
target = ray_origin + view_vector
|
|
||||||
|
|
||||||
return [target.x, target.y, target.z]
|
|
||||||
|
|
||||||
|
|
||||||
def get_target_far(region, rv3d, coord, distance):
|
|
||||||
target = [0, 0, 0]
|
|
||||||
|
|
||||||
if coord and region and rv3d:
|
|
||||||
view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coord)
|
|
||||||
ray_origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coord)
|
|
||||||
target = ray_origin + view_vector * distance
|
target = ray_origin + view_vector * distance
|
||||||
|
|
||||||
return [target.x, target.y, target.z]
|
return [target.x, target.y, target.z]
|
||||||
|
|
||||||
def get_default_bbox(obj, radius):
|
|
||||||
|
def bbox_from_obj(obj: bpy.types.Object, radius: float) -> list:
|
||||||
|
""" Generate a bounding box for a given object by using its world matrix
|
||||||
|
|
||||||
|
:param obj: target object
|
||||||
|
:type obj: bpy.types.Object
|
||||||
|
:param radius: bounding box radius
|
||||||
|
:type radius: float
|
||||||
|
:return: list of 8 points [(x,y,z),...]
|
||||||
|
"""
|
||||||
coords = [
|
coords = [
|
||||||
(-radius, -radius, -radius), (+radius, -radius, -radius),
|
(-radius, -radius, -radius), (+radius, -radius, -radius),
|
||||||
(-radius, +radius, -radius), (+radius, +radius, -radius),
|
(-radius, +radius, -radius), (+radius, +radius, -radius),
|
||||||
@ -96,259 +115,379 @@ def get_default_bbox(obj, radius):
|
|||||||
return [(point.x, point.y, point.z)
|
return [(point.x, point.y, point.z)
|
||||||
for point in bbox_corners]
|
for point in bbox_corners]
|
||||||
|
|
||||||
def get_view_corners():
|
|
||||||
|
def generate_user_camera() -> list:
|
||||||
|
""" Generate a basic camera represention of the user point of view
|
||||||
|
|
||||||
|
:return: list of 7 points
|
||||||
|
"""
|
||||||
area, region, rv3d = view3d_find()
|
area, region, rv3d = view3d_find()
|
||||||
|
|
||||||
v1 = [0, 0, 0]
|
v1 = v2 = v3 = v4 = v5 = v6 = v7 = [0, 0, 0]
|
||||||
v2 = [0, 0, 0]
|
|
||||||
v3 = [0, 0, 0]
|
|
||||||
v4 = [0, 0, 0]
|
|
||||||
v5 = [0, 0, 0]
|
|
||||||
v6 = [0, 0, 0]
|
|
||||||
v7 = [0, 0, 0]
|
|
||||||
|
|
||||||
if area and region and rv3d:
|
if area and region and rv3d:
|
||||||
width = region.width
|
width = region.width
|
||||||
height = region.height
|
height = region.height
|
||||||
|
|
||||||
v1 = get_target(region, rv3d, (0, 0))
|
v1 = project_to_viewport(region, rv3d, (0, 0))
|
||||||
v3 = get_target(region, rv3d, (0, height))
|
v3 = project_to_viewport(region, rv3d, (0, height))
|
||||||
v2 = get_target(region, rv3d, (width, height))
|
v2 = project_to_viewport(region, rv3d, (width, height))
|
||||||
v4 = get_target(region, rv3d, (width, 0))
|
v4 = project_to_viewport(region, rv3d, (width, 0))
|
||||||
|
|
||||||
v5 = get_target(region, rv3d, (width/2, height/2))
|
v5 = project_to_viewport(region, rv3d, (width/2, height/2))
|
||||||
v6 = list(rv3d.view_location)
|
v6 = list(rv3d.view_location)
|
||||||
v7 = get_target_far(region, rv3d, (width/2, height/2), -.8)
|
v7 = project_to_viewport(
|
||||||
|
region, rv3d, (width/2, height/2), distance=-.8)
|
||||||
|
|
||||||
coords = [v1, v2, v3, v4, v5, v6, v7]
|
coords = [v1, v2, v3, v4, v5, v6, v7]
|
||||||
|
|
||||||
return coords
|
return coords
|
||||||
|
|
||||||
|
|
||||||
def get_client_2d(coords):
|
def project_to_screen(coords: list) -> list:
|
||||||
|
""" Project 3D coordinate to 2D screen coordinates
|
||||||
|
|
||||||
|
:param coords: 3D coordinates (x,y,z)
|
||||||
|
:type coords: list
|
||||||
|
:return: list of 2D coordinates [x,y]
|
||||||
|
"""
|
||||||
area, region, rv3d = view3d_find()
|
area, region, rv3d = view3d_find()
|
||||||
if area and region and rv3d:
|
if area and region and rv3d:
|
||||||
return view3d_utils.location_3d_to_region_2d(region, rv3d, coords)
|
return view3d_utils.location_3d_to_region_2d(region, rv3d, coords)
|
||||||
else:
|
else:
|
||||||
return (0, 0)
|
return (0, 0)
|
||||||
|
|
||||||
def get_bb_coords_from_obj(object, parent=None):
|
|
||||||
base = object.matrix_world if parent is None else parent.matrix_world
|
def get_bb_coords_from_obj(object: bpy.types.Object, instance: bpy.types.Object = None) -> list:
|
||||||
|
""" Generate bounding box in world coordinate from object bound box
|
||||||
|
|
||||||
|
:param object: target object
|
||||||
|
:type object: bpy.types.Object
|
||||||
|
:param instance: optionnal instance
|
||||||
|
:type instance: bpy.types.Object
|
||||||
|
:return: list of 8 points [(x,y,z),...]
|
||||||
|
"""
|
||||||
|
base = object.matrix_world
|
||||||
|
|
||||||
|
if instance:
|
||||||
|
scale = mathutils.Matrix.Diagonal(object.matrix_world.to_scale())
|
||||||
|
base = instance.matrix_world @ scale.to_4x4()
|
||||||
|
|
||||||
bbox_corners = [base @ mathutils.Vector(
|
bbox_corners = [base @ mathutils.Vector(
|
||||||
corner) for corner in object.bound_box]
|
corner) for corner in object.bound_box]
|
||||||
|
|
||||||
return [(point.x, point.y, point.z)
|
|
||||||
for point in bbox_corners]
|
return [(point.x, point.y, point.z) for point in bbox_corners]
|
||||||
|
|
||||||
|
|
||||||
def get_view_matrix():
|
def get_view_matrix() -> list:
|
||||||
|
""" Return the 3d viewport view matrix
|
||||||
|
|
||||||
|
:return: view matrix as a 4x4 list
|
||||||
|
"""
|
||||||
area, region, rv3d = view3d_find()
|
area, region, rv3d = view3d_find()
|
||||||
|
|
||||||
if area and region and rv3d:
|
if area and region and rv3d:
|
||||||
return [list(v) for v in rv3d.view_matrix]
|
return [list(v) for v in rv3d.view_matrix]
|
||||||
|
|
||||||
def update_presence(self, context):
|
|
||||||
global renderer
|
|
||||||
|
|
||||||
if 'renderer' in globals() and hasattr(renderer, 'run'):
|
class Widget(object):
|
||||||
if self.enable_presence:
|
""" Base class to define an interface element
|
||||||
renderer.run()
|
"""
|
||||||
|
draw_type: str = 'POST_VIEW' # Draw event type
|
||||||
|
|
||||||
|
def poll(self) -> bool:
|
||||||
|
"""Test if the widget can be drawn or not
|
||||||
|
|
||||||
|
:return: bool
|
||||||
|
"""
|
||||||
|
return True
|
||||||
|
|
||||||
|
def draw(self):
|
||||||
|
"""How to draw the widget
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
|
||||||
|
class UserFrustumWidget(Widget):
|
||||||
|
# Camera widget indices
|
||||||
|
indices = ((1, 3), (2, 1), (3, 0),
|
||||||
|
(2, 0), (4, 5), (1, 6),
|
||||||
|
(2, 6), (3, 6), (0, 6))
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
username):
|
||||||
|
self.username = username
|
||||||
|
self.settings = bpy.context.window_manager.session
|
||||||
|
|
||||||
|
@property
|
||||||
|
def data(self):
|
||||||
|
user = session.online_users.get(self.username)
|
||||||
|
if user:
|
||||||
|
return user.get('metadata')
|
||||||
else:
|
else:
|
||||||
renderer.stop()
|
return None
|
||||||
|
|
||||||
|
def poll(self):
|
||||||
|
if self.data is None:
|
||||||
|
return False
|
||||||
|
|
||||||
def update_overlay_settings(self, context):
|
scene_current = self.data.get('scene_current')
|
||||||
global renderer
|
view_corners = self.data.get('view_corners')
|
||||||
|
|
||||||
if renderer and not self.presence_show_selected:
|
return (scene_current == bpy.context.scene.name or
|
||||||
renderer.flush_selection()
|
self.settings.presence_show_far_user) and \
|
||||||
if renderer and not self.presence_show_user:
|
view_corners and \
|
||||||
renderer.flush_users()
|
self.settings.presence_show_user and \
|
||||||
|
self.settings.enable_presence
|
||||||
|
|
||||||
|
def draw(self):
|
||||||
|
location = self.data.get('view_corners')
|
||||||
|
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
|
||||||
|
positions = [tuple(coord) for coord in location]
|
||||||
|
|
||||||
class DrawFactory(object):
|
if len(positions) != 7:
|
||||||
def __init__(self):
|
|
||||||
self.d3d_items = {}
|
|
||||||
self.d2d_items = {}
|
|
||||||
self.draw3d_handle = None
|
|
||||||
self.draw2d_handle = None
|
|
||||||
self.draw_event = None
|
|
||||||
self.coords = None
|
|
||||||
self.active_object = None
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
self.register_handlers()
|
|
||||||
|
|
||||||
def stop(self):
|
|
||||||
self.flush_users()
|
|
||||||
self.flush_selection()
|
|
||||||
self.unregister_handlers()
|
|
||||||
|
|
||||||
refresh_3d_view()
|
|
||||||
|
|
||||||
def register_handlers(self):
|
|
||||||
self.draw3d_handle = bpy.types.SpaceView3D.draw_handler_add(
|
|
||||||
self.draw3d_callback, (), 'WINDOW', 'POST_VIEW')
|
|
||||||
self.draw2d_handle = bpy.types.SpaceView3D.draw_handler_add(
|
|
||||||
self.draw2d_callback, (), 'WINDOW', 'POST_PIXEL')
|
|
||||||
|
|
||||||
def unregister_handlers(self):
|
|
||||||
if self.draw2d_handle:
|
|
||||||
bpy.types.SpaceView3D.draw_handler_remove(
|
|
||||||
self.draw2d_handle, "WINDOW")
|
|
||||||
self.draw2d_handle = None
|
|
||||||
|
|
||||||
if self.draw3d_handle:
|
|
||||||
bpy.types.SpaceView3D.draw_handler_remove(
|
|
||||||
self.draw3d_handle, "WINDOW")
|
|
||||||
self.draw3d_handle = None
|
|
||||||
|
|
||||||
self.d3d_items.clear()
|
|
||||||
self.d2d_items.clear()
|
|
||||||
|
|
||||||
def flush_selection(self, user=None):
|
|
||||||
key_to_remove = []
|
|
||||||
select_key = f"{user}_select" if user else "select"
|
|
||||||
for k in self.d3d_items.keys():
|
|
||||||
|
|
||||||
if select_key in k:
|
|
||||||
key_to_remove.append(k)
|
|
||||||
|
|
||||||
for k in key_to_remove:
|
|
||||||
del self.d3d_items[k]
|
|
||||||
|
|
||||||
def flush_users(self):
|
|
||||||
key_to_remove = []
|
|
||||||
for k in self.d3d_items.keys():
|
|
||||||
if "select" not in k:
|
|
||||||
key_to_remove.append(k)
|
|
||||||
|
|
||||||
for k in key_to_remove:
|
|
||||||
del self.d3d_items[k]
|
|
||||||
|
|
||||||
self.d2d_items.clear()
|
|
||||||
|
|
||||||
def draw_client_selection(self, client_id, client_color, client_selection):
|
|
||||||
local_user = utils.get_preferences().username
|
|
||||||
|
|
||||||
if local_user != client_id:
|
|
||||||
self.flush_selection(client_id)
|
|
||||||
|
|
||||||
for select_ob in client_selection:
|
|
||||||
drawable_key = f"{client_id}_select_{select_ob}"
|
|
||||||
|
|
||||||
ob = utils.find_from_attr("uuid", select_ob, bpy.data.objects)
|
|
||||||
if not ob:
|
|
||||||
return
|
return
|
||||||
|
|
||||||
if ob.type == 'EMPTY':
|
|
||||||
# TODO: Child case
|
|
||||||
# Collection instance case
|
|
||||||
indices = (
|
|
||||||
(0, 1), (1, 2), (2, 3), (0, 3),
|
|
||||||
(4, 5), (5, 6), (6, 7), (4, 7),
|
|
||||||
(0, 4), (1, 5), (2, 6), (3, 7))
|
|
||||||
if ob.instance_collection:
|
|
||||||
for obj in ob.instance_collection.objects:
|
|
||||||
if obj.type == 'MESH':
|
|
||||||
self.append_3d_item(
|
|
||||||
drawable_key,
|
|
||||||
client_color,
|
|
||||||
get_bb_coords_from_obj(obj, parent=ob),
|
|
||||||
indices)
|
|
||||||
|
|
||||||
if ob.type in ['MESH','META']:
|
|
||||||
indices = (
|
|
||||||
(0, 1), (1, 2), (2, 3), (0, 3),
|
|
||||||
(4, 5), (5, 6), (6, 7), (4, 7),
|
|
||||||
(0, 4), (1, 5), (2, 6), (3, 7))
|
|
||||||
|
|
||||||
self.append_3d_item(
|
|
||||||
drawable_key,
|
|
||||||
client_color,
|
|
||||||
get_bb_coords_from_obj(ob),
|
|
||||||
indices)
|
|
||||||
else:
|
|
||||||
indices = (
|
|
||||||
(0, 1), (0, 2), (1, 3), (2, 3),
|
|
||||||
(4, 5), (4, 6), (5, 7), (6, 7),
|
|
||||||
(0, 4), (1, 5), (2, 6), (3, 7))
|
|
||||||
|
|
||||||
self.append_3d_item(
|
|
||||||
drawable_key,
|
|
||||||
client_color,
|
|
||||||
get_default_bbox(ob, ob.scale.x),
|
|
||||||
indices)
|
|
||||||
|
|
||||||
def append_3d_item(self,key,color, coords, indices):
|
|
||||||
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
|
|
||||||
color = color
|
|
||||||
batch = batch_for_shader(
|
batch = batch_for_shader(
|
||||||
shader, 'LINES', {"pos": coords}, indices=indices)
|
shader,
|
||||||
|
'LINES',
|
||||||
|
{"pos": positions},
|
||||||
|
indices=self.indices)
|
||||||
|
|
||||||
self.d3d_items[key] = (shader, batch, color)
|
bgl.glLineWidth(2.)
|
||||||
|
|
||||||
def draw_client_camera(self, client_id, client_location, client_color):
|
|
||||||
if client_location:
|
|
||||||
local_user = utils.get_preferences().username
|
|
||||||
|
|
||||||
if local_user != client_id:
|
|
||||||
try:
|
|
||||||
indices = (
|
|
||||||
(1, 3), (2, 1), (3, 0),
|
|
||||||
(2, 0), (4, 5), (1, 6),
|
|
||||||
(2, 6), (3, 6), (0, 6)
|
|
||||||
)
|
|
||||||
|
|
||||||
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
|
|
||||||
position = [tuple(coord) for coord in client_location]
|
|
||||||
color = client_color
|
|
||||||
|
|
||||||
batch = batch_for_shader(
|
|
||||||
shader, 'LINES', {"pos": position}, indices=indices)
|
|
||||||
|
|
||||||
self.d3d_items[client_id] = (shader, batch, color)
|
|
||||||
self.d2d_items[client_id] = (position[1], client_id, color)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logging.error(f"Draw client exception: {e}")
|
|
||||||
|
|
||||||
def draw3d_callback(self):
|
|
||||||
bgl.glLineWidth(1.5)
|
|
||||||
bgl.glEnable(bgl.GL_DEPTH_TEST)
|
bgl.glEnable(bgl.GL_DEPTH_TEST)
|
||||||
bgl.glEnable(bgl.GL_BLEND)
|
bgl.glEnable(bgl.GL_BLEND)
|
||||||
bgl.glEnable(bgl.GL_LINE_SMOOTH)
|
bgl.glEnable(bgl.GL_LINE_SMOOTH)
|
||||||
|
|
||||||
try:
|
|
||||||
for shader, batch, color in self.d3d_items.values():
|
|
||||||
shader.bind()
|
shader.bind()
|
||||||
shader.uniform_float("color", color)
|
shader.uniform_float("color", self.data.get('color'))
|
||||||
batch.draw(shader)
|
batch.draw(shader)
|
||||||
except Exception:
|
|
||||||
logging.error("3D Exception")
|
|
||||||
|
|
||||||
def draw2d_callback(self):
|
|
||||||
for position, font, color in self.d2d_items.values():
|
class UserSelectionWidget(Widget):
|
||||||
try:
|
def __init__(
|
||||||
coords = get_client_2d(position)
|
self,
|
||||||
|
username):
|
||||||
|
self.username = username
|
||||||
|
self.settings = bpy.context.window_manager.session
|
||||||
|
|
||||||
|
@property
|
||||||
|
def data(self):
|
||||||
|
user = session.online_users.get(self.username)
|
||||||
|
if user:
|
||||||
|
return user.get('metadata')
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def poll(self):
|
||||||
|
if self.data is None:
|
||||||
|
return False
|
||||||
|
|
||||||
|
user_selection = self.data.get('selected_objects')
|
||||||
|
scene_current = self.data.get('scene_current')
|
||||||
|
|
||||||
|
return (scene_current == bpy.context.scene.name or
|
||||||
|
self.settings.presence_show_far_user) and \
|
||||||
|
user_selection and \
|
||||||
|
self.settings.presence_show_selected and \
|
||||||
|
self.settings.enable_presence
|
||||||
|
|
||||||
|
def draw(self):
|
||||||
|
user_selection = self.data.get('selected_objects')
|
||||||
|
for select_ob in user_selection:
|
||||||
|
ob = find_from_attr("uuid", select_ob, bpy.data.objects)
|
||||||
|
if not ob:
|
||||||
|
return
|
||||||
|
|
||||||
|
vertex_pos = bbox_from_obj(ob, 1.0)
|
||||||
|
vertex_indices = ((0, 1), (0, 2), (1, 3), (2, 3),
|
||||||
|
(4, 5), (4, 6), (5, 7), (6, 7),
|
||||||
|
(0, 4), (1, 5), (2, 6), (3, 7))
|
||||||
|
|
||||||
|
if ob.instance_collection:
|
||||||
|
for obj in ob.instance_collection.objects:
|
||||||
|
if obj.type == 'MESH' and hasattr(obj, 'bound_box'):
|
||||||
|
vertex_pos = get_bb_coords_from_obj(obj, instance=ob)
|
||||||
|
break
|
||||||
|
elif ob.type == 'EMPTY':
|
||||||
|
vertex_pos = bbox_from_obj(ob, ob.empty_display_size)
|
||||||
|
elif ob.type == 'LIGHT':
|
||||||
|
vertex_pos = bbox_from_obj(ob, ob.data.shadow_soft_size)
|
||||||
|
elif ob.type == 'LIGHT_PROBE':
|
||||||
|
vertex_pos = bbox_from_obj(ob, ob.data.influence_distance)
|
||||||
|
elif ob.type == 'CAMERA':
|
||||||
|
vertex_pos = bbox_from_obj(ob, ob.data.display_size)
|
||||||
|
elif hasattr(ob, 'bound_box'):
|
||||||
|
vertex_indices = (
|
||||||
|
(0, 1), (1, 2), (2, 3), (0, 3),
|
||||||
|
(4, 5), (5, 6), (6, 7), (4, 7),
|
||||||
|
(0, 4), (1, 5), (2, 6), (3, 7))
|
||||||
|
vertex_pos = get_bb_coords_from_obj(ob)
|
||||||
|
|
||||||
|
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
|
||||||
|
batch = batch_for_shader(
|
||||||
|
shader,
|
||||||
|
'LINES',
|
||||||
|
{"pos": vertex_pos},
|
||||||
|
indices=vertex_indices)
|
||||||
|
|
||||||
|
shader.bind()
|
||||||
|
shader.uniform_float("color", self.data.get('color'))
|
||||||
|
batch.draw(shader)
|
||||||
|
|
||||||
|
|
||||||
|
class UserNameWidget(Widget):
|
||||||
|
draw_type = 'POST_PIXEL'
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
username):
|
||||||
|
self.username = username
|
||||||
|
self.settings = bpy.context.window_manager.session
|
||||||
|
|
||||||
|
@property
|
||||||
|
def data(self):
|
||||||
|
user = session.online_users.get(self.username)
|
||||||
|
if user:
|
||||||
|
return user.get('metadata')
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def poll(self):
|
||||||
|
if self.data is None:
|
||||||
|
return False
|
||||||
|
|
||||||
|
scene_current = self.data.get('scene_current')
|
||||||
|
view_corners = self.data.get('view_corners')
|
||||||
|
|
||||||
|
return (scene_current == bpy.context.scene.name or
|
||||||
|
self.settings.presence_show_far_user) and \
|
||||||
|
view_corners and \
|
||||||
|
self.settings.presence_show_user and \
|
||||||
|
self.settings.enable_presence
|
||||||
|
|
||||||
|
def draw(self):
|
||||||
|
view_corners = self.data.get('view_corners')
|
||||||
|
color = self.data.get('color')
|
||||||
|
position = [tuple(coord) for coord in view_corners]
|
||||||
|
coords = project_to_screen(position[1])
|
||||||
|
|
||||||
if coords:
|
if coords:
|
||||||
blf.position(0, coords[0], coords[1]+10, 0)
|
blf.position(0, coords[0], coords[1]+10, 0)
|
||||||
blf.size(0, 16, 72)
|
blf.size(0, 16, 72)
|
||||||
blf.color(0, color[0], color[1], color[2], color[3])
|
blf.color(0, color[0], color[1], color[2], color[3])
|
||||||
blf.draw(0, font)
|
blf.draw(0, self.username)
|
||||||
|
|
||||||
except Exception:
|
|
||||||
logging.error("2D EXCEPTION")
|
class SessionStatusWidget(Widget):
|
||||||
|
draw_type = 'POST_PIXEL'
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.preferences = get_preferences()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def settings(self):
|
||||||
|
return getattr(bpy.context.window_manager, 'session', None)
|
||||||
|
|
||||||
|
def poll(self):
|
||||||
|
return self.settings and self.settings.presence_show_session_status and \
|
||||||
|
self.settings.enable_presence
|
||||||
|
|
||||||
|
def draw(self):
|
||||||
|
text_scale = self.preferences.presence_hud_scale
|
||||||
|
ui_scale = bpy.context.preferences.view.ui_scale
|
||||||
|
color = [1, 1, 0, 1]
|
||||||
|
state = session.state.get('STATE')
|
||||||
|
state_str = f"{get_state_str(state)}"
|
||||||
|
|
||||||
|
if state == STATE_ACTIVE:
|
||||||
|
color = [0, 1, 0, 1]
|
||||||
|
elif state == STATE_INITIAL:
|
||||||
|
color = [1, 0, 0, 1]
|
||||||
|
hpos = (self.preferences.presence_hud_hpos*bpy.context.area.width)/100
|
||||||
|
vpos = (self.preferences.presence_hud_vpos*bpy.context.area.height)/100
|
||||||
|
|
||||||
|
blf.position(0, hpos, vpos, 0)
|
||||||
|
blf.size(0, int(text_scale*ui_scale), 72)
|
||||||
|
blf.color(0, color[0], color[1], color[2], color[3])
|
||||||
|
blf.draw(0, state_str)
|
||||||
|
|
||||||
|
|
||||||
|
class DrawFactory(object):
|
||||||
|
def __init__(self):
|
||||||
|
self.post_view_handle = None
|
||||||
|
self.post_pixel_handle = None
|
||||||
|
self.widgets = {}
|
||||||
|
|
||||||
|
def add_widget(self, name: str, widget: Widget):
|
||||||
|
self.widgets[name] = widget
|
||||||
|
|
||||||
|
def remove_widget(self, name: str):
|
||||||
|
if name in self.widgets:
|
||||||
|
del self.widgets[name]
|
||||||
|
else:
|
||||||
|
logging.error(f"Widget {name} not existing")
|
||||||
|
|
||||||
|
def clear_widgets(self):
|
||||||
|
self.widgets.clear()
|
||||||
|
|
||||||
|
def register_handlers(self):
|
||||||
|
self.post_view_handle = bpy.types.SpaceView3D.draw_handler_add(
|
||||||
|
self.post_view_callback,
|
||||||
|
(),
|
||||||
|
'WINDOW',
|
||||||
|
'POST_VIEW')
|
||||||
|
self.post_pixel_handle = bpy.types.SpaceView3D.draw_handler_add(
|
||||||
|
self.post_pixel_callback,
|
||||||
|
(),
|
||||||
|
'WINDOW',
|
||||||
|
'POST_PIXEL')
|
||||||
|
|
||||||
|
def unregister_handlers(self):
|
||||||
|
if self.post_pixel_handle:
|
||||||
|
bpy.types.SpaceView3D.draw_handler_remove(
|
||||||
|
self.post_pixel_handle,
|
||||||
|
"WINDOW")
|
||||||
|
self.post_pixel_handle = None
|
||||||
|
|
||||||
|
if self.post_view_handle:
|
||||||
|
bpy.types.SpaceView3D.draw_handler_remove(
|
||||||
|
self.post_view_handle,
|
||||||
|
"WINDOW")
|
||||||
|
self.post_view_handle = None
|
||||||
|
|
||||||
|
def post_view_callback(self):
|
||||||
|
try:
|
||||||
|
for widget in self.widgets.values():
|
||||||
|
if widget.draw_type == 'POST_VIEW' and widget.poll():
|
||||||
|
widget.draw()
|
||||||
|
except Exception as e:
|
||||||
|
logging.error(
|
||||||
|
f"Post view widget exception: {e} \n {traceback.print_exc()}")
|
||||||
|
|
||||||
|
def post_pixel_callback(self):
|
||||||
|
try:
|
||||||
|
for widget in self.widgets.values():
|
||||||
|
if widget.draw_type == 'POST_PIXEL' and widget.poll():
|
||||||
|
widget.draw()
|
||||||
|
except Exception as e:
|
||||||
|
logging.error(
|
||||||
|
f"Post pixel widget Exception: {e} \n {traceback.print_exc()}")
|
||||||
|
|
||||||
|
|
||||||
|
this = sys.modules[__name__]
|
||||||
|
this.renderer = DrawFactory()
|
||||||
|
|
||||||
|
|
||||||
def register():
|
def register():
|
||||||
global renderer
|
this.renderer.register_handlers()
|
||||||
renderer = DrawFactory()
|
|
||||||
|
this.renderer.add_widget("session_status", SessionStatusWidget())
|
||||||
|
|
||||||
|
|
||||||
def unregister():
|
def unregister():
|
||||||
global renderer
|
this.renderer.unregister_handlers()
|
||||||
renderer.unregister_handlers()
|
|
||||||
|
|
||||||
del renderer
|
this.renderer.clear_widgets()
|
||||||
|
351
multi_user/ui.py
@ -18,8 +18,8 @@
|
|||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
|
|
||||||
from . import operators, utils
|
from .utils import get_preferences, get_expanded_icon, get_folder_size, get_state_str
|
||||||
from .libs.replication.replication.constants import (ADDED, ERROR, FETCHED,
|
from replication.constants import (ADDED, ERROR, FETCHED,
|
||||||
MODIFIED, RP_COMMON, UP,
|
MODIFIED, RP_COMMON, UP,
|
||||||
STATE_ACTIVE, STATE_AUTH,
|
STATE_ACTIVE, STATE_AUTH,
|
||||||
STATE_CONFIG, STATE_SYNCING,
|
STATE_CONFIG, STATE_SYNCING,
|
||||||
@ -27,13 +27,16 @@ from .libs.replication.replication.constants import (ADDED, ERROR, FETCHED,
|
|||||||
STATE_WAITING, STATE_QUITTING,
|
STATE_WAITING, STATE_QUITTING,
|
||||||
STATE_LOBBY,
|
STATE_LOBBY,
|
||||||
STATE_LAUNCHING_SERVICES)
|
STATE_LAUNCHING_SERVICES)
|
||||||
|
from replication import __version__
|
||||||
|
from replication.interface import session
|
||||||
|
|
||||||
ICONS_PROP_STATES = ['TRIA_DOWN', # ADDED
|
ICONS_PROP_STATES = ['TRIA_DOWN', # ADDED
|
||||||
'TRIA_UP', # COMMITED
|
'TRIA_UP', # COMMITED
|
||||||
'KEYTYPE_KEYFRAME_VEC', # PUSHED
|
'KEYTYPE_KEYFRAME_VEC', # PUSHED
|
||||||
'TRIA_DOWN', # FETCHED
|
'TRIA_DOWN', # FETCHED
|
||||||
'FILE_REFRESH', # UP
|
'RECOVER_LAST', # RESET
|
||||||
'TRIA_UP'] # CHANGED
|
'TRIA_UP', # CHANGED
|
||||||
|
'ERROR'] # ERROR
|
||||||
|
|
||||||
|
|
||||||
def printProgressBar(iteration, total, prefix='', suffix='', decimals=1, length=100, fill='█', fill_empty=' '):
|
def printProgressBar(iteration, total, prefix='', suffix='', decimals=1, length=100, fill='█', fill_empty=' '):
|
||||||
@ -50,50 +53,26 @@ def printProgressBar(iteration, total, prefix='', suffix='', decimals=1, length=
|
|||||||
From here:
|
From here:
|
||||||
https://gist.github.com/greenstick/b23e475d2bfdc3a82e34eaa1f6781ee4
|
https://gist.github.com/greenstick/b23e475d2bfdc3a82e34eaa1f6781ee4
|
||||||
"""
|
"""
|
||||||
|
if total == 0:
|
||||||
|
return ""
|
||||||
filledLength = int(length * iteration // total)
|
filledLength = int(length * iteration // total)
|
||||||
bar = fill * filledLength + fill_empty * (length - filledLength)
|
bar = fill * filledLength + fill_empty * (length - filledLength)
|
||||||
return f"{prefix} |{bar}| {iteration}/{total}{suffix}"
|
return f"{prefix} |{bar}| {iteration}/{total}{suffix}"
|
||||||
|
|
||||||
|
|
||||||
def get_state_str(state):
|
|
||||||
state_str = 'UNKNOWN'
|
|
||||||
if state == STATE_WAITING:
|
|
||||||
state_str = 'WARMING UP DATA'
|
|
||||||
elif state == STATE_SYNCING:
|
|
||||||
state_str = 'FETCHING'
|
|
||||||
elif state == STATE_AUTH:
|
|
||||||
state_str = 'AUTHENTIFICATION'
|
|
||||||
elif state == STATE_CONFIG:
|
|
||||||
state_str = 'CONFIGURATION'
|
|
||||||
elif state == STATE_ACTIVE:
|
|
||||||
state_str = 'ONLINE'
|
|
||||||
elif state == STATE_SRV_SYNC:
|
|
||||||
state_str = 'PUSHING'
|
|
||||||
elif state == STATE_INITIAL:
|
|
||||||
state_str = 'INIT'
|
|
||||||
elif state == STATE_QUITTING:
|
|
||||||
state_str = 'QUITTING'
|
|
||||||
elif state == STATE_LAUNCHING_SERVICES:
|
|
||||||
state_str = 'LAUNCHING SERVICES'
|
|
||||||
elif state == STATE_LOBBY:
|
|
||||||
state_str = 'LOBBY'
|
|
||||||
|
|
||||||
return state_str
|
|
||||||
|
|
||||||
|
|
||||||
class SESSION_PT_settings(bpy.types.Panel):
|
class SESSION_PT_settings(bpy.types.Panel):
|
||||||
"""Settings panel"""
|
"""Settings panel"""
|
||||||
bl_idname = "MULTIUSER_SETTINGS_PT_panel"
|
bl_idname = "MULTIUSER_SETTINGS_PT_panel"
|
||||||
bl_label = ""
|
bl_label = " "
|
||||||
bl_space_type = 'VIEW_3D'
|
bl_space_type = 'VIEW_3D'
|
||||||
bl_region_type = 'UI'
|
bl_region_type = 'UI'
|
||||||
bl_category = "Multiuser"
|
bl_category = "Multiuser"
|
||||||
|
|
||||||
def draw_header(self, context):
|
def draw_header(self, context):
|
||||||
layout = self.layout
|
layout = self.layout
|
||||||
if operators.client and operators.client.state['STATE'] != STATE_INITIAL:
|
if session and session.state['STATE'] != STATE_INITIAL:
|
||||||
cli_state = operators.client.state
|
cli_state = session.state
|
||||||
state = operators.client.state.get('STATE')
|
state = session.state.get('STATE')
|
||||||
connection_icon = "KEYTYPE_MOVING_HOLD_VEC"
|
connection_icon = "KEYTYPE_MOVING_HOLD_VEC"
|
||||||
|
|
||||||
if state == STATE_ACTIVE:
|
if state == STATE_ACTIVE:
|
||||||
@ -103,72 +82,54 @@ class SESSION_PT_settings(bpy.types.Panel):
|
|||||||
|
|
||||||
layout.label(text=f"Session - {get_state_str(cli_state['STATE'])}", icon=connection_icon)
|
layout.label(text=f"Session - {get_state_str(cli_state['STATE'])}", icon=connection_icon)
|
||||||
else:
|
else:
|
||||||
layout.label(text="Session",icon="PROP_OFF")
|
layout.label(text=f"Session - v{__version__}",icon="PROP_OFF")
|
||||||
|
|
||||||
def draw(self, context):
|
def draw(self, context):
|
||||||
layout = self.layout
|
layout = self.layout
|
||||||
layout.use_property_split = True
|
layout.use_property_split = True
|
||||||
row = layout.row()
|
row = layout.row()
|
||||||
runtime_settings = context.window_manager.session
|
runtime_settings = context.window_manager.session
|
||||||
settings = utils.get_preferences()
|
settings = get_preferences()
|
||||||
|
|
||||||
if hasattr(context.window_manager, 'session'):
|
if hasattr(context.window_manager, 'session'):
|
||||||
# STATE INITIAL
|
# STATE INITIAL
|
||||||
if not operators.client \
|
if not session \
|
||||||
or (operators.client and operators.client.state['STATE'] == STATE_INITIAL):
|
or (session and session.state['STATE'] == STATE_INITIAL):
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
cli_state = operators.client.state
|
cli_state = session.state
|
||||||
|
|
||||||
|
|
||||||
row = layout.row()
|
row = layout.row()
|
||||||
|
|
||||||
current_state = cli_state['STATE']
|
current_state = cli_state['STATE']
|
||||||
|
info_msg = None
|
||||||
|
|
||||||
# STATE ACTIVE
|
if current_state in [STATE_ACTIVE]:
|
||||||
if current_state in [STATE_ACTIVE, STATE_LOBBY]:
|
row = row.split(factor=0.3)
|
||||||
row.operator("session.stop", icon='QUIT', text="Exit")
|
row.prop(settings.sync_flags, "sync_render_settings",text="",icon_only=True, icon='SCENE')
|
||||||
row = layout.row()
|
row.prop(settings.sync_flags, "sync_during_editmode", text="",icon_only=True, icon='EDITMODE_HLT')
|
||||||
if runtime_settings.is_host:
|
row.prop(settings.sync_flags, "sync_active_camera", text="",icon_only=True, icon='OBJECT_DATAMODE')
|
||||||
row = row.box()
|
|
||||||
row.label(text=f"{runtime_settings.internet_ip}:{settings.port}", icon='INFO')
|
|
||||||
row = layout.row()
|
|
||||||
|
|
||||||
# CONNECTION STATE
|
row= layout.row()
|
||||||
elif current_state in [STATE_SRV_SYNC,
|
|
||||||
STATE_SYNCING,
|
|
||||||
STATE_AUTH,
|
|
||||||
STATE_CONFIG,
|
|
||||||
STATE_WAITING]:
|
|
||||||
|
|
||||||
if cli_state['STATE'] in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]:
|
if current_state in [STATE_ACTIVE] and runtime_settings.is_host:
|
||||||
box = row.box()
|
info_msg = f"LAN: {runtime_settings.internet_ip}"
|
||||||
box.label(text=printProgressBar(
|
if current_state == STATE_LOBBY:
|
||||||
|
info_msg = "Waiting for the session to start."
|
||||||
|
|
||||||
|
if info_msg:
|
||||||
|
info_box = row.box()
|
||||||
|
info_box.row().label(text=info_msg,icon='INFO')
|
||||||
|
|
||||||
|
# Progress bar
|
||||||
|
if current_state in [STATE_SYNCING, STATE_SRV_SYNC, STATE_WAITING]:
|
||||||
|
info_box = row.box()
|
||||||
|
info_box.row().label(text=printProgressBar(
|
||||||
cli_state['CURRENT'],
|
cli_state['CURRENT'],
|
||||||
cli_state['TOTAL'],
|
cli_state['TOTAL'],
|
||||||
length=16
|
length=16
|
||||||
))
|
))
|
||||||
|
|
||||||
row = layout.row()
|
layout.row().operator("session.stop", icon='QUIT', text="Exit")
|
||||||
row.operator("session.stop", icon='QUIT', text="CANCEL")
|
|
||||||
elif current_state == STATE_QUITTING:
|
|
||||||
row = layout.row()
|
|
||||||
box = row.box()
|
|
||||||
|
|
||||||
num_online_services = 0
|
|
||||||
for name, state in operators.client.services_state.items():
|
|
||||||
if state == STATE_ACTIVE:
|
|
||||||
num_online_services += 1
|
|
||||||
|
|
||||||
total_online_services = len(
|
|
||||||
operators.client.services_state)
|
|
||||||
|
|
||||||
box.label(text=printProgressBar(
|
|
||||||
total_online_services-num_online_services,
|
|
||||||
total_online_services,
|
|
||||||
length=16
|
|
||||||
))
|
|
||||||
|
|
||||||
|
|
||||||
class SESSION_PT_settings_network(bpy.types.Panel):
|
class SESSION_PT_settings_network(bpy.types.Panel):
|
||||||
bl_idname = "MULTIUSER_SETTINGS_NETWORK_PT_panel"
|
bl_idname = "MULTIUSER_SETTINGS_NETWORK_PT_panel"
|
||||||
@ -179,8 +140,8 @@ class SESSION_PT_settings_network(bpy.types.Panel):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def poll(cls, context):
|
def poll(cls, context):
|
||||||
return not operators.client \
|
return not session \
|
||||||
or (operators.client and operators.client.state['STATE'] == 0)
|
or (session and session.state['STATE'] == 0)
|
||||||
|
|
||||||
def draw_header(self, context):
|
def draw_header(self, context):
|
||||||
self.layout.label(text="", icon='URL')
|
self.layout.label(text="", icon='URL')
|
||||||
@ -189,7 +150,7 @@ class SESSION_PT_settings_network(bpy.types.Panel):
|
|||||||
layout = self.layout
|
layout = self.layout
|
||||||
|
|
||||||
runtime_settings = context.window_manager.session
|
runtime_settings = context.window_manager.session
|
||||||
settings = utils.get_preferences()
|
settings = get_preferences()
|
||||||
|
|
||||||
# USER SETTINGS
|
# USER SETTINGS
|
||||||
row = layout.row()
|
row = layout.row()
|
||||||
@ -237,8 +198,8 @@ class SESSION_PT_settings_user(bpy.types.Panel):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def poll(cls, context):
|
def poll(cls, context):
|
||||||
return not operators.client \
|
return not session \
|
||||||
or (operators.client and operators.client.state['STATE'] == 0)
|
or (session and session.state['STATE'] == 0)
|
||||||
|
|
||||||
def draw_header(self, context):
|
def draw_header(self, context):
|
||||||
self.layout.label(text="", icon='USER')
|
self.layout.label(text="", icon='USER')
|
||||||
@ -247,7 +208,7 @@ class SESSION_PT_settings_user(bpy.types.Panel):
|
|||||||
layout = self.layout
|
layout = self.layout
|
||||||
|
|
||||||
runtime_settings = context.window_manager.session
|
runtime_settings = context.window_manager.session
|
||||||
settings = utils.get_preferences()
|
settings = get_preferences()
|
||||||
|
|
||||||
row = layout.row()
|
row = layout.row()
|
||||||
# USER SETTINGS
|
# USER SETTINGS
|
||||||
@ -268,8 +229,8 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def poll(cls, context):
|
def poll(cls, context):
|
||||||
return not operators.client \
|
return not session \
|
||||||
or (operators.client and operators.client.state['STATE'] == 0)
|
or (session and session.state['STATE'] == 0)
|
||||||
|
|
||||||
def draw_header(self, context):
|
def draw_header(self, context):
|
||||||
self.layout.label(text="", icon='PREFERENCES')
|
self.layout.label(text="", icon='PREFERENCES')
|
||||||
@ -278,11 +239,18 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
|
|||||||
layout = self.layout
|
layout = self.layout
|
||||||
|
|
||||||
runtime_settings = context.window_manager.session
|
runtime_settings = context.window_manager.session
|
||||||
settings = utils.get_preferences()
|
settings = get_preferences()
|
||||||
|
|
||||||
|
|
||||||
net_section = layout.row().box()
|
net_section = layout.row().box()
|
||||||
net_section.label(text="Network ", icon='TRIA_DOWN')
|
net_section.prop(
|
||||||
|
settings,
|
||||||
|
"sidebar_advanced_net_expanded",
|
||||||
|
text="Network",
|
||||||
|
icon=get_expanded_icon(settings.sidebar_advanced_net_expanded),
|
||||||
|
emboss=False)
|
||||||
|
|
||||||
|
if settings.sidebar_advanced_net_expanded:
|
||||||
net_section_row = net_section.row()
|
net_section_row = net_section.row()
|
||||||
net_section_row.label(text="IPC Port:")
|
net_section_row.label(text="IPC Port:")
|
||||||
net_section_row.prop(settings, "ipc_port", text="")
|
net_section_row.prop(settings, "ipc_port", text="")
|
||||||
@ -291,16 +259,40 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
|
|||||||
net_section_row.prop(settings, "connection_timeout", text="")
|
net_section_row.prop(settings, "connection_timeout", text="")
|
||||||
|
|
||||||
replication_section = layout.row().box()
|
replication_section = layout.row().box()
|
||||||
replication_section.label(text="Replication ", icon='TRIA_DOWN')
|
replication_section.prop(
|
||||||
replication_section_row = replication_section.row()
|
settings,
|
||||||
if runtime_settings.session_mode == 'HOST':
|
"sidebar_advanced_rep_expanded",
|
||||||
replication_section_row.prop(settings.sync_flags, "sync_render_settings")
|
text="Replication",
|
||||||
|
icon=get_expanded_icon(settings.sidebar_advanced_rep_expanded),
|
||||||
|
emboss=False)
|
||||||
|
|
||||||
|
if settings.sidebar_advanced_rep_expanded:
|
||||||
replication_section_row = replication_section.row()
|
replication_section_row = replication_section.row()
|
||||||
replication_section_row.label(text="Per data type timers:")
|
|
||||||
|
replication_section_row.label(text="Sync flags", icon='COLLECTION_NEW')
|
||||||
replication_section_row = replication_section.row()
|
replication_section_row = replication_section.row()
|
||||||
|
replication_section_row.prop(settings.sync_flags, "sync_render_settings")
|
||||||
|
replication_section_row = replication_section.row()
|
||||||
|
replication_section_row.prop(settings.sync_flags, "sync_active_camera")
|
||||||
|
replication_section_row = replication_section.row()
|
||||||
|
|
||||||
|
replication_section_row.prop(settings.sync_flags, "sync_during_editmode")
|
||||||
|
replication_section_row = replication_section.row()
|
||||||
|
if settings.sync_flags.sync_during_editmode:
|
||||||
|
warning = replication_section_row.box()
|
||||||
|
warning.label(text="Don't use this with heavy meshes !", icon='ERROR')
|
||||||
|
replication_section_row = replication_section.row()
|
||||||
|
|
||||||
|
replication_section_row.label(text="Update method", icon='RECOVER_LAST')
|
||||||
|
replication_section_row = replication_section.row()
|
||||||
|
replication_section_row.prop(settings, "update_method", expand=True)
|
||||||
|
replication_section_row = replication_section.row()
|
||||||
|
replication_timers = replication_section_row.box()
|
||||||
|
replication_timers.label(text="Replication timers", icon='TIME')
|
||||||
|
if settings.update_method == "DEFAULT":
|
||||||
|
replication_timers = replication_timers.row()
|
||||||
# Replication frequencies
|
# Replication frequencies
|
||||||
flow = replication_section_row .grid_flow(
|
flow = replication_timers.grid_flow(
|
||||||
row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
|
row_major=True, columns=0, even_columns=True, even_rows=False, align=True)
|
||||||
line = flow.row(align=True)
|
line = flow.row(align=True)
|
||||||
line.label(text=" ")
|
line.label(text=" ")
|
||||||
@ -314,8 +306,40 @@ class SESSION_PT_advanced_settings(bpy.types.Panel):
|
|||||||
line.separator()
|
line.separator()
|
||||||
line.prop(item, "bl_delay_refresh", text="")
|
line.prop(item, "bl_delay_refresh", text="")
|
||||||
line.prop(item, "bl_delay_apply", text="")
|
line.prop(item, "bl_delay_apply", text="")
|
||||||
|
else:
|
||||||
|
replication_timers = replication_timers.row()
|
||||||
|
replication_timers.label(text="Update rate (ms):")
|
||||||
|
replication_timers.prop(settings, "depsgraph_update_rate", text="")
|
||||||
|
|
||||||
|
cache_section = layout.row().box()
|
||||||
|
cache_section.prop(
|
||||||
|
settings,
|
||||||
|
"sidebar_advanced_cache_expanded",
|
||||||
|
text="Cache",
|
||||||
|
icon=get_expanded_icon(settings.sidebar_advanced_cache_expanded),
|
||||||
|
emboss=False)
|
||||||
|
if settings.sidebar_advanced_cache_expanded:
|
||||||
|
cache_section_row = cache_section.row()
|
||||||
|
cache_section_row.label(text="Cache directory:")
|
||||||
|
cache_section_row = cache_section.row()
|
||||||
|
cache_section_row.prop(settings, "cache_directory", text="")
|
||||||
|
cache_section_row = cache_section.row()
|
||||||
|
cache_section_row.label(text="Clear memory filecache:")
|
||||||
|
cache_section_row.prop(settings, "clear_memory_filecache", text="")
|
||||||
|
cache_section_row = cache_section.row()
|
||||||
|
cache_section_row.operator('session.clear_cache', text=f"Clear cache ({get_folder_size(settings.cache_directory)})")
|
||||||
|
log_section = layout.row().box()
|
||||||
|
log_section.prop(
|
||||||
|
settings,
|
||||||
|
"sidebar_advanced_log_expanded",
|
||||||
|
text="Logging",
|
||||||
|
icon=get_expanded_icon(settings.sidebar_advanced_log_expanded),
|
||||||
|
emboss=False)
|
||||||
|
|
||||||
|
if settings.sidebar_advanced_log_expanded:
|
||||||
|
log_section_row = log_section.row()
|
||||||
|
log_section_row.label(text="Log level:")
|
||||||
|
log_section_row.prop(settings, 'logging_level', text="")
|
||||||
class SESSION_PT_user(bpy.types.Panel):
|
class SESSION_PT_user(bpy.types.Panel):
|
||||||
bl_idname = "MULTIUSER_USER_PT_panel"
|
bl_idname = "MULTIUSER_USER_PT_panel"
|
||||||
bl_label = "Online users"
|
bl_label = "Online users"
|
||||||
@ -325,7 +349,7 @@ class SESSION_PT_user(bpy.types.Panel):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def poll(cls, context):
|
def poll(cls, context):
|
||||||
return operators.client and operators.client.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]
|
return session and session.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]
|
||||||
|
|
||||||
def draw_header(self, context):
|
def draw_header(self, context):
|
||||||
self.layout.label(text="", icon='USER')
|
self.layout.label(text="", icon='USER')
|
||||||
@ -334,7 +358,7 @@ class SESSION_PT_user(bpy.types.Panel):
|
|||||||
layout = self.layout
|
layout = self.layout
|
||||||
online_users = context.window_manager.online_users
|
online_users = context.window_manager.online_users
|
||||||
selected_user = context.window_manager.user_index
|
selected_user = context.window_manager.user_index
|
||||||
settings = utils.get_preferences()
|
settings = get_preferences()
|
||||||
active_user = online_users[selected_user] if len(
|
active_user = online_users[selected_user] if len(
|
||||||
online_users)-1 >= selected_user else 0
|
online_users)-1 >= selected_user else 0
|
||||||
runtime_settings = context.window_manager.session
|
runtime_settings = context.window_manager.session
|
||||||
@ -356,6 +380,8 @@ class SESSION_PT_user(bpy.types.Panel):
|
|||||||
if active_user != 0 and active_user.username != settings.username:
|
if active_user != 0 and active_user.username != settings.username:
|
||||||
row = layout.row()
|
row = layout.row()
|
||||||
user_operations = row.split()
|
user_operations = row.split()
|
||||||
|
if session.state['STATE'] == STATE_ACTIVE:
|
||||||
|
|
||||||
user_operations.alert = context.window_manager.session.time_snap_running
|
user_operations.alert = context.window_manager.session.time_snap_running
|
||||||
user_operations.operator(
|
user_operations.operator(
|
||||||
"session.snapview",
|
"session.snapview",
|
||||||
@ -368,7 +394,7 @@ class SESSION_PT_user(bpy.types.Panel):
|
|||||||
text="",
|
text="",
|
||||||
icon='TIME').target_client = active_user.username
|
icon='TIME').target_client = active_user.username
|
||||||
|
|
||||||
if operators.client.online_users[settings.username]['admin']:
|
if session.online_users[settings.username]['admin']:
|
||||||
user_operations.operator(
|
user_operations.operator(
|
||||||
"session.kick",
|
"session.kick",
|
||||||
text="",
|
text="",
|
||||||
@ -377,8 +403,7 @@ class SESSION_PT_user(bpy.types.Panel):
|
|||||||
|
|
||||||
class SESSION_UL_users(bpy.types.UIList):
|
class SESSION_UL_users(bpy.types.UIList):
|
||||||
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index, flt_flag):
|
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index, flt_flag):
|
||||||
session = operators.client
|
settings = get_preferences()
|
||||||
settings = utils.get_preferences()
|
|
||||||
is_local_user = item.username == settings.username
|
is_local_user = item.username == settings.username
|
||||||
ping = '-'
|
ping = '-'
|
||||||
frame_current = '-'
|
frame_current = '-'
|
||||||
@ -390,8 +415,8 @@ class SESSION_UL_users(bpy.types.UIList):
|
|||||||
ping = str(user['latency'])
|
ping = str(user['latency'])
|
||||||
metadata = user.get('metadata')
|
metadata = user.get('metadata')
|
||||||
if metadata and 'frame_current' in metadata:
|
if metadata and 'frame_current' in metadata:
|
||||||
frame_current = str(metadata['frame_current'])
|
frame_current = str(metadata.get('frame_current','-'))
|
||||||
scene_current = metadata['scene_current']
|
scene_current = metadata.get('scene_current','-')
|
||||||
if user['admin']:
|
if user['admin']:
|
||||||
status_icon = 'FAKE_USER_ON'
|
status_icon = 'FAKE_USER_ON'
|
||||||
split = layout.split(factor=0.35)
|
split = layout.split(factor=0.35)
|
||||||
@ -412,8 +437,8 @@ class SESSION_PT_presence(bpy.types.Panel):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def poll(cls, context):
|
def poll(cls, context):
|
||||||
return not operators.client \
|
return not session \
|
||||||
or (operators.client and operators.client.state['STATE'] in [STATE_INITIAL, STATE_ACTIVE])
|
or (session and session.state['STATE'] in [STATE_INITIAL, STATE_ACTIVE])
|
||||||
|
|
||||||
def draw_header(self, context):
|
def draw_header(self, context):
|
||||||
self.layout.prop(context.window_manager.session,
|
self.layout.prop(context.window_manager.session,
|
||||||
@ -423,56 +448,35 @@ class SESSION_PT_presence(bpy.types.Panel):
|
|||||||
layout = self.layout
|
layout = self.layout
|
||||||
|
|
||||||
settings = context.window_manager.session
|
settings = context.window_manager.session
|
||||||
|
pref = get_preferences()
|
||||||
layout.active = settings.enable_presence
|
layout.active = settings.enable_presence
|
||||||
col = layout.column()
|
col = layout.column()
|
||||||
|
col.prop(settings, "presence_show_session_status")
|
||||||
|
row = col.column()
|
||||||
|
row.active = settings.presence_show_session_status
|
||||||
|
row.prop(pref, "presence_hud_scale", expand=True)
|
||||||
|
row = col.column(align=True)
|
||||||
|
row.active = settings.presence_show_session_status
|
||||||
|
row.prop(pref, "presence_hud_hpos", expand=True)
|
||||||
|
row.prop(pref, "presence_hud_vpos", expand=True)
|
||||||
col.prop(settings, "presence_show_selected")
|
col.prop(settings, "presence_show_selected")
|
||||||
col.prop(settings, "presence_show_user")
|
col.prop(settings, "presence_show_user")
|
||||||
row = layout.column()
|
row = layout.column()
|
||||||
row.active = settings.presence_show_user
|
row.active = settings.presence_show_user
|
||||||
row.prop(settings, "presence_show_far_user")
|
row.prop(settings, "presence_show_far_user")
|
||||||
|
|
||||||
|
|
||||||
class SESSION_PT_services(bpy.types.Panel):
|
|
||||||
bl_idname = "MULTIUSER_SERVICE_PT_panel"
|
|
||||||
bl_label = "Services"
|
|
||||||
bl_space_type = 'VIEW_3D'
|
|
||||||
bl_region_type = 'UI'
|
|
||||||
bl_parent_id = 'MULTIUSER_SETTINGS_PT_panel'
|
|
||||||
bl_options = {'DEFAULT_CLOSED'}
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def poll(cls, context):
|
|
||||||
return operators.client and operators.client.state['STATE'] == 2
|
|
||||||
|
|
||||||
def draw_header(self, context):
|
|
||||||
self.layout.label(text="", icon='FILE_CACHE')
|
|
||||||
|
|
||||||
def draw(self, context):
|
|
||||||
layout = self.layout
|
|
||||||
online_users = context.window_manager.online_users
|
|
||||||
selected_user = context.window_manager.user_index
|
|
||||||
settings = context.window_manager.session
|
|
||||||
active_user = online_users[selected_user] if len(online_users)-1 >= selected_user else 0
|
|
||||||
|
|
||||||
# Create a simple row.
|
|
||||||
for name, state in operators.client.services_state.items():
|
|
||||||
row = layout.row()
|
|
||||||
row.label(text=name)
|
|
||||||
row.label(text=get_state_str(state))
|
|
||||||
|
|
||||||
|
|
||||||
def draw_property(context, parent, property_uuid, level=0):
|
def draw_property(context, parent, property_uuid, level=0):
|
||||||
settings = utils.get_preferences()
|
settings = get_preferences()
|
||||||
runtime_settings = context.window_manager.session
|
runtime_settings = context.window_manager.session
|
||||||
item = operators.client.get(uuid=property_uuid)
|
item = session.get(uuid=property_uuid)
|
||||||
|
|
||||||
if item.state == ERROR:
|
|
||||||
return
|
|
||||||
|
|
||||||
area_msg = parent.row(align=True)
|
area_msg = parent.row(align=True)
|
||||||
if level > 0:
|
|
||||||
for i in range(level):
|
if item.state == ERROR:
|
||||||
area_msg.label(text="")
|
area_msg.alert=True
|
||||||
|
else:
|
||||||
|
area_msg.alert=False
|
||||||
|
|
||||||
line = area_msg.box()
|
line = area_msg.box()
|
||||||
|
|
||||||
name = item.data['name'] if item.data else item.uuid
|
name = item.data['name'] if item.data else item.uuid
|
||||||
@ -485,8 +489,8 @@ def draw_property(context, parent, property_uuid, level=0):
|
|||||||
|
|
||||||
# Operations
|
# Operations
|
||||||
|
|
||||||
have_right_to_modify = item.owner == settings.username or \
|
have_right_to_modify = (item.owner == settings.username or \
|
||||||
item.owner == RP_COMMON
|
item.owner == RP_COMMON) and item.state != ERROR
|
||||||
|
|
||||||
if have_right_to_modify:
|
if have_right_to_modify:
|
||||||
detail_item_box.operator(
|
detail_item_box.operator(
|
||||||
@ -496,10 +500,12 @@ def draw_property(context, parent, property_uuid, level=0):
|
|||||||
detail_item_box.separator()
|
detail_item_box.separator()
|
||||||
|
|
||||||
if item.state in [FETCHED, UP]:
|
if item.state in [FETCHED, UP]:
|
||||||
detail_item_box.operator(
|
apply = detail_item_box.operator(
|
||||||
"session.apply",
|
"session.apply",
|
||||||
text="",
|
text="",
|
||||||
icon=ICONS_PROP_STATES[item.state]).target = item.uuid
|
icon=ICONS_PROP_STATES[item.state])
|
||||||
|
apply.target = item.uuid
|
||||||
|
apply.reset_dependencies = True
|
||||||
elif item.state in [MODIFIED, ADDED]:
|
elif item.state in [MODIFIED, ADDED]:
|
||||||
detail_item_box.operator(
|
detail_item_box.operator(
|
||||||
"session.commit",
|
"session.commit",
|
||||||
@ -522,7 +528,6 @@ def draw_property(context, parent, property_uuid, level=0):
|
|||||||
else:
|
else:
|
||||||
detail_item_box.label(text="", icon="DECORATE_LOCKED")
|
detail_item_box.label(text="", icon="DECORATE_LOCKED")
|
||||||
|
|
||||||
|
|
||||||
class SESSION_PT_repository(bpy.types.Panel):
|
class SESSION_PT_repository(bpy.types.Panel):
|
||||||
bl_idname = "MULTIUSER_PROPERTIES_PT_panel"
|
bl_idname = "MULTIUSER_PROPERTIES_PT_panel"
|
||||||
bl_label = "Repository"
|
bl_label = "Repository"
|
||||||
@ -532,9 +537,17 @@ class SESSION_PT_repository(bpy.types.Panel):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def poll(cls, context):
|
def poll(cls, context):
|
||||||
|
settings = get_preferences()
|
||||||
|
admin = False
|
||||||
|
|
||||||
|
if session and hasattr(session,'online_users'):
|
||||||
|
usr = session.online_users.get(settings.username)
|
||||||
|
if usr:
|
||||||
|
admin = usr['admin']
|
||||||
return hasattr(context.window_manager, 'session') and \
|
return hasattr(context.window_manager, 'session') and \
|
||||||
operators.client and \
|
session and \
|
||||||
operators.client.state['STATE'] in [STATE_ACTIVE, STATE_LOBBY]
|
(session.state['STATE'] == STATE_ACTIVE or \
|
||||||
|
session.state['STATE'] == STATE_LOBBY and admin)
|
||||||
|
|
||||||
def draw_header(self, context):
|
def draw_header(self, context):
|
||||||
self.layout.label(text="", icon='OUTLINER_OB_GROUP_INSTANCE')
|
self.layout.label(text="", icon='OUTLINER_OB_GROUP_INSTANCE')
|
||||||
@ -543,10 +556,9 @@ class SESSION_PT_repository(bpy.types.Panel):
|
|||||||
layout = self.layout
|
layout = self.layout
|
||||||
|
|
||||||
# Filters
|
# Filters
|
||||||
settings = utils.get_preferences()
|
settings = get_preferences()
|
||||||
runtime_settings = context.window_manager.session
|
runtime_settings = context.window_manager.session
|
||||||
|
|
||||||
session = operators.client
|
|
||||||
usr = session.online_users.get(settings.username)
|
usr = session.online_users.get(settings.username)
|
||||||
|
|
||||||
row = layout.row()
|
row = layout.row()
|
||||||
@ -572,11 +584,11 @@ class SESSION_PT_repository(bpy.types.Panel):
|
|||||||
types_filter = [t.type_name for t in settings.supported_datablocks
|
types_filter = [t.type_name for t in settings.supported_datablocks
|
||||||
if t.use_as_filter]
|
if t.use_as_filter]
|
||||||
|
|
||||||
key_to_filter = operators.client.list(
|
key_to_filter = session.list(
|
||||||
filter_owner=settings.username) if runtime_settings.filter_owned else operators.client.list()
|
filter_owner=settings.username) if runtime_settings.filter_owned else session.list()
|
||||||
|
|
||||||
client_keys = [key for key in key_to_filter
|
client_keys = [key for key in key_to_filter
|
||||||
if operators.client.get(uuid=key).str_type
|
if session.get(uuid=key).str_type
|
||||||
in types_filter]
|
in types_filter]
|
||||||
|
|
||||||
if client_keys:
|
if client_keys:
|
||||||
@ -592,6 +604,36 @@ class SESSION_PT_repository(bpy.types.Panel):
|
|||||||
else:
|
else:
|
||||||
row.label(text="Waiting to start")
|
row.label(text="Waiting to start")
|
||||||
|
|
||||||
|
class VIEW3D_PT_overlay_session(bpy.types.Panel):
|
||||||
|
bl_space_type = 'VIEW_3D'
|
||||||
|
bl_region_type = 'HEADER'
|
||||||
|
bl_parent_id = 'VIEW3D_PT_overlay'
|
||||||
|
bl_label = "Multi-user"
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
return True
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
layout = self.layout
|
||||||
|
|
||||||
|
view = context.space_data
|
||||||
|
overlay = view.overlay
|
||||||
|
display_all = overlay.show_overlays
|
||||||
|
|
||||||
|
col = layout.column()
|
||||||
|
|
||||||
|
row = col.row(align=True)
|
||||||
|
settings = context.window_manager.session
|
||||||
|
layout.active = settings.enable_presence
|
||||||
|
col = layout.column()
|
||||||
|
col.prop(settings, "presence_show_session_status")
|
||||||
|
col.prop(settings, "presence_show_selected")
|
||||||
|
col.prop(settings, "presence_show_user")
|
||||||
|
|
||||||
|
row = layout.column()
|
||||||
|
row.active = settings.presence_show_user
|
||||||
|
row.prop(settings, "presence_show_far_user")
|
||||||
|
|
||||||
classes = (
|
classes = (
|
||||||
SESSION_UL_users,
|
SESSION_UL_users,
|
||||||
@ -601,9 +643,8 @@ classes = (
|
|||||||
SESSION_PT_presence,
|
SESSION_PT_presence,
|
||||||
SESSION_PT_advanced_settings,
|
SESSION_PT_advanced_settings,
|
||||||
SESSION_PT_user,
|
SESSION_PT_user,
|
||||||
SESSION_PT_services,
|
|
||||||
SESSION_PT_repository,
|
SESSION_PT_repository,
|
||||||
|
VIEW3D_PT_overlay_session,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@ -21,13 +21,22 @@ import logging
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
from uuid import uuid4
|
|
||||||
from collections.abc import Iterable
|
from collections.abc import Iterable
|
||||||
|
from pathlib import Path
|
||||||
|
from uuid import uuid4
|
||||||
|
import math
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
import mathutils
|
import mathutils
|
||||||
|
|
||||||
from . import environment, presence
|
from . import environment
|
||||||
|
|
||||||
|
from replication.constants import (STATE_ACTIVE, STATE_AUTH,
|
||||||
|
STATE_CONFIG, STATE_SYNCING,
|
||||||
|
STATE_INITIAL, STATE_SRV_SYNC,
|
||||||
|
STATE_WAITING, STATE_QUITTING,
|
||||||
|
STATE_LOBBY,
|
||||||
|
STATE_LAUNCHING_SERVICES)
|
||||||
|
|
||||||
|
|
||||||
def find_from_attr(attr_name, attr_value, list):
|
def find_from_attr(attr_name, attr_value, list):
|
||||||
@ -47,7 +56,7 @@ def get_datablock_users(datablock):
|
|||||||
if hasattr(datablock, 'users_group') and datablock.users_scene:
|
if hasattr(datablock, 'users_group') and datablock.users_scene:
|
||||||
users.extend(list(datablock.users_scene))
|
users.extend(list(datablock.users_scene))
|
||||||
for datatype in supported_types:
|
for datatype in supported_types:
|
||||||
if datatype.bl_name != 'users':
|
if datatype.bl_name != 'users' and hasattr(bpy.data, datatype.bl_name):
|
||||||
root = getattr(bpy.data, datatype.bl_name)
|
root = getattr(bpy.data, datatype.bl_name)
|
||||||
for item in root:
|
for item in root:
|
||||||
if hasattr(item, 'data') and datablock == item.data or \
|
if hasattr(item, 'data') and datablock == item.data or \
|
||||||
@ -56,6 +65,32 @@ def get_datablock_users(datablock):
|
|||||||
return users
|
return users
|
||||||
|
|
||||||
|
|
||||||
|
def get_state_str(state):
|
||||||
|
state_str = 'UNKOWN'
|
||||||
|
if state == STATE_WAITING:
|
||||||
|
state_str = 'WARMING UP DATA'
|
||||||
|
elif state == STATE_SYNCING:
|
||||||
|
state_str = 'FETCHING'
|
||||||
|
elif state == STATE_AUTH:
|
||||||
|
state_str = 'AUTHENTICATION'
|
||||||
|
elif state == STATE_CONFIG:
|
||||||
|
state_str = 'CONFIGURATION'
|
||||||
|
elif state == STATE_ACTIVE:
|
||||||
|
state_str = 'ONLINE'
|
||||||
|
elif state == STATE_SRV_SYNC:
|
||||||
|
state_str = 'PUSHING'
|
||||||
|
elif state == STATE_INITIAL:
|
||||||
|
state_str = 'OFFLINE'
|
||||||
|
elif state == STATE_QUITTING:
|
||||||
|
state_str = 'QUITTING'
|
||||||
|
elif state == STATE_LAUNCHING_SERVICES:
|
||||||
|
state_str = 'LAUNCHING SERVICES'
|
||||||
|
elif state == STATE_LOBBY:
|
||||||
|
state_str = 'LOBBY'
|
||||||
|
|
||||||
|
return state_str
|
||||||
|
|
||||||
|
|
||||||
def clean_scene():
|
def clean_scene():
|
||||||
for type_name in dir(bpy.data):
|
for type_name in dir(bpy.data):
|
||||||
try:
|
try:
|
||||||
@ -82,5 +117,71 @@ def resolve_from_id(id, optionnal_type=None):
|
|||||||
def get_preferences():
|
def get_preferences():
|
||||||
return bpy.context.preferences.addons[__package__].preferences
|
return bpy.context.preferences.addons[__package__].preferences
|
||||||
|
|
||||||
|
|
||||||
def current_milli_time():
|
def current_milli_time():
|
||||||
return int(round(time.time() * 1000))
|
return int(round(time.time() * 1000))
|
||||||
|
|
||||||
|
|
||||||
|
def get_expanded_icon(prop: bpy.types.BoolProperty) -> str:
|
||||||
|
if prop:
|
||||||
|
return 'DISCLOSURE_TRI_DOWN'
|
||||||
|
else:
|
||||||
|
return 'DISCLOSURE_TRI_RIGHT'
|
||||||
|
|
||||||
|
|
||||||
|
# Taken from here: https://stackoverflow.com/a/55659577
|
||||||
|
def get_folder_size(folder):
|
||||||
|
return ByteSize(sum(file.stat().st_size for file in Path(folder).rglob('*')))
|
||||||
|
|
||||||
|
|
||||||
|
class ByteSize(int):
|
||||||
|
|
||||||
|
_kB = 1024
|
||||||
|
_suffixes = 'B', 'kB', 'MB', 'GB', 'PB'
|
||||||
|
|
||||||
|
def __new__(cls, *args, **kwargs):
|
||||||
|
return super().__new__(cls, *args, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
self.bytes = self.B = int(self)
|
||||||
|
self.kilobytes = self.kB = self / self._kB**1
|
||||||
|
self.megabytes = self.MB = self / self._kB**2
|
||||||
|
self.gigabytes = self.GB = self / self._kB**3
|
||||||
|
self.petabytes = self.PB = self / self._kB**4
|
||||||
|
*suffixes, last = self._suffixes
|
||||||
|
suffix = next((
|
||||||
|
suffix
|
||||||
|
for suffix in suffixes
|
||||||
|
if 1 < getattr(self, suffix) < self._kB
|
||||||
|
), last)
|
||||||
|
self.readable = suffix, getattr(self, suffix)
|
||||||
|
|
||||||
|
super().__init__()
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.__format__('.2f')
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return '{}({})'.format(self.__class__.__name__, super().__repr__())
|
||||||
|
|
||||||
|
def __format__(self, format_spec):
|
||||||
|
suffix, val = self.readable
|
||||||
|
return '{val:{fmt}} {suf}'.format(val=math.ceil(val), fmt=format_spec, suf=suffix)
|
||||||
|
|
||||||
|
def __sub__(self, other):
|
||||||
|
return self.__class__(super().__sub__(other))
|
||||||
|
|
||||||
|
def __add__(self, other):
|
||||||
|
return self.__class__(super().__add__(other))
|
||||||
|
|
||||||
|
def __mul__(self, other):
|
||||||
|
return self.__class__(super().__mul__(other))
|
||||||
|
|
||||||
|
def __rsub__(self, other):
|
||||||
|
return self.__class__(super().__sub__(other))
|
||||||
|
|
||||||
|
def __radd__(self, other):
|
||||||
|
return self.__class__(super().__add__(other))
|
||||||
|
|
||||||
|
def __rmul__(self, other):
|
||||||
|
return self.__class__(super().__rmul__(other))
|
||||||
|
24
scripts/docker_server/Dockerfile
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
# Download base image debian jessie
|
||||||
|
FROM python:slim
|
||||||
|
|
||||||
|
ARG replication_version=0.0.21
|
||||||
|
ARG version=0.1.1
|
||||||
|
|
||||||
|
# Infos
|
||||||
|
LABEL maintainer="Swann Martinez"
|
||||||
|
LABEL version=$version
|
||||||
|
LABEL description="Blender multi-user addon \
|
||||||
|
dedicated server image."
|
||||||
|
|
||||||
|
# Argument
|
||||||
|
ENV password='admin'
|
||||||
|
ENV port=5555
|
||||||
|
ENV timeout=3000
|
||||||
|
ENV log_level=INFO
|
||||||
|
ENV log_file="multiuser_server.log"
|
||||||
|
|
||||||
|
#Install replication
|
||||||
|
RUN pip install replication==$replication_version
|
||||||
|
|
||||||
|
# Run the server with parameters
|
||||||
|
CMD replication.serve -pwd ${password} -p ${port} -t ${timeout} -l ${log_level} -lf ${log_file}
|
6
scripts/get_addon_version.py
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
import re
|
||||||
|
|
||||||
|
init_py = open("multi_user/__init__.py").read()
|
||||||
|
version = re.search("\d+, \d+, \d+", init_py).group(0)
|
||||||
|
digits = version.split(',')
|
||||||
|
print('.'.join(digits).replace(" ",""))
|
4
scripts/get_replication_version.py
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
import re
|
||||||
|
|
||||||
|
init_py = open("multi_user/__init__.py").read()
|
||||||
|
print(re.search("\d+\.\d+\.\d+\w\d+|\d+\.\d+\.\d+", init_py).group(0))
|
10
scripts/start_server/run-dockerfile.sh
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
#! /bin/bash
|
||||||
|
|
||||||
|
# Start server in docker container, from image hosted on the multi-user gitlab's container registry
|
||||||
|
docker run -d \
|
||||||
|
-p 5555-5560:5555-5560 \
|
||||||
|
-e port=5555 \
|
||||||
|
-e log-level DEBUG \
|
||||||
|
-e password=admin \
|
||||||
|
-e timeout=1000 \
|
||||||
|
registry.gitlab.com/slumber/multi-user/multi-user-server:0.1.0
|
5
scripts/start_server/start-server.sh
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
#! /bin/bash
|
||||||
|
|
||||||
|
# Start replication server locally, and include logging (requires replication_version=0.0.21a15)
|
||||||
|
clear
|
||||||
|
replication.serve -p 5555 -pwd admin -t 1000 -l DEBUG -lf server.log
|
@ -2,7 +2,7 @@ import os
|
|||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from deepdiff import DeepDiff
|
from deepdiff import DeepDiff
|
||||||
|
from uuid import uuid4
|
||||||
import bpy
|
import bpy
|
||||||
import random
|
import random
|
||||||
from multi_user.bl_types.bl_collection import BlCollection
|
from multi_user.bl_types.bl_collection import BlCollection
|
||||||
@ -10,8 +10,13 @@ from multi_user.bl_types.bl_collection import BlCollection
|
|||||||
def test_collection(clear_blend):
|
def test_collection(clear_blend):
|
||||||
# Generate a collection with childrens and a cube
|
# Generate a collection with childrens and a cube
|
||||||
datablock = bpy.data.collections.new("root")
|
datablock = bpy.data.collections.new("root")
|
||||||
datablock.children.link(bpy.data.collections.new("child"))
|
datablock.uuid = str(uuid4())
|
||||||
datablock.children.link(bpy.data.collections.new("child2"))
|
s1 = bpy.data.collections.new("child")
|
||||||
|
s1.uuid = str(uuid4())
|
||||||
|
s2 = bpy.data.collections.new("child2")
|
||||||
|
s2.uuid = str(uuid4())
|
||||||
|
datablock.children.link(s1)
|
||||||
|
datablock.children.link(s2)
|
||||||
|
|
||||||
bpy.ops.mesh.primitive_cube_add()
|
bpy.ops.mesh.primitive_cube_add()
|
||||||
datablock.objects.link(bpy.data.objects[0])
|
datablock.objects.link(bpy.data.objects[0])
|
||||||
|
@ -1,21 +0,0 @@
|
|||||||
import os
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from deepdiff import DeepDiff
|
|
||||||
|
|
||||||
import bpy
|
|
||||||
import random
|
|
||||||
from multi_user.bl_types.bl_image import BlImage
|
|
||||||
|
|
||||||
def test_image(clear_blend):
|
|
||||||
datablock = bpy.data.images.new('asd',2000,2000)
|
|
||||||
|
|
||||||
implementation = BlImage()
|
|
||||||
expected = implementation._dump(datablock)
|
|
||||||
bpy.data.images.remove(datablock)
|
|
||||||
|
|
||||||
test = implementation._construct(expected)
|
|
||||||
implementation._load(expected, test)
|
|
||||||
result = implementation._dump(test)
|
|
||||||
|
|
||||||
assert not DeepDiff(expected, result)
|
|
@ -7,12 +7,11 @@ import bpy
|
|||||||
from multi_user.bl_types.bl_material import BlMaterial
|
from multi_user.bl_types.bl_material import BlMaterial
|
||||||
|
|
||||||
|
|
||||||
def test_material(clear_blend):
|
def test_material_nodes(clear_blend):
|
||||||
nodes_types = [node.bl_rna.identifier for node in bpy.types.ShaderNode.__subclasses__()]
|
nodes_types = [node.bl_rna.identifier for node in bpy.types.ShaderNode.__subclasses__()]
|
||||||
|
|
||||||
datablock = bpy.data.materials.new("test")
|
datablock = bpy.data.materials.new("test")
|
||||||
datablock.use_nodes = True
|
datablock.use_nodes = True
|
||||||
bpy.data.materials.create_gpencil_data(datablock)
|
|
||||||
|
|
||||||
for ntype in nodes_types:
|
for ntype in nodes_types:
|
||||||
datablock.node_tree.nodes.new(ntype)
|
datablock.node_tree.nodes.new(ntype)
|
||||||
@ -26,3 +25,18 @@ def test_material(clear_blend):
|
|||||||
result = implementation._dump(test)
|
result = implementation._dump(test)
|
||||||
|
|
||||||
assert not DeepDiff(expected, result)
|
assert not DeepDiff(expected, result)
|
||||||
|
|
||||||
|
|
||||||
|
def test_material_gpencil(clear_blend):
|
||||||
|
datablock = bpy.data.materials.new("test")
|
||||||
|
bpy.data.materials.create_gpencil_data(datablock)
|
||||||
|
|
||||||
|
implementation = BlMaterial()
|
||||||
|
expected = implementation._dump(datablock)
|
||||||
|
bpy.data.materials.remove(datablock)
|
||||||
|
|
||||||
|
test = implementation._construct(expected)
|
||||||
|
implementation._load(expected, test)
|
||||||
|
result = implementation._dump(test)
|
||||||
|
|
||||||
|
assert not DeepDiff(expected, result)
|
||||||
|
@ -30,9 +30,11 @@ CONSTRAINTS_TYPES = [
|
|||||||
'COPY_ROTATION', 'COPY_SCALE', 'COPY_TRANSFORMS', 'LIMIT_DISTANCE',
|
'COPY_ROTATION', 'COPY_SCALE', 'COPY_TRANSFORMS', 'LIMIT_DISTANCE',
|
||||||
'LIMIT_LOCATION', 'LIMIT_ROTATION', 'LIMIT_SCALE', 'MAINTAIN_VOLUME',
|
'LIMIT_LOCATION', 'LIMIT_ROTATION', 'LIMIT_SCALE', 'MAINTAIN_VOLUME',
|
||||||
'TRANSFORM', 'TRANSFORM_CACHE', 'CLAMP_TO', 'DAMPED_TRACK', 'IK',
|
'TRANSFORM', 'TRANSFORM_CACHE', 'CLAMP_TO', 'DAMPED_TRACK', 'IK',
|
||||||
'LOCKED_TRACK', 'SPLINE_IK', 'STRETCH_TO', 'TRACK_TO', 'ACTION',
|
'LOCKED_TRACK', 'STRETCH_TO', 'TRACK_TO', 'ACTION',
|
||||||
'ARMATURE', 'CHILD_OF', 'FLOOR', 'FOLLOW_PATH', 'PIVOT', 'SHRINKWRAP']
|
'ARMATURE', 'CHILD_OF', 'FLOOR', 'FOLLOW_PATH', 'PIVOT', 'SHRINKWRAP']
|
||||||
|
|
||||||
|
#temporary disabled 'SPLINE_IK' until its fixed
|
||||||
|
|
||||||
def test_object(clear_blend):
|
def test_object(clear_blend):
|
||||||
bpy.ops.mesh.primitive_cube_add(
|
bpy.ops.mesh.primitive_cube_add(
|
||||||
enter_editmode=False, align='WORLD', location=(0, 0, 0))
|
enter_editmode=False, align='WORLD', location=(0, 0, 0))
|
||||||
|
@ -6,8 +6,11 @@ from deepdiff import DeepDiff
|
|||||||
import bpy
|
import bpy
|
||||||
import random
|
import random
|
||||||
from multi_user.bl_types.bl_scene import BlScene
|
from multi_user.bl_types.bl_scene import BlScene
|
||||||
|
from multi_user.utils import get_preferences
|
||||||
|
|
||||||
def test_scene(clear_blend):
|
def test_scene(clear_blend):
|
||||||
|
get_preferences().sync_flags.sync_render_settings = True
|
||||||
|
|
||||||
datablock = bpy.data.scenes.new("toto")
|
datablock = bpy.data.scenes.new("toto")
|
||||||
datablock.view_settings.use_curve_mapping = True
|
datablock.view_settings.use_curve_mapping = True
|
||||||
# Test
|
# Test
|
||||||
|