Compare commits: 246-bevel-... → v0.5.8 (46 commits)
SHA1
---
12355b6457
74ad4e5e1f
2a88c3e5ac
4c42a5be92
757ee7015a
15d66579c6
4128a47b88
689a565c75
c5f1bf1176
4dc6781c94
5311e55208
4cb64e5e77
ff67b581b1
f7bec3fc08
5e929db3ee
629f2e1cdb
b8fed806ed
8190846b59
c228b6ad7f
48651ce890
26847cf459
bfa6991c00
0c60c86775
70b6f9bcfa
8d176b55e4
4c0356e724
6b04d1d8d6
edfcdd8867
bdd6599614
6efd1321ce
047bd47048
d32cbb7b30
adabce3822
62f52db5b2
745f45b682
4b7573234a
f84860f520
c7ee67d4dd
7ed4644b75
e0c4a17be9
2a6181b832
0f7c9adec5
f094ec097c
ac84509b83
69565b3852
57fdd492ef
CHANGELOG.md (+54)
@@ -217,3 +217,57 @@ All notable changes to this project will be documented in this file.
 - GPencil fill stroke
 - Sculpt and GPencil brushes deleted when joining a session (@Kysios)
 - Auto-updater doesn't work for master and develop builds
+
+## [0.5.0] - 2022-02-10
+
+### Added
+
+- New overall UI and UX (@Kysios)
+- Documentation overall update (@Kysios)
+- Server presets (@Kysios)
+- Server online status (@Kysios)
+- Draw connected user color in the user list
+- Private session (access protected with a password) (@Kysios)
+
+### Changed
+
+- Dependencies are now installed in the addon folder and correctly cleaned during the addon removal process
+
+### Fixed
+
+- Python 3.10 compatibility (@notfood)
+- Blender 3.x compatibility
+- Skin vertex radius synchronization (@kromar)
+- Sequencer audio strip synchronization
+- Crash with empty after a reconnection
+
+## [0.5.1] - 2022-02-10
+
+### Fixed
+
+- Auto updater breaks dependency auto installer
+- Auto updater update from tag
+
+## [0.5.2] - 2022-02-18
+
+### Fixed
+
+- Objects not selectable after user leaves session
+- Geometry nodes attribute toogle doesn't sync
+
+## [0.5.3] - 2022-03-11
+
+### Changed
+
+- Snapshots logs
+### Fixed
+
+- Server crashing during snapshots
+- Blender 3.1 numpy loading error during early connection process
+- Server docker arguments
+
+## [0.5.5] - 2022-06-12
+
+### Fixed
+
+- Numpy mesh serialization error
@@ -16,12 +16,12 @@ import sys

 # -- Project information -----------------------------------------------------

-project = 'Multi-User 0.5.0 Documentation'
+project = 'Multi-User 0.5.x Documentation'
 copyright = '2020, Swann Martinez'
 author = 'Swann Martinez, Poochy, Fabian'

 # The full version, including alpha/beta/rc tags
-version_release = '0.5.1-develop'
+version_release = '0.5.5'


 # -- General configuration ---------------------------------------------------
@@ -206,9 +206,9 @@ You can run the dedicated server on any platform by following these steps:

 .. code-block:: bash

-    python -m pip install replication==0.1.13
+    python -m pip install replication

-4. Launch the server with:
+3. Launch the server with:

 .. code-block:: bash
@@ -562,7 +562,7 @@ The default Docker image essentially runs the equivalent of:

 .. code-block:: bash

-    replication.serve -pwd admin -p 5555 -t 5000 -l DEBUG -lf multiuser_server.log
+    replication.server -pwd admin -p 5555 -t 5000 -l DEBUG -lf multiuser_server.log

 This means the server will be launched with 'admin' as the administrator password, run on ports 5555:5558, use a timeout of 5 seconds, verbose 'DEBUG' log level, and with log files written to 'multiuser_server.log'. See :ref:`cmd-line` for a description of optional parameters.
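As an aside (not part of the diff): the quoted documentation paragraph spells out what each flag of the `replication.server` command means. A minimal sketch of scripting the same invocation from Python, purely illustrative and assuming the `replication` package is installed and exposes the `replication.server` entry point named in the updated docs:

```python
import subprocess

# Launch the dedicated server with the arguments described above:
# 'admin' administrator password, base port 5555, 5000 ms (5 s) timeout,
# DEBUG log level, and logs written to multiuser_server.log.
subprocess.run([
    "replication.server",
    "-pwd", "admin",
    "-p", "5555",
    "-t", "5000",
    "-l", "DEBUG",
    "-lf", "multiuser_server.log",
])
```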
@@ -19,7 +19,7 @@
 bl_info = {
     "name": "Multi-User",
     "author": "Swann Martinez",
-    "version": (0, 4, 1),
+    "version": (0, 5, 8),
     "description": "Enable real-time collaborative workflow inside blender",
     "blender": (2, 82, 0),
     "location": "3D View > Sidebar > Multi-User tab",
@@ -1015,16 +1015,18 @@ class Singleton_updater(object):
         for path, dirs, files in os.walk(base):
             # prune ie skip updater folder
             dirs[:] = [d for d in dirs if os.path.join(path,d) not in [self._updater_path]]

+            for directory in dirs:
+                shutil.rmtree(os.path.join(path,directory))
+
             for file in files:
-                for ptrn in self.remove_pre_update_patterns:
-                    if fnmatch.filter([file],ptrn):
-                        try:
-                            fl = os.path.join(path,file)
-                            os.remove(fl)
-                            if self._verbose: print("Pre-removed file "+file)
-                        except OSError:
-                            print("Failed to pre-remove "+file)
-                            self.print_trace()
+                try:
+                    fl = os.path.join(path,file)
+                    os.remove(fl)
+                    if self._verbose: print("Pre-removed file "+file)
+                except OSError:
+                    print("Failed to pre-remove "+file)
+                    self.print_trace()

         # Walk through the temp addon sub folder for replacements
         # this implements the overwrite rules, which apply after
@@ -1701,7 +1703,7 @@ class GitlabEngine(object):
     def parse_tags(self, response, updater):
         if response == None:
             return []
-        return [{"name": tag["name"], "zipball_url": self.get_zip_url(tag["commit"]["id"], updater)} for tag in response]
+        return [{"name": tag["name"], "zipball_url": f"https://gitlab.com/slumber/multi-user/-/jobs/artifacts/{tag['name']}/download?job=build"} for tag in response]


 # -----------------------------------------------------------------------------
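For orientation (not from the diff itself): the updated `parse_tags` maps each GitLab tag to a CI artifact download URL instead of asking the engine for a source zipball. A minimal sketch of the resulting data shape, using a hypothetical, trimmed-down tags payload:

```python
# Hypothetical response, shaped like the GitLab tags payload the updater
# consumes (only the keys used here are shown; values are made up).
response = [
    {"name": "v0.5.8", "commit": {"id": "0000000000"}},
    {"name": "v0.5.5", "commit": {"id": "1111111111"}},
]

# Mirrors the new list comprehension: zipball_url now points at the
# build job artifact for the tag rather than a repository archive.
tags = [
    {
        "name": tag["name"],
        "zipball_url": f"https://gitlab.com/slumber/multi-user/-/jobs/artifacts/{tag['name']}/download?job=build",
    }
    for tag in response
]

print(tags[0]["zipball_url"])
# -> https://gitlab.com/slumber/multi-user/-/jobs/artifacts/v0.5.8/download?job=build
```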
@@ -267,7 +267,7 @@ class addon_updater_update_now(bpy.types.Operator):
     clean_install: bpy.props.BoolProperty(
         name="Clean install",
         description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
-        default=False,
+        default=True,
         options={'HIDDEN'}
     )
@@ -47,10 +47,7 @@ SHAPEKEY_BLOCK_ATTR = [
     'slider_max',
 ]

-CURVE_POINT = [
-    'location',
-    'handle_type_2',
-]
 if bpy.app.version >= (2,93,0):
     SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
 else:
@@ -175,7 +172,7 @@ def load_modifier_geometry_node_props(dumped_modifier: dict, target_modifier: bp
     for input_index, inpt in enumerate(get_node_group_properties_identifiers(target_modifier.node_group)):
         dumped_value, dumped_type = dumped_modifier['props'][input_index]
         input_value = target_modifier[inpt[0]]
-        if dumped_type in ['INT', 'VALUE', 'STR']:
+        if dumped_type in ['INT', 'VALUE', 'STR', 'BOOL']:
             logging.info(f"{inpt[0]}/{dumped_value}")
             target_modifier[inpt[0]] = dumped_value
         elif dumped_type in ['RGBA', 'VECTOR']:
@@ -421,8 +418,7 @@ def dump_modifiers(modifiers: bpy.types.bpy_prop_collection)->dict:
             dumped_modifier['settings'] = dumper.dump(modifier.settings)
         elif modifier.type == 'UV_PROJECT':
             dumped_modifier['projectors'] =[p.object.name for p in modifier.projectors if p and p.object]
-        elif modifier.type == 'BEVEL' and modifier.profile_type == 'CUSTOM':
-            dumped_modifier['custom_profile'] = np_dump_collection(modifier.custom_profile.points, CURVE_POINT)

         dumped_modifiers.append(dumped_modifier)
     return dumped_modifiers
@@ -494,31 +490,11 @@ def load_modifiers(dumped_modifiers: list, modifiers: bpy.types.bpy_prop_collect
                         loaded_modifier.projectors[projector_index].object = target_object
                     else:
                         logging.error("Could't load projector target object {projector_object}")
-        elif loaded_modifier.type == 'BEVEL':
-            src_cust_profile = dumped_modifier.get('custom_profile')
-            if src_cust_profile:
-                dst_points = loaded_modifier.custom_profile.points
-
-                # TODO: refactor to be diff-compatible
-                for p in dst_points:
-                    try:
-                        dst_points.remove(dst_points[0])
-                    except Exception:
-                        break
-
-                for i in range(len(src_cust_profile['handle_type_2'])-len(dst_points)):
-                    dst_points.add(0,0)
-
-                np_load_collection(src_cust_profile, dst_points, CURVE_POINT)
-
-                loaded_modifier.custom_profile.points.update()


 def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_prop_collection):
     """ Load modifiers custom data not managed by the dump_anything loader
-    git
     :param dumped_modifiers: modifiers to load
     :type dumped_modifiers: dict
     :param modifiers: target modifiers collection
@@ -26,7 +26,8 @@ import numpy as np
 BPY_TO_NUMPY_TYPES = {
     'FLOAT': np.float32,
     'INT': np.int32,
-    'BOOL': np.bool}
+    'BOOL': bool,
+    'BOOLEAN': bool}

 PRIMITIVE_TYPES = ['FLOAT', 'INT', 'BOOLEAN']
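Context for the mapping change above (not stated in the diff): NumPy deprecated the `np.bool` alias in 1.20 and removed it in 1.24, so importing the old table fails on current NumPy; the builtin `bool` is a valid dtype replacement, and a `'BOOLEAN'` key covers the attribute type name used elsewhere (see `PRIMITIVE_TYPES`). A minimal sketch of how such a table is typically used to pre-allocate a buffer; the helper name is illustrative, not from the repository:

```python
import numpy as np

# Same mapping as in the new version of the file.
BPY_TO_NUMPY_TYPES = {
    'FLOAT': np.float32,
    'INT': np.int32,
    'BOOL': bool,        # np.bool alias no longer exists in recent NumPy
    'BOOLEAN': bool}


def allocate_buffer(attr_type: str, length: int) -> np.ndarray:
    # Illustrative: flat buffer sized for a Blender attribute of the given
    # type, e.g. to be filled via foreach_get().
    return np.zeros(length, dtype=BPY_TO_NUMPY_TYPES[attr_type])


print(allocate_buffer('BOOLEAN', 4))  # [False False False False]
```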
@@ -581,7 +582,6 @@ class Loader:
             else:
                 dst_curve.points.new(pos[0], pos[1])
         curves.update()

-
     def _load_pointer(self, instance, dump):
         rna_property_type = instance.bl_rna_property.fixed_type
@@ -36,8 +36,6 @@ REPLICATION_DEPENDENCIES = {
 LIBS = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
 REPLICATION = os.path.join(LIBS,"replication")

-PYTHON_PATH = None
-SUBPROCESS_DIR = None

 rtypes = []
@@ -50,13 +48,13 @@ def module_can_be_imported(name: str) -> bool:
     return False


-def install_pip():
+def install_pip(python_path):
     # pip can not necessarily be imported into Blender after this
-    subprocess.run([str(PYTHON_PATH), "-m", "ensurepip"])
+    subprocess.run([str(python_path), "-m", "ensurepip"])


-def install_package(name: str, install_dir: str):
-    logging.info(f"installing {name} version...")
+def install_requirements(python_path:str, module_requirement: str, install_dir: str):
+    logging.info(f"Installing {module_requirement} dependencies in {install_dir}")
     env = os.environ
     if "PIP_REQUIRE_VIRTUALENV" in env:
         # PIP_REQUIRE_VIRTUALENV is an env var to ensure pip cannot install packages outside a virtual env
@@ -65,23 +63,7 @@ def install_package(name: str, install_dir: str):
         # env var for the subprocess.
         env = os.environ.copy()
         del env["PIP_REQUIRE_VIRTUALENV"]
-    subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}", "-t", install_dir], env=env)
+    subprocess.run([str(python_path), "-m", "pip", "install", "-r", f"{install_dir}/{module_requirement}/requirements.txt", "-t", install_dir], env=env)
-
-    if name in sys.modules:
-        del sys.modules[name]
-
-
-def check_package_version(name: str, required_version: str):
-    logging.info(f"Checking {name} version...")
-    out = subprocess.run([str(PYTHON_PATH), "-m", "pip", "show", name], capture_output=True)
-
-    version = VERSION_EXPR.search(out.stdout.decode())
-    if version and version.group() == required_version:
-        logging.info(f"{name} is up to date")
-        return True
-    else:
-        logging.info(f"{name} need an update")
-        return False


 def get_ip():
@@ -117,21 +99,7 @@ def remove_paths(paths: list):
         if path in sys.path:
             logging.debug(f"Removing {path} dir from the path.")
             sys.path.remove(path)


-def install_modules(dependencies: list, python_path: str, install_dir: str):
-    global PYTHON_PATH, SUBPROCESS_DIR
-
-    PYTHON_PATH = Path(python_path)
-    SUBPROCESS_DIR = PYTHON_PATH.parent
-
-    if not module_can_be_imported("pip"):
-        install_pip()
-
-    for package_name in dependencies:
-        if not module_can_be_imported(package_name):
-            install_package(package_name, install_dir=install_dir)
-            module_can_be_imported(package_name)
-
 def register():
     if bpy.app.version >= (2,91,0):
@@ -139,12 +107,21 @@ def register():
     else:
         python_binary_path = bpy.app.binary_path_python

+    python_path = Path(python_binary_path)
+
     for module_name in list(sys.modules.keys()):
         if 'replication' in module_name:
             del sys.modules[module_name]

     setup_paths([LIBS, REPLICATION])
-    install_modules(REPLICATION_DEPENDENCIES, python_binary_path, install_dir=LIBS)
+
+    if not module_can_be_imported("pip"):
+        install_pip(python_path)
+
+    deps_not_installed = [package_name for package_name in REPLICATION_DEPENDENCIES if not module_can_be_imported(package_name)]
+    if any(deps_not_installed):
+        install_requirements(python_path, module_requirement='replication', install_dir=LIBS)


 def unregister():
     remove_paths([REPLICATION, LIBS])
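To summarise the new dependency flow above in one place (a sketch, not part of the diff; the paths and the standalone entry point are assumptions): pip is bootstrapped with `ensurepip`, then the bundled replication submodule's `requirements.txt` is installed into the addon-local `libs` directory with `pip install -t`, keeping Blender's own site-packages untouched.

```python
import os
import subprocess
import sys
from pathlib import Path

# Assumed layout, mirroring the diff: dependencies are installed next to the addon.
LIBS = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")


def install_pip(python_path: Path):
    # pip is not guaranteed to be importable from Blender's bundled Python.
    subprocess.run([str(python_path), "-m", "ensurepip"])


def install_requirements(python_path: Path, module_requirement: str, install_dir: str):
    # Install the submodule's pinned requirements into the addon-local folder;
    # -t targets install_dir instead of the interpreter's site-packages.
    requirements = f"{install_dir}/{module_requirement}/requirements.txt"
    subprocess.run([str(python_path), "-m", "pip", "install", "-r", requirements, "-t", install_dir])


if __name__ == "__main__":
    # Inside Blender the interpreter path comes from bpy.app; sys.executable is
    # used here only so the sketch runs standalone.
    python_path = Path(sys.executable)
    install_pip(python_path)
    install_requirements(python_path, module_requirement="replication", install_dir=LIBS)
```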
Submodule multi_user/libs/replication updated: 029e12b2be...3e9eb4f5c0
@@ -1036,7 +1036,7 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
     def poll(cls, context):
         return True

-class SessionImportUser(bpy.types.Panel):
+class SESSION_PT_ImportUser(bpy.types.Panel):
     bl_space_type = 'FILE_BROWSER'
     bl_region_type = 'TOOL_PROPS'
     bl_label = "Users"
@@ -1300,7 +1300,7 @@ classes = (
     SessionNotifyOperator,
     SessionSaveBackupOperator,
     SessionLoadSaveOperator,
-    SessionImportUser,
+    SESSION_PT_ImportUser,
     SessionStopAutoSaveOperator,
     SessionPurgeOperator,
     SessionPresetServerAdd,
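A note on the rename above (not from the diff): since Blender 2.80 the Python API expects UI class names to follow the `CATEGORY_PT_name` pattern for panels, and registration warns about names like `SessionImportUser` that lack the `_PT_` infix. A minimal conforming panel for illustration; the `DEMO_PT_users` name and label text are hypothetical, while the space and region types match the ones in the hunk:

```python
import bpy


class DEMO_PT_users(bpy.types.Panel):
    # The class name carries the required <CATEGORY>_PT_<name> structure.
    bl_label = "Users"
    bl_space_type = 'FILE_BROWSER'
    bl_region_type = 'TOOL_PROPS'

    def draw(self, context):
        self.layout.label(text="Connected users would be listed here")


def register():
    bpy.utils.register_class(DEMO_PT_users)


def unregister():
    bpy.utils.unregister_class(DEMO_PT_users)
```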
@@ -44,13 +44,6 @@ DEFAULT_PRESETS = {
         "admin_password": "admin",
         "server_password": ""
     },
-    "public session" : {
-        "server_name": "public session",
-        "ip": "51.75.71.183",
-        "port": 5555,
-        "admin_password": "",
-        "server_password": ""
-    },
 }

 def randomColor():
@@ -22,4 +22,4 @@ RUN pip install replication==$replication_version

 # Run the server with parameters
 ENTRYPOINT ["/bin/sh", "-c"]
-CMD ["replication.serve -apwd ${password} -spwd '' -p ${port} -t ${timeout} -l ${log_level} -lf ${log_file}"]
+CMD ["replication.server -apwd ${password} -spwd '' -p ${port} -t ${timeout} -l ${log_level} -lf ${log_file}"]