feat: gzip to compress graph
@@ -110,45 +110,13 @@ class Timer(Delayable):
         self.is_running = False

 class SessionRecordGraphTimer(Timer):
-    def __init__(self, timout=60, filepath=None):
+    def __init__(self, timout=10, filepath=None):
         self._filepath = filepath
         super().__init__(timout)


     def execute(self):
-        import networkx as nx
-        import pickle
-        import copy
-        from time import gmtime, strftime
-        from pathlib import Path
-
-        # Replication graph
-        nodes_ids = session.list()
-        #TODO: add dump graph to replication
-
-        nodes =[]
-        for n in nodes_ids:
-            nd = session.get(uuid=n)
-            nodes.append((
-                n,
-                {
-                    'owner': nd.owner,
-                    'str_type': nd.str_type,
-                    'data': nd.data,
-                    'dependencies': nd.dependencies,
-                }
-            ))
-
-        db = dict()
-        db['nodes'] = nodes
-        db['users'] = copy.copy(session.online_users)
-
-        time = strftime("%Y_%m_%d_%H_%M_%S", gmtime())
-        filepath = Path(self._filepath)
-        filepath = filepath.with_name(f"{filepath.stem}_{time}{filepath.suffix}")
-        with open(filepath, "wb") as f:
-            logging.info(f"Writing db snapshot to {filepath}")
-            pickle.dump(db, f, protocol=4)
+        operators.dump_db(self._filepath)


 class ApplyTimer(Timer):
     def __init__(self, timout=1, target_type=None):
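The point of routing the snapshot through gzip can be checked outside the addon with a standalone sketch. Everything below is illustrative: the payload only stands in for the replication graph dict that dump_db() builds, and the field values ('user', 'BlMesh', the temp paths) are made up.

import gzip
import os
import pickle
import tempfile

# Dummy payload standing in for the replication graph dict built by dump_db()
db = {
    'nodes': [
        (str(i), {'owner': 'user', 'str_type': 'BlMesh', 'data': 'x' * 512, 'dependencies': []})
        for i in range(1000)
    ],
    'users': {},
}

tmp = tempfile.mkdtemp()
plain_path = os.path.join(tmp, "snapshot_plain.db")
gzip_path = os.path.join(tmp, "snapshot_gzip.db")

with open(plain_path, "wb") as f:
    pickle.dump(db, f, protocol=4)

with gzip.open(gzip_path, "wb") as f:  # same pickle call, gzip-wrapped stream
    pickle.dump(db, f, protocol=4)

print(os.path.getsize(plain_path), "bytes plain vs", os.path.getsize(gzip_path), "bytes gzipped")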
@@ -25,9 +25,11 @@ import shutil
 import string
 import sys
 import time
+from datetime import datetime
 from operator import itemgetter
 from pathlib import Path
 from queue import Queue
+import gzip

 import bpy
 import mathutils
@@ -714,6 +716,43 @@ class SessionNotifyOperator(bpy.types.Operator):
         return context.window_manager.invoke_props_dialog(self)


+def dump_db(filepath):
+    import networkx as nx
+    import pickle
+    import copy
+    from time import gmtime, strftime
+    from pathlib import Path
+
+    # Replication graph
+    nodes_ids = session.list()
+    #TODO: add dump graph to replication
+
+    nodes =[]
+    for n in nodes_ids:
+        nd = session.get(uuid=n)
+        nodes.append((
+            n,
+            {
+                'owner': nd.owner,
+                'str_type': nd.str_type,
+                'data': nd.data,
+                'dependencies': nd.dependencies,
+            }
+        ))
+
+    db = dict()
+    db['nodes'] = nodes
+    db['users'] = copy.copy(session.online_users)
+
+    stime = datetime.now().strftime('%Y_%m_%d_%H-%M-%S')
+
+    filepath = Path(filepath)
+    filepath = filepath.with_name(f"{filepath.stem}_{stime}{filepath.suffix}")
+    with gzip.open(filepath, "wb") as f:
+        logging.info(f"Writing db snapshot to {filepath}")
+        pickle.dump(db, f, protocol=4)
+
+
 class SessionRecordGraphOperator(bpy.types.Operator, ExportHelper):
     bl_idname = "session.export"
     bl_label = "SessionRecordGraph"
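A snapshot written through gzip.open can no longer be read back with a plain open(). A minimal sketch of the matching reader, assuming the format above; the load_db helper and the example filename are illustrative, not part of the commit:

import gzip
import pickle

def load_db(filepath):
    # gzip.open in "rb" mode transparently decompresses before pickle reads the stream
    with gzip.open(filepath, "rb") as f:
        return pickle.load(f)

# Hypothetical path; dump_db() appends a timestamp to the filename it is given
db = load_db("graph_2021_01_01_12-00-00.db")
print(len(db['nodes']), "nodes, users:", list(db['users']))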