feat: gzip to compress graph

This commit is contained in:
Author: Swann
Date: 2020-12-11 23:02:20 +01:00
parent 16fc4b8c54
commit 056b3524e5
2 changed files with 41 additions and 34 deletions

View File

@ -110,45 +110,13 @@ class Timer(Delayable):
self.is_running = False
class SessionRecordGraphTimer(Timer):
    """Timer that periodically dumps the replication session graph to disk.

    Serialization of the graph (presumably including the gzip compression
    this commit introduces — confirm in ``operators.dump_db``) is delegated
    to the project-level ``operators`` module; this timer only schedules
    the dump at a fixed interval.
    """

    def __init__(self, timout=10, filepath=None):
        """Initialize the dump timer.

        :param timout: seconds between two graph dumps (commit lowered the
            default from 60 to 10).
            NOTE(review): the parameter keeps its upstream misspelled name
            ``timout`` so existing keyword callers keep working.
        :param filepath: destination path passed to ``operators.dump_db``;
            may be ``None`` — behavior in that case depends on ``dump_db``.
        """
        self._filepath = filepath
        super().__init__(timout)

    def execute(self):
        """Dump the current session graph to ``self._filepath``.

        Delegates entirely to ``operators.dump_db`` (defined elsewhere in
        the project); the previous inline pickle-snapshot implementation
        was removed by this commit.
        """
        operators.dump_db(self._filepath)
class ApplyTimer(Timer):
def __init__(self, timout=1, target_type=None):