This commit is contained in:
Amber 2023-12-04 11:02:47 +01:00
parent b96ed81842
commit bd4fce8855
11 changed files with 98 additions and 5 deletions

View File

@ -65,3 +65,5 @@ class SyncAgent():
print('name: %s is regular FILE' % (attr.filename))
synca = _agent.SyncAgent()
# sftpc = a.get_sftp_client()

View File

@ -1,3 +1,10 @@
if __name__ == '__main__':
print('python masync script: #!/usr/bin/env python ')
## suppose last tree is
last_local_tree_hash = {'taglioCapelli4.jpg': '5af62402ec7716e8d729b0683061f0c4', 'taglioCapelli5.jpg': '9c42961af589a279c4c828925291153b', 'pino.txt': 'd41d8cd98f00b204e9800998ecf8427e', 'taglioCapelli6.jpg': '4059905eab817c33ee48f912af80fdb7', 'spartiti_sepu': {'IMG_20220626_081839.jpg': '3c14e508124c928d59b393a571e2f751', 'IMG_20220626_081951.jpg': 'd484638ac09cbe40f8753de3f1b3c4a6'}, 'preferiti.txt': 'af45981ef2be534dbb37f96833d3fd04'}
from snapshot.generate import local as _genlocal
local_tree_hash = _genlocal.generate_tree_hash('/home/luca/sharednotes_dev/')

View File

@ -1,13 +1,13 @@
import gzip
import json
from snapshot import gen as _gen
from snapshot.generate import local as _genlocal
DUMP_FILE_NAME = '.snapshot.json.gz'
SNAPSHOT_PATH = '../'
def dump_snapshot(snapshot, path=None, dump_file_name=None):
path = path or SNAPSHOT_PATH
path = _gen.check_isdir(path)
path = _genlocal.check_isdir(path)
dump = json.dumps(snapshot)
dump = gzip.compress(dump.encode())
@ -20,7 +20,7 @@ def dump_snapshot(snapshot, path=None, dump_file_name=None):
def decode_snapshot(path=None, dump_file_name=None):
path = path or SNAPSHOT_PATH
path = _gen.check_isdir(path)
path = _genlocal.check_isdir(path)
dump_file_name = dump_file_name or DUMP_FILE_NAME

Binary file not shown.

View File

@ -0,0 +1,84 @@
import stat
import os
import hashlib
import json
import gzip
from client import agent as _agent
def generate_rfile_hash(file_path, hexdigest=True, client=None):
    '''
    Compute the MD5 hash of a remote file read over SFTP.

    @param file_path string, absolute path of the file on the remote server
    @param hexdigest bool, when True return the hex string form of the hash,
                     otherwise the raw digest bytes
    @param client optional SFTP client to use; when omitted, one is obtained
                  from the module-level sync agent (client.agent.synca)
    @return str (hexdigest) or bytes (raw digest) of the file content
    '''
    if not client:
        a = _agent.synca
        client = a.get_sftp_client()
    with client.open(file_path, "rb") as f:
        # hash the buffer once; the original built a second md5 object for
        # the non-hex branch
        digest = hashlib.md5(f.read())
    return digest.hexdigest() if hexdigest else digest.digest()
# def check_isdir(path: str):
# if not os.path.isdir(path):
# raise Exception('Provide a valid folder to start the hashing')
#
# if not path.endswith(os.path.sep):
# path = path + os.path.sep
# return path
def generate_tree_hash_oversftp(root_path: str, sftpc=None):
    '''
    Recursively build a map of MD5 hashes for a remote directory tree over SFTP.

    @param root_path string, root path on the remote server
    @param sftpc optional SFTP client to reuse; when omitted, one is obtained
                 from the module-level sync agent. The client is threaded down
                 the recursion so nested calls do not create a new one each
                 level (the original fetched a fresh client per call).
    @return dict mapping entry name -> md5 hexdigest for regular files, or a
            nested dict of the same shape for sub-directories
    '''
    if not root_path.endswith(os.path.sep):
        root_path = root_path + os.path.sep
    if sftpc is None:
        a = _agent.synca
        sftpc = a.get_sftp_client()
    rtreemap = {}
    for item in sftpc.listdir_attr(root_path):
        absolute_item_path = root_path + item.filename
        print('absolute_item_path: %s, item %s, isdir: %s' % (absolute_item_path, item.filename, stat.S_ISDIR(item.st_mode)))
        if stat.S_ISDIR(item.st_mode):
            # recurse into the sub-directory, reusing the same client
            rtreemap[item.filename] = generate_tree_hash_oversftp(absolute_item_path, sftpc)
        else:
            rtreemap[item.filename] = generate_rfile_hash(absolute_item_path, client=sftpc)
    return rtreemap
'''
a
- b
- c
- k.txt
i.txt
g.txt
j.txt
k.txt
tree['a'] = {
'b' : {
},
'j.txt' : '012349jasdfh9934',
}
'''
# Module-level remote root used when exercising the sftp tree walk by hand.
# NOTE(review): hard-coded developer path — presumably meant for manual
# testing only; confirm it is not relied on by importers of this module.
root_path = '/home/luca/rsyn_test_fap'

View File

@ -1,8 +1,8 @@
from snapshot import gen as _gen
from snapshot import dump as _dump
from snapshot.generate import local as _genlocal
def dmpsnap(root_tree: str):
snapshot = _gen.generate_recursive_filemap_hash(root_tree)
snapshot = _genlocal.generate_recursive_filemap_hash(root_tree)
_dump.dump_snapshot(snapshot)
def dcsnap(path: str, filename=None):