Added deletion of remote files that are no longer present on the source; fixed a bug in the comparison of already-uploaded files; changed the md5 path-dir of large files; added print messages; fixed the remote list of encrypted files when multiple encrypted versions of the same file exist
This commit is contained in:
@@ -7,7 +7,7 @@ from utility import read_in_chunks
|
||||
import time
|
||||
import shutil
|
||||
|
||||
def launch(localpath,temp_dir,swift_container,prefix,size_limit_to_segment,size_limit_reading_os,upload,uploadlarge,fail_tries ,md5_compare, encrypted,encrypt_key,excluded_patterns,copy_to_dir):
|
||||
def launch(localpath,temp_dir,swift_container,prefix,size_limit_to_segment,size_limit_reading_os,upload,uploadlarge,fail_tries ,md5_compare, encrypted,encrypt_key,excluded_patterns,copy_to_dir,delete):
|
||||
print ("Localpath " + localpath)
|
||||
print ("Temppath " + temp_dir)
|
||||
print ("Swift container " + swift_container)
|
||||
@@ -44,9 +44,9 @@ def launch(localpath,temp_dir,swift_container,prefix,size_limit_to_segment,size_
|
||||
print("___________")
|
||||
swift_conn = authentication.set_authentication ()
|
||||
swift_conn,objects = utility.get_list(fail_tries,swift_conn,swift_container,prefix)
|
||||
byte0real,byte0manifest,swift_conn,remotefiles,remotefiles_md5 = utility.list_compute_correct_size (fail_tries,objects,swift_conn,swift_container,prefix)
|
||||
byte0real,byte0manifest,swift_conn,remotefiles,remotefiles_md5,remotefiles_xobj = utility.list_compute_correct_size (fail_tries,objects,swift_conn,swift_container,prefix)
|
||||
if encrypted:
|
||||
remotefiles_encr = utility.list_compute_correct_names_for_enctyption(objects,prefix)
|
||||
remotefiles_encr,list_enc_old = utility.list_compute_correct_names_for_encryption(objects,prefix)
|
||||
|
||||
print ("Files remoti " + str(len(remotefiles)))
|
||||
|
||||
@@ -90,7 +90,7 @@ def launch(localpath,temp_dir,swift_container,prefix,size_limit_to_segment,size_
|
||||
if upload_file:
|
||||
if encrypted :
|
||||
lnameenc = lname + "_xg10v10_encrypted"
|
||||
xtime = str(int(time()))
|
||||
xtime = str(int(time.time()))
|
||||
if lnameenc not in remotefiles_encr.keys() or localfiles[lname] != int((remotefiles_encr[lnameenc]).split("_xg10v10_")[2]) or remotefiles[remotefiles_encr[lnameenc]] != utility.total_size_encrypted(localfiles[lname]) :
|
||||
if upload:
|
||||
with open(localpath + lname, 'rb') as f:
|
||||
@@ -223,7 +223,7 @@ def launch(localpath,temp_dir,swift_container,prefix,size_limit_to_segment,size_
|
||||
skipped_uploads = 0
|
||||
for file, size in difffiles.items():
|
||||
hash_dir = hashlib.md5()
|
||||
hash_dir.update((utility.folder_from_path(file,utility.set_dash())[:-1]).encode("utf-8"))
|
||||
hash_dir.update((utility.dash_replace(prefix + file)).encode("utf-8"))
|
||||
hash_dir = hash_dir.hexdigest()
|
||||
if encrypted:
|
||||
local_path_corrected =temp_dir
|
||||
@@ -246,6 +246,7 @@ def launch(localpath,temp_dir,swift_container,prefix,size_limit_to_segment,size_
|
||||
for piece in read_in_chunks(f,size_limit_reading_os):
|
||||
hash.update(piece)
|
||||
if bytes_written == 0:
|
||||
print("Creating segment: " + str(counter))
|
||||
t = open(temp_dir + utility.file_only_name(file,utility.set_dash()) + "_" + str(format_numbers_for_large_files(str(counter),len(str(math.ceil( (size/size_limit_to_segment) * 10 ))))),'wb')
|
||||
if (bytes_written + len(piece) <= size_limit_to_segment):
|
||||
t.write(piece)
|
||||
@@ -259,12 +260,13 @@ def launch(localpath,temp_dir,swift_container,prefix,size_limit_to_segment,size_
|
||||
counter = counter + 1
|
||||
hash = hash.hexdigest()
|
||||
large_segments_created = True
|
||||
print("Large segments created")
|
||||
# check if there are uploaded segments
|
||||
if not large_segments_uploaded:
|
||||
headers,remote_segments_list = swift_conn.get_container(swift_container + "_segments", prefix =hash + "_xg10v10_" + hash_dir + "_xg10v10_" + str(size_limit_to_segment) + "/",full_listing=True )
|
||||
remote_segments_dict = {}
|
||||
for o in remote_segments_list :
|
||||
remote_segments_dict[o["name"].replace(hash +"_xg10v10_" + str(size_limit_to_segment) + "/","")] = o["bytes"]
|
||||
remote_segments_dict[o["name"].replace(hash + "_xg10v10_" + hash_dir + "_xg10v10_" + str(size_limit_to_segment) + "/","")] = o["bytes"]
|
||||
for local_segment_name,local_segment_size in local_segments_dict.items() :
|
||||
if (local_segment_name) not in remote_segments_dict.keys() or local_segment_size != remote_segments_dict[local_segment_name]:
|
||||
local_segments_to_upload_dict[local_segment_name] = local_segment_size
|
||||
@@ -313,4 +315,41 @@ def launch(localpath,temp_dir,swift_container,prefix,size_limit_to_segment,size_
|
||||
else:
|
||||
print("Upload Disabled")
|
||||
|
||||
|
||||
|
||||
dellist = []
|
||||
print("")
|
||||
print("Computing deletion list...")
|
||||
if encrypted:
|
||||
#update remote list with new files encrypted
|
||||
swift_conn,objects = utility.get_list(fail_tries,swift_conn,swift_container,prefix)
|
||||
remotefiles_encr,list_enc_old = utility.list_compute_correct_names_for_encryption(objects,prefix)
|
||||
|
||||
for o in list_enc_old:
|
||||
dellist.append(o)
|
||||
for rname in remotefiles_encr.keys():
|
||||
if rname.endswith("_xg10v10_encrypted"):
|
||||
rname_pure = rname.split("_xg10v10_encrypted")[0]
|
||||
if rname_pure in remotefiles_encr.keys():
|
||||
dellist.append(remotefiles_encr[rname_pure])
|
||||
if rname_pure not in localfiles.keys():
|
||||
dellist.append(remotefiles_encr[rname])
|
||||
else:
|
||||
if rname not in localfiles.keys():
|
||||
dellist.append(remotefiles_encr[rname])
|
||||
else:
|
||||
for rname in remotefiles.keys():
|
||||
if rname not in localfiles.keys():
|
||||
dellist.append(rname)
|
||||
|
||||
print("___________Files to delete______")
|
||||
for files in dellist:
|
||||
print(files)
|
||||
print("___________")
|
||||
if delete :
|
||||
for object in dellist:
|
||||
swift_conn = utility.delete_object(swift_conn,swift_container,prefix + object,remotefiles_xobj[object],fail_tries)
|
||||
else:
|
||||
print("Delete disabled")
|
||||
|
||||
swift_conn.close()
|
||||
|
||||
Reference in New Issue
Block a user