Optimise moves via the GUI, and any re-hashing due to an FS move, Transform, etc.: update last_hash_dt so we don't keep redoing the md5 hash. A move bumps ctime, so ctime > last_hash_dt kept triggering a re-hash even though there was no new content, because last_hash_dt was never being updated.
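The guard this commit makes honest is the usual stat-based freshness check: the scanner only re-hashes when the file's ctime is newer than the stored hash timestamp. A minimal sketch of that comparison, assuming plain values rather than the project's SQLAlchemy entities (needs_rehash is a hypothetical name; only last_hash_date / last_hash_dt comes from the diff below):

import os

def needs_rehash(path, last_hash_date):
    # ctime moves on renames and metadata changes, not only on content writes,
    # so without refreshing last_hash_date a mere GUI move re-trips this check
    # on every scan.
    return os.stat(path).st_ctime > last_hash_date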
--- a/TODO
+++ b/TODO
@@ -8,9 +8,6 @@
 FIX: BUG-69
 TEST: what if we try to move a path in settings, should not allow this?
 
-* when we do a legitimate move of a file, update last_scanned_dt otherwise FS change is newer, and it will 're-scan'
-* when we scan and there is no hash diff then, update last_scanned_dt otherwise FS change is newer, and it will 're-scan' continually
-
 * remember last import dir, so you can just go straight back to it
 
 * in Fullscreen mode and next/prev dropped out of FS when calling /viewlist route
@@ -1057,6 +1057,8 @@ def MoveFileToNewFolderInStorage(job,move_me, dst_storage_path, dst_rel_path):
     print( f"DONE change of {move_me} in_dir to {new_dir} created above" )
     session.add(move_me)
     CleanUpDirInDB(job, orig_parent_dir_e)
+    # reset last_hash_date otherwise, the move resets ctime on the FS, and so scanning sees a 'new' file
+    move_me.file_details.last_hash_date = time.time()
     AddLogForJob(job, f"{move_me.name} - (moved to {os.path.dirname(move_me.FullPathOnFS())})" )
     return
 
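The comment added in this hunk says the move resets ctime on the FS; on Linux that is exactly what happens, since rename() updates the inode's status-change time while leaving mtime and the content untouched, which is why the stamp must be refreshed here. A quick standalone check (POSIX leaves the renamed file's own ctime implementation-defined, so this shows Linux behaviour):

import os, tempfile, time

with tempfile.TemporaryDirectory() as d:
    src, dst = os.path.join(d, "a"), os.path.join(d, "b")
    with open(src, "w") as f:
        f.write("same content")
    st = os.stat(src)
    time.sleep(1)
    os.rename(src, dst)                          # what a GUI move amounts to on disk
    print(os.stat(dst).st_ctime > st.st_ctime)   # True: status-change time moved
    print(os.stat(dst).st_mtime == st.st_mtime)  # True: content time did not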
@@ -1337,7 +1339,7 @@ def JobTransformImage(job):
     out.save( e.FullPathOnFS() )
     print( f"JobTransformImage DONE transform: job={job.id}, id={id}, amt={amt}" )
     e.file_details.thumbnail, _ , _ = GenThumb( e.FullPathOnFS() )
-    e.file_details.hash = md5( job, e.FullPathOnFS() )
+    e.file_details.hash = md5( job, e )
     print( f"JobTransformImage DONE thumb: job={job.id}, id={id}, amt={amt}" )
     session.add(e)
     FinishJob(job, "Finished Processesing image rotation/flip")
@@ -1358,7 +1360,7 @@ def GenHashAndThumb(job, e):
         job.current_file_num+=1
         return
 
-    new_hash = md5( job, e.FullPathOnFS() )
+    new_hash = md5( job, e )
     # same hash and we already have a thumbnail-> just return
     if new_hash == e.file_details.hash and e.file_details.thumbnail:
         if DEBUG:
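With md5() itself now stamping last_hash_date (see the new helper at the end of this diff), this early-out covers both freshness signals at once. Roughly, paraphrasing the hunk above (only the md5 call, the hash comparison, and the thumbnail check are taken from the diff; the rest of the body is elided there):

def GenHashAndThumb_outline(job, e):
    new_hash = md5( job, e )  # stamps e.file_details.last_hash_date as a side effect
    # same hash and we already have a thumbnail -> nothing to regenerate
    if new_hash == e.file_details.hash and e.file_details.thumbnail:
        return
    ...  # otherwise fall through to thumbnail/hash regeneration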
@@ -1372,7 +1374,6 @@ def GenHashAndThumb(job, e):
         e.file_details.thumbnail = GenVideoThumbnail( job, e.FullPathOnFS() )
     elif e.type.name == 'Unknown':
         job.current_file_num+=1
-        e.file_details.last_hash_date = time.time()
         return
 
 ####################################################################################################################################
@@ -1427,13 +1428,14 @@ def isVideo(file):
 ####################################################################################################################################
 # Returns an md5 hash of the fnames' contents
 ####################################################################################################################################
-def md5(job, fname):
+def md5(job, e):
     hash_md5 = hashlib.md5()
-    with open(fname, "rb") as f:
+    with open(e.FullPathOnFS(), "rb") as f:
         for chunk in iter(lambda: f.read(4096), b""):
             hash_md5.update(chunk)
     hash = hash_md5.hexdigest()
-    AddLogForJob( job, "Generated md5 hash: {} for file: {}".format( hash, fname ) )
+    AddLogForJob( job, "Generated md5 hash: {} for file: {}".format( hash, e.FullPathOnFS() ) )
+    e.file_details.last_hash_date = time.time()
     return hash
 
 ####################################################################################################################################
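Call sites now pass the entity instead of a path, so every hash automatically refreshes the stamp and no caller can forget it. A self-contained sketch of the new shape, with stand-in classes (FakeDetails and FakeEntry mimic the file_details and FullPathOnFS attributes that appear in the diff; everything else here is illustrative, not the project's code):

import hashlib, time

class FakeDetails:
    def __init__(self):
        self.hash = None
        self.last_hash_date = 0.0

class FakeEntry:
    def __init__(self, path):
        self._path = path
        self.file_details = FakeDetails()
    def FullPathOnFS(self):
        return self._path

def md5_sketch(e):
    h = hashlib.md5()
    with open(e.FullPathOnFS(), "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):  # stream in 4 KiB chunks
            h.update(chunk)
    e.file_details.last_hash_date = time.time()  # every hash refreshes the stamp
    return h.hexdigest()

e = FakeEntry(__file__)
e.file_details.hash = md5_sketch(e)
print(e.file_details.hash, e.file_details.last_hash_date)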