fixed up some missing try/excepts on os.* calls

2022-08-06 12:02:10 +10:00
parent 60b95b6152
commit 20c76609cd
2 changed files with 70 additions and 37 deletions
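
The pattern applied in every hunk below is the same: wrap the bare os.* call in a try/except, log the failure against the job (via AddLogForJob, or FinishJob/print for the fatal cases), and return or continue so one bad filesystem path cannot kill the whole job. A minimal sketch of that pattern, assuming a hypothetical safe_replace helper and a stand-in add_log_for_job logger (the real code logs and bails inline rather than through a helper, and catches Exception rather than the narrower OSError):

import os

def add_log_for_job(job, msg):
    # stand-in for the project's AddLogForJob(job, msg) helper
    print(f"job {job}: {msg}")

def safe_replace(job, src, dst):
    # move src to dst; log and report failure instead of raising,
    # mirroring the try/except-and-bail shape used throughout this commit
    try:
        os.replace(src, dst)
    except OSError as e:
        add_log_for_job(job, f"ERROR: Failed to move {src} -> {dst}, err: {e}")
        return False
    add_log_for_job(job, f"INFO: move {src} -> {dst}")
    return True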


@@ -766,20 +766,20 @@ def JobMetadata(job):
     which=[jex.value for jex in job.extra if jex.name == "which"][0]
     face_id=[jex.value for jex in job.extra if jex.name == "face_id"][0]
     f=session.query(Face).get(face_id)
-    if which == 'add_force_match_override' or which=='remove_force_match_override':
-        person_id=[jex.value for jex in job.extra if jex.name == "person_id"][0]
-        p=session.query(Person).get(person_id)
-        os.makedirs( f"{SettingsMPath()}force_match_overrides", mode=0o777, exist_ok=True )
-        fname=f"{SettingsMPath()}force_match_overrides/{face_id}_{p.tag}"
-    elif which == 'add_no_match_override' or which == 'remove_no_match_override':
-        type_id=[jex.value for jex in job.extra if jex.name == "type_id"][0]
-        t=session.query(FaceOverrideType).get(type_id)
-        os.makedirs( f"{SettingsMPath()}no_match_overrides", mode=0o777, exist_ok=True )
-        fname=f"{SettingsMPath()}no_match_overrides/{face_id}_{t.name}"
-    else:
-        AddLogForJob(job, f"ERROR: Failed to process metadata (which={which})" )
-        return
+    try:
+        if which == 'add_force_match_override' or which=='remove_force_match_override':
+            person_id=[jex.value for jex in job.extra if jex.name == "person_id"][0]
+            p=session.query(Person).get(person_id)
+            os.makedirs( f"{SettingsMPath()}force_match_overrides", mode=0o777, exist_ok=True )
+            fname=f"{SettingsMPath()}force_match_overrides/{face_id}_{p.tag}"
+        elif which == 'add_no_match_override' or which == 'remove_no_match_override':
+            type_id=[jex.value for jex in job.extra if jex.name == "type_id"][0]
+            t=session.query(FaceOverrideType).get(type_id)
+            os.makedirs( f"{SettingsMPath()}no_match_overrides", mode=0o777, exist_ok=True )
+            fname=f"{SettingsMPath()}no_match_overrides/{face_id}_{t.name}"
+        else:
+            AddLogForJob(job, f"ERROR: Failed to process metadata (which={which})" )
+            return
+        if str.find( which, 'add_' ) == 0:
+            file_h=open(fname, 'wb')
+            file_h.write(f.face)
@@ -1085,8 +1085,11 @@ def CreateSymlink(job,ptype,path):
     symlink=SymlinkName(path_type.name, path, path)
     if not os.path.exists(symlink):
         print( f"INFO: symlink does not exist, actually creating it -- s={symlink}" )
-        os.makedirs( os.path.dirname(symlink), mode=0o777, exist_ok=True )
-        os.symlink(path, symlink)
+        try:
+            os.makedirs( os.path.dirname(symlink), mode=0o777, exist_ok=True )
+            os.symlink(path, symlink)
+        except Exception as e:
+            AddLogForJob( job, f"ERROR: Failed to create symlink - tried to link {symlink} -> {path}: {e}")
     return symlink
 ##############################################################################
@@ -1372,8 +1375,13 @@ def MoveEntriesToOtherFolder(job, move_me, dst_storage_path, dst_rel_path):
             # we use the new path to this new Dir with the full location (the old dir is put into the new location)
             ResetAnySubdirPaths( move_me, dst_storage_path, move_me.dir_details.rel_path )
             # move the actual dir to its new location
-            os.replace( orig_fs_pos, move_me.FullPathOnFS() )
+            try:
+                os.replace( orig_fs_pos, move_me.FullPathOnFS() )
+            except Exception as e:
+                AddLogForJob( job, f"ERROR: Failed to move dir: {orig_fs_pos} into {move_me.FullPathOnFS()}, err: {e}")
+                return
             AddLogForJob( job, f"INFO: move {orig_fs_pos} -> {move_me.FullPathOnFS()}" )
             return
         else:
             # scen 3: rename dir -- as the last component of dst_rel_path is what we will rename move_me to, so dont create last bit (os.path.dirname),
             # we will just change move_me into that last dir -> renaming the dir
@@ -1386,7 +1394,11 @@ def MoveEntriesToOtherFolder(job, move_me, dst_storage_path, dst_rel_path):
             move_me.name = os.path.basename(dst_rel_path)
             session.add(move_me)
             ResetAnySubdirPaths( move_me, dst_storage_path, dst_rel_path )
-            os.replace( orig_fs_pos, move_me.FullPathOnFS() )
+            try:
+                os.replace( orig_fs_pos, move_me.FullPathOnFS() )
+            except Exception as e:
+                AddLogForJob( job, f"ERROR: Failed to rename dir: {orig_fs_pos} -> {move_me.FullPathOnFS()}, err: {e}")
+                return
             AddLogForJob( job, f"INFO: rename {orig_fs_pos} -> {move_me.FullPathOnFS()}" )
             return
     else:
@@ -1407,7 +1419,11 @@ def MoveEntriesToOtherFolder(job, move_me, dst_storage_path, dst_rel_path):
         session.add(move_me)
         # move the actual file to its new location
         AddLogForJob( job, f"DEBUG: move of FILE - {orig_fs_pos} -> {move_me.FullPathOnFS()}" )
-        os.replace( orig_fs_pos, move_me.FullPathOnFS() )
+        try:
+            os.replace( orig_fs_pos, move_me.FullPathOnFS() )
+        except Exception as e:
+            AddLogForJob( job, f"ERROR: Failed to move file: {orig_fs_pos} -> {move_me.FullPathOnFS()}, err: {e}")
+            return
         old_dir = session.query(Entry).filter(Entry.id==orig_dir_eid).first()
         CleanUpDirInDB(job, old_dir)
@@ -1424,7 +1440,10 @@ def CreateFSLocation( job, dst_path, dst_locn ):
         part_rel_path += f"{dirname}"
         parent_dir=AddDir( job, dirname, parent_dir, part_rel_path, dst_path )
         part_rel_path += "/"
-    os.makedirs( dst_path.path_prefix + '/' + dst_locn, mode=0o777, exist_ok=True )
+    try:
+        os.makedirs( dst_path.path_prefix + '/' + dst_locn, mode=0o777, exist_ok=True )
+    except Exception as e:
+        AddLogForJob( job, f"ERROR: Failed to makedirs: {dst_path.path_prefix + '/' + dst_locn} Err: {e}")
     return parent_dir
@@ -1591,7 +1610,13 @@ def JobImportDir(job):
     ResetExistsOnFS(job, symlink)
     # go through data once to work out file_cnt so progress bar works from first import
-    walk=os.walk(path, topdown=True)
+    try:
+        walk=os.walk(path, topdown=True)
+    except Exception as e:
+        WithdrawDependantJobs( job, job.id, "scan job FAILED" )
+        FinishJob(job, f"ERROR: Failed to 'walk' the filesystem at: {path} Err: {e}", "Failed" )
+        return
     ftree=list(walk)
     overall_file_cnt=0
     for root, subdirs, files in ftree:
@@ -1628,7 +1653,12 @@ def JobImportDir(job):
             session.commit()
             fname=dir.PathOnFS()+'/'+basename
-            stat = os.stat(fname)
+            try:
+                stat = os.stat(fname)
+            except Exception as e:
+                AddLogForJob(job, f"failed to stat file - was it removed from the underlying filesystem while PA was scanning? Err: {e}" )
+                continue
             # use ctime as even a metadata change (mv'd file on the fs, or a perms change) needs to be checked
             if stat.st_ctime > dir.last_import_date:
                 if DEBUG:
@@ -1821,7 +1851,12 @@ def GenHashAndThumb(job, e):
     # commit every 100 files to see progress being made but not hammer the database
     if job.current_file_num % 100 == 0:
         session.commit()
-    stat = os.stat( e.FullPathOnFS() )
+    try:
+        stat = os.stat( e.FullPathOnFS() )
+    except Exception as e:
+        AddLogForJob(job, f"failed to stat file - was it removed from the underlying filesystem while PA was scanning? Err: {e}" )
+        job.current_file_num+=1
+        return
     # use mtime as only if the content is different do we need to redo the hash
     if stat.st_mtime < e.file_details.last_hash_date:
         if DEBUG:
@@ -1946,12 +1981,12 @@ def GenVideoThumbnail( job, fname):
             .overwrite_output()
             .run(capture_stdout=True, capture_stderr=True)
         )
-        thumbnail, w, h = GenThumb( tmp_fname, False )
-        os.remove( tmp_fname )
     except ffmpeg.Error as e:
         AddLogForJob( job, f"ERROR: Failed to Generate thumbnail for video file: {fname} - error={e}" )
         return None
+    thumbnail, w, h = GenThumb( tmp_fname, False )
+    os.remove( tmp_fname )
     return thumbnail
 ####################################################################################################################################
@@ -2282,14 +2317,17 @@ def InitialValidationChecks():
     path=SettingsRBPath()
     rbp_exists=0
     if os.path.exists(path):
-        rbp_exists=1
-        root, dirs, files = next(os.walk(path))
-        if len(dirs) + len(files) > 0:
-            AddLogForJob(job, "INFO: the bin path contains content, cannot process to know where original deletes were form - skipping content!" )
-            AddLogForJob(job, "TODO: could be smart about what is known in the DB vs on the FS, and change below to an ERROR if it is one")
-            AddLogForJob(job, "WARNING: IF the files in the bin are in the DB (succeeded from GUI deletes) then this is okay, otherwise you should delete contents form the recycle bin and restart the job manager)" )
-        # create symlink and Path/Dir if needed
-        ProcessRecycleBinDir(job)
+        try:
+            root, dirs, files = next(os.walk(path))
+            if len(dirs) + len(files) > 0:
+                AddLogForJob(job, "INFO: the bin path contains content, cannot process to know where original deletes were form - skipping content!" )
+                AddLogForJob(job, "TODO: could be smart about what is known in the DB vs on the FS, and change below to an ERROR if it is one")
+                AddLogForJob(job, "WARNING: IF the files in the bin are in the DB (succeeded from GUI deletes) then this is okay, otherwise you should delete contents form the recycle bin and restart the job manager)" )
+            # create symlink and Path/Dir if needed
+            ProcessRecycleBinDir(job)
+            rbp_exists=1
+        except Exception as ex:
+            print( f"FATAL ERROR: Failed to walk the recycle bin at {path} Err:{ex}" )
     else:
         AddLogForJob(job, "ERROR: The bin path in settings does not exist - Please fix now");
     sp_exists=0