fixed up some missing try/excepts on os. calls

TODO
@@ -1,9 +1,4 @@
## GENERAL
* put try: around any os.remove, os.symlink etc.
    File "/code/pa_job_manager.py", line 1088, in CreateSymlink
      os.symlink(path, symlink)
    FileExistsError: [Errno 17] File exists: '/export/docker/storage/.pa_metadata/' -> 'static/Metadata/.pa_metadata'

* remove multiple Paths from SettingsIPath, etc.

* should I change the rotation code to use that jpeg util to reduce/remove compression loss?
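
Note: the traceback in the TODO above is the failure this commit guards
against. A narrower alternative to a blanket try/except is to treat "link
already exists" as success and let everything else surface. A minimal sketch
(EnsureSymlink is a hypothetical helper, not a function in pa_job_manager.py):

import os

def EnsureSymlink(target, link_name):
    # create the parent directory first; exist_ok=True makes repeat runs safe
    if os.path.dirname(link_name):
        os.makedirs(os.path.dirname(link_name), exist_ok=True)
    try:
        os.symlink(target, link_name)
    except FileExistsError:
        # already present -- fine for an idempotent job, nothing to do
        pass
    except OSError as exc:
        # real failures (permissions, read-only fs, ...) still propagate
        raise RuntimeError(f"could not link {link_name} -> {target}") from exc
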
pa_job_manager.py

@@ -766,6 +766,7 @@ def JobMetadata(job):
    which=[jex.value for jex in job.extra if jex.name == "which"][0]
    face_id=[jex.value for jex in job.extra if jex.name == "face_id"][0]
    f=session.query(Face).get(face_id)
    try:
        if which == 'add_force_match_override' or which=='remove_force_match_override':
            person_id=[jex.value for jex in job.extra if jex.name == "person_id"][0]
            p=session.query(Person).get(person_id)

@@ -779,7 +780,6 @@ def JobMetadata(job):
        else:
            AddLogForJob(job, f"ERROR: Failed to process metadata (which={which})" )
            return
    try:
        if str.find( which, 'add_' ) == 0:
            file_h=open(fname, 'wb')
            file_h.write(f.face)
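
Note: open()/write() above leaves the file handle open if write() raises
inside the surrounding try. A with-block closes it either way; a sketch of the
same write using the hunk's own names (fname, f.face, job, AddLogForJob):

try:
    with open(fname, 'wb') as file_h:
        file_h.write(f.face)   # the bytes blob from the Face row
except OSError as exc:
    AddLogForJob(job, f"ERROR: Failed to write face file {fname}: {exc}")
    return
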
@@ -1085,8 +1085,11 @@ def CreateSymlink(job,ptype,path):
    symlink=SymlinkName(path_type.name, path, path)
    if not os.path.exists(symlink):
        print( f"INFO: symlink does not exist, actually creating it -- s={symlink}" )
        try:
            os.makedirs( os.path.dirname(symlink), mode=0o777, exist_ok=True )
            os.symlink(path, symlink)
        except Exception as e:
            AddLogForJob( job, f"ERROR: Failed to create symlink - tried to link {symlink} -> {path}: {e}")
    return symlink

##############################################################################
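
Note: os.path.exists() follows symlinks, so it returns False for a broken
(dangling) link -- the guard above then passes and os.symlink() collides with
the stale entry, which matches the FileExistsError in the TODO traceback.
os.path.lexists() tests the link itself; a hedged sketch of that variant:

if not os.path.lexists(symlink):   # True even for a broken/dangling link
    try:
        os.makedirs( os.path.dirname(symlink), mode=0o777, exist_ok=True )
        os.symlink(path, symlink)
    except Exception as e:
        AddLogForJob( job, f"ERROR: Failed to create symlink {symlink} -> {path}: {e}")
return symlink
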
@@ -1372,8 +1375,13 @@ def MoveEntriesToOtherFolder(job, move_me, dst_storage_path, dst_rel_path):
        # we use the new path to this new Dir with the full location (the old dir is put into the new location)
        ResetAnySubdirPaths( move_me, dst_storage_path, move_me.dir_details.rel_path )
        # move the actual dir to its new location
        try:
            os.replace( orig_fs_pos, move_me.FullPathOnFS() )
        except Exception as e:
            AddLogForJob( job, f"ERROR: Failed to move dir: {orig_fs_pos} into {move_me.FullPathOnFS()}, err: {e}")
            return
        AddLogForJob( job, f"INFO: move {orig_fs_pos} -> {move_me.FullPathOnFS()}" )
        return
    else:
        # scen 3: rename dir -- as the last component of dst_rel_path is what we will rename move_me to, so don't create last bit (os.path.dirname),
        # we will just change move_me into that last dir -> renaming the dir

@@ -1386,7 +1394,11 @@ def MoveEntriesToOtherFolder(job, move_me, dst_storage_path, dst_rel_path):
        move_me.name = os.path.basename(dst_rel_path)
        session.add(move_me)
        ResetAnySubdirPaths( move_me, dst_storage_path, dst_rel_path )
        try:
            os.replace( orig_fs_pos, move_me.FullPathOnFS() )
        except Exception as e:
            AddLogForJob( job, f"ERROR: Failed to rename dir: {orig_fs_pos} -> {move_me.FullPathOnFS()}, err: {e}")
            return
        AddLogForJob( job, f"INFO: rename {orig_fs_pos} -> {move_me.FullPathOnFS()}" )
        return
    else:

@@ -1407,7 +1419,11 @@ def MoveEntriesToOtherFolder(job, move_me, dst_storage_path, dst_rel_path):
        session.add(move_me)
        # move the actual file to its new location
        AddLogForJob( job, f"DEBUG: move of FILE - {orig_fs_pos} -> {move_me.FullPathOnFS()}" )
        try:
            os.replace( orig_fs_pos, move_me.FullPathOnFS() )
        except Exception as e:
            AddLogForJob( job, f"ERROR: Failed to move file: {orig_fs_pos} -> {move_me.FullPathOnFS()}, err: {e}")
            return

    old_dir = session.query(Entry).filter(Entry.id==orig_dir_eid).first()
    CleanUpDirInDB(job, old_dir)
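
Note: os.replace() is atomic, but only within one filesystem; across mount
points it raises OSError with errno EXDEV. If moves between storage paths can
cross devices, a fallback is needed. A sketch (MoveOrCopy is a hypothetical
helper, not part of pa_job_manager.py):

import errno, os, shutil

def MoveOrCopy(src, dst):
    try:
        os.replace(src, dst)       # atomic rename on the same filesystem
    except OSError as exc:
        if exc.errno != errno.EXDEV:
            raise                  # genuine failure, let the caller log it
        shutil.move(src, dst)      # copy+delete when crossing devices
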
@@ -1424,7 +1440,10 @@ def CreateFSLocation( job, dst_path, dst_locn ):
        part_rel_path += f"{dirname}"
        parent_dir=AddDir( job, dirname, parent_dir, part_rel_path, dst_path )
        part_rel_path += "/"
    try:
        os.makedirs( dst_path.path_prefix + '/' + dst_locn, mode=0o777, exist_ok=True )
    except Exception as e:
        AddLogForJob( job, f"ERROR: Failed to makedirs: {dst_path.path_prefix + '/' + dst_locn} Err: {e}")
    return parent_dir
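
Note: the mode=0o777 passed to os.makedirs() is filtered through the process
umask (typically 0o022), so the directories usually land as 0o755. Also,
exist_ok=True only tolerates an existing directory; a plain file in the way
still raises. A small runnable demonstration of both points:

import os, tempfile

os.umask(0o022)                            # mode is ANDed with ~umask
d = tempfile.mkdtemp()
os.makedirs(os.path.join(d, "a/b"), mode=0o777, exist_ok=True)
print(oct(os.stat(os.path.join(d, "a/b")).st_mode & 0o777))   # 0o755

blocker = os.path.join(d, "blocker")
open(blocker, "w").close()
try:
    os.makedirs(blocker, exist_ok=True)    # exist_ok does not cover files
except FileExistsError:
    print("a plain file in the way still raises")
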
@@ -1591,7 +1610,13 @@ def JobImportDir(job):
    ResetExistsOnFS(job, symlink)

    # go through data once to work out file_cnt so progress bar works from first import
    try:
        walk=os.walk(path, topdown=True)
    except Exception as e:
        WithdrawDependantJobs( job, job.id, "scan job FAILED" )
        FinishJob(job, f"ERROR: Failed to 'walk' the filesystem at: {path} Err: {e}", "Failed" )
        return

    ftree=list(walk)
    overall_file_cnt=0
    for root, subdirs, files in ftree:
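
Note: os.walk() returns a lazy generator, so the try/except added around the
call above can never fire -- nothing touches the filesystem until iteration,
and by default unreadable directories are skipped silently. If the intent is
to fail the scan job on a bad root, the onerror hook plus a guard around the
iteration does it; a sketch using the hunk's own names (_walk_error is a
hypothetical callback):

def _walk_error(err):
    # called once per directory os.scandir() could not read; re-raising turns
    # the default silent skip into a hard failure during iteration
    raise err

try:
    ftree = list(os.walk(path, topdown=True, onerror=_walk_error))
except OSError as exc:
    WithdrawDependantJobs( job, job.id, "scan job FAILED" )
    FinishJob(job, f"ERROR: Failed to 'walk' the filesystem at: {path} Err: {exc}", "Failed" )
    return
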
@@ -1628,7 +1653,12 @@ def JobImportDir(job):
        session.commit()
    fname=dir.PathOnFS()+'/'+basename

    try:
        stat = os.stat(fname)
    except Exception as e:
        AddLogForJob(job, f"failed to stat file - was it removed from the underlying filesystem while PA was scanning? Err: {e}" )
        continue

    # use ctime as even a metadata change (mv'd file on the fs, or a perms change) needs to be checked
    if stat.st_ctime > dir.last_import_date:
        if DEBUG:
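
Note: catching Exception here also hides permission errors and stale-mount
errors behind the same "was it removed?" message. Splitting the expected race
from the rest keeps real faults visible; a sketch in the hunk's own terms:

try:
    stat = os.stat(fname)
except FileNotFoundError:
    # the common race: file deleted between the directory scan and the stat
    AddLogForJob(job, f"file vanished while PA was scanning, skipping: {fname}" )
    continue
except OSError as exc:
    # anything else (EACCES, stale NFS handle, ...) deserves a louder log
    AddLogForJob(job, f"ERROR: could not stat {fname}: {exc}" )
    continue
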
@@ -1821,7 +1851,12 @@ def GenHashAndThumb(job, e):
    # commit every 100 files to see progress being made but not hammer the database
    if job.current_file_num % 100 == 0:
        session.commit()
    try:
        stat = os.stat( e.FullPathOnFS() )
    except Exception as e:
        AddLogForJob(job, f"failed to stat file - was it removed from the underlying filesystem while PA was scanning? Err: {e}" )
        job.current_file_num+=1
        return
    # use mtime as only if the content is different do we need to redo the hash
    if stat.st_mtime < e.file_details.last_hash_date:
        if DEBUG:
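
Note: 'except Exception as e:' rebinds the function's own parameter e (the
entry being hashed), and Python unbinds that name again when the handler
exits. The handler above returns immediately, so nothing breaks today, but any
later use of e after the except block would raise NameError. Renaming the
exception variable avoids the trap; a sketch:

try:
    stat = os.stat( e.FullPathOnFS() )
except OSError as exc:      # keep 'e' for the Entry, 'exc' for the error
    AddLogForJob(job, f"failed to stat file - was it removed from the underlying filesystem while PA was scanning? Err: {exc}" )
    job.current_file_num += 1
    return
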
@@ -1946,12 +1981,12 @@ def GenVideoThumbnail( job, fname):
            .overwrite_output()
            .run(capture_stdout=True, capture_stderr=True)
        )
        thumbnail, w, h = GenThumb( tmp_fname, False )
        os.remove( tmp_fname )
    except ffmpeg.Error as e:
        AddLogForJob( job, f"ERROR: Failed to Generate thumbnail for video file: {fname} - error={e}" )
        return None

    thumbnail, w, h = GenThumb( tmp_fname, False )
    os.remove( tmp_fname )
    return thumbnail

####################################################################################################################################
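
Note: this hunk apparently moves the GenThumb()/os.remove() pair up inside the
try, so no thumbnail is attempted after ffmpeg has already failed; the
trailing copy below the except is the old placement. A try/finally removes the
temp frame even when GenThumb() itself raises. A sketch (the ffmpeg
input/output options are assumptions, the hunk does not show them):

try:
    (
        ffmpeg
        .input(fname, ss=1)            # assumed options -- sketch only
        .output(tmp_fname, vframes=1)
        .overwrite_output()
        .run(capture_stdout=True, capture_stderr=True)
    )
except ffmpeg.Error as e:
    AddLogForJob( job, f"ERROR: Failed to Generate thumbnail for video file: {fname} - error={e}" )
    return None

try:
    thumbnail, w, h = GenThumb( tmp_fname, False )
finally:
    os.remove( tmp_fname )             # temp frame goes away even if GenThumb raises
return thumbnail
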
@@ -2282,7 +2317,7 @@ def InitialValidationChecks():
    path=SettingsRBPath()
    rbp_exists=0
    if os.path.exists(path):
        rbp_exists=1
        try:
            root, dirs, files = next(os.walk(path))
            if len(dirs) + len(files) > 0:
                AddLogForJob(job, "INFO: the bin path contains content, cannot process to know where original deletes were from - skipping content!" )
@@ -2290,6 +2325,9 @@ def InitialValidationChecks():
                AddLogForJob(job, "WARNING: IF the files in the bin are in the DB (succeeded from GUI deletes) then this is okay, otherwise you should delete contents from the recycle bin and restart the job manager" )
            # create symlink and Path/Dir if needed
            ProcessRecycleBinDir(job)
            rbp_exists=1
        except Exception as ex:
            print( f"FATAL ERROR: Failed to walk the recycle bin at {path} Err:{ex}" )
    else:
        AddLogForJob(job, "ERROR: The bin path in settings does not exist - Please fix now")
        sp_exists=0
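
Note: next(os.walk(path)) materialises the whole first directory level just to
ask "is it empty?", and raises StopIteration if path vanished after the
os.path.exists() check (the broad except above happens to catch that too,
since StopIteration subclasses Exception). os.scandir() stops at the first
entry; a hedged sketch (BinIsEmpty is a hypothetical helper). Separately, the
sp_exists=0 in the else branch looks like it may be intended as rbp_exists=0;
the hunk is kept as committed.

import os

def BinIsEmpty(path):
    try:
        with os.scandir(path) as it:
            return not any(True for _ in it)   # stops at the first entry
    except OSError:
        return False    # unreadable or missing counts as "not known empty"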