broad (small) changes to make the style of all routes and jobs consistent, e.g. use underscores between words

2023-01-15 23:17:59 +11:00
parent dc11a0697b
commit 4b1bbcb2bf
16 changed files with 88 additions and 86 deletions


@@ -687,22 +687,22 @@ def JobsForPath( parent_job, path, ptype ):
     jex=[]
     jex.append( JobExtra( name="path", value=path ) )
     jex.append( JobExtra( name="path_type", value=ptype.id ) )
-    job1=NewJob( name="importdir", num_files=cfn, wait_for=None, jex=jex, parent_job=parent_job, desc=f"scan for files from {ptype.name} path" )
+    job1=NewJob( name="import_dir", num_files=cfn, wait_for=None, jex=jex, parent_job=parent_job, desc=f"scan for files from {ptype.name} path" )
     # then get file details (hash/thumbs)
     jex=[]
     jex.append( JobExtra( name="path", value=path ) )
-    job2=NewJob( name="getfiledetails", num_files=0, wait_for=job1.id, jex=jex, parent_job=parent_job, desc=f"get details of files from {ptype.name} path" )
-    # can start straight after importdir - job1, does not need details (job2)
+    job2=NewJob( name="get_file_details", num_files=0, wait_for=job1.id, jex=jex, parent_job=parent_job, desc=f"get details of files from {ptype.name} path" )
+    # can start straight after import_dir - job1, does not need details (job2)
     jex=[]
     jex.append( JobExtra( name="person", value="all" ) )
     jex.append( JobExtra( name="path_type", value=ptype.id ) )
     job3=NewJob( name="run_ai_on_path", num_files=0, wait_for=job1.id, jex=jex, parent_job=parent_job, desc=f"match faces on files from {ptype.name} path" )
-    # careful here, wait for getfiledetails (job2), the ai job cannot cause a dup
-    # but it can fail - in which case the checkdup will be withdrawn
-    job4=NewJob( name="checkdups", num_files=0, wait_for=job2.id, jex=None, parent_job=parent_job, desc="check for duplicate files" )
+    # careful here, wait for get_file_details (job2), the ai job cannot cause a dup
+    # but it can fail - in which case the check_dup will be withdrawn
+    job4=NewJob( name="check_dups", num_files=0, wait_for=job2.id, jex=None, parent_job=parent_job, desc="check for duplicate files" )
     # okay, now process all the new jobs
     HandleJobs(False)
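
This hunk wires a chain: job2 (get_file_details) and job3 (run_ai_on_path) both wait on job1 (import_dir), and job4 (check_dups) waits on job2. NewJob and HandleJobs internals are not part of this commit, so the scheduler below is only a hypothetical sketch of how such a wait_for chain drains, not the project's implementation.

    from dataclasses import dataclass
    from typing import Optional

    # Hypothetical stand-in for the Job rows created above; only the fields
    # the dependency chain needs, not the project's actual model.
    @dataclass
    class FakeJob:
        id: int
        name: str
        wait_for: Optional[int] = None
        done: bool = False

    def runnable(jobs):
        # a job may start once the job it waits on (if any) has completed
        done_ids = {j.id for j in jobs if j.done}
        return [j for j in jobs
                if not j.done and (j.wait_for is None or j.wait_for in done_ids)]

    chain = [
        FakeJob(1, "import_dir"),
        FakeJob(2, "get_file_details", wait_for=1),
        FakeJob(3, "run_ai_on_path", wait_for=1),
        FakeJob(4, "check_dups", wait_for=2),
    ]
    while batch := runnable(chain):
        for j in batch:
            print("running", j.name)  # job 1, then jobs 2 and 3, then job 4
            j.done = True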
@@ -835,17 +835,17 @@ def RunJob(job):
     job.start_time=datetime.now(pytz.utc)
     if job.name =="scan_ip":
         JobScanImportDir(job)
-    elif job.name =="forcescan":
+    elif job.name =="force_scan":
         JobForceScan(job)
     elif job.name =="scan_sp":
         JobScanStorageDir(job)
-    elif job.name =="importdir":
+    elif job.name =="import_dir":
         JobImportDir(job)
-    elif job.name =="getfiledetails":
+    elif job.name =="get_file_details":
         JobGetFileDetails(job)
-    elif job.name == "checkdups":
+    elif job.name == "check_dups":
         JobCheckForDups(job)
-    elif job.name == "rmdups":
+    elif job.name == "rm_dups":
         JobRemoveDups(job)
     elif job.name == "delete_files":
         JobDeleteFiles(job)
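
With every job name now in the same snake_case style, the if/elif ladder above could equally be driven by a lookup table. A hypothetical sketch, not code from this commit; the handler functions are the ones named in the hunk:

    # Hypothetical dispatch table over the renamed job names.
    JOB_HANDLERS = {
        "scan_ip": JobScanImportDir,
        "force_scan": JobForceScan,
        "scan_sp": JobScanStorageDir,
        "import_dir": JobImportDir,
        "get_file_details": JobGetFileDetails,
        "check_dups": JobCheckForDups,
        "rm_dups": JobRemoveDups,
        "delete_files": JobDeleteFiles,
    }

    def run_job(job):
        handler = JOB_HANDLERS.get(job.name)
        if handler is None:
            raise ValueError(f"unknown job name: {job.name}")
        handler(job)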
@@ -1570,7 +1570,7 @@ def WithdrawDependantJobs( job, id, reason ):
 ####################################################################################################################################
 # next 3 funcs used to optimise whether to do dependant jobs (i.e. no new files, dont keep doing file details, ai scans
-# find last successful importdir job for this path
+# find last successful import_dir job for this path
 ####################################################################################################################################
 def find_last_time_new_files_found(job):
     path=[jex.value for jex in job.extra if jex.name == "path"][0]
@@ -1581,11 +1581,11 @@ def find_last_time_new_files_found(job):
     return 0
 ####################################################################################################################################
-# find time of last getfiledetails job for this path
+# find time of last get_file_details job for this path
 ####################################################################################################################################
 def find_last_successful_gfd_job(job):
     path=[jex.value for jex in job.extra if jex.name == "path"][0]
-    jobs=session.query(Job).join(JobExtra).filter(Job.name=="getfiledetails").filter(JobExtra.value==path).filter(Job.state=='Completed').order_by(Job.id.desc()).limit(1).all()
+    jobs=session.query(Job).join(JobExtra).filter(Job.name=="get_file_details").filter(JobExtra.value==path).filter(Job.state=='Completed').order_by(Job.id.desc()).limit(1).all()
     for j in jobs:
         return j.last_update.timestamp()
     return 0
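
find_last_successful_gfd_job and its sibling lookups differ mainly in the job name they filter on. A hypothetical factoring of the query pattern above (the helper name is an assumption; Job, JobExtra and session come from the surrounding file):

    # Hypothetical helper generalising the "last completed job for this path"
    # query shown above.
    def find_last_completed_job_time(job_name, path):
        jobs = (session.query(Job)
                .join(JobExtra)
                .filter(Job.name == job_name)
                .filter(JobExtra.value == path)
                .filter(Job.state == 'Completed')
                .order_by(Job.id.desc())
                .limit(1)
                .all())
        for j in jobs:
            return j.last_update.timestamp()
        return 0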
@@ -1731,7 +1731,7 @@ def JobImportDir(job):
     last_ai_scan=find_last_successful_ai_scan(job)
     for j in session.query(Job).filter(Job.wait_for==job.id).all():
-        if j.name == "getfiledetails" and last_file_details > last_scan:
+        if j.name == "get_file_details" and last_file_details > last_scan:
             FinishJob(j, f"Job #{j.id} has been withdrawn -- #{job.id} (scan job) did not find new files", "Withdrawn" )
         # scan found no new files and last ai scan was after the last file scan
         if j.name == "run_ai_on_path" and last_ai_scan > last_scan:
@@ -2032,10 +2032,10 @@ def GenVideoThumbnail( job, fname):
 # /removedups, but some other job has since created another dup message...
 ####################################################################################################################################
 def ClearOtherDupMessagesAndJobs():
-    msgs=session.query(PA_JobManager_FE_Message).join(Job).filter(Job.name=='checkdups')
+    msgs=session.query(PA_JobManager_FE_Message).join(Job).filter(Job.name=='check_dups')
     for msg in msgs:
         session.query(PA_JobManager_FE_Message).filter(PA_JobManager_FE_Message.id==msg.id).delete()
-    cd_jobs=session.query(Job).filter(Job.name=='checkdups').filter(Job.pa_job_state=='New').all()
+    cd_jobs=session.query(Job).filter(Job.name=='check_dups').filter(Job.pa_job_state=='New').all()
     for j in cd_jobs:
         FinishJob(j, "New CheckForDups job/removal supercedes this job, withdrawing it", "Withdrawn")
     session.commit()
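
The loop above deletes matching messages one id at a time; the same effect can be had with a single bulk delete. A hypothetical equivalent, not code from this commit:

    # Hypothetical bulk form of the per-row delete loop above.
    ids = [m.id for m in session.query(PA_JobManager_FE_Message)
                                .join(Job)
                                .filter(Job.name == 'check_dups')]
    session.query(PA_JobManager_FE_Message) \
           .filter(PA_JobManager_FE_Message.id.in_(ids)) \
           .delete(synchronize_session=False)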
@@ -2065,7 +2065,7 @@ def JobCheckForDups(job):
 def JobRemoveDups(job):
     JobProgressState( job, "In Progress" )
     AddLogForJob(job, f"INFO: Starting Remove Duplicates job...")
-    # as checkdups covers all dups, delete all future dups messages, and Withdraw future checkdups jobs
+    # as check_dups covers all dups, delete all future dups messages, and Withdraw future check_dups jobs
     ClearOtherDupMessagesAndJobs()
     dup_cnt=0
@@ -2120,8 +2120,8 @@ def JobRemoveDups(job):
         dup_cnt += 1
-    # Need to put another checkdups job in now to force / validate we have no dups
-    next_job=NewJob( name="checkdups", num_files=0, wait_for=None, jex=None, parent_job=None, desc="check for duplicate files" )
+    # Need to put another check_dups job in now to force / validate we have no dups
+    next_job=NewJob( name="check_dups", num_files=0, wait_for=None, jex=None, parent_job=None, desc="check for duplicate files" )
     AddLogForJob(job, f"adding <a href='/job/{next_job.id}'>job id={next_job.id} {next_job.name}</a> to confirm there are no more duplicates" )
     FinishJob(job, f"Finished removing {dup_cnt} duplicate files" )
     return
@@ -2152,7 +2152,7 @@ def JobMoveFiles(job):
         if 'eid-' in jex.name:
             move_me=session.query(Entry).get(jex.value)
             MoveEntriesToOtherFolder( job, move_me, dst_storage_path, f"{prefix}{suffix}" )
-    NewJob( name="checkdups", num_files=0, wait_for=None, jex=None, parent_job=None, desc="check for duplicate files" )
+    NewJob( name="check_dups", num_files=0, wait_for=None, jex=None, parent_job=None, desc="check for duplicate files" )
     FinishJob(job, f"Finished move selected file(s)")
     return
@@ -2166,7 +2166,7 @@ def JobDeleteFiles(job):
         if 'eid-' in jex.name:
             del_me=session.query(Entry).join(File).filter(Entry.id==jex.value).first()
             MoveFileToRecycleBin(job,del_me)
-    NewJob( name="checkdups", num_files=0, wait_for=None, jex=None, parent_job=None, desc="check for duplicate files" )
+    NewJob( name="check_dups", num_files=0, wait_for=None, jex=None, parent_job=None, desc="check for duplicate files" )
     FinishJob(job, f"Finished deleting selected file(s)")
     return
@@ -2180,7 +2180,7 @@ def JobRestoreFiles(job):
         if 'eid-' in jex.name:
             restore_me=session.query(Entry).join(File).filter(Entry.id==jex.value).first()
             RestoreFile(job,restore_me)
-    NewJob( name="checkdups", num_files=0, wait_for=None, jex=None, parent_job=None, desc="check for duplicate files" )
+    NewJob( name="check_dups", num_files=0, wait_for=None, jex=None, parent_job=None, desc="check for duplicate files" )
     FinishJob(job, f"Finished restoring selected file(s)")
     return
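
The last three hunks enqueue the same check_dups job verbatim after moving, deleting or restoring files. A hypothetical wrapper would keep the renamed literal in one place (the NewJob call is copied from the hunks above):

    # Hypothetical wrapper so the "check_dups" job name lives in one place.
    def enqueue_check_dups(parent_job=None):
        return NewJob( name="check_dups", num_files=0, wait_for=None, jex=None,
                       parent_job=parent_job, desc="check for duplicate files" )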