clarified bug-82, fixed an issue where the AI scan optimisation still scanned when it should not, finalised back button work for view/viewlist

2022-01-27 21:44:29 +11:00
parent 07b339f5ab
commit 0751cc6010
5 changed files with 72 additions and 81 deletions


@@ -548,7 +548,7 @@ def JobsForPaths( parent_job, paths, ptype ):
job3=Job(start_time=now, last_update=now, name="run_ai_on_path", state="New", wait_for=job1.id, pa_job_state="New", current_file_num=0 )
job3.extra.append( JobExtra( name="person", value="all" ) )
job3.extra.append( JobExtra( name="ptype", value=ptype.name ) )
job3.extra.append( JobExtra( name="path_type", value=ptype.id ) )
session.add(job3)
session.commit()
if parent_job:
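
Note: the run_ai_on_path job now records the path type's id under the extra key "path_type" instead of the type's name under "ptype", which lines up with the lookups changed further down. A minimal sketch (not part of the commit) of how such an extra could be resolved back to its PathType row, assuming the Job/JobExtra/PathType models used in this file; the helper name is illustrative only:

# Sketch only: resolve the "path_type" extra back to its PathType row.
# Assumes the session/models defined in this file; not part of the commit.
def ptype_for_job(session, job):
    values = [jex.value for jex in job.extra if jex.name == "path_type"]
    if not values:
        return None
    # mirrors the id-based filter used in JobRunAIOnPath below
    return session.query(PathType).filter(PathType.id == values[0]).first()
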
@@ -1284,8 +1284,8 @@ def find_last_successful_gfd_job(job):
# find time of last run_ai_on_path job for this path
####################################################################################################################################
def find_last_successful_ai_scan(job):
path=[jex.value for jex in job.extra if jex.name == "path"][0]
jobs=session.query(Job).join(JobExtra).filter(Job.name=="run_ai_on_path").filter(JobExtra.value==path).filter(Job.state=='Completed').order_by(Job.id.desc()).limit(1).all()
path_type=[jex.value for jex in job.extra if jex.name == "path_type"][0]
jobs=session.query(Job).join(JobExtra).filter(Job.name=="run_ai_on_path").filter(JobExtra.name=='path_type',JobExtra.value==path_type).filter(Job.state=='Completed').order_by(Job.id.desc()).limit(1).all()
for j in jobs:
return j.last_update.timestamp()
return 0
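
Note: the previous query joined JobExtra and filtered only on JobExtra.value, so any extra row whose value happened to equal the path would satisfy the join regardless of its name. The corrected query matches both the extra's name ("path_type") and its value. A sketch (not in the commit) of the general shape of that lookup, assuming the Job/JobExtra models used in this file; the function name is illustrative:

# Sketch only: last completed job of a given name, keyed on a named extra.
# Filtering on JobExtra.name and JobExtra.value together avoids matching an
# unrelated extra (e.g. "person") that happens to share the same value.
def last_completed_job(session, job_name, extra_name, extra_value):
    return (session.query(Job)
            .join(JobExtra)
            .filter(Job.name == job_name)
            .filter(JobExtra.name == extra_name, JobExtra.value == extra_value)
            .filter(Job.state == 'Completed')
            .order_by(Job.id.desc())
            .first())
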
@@ -1385,7 +1385,6 @@ def JobImportDir(job):
dir.last_import_date = time.time()
job.num_files=overall_file_cnt
if found_new_files:
print("adding new_files jex" )
job.extra.append( JobExtra( name="new_files", value=found_new_files ) )
session.add(job)
@@ -1396,16 +1395,17 @@ def JobImportDir(job):
last_file_details=find_last_successful_gfd_job(job)
last_ai_scan=find_last_successful_ai_scan(job)
print( f"last_scan={last_scan}" )
print( f"last_file_details={last_file_details}" )
print( f"last_ai_scan={last_ai_scan}" )
for j in session.query(Job).filter(Job.wait_for==job.id).all():
if j.name == "getfiledetails" and last_file_details > last_scan:
FinishJob(j, f"Job (#{j.id}) has been withdrawn -- #{job.id} (scan job) did not find new files", "Withdrawn" )
# scan found no new files and last ai scan was after the last file scan
if j.name == "run_ai_on_path" and last_ai_scan > last_scan:
newest_refimg = session.query(Refimg).order_by(Refimg.created_on.desc()).limit(1).all()
if newest_refimg and last_scan >= newest_refimg[0].created_on:
# IF we also have no new refimgs since last scan, then no need to run any AI again
if newest_refimg and newest_refimg[0].created_on < last_scan:
FinishJob(j, f"Job (#{j.id}) has been withdrawn -- scan did not find new files, and no new reference images since last scan", "Withdrawn" )
# IF we also have no new refimgs since last AI scan, then no need to run any AI again
elif newest_refimg and newest_refimg[0].created_on < last_ai_scan:
FinishJob(j, f"Job (#{j.id}) has been withdrawn -- scan did not find new files, and no new reference images since last scan", "Withdrawn" )
FinishJob(job, f"Finished Importing: {path} - Processed {overall_file_cnt} files, Found {found_new_files} new files, Removed {rm_cnt} file(s)")
return
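
Note: this is the "still scanning when it should not" fix from the commit message. The pending run_ai_on_path job is now also withdrawn when the newest reference image predates the last successful AI scan, not only the last file scan. A condensed reading of the new test (sketch only, not in the commit), assuming last_scan, last_ai_scan and newest_refimg carry the values computed above:

# Sketch only: should a queued run_ai_on_path job be withdrawn after a scan
# that found no new files? Condenses the if/elif above into one predicate.
def should_withdraw_ai_job(last_scan, last_ai_scan, newest_refimg):
    if last_ai_scan <= last_scan:
        return False              # file scan is newer, AI still needs to run
    if not newest_refimg:
        return False              # no reference images recorded yet
    created_on = newest_refimg[0].created_on
    # no refimgs added since the last file scan, or since the last AI scan
    return created_on < last_scan or created_on < last_ai_scan
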
@@ -1443,8 +1443,8 @@ def AddToJobImageCount(job, entry ):
# so we can then just call JobRunAIOn
####################################################################################################################################
def JobRunAIOnPath(job):
which_ptype=[jex.value for jex in job.extra if jex.name == "ptype"][0]
paths=session.query(Path).join(PathType).filter(PathType.name==which_ptype).all()
path_type=[jex.value for jex in job.extra if jex.name == "path_type"][0]
paths=session.query(Path).join(PathType).filter(PathType.id==path_type).all()
path_cnt=0
for p in paths:
d = session.query(Dir).join(PathDirLink).filter(PathDirLink.path_id==p.id).filter(Dir.rel_path=='').first()