make optimisation from scan: do run_ai_on_path IF there are newer reference images than the last scan, even if we don't find new files in the scan itself

This commit is contained in:
2022-01-19 15:37:27 +11:00
parent 2b923f0b60
commit f8cff5f73e
2 changed files with 15 additions and 2 deletions

5
TODO
View File

@@ -1,4 +1,9 @@
## GENERAL
* going forward into search page (and probably all POSTs) does not work - no data posted (e.g. no search term)
- use replaceState in routes I know use POST and I need to keep data, try search first
https://developer.mozilla.org/en-US/docs/Web/API/WindowEventHandlers/onpopstate
* per file you could select an unknown face and add it as a ref img to an existing person, or make a new person and attach?
* [DONE] order/ find face with largest size and at least show that as unmatched
  - could also try to check it vs. other faces; if it matches more than, say, 10, we offer it up as a required ref img, then cut that face (with margin) out and use it as a new ref image / person

View File

@@ -546,7 +546,7 @@ def JobsForPaths( parent_job, paths, ptype ):
if parent_job:
AddLogForJob(parent_job, f"adding <a href='/job/{job2.id}'>job id={job2.id} {job2.name}</a> (wait for: {job2.wait_for})")
job3=Job(start_time=now, last_update=now, name="run_ai_on_path", state="New", wait_for=job2.id, pa_job_state="New", current_file_num=0 )
job3=Job(start_time=now, last_update=now, name="run_ai_on_path", state="New", wait_for=job1.id, pa_job_state="New", current_file_num=0 )
job3.extra.append( JobExtra( name="person", value="all" ) )
job3.extra.append( JobExtra( name="ptype", value=ptype.name ) )
session.add(job3)
@@ -1299,6 +1299,8 @@ def JobImportDir(job):
dir=session.query(Dir).join(PathDirLink).join(Path).filter(Path.id==path_obj.id,Dir.rel_path=='').first()
# session.add in case we already have imported this dir (as AddDir wont) & now we might have diff num of files to last time,
session.add(dir)
orig_last_import = dir.last_import_date
# if we set / then commit this now, the web page will know how many files
# to process as we then do the slow job of processing them
@@ -1353,7 +1355,13 @@ def JobImportDir(job):
rm_cnt=HandleAnyFSDeletions(job)
if found_new_files == 0:
WithdrawDependantJobs( job, job.id, "scan job found no new files to process" )
for j in session.query(Job).filter(Job.wait_for==job.id).all():
if j.name == "getfiledetails":
FinishJob(j, f"Job (#{j.id}) has been withdrawn -- #{job.id} (scan job) did not find new files", "Withdrawn" )
if j.name == "run_ai_on_path":
newest_refimg = session.query(Refimg).order_by(Refimg.created_on.desc()).limit(1).all()
if newest_refimg and orig_last_import >= newest_refimg[0].created_on:
FinishJob(j, f"Job (#{j.id}) has been withdrawn -- scan did not find new files, and no new reference images since last scan", "Withdrawn" )
FinishJob(job, f"Finished Importing: {path} - Processed {overall_file_cnt} files, Found {found_new_files} new files, Removed {rm_cnt} file(s)")
return