diff --git a/TODO b/TODO
index a91f49d..48d6caf 100644
--- a/TODO
+++ b/TODO
@@ -1,4 +1,9 @@
 ## GENERAL
+ * going forward into search page (and probably all POSTs) does not work - no data posted (e.g. no search term)
+   - use replaceState in routes I know use POST and where I need to keep data, try search first
+     https://developer.mozilla.org/en-US/docs/Web/API/WindowEventHandlers/onpopstate
+
+ * per file you could select an unknown face and add it as a ref img to an existing person, or make a new person and attach?
 * [DONE] order/ find face with largest size and at least show that as unmatched
   - could also try to check it vs. other faces, if it matches more than say 10? we offer it up as a required ref img, then cut that face (with margin) out and use it as a new ref image / person
 
diff --git a/pa_job_manager.py b/pa_job_manager.py
index 20f2e48..957ed58 100644
--- a/pa_job_manager.py
+++ b/pa_job_manager.py
@@ -546,7 +546,7 @@ def JobsForPaths( parent_job, paths, ptype ):
 
         if parent_job:
             AddLogForJob(parent_job, f"adding job id={job2.id} {job2.name} (wait for: {job2.wait_for})")
-        job3=Job(start_time=now, last_update=now, name="run_ai_on_path", state="New", wait_for=job2.id, pa_job_state="New", current_file_num=0 )
+        job3=Job(start_time=now, last_update=now, name="run_ai_on_path", state="New", wait_for=job1.id, pa_job_state="New", current_file_num=0 )
         job3.extra.append( JobExtra( name="person", value="all" ) )
         job3.extra.append( JobExtra( name="ptype", value=ptype.name ) )
         session.add(job3)
@@ -1299,6 +1299,8 @@ def JobImportDir(job):
     dir=session.query(Dir).join(PathDirLink).join(Path).filter(Path.id==path_obj.id,Dir.rel_path=='').first()
     # session.add in case we already have imported this dir (as AddDir won't) & now we might have a diff num of files to last time
     session.add(dir)
+
+    orig_last_import = dir.last_import_date
 
     # if we set / then commit this now, the web page will know how many files
     # to process as we then do the slow job of processing them
@@ -1353,7 +1355,13 @@ def JobImportDir(job):
     rm_cnt=HandleAnyFSDeletions(job)
 
     if found_new_files == 0:
-        WithdrawDependantJobs( job, job.id, "scan job found no new files to process" )
+        for j in session.query(Job).filter(Job.wait_for==job.id).all():
+            if j.name == "getfiledetails":
+                FinishJob(j, f"Job (#{j.id}) has been withdrawn -- #{job.id} (scan job) did not find new files", "Withdrawn" )
+            if j.name == "run_ai_on_path":
+                newest_refimg = session.query(Refimg).order_by(Refimg.created_on.desc()).limit(1).all()
+                if newest_refimg and orig_last_import >= newest_refimg[0].created_on:
+                    FinishJob(j, f"Job (#{j.id}) has been withdrawn -- scan did not find new files, and no new reference images since last scan", "Withdrawn" )
 
     FinishJob(job, f"Finished Importing: {path} - Processed {overall_file_cnt} files, Found {found_new_files} new files, Removed {rm_cnt} file(s)")
     return
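
For the replaceState item in the TODO hunk above: a minimal browser-side sketch of the idea,
assuming the search results are (re)rendered client-side; renderSearchResults() and the "term"
field are illustrative names only, not taken from this codebase.

    // Illustrative renderer stub -- the real app would redraw the results view.
    function renderSearchResults(term) {
      document.title = "Search: " + term;
    }

    // After handling the POSTed search, attach the submitted data to the current
    // history entry so it survives back/forward navigation.
    function onSearchSubmitted(term) {
      history.replaceState({ route: "search", term: term }, "", "/search");
      renderSearchResults(term);
    }

    // On back/forward the saved state arrives via the popstate event, so the page
    // can be rebuilt from event.state instead of losing the POSTed data.
    window.addEventListener("popstate", (event) => {
      if (event.state && event.state.route === "search") {
        renderSearchResults(event.state.term);
      }
    });

replaceState (rather than pushState) fits here because the POST navigation has already created
the current history entry; the call only attaches the search data to it.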