When restarting JobRunAIOnPath, don't add the path again; move a couple of debug lines to an actual DEBUG level
@@ -745,7 +745,7 @@ def HandleJobs(first_run=False):
         print("INFO: PA job manager is starting up - check for stale jobs" )
     else:
         if DEBUG:
-            print("INFO: PA job manager is scanning for new jobs to process")
+            print("DEBUG: PA job manager is scanning for new jobs to process")
     for job in session.query(Job).filter(Job.pa_job_state != 'Complete').all():
         if first_run and job.pa_job_state == 'In Progress':
             print( f"INFO: Found stale job#{job.id} - {job.name}" )
@@ -1482,7 +1482,16 @@ def JobRunAIOnPath(job):
     for p in paths:
         d = session.query(Dir).join(PathDirLink).filter(PathDirLink.path_id==p.id).filter(Dir.rel_path=='').first()
         DelMatchesForDir( job, d.eid )
-        job.extra.append( JobExtra( name=f"eid-{path_cnt}", value=f"{d.eid}" ) )
+        # small chance we are restarting a job (and this is the one that is likely to be 'stale' and restarted), so accommodate
+        # this by not adding it twice -- only really throws the count out and makes it 're-process', but if this is, say, the storage path,
+        # that can be another 10s of thousands of files to re-AI over...
+        already_there=False
+        for ex in job.extra:
+            if ex.name == f"eid-{path_cnt}":
+                already_there=True
+                break
+        if not already_there:
+            job.extra.append( JobExtra( name=f"eid-{path_cnt}", value=f"{d.eid}" ) )
         path_cnt+=1
     JobRunAIOn(job)
     return
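
The guard above can be read in isolation as "check job.extra before appending". Below is a minimal, self-contained sketch of the same idea, assuming only the eid-{n} naming and the name/value fields visible in the diff; the dataclasses and the add_path_marker helper are hypothetical stand-ins, not project code. An any() expression states the membership test a little more compactly than the explicit loop-and-flag.

from dataclasses import dataclass, field

@dataclass
class JobExtra:          # stand-in for the project's JobExtra model
    name: str
    value: str

@dataclass
class Job:               # stand-in for the project's Job model
    extra: list = field(default_factory=list)

def add_path_marker(job, path_cnt, eid):
    # Skip the append if a marker for this path index already exists,
    # e.g. because the job went stale and is being restarted.
    key = f"eid-{path_cnt}"
    if not any(ex.name == key for ex in job.extra):
        job.extra.append(JobExtra(name=key, value=str(eid)))

job = Job()
add_path_marker(job, 0, 42)
add_path_marker(job, 0, 42)   # second call is a no-op, as on a restart
assert len(job.extra) == 1
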
@@ -2067,7 +2076,8 @@ def ScanFileForPerson( job, e, force=False ):
     threshold = settings.default_threshold

     # add log, set current_file and increment file_num in job
-    ProcessFileForJob( job, f'INFO: processing File: {e.name} and threshold face distance of {threshold}', e.name )
+    if DEBUG:
+        ProcessFileForJob( job, f'DEBUG: processing File: {e.name} and threshold face distance of {threshold}', e.name )
     file_h = session.query(File).get( e.id )
     # if we are forcing this, delete any old faces (this will also delete linked tables), and reset faces_created_on to None
     if force:
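
The other half of the commit (the first and last hunks) demotes per-scan chatter from INFO to DEBUG behind the existing DEBUG flag. A minimal sketch of that convention, with hypothetical log_info/log_debug helpers that are not part of the project:

DEBUG = False  # stand-in for the module-level flag the diff already checks

def log_info(msg):
    print(f"INFO: {msg}")

def log_debug(msg):
    if DEBUG:
        print(f"DEBUG: {msg}")

log_info("PA job manager is starting up - check for stale jobs")   # always shown
log_debug("PA job manager is scanning for new jobs to process")    # only when DEBUG is True
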