Improved DBox images/HTML and the look of MoveDBox. Started plumbing for pa_job_manager to do actual moves — all the data is there, just haven't performed the FS or DB move yet. Cleaned up duplicated DEBUGs that sat straight before AddLogForJob calls; tweaked the TODO appropriately.
This commit is contained in:
@@ -379,6 +379,9 @@ def AddLogForJob(job, message):
|
||||
log=Joblog( job_id=job.id, log=message, log_date=now )
|
||||
job.last_update=now
|
||||
session.add(log)
|
||||
# some logs have DEBUG: in front, so clean that up
|
||||
message = message.replace("DEBUG:", "" )
|
||||
print( f"DEBUG: {message}" )
|
||||
return
|
||||
|
||||
def RunJob(job):
|
||||
@@ -400,6 +403,8 @@ def RunJob(job):
|
||||
RemoveDups(job)
|
||||
elif job.name == "delete_files":
|
||||
JobDeleteFiles(job)
|
||||
elif job.name == "move_files":
|
||||
JobMoveFiles(job)
|
||||
elif job.name == "restore_files":
|
||||
JobRestoreFiles(job)
|
||||
elif job.name == "processai":
|
||||
@@ -415,9 +420,7 @@ def RunJob(job):
|
||||
|
||||
def CancelJob(job, id):
    """Withdraw every job that was waiting on failed job *id*, recursively.

    job -- the job that failed (used only for the log message)
    id  -- the job id that dependants were waiting on (Job.wait_for)

    Each dependant is finished in the "Withdrawn" state, then its own
    dependants are withdrawn in turn.
    """
    for j in session.query(Job).filter(Job.wait_for == id).all():
        if DEBUG == 1:
            print("DEBUG: cancelling job: {} as it was waiting for this failed job: {}".format(j.id, job.id))
        # Fix: FinishJob was called twice for each dependant (old and new
        # message variants both left in place); finish it exactly once.
        FinishJob(j, f"Job (#{j.id}) has been withdrawn as the job being waited for #{job.id} failed", "Withdrawn")
        # Cascade: anything waiting on j must be withdrawn as well.
        CancelJob(j, j.id)
    return
|
||||
|
||||
@@ -429,6 +432,8 @@ def FinishJob(job, last_log, state="Completed", pa_job_state="Completed"):
|
||||
if job.state=="Failed":
|
||||
CancelJob(job,job.id)
|
||||
session.commit()
|
||||
if DEBUG==1:
|
||||
print( f"DEBUG: {last_log}" )
|
||||
return
|
||||
|
||||
def HandleJobs():
|
||||
@@ -555,8 +560,7 @@ def AddDir(job, dirname, in_dir, rel_path, in_path ):
|
||||
if in_dir:
|
||||
e.in_dir=in_dir
|
||||
if DEBUG==1:
|
||||
print(f"DEBUG: AddDir: created d={dirname}, rp={rel_path}")
|
||||
AddLogForJob(job, f"DEBUG: Process new dir: {dirname}")
|
||||
AddLogForJob(job, f"DEBUG: Process new dir: {dirname}, rel_path={rel_path}")
|
||||
session.add(e)
|
||||
return dir
|
||||
|
||||
@@ -755,10 +759,7 @@ def GetDateFromFile(file, stat):
|
||||
return year, month, day, woy
|
||||
|
||||
def AddJexToDependantJobs(job, name, value):
    """Attach a JobExtra (name/value pair) to every job waiting on *job*,
    and recursively to their dependants, so follow-up jobs inherit the
    parameters of the job they waited for.
    """
    if DEBUG == 1:
        print(f"DEBUG: AddJexToDependantJobs({job}, {name}, {value}) ")
    for j in session.query(Job).filter(Job.wait_for == job.id).all():
        # Fix: this debug print was unguarded; wrap it in the file's
        # standard DEBUG==1 check so it is silent in production.
        if DEBUG == 1:
            print(f"DEBUG: adding jex to this job.id == {j.id}")
        jex = JobExtra(name=name, value=value)
        j.extra.append(jex)
        # Dependants of j inherit the extra as well.
        AddJexToDependantJobs(j, name, value)
|
||||
@@ -771,7 +772,7 @@ def JobImportDir(job):
|
||||
path_type=[jex.value for jex in job.extra if jex.name == "path_type"][0]
|
||||
AddLogForJob(job, f"Checking {path_type} Directory: {path}" )
|
||||
if DEBUG==1:
|
||||
print("DEBUG: Checking Directory: {}".format( path ) )
|
||||
print( f"DEBUG: Checking Directory: {path}" )
|
||||
if not os.path.exists( path ):
|
||||
FinishJob( job, f"Finished Importing: {path} -- Path does not exist", "Failed" )
|
||||
return
|
||||
@@ -890,8 +891,6 @@ def GenHashAndThumb(job, e):
|
||||
return
|
||||
|
||||
e.file_details.hash = md5( job, e.FullPathOnFS() )
|
||||
if DEBUG==1:
|
||||
print( f"{e.name} - hash={e.file_details.hash}" )
|
||||
if e.type.name == 'Image':
|
||||
e.file_details.thumbnail = GenImageThumbnail( job, e.FullPathOnFS() )
|
||||
elif e.type.name == 'Video':
|
||||
@@ -960,7 +959,6 @@ def lookForPersonInImage(job, person, unknown_encoding, e):
|
||||
deserialized_bytes = numpy.frombuffer(refimg.encodings, dtype=numpy.float64)
|
||||
results = compareAI(deserialized_bytes, unknown_encoding)
|
||||
if results[0]:
|
||||
print(f'DEBUG: Found a match between: {person.tag} and {e.name}')
|
||||
AddLogForJob(job, f'Found a match between: {person.tag} and {e.name}')
|
||||
frl.matched=True
|
||||
return
|
||||
@@ -996,7 +994,7 @@ def compareAI(known_encoding, unknown_encoding):
|
||||
|
||||
def ProcessFilesInDir(job, e, file_func):
|
||||
if DEBUG==1:
|
||||
print("DEBUG: files in dir - process: {}".format(e.FullPathOnFS()) )
|
||||
print("DEBUG: ProcessFilesInDir: {e.FullPathOnFS()}")
|
||||
if e.type.name != 'Directory':
|
||||
file_func(job, e)
|
||||
else:
|
||||
@@ -1004,14 +1002,14 @@ def ProcessFilesInDir(job, e, file_func):
|
||||
job.current_file_num+=1
|
||||
for sub in dir.files:
|
||||
ProcessFilesInDir(job, sub, file_func)
|
||||
return
|
||||
|
||||
def JobGetFileDetails(job):
|
||||
JobProgressState( job, "In Progress" )
|
||||
#### I think the fix here is to get JobImportDir (or whatever makes the PATH) to add a jex for path_prefix and just pull it here, and stop 're-creating' it via SymlinkName
|
||||
path=[jex.value for jex in job.extra if jex.name == "path"][0]
|
||||
path_prefix=[jex.value for jex in job.extra if jex.name == "path_prefix"][0]
|
||||
if DEBUG==1:
|
||||
print("DEBUG: JobGetFileDetails for path={}".format( path_prefix ) )
|
||||
print("DEBUG: JobGetFileDetails for path={path_prefix}" )
|
||||
p=session.query(Path).filter(Path.path_prefix==path_prefix).first()
|
||||
job.current_file_num = 0
|
||||
job.num_files = p.num_files
|
||||
@@ -1187,6 +1185,26 @@ def RemoveDups(job):
|
||||
AddLogForJob(job, "adding <a href='/job/{}'>job id={} {}</a> to confirm there are no more duplicates".format( next_job.id, next_job.id, next_job.name ) )
|
||||
return
|
||||
|
||||
def MoveFileToStorage(job, move_me, dst_dir):
    # Placeholder / plumbing only: logs the intended move of *move_me* into
    # *dst_dir* under the storage path.  The actual filesystem and DB move
    # is not implemented yet -- TODO confirm against the commit's plan.
    AddLogForJob(job, f"TEST: Moving {move_me.name} to {dst_dir} in storage path" )
    return
|
||||
|
||||
def JobMoveFiles(job):
    """Handle a 'move_files' job.

    Reads the destination prefix/suffix from the job's extras, logs the
    intended move for every selected entry (extras named 'eid-...'), then
    queues a follow-up 'checkdups' job and finishes.  Currently a dry run:
    MoveFileToStorage only logs, no FS or DB change happens.
    """
    AddLogForJob(job, "INFO: Starting Move Files job...")
    AddLogForJob(job, "INFO: NOT PROCESSING THIS - TESTING...")

    # Destination is built from the first 'prefix' and 'suffix' extras.
    prefix = [jex.value for jex in job.extra if jex.name == "prefix"][0]
    suffix = [jex.value for jex in job.extra if jex.name == "suffix"][0]
    destination = f"{prefix}{suffix}"

    for jex in job.extra:
        # Only extras carrying an entry id ('eid-...') select files to move.
        if 'eid-' not in jex.name:
            continue
        entry = session.query(Entry).join(File).filter(Entry.id == jex.value).first()
        MoveFileToStorage(job, entry, destination)

    # Queue a duplicate check to run after the move.
    now = datetime.now(pytz.utc)
    followup = Job(start_time=now, last_update=now, name="checkdups",
                   state="New", wait_for=None, pa_job_state="New",
                   current_file_num=0)
    session.add(followup)

    MessageToFE(job.id, "success", "Completed (move of selected files)")
    FinishJob(job, "Finished move selected file(s)")
    return
|
||||
|
||||
def JobDeleteFiles(job):
|
||||
AddLogForJob(job, f"INFO: Starting Delete Files job...")
|
||||
for jex in job.extra:
|
||||
|
||||
Reference in New Issue
Block a user