Had to revert the cv2 import line - pylint originally didn't like it written this way, but I can't see the issue now (go figure), and the other form definitely caused video thumbnails to fail. Turned off debug again. Fixed up how current_file_num increases are accounted for: use ProcessFileForJob() explicitly to increment where required, or just increment properly. Finally, in thumbnail creation, check whether the image has transparency and, if so, convert it to RGB, since we can't save it as JPEG otherwise.

2021-03-23 18:39:58 +11:00
parent a2ef27e6a1
commit 1ab339ffea
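
The thumbnail part of the fix comes down to dropping the alpha channel before the JPEG save, because Pillow cannot write an image with transparency as JPEG. A minimal standalone sketch of that check, using only Pillow - the function name and THUMBSIZE value are placeholders here, not taken from the repo:

import io
from PIL import Image, ImageOps

THUMBSIZE = 300  # placeholder; the real constant is defined elsewhere in the repo

def make_jpeg_thumbnail(path):
    # Honour the EXIF orientation tag, as the repo code does
    im = ImageOps.exif_transpose(Image.open(path))
    # JPEG has no alpha channel, so convert transparent images to plain RGB first
    if 'A' in im.getbands():
        im = im.convert('RGB')
    im.thumbnail((THUMBSIZE, THUMBSIZE))
    buf = io.BytesIO()
    im.save(buf, format='JPEG')
    return buf.getvalue()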


@@ -38,7 +38,7 @@ import hashlib
 import exifread
 import base64
 import numpy
-from cv2 import cv2
+import cv2
 import socket
 import threading
 import io
@@ -47,7 +47,7 @@ import re
 import sys
-DEBUG=1
+DEBUG=0
 sys.setrecursionlimit(50000)
 # an Manager, which the Session will use for connection resources
@@ -256,7 +256,6 @@ def ProcessImportDirs(parent_job):
     return
 def JobsForPaths( parent_job, paths ):
     now=datetime.now(pytz.utc)
     # make new set of Jobs per path... HandleJobs will make them run later
     for path in paths:
@@ -301,15 +300,17 @@ def JobsForPaths( parent_job, paths ):
     HandleJobs()
     return
-def AddLogForJob(job, message, current_file=''):
+def ProcessFileForJob(job, message, current_file):
+    job.current_file=os.path.basename(current_file)
+    if job.num_files:
+        job.current_file_num=job.current_file_num+1
+    AddLogForJob(job, message )
+    return
+def AddLogForJob(job, message):
     now=datetime.now(pytz.utc)
     log=Joblog( job_id=job.id, log=message, log_date=now )
     job.last_update=now
-    if current_file != '':
-        job.current_file=os.path.basename(current_file)
-        # this may not be set on an import job
-        if job.num_files:
-            job.current_file_num=job.current_file_num+1
     session.add(log)
     return
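
With the helper split this way, call sites that actually process a file go through ProcessFileForJob, which advances the per-job progress counters and then delegates to AddLogForJob, while plain log messages keep calling AddLogForJob directly. Roughly, as the later hunks show (the path and messages below are made up for illustration):

# Advances job.current_file / job.current_file_num, then writes a Joblog row.
ProcessFileForJob(job, "Generate Thumbnail from Image file: /photos/example.jpg", "/photos/example.jpg")

# Writes a Joblog row only; no progress bookkeeping.
AddLogForJob(job, "WARNING: could not read EXIF data for /photos/example.jpg")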
@@ -360,8 +361,8 @@ def FinishJob(job, last_log, state="Completed", pa_job_state="Completed"):
     return
 def HandleJobs():
+    if DEBUG==1:
         print("INFO: PA job manager is scanning for new jobs to process")
     for job in session.query(Job).all():
         if job.pa_job_state == 'New':
             if job.wait_for != None:
@@ -388,7 +389,7 @@ def HandleJobs():
                 MessageToFE( job.id, "danger", "Failed with: {} (try job log for details)".format(e) )
             except Exception as e2:
                 print("Failed to let front-end know, but back-end Failed to run job (id: {}, name: {} -- orig exep was: {}, this exception was: {})".format( job.id, job.name, e, e2) )
-    print("INFO: PA job manager is waiting jobs")
+    print("INFO: PA job manager is waiting for a job")
     return
 def JobProgressState( job, state ):
@@ -422,6 +423,7 @@ def JobForceScan(job):
     session.query(Entry).delete()
     session.commit()
     ProcessImportDirs(job)
+    ProcessStorageDirs(job)
     FinishJob(job, "Completed (forced remove and recreation of all file data)")
     MessageToFE( job.id, "success", "Completed (forced remove and recreation of all file data)" )
     session.commit()
@@ -505,18 +507,14 @@ def HandleAnyFSDeletions(job):
 def GetDateFromFile(file, stat):
     # try exif
     try:
-        print(f"trying exif read of {file}")
         f = open(file, 'rb')
         tags = exifread.process_file(f)
         date_str, _ = str(tags["EXIF DateTimeOriginal"]).split(" ")
-        print(date_str)
         year, month, day = date_str.split(":")
         year=int(year)
         month=int(month)
         day=int(day)
-        print(year)
         check = datetime( year, month, day )
-        print( f"check={check}" )
     except:
         # try parsing filename
         try:
@@ -525,7 +523,6 @@ def GetDateFromFile(file, stat):
             month=int(m[2])
             day=int(m[3])
             check2 = datetime( year, month, day )
-            print( f"check2={check2}" )
         # give up and use file sys date
         except:
             year, month, day, _, _, _, _, _, _ = datetime.fromtimestamp(stat.st_ctime).timetuple()
@@ -533,7 +530,6 @@ def GetDateFromFile(file, stat):
     woy=c[1]
     return year, month, day, woy
 def JobImportDir(job):
     JobProgressState( job, "In Progress" )
     settings = session.query(Settings).first()
@@ -580,7 +576,6 @@ def JobImportDir(job):
             stat = os.stat(fname)
             if stat.st_ctime > dir.last_import_date:
-                AddLogForJob(job, f"Processing new/update file: {basename}", basename )
                 if DEBUG==1:
                     print("DEBUG: {} - {} is newer than {}".format( basename, stat.st_ctime, dir.last_import_date ) )
                 if isImage(fname):
@@ -600,11 +595,12 @@ def JobImportDir(job):
print("DEBUG: {} - {} is OLDER than {}".format( basename, stat.st_ctime, dir.last_import_date ), basename ) print("DEBUG: {} - {} is OLDER than {}".format( basename, stat.st_ctime, dir.last_import_date ), basename )
job.current_file=basename job.current_file=basename
job.current_file_num+=1 job.current_file_num+=1
dir.num_files=len(files)+len(subdirs) job.current_file_num += len(subdirs)
dir.last_import_date = time.time() dir.last_import_date = time.time()
if parent_dir != None:
dir.num_files=len(files)+len(subdirs)
parent_dir=dir parent_dir=dir
job.num_files=overall_file_cnt job.num_files=overall_file_cnt
job.current_file_num=overall_file_cnt
rm_cnt=HandleAnyFSDeletions(job) rm_cnt=HandleAnyFSDeletions(job)
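
The accounting change above suggests that every entry, file or subdirectory, counts as one unit of progress: ProcessFileForJob (or the explicit += 1) covers each file, the new += len(subdirs) covers directory entries, and the final assignment pins current_file_num to overall_file_cnt so the counter ends exactly at num_files. A toy illustration of that bookkeeping (assumed semantics inferred from this hunk, not code from the repo):

# One top-level dir with 2 files and 1 subdir, the subdir holding 1 file:
# num_files = (2 + 1) + (1 + 0) = 4
# current_file_num: +1, +1 for the files, +1 for len(subdirs), +1 for the file
# in the subdir, then pinned to 4 at the end of the scan.
dirs = {"photos": (["a.jpg", "b.jpg"], ["2021"]), "photos/2021": (["c.jpg"], [])}
overall_file_cnt = sum(len(files) + len(subdirs) for files, subdirs in dirs.values())
print(overall_file_cnt)  # 4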
@@ -692,7 +688,7 @@ def ProcessAI(job, e):
     for unknown_encoding in faces:
         for person in people:
             lookForPersonInImage(job, person, unknown_encoding, e)
-    AddLogForJob(job, f"Finished processing {e.name}", e.name )
+    ProcessFileForJob(job, f"Finished processing {e.name}", e.name )
     return
 def lookForPersonInImage(job, person, unknown_encoding, e):
@@ -802,10 +798,13 @@ def isImage(file):
     return False
 def GenImageThumbnail(job, file):
-    AddLogForJob( job, "Generate Thumbnail from Image file: {}".format( file ), file )
+    ProcessFileForJob( job, "Generate Thumbnail from Image file: {}".format( file ), file )
     try:
         im_orig = Image.open(file)
         im = ImageOps.exif_transpose(im_orig)
+        bands = im.getbands()
+        if 'A' in bands:
+            im = im.convert('RGB')
         im.thumbnail((THUMBSIZE,THUMBSIZE))
         img_bytearray = io.BytesIO()
         im.save(img_bytearray, format='JPEG')
@@ -813,13 +812,12 @@ def GenImageThumbnail(job, file):
         thumbnail = base64.b64encode(img_bytearray)
         thumbnail = str(thumbnail)[2:-1]
     except Exception as e:
-        print('WARNING: NO EXIF TAGS?!?!?!?')
         AddLogForJob(job, f"WARNING: No EXIF TAF found for: {file} - error={e}")
         return None
     return thumbnail
 def GenVideoThumbnail(job, file):
-    AddLogForJob( job, "Generate Thumbnail from Video file: {}".format( file ), file )
+    ProcessFileForJob( job, "Generate Thumbnail from Video file: {}".format( file ), file )
     try:
         vcap = cv2.VideoCapture(file)
         res, frame = vcap.read()
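
The diff view is cut off here, so the rest of GenVideoThumbnail is not shown. For context, a first frame grabbed this way is typically turned into a base64 JPEG with cv2.imencode; the sketch below is an assumption about how such a function can be finished, not the repo's actual implementation:

import base64
import cv2

def video_frame_thumbnail(path, size=300):
    # Grab the first frame of the video.
    vcap = cv2.VideoCapture(path)
    ok, frame = vcap.read()
    vcap.release()
    if not ok:
        return None
    # Shrink so the longest edge is at most `size` pixels.
    h, w = frame.shape[:2]
    scale = size / max(h, w)
    if scale < 1:
        frame = cv2.resize(frame, (int(w * scale), int(h * scale)))
    # Encode as JPEG in memory and base64 it, mirroring the image path above.
    ok, buf = cv2.imencode('.jpg', frame)
    if not ok:
        return None
    return base64.b64encode(buf.tobytes()).decode('ascii')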