Fixed bugs with wrong sequence names in the DB classes, added AddLogForJob and used it in GenerateFileData, and fixed a bug where completed jobs were re-run
@@ -118,7 +118,7 @@ class FileData():
 # HACK: At present this only handles one path (need to re-factor if we have #
 # multiple valid paths in import_path)                                       #
 ##############################################################################
-    def GenerateFileData(self):
+    def GenerateFileData(self, job):
         settings = session.query(Settings).first()
         if settings == None:
             return
@@ -140,6 +140,7 @@ class FileData():
         for file in file_list[0]:
             if file == path:
                 continue
+            fname=file.replace(path, "")
             stat = os.stat(file)
             if last_import_date == 0 or stat.st_ctime > last_import_date:
                 print( "{} - {} is newer than {}".format( file, stat.st_ctime, last_import_date ) )
@@ -161,12 +162,11 @@ class FileData():
                 fhash=None

                 fsize = round(os.stat(file).st_size/(1024*1024))
-                fname=file.replace(path, "")
                 path_prefix=symlink.replace(path,"")
                 file_obj = File( name=fname, type=ftype, size_mb=fsize, hash=fhash, path_prefix=path_prefix, thumbnail=fthumbnail )
                 session.add(file_obj)
             else:
-                print( "{} - {} is OLDER than {}".format( file, stat.st_ctime, last_import_date ) )
+                AddLogForJob(job, "{} - {} is OLDER than {}".format( file, stat.st_ctime, last_import_date ), file )
         settings.last_import_date = time.time()
         session.commit()
         return self
@@ -220,7 +220,7 @@ class PA_JobManager(Base):
 
 class Joblog(Base):
     __tablename__ = "joblog"
-    id = Column(Integer, Sequence('ill_id_seq'), primary_key=True )
+    id = Column(Integer, Sequence('joblog_id_seq'), primary_key=True )
     job_id = Column(Integer, ForeignKey('job.id') )
     log_date = Column(DateTime(timezone=True))
     log = Column(String)
@@ -230,7 +230,7 @@ class Joblog(Base):
 
 class Job(Base):
     __tablename__ = "job"
-    id = Column(Integer, Sequence('joblog_id_seq'), primary_key=True )
+    id = Column(Integer, Sequence('job_id_seq'), primary_key=True )
     start_time = Column(DateTime(timezone=True))
     last_update = Column(DateTime(timezone=True))
     name = Column(String)
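With this change each ORM class draws its primary key from a sequence named after its own table: joblog ids come from joblog_id_seq and job ids from job_id_seq, instead of the copy-pasted ill_id_seq/joblog_id_seq names. On PostgreSQL the explicit Sequence name is the object created in the database, so a name borrowed from another table risks sharing that table's counter. A minimal sketch of the corrected pattern (SQLAlchemy 1.4+ import path assumed, unrelated columns omitted):

    from sqlalchemy import Column, Integer, Sequence, String
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class Joblog(Base):
        __tablename__ = "joblog"
        # Sequence named after its own table: joblog_id_seq feeds joblog.id
        id = Column(Integer, Sequence('joblog_id_seq'), primary_key=True)
        log = Column(String)

    class Job(Base):
        __tablename__ = "job"
        # A distinct job_id_seq keeps Job ids independent of Joblog rows
        id = Column(Integer, Sequence('job_id_seq'), primary_key=True)
        name = Column(String)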
@@ -273,6 +273,14 @@ def InitialiseManager():
     session.add(pa_eng)
     return
 
+def AddLogForJob(job, message, current_file=''):
+    now=datetime.now(pytz.utc)
+    log=Joblog( job_id=job.id, log=message, log_date=now )
+    job.last_update=now
+    job.current_file=current_file
+    session.add(log)
+    session.commit()
+
 def RunJob(job):
     print("Run job: {}, pa_eng state: {}, internal job state: {}".format( job.name, job.pa_job_state, job.state) )
     try:
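AddLogForJob records a timestamped Joblog row against the given job, refreshes the job's last_update and current_file, and commits straight away, so entries become visible while the job is still running. A minimal usage sketch (the job lookup and message strings are illustrative, assuming a persisted Job row):

    # Hypothetical call site: attach progress messages to an existing job
    job = session.query(Job).first()
    if job is not None:
        AddLogForJob(job, "Import started")
        AddLogForJob(job, "Skipping unchanged file", current_file="example.mkv")

Committing on every call is what lets GenerateFileData leave a per-file trail in joblog that the front end can poll before the scan finishes.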
@@ -282,8 +290,8 @@ def RunJob(job):
             print("force scan not being handled yet")
         else:
             print("Requested to process unknown job type: {}".format(job.name))
-    except:
-        MessageToFE( job.id, "danger", "Failed (see log for details)" )
+    except Exception as e:
+        MessageToFE( job.id, "danger", "Failed with: {} (try job log for details)".format(e) )
     return
 
 def HandleJobs():
@@ -291,7 +299,7 @@ def HandleJobs():
     pa_eng.state = 'Scanning Jobs'
     jobs=GetJobs()
     for job in jobs:
-        if job.pa_job_state != 'complete':
+        if job.pa_job_state != 'Completed':
             RunJob(job)
             pa_eng.num_active_jobs = pa_eng.num_active_jobs + 1
         else:
@@ -301,7 +309,7 @@ def HandleJobs():
     return
 
 def JobScanNow(job):
-    filedata.GenerateFileData()
+    filedata.GenerateFileData(job)
     job.state="Completed"
     job.pa_job_state="Completed"
     job.last_update=datetime.now(pytz.utc)
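Two related fixes close the loop here: GenerateFileData now receives the job so skipped files are logged through AddLogForJob instead of only printed, and HandleJobs compares pa_job_state against 'Completed', the exact string JobScanNow stores, so finished jobs are no longer picked up again on the next pass. A tiny sketch of why the old guard always re-ran the job (strings are illustrative):

    # The state written on success never matched the old guard's lowercase string
    state_set_by_jobscannow = "Completed"
    print(state_set_by_jobscannow != 'complete')    # True  -> old check re-ran the job
    print(state_set_by_jobscannow != 'Completed')   # False -> fixed check skips it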