fix bug where logs were committed to the DB on every log entry, rather than every 5 seconds as intended.

This commit is contained in:
2021-09-16 20:22:50 +10:00
parent 4d0addb87b
commit 85dd7d7bd1

View File

@@ -15,7 +15,7 @@
# global debug setting # global debug setting
DEBUG=1 DEBUG=0
### SQLALCHEMY IMPORTS ### ### SQLALCHEMY IMPORTS ###
from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.ext.declarative import declarative_base
@@ -455,7 +455,6 @@ def ProcessRecycleBinDir(job):
print( f"here2: {path}, s={symlink}" ) print( f"here2: {path}, s={symlink}" )
# create the Path (and Dir objects for the Bin) # create the Path (and Dir objects for the Bin)
AddPath( job, symlink, ptype.id ) AddPath( job, symlink, ptype.id )
session.commit()
return return
############################################################################## ##############################################################################
@@ -591,7 +590,7 @@ def AddLogForJob(job, message):
# if its been more than 5 seconds since our last log, then commit to the DB to show some progress # if its been more than 5 seconds since our last log, then commit to the DB to show some progress
if hasattr(job, 'last_commit'): if hasattr(job, 'last_commit'):
if (now - job.last_commit).seconds > 5: if (now - job.last_commit).seconds > 5:
job.last_commmit=now job.last_commit=now
session.commit() session.commit()
else: else:
job.last_commit = now job.last_commit = now
@@ -720,7 +719,6 @@ def JobScanNow(job):
ProcessImportDirs(job) ProcessImportDirs(job)
FinishJob( job, "Completed (scan for new files)" ) FinishJob( job, "Completed (scan for new files)" )
MessageToFE( job.id, "success", "Completed (scan for new files)" ) MessageToFE( job.id, "success", "Completed (scan for new files)" )
session.commit()
return return
############################################################################## ##############################################################################
@@ -731,7 +729,6 @@ def JobScanStorageDir(job):
ProcessStorageDirs(job) ProcessStorageDirs(job)
FinishJob( job, "Completed (scan for new files)" ) FinishJob( job, "Completed (scan for new files)" )
MessageToFE( job.id, "success", "Completed (scan for new files)" ) MessageToFE( job.id, "success", "Completed (scan for new files)" )
session.commit()
return return
############################################################################## ##############################################################################
@@ -784,6 +781,7 @@ def AddPath(job, pp, type ):
dir=AddDir( job, os.path.basename(pp), None, "", path_obj ) dir=AddDir( job, os.path.basename(pp), None, "", path_obj )
session.add(path_obj) session.add(path_obj)
session.add(dir) session.add(dir)
session.commit()
return path_obj return path_obj
@@ -1125,7 +1123,6 @@ def JobImportDir(job):
# create/find the Path # create/find the Path
path_obj=AddPath( job, symlink, path_type ) path_obj=AddPath( job, symlink, path_type )
session.commit()
# for recycle bin path, we dont want to import content, just create the path/dir vars (above) in the DB # for recycle bin path, we dont want to import content, just create the path/dir vars (above) in the DB
bin_path=session.query(Path).join(PathType).filter(PathType.name=='Bin').first() bin_path=session.query(Path).join(PathType).filter(PathType.name=='Bin').first()
if bin_path != None and path_type == bin_path.type.id: if bin_path != None and path_type == bin_path.type.id:
@@ -1448,6 +1445,7 @@ def ClearOtherDupMessagesAndJobs():
for j in cd_jobs: for j in cd_jobs:
FinishJob(j, "New CheckForDups job/removal supercedes this job, withdrawing it", "Withdrawn") FinishJob(j, "New CheckForDups job/removal supercedes this job, withdrawing it", "Withdrawn")
session.commit() session.commit()
return
#################################################################################################################################### ####################################################################################################################################
# CheckForDups(): job to dig into the DB with sql, find duplicates - if there are any, pop a F/E status to say so # CheckForDups(): job to dig into the DB with sql, find duplicates - if there are any, pop a F/E status to say so