Added code to split out storage_path and import_path; still a bit clunky, but functional. SymlinkName is also now in shared.

This commit is contained in:
2021-02-27 18:30:54 +11:00
parent 40dba847c1
commit 477bd6f099
3 changed files with 84 additions and 18 deletions

View File

@@ -25,7 +25,7 @@ from sqlalchemy.orm import scoped_session
### LOCAL FILE IMPORTS ###
from shared import DB_URL, PA_JOB_MANAGER_HOST, PA_JOB_MANAGER_PORT, THUMBSIZE
from shared import DB_URL, PA_JOB_MANAGER_HOST, PA_JOB_MANAGER_PORT, THUMBSIZE, SymlinkName
from datetime import datetime, timedelta, date
import pytz
import time
@@ -138,6 +138,7 @@ class Settings(Base):
__tablename__ = "settings"
id = Column(Integer, Sequence('settings_id_seq'), primary_key=True )
import_path = Column(String)
storage_path = Column(String)
def __repr__(self):
return f"<id: {self.id}, import_path: {self.import_path}>"
@@ -235,11 +236,24 @@ def MessageToFE( job_id, alert, message ):
session.commit()
return msg.id
def ProcessStorageDirs(parent_job):
    """Queue scan jobs for every configured storage directory.

    Reads the single Settings row, splits its '#'-delimited storage_path
    column into individual paths, and hands them to JobsForPaths, which
    creates one child job per path under parent_job.

    Raises:
        Exception: if no Settings row exists, or the storage path is unset.
    """
    settings = session.query(Settings).first()
    # Use "is None" (identity) rather than "== None" per PEP 8.
    if settings is None:
        raise Exception("Cannot create file data with no settings / storage path is missing")
    # Guard against an unset column: None.split(...) would raise a confusing
    # AttributeError; fail with the same explicit message instead.
    if not settings.storage_path:
        raise Exception("Cannot create file data with no settings / storage path is missing")
    # Multiple storage directories are stored in one column, '#'-separated.
    paths = settings.storage_path.split("#")
    JobsForPaths(parent_job, paths)
    return
def ProcessImportDirs(parent_job):
    """Queue scan jobs for every configured import directory.

    Reads the single Settings row, splits its '#'-delimited import_path
    column into individual paths, and hands them to JobsForPaths, which
    creates one child job per path under parent_job.

    Raises:
        Exception: if no Settings row exists, or the import path is unset.
    """
    settings = session.query(Settings).first()
    # Use "is None" (identity) rather than "== None" per PEP 8.
    if settings is None:
        raise Exception("Cannot create file data with no settings / import path is missing")
    # Guard against an unset column: None.split(...) would raise a confusing
    # AttributeError; fail with the same explicit message instead.
    if not settings.import_path:
        raise Exception("Cannot create file data with no settings / import path is missing")
    # Multiple import directories are stored in one column, '#'-separated.
    paths = settings.import_path.split("#")
    JobsForPaths(parent_job, paths)
    return
def JobsForPaths( parent_job, paths ):
now=datetime.now(pytz.utc)
# make new set of Jobs per path... HandleJobs will make them run later
for path in paths:
@@ -303,6 +317,8 @@ def RunJob(job):
JobScanNow(job)
elif job.name =="forcescan":
JobForceScan(job)
elif job.name =="scan_sp":
JobScanStorageDir(job)
elif job.name =="importdir":
JobImportDir(job)
elif job.name =="getfiledetails":
@@ -318,7 +334,7 @@ def RunJob(job):
# okay, we finished a job, so check for any jobs that are dependant on this and run them...
# session.close()
if job.pa_job_state != "Completed":
FinishJob(job, "PA Job Manager - This is a catchall to close of a Job, this sould never be seen and implies a job did not actually complete?", "Failed" )
FinishJob(job, "PA Job Manager - This is a catchall to close of a Job, this should never be seen and implies a job did not actually complete?", "Failed" )
HandleJobs()
return
@@ -386,6 +402,14 @@ def JobScanNow(job):
session.commit()
return
def JobScanStorageDir(job):
    """Run a 'scan_sp' job: scan the configured storage directories.

    Marks the job in progress, fans out one child job per storage path via
    ProcessStorageDirs, then closes the job and notifies the front end.
    """
    JobProgressState(job, "In Progress")
    ProcessStorageDirs(job)
    # Same completion text goes to both the job record and the FE message.
    done_msg = "Completed (scan for new files)"
    FinishJob(job, done_msg)
    MessageToFE(job.id, "success", done_msg)
    session.commit()
    return
def JobForceScan(job):
JobProgressState( job, "In Progress" )
session.query(FileRefimgLink).delete()
@@ -524,11 +548,11 @@ def JobImportDir(job):
JobProgressState( job, "In Progress" )
settings = session.query(Settings).first()
if settings == None:
raise Exception("Cannot create file data with no settings / import path is missing")
raise Exception("Cannot create file data with no settings / paths missing")
path=[jex.value for jex in job.extra if jex.name == "path"][0]
AddLogForJob(job, "Checking Import Directory: {}".format( path ) )
AddLogForJob(job, "Checking Directory: {}".format( path ) )
if DEBUG==1:
print("DEBUG: Checking Import Directory: {}".format( path ) )
print("DEBUG: Checking Directory: {}".format( path ) )
if not os.path.exists( path ):
FinishJob( job, "Finished Importing: {} -- Path does not exist".format( path), "Failed" )
return
@@ -630,6 +654,7 @@ def GenHashAndThumb(job, e):
return
e.file_details[0].hash = md5( job, e.in_dir[0].path_prefix+'/'+ e.name )
print( e )
if e.type.name == 'Image':
e.file_details[0].thumbnail = GenImageThumbnail( job, e.in_dir[0].path_prefix+'/'+ e.name )
elif e.type.name == 'Video':