mid-way through new Import function, has buggy counts, updated bugs and todos
--- a/BUGs
+++ b/BUGs
@@ -1,4 +1,6 @@
-### Next: 4
+### Next: 8
 BUG-2: Fix the function FixPath so its not just C:, use isPosixPath instead...
 BUG-3: import job (first time, does not update with #files as it runs)
 BUG-4: Duration is borked and comes out as -1 day under jobs (windows created jobs only)
+BUG-6: add a new file (e.g. touch an image in the import dir), and keep_dir[<key>] has missing key
+BUG-7: thumbnail orientation issues (see latest image from my phone - of laptop)
--- a/TODO
+++ b/TODO
@@ -50,3 +50,4 @@
 * date stuff
 * exif processing?
 * location stuff - test a new photo from my camera out
+-- image is in dir, need to look at exifread output
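The exifread output mentioned in the new TODO line can be inspected with a small standalone helper before the date and location items get wired into the import. A minimal sketch using the same exifread package that GenImageThumbnail below already calls; dump_exif_summary is hypothetical, and the exact tag names vary by camera:

    import exifread

    def dump_exif_summary(path):
        # Print the tags the "date stuff" / "location stuff" items would need.
        with open(path, 'rb') as f:
            tags = exifread.process_file(f, details=False)
        for name in ('EXIF DateTimeOriginal', 'GPS GPSLatitude',
                     'GPS GPSLatitudeRef', 'GPS GPSLongitude', 'GPS GPSLongitudeRef'):
            print("{}: {}".format(name, tags.get(name)))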
@@ -222,7 +222,7 @@ def RunJob(job):
     elif job.name =="forcescan":
         JobForceScan(job)
     elif job.name =="importdir":
-        JobImportDir(job)
+        JobNewImportDir(job)
     elif job.name =="getfiledetails":
         JobGetFileDetails(job)
     else:
@@ -302,7 +302,7 @@ def SymlinkName(path, file):
     sig_bit=file.replace(path, "")
     last_dir=os.path.basename(path[0:-1])

-    if sig_bit[-1] == os.path.sep:
+    if sig_bit[-1] == '/':
         last_bit = os.path.dirname(sig_bit)[0:-1]
     else:
         last_bit = os.path.dirname(sig_bit)
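The os.path.sep change above, like BUG-2 in the BUGs file, is about path-separator handling: os.path.sep is '\' on a Windows host, while the stored path strings here always use '/'. A minimal sketch of the kind of isPosixPath check BUG-2 asks for; the function name comes from the BUG text, the implementation is an assumption:

    # Sketch only: classify a stored path string independently of the OS the
    # server runs on. Assumes drive-letter and UNC forms are the only Windows
    # shapes that matter here.
    import re

    def isPosixPath(p):
        # "C:\..." / "C:/..." or "\\server\share" are Windows paths;
        # anything else is treated as a POSIX-style path.
        if re.match(r'^[A-Za-z]:[\\/]', p) or p.startswith('\\\\'):
            return False
        return True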
@@ -319,14 +319,21 @@ def CreateSymlink(job,path):
     return symlink

 def AddDir(job, dirname, path_prefix, in_dir):
+    # see if this exists already
+    dir=session.query(Dir).filter(Dir.path_prefix==dirname).first()
+    if dir:
+        if DEBUG==1:
+            print("Found {} returning DB object".format(dirname))
+        return dir
     dir=Dir( path_prefix=path_prefix, num_files=0, last_import_date=0, last_hash_date=0 )
     dtype=session.query(FileType).filter(FileType.name=='Directory').first()
     e=Entry( name=dirname, type=dtype )
     e.dir_details.append(dir)
-    # this occurs when we Add the actual Dir for the import_path
+    # no in_dir occurs when we Add the actual Dir for the import_path (top of the tree)
     if in_dir:
         e.in_dir.append(in_dir)
     if DEBUG==1:
+        print("AddDir: created {}".format(dirname))
         AddLogForJob(job, "DEBUG: AddDir: {} in (dir_id={})".format(dirname, in_dir) )
     session.add(e)
     return dir
@@ -341,6 +348,72 @@ def AddFile(job, fname, type_str, fsize, in_dir ):
     session.add(e)
     return e

+def JobNewImportDir(job):
+    JobProgressState( job, "In Progress" )
+    settings = session.query(Settings).first()
+    if settings == None:
+        raise Exception("Cannot create file data with no settings / import path is missing")
+    path=[jex.value for jex in job.extra if jex.name == "path"][0]
+    AddLogForJob(job, "Checking Import Directory: {}".format( path ) )
+    if DEBUG==1:
+        print("DEBUG: Checking Import Directory: {}".format( path ) )
+    if not os.path.exists( path ):
+        FinishJob( job, "Finished Importing: {} -- Path does not exist".format( path), "Failed" )
+        for j in session.query(Job).filter(Job.wait_for==job.id).all():
+            if DEBUG==1:
+                print("DEBUG: cancelling job: {} as it was waiting for this failed job: {}".format(job.id, j.id) )
+            FinishJob(j, "Job has been withdrawn as the job being waited for failed", "Withdrawn" )
+        return
+    symlink=CreateSymlink(job,path)
+    overall_file_cnt=0
+    walk=os.walk(path, topdown=True)
+    # root == path of dir, files are in dir... subdirs are in dir
+    parent_dir=None
+    for root, subdirs, files in walk:
+        overall_file_cnt+= len(subdirs) + len(files)
+        if root == path:
+            pp = symlink
+        else:
+            pp=SymlinkName( path, root )+'/'+os.path.basename(root)
+        dir=AddDir(job, os.path.basename(root), pp, parent_dir)
+        parent_dir=dir
+        stat = os.stat( dir.path_prefix )
+        # check any modificaiton on fs, since last import, if none we are done
+        if dir.last_import_date > 0 and stat.st_ctime < dir.last_import_date:
+            if DEBUG==1:
+                print( "DEBUG: Directory has not been altered since the last import, just ignore contents" )
+            job.current_file_num=dir.num_files
+            job.num_files+=dir.num_files
+            continue
+        for basename in files:
+            fname=dir.path_prefix+'/'+basename
+            stat = os.stat(fname)
+            if stat.st_ctime > dir.last_import_date:
+                if DEBUG==1:
+                    AddLogForJob(job, "DEBUG: {} - is new/updated".format( basename ), basename )
+                    print("DEBUG: {} - {} is newer than {}".format( basename, stat.st_ctime, dir.last_import_date ) )
+                if isImage(fname):
+                    type_str = 'Image'
+                elif isVideo(fname):
+                    type_str = 'Video'
+                else:
+                    type_str = 'Unknown'
+                fsize = round(stat.st_size/(1024*1024))
+                e=AddFile( job, basename, type_str, fsize, dir )
+            else:
+                if DEBUG==1:
+                    AddLogForJob(job, "DEBUG: {} - is unchanged".format( basename, basename ) )
+                    print("DEBUG: {} - {} is OLDER than {}".format( basename, stat.st_ctime, dir.last_import_date ), basename )
+        dir.num_files=len(files)+len(subdirs)
+        dir.last_import_date = time.time()
+    job.num_files=overall_file_cnt
+    FinishJob(job, "Finished Importing: {} - Found {} new files".format( path, overall_file_cnt ) )
+    ####### NEED TO FIX THIS BASED ON os.walk contents
+    import_dir=session.query(Dir).filter(Dir.path_prefix==symlink).first()
+    import_dir.num_files=overall_file_cnt
+    session.commit()
+    return
+
 def JobImportDir(job):
     JobProgressState( job, "In Progress" )
     settings = session.query(Settings).first()
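BUG-3 and the NEED TO FIX marker above point at the same gap: job.num_files is only set after the whole walk finishes, so the job never shows a count while it runs. One way to surface progress would be a cheap pre-count pass plus periodic commits of the job row; a rough sketch under the assumption that the same session and Job columns used above are available (count_entries is a hypothetical helper):

    import os

    def count_entries(path):
        # Cheap first pass so job.num_files can be set before any per-file work.
        total = 0
        for root, subdirs, files in os.walk(path, topdown=True):
            total += len(subdirs) + len(files)
        return total

    # Inside JobNewImportDir the running position could then be flushed so the
    # UI sees it, e.g.:
    #     job.num_files = count_entries(path)
    #     session.commit()
    #     ... per file ...
    #     job.current_file_num += 1
    #     if job.current_file_num % 50 == 0:
    #         session.commit()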
@@ -384,6 +457,8 @@ def JobImportDir(job):
     fname=file.replace(path, "")
     stat = os.stat(file)
     dirname=SymlinkName(path, file)
+    if not keep_dirs[dirname]:
+        print("ERROR: dirname={}, keep_dir={}, fname={}, path={}, symlink=symlink", dirname, keep_dir, fname, path, symlink )
     if stat.st_ctime > keep_dirs[dirname].last_import_date:
         if DEBUG==1:
             AddLogForJob(job, "DEBUG: {} - {} is newer than {}".format( file, stat.st_ctime, keep_dirs[dirname].last_import_date ), file )
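The new guard still indexes keep_dirs[dirname] directly, so a missing key raises KeyError before the print can run, which is the failure BUG-6 describes (a file touched in the import dir after the directory pass). A minimal sketch of a lookup that tolerates the missing key; last_import_date_for is a hypothetical helper and the fallback value of 0 is an assumption about what the code should do:

    def last_import_date_for(keep_dirs, dirname):
        # Returns the directory's last import date, or 0 if the key is missing,
        # so the file is simply treated as new instead of raising KeyError.
        d = keep_dirs.get(dirname)
        if d is None:
            print("WARNING: no keep_dirs entry for {}".format(dirname))
            return 0
        return d.last_import_date

The comparison would then read "if stat.st_ctime > last_import_date_for(keep_dirs, dirname):" instead of indexing keep_dirs[dirname] in two places.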
@@ -525,17 +600,21 @@ def isImage(file):
     return False

 def GenImageThumbnail(job, file):
+    thumbnail=None
     AddLogForJob( job, "Generate Thumbnail from Image file: {}".format( file ), file )
     f = open(file, 'rb')
     try:
         tags = exifread.process_file(f)
+        if '20210121_223307.jpg' in file:
+            print("Tag: img orientation={}".format( tags['Image Orientation']) )
+            print("Tag: GPS GPSLatitude={}".format( tags['GPS GPSLatitude']) )
+        thumbnail = base64.b64encode(tags['JPEGThumbnail'])
+        thumbnail = str(thumbnail)[2:-1]
     except:
         print('WARNING: NO EXIF TAGS?!?!?!?')
         AddLogForJob(job, "WARNING: No EXIF TAF found for: {}".format(file))
     f.close()

-    thumbnail = base64.b64encode(tags['JPEGThumbnail'])
-    thumbnail = str(thumbnail)[2:-1]
     return thumbnail

 def GenVideoThumbnail(job, file):
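BUG-7 concerns thumbnail orientation, and the new debug lines already read tags['Image Orientation']. If Pillow is available (an assumption, it does not appear in this diff), the extracted JPEGThumbnail bytes could be re-oriented before being base64-encoded; a rough sketch (fix_thumbnail_orientation is hypothetical):

    import io
    from PIL import Image, ImageOps

    def fix_thumbnail_orientation(jpeg_bytes):
        # Sketch only: apply the EXIF Orientation tag to an embedded thumbnail.
        img = Image.open(io.BytesIO(jpeg_bytes))
        # exif_transpose honours the image's own Orientation tag; embedded
        # thumbnails often lack one, in which case the parent image's
        # orientation value would need to be applied manually.
        img = ImageOps.exif_transpose(img)
        out = io.BytesIO()
        img.save(out, format='JPEG')
        return out.getvalue()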