added viewing the recycle bin via folders, added comments, cleaned up the TODO to note this is done

2021-05-26 18:32:52 +10:00
parent 8dc98dd368
commit 9b926938e4
4 changed files with 75 additions and 28 deletions

@@ -495,6 +495,19 @@ def CreateSymlink(job,ptype,path):
os.symlink(path, symlink)
return symlink
################################################################################################################################################################
#
# Key function that runs as part of (usually) an import job. The directory name (dirname) is checked to see
# whether it is already in the database (as a Dir under in_dir within in_path). If it is, the existing DB entry
# is simply returned. If not, we create a new row in Dir whose name is dirname, whose parent directory is in_dir
# (a DB object), whose rel_path is set to this entry's path relative to the Path's own filesystem location
# (including the dirname), and whose in_path is the overarching path (one of an Import, Storage or Recycle_bin path in the DB).
#
# e.g. a path on the FS of /home/ddp/src/photoassistant/images_to_process/ ... ends up in the DB as path_prefix="static/Import/images_to_process";
# if we then have a dir at /home/ddp/src/photoassistant/images_to_process/0000/subtest, we call:
# AddDir( job, dirname='subtest', in_dir=Dir object for '0000', rel_path='0000/subtest', in_path=Path object for 'static/Import/images_to_process' )
#
################################################################################################################################################################
def AddDir(job, dirname, in_dir, rel_path, in_path ):
dir=session.query(Dir).join(PathDirLink).join(Path).filter(Path.id==in_path.id).filter(Dir.rel_path==rel_path).first()
if dir:
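
For reference, the create-or-return behaviour described in the comment above can be sketched on its own. This is a hedged sketch, not the real implementation: it assumes the Dir, Path and PathDirLink SQLAlchemy models and the module-level session used elsewhere in this file, and the column names passed to Dir() and PathDirLink() are illustrative guesses rather than the actual schema.

def add_dir_sketch(job, dirname, in_dir, rel_path, in_path):
    # Is there already a Dir row with this rel_path under the given Path?
    existing = (session.query(Dir)
                       .join(PathDirLink)
                       .join(Path)
                       .filter(Path.id == in_path.id)
                       .filter(Dir.rel_path == rel_path)
                       .first())
    if existing:
        return existing                                                 # reuse the existing row
    # Not seen before: create it, parented on in_dir and linked to in_path.
    new_dir = Dir(name=dirname, parent_dir=in_dir, rel_path=rel_path)   # hypothetical column names
    session.add(new_dir)
    session.add(PathDirLink(path=in_path, dir=new_dir))                 # hypothetical link construction
    return new_dir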
@@ -573,16 +586,28 @@ def MoveFileToRecycleBin(job,del_me):
os.replace( src, dst )
except Exception as e:
print( f"Failed to remove file from filesystem - which={src}, err: {e}")
bin=session.query(Path).join(PathType).filter(PathType.name=='Bin').first()
print("bin={bin}")
print("del_me={del_me}")
bin_path=session.query(Path).join(PathType).filter(PathType.name=='Bin').first()
print( f"bin={bin}")
print( f"del_me={del_me}")
new_rel_path=del_me.in_dir.in_path.path_prefix.replace('static/','')
# if this dir has a relative path, append it to new_rel_path, since there is only ever one Bin path
if len(del_me.in_dir.rel_path):
new_rel_path += '/' + del_me.in_dir.rel_path
print("new_rel_path={new_rel_path}")
new_dir = AddDir(job, new_rel_path, None, new_rel_path, bin )
print( "new_dir={new_dir}" )
print( f"new_rel_path={new_rel_path}" )
parent_dir=session.query(Dir).join(PathDirLink).filter(PathDirLink.path_id==bin_path.id).first()
print( f"parent_dir for path={parent_dir}" )
part_rel_path=""
for dirname in new_rel_path.split("/"):
part_rel_path += f"{dirname}"
print( f"AddDir( {dirname} in {parent_dir} with {part_rel_path} as pfx ) ")
new_dir=AddDir( job, dirname, parent_dir, part_rel_path, bin_path )
parent_dir=new_dir
part_rel_path += "/"
print( f"new_dir={new_dir}" )
del_me.in_dir = new_dir
return
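
The loop added above builds a chain of Dir entries under the single Bin path: each '/'-separated component of new_rel_path becomes one directory, parented on the directory created for the previous component, with the accumulated prefix as its rel_path. A pure-Python sketch of just that walk (the real code calls AddDir with the Bin Path object and seeds the parent from the database; the example value below is illustrative):

def build_bin_chain_sketch(new_rel_path):
    parent = None                # the real code seeds this with the Bin path's existing root Dir
    part_rel_path = ""
    chain = []
    for dirname in new_rel_path.split("/"):
        part_rel_path += dirname
        chain.append((dirname, parent, part_rel_path))    # stands in for AddDir(...)
        parent = dirname                                  # next component nests under this one
        part_rel_path += "/"
    return chain

# build_bin_chain_sketch("Import/images_to_process/0000") returns
# [('Import', None, 'Import'),
#  ('images_to_process', 'Import', 'Import/images_to_process'),
#  ('0000', 'images_to_process', 'Import/images_to_process/0000')]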
@@ -675,7 +700,7 @@ def JobImportDir(job):
path_obj.num_files=overall_file_cnt
parent_dir=None
# rel_path is always '' at the top of the path object's path_prefix for the first dir
dir=AddDir(job, os.path.basename(symlink), parent_dir, '', path_obj)
# session.add in case we have already imported this dir (AddDir won't re-add it) and we might now have a different number of files than last time
session.add(dir)
@@ -688,8 +713,8 @@ def JobImportDir(job):
# already create root above to work out num_files for whole os.walk
if root != path:
pp=SymlinkName( path_obj.type.name, path, root )+'/'+os.path.basename(root)
print( F"pp={pp}, root={root}, symlink={symlink}" )
rel_path=pp.replace(symlink+'/','')
print( f"pp={pp}, root={root}, symlink={symlink}, rel_path={rel_path}" )
dir=AddDir(job, os.path.basename(root), parent_dir, rel_path, path_obj)
for basename in files:
# commit every 100 files to see progress being made but not hammer the database
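
rel_path here ends up as each walked directory's path relative to the symlinked import root. A rough standalone approximation of that calculation, using os.path.relpath in place of the repo's SymlinkName() helper (treat the equivalence as an assumption, not the actual code path):

import os

def walk_rel_paths(symlink):
    for root, dirs, files in os.walk(symlink):
        # '' for the top-level directory, 'sub/dir' style for everything below it
        rel_path = "" if root == symlink else os.path.relpath(root, symlink)
        yield root, rel_path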
@@ -1005,8 +1030,8 @@ def RemoveDups(job):
del_me_lst = []
for f in files:
if os.path.isfile( f.FullPathOnFS() ) == False:
AddLogForJob( job, f"ERROR: (per file del) file (DB id: {f.eid} - {f.FullPathOnFS()}) does not exist? ignorning file")
elif f.file_details.eid == int(keeping):
AddLogForJob( job, f"ERROR: (per file del) file (DB id: {f.id} - {f.FullPathOnFS()}) does not exist? ignorning file")
elif f.id == int(keeping):
found = f
else:
del_me_lst.append(f)
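
The per-file dedup loop above partitions the duplicate set into the row the user chose to keep and the rows queued for removal, skipping (and logging) anything missing on disk. A compact sketch of that partition, with the filesystem check passed in so it runs without real files; names are illustrative:

def partition_dups_sketch(files, keeping, exists_on_fs):
    found, del_me_lst = None, []
    for f in files:
        if not exists_on_fs(f):
            continue                        # logged as an ERROR and ignored in the real code
        if f.id == int(keeping):
            found = f                       # the duplicate being kept
        else:
            del_me_lst.append(f)            # queued for removal
    return found, del_me_lst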
@@ -1032,7 +1057,7 @@ def RemoveDups(job):
del_me=None
for f in files:
if os.path.isfile(f.FullPathOnFS()) == False:
AddLogForJob( job, f"ERROR: (per path del) file (DB id: {f.eid} - {f.FullPathOnFS()}) does not exist? ignorning file")
AddLogForJob( job, f"ERROR: (per path del) file (DB id: {f.id} - {f.FullPathOnFS()}) does not exist? ignorning file")
if f.in_dir.eid == int(keeping):
found=f
else: