fix BUG-107: view / pagination gets out of whack

2022-09-22 14:25:07 +10:00
parent 93252a3fc9
commit 2bf8f08228
2 changed files with 24 additions and 11 deletions


@@ -982,9 +982,9 @@ def DisconnectSingleNoMatchOverride( job, o ):
     session.add( DisconnectedNoMatchOverride( face=f.face, type_id=o.type_id ) )
     # now deal with 'renaming' the metadata on FS
-    p=f'{SettingsMPath()}/no_match_overrides/'
-    fname=f'{p}{o.face_id}_{ot.name}'
-    new_fname=f'{p}0_{ot.name}_{uuid.uuid4()}'
+    mpath=f'{SettingsMPath()}/no_match_overrides/'
+    fname=f'{mpath}{o.face_id}_{ot.name}'
+    new_fname=f'{mpath}0_{ot.name}_{uuid.uuid4()}'
     try:
         if os.path.exists( fname ):
             os.replace( fname, new_fname )
@@ -1581,6 +1581,18 @@ def find_last_successful_ai_scan(job):
         return j.last_update.timestamp()
     return 0
+####################################################################################################################################
+# when an import job actually finds new files, then the pa_user_state caches will become invalid (offsets are now wrong)
+####################################################################################################################################
+def DeleteOldPA_UserState(job):
+    # clear them out for now - this is 'dumb', just delete ALL. Eventually, can do this based on just the path &/or whether the last_used is
+    # newer than this delete moment (only would be a race condition between an import changing things and someone simultaneously viewing)
+    # path=[jex.value for jex in job.extra if jex.name == "path"][0]
+    session.query(PA_UserState).delete()
+    return
 ####################################################################################################################################
 # JobImportDir(): job that scan import dir and processes entries in there - key function that uses os.walk() to traverse the
 # file system and calls AddFile()/AddDir() as necessary
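The new DeleteOldPA_UserState() wipes every cached pa_user_state row. Below is a minimal sketch (not part of the commit) of the more targeted cleanup the comment alludes to, scoped to the imported path and to rows cached before the delete moment; it assumes PA_UserState exposes path and last_used columns and reuses the module-level session, none of which the diff confirms:

import datetime as dt

def DeleteOldPA_UserState_for_path(job):   # hypothetical variant, not in the commit
    # pull the import path out of the job extras (same lookup as the commented-out line above)
    path = next( (jex.value for jex in job.extra if jex.name == "path"), None )
    cutoff = dt.datetime.now()
    q = session.query(PA_UserState)
    if path:
        # only offsets cached under the imported path can have gone stale
        q = q.filter( PA_UserState.path.startswith(path) )
    # spare rows touched after this moment - they would belong to a viewer racing the import
    q = q.filter( PA_UserState.last_used < cutoff )
    q.delete(synchronize_session=False)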
@@ -1689,6 +1701,8 @@ def JobImportDir(job):
     if found_new_files:
         job.extra.append( JobExtra( name="new_files", value=found_new_files ) )
         session.add(job)
+        # this will invalidate pa_user_state for this path's contents (offsets are now wrong), clear them out
+        DeleteOldPA_UserState(job)
     rm_cnt=HandleAnyFSDeletions(job)
@@ -2206,8 +2220,8 @@ def ReloadMetadata(job):
     AddLogForJob(job, f"INFO: Loading/Retrieving any Metatdata...")
     # no path, then no metadata (probably first ever run)
-    p = SettingsMPath()
-    if not p:
+    mpath = SettingsMPath()
+    if not mpath:
         FinishJob( job, "No metadata path - skipping" )
         return False
@@ -2215,7 +2229,7 @@ def ReloadMetadata(job):
     CopyOverrides()
     # process Metadata on FS for no_match_overrides (disco ones, will have 0 as face_id)
-    fnames = glob.glob( f'{p}/no_match_overrides/*' )
+    fnames = glob.glob( f'{mpath}/no_match_overrides/*' )
     for fname in fnames:
         # type derived from fname (e.g. 0_Too Young_uuid*, 1_Too Young, 2_Ingore Face, etc.)
         match=re.search( '(\d+)_([^_\.]+)', fname )
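For reference, the override filenames encode the face_id first (0 marks a disconnected "disco" override) and the override-type name second, which is what the regex above pulls apart. A quick, self-contained illustration with made-up filenames (using a raw string also avoids the invalid-escape warning for \d):

import re

for name in ('0_Too Young_8a1b-uuid', '17_Ignore Face'):   # hypothetical examples
    m = re.search( r'(\d+)_([^_\.]+)', name )
    face_id, type_name = int(m.group(1)), m.group(2)
    print( face_id, type_name )    # -> 0 Too Young, then 17 Ignore Face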
@@ -2234,12 +2248,12 @@ def ReloadMetadata(job):
             session.add( DisconnectedNoMatchOverride( face=face_data, type_id=otype.id ) )
         if face_id:
             try:
-                os.replace( fname, f'{p}no_match_overrides/0_{otype.name}_{uuid.uuid4()}' )
+                os.replace( fname, f'{mpath}no_match_overrides/0_{otype.name}_{uuid.uuid4()}' )
             except Exception as ex:
                 print( f"ERROR: renaming no-match metadata on filesystem failed: {ex}" )
     # process Metadata on FS for force_match_overrides (disco ones, will have 0 as face_id)
-    fnames = glob.glob( f'{p}force_match_overrides/*' )
+    fnames = glob.glob( f'{mpath}force_match_overrides/*' )
     for fname in fnames:
         # person derived from fname (e.g. 0_ddp_uuid*, 1_ddp, 2_mich, etc.)
         match=re.search( '(\d+)_([^_]+)', fname )
@@ -2262,7 +2276,7 @@ def ReloadMetadata(job):
         # if face>0, then we need to move the FS copy to a disco
         if face_id:
             try:
-                os.replace( fname, f'{p}force_match_overrides/0_{p.tag}_{uuid.uuid4()}' )
+                os.replace( fname, f'{mpath}force_match_overrides/0_{p.tag}_{uuid.uuid4()}' )
             except Exception as ex:
                 print( f"ERROR: renaming force-match metadata on filesystem failed: {ex}" )