Big change: get metadata working fully in the DB and on the filesystem, and recover from the most common scenarios; also improve the GUI to allow an immediate search right after adding a refimg.
@@ -43,9 +43,11 @@ import threading
 import io
 import face_recognition
 import re
+import re
 import sys
 import json
 import ffmpeg
+import uuid
 
 
 # global debug setting
@@ -54,6 +56,9 @@ if 'FLASK_ENV' not in os.environ or os.environ['FLASK_ENV'] != "production":
 else:
     DEBUG=False
 
+# global list of override tables to allow enumeration over them ...
+override_tbls={ "face_no_match_override", "face_force_match_override", "disconnected_no_match_override", "disconnected_force_match_override" }
+
 # this is required to handle the duplicate processing code
 sys.setrecursionlimit(50000)
 
@@ -239,6 +244,7 @@ class Settings(Base):
     import_path = Column(String)
     storage_path = Column(String)
     recycle_bin_path = Column(String)
+    metadata_path = Column(String)
     auto_rotate = Column(Boolean)
     default_refimg_model = Column(Integer,ForeignKey('ai_model.id'), unique=True, nullable=False)
     default_scan_model = Column(Integer,ForeignKey('ai_model.id'), unique=True, nullable=False)
@@ -388,10 +394,10 @@ class FaceNoMatchOverride(Base):
 
 
 ################################################################################
-# Class containing a manual / forced match of a face in a file to a person
+# Class containing manual / forced matches of a face in a file to a person
 ################################################################################
-class FaceManualOverride(Base):
-    __tablename__ = "face_manual_override"
+class FaceForceMatchOverride(Base):
+    __tablename__ = "face_force_match_override"
     id = Column(Integer, Sequence('face_override_id_seq'), primary_key=True )
     face_id = Column(Integer, ForeignKey("face.id"), primary_key=True )
     person_id = Column(Integer, ForeignKey("person.id"), primary_key=True )
@@ -418,15 +424,15 @@ class DisconnectedNoMatchOverride(Base):
         return f"<face: {self.face}, type_id={self.type_id}"
 
 ################################################################################
-# Class describing DisconnectedManualOverride in the database and DB via
+# Class describing DisconnectedForceMatchOverride in the database via
 # sqlalchemy - Used when a face with an override is deleted from the DB to keep
 # the raw data so that we can reconnect the override if we ever scan that same
 # file/face again (think delete/undelete file, rebuild DB from file sys/from
 # scratch, etc)
+# used specifically for a match that was forced between a face and a person
 ################################################################################
-class DisconnectedManualOverride(Base):
-    __tablename__ = "disconnected_manual_override"
+class DisconnectedForceMatchOverride(Base):
+    __tablename__ = "disconnected_force_match_override"
     face = Column( LargeBinary, primary_key=True )
     person_id = Column(Integer, ForeignKey('person.id'))
 
@@ -631,6 +637,22 @@ def SettingsIPath():
         paths.append(settings.base_path+p)
     return paths
 
 
+##############################################################################
+# SettingsMPath(): return path to actual metadata path from settings
+##############################################################################
+def SettingsMPath():
+    settings = session.query(Settings).first()
+    if not settings or settings.metadata_path == "":
+        print ("WARNING: no Settings for metadata path")
+        return None
+    p=settings.metadata_path
+    if p[0] == '/':
+        return p
+    else:
+        return settings.base_path+p
+
 
 ##############################################################################
 # ProcessImportDirs(): wrapper func to call passed in job for each
 # storage path defined in Settings - called via scan import job
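Note: SettingsMPath() hands back the settings value as-is, so the callers below splice subdirectories on with f-strings and a '/' separator; a doubled slash is harmless on POSIX, but os.path.join() sidesteps the trailing-slash question entirely. A minimal sketch with illustrative values (not part of the commit):

    import os

    meta = "/data/pa/metadata"   # assumed example value for settings.metadata_path
    # os.path.join() inserts exactly one separator, so a present or missing
    # trailing slash in the setting can never break the metadata layout
    fname = os.path.join(meta, "force_match_overrides", "123_ddp")
    print(fname)                 # /data/pa/metadata/force_match_overrides/123_ddp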
@@ -732,6 +754,42 @@ def JobCleanBin(job):
     FinishJob(job, f"Finished clean up of files older than {settings.bin_cleanup_file_age} days from Recycle Bin")
     return
 
+##############################################################################
+# JobMetadata(job): is called when we add/remove an individual override
+# (and in future for 'notes' per file) -- this function writes an 'extra' copy
+# out to the filesystem, which allows a full delete/rebuild of the PA data
+# without losing any manual overrides
+##############################################################################
+def JobMetadata(job):
+    JobProgressState( job, "In Progress" )
+    which=[jex.value for jex in job.extra if jex.name == "which"][0]
+    face_id=[jex.value for jex in job.extra if jex.name == "face_id"][0]
+    f=session.query(Face).get(face_id)
+    if which == 'add_force_match_override' or which == 'remove_force_match_override':
+        person_id=[jex.value for jex in job.extra if jex.name == "person_id"][0]
+        p=session.query(Person).get(person_id)
+        os.makedirs( f"{SettingsMPath()}/force_match_overrides", mode=0o777, exist_ok=True )
+        fname=f"{SettingsMPath()}/force_match_overrides/{face_id}_{p.tag}"
+    elif which == 'add_no_match_override' or which == 'remove_no_match_override':
+        type_id=[jex.value for jex in job.extra if jex.name == "type_id"][0]
+        t=session.query(FaceOverrideType).get(type_id)
+        os.makedirs( f"{SettingsMPath()}/no_match_overrides", mode=0o777, exist_ok=True )
+        fname=f"{SettingsMPath()}/no_match_overrides/{face_id}_{t.name}"
+    else:
+        AddLogForJob(job, f"ERROR: Failed to process metadata (which={which})" )
+        return
+    try:
+        if which.startswith( 'add_' ):
+            file_h=open(fname, 'wb')
+            file_h.write(f.face)
+            file_h.close()
+        else:
+            os.remove( fname )
+    except Exception as ex:
+        AddLogForJob(job, f"ERROR: Error with metadata file '{fname}': {ex}" )
+    FinishJob(job, f"Finished metadata job {which}" )
+    return
+
 ##############################################################################
 # AddLogForJob(): add a log line to joblog, if the last time we wrote a log
 # was over 5 seconds ago, then commit the log to the db, so in f/e we see
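Note: JobMetadata() repeats the `[jex.value for jex in job.extra if jex.name == ...][0]` lookup for every parameter, and that pattern raises IndexError if an extra is missing. A hypothetical helper (not in the commit) that makes the lookups read more clearly and fail soft:

    from types import SimpleNamespace

    def JobExtraValue(job, name):
        # return the first job.extra value with a matching name, or None if absent
        for jex in job.extra:
            if jex.name == name:
                return jex.value
        return None

    # stand-in job object, just for illustration
    job = SimpleNamespace(extra=[SimpleNamespace(name="which", value="add_no_match_override")])
    print(JobExtraValue(job, "which"))   # add_no_match_override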
@@ -794,6 +852,8 @@ def RunJob(job):
         JobTransformImage(job)
     elif job.name == "clean_bin":
         JobCleanBin(job)
+    elif job.name == "metadata":
+        JobMetadata(job)
     else:
         FinishJob(job, f"ERROR: Requested to process unknown job type: {job.name}", "Failed")
     # okay, we finished a job, so check for any jobs that are dependent on this and run them...
@@ -901,26 +961,91 @@ def JobScanStorageDir(job):
     MessageToFE( job.id, "success", "Completed (scan for new files)" )
     return
 
 
+##############################################################################
+# DisconnectSingleNoMatchOverride( job, o ): takes a single NoMatch override,
+# moves it over to the Disconnected version in the DB, and moves the metadata
+# on the filesystem from a NMO to the disco* version by renaming the file to
+# use 0 for face_id with a unique num on the end
+##############################################################################
+def DisconnectSingleNoMatchOverride( job, o ):
+    f=session.query(Face).get(o.face_id)
+    ot=session.query(FaceOverrideType).get(o.type_id)
+    d=session.query(DisconnectedNoMatchOverride).filter(
+        DisconnectedNoMatchOverride.type_id==o.type_id, DisconnectedNoMatchOverride.face==f.face ).first()
+    # jic, check it's not already there - shouldn't occur, but FS and DB can get out of sync
+    # no unique keys in Disco*, so just being over-cautious
+    if not d:
+        session.add( DisconnectedNoMatchOverride( face=f.face, type_id=o.type_id ) )
+
+    # now deal with 'renaming' the metadata on FS
+    p=f'{SettingsMPath()}/no_match_overrides/'
+    fname=f'{p}{o.face_id}_{ot.name}'
+    new_fname=f'{p}0_{ot.name}_{uuid.uuid4()}'
+    try:
+        if os.path.exists( fname ):
+            os.replace( fname, new_fname )
+        else:
+            file_h=open( new_fname, 'wb')
+            file_h.write(f.face)
+            file_h.close()
+    except Exception as e:
+        AddLogForJob( job, f"ERROR: Failed to move an override to a 'DisconnectedNoMatchOverride' override in metadata: {e}")
+
+    session.query(FaceNoMatchOverride).filter( FaceNoMatchOverride.face_id==o.face_id, FaceNoMatchOverride.type_id==o.type_id).delete()
+    # force a commit here as we have now added the Disco*, removed the Override and made the FS metadata match
+    session.commit()
+    return
+
+##############################################################################
+# DisconnectSingleForceMatchOverride( job, o ): takes a single ForceMatch
+# override, moves it over to the Disconnected version in the DB, and moves
+# the metadata on the filesystem from an FMO to the disco* version by renaming
+# the file to use 0 for face_id with a unique num on the end
+##############################################################################
+def DisconnectSingleForceMatchOverride( job, o ):
+    f=session.query(Face).get(o.face_id)
+    p=session.query(Person).get(o.person_id)
+    d=session.query(DisconnectedForceMatchOverride).filter(
+        DisconnectedForceMatchOverride.person_id==o.person_id, DisconnectedForceMatchOverride.face==f.face ).first()
+    # jic, check it's not already there - shouldn't occur, but FS and DB can get out of sync
+    # no unique keys in Disco*, so just being over-cautious
+    if not d:
+        session.add( DisconnectedForceMatchOverride( face=f.face, person_id=o.person_id ) )
+
+    # now deal with 'renaming' the metadata on FS
+    path=f'{SettingsMPath()}/force_match_overrides/'
+    fname=f'{path}{o.face_id}_{p.tag}'
+    new_fname=f'{path}0_{p.tag}_{uuid.uuid4()}'
+    try:
+        if os.path.exists( fname ):
+            os.replace( fname, new_fname )
+        else:
+            file_h=open( new_fname, 'wb')
+            file_h.write(f.face)
+            file_h.close()
+    except Exception as e:
+        AddLogForJob( job, f"ERROR: Failed to move an override to a 'DisconnectedForceMatchOverride' override in metadata: {e}")
+
+    session.query(FaceForceMatchOverride).filter( FaceForceMatchOverride.face_id==o.face_id, FaceForceMatchOverride.person_id==o.person_id).delete()
+    # force a commit here as we have now added the Disco*, removed the Override and made the FS metadata match
+    session.commit()
+    return
+
 ##############################################################################
 # All face Overrides should not just be deleted, they should be disconnected
 # from the file (in face_file_link), but instead keep the raw face data so
 # that a face that is found in a future scan can still keep the override
 # connection
 ##############################################################################
-def DisconnectAllOverrides():
+def DisconnectAllOverrides(job):
     overrides=session.query(FaceNoMatchOverride).all()
     for o in overrides:
-        f=session.query(Face).get(o.face_id)
-        session.add( DisconnectedNoMatchOverride( face=f.face, type_id=o.type_id ) )
-    overrides=session.query(FaceNoMatchOverride).delete()
+        DisconnectSingleNoMatchOverride(job, o )
 
-    overrides=session.query(FaceManualOverride).all()
+    overrides=session.query(FaceForceMatchOverride).all()
     for o in overrides:
-        f=session.query(Face).get(o.face_id)
-        session.add( DisconnectedManualOverride( face=f.face, person_id=o.person_id ) )
-    overrides=session.query(FaceManualOverride).delete()
-    session.commit()
+        DisconnectSingleForceMatchOverride( job, o )
     return
 
 
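Note: both DisconnectSingle* functions lean on the same on-disk naming scheme: a connected override file is named '<face_id>_<tag>', and a disconnected one is '0_<tag>_<uuid4>' so repeated disconnects of the same tag can never collide. A worked example with illustrative values:

    import uuid

    tag = "Too Young"                         # an example FaceOverrideType name
    connected    = f"123_{tag}"               # override still linked to face 123
    disconnected = f"0_{tag}_{uuid.uuid4()}"  # face gone: 0 plus a uuid keeps it unique
    print(connected)
    print(disconnected)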
@@ -930,7 +1055,7 @@ def DisconnectAllOverrides():
 ##############################################################################
 def JobForceScan(job):
     JobProgressState( job, "In Progress" )
-    DisconnectAllOverrides()
+    DisconnectAllOverrides(job)
     session.query(PA_UserState).delete()
     session.query(FaceFileLink).delete()
     session.query(FaceRefimgLink).delete()
@@ -1341,6 +1466,7 @@ def HandleAnyFSDeletions(job):
     rms = session.query(Entry).filter(Entry.exists_on_fs==False,Entry.type_id!=dtype.id).all()
     rm_cnt=0
     for rm in rms:
+        DelFacesForFile( job, rm.id )
         RemoveFileFromDB(job, rm, f"INFO: Removing file: {rm.name} from system as it is no longer on the file system")
         rm_cnt+=1
 
@@ -1990,6 +2116,157 @@ def JobRestoreFiles(job):
     FinishJob(job, f"Finished restoring selected file(s)")
     return
 
 
+####################################################################################################################################
+# CopyOverrides(): copies the overrides from the 4 override tbls into tmp_<tbl>s
+# Metadata is only going to be used in cases where the DB does not have the
+# overrides that were once put in by hand - we are extra-careful processing
+# these, so we check there is a metadata path, and that we aren't in the middle
+# of processing metadata when we Init (which will show up as a tmp_<tbl> still
+# existing)
+####################################################################################################################################
+def CopyOverrides():
+    try:
+        for tbl in override_tbls:
+            session.execute( f"select * into tmp_{tbl} from {tbl}")
+        # force a commit here - I want to fail before I delete override content
+        session.commit()
+        # now take all 4 override tables in the DB and clear them out
+        for tbl in override_tbls:
+            session.execute( f"delete from {tbl}" )
+        session.commit()
+    except Exception as ex:
+        print( f"ERROR: there are existing tmp tables when processing metadata. This SHOULD NEVER HAPPEN - manual intervention needed" )
+        print( f"ERROR: most likely the job manager was killed while processing metadata - you may want to manually put" )
+        print( f"ERROR: the contents of the 'tmp_*' tables back into their corresponding official metadata tables" )
+        print( f"ERROR: and try to restart the job manager" )
+        exit( 1 )
+    return
+
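Note: the manual recovery the error text asks for amounts to copying each tmp_<tbl> back and dropping it. A sketch of that one-off step, reusing the module's session and override_tbls, and assuming the tmp_* tables still hold the copied rows:

    # run once, by hand, before restarting the job manager
    for tbl in override_tbls:
        session.execute( f"insert into {tbl} select * from tmp_{tbl}" )
        session.execute( f"drop table tmp_{tbl}" )
    session.commit()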
+####################################################################################################################################
+# GetFaceInMetadata(fname): quick wrapper to return face as binary data from
+# metadata file 'fname'
+####################################################################################################################################
+def GetFaceInMetadata(fname):
+    try:
+        file_h=open(fname, "rb")
+        face_data=file_h.read(-1)
+        file_h.close()
+    except Exception as ex:
+        print( f"ERROR: FATAL tried to read in override data and can't read content" )
+        print( f"ERROR: manual intervention needed - exc={ex}" )
+        exit(1)
+    return face_data
+
+####################################################################################################################################
+# ReloadMetadata(): reads in any metadata and puts it back into the DB (if needed)
+# Metadata will be disconnected overrides & eventually actual metadata we store per file
+# see https://wiki.depaoli.id.au/en/shared/photoassistant/metadata for a detailed
+# explanation of this function
+####################################################################################################################################
+def ReloadMetadata(job):
+    AddLogForJob(job, f"INFO: Loading/Retrieving any Metadata...")
+
+    # no path, then no metadata (probably first ever run)
+    p = SettingsMPath()
+    if not p:
+        FinishJob( job, "No metadata path - skipping" )
+        return False
+
+    # copy overrides into tmp tables
+    CopyOverrides()
+
+    # process Metadata on FS for no_match_overrides (disco ones will have 0 as face_id)
+    fnames = glob.glob( f'{p}/no_match_overrides/*' )
+    for fname in fnames:
+        # type derived from fname (e.g. 0_Too Young_uuid*, 1_Too Young, 2_Ignore Face, etc.)
+        match=re.search( r'(\d+)_([^_\.]+)', os.path.basename(fname) )
+        face_id=int(match.group(1))
+        type_name=match.group(2)
+        otype = session.query(FaceOverrideType).filter(FaceOverrideType.name==type_name).one()
+        face_data=GetFaceInMetadata(fname)
+        if DEBUG:
+            print( f"Found metadata showing Override of type: {type_name}" )
+
+        # check that both the id and data match - if so make a new FaceNoMatchOverride, otherwise a Disco* one
+        face=session.query( Face ).filter( Face.id==face_id ).filter( Face.face == face_data ).first()
+        if face:
+            session.add( FaceNoMatchOverride( face_id=face_id, type_id=otype.id ) )
+        else:
+            session.add( DisconnectedNoMatchOverride( face=face_data, type_id=otype.id ) )
+            # if face_id>0, we need to move the FS copy to a disco one
+            if face_id:
+                try:
+                    os.replace( fname, f'{p}/no_match_overrides/0_{otype.name}_{uuid.uuid4()}' )
+                except Exception as ex:
+                    print( f"ERROR: renaming no-match metadata on filesystem failed: {ex}" )
+
+    # process Metadata on FS for force_match_overrides (disco ones will have 0 as face_id)
+    fnames = glob.glob( f'{p}/force_match_overrides/*' )
+    for fname in fnames:
+        # person derived from fname (e.g. 0_ddp_uuid*, 1_ddp, 2_mich, etc.)
+        match=re.search( r'(\d+)_([^_]+)', os.path.basename(fname) )
+        face_id=int(match.group(1))
+        person_tag=match.group(2)
+        person = session.query(Person).filter(Person.tag==person_tag).one()
+        face_data=GetFaceInMetadata(fname)
+        if DEBUG:
+            print( f"Found metadata showing Override match for person: {person_tag}" )
+
+        # check that both the id and data match - if so make a new FaceForceMatchOverride, otherwise a Disco* one
+        face=session.query( Face ).filter( Face.id==face_id ).filter( Face.face == face_data ).first()
+        if face:
+            session.add( FaceForceMatchOverride( face_id=face_id, person_id=person.id ) )
+        else:
+            session.add( DisconnectedForceMatchOverride( face=face_data, person_id=person.id ) )
+            # if face_id>0, we need to move the FS copy to a disco one
+            if face_id:
+                try:
+                    os.replace( fname, f'{p}/force_match_overrides/0_{person.tag}_{uuid.uuid4()}' )
+                except Exception as ex:
+                    print( f"ERROR: renaming force-match metadata on filesystem failed: {ex}" )
+
+
+    # now process each of the tmp tables for anything that was in the DB but not on the FS (e.g. rm'd metadata)
+    overrides=session.execute( "select face_id, type_id from tmp_face_no_match_override" )
+    for o in overrides:
+        print( f"F No Match: o.face_id={o.face_id}" )
+        print( f"F No Match: o.type_id={o.type_id}" )
+        nmo=session.query(FaceNoMatchOverride).filter(FaceNoMatchOverride.face_id==o.face_id).filter(FaceNoMatchOverride.type_id==o.type_id).first()
+        if not nmo:
+            session.add( FaceNoMatchOverride( face_id=o.face_id, type_id=o.type_id ) )
+
+    overrides=session.execute( "select face_id, person_id from tmp_face_force_match_override" )
+    for o in overrides:
+        print( f"F Force Match: o.face_id={o.face_id}" )
+        print( f"F Force Match: o.person_id={o.person_id}" )
+        fmo=session.query(FaceForceMatchOverride).filter(FaceForceMatchOverride.face_id==o.face_id,FaceForceMatchOverride.person_id==o.person_id).first()
+        if not fmo:
+            session.add( FaceForceMatchOverride( face_id=o.face_id, person_id=o.person_id ) )
+
+    overrides=session.execute( "select face, type_id from tmp_disconnected_no_match_override" )
+    for o in overrides:
+        print( f"D No Match: o.type_id={o.type_id}" )
+        dnmo=session.query(DisconnectedNoMatchOverride).filter(DisconnectedNoMatchOverride.face==o.face).filter(DisconnectedNoMatchOverride.type_id==o.type_id).first()
+        if not dnmo:
+            session.add( DisconnectedNoMatchOverride( face=o.face, type_id=o.type_id ) )
+
+    overrides=session.execute( "select face, person_id from tmp_disconnected_force_match_override" )
+    for o in overrides:
+        print( f"D Force Match: o.person_id={o.person_id}" )
+        dfmo=session.query(DisconnectedForceMatchOverride).filter(DisconnectedForceMatchOverride.face==o.face).filter(DisconnectedForceMatchOverride.person_id==o.person_id).first()
+        if not dfmo:
+            session.add( DisconnectedForceMatchOverride( face=o.face, person_id=o.person_id ) )
+
+    # finally, drop the tmp tables
+    for tbl in override_tbls:
+        session.execute( f"drop table tmp_{tbl}" )
+
+    # ok, finally commit all these changes - don't do this until now; worst case, if we crash/fail the overrides are still in tmp_{tbl}
+    session.commit()
+
+    return
+
 
 ####################################################################################################################################
 # InitialValidationChecks(): checks paths (and dirs) exist in DB on first run.
 # IF path from settings does not exist - log it
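Note: the filename parse drives everything in ReloadMetadata(): group(1) is the face_id (0 marks a disconnected entry) and group(2) is the override type name or person tag. A runnable example of how the no-match pattern splits both filename forms (values are illustrative):

    import os, re

    for name in ("123_Too Young", "0_Too Young_8c2e4a1b"):
        m = re.search(r'(\d+)_([^_\.]+)', os.path.basename(name))
        print(m.group(1), "->", m.group(2))
    # 123 -> Too Young
    # 0 -> Too Young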
@@ -1998,9 +2275,9 @@ def JobRestoreFiles(job):
 def InitialValidationChecks():
     now=datetime.now(pytz.utc)
     job=NewJob( "init" )
-    AddLogForJob(job, f"INFO: Starting Initial Validation checks...")
+    settings = session.query(Settings).first()
     job.start_time=datetime.now(pytz.utc)
     JobProgressState( job, "In Progress" )
+    AddLogForJob(job, f"INFO: Starting Initial Validation checks...")
     path=SettingsRBPath()
     rbp_exists=0
     if os.path.exists(path):
@@ -2032,10 +2309,21 @@ def InitialValidationChecks():
         symlink=CreateSymlink(job,ptype,path)
     if not ip_exists:
         AddLogForJob(job, "ERROR: None of the import paths in the settings exist - Please fix now");
-    if not rbp_exists or not sp_exists or not ip_exists:
+
+    path=SettingsMPath()
+    mp_exists=0
+    if path and os.path.exists(path):
+        mp_exists=1
+        ptype = session.query(PathType).filter(PathType.name=='Metadata').first().id
+        symlink=CreateSymlink(job,ptype,path)
+    if not mp_exists:
+        AddLogForJob(job, "ERROR: The metadata path in settings does not exist - Please fix now");
+
+    if not rbp_exists or not sp_exists or not ip_exists or not mp_exists:
         FinishJob(job,"ERROR: Job manager EXITing until above errors are fixed by paths being created or settings being updated to valid paths", "Failed" )
         exit(-1)
+
+    ReloadMetadata(job)
     FinishJob(job,"Finished Initial Validation Checks")
     return
@@ -2053,6 +2341,39 @@ def AddFaceToFile( locn_data, face_data, file_eid, model_id, settings ):
     ffl = FaceFileLink( face_id=face.id, file_eid=file_eid, model_used=model_id )
     session.add(ffl)
     session.commit()
+
+    # See if this face is included in any Disconnected overrides - if so, copy it
+    # back to an override connected to this (new) face_id for the same old face :)
+    dfmo=session.query(DisconnectedForceMatchOverride).filter(DisconnectedForceMatchOverride.face==face.face).first()
+    if dfmo:
+        session.add( FaceForceMatchOverride( face_id=face.id, person_id=dfmo.person_id ) )
+        session.query(DisconnectedForceMatchOverride).filter(DisconnectedForceMatchOverride.face==dfmo.face).delete()
+        # move metadata from Disco to Normal
+        p=session.query(Person).get(dfmo.person_id)
+        path=f'{SettingsMPath()}/force_match_overrides/'
+        try:
+            # can only be 1 match with the * being a UUID
+            fname=glob.glob( f'{path}0_{p.tag}_*' )[0]
+            new_fname=f'{path}{face.id}_{p.tag}'
+            os.replace( fname, new_fname )
+        except Exception as ex:
+            print( f"ERROR: AddFaceToFile-face connects to 'disconnected-force-match' metadata, but fixing the filesystem metadata failed: {ex}" )
+
+    dnmo=session.query(DisconnectedNoMatchOverride).filter(DisconnectedNoMatchOverride.face==face.face).first()
+    if dnmo:
+        session.add( FaceNoMatchOverride( face_id=face.id, type_id=dnmo.type_id ) )
+        session.query(DisconnectedNoMatchOverride).filter(DisconnectedNoMatchOverride.face==dnmo.face).delete()
+        # move metadata from Disco to Normal
+        t=session.query(FaceOverrideType).get(dnmo.type_id)
+        path=f'{SettingsMPath()}/no_match_overrides/'
+        try:
+            # can only be 1 match with the * being a UUID
+            fname=glob.glob( f'{path}0_{t.name}_*' )[0]
+            new_fname=f'{path}{face.id}_{t.name}'
+            os.replace( fname, new_fname )
+        except Exception as ex:
+            print( f"ERROR: AddFaceToFile-face connects to 'disconnected-no-match' metadata, but fixing the filesystem metadata failed: {ex}" )
+
     return
 
 ####################################################################################################################################
@@ -2078,23 +2399,21 @@ def DelMatchesForFile( job, ent ):
     return
 
 ####################################################################################################################################
-# DelFacesForFile(): quick func to delete any faces associated with the specified file
+# DelFacesForFile(job, eid): quick func to delete any faces associated with the specified file
 ####################################################################################################################################
-def DelFacesForFile( eid ):
+def DelFacesForFile( job, eid ):
     ffl=session.query(FaceFileLink).filter(FaceFileLink.file_eid==eid).all()
 
     for link in ffl:
-        # find any manaul overrides on this face (before we delete it, and put them into the disc* table)
-        o=session.query(FaceManualOverride).filter(FaceManualOverride.face_id==link.face_id).one()
+        # find any forced match overrides on this face (before we delete it, and put them into the disc* table)
+        o=session.query(FaceForceMatchOverride).filter(FaceForceMatchOverride.face_id==link.face_id).first()
         if o:
-            f=session.query(Face).get(link.face_id)
-            session.add( DisconnectedManualOverride( face=f.face, person_id=o.person_id ) )
+            DisconnectSingleForceMatchOverride(job, o )
 
         # find any no-match overrides on this face (before we delete it, and put them into the disc* table)
-        o=session.query(FaceNoMatchOverride).filter(FaceNoMatchOverride.face_id==link.face_id).one()
+        o=session.query(FaceNoMatchOverride).filter(FaceNoMatchOverride.face_id==link.face_id).first()
        if o:
-            f=session.query(Face).get(link.face_id)
-            session.add( DisconnectedNoMatchOverride( face=f.face, type_id=o.type_id ) )
+            DisconnectSingleNoMatchOverride( job, o )
 
     session.execute( f"delete from face where id in (select face_id from face_file_link where file_eid = {eid})" )
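Note: the .one() to .first() changes above are load-bearing: most faces carry no override at all, and Query.one() raises NoResultFound when there is no row, which would have aborted the delete loop; .first() returns None, so the 'if o:' guard does the right thing. A self-contained demo (hypothetical table, not from the commit; assumes SQLAlchemy 1.4+):

    from sqlalchemy import Column, Integer, create_engine
    from sqlalchemy.orm import Session, declarative_base
    from sqlalchemy.orm.exc import NoResultFound

    DemoBase = declarative_base()

    class Demo(DemoBase):
        __tablename__ = "demo"
        id = Column(Integer, primary_key=True)

    engine = create_engine("sqlite://")          # throwaway in-memory DB
    DemoBase.metadata.create_all(engine)
    with Session(engine) as s:
        print(s.query(Demo).filter(Demo.id == 1).first())   # None, no exception
        try:
            s.query(Demo).filter(Demo.id == 1).one()
        except NoResultFound:
            print("one() raised NoResultFound")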
@@ -2186,7 +2505,7 @@ def ScanFileForPerson( job, e, force=False ):
     # if we are forcing this, delete any old faces (this will also delete linked tables), and reset faces_created_on to None
     if force:
         AddLogForJob( job, f'INFO: force is true, so deleting old face information for {e.name}' )
-        DelFacesForFile( e.id )
+        DelFacesForFile( job, e.id )
         file_h.faces_created_on = 0
 
     # optimise: don't rescan if we already have faces