big change to get metadata working fully in DB and on Filesystem, and recover from most common scenarios, improved GUI as well for allowing an immediate search after adding refimg as well
This commit is contained in:
1
.gitignore
vendored
1
.gitignore
vendored
@@ -6,3 +6,4 @@ DB_BACKUP/
|
||||
new_img_dir/
|
||||
static/
|
||||
internal/upstream
|
||||
.pa_metadata
|
||||
|
||||
3
BUGs
3
BUGs
@@ -1,2 +1,3 @@
|
||||
### Next: 100
|
||||
### Next: 103
|
||||
BUG-100: I managed to get 2 photos matching mich in the NOT_WORKING photo (probably dif refimgs but same p.tag?)
|
||||
BUG-102: cant change from flat view to folder view
|
||||
|
||||
58
TODO
58
TODO
@@ -1,14 +1,47 @@
|
||||
## GENERAL
|
||||
* need force scan on a file as an option in GUI (to test below)
|
||||
## MIGRATION/NEXT Production build:
|
||||
drop table FACE_NO_MATCH_OVERRIDE;
|
||||
drop table FACE_FORCE_MATCH_OVERRIDE;
|
||||
drop table DISCONNECTED_NO_MATCH_OVERRIDE;
|
||||
drop table DISCONNECTED_FORCE_MATCH_OVERRIDE;
|
||||
|
||||
* keep overrides across 'deletes/rescans'
|
||||
[DONE] - when we delete/force a full scan then move overrides into disconnected* tables
|
||||
[PARTIAL] - when an individual face is deleted - need to keep any associated override
|
||||
code is [DONE]
|
||||
TEST! (no way to force a delete via gui as yet)
|
||||
- when we scan a new face, we need to see if there is a matching override, if so, add override back & delete disc*
|
||||
- TEST (add an override, delete refimg, re-add refimg & re-scan)
|
||||
- TEST (forcescan job)
|
||||
create table FACE_NO_MATCH_OVERRIDE ( ID integer, FACE_ID integer, TYPE_ID integer,
|
||||
constraint FK_FNMO_FACE_ID foreign key (FACE_ID) references FACE(ID),
|
||||
constraint FK_FNMO_TYPE foreign key (TYPE_ID) references FACE_OVERRIDE_TYPE(ID),
|
||||
constraint PK_FNMO_ID primary key(ID) );
|
||||
|
||||
create table FACE_FORCE_MATCH_OVERRIDE ( ID integer, FACE_ID integer, PERSON_ID integer, constraint PK_FACE_FORCE_MATCH_OVERRIDE_ID primary key(ID) );
|
||||
|
||||
create table DISCONNECTED_NO_MATCH_OVERRIDE ( FACE bytea, TYPE_ID integer,
|
||||
constraint FK_DNMO_TYPE_ID foreign key (TYPE_ID) references FACE_OVERRIDE_TYPE(ID),
|
||||
constraint PK_DNMO_FACE primary key (FACE) );
|
||||
|
||||
create table DISCONNECTED_FORCE_MATCH_OVERRIDE ( FACE bytea, PERSON_ID integer,
|
||||
constraint FK_DFMO_PERSON_ID foreign key (PERSON_ID) references PERSON(ID),
|
||||
constraint PK_DFMO_FACE primary key (FACE) );
|
||||
|
||||
drop table SETTINGS
|
||||
create table SETTINGS(
|
||||
ID integer,
|
||||
BASE_PATH varchar, IMPORT_PATH varchar, STORAGE_PATH varchar, RECYCLE_BIN_PATH varchar, METADATA_PATH varchar,
|
||||
AUTO_ROTATE Boolean,
|
||||
DEFAULT_REFIMG_MODEL integer, DEFAULT_SCAN_MODEL integer, DEFAULT_THRESHOLD float,
|
||||
FACE_SIZE_LIMIT integer,
|
||||
SCHEDULED_IMPORT_SCAN integer, SCHEDULED_STORAGE_SCAN integer,
|
||||
SCHEDULED_BIN_CLEANUP integer, BIN_CLEANUP_FILE_AGE integer,
|
||||
JOB_ARCHIVE_AGE integer,
|
||||
constraint PK_SETTINGS_ID primary key(ID),
|
||||
constraint FK_DEFAULT_REFIMG_MODEL foreign key (DEFAULT_REFIMG_MODEL) references AI_MODEL(ID),
|
||||
constraint FK_DEFAULT_SCAN_MODEL foreign key (DEFAULT_SCAN_MODEL) references AI_MODEL(ID) );
|
||||
|
||||
insert into SETTINGS ( id, base_path, import_path, storage_path, recycle_bin_path, metadata_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) values ( (select nextval('SETTINGS_ID_SEQ')), '/export/docker/storage/', 'Camera_uploads/', 'photos/', '.pa_bin/', '.pa_metadata/', true, 1, 1, '0.55', 43, 1, 1, 7, 30, 4 );
|
||||
|
||||
|
||||
## then docker-compose build...
|
||||
|
||||
## GENERAL
|
||||
* put try: around any os.remove, etc.
|
||||
|
||||
* remove Paths from SettingsIPath, etc.
|
||||
|
||||
* should I change the rotation code to use that jpeg util to reduce/remove compression loss?
|
||||
|
||||
@@ -72,6 +105,8 @@
|
||||
|
||||
* think about security - in job_mgr anywhere I can os.replace/remove NEED to protect, etc
|
||||
|
||||
* real first-run, 'no or empty settings' -- need to think this through
|
||||
|
||||
## DB
|
||||
* Dir can have date in the DB, so we can do Oldest/Newest dirs in Folder view
|
||||
|
||||
@@ -86,8 +121,7 @@
|
||||
(only show in DEV for now)
|
||||
|
||||
### AI
|
||||
* faces per file (need a threshold for too many? OR
|
||||
* consider size of bbox of face / 'high-quality' faces -- if face is too small in image, dont match it
|
||||
* faces per file - need a threshold for too many?
|
||||
|
||||
### UI
|
||||
* viewer needs to allow toggle to scan_model (and prob. right-click on file... AI (with CNN) AI (with hog)
|
||||
|
||||
4
ai.py
4
ai.py
@@ -15,7 +15,7 @@ import base64
|
||||
import json
|
||||
|
||||
from job import Job, JobExtra, Joblog, NewJob
|
||||
from face import Face, FaceFileLink, FaceRefimgLink
|
||||
from face import Face, FaceFileLink, FaceRefimgLink, fix_face_locn
|
||||
|
||||
|
||||
# pylint: disable=no-member
|
||||
@@ -87,6 +87,7 @@ def unmatched_faces():
|
||||
faces=Face.query.join(FaceFileLink).join(FaceRefimgLink, isouter=True).filter(FaceRefimgLink.refimg_id==None).order_by(Face.h.desc()).limit(10).all()
|
||||
imgs={}
|
||||
for face in faces:
|
||||
fix_face_locn(face)
|
||||
face.tmp_locn=json.loads(face.locn)
|
||||
f = Entry.query.join(File).join(FaceFileLink).filter(FaceFileLink.face_id==face.id).first()
|
||||
face.file_eid=f.id
|
||||
@@ -116,6 +117,7 @@ def unmatched_faces():
|
||||
def get_face_from_image(face_id):
|
||||
face=Face.query.get(face_id)
|
||||
f = Entry.query.join(File).join(FaceFileLink).filter(FaceFileLink.face_id==face_id).first()
|
||||
fix_face_locn(face)
|
||||
tmp_locn=json.loads(face.locn)
|
||||
x=tmp_locn[3]*0.95
|
||||
y=tmp_locn[0]*0.95
|
||||
|
||||
12
face.py
12
face.py
@@ -84,8 +84,8 @@ class FaceNoMatchOverride(db.Model):
|
||||
return f"<id: {self.id}, face_id={self.face_id}, type: {self.type}>"
|
||||
|
||||
|
||||
class FaceManualOverride(db.Model):
|
||||
__tablename__ = "face_manual_override"
|
||||
class FaceForceMatchOverride(db.Model):
|
||||
__tablename__ = "face_force_match_override"
|
||||
id = db.Column(db.Integer, db.Sequence('face_override_id_seq'), primary_key=True )
|
||||
face_id = db.Column(db.Integer, db.ForeignKey("face.id"), primary_key=True )
|
||||
person_id = db.Column(db.Integer, db.ForeignKey("person.id"), primary_key=True )
|
||||
@@ -93,3 +93,11 @@ class FaceManualOverride(db.Model):
|
||||
|
||||
def __repr__(self):
|
||||
return f"<id: {self.id}, face_id={self.face_id}, person_id={self.person_id}>"
|
||||
|
||||
|
||||
def fix_face_locn(face):
|
||||
# just fix any data issues in DB (happens with some bugs in code)
|
||||
if face.locn[0]=='{':
|
||||
face.locn[0]='['
|
||||
face.locn[-1]=']'
|
||||
return
|
||||
|
||||
6
files.py
6
files.py
@@ -32,7 +32,7 @@ from person import Refimg, Person, PersonRefimgLink
|
||||
from settings import Settings, SettingsIPath, SettingsSPath, SettingsRBPath
|
||||
from shared import SymlinkName
|
||||
from dups import Duplicates
|
||||
from face import Face, FaceFileLink, FaceRefimgLink, FaceOverrideType, FaceNoMatchOverride, FaceManualOverride
|
||||
from face import Face, FaceFileLink, FaceRefimgLink, FaceOverrideType, FaceNoMatchOverride, FaceForceMatchOverride, fix_face_locn
|
||||
|
||||
# pylint: disable=no-member
|
||||
|
||||
@@ -673,6 +673,7 @@ def viewlist():
|
||||
# put locn data back into array format
|
||||
fid=0
|
||||
for face in e.file_details.faces:
|
||||
fix_face_locn(face)
|
||||
tmp_locn = json.loads(face.locn)
|
||||
fd= {}
|
||||
fd['x'] = tmp_locn[3]
|
||||
@@ -725,6 +726,7 @@ def view(id):
|
||||
# put locn data back into array format
|
||||
for face in e.file_details.faces:
|
||||
if face.locn:
|
||||
fix_face_locn(face)
|
||||
face.tmp_locn = json.loads(face.locn)
|
||||
else:
|
||||
# this at least stops a 500 server error - seems to occur when
|
||||
@@ -735,7 +737,7 @@ def view(id):
|
||||
fnmo = FaceNoMatchOverride.query.filter(FaceNoMatchOverride.face_id==face.id).first()
|
||||
if fnmo:
|
||||
face.no_match_override=fnmo
|
||||
mo = FaceManualOverride.query.filter(FaceManualOverride.face_id==face.id).first()
|
||||
mo = FaceForceMatchOverride.query.filter(FaceForceMatchOverride.face_id==face.id).first()
|
||||
if mo:
|
||||
mo.type = FaceOverrideType.query.filter( FaceOverrideType.name== 'Manual match to existing person' ).first()
|
||||
face.manual_override=mo
|
||||
|
||||
@@ -232,7 +232,7 @@ $(document).ready( function()
|
||||
{
|
||||
if( objs[current].faces[i].override )
|
||||
{
|
||||
item_list['remove_override_force_match']={ 'name': 'Remove override for this face', 'which_face': i, 'id': objs[current].faces[i].id }
|
||||
item_list['remove_force_match_override']={ 'name': 'Remove override for this face', 'which_face': i, 'id': objs[current].faces[i].id }
|
||||
}
|
||||
else if( objs[current].faces[i].who )
|
||||
{
|
||||
@@ -277,7 +277,7 @@ function OverrideForceMatch( person_id, key )
|
||||
}
|
||||
}
|
||||
ofm='&person_id='+person_id+'&face_id='+item[key].id
|
||||
$.ajax({ type: 'POST', data: ofm, url: '/override_force_match', success: function(data) {
|
||||
$.ajax({ type: 'POST', data: ofm, url: '/add_force_match_override', success: function(data) {
|
||||
objs[current].faces[item[key].which_face].override={}
|
||||
objs[current].faces[item[key].which_face].override.who=data.person_tag
|
||||
objs[current].faces[item[key].which_face].override.distance='N/A'
|
||||
@@ -340,17 +340,18 @@ function SearchForPerson(content, key, face_id, face_pos, type_id)
|
||||
if( key == "NMO_1" )
|
||||
{
|
||||
func='OverrideForceMatch('+person.id+',\''+key+'\' )'
|
||||
func_sn=func_func_ao=func
|
||||
content+= '<div class="col">' + person.tag + ' (' + person.firstname+' '+person.surname+ ') </div>'
|
||||
content+= '<button onClick="'+func+'" class="col btn btn-success py-1 input-group-prepend">Add Override</button>'
|
||||
}
|
||||
if( key == 'no_match_new_refimg' )
|
||||
{
|
||||
func='AddRefimgTo('+person.id+',\''+key+'\''
|
||||
func_sn=func+ ', true )'
|
||||
func_ao=func+ ', false )'
|
||||
}
|
||||
content+= '<div class="col">' + person.tag + ' (' + person.firstname+' '+person.surname+ ') </div><div class="col input-group">'
|
||||
content+= '<button onClick="'+func_sn+'" class="btn btn-success py-1 input-group-prepend">Add & search now</a> '
|
||||
content+= '<button onClick="'+func_ao+'" class="btn btn-outline-success py-1 input-group-append">Add only</a></div>'
|
||||
content+= '<button onClick="'+func_sn+'" class="btn btn-success py-1 input-group-prepend">Add & search now</button> '
|
||||
content+= '<button onClick="'+func_ao+'" class="btn btn-outline-success py-1 input-group-append">Add only</button></div>'
|
||||
}
|
||||
content+='</div class="row">'
|
||||
}
|
||||
$('#search_person_results').html( content )
|
||||
@@ -361,9 +362,13 @@ function SearchForPerson(content, key, face_id, face_pos, type_id)
|
||||
|
||||
function RemoveOverrideForceMatch(face_pos)
|
||||
{
|
||||
d='&face_id='+objs[current].faces[face_pos].id+'&person_tag='+objs[current].faces[face_pos].who+
|
||||
'&file_eid='+current
|
||||
$.ajax({ type: 'POST', data: d, url: '/remove_override_force_match',
|
||||
if( objs[current].faces[face_pos].override )
|
||||
who=objs[current].faces[face_pos].override.who
|
||||
else
|
||||
who=objs[current].faces[face_pos].who
|
||||
|
||||
d='&face_id='+objs[current].faces[face_pos].id+'&person_tag='+who+'&file_eid='+current
|
||||
$.ajax({ type: 'POST', data: d, url: '/remove_force_match_override',
|
||||
success: function(data) {
|
||||
delete objs[current].faces[face_pos].override
|
||||
$('#dbox').modal('hide')
|
||||
@@ -377,7 +382,7 @@ function RemoveOverrideForceMatch(face_pos)
|
||||
function RemoveOverrideNoMatch(face_pos, type_id)
|
||||
{
|
||||
d='&face_id='+objs[current].faces[face_pos].id+'&type_id='+type_id
|
||||
$.ajax({ type: 'POST', data: d, url: '/remove_override_no_match',
|
||||
$.ajax({ type: 'POST', data: d, url: '/remove_no_match_override',
|
||||
success: function(data) {
|
||||
delete objs[current].faces[face_pos].override
|
||||
$('#dbox').modal('hide')
|
||||
@@ -442,7 +447,7 @@ function FaceDBox(key, item)
|
||||
}
|
||||
} )
|
||||
div+='</div><div class="col-6">'
|
||||
if ( key == 'remove_override_force_match' )
|
||||
if ( key == 'remove_force_match_override' )
|
||||
{
|
||||
if( objs[current].faces[face_pos].override.type_name == 'Manual match to existing person' )
|
||||
div+='<div class="row col-12">remove this override (force match to: ' + objs[current].faces[face_pos].override.who + ')</div>'
|
||||
|
||||
6
job.py
6
job.py
@@ -171,9 +171,10 @@ def joblog(id):
|
||||
display_more=False
|
||||
order="desc"
|
||||
|
||||
if joblog.start_time:
|
||||
if joblog.pa_job_state == "Completed":
|
||||
duration=(joblog.last_update-joblog.start_time)
|
||||
else:
|
||||
elif joblog.start_time:
|
||||
duration=(datetime.now(pytz.utc)-joblog.start_time)
|
||||
duration= duration-timedelta(microseconds=duration.microseconds)
|
||||
estimate=None
|
||||
@@ -183,6 +184,9 @@ def joblog(id):
|
||||
estimate_s = duration_s / joblog.current_file_num * joblog.num_files
|
||||
estimate = timedelta( seconds=(estimate_s-duration_s) )
|
||||
estimate = estimate - timedelta(microseconds=estimate.microseconds)
|
||||
else:
|
||||
duration="N/A"
|
||||
estimate=None
|
||||
return render_template("joblog.html", job=joblog, logs=logs, duration=duration, display_more=display_more, order=order, estimate=estimate, refresh=refresh)
|
||||
|
||||
###############################################################################
|
||||
|
||||
@@ -43,9 +43,11 @@ import threading
|
||||
import io
|
||||
import face_recognition
|
||||
import re
|
||||
import re
|
||||
import sys
|
||||
import json
|
||||
import ffmpeg
|
||||
import uuid
|
||||
|
||||
|
||||
# global debug setting
|
||||
@@ -54,6 +56,9 @@ if 'FLASK_ENV' not in os.environ or os.environ['FLASK_ENV'] != "production":
|
||||
else:
|
||||
DEBUG=False
|
||||
|
||||
# global list of override tables to allow enumeration over them ...
|
||||
override_tbls={ "face_no_match_override", "face_force_match_override", "disconnected_no_match_override", "disconnected_force_match_override" }
|
||||
|
||||
# this is required to handle the duplicate processing code
|
||||
sys.setrecursionlimit(50000)
|
||||
|
||||
@@ -239,6 +244,7 @@ class Settings(Base):
|
||||
import_path = Column(String)
|
||||
storage_path = Column(String)
|
||||
recycle_bin_path = Column(String)
|
||||
metadata_path = Column(String)
|
||||
auto_rotate = Column(Boolean)
|
||||
default_refimg_model = Column(Integer,ForeignKey('ai_model.id'), unique=True, nullable=False)
|
||||
default_scan_model = Column(Integer,ForeignKey('ai_model.id'), unique=True, nullable=False)
|
||||
@@ -388,10 +394,10 @@ class FaceNoMatchOverride(Base):
|
||||
|
||||
|
||||
################################################################################
|
||||
# Class containing a manual / forced match of a face in a file to a person
|
||||
# Class containing a manual / forced matches of a face in a file to a person
|
||||
################################################################################
|
||||
class FaceManualOverride(Base):
|
||||
__tablename__ = "face_manual_override"
|
||||
class FaceForceMatchOverride(Base):
|
||||
__tablename__ = "face_force_match_override"
|
||||
id = Column(Integer, Sequence('face_override_id_seq'), primary_key=True )
|
||||
face_id = Column(Integer, ForeignKey("face.id"), primary_key=True )
|
||||
person_id = Column(Integer, ForeignKey("person.id"), primary_key=True )
|
||||
@@ -418,15 +424,15 @@ class DisconnectedNoMatchOverride(Base):
|
||||
return f"<face: {self.face}, type_id={self.type_id}"
|
||||
|
||||
################################################################################
|
||||
# Class describing DisconnectedManualOverride in the database and DB via
|
||||
# Class describing DisconnectedForceMatchOverride in the database and DB via
|
||||
# sqlalchemy - Used when a face with an override is deleted from the DB to keep
|
||||
# the raw data so that we can reconnect the override if we ever scan that same
|
||||
# file/face again (think delete/undelete file, rebuild DB from file sys/from
|
||||
# scratch, etc)
|
||||
# used specifically for a match that was forced between a face and a person
|
||||
################################################################################
|
||||
class DisconnectedManualOverride(Base):
|
||||
__tablename__ = "disconnected_manual_override"
|
||||
class DisconnectedForceMatchOverride(Base):
|
||||
__tablename__ = "disconnected_force_match_override"
|
||||
face = Column( LargeBinary, primary_key=True )
|
||||
person_id = Column(Integer, ForeignKey('person.id'))
|
||||
|
||||
@@ -631,6 +637,22 @@ def SettingsIPath():
|
||||
paths.append(settings.base_path+p)
|
||||
return paths
|
||||
|
||||
|
||||
##############################################################################
|
||||
# SettingsMPath(): return path to actual metadata path from settings
|
||||
##############################################################################
|
||||
def SettingsMPath():
|
||||
settings = session.query(Settings).first()
|
||||
if not settings or settings.metadata_path == "":
|
||||
print ("WARNING: no Settings for metadata path")
|
||||
return None
|
||||
p=settings.metadata_path
|
||||
if p[0] == '/':
|
||||
return p
|
||||
else:
|
||||
return settings.base_path+p
|
||||
|
||||
|
||||
##############################################################################
|
||||
# ProcessImportDirs(): wrapper func to call passed in job for each
|
||||
# storage path defined in Settings - called via scan import job
|
||||
@@ -732,6 +754,42 @@ def JobCleanBin(job):
|
||||
FinishJob(job, f"Finished clean up of files older than {settings.bin_cleanup_file_age} days from Recycle Bin")
|
||||
return
|
||||
|
||||
##############################################################################
|
||||
# JobMetadata(job): is called when we add/remove an individual override
|
||||
# and in future for 'notes' per file -- This function writes an 'extra' copy
|
||||
# out to the filesystem. This allows a full delete/rebuild of the PA data
|
||||
# and we won't lose any manual overrides
|
||||
##############################################################################
|
||||
def JobMetadata(job):
|
||||
JobProgressState( job, "In Progress" )
|
||||
which=[jex.value for jex in job.extra if jex.name == "which"][0]
|
||||
face_id=[jex.value for jex in job.extra if jex.name == "face_id"][0]
|
||||
f=session.query(Face).get(face_id)
|
||||
if which == 'add_force_match_override' or which=='remove_force_match_override':
|
||||
person_id=[jex.value for jex in job.extra if jex.name == "person_id"][0]
|
||||
p=session.query(Person).get(person_id)
|
||||
os.makedirs( f"{SettingsMPath()}force_match_overrides", mode=0o777, exist_ok=True )
|
||||
fname=f"{SettingsMPath()}force_match_overrides/{face_id}_{p.tag}"
|
||||
elif which == 'add_no_match_override' or which == 'remove_no_match_override':
|
||||
type_id=[jex.value for jex in job.extra if jex.name == "type_id"][0]
|
||||
t=session.query(FaceOverrideType).get(type_id)
|
||||
os.makedirs( f"{SettingsMPath()}no_match_overrides", mode=0o777, exist_ok=True )
|
||||
fname=f"{SettingsMPath()}no_match_overrides/{face_id}_{t.name}"
|
||||
else:
|
||||
AddLogForJob(job, f"ERROR: Failed to process metadata (which={which})" )
|
||||
return
|
||||
try:
|
||||
if str.find( which, 'add_' ) == 0:
|
||||
file_h=open(fname, 'wb')
|
||||
file_h.write(f.face)
|
||||
file_h.close()
|
||||
else:
|
||||
os.remove( fname )
|
||||
except Exception as ex:
|
||||
AddLogForJob(job, f"ERROR: Error with metadata file '{fname}': {ex}" )
|
||||
FinishJob(job, f"Finished metadata job {which}" )
|
||||
return
|
||||
|
||||
##############################################################################
|
||||
# AddLogForJob(): add a log line to joblog, if the last time we wrote a log
|
||||
# was over 5 seconds ago, then commit the log to the db, so in f/e we see
|
||||
@@ -794,6 +852,8 @@ def RunJob(job):
|
||||
JobTransformImage(job)
|
||||
elif job.name == "clean_bin":
|
||||
JobCleanBin(job)
|
||||
elif job.name == "metadata":
|
||||
JobMetadata(job)
|
||||
else:
|
||||
FinishJob(job, f"ERROR: Requested to process unknown job type: {job.name}", "Failed")
|
||||
# okay, we finished a job, so check for any jobs that are dependant on this and run them...
|
||||
@@ -901,26 +961,91 @@ def JobScanStorageDir(job):
|
||||
MessageToFE( job.id, "success", "Completed (scan for new files)" )
|
||||
return
|
||||
|
||||
|
||||
##############################################################################
|
||||
# DisconnectSingleNoMatchOverride( job, o ): takes a single NoMatch override
|
||||
# and moves it over to the Disconnected version in the DB, and moves the
|
||||
# metadata on the filesystem from a NMO to disco* version to renames file to
|
||||
# use 0 for face_id and puts unique num on end
|
||||
##############################################################################
|
||||
def DisconnectSingleNoMatchOverride( job, o ):
|
||||
f=session.query(Face).get(o.face_id)
|
||||
ot=session.query(FaceOverrideType).get(o.type_id)
|
||||
d=session.query(DisconnectedNoMatchOverride).filter(
|
||||
DisconnectedNoMatchOverride.type_id==o.type_id, DisconnectedNoMatchOverride.face==f.face ).first()
|
||||
# jic, check its not already there - shouldn't occur, but FS and DB can get out of sync
|
||||
# no unique keys in Disco*, so just being over-cautious
|
||||
if not d:
|
||||
session.add( DisconnectedNoMatchOverride( face=f.face, type_id=o.type_id ) )
|
||||
|
||||
# now deal with 'renaming' the metadata on FS
|
||||
p=f'{SettingsMPath()}/no_match_overrides/'
|
||||
fname=f'{p}{o.face_id}_{ot.name}'
|
||||
new_fname=f'{p}0_{ot.name}_{uuid.uuid4()}'
|
||||
try:
|
||||
if os.path.exists( fname ):
|
||||
os.replace( fname, new_fname )
|
||||
else:
|
||||
file_h=open( new_fname, 'wb')
|
||||
file_h.write(f.face)
|
||||
file_h.close()
|
||||
except Exception as e:
|
||||
AddLogForJob( job, f"ERROR: Failed to move an override to a 'DisconnectedNoMatchOverride' override in metadata: {e}")
|
||||
|
||||
session.query(FaceNoMatchOverride).filter( FaceNoMatchOverride.face_id==o.face_id, FaceNoMatchOverride.type_id==o.type_id).delete()
|
||||
# force commit here as we now have added Disco, remove Override and made FS metadata match
|
||||
session.commit()
|
||||
return
|
||||
|
||||
##############################################################################
|
||||
# DisconnectSingleForceMatchOverride( job, o ): takes a single ForceMatch
|
||||
# override and moves it over to the Disconnected version in the DB, and moves
|
||||
# the metadata on the filesystem from a NMO to disco* version to renames file
|
||||
# to use 0 for face_id and puts unique num on end
|
||||
##############################################################################
|
||||
def DisconnectSingleForceMatchOverride( job, o ):
|
||||
f=session.query(Face).get(o.face_id)
|
||||
p=session.query(Person).get(o.person_id)
|
||||
d=session.query(DisconnectedForceMatchOverride).filter(
|
||||
DisconnectedForceMatchOverride.person_id==o.person_id, DisconnectedForceMatchOverride.face==f.face ).first()
|
||||
# jic, check its not already there - shouldn't occur, but FS and DB can get out of sync
|
||||
# no unique keys in Disco*, so just being over-cautious
|
||||
if not d:
|
||||
session.add( DisconnectedForceMatchOverride( face=f.face, person_id=o.person_id ) )
|
||||
|
||||
# now deal with 'renaming' the metadata on FS
|
||||
path=f'{SettingsMPath()}/force_match_overrides/'
|
||||
fname=f'{path}{o.face_id}_{p.tag}'
|
||||
new_fname=f'{path}0_{p.tag}_{uuid.uuid4()}'
|
||||
try:
|
||||
if os.path.exists( fname ):
|
||||
os.replace( fname, new_fname )
|
||||
else:
|
||||
file_h=open( new_fname, 'wb')
|
||||
file_h.write(f.face)
|
||||
file_h.close()
|
||||
except Exception as e:
|
||||
AddLogForJob( job, f"ERROR: Failed to move an override to a 'DisconnectedForceMatchOverride' override in metadata: {e}")
|
||||
|
||||
session.query(FaceForceMatchOverride).filter( FaceForceMatchOverride.face_id==o.face_id, FaceForceMatchOverride.person_id==o.person_id).delete()
|
||||
# force commit here as we now have added Disco, remove Override and made FS metadata match
|
||||
session.commit()
|
||||
return
|
||||
|
||||
##############################################################################
|
||||
# All face Overrides should not just be deleted, they should be disconnected
|
||||
# from the file (in face_file_link), but instead keep the raw face data so
|
||||
# that a face that is found in a future scan can still keep the override
|
||||
# connection
|
||||
##############################################################################
|
||||
def DisconnectAllOverrides():
|
||||
def DisconnectAllOverrides(job):
|
||||
overrides=session.query(FaceNoMatchOverride).all()
|
||||
for o in overrides:
|
||||
f=session.query(Face).get(o.face_id)
|
||||
session.add( DisconnectedNoMatchOverride( face=f.face, type_id=o.type_id ) )
|
||||
overrides=session.query(FaceNoMatchOverride).delete()
|
||||
DisconnectSingleNoMatchOverride(job, o )
|
||||
|
||||
overrides=session.query(FaceManualOverride).all()
|
||||
overrides=session.query(FaceForceMatchOverride).all()
|
||||
for o in overrides:
|
||||
f=session.query(Face).get(o.face_id)
|
||||
session.add( DisconnectedManualOverride( face=f.face, person_id=o.person_id ) )
|
||||
overrides=session.query(FaceManualOverride).delete()
|
||||
session.commit()
|
||||
|
||||
DisconnectSingleForceMatchOverride( job, o )
|
||||
return
|
||||
|
||||
|
||||
@@ -930,7 +1055,7 @@ def DisconnectAllOverrides():
|
||||
##############################################################################
|
||||
def JobForceScan(job):
|
||||
JobProgressState( job, "In Progress" )
|
||||
DisconnectAllOverrides()
|
||||
DisconnectAllOverrides(job)
|
||||
session.query(PA_UserState).delete()
|
||||
session.query(FaceFileLink).delete()
|
||||
session.query(FaceRefimgLink).delete()
|
||||
@@ -1341,6 +1466,7 @@ def HandleAnyFSDeletions(job):
|
||||
rms = session.query(Entry).filter(Entry.exists_on_fs==False,Entry.type_id!=dtype.id).all()
|
||||
rm_cnt=0
|
||||
for rm in rms:
|
||||
DelFacesForFile( job, rm.id )
|
||||
RemoveFileFromDB(job, rm, f"INFO: Removing file: {rm.name} from system as it is no longer on the file system")
|
||||
rm_cnt+=1
|
||||
|
||||
@@ -1990,6 +2116,157 @@ def JobRestoreFiles(job):
|
||||
FinishJob(job, f"Finished restoring selected file(s)")
|
||||
return
|
||||
|
||||
|
||||
####################################################################################################################################
|
||||
# CopyOverrides(): copies the overrides from 4 override tbls into tmp_<tbl>s
|
||||
# Metadata is only going to be used in cases where the DB does not have the
|
||||
# overrides that were once put in by hand - we are extra-careful processing
|
||||
# these, so we check there is a metadata path, that we aren't in the middle of
|
||||
# processing metadata when we Init (which will show up as tmp_<tbl> still
|
||||
# existing
|
||||
####################################################################################################################################
|
||||
def CopyOverrides():
|
||||
try:
|
||||
for tbl in override_tbls:
|
||||
session.execute( f"select * into tmp_{tbl} from {tbl}")
|
||||
# force a commit here - I want to fail before I delete override content
|
||||
session.commit()
|
||||
# now take all 4 override tables in DB and clear them out
|
||||
for tbl in override_tbls:
|
||||
session.execute( f"delete from {tbl}" )
|
||||
session.commit()
|
||||
except Exception as ex:
|
||||
print( f"ERROR: there are existing tmp tables when processing metadata. This SHOULD NEVER HAPPEN - manual intervention needed" )
|
||||
print( f"ERROR: most likely the job manager was killed during processing metadata - you may want to manually put" )
|
||||
print( f"ERROR: the contents of the 'tmp_*' tables back into their corresponding official metadata tables " )
|
||||
print( f"ERROR: and try to restart the job manager" )
|
||||
exit( 1 )
|
||||
return
|
||||
|
||||
####################################################################################################################################
|
||||
# GetFaceInMetadata(fname): quick wrapper to return face as binary data from
|
||||
# metdata file 'fname'
|
||||
####################################################################################################################################
|
||||
def GetFaceInMetadata(fname):
|
||||
try:
|
||||
file_h=open(fname, "rb")
|
||||
face_data=file_h.read(-1)
|
||||
file_h.close()
|
||||
except Exception as ex:
|
||||
print( f"ERROR: FATAL tried to read in override data and cant read content" )
|
||||
print( f"ERROR: manual intervention needed - exc={ex}" )
|
||||
exit(1)
|
||||
return face_data
|
||||
|
||||
####################################################################################################################################
|
||||
# ReloadMetadata(): reads in any metadata and puts it back into the DB (if needed)
|
||||
# Metadata will be disconnected overrides & eventually actual metadata we store per file
|
||||
# see https://wiki.depaoli.id.au/en/shared/photoassistant/metadata for detailed
|
||||
# explanation of this function
|
||||
####################################################################################################################################
|
||||
def ReloadMetadata(job):
    """Read override metadata from the filesystem and reconcile it with the DB.

    Metadata is the disconnected/connected face overrides stored under the
    settings metadata path. Overrides currently in the DB were copied to
    tmp_* tables by CopyOverrides(); after processing the filesystem copies
    we replay anything that was only in the DB, then drop the tmp tables.
    See https://wiki.depaoli.id.au/en/shared/photoassistant/metadata for a
    detailed explanation.

    Returns False when no metadata path is configured, otherwise None.
    """
    AddLogForJob(job, f"INFO: Loading/Retrieving any Metadata...")

    # no path, then no metadata (probably first ever run)
    p = SettingsMPath()
    if not p:
        FinishJob( job, "No metadata path - skipping" )
        return False

    # copy overrides into tmp tables
    CopyOverrides()

    # process Metadata on FS for no_match_overrides (disco ones will have 0 as face_id)
    for fname in glob.glob( f'{p}/no_match_overrides/*' ):
        # type derived from the basename (e.g. 0_Too Young_uuid*, 1_Too Young, 2_Ignore Face, etc.)
        # NOTE: match on the basename only, otherwise digits/underscores in the
        # directory prefix could satisfy the pattern first
        match = re.search( r'(\d+)_([^_\.]+)', os.path.basename(fname) )
        face_id = match.group(1)
        type_name = match.group(2)
        otype = session.query(FaceOverrideType).filter(FaceOverrideType.name==type_name).one()
        face_data = GetFaceInMetadata(fname)
        if DEBUG:
            print( f"Found metadata showing Override of type: {type_name}" )

        # check that both the id and data match - if so make new FaceNoMatch otherwise Disco*FaceNoMatch
        face = session.query( Face ).filter( Face.id==face_id ).filter( Face.face == face_data ).first()
        if face:
            session.add( FaceNoMatchOverride( face_id=face_id, type_id=otype.id ) )
        else:
            session.add( DisconnectedNoMatchOverride( face=face_data, type_id=otype.id ) )
            # if the FS copy claimed a (now stale) face id > 0, rename it to a disco name
            # (int() because the regex group is a string - '0' is truthy as a string)
            if int(face_id):
                try:
                    os.replace( fname, f'{p}/no_match_overrides/0_{otype.name}_{uuid.uuid4()}' )
                except Exception as ex:
                    print( f"ERROR: renaming no-match metadata on filesystem failed: {ex}" )

    # process Metadata on FS for force_match_overrides (disco ones will have 0 as face_id)
    for fname in glob.glob( f'{p}/force_match_overrides/*' ):
        # person derived from the basename (e.g. 0_ddp_uuid*, 1_ddp, 2_mich, etc.)
        match = re.search( r'(\d+)_([^_]+)', os.path.basename(fname) )
        face_id = match.group(1)
        person_tag = match.group(2)
        # NOTE: named 'person', NOT 'p' - the original shadowed the metadata
        # path here, corrupting every os.replace() destination below
        person = session.query(Person).filter(Person.tag==person_tag).one()
        face_data = GetFaceInMetadata(fname)
        if DEBUG:
            print( f"Found metadata showing Override match for person: {person_tag}" )

        # check that both the id and data match - if so make new FaceForceMatch otherwise Disco*ForceMatch
        face = session.query( Face ).filter( Face.id==face_id ).filter( Face.face == face_data ).first()
        if face:
            session.add( FaceForceMatchOverride( face_id=face_id, person_id=person.id ) )
        else:
            session.add( DisconnectedForceMatchOverride( face=face_data, person_id=person.id ) )
            # if face>0, then we need to move the FS copy to a disco
            if int(face_id):
                try:
                    os.replace( fname, f'{p}/force_match_overrides/0_{person.tag}_{uuid.uuid4()}' )
                except Exception as ex:
                    print( f"ERROR: renaming force-match metadata on filesystem failed: {ex}" )

    # now process each of the tmp tables for anything that was in the DB but not on FS (e.g rm'd metadata)
    overrides = session.execute( "select face_id, type_id from tmp_face_no_match_override" )
    for o in overrides:
        print( f"F No Match: o.face_id={o.face_id}" )
        print( f"F No Match: o.type_id={o.type_id}" )
        nmo = session.query(FaceNoMatchOverride).filter(FaceNoMatchOverride.face_id==o.face_id).filter(FaceNoMatchOverride.type_id==o.type_id).first()
        if not nmo:
            session.add( FaceNoMatchOverride( face_id=o.face_id, type_id=o.type_id ) )

    overrides = session.execute( "select face_id, person_id from tmp_face_force_match_override" )
    for o in overrides:
        print( f"F Force Match: o.face_id={o.face_id}" )
        print( f"F Force Match: o.person_id={o.person_id}" )
        fmo = session.query(FaceForceMatchOverride).filter(FaceForceMatchOverride.face_id==o.face_id,FaceForceMatchOverride.person_id==o.person_id).first()
        if not fmo:
            session.add( FaceForceMatchOverride( face_id=o.face_id, person_id=o.person_id ) )

    overrides = session.execute( "select face, type_id from tmp_disconnected_no_match_override" )
    for o in overrides:
        print( f"D No Match: o.type_id={o.type_id}" )
        dnmo = session.query(DisconnectedNoMatchOverride).filter(DisconnectedNoMatchOverride.face==o.face).filter(DisconnectedNoMatchOverride.type_id==o.type_id).first()
        if not dnmo:
            session.add( DisconnectedNoMatchOverride( face=o.face, type_id=o.type_id ) )

    overrides = session.execute( "select face, person_id from tmp_disconnected_force_match_override" )
    for o in overrides:
        print( f"D Force Match: o.person_id={o.person_id}" )
        dfmo = session.query(DisconnectedForceMatchOverride).filter(DisconnectedForceMatchOverride.face==o.face).filter(DisconnectedForceMatchOverride.person_id==o.person_id).first()
        if not dfmo:
            session.add( DisconnectedForceMatchOverride( face=o.face, person_id=o.person_id ) )

    # finally, drop the tmp tables
    for tbl in override_tbls:
        session.execute( f"drop table tmp_{tbl}" )

    # ok, finally commit all these changes - dont do this until now. Worst case if we crash/fail, the overrides should continue to be in tmp_{tbl}
    session.commit()

    return
|
||||
|
||||
|
||||
####################################################################################################################################
|
||||
# InitialValidationChecks(): checks paths (and dirs) exist in DB on first run.
|
||||
# IF path from settings does not exists - log it
|
||||
@@ -1998,9 +2275,9 @@ def JobRestoreFiles(job):
|
||||
def InitialValidationChecks():
|
||||
now=datetime.now(pytz.utc)
|
||||
job=NewJob( "init" )
|
||||
settings = session.query(Settings).first()
|
||||
AddLogForJob(job, f"INFO: Starting Initial Validation checks...")
|
||||
job.start_time=datetime.now(pytz.utc)
|
||||
JobProgressState( job, "In Progress" )
|
||||
AddLogForJob(job, f"INFO: Starting Initial Validation checks...")
|
||||
path=SettingsRBPath()
|
||||
rbp_exists=0
|
||||
if os.path.exists(path):
|
||||
@@ -2032,10 +2309,21 @@ def InitialValidationChecks():
|
||||
symlink=CreateSymlink(job,ptype,path)
|
||||
if not ip_exists:
|
||||
AddLogForJob(job, "ERROR: None of the import paths in the settings exist - Please fix now");
|
||||
if not rbp_exists or not sp_exists or not ip_exists:
|
||||
|
||||
path=SettingsMPath()
|
||||
mp_exists=0
|
||||
if os.path.exists(path):
|
||||
mp_exists=1
|
||||
ptype = session.query(PathType).filter(PathType.name=='Metadata').first().id
|
||||
symlink=CreateSymlink(job,ptype,path)
|
||||
if not mp_exists:
|
||||
AddLogForJob(job, "ERROR: The metadata path in settings does not exist - Please fix now");
|
||||
|
||||
if not rbp_exists or not sp_exists or not ip_exists or not mp_exists:
|
||||
FinishJob(job,"ERROR: Job manager EXITing until above errors are fixed by paths being created or settings being updated to valid paths", "Failed" )
|
||||
exit(-1)
|
||||
|
||||
ReloadMetadata(job)
|
||||
FinishJob(job,"Finished Initial Validation Checks")
|
||||
return
|
||||
|
||||
@@ -2053,6 +2341,39 @@ def AddFaceToFile( locn_data, face_data, file_eid, model_id, settings ):
|
||||
ffl = FaceFileLink( face_id=face.id, file_eid=file_eid, model_used=model_id )
|
||||
session.add(ffl)
|
||||
session.commit()
|
||||
|
||||
# See if this face is included in any Disconnected overrides, if so copy it
|
||||
# back to override connected to this/new face_id for same old face :)
|
||||
dfmo=session.query(DisconnectedForceMatchOverride).filter(DisconnectedForceMatchOverride.face==face.face).first()
|
||||
if dfmo:
|
||||
session.add( FaceForceMatchOverride( face_id=face.id, person_id=dfmo.person_id ) )
|
||||
session.query(DisconnectedForceMatchOverride).filter(DisconnectedForceMatchOverride.face==dfmo.face).delete()
|
||||
# move metadata from Disco to Normal
|
||||
p=session.query(Person).get(dfmo.person_id)
|
||||
path=f'{SettingsMPath()}/force_match_overrides/'
|
||||
try:
|
||||
# can only be 1 match with the * being a UUID
|
||||
fname=glob.glob( f'{path}0_{p.tag}_*' )[0]
|
||||
new_fname=f'{path}{face.id}_{p.tag}'
|
||||
os.replace( fname, new_fname )
|
||||
except Exception as ex:
|
||||
print( f"ERROR: AddFaceToFile-face connects to 'disconnected-force-match' metadata, but fixing the filesystem metadata failed: {ex}" )
|
||||
|
||||
dnmo=session.query(DisconnectedNoMatchOverride).filter(DisconnectedNoMatchOverride.face==face.face).first()
|
||||
if dnmo:
|
||||
session.add( FaceNoMatchOverride( face_id=face.id, type_id=dnmo.type_id ) )
|
||||
session.query(DisconnectedNoMatchOverride).filter(DisconnectedNoMatchOverride.face==dnmo.face).delete()
|
||||
# move metadata from Disco to Normal
|
||||
t=session.query(FaceOverrideType).get(dnmo.type_id)
|
||||
path=f'{SettingsMPath()}/no_match_overrides/'
|
||||
try:
|
||||
# can only be 1 match with the * being a UUID
|
||||
fname=glob.glob( f'{path}0_{t.name}_*' )[0]
|
||||
new_fname=f'{path}{face.id}_{t.name}'
|
||||
os.replace( fname, new_fname )
|
||||
except Exception as ex:
|
||||
print( f"ERROR: AddFaceToFile-face connects to 'disconnected-no-match' metadata, but fixing the filesystem metadata failed: {ex}" )
|
||||
|
||||
return
|
||||
|
||||
####################################################################################################################################
|
||||
@@ -2078,23 +2399,21 @@ def DelMatchesForFile( job, ent ):
|
||||
return
|
||||
|
||||
####################################################################################################################################
|
||||
# DelFacesForFile(): quick func to delete any faces associated with the specified file
|
||||
# DelFacesForFile(job, eid): quick func to delete any faces associated with the specified file
|
||||
####################################################################################################################################
|
||||
def DelFacesForFile( eid ):
|
||||
def DelFacesForFile( job, eid ):
|
||||
ffl=session.query(FaceFileLink).filter(FaceFileLink.file_eid==eid).all()
|
||||
|
||||
for link in ffl:
|
||||
# find any manaul overrides on this face (before we delete it, and put them into the disc* table)
|
||||
o=session.query(FaceManualOverride).filter(FaceManualOverride.face_id==link.face_id).one()
|
||||
# find any forced match overrides on this face (before we delete it, and put them into the disc* table)
|
||||
o=session.query(FaceForceMatchOverride).filter(FaceForceMatchOverride.face_id==link.face_id).first()
|
||||
if o:
|
||||
f=session.query(Face).get(link.face_id)
|
||||
session.add( DisconnectedManualOverride( face=f.face, person_id=o.person_id ) )
|
||||
DisconnectSingleForceMatchOverride(job, o )
|
||||
|
||||
# find any no-match overrides on this face (before we delete it, and put them into the disc* table)
|
||||
o=session.query(FaceNoMatchOverride).filter(FaceNoMatchOverride.face_id==link.face_id).one()
|
||||
o=session.query(FaceNoMatchOverride).filter(FaceNoMatchOverride.face_id==link.face_id).first()
|
||||
if o:
|
||||
f=session.query(Face).get(link.face_id)
|
||||
session.add( DisconnectedNoMatchOverride( face=f.face, type_id=o.type_id ) )
|
||||
DisconnectSingleNoMatchOverride( job, o )
|
||||
|
||||
session.execute( f"delete from face where id in (select face_id from face_file_link where file_eid = {eid})" )
|
||||
|
||||
@@ -2186,7 +2505,7 @@ def ScanFileForPerson( job, e, force=False ):
|
||||
# if we are forcing this, delete any old faces (this will also delete linked tables), and reset faces_created_on to None
|
||||
if force:
|
||||
AddLogForJob( job, f'INFO: force is true, so deleting old face information for {e.name}' )
|
||||
DelFacesForFile( e.id )
|
||||
DelFacesForFile( job, e.id )
|
||||
file_h.faces_created_on = 0
|
||||
|
||||
# optimise: dont rescan if we already have faces
|
||||
|
||||
60
person.py
60
person.py
@@ -9,7 +9,7 @@ from status import st, Status
|
||||
from flask_login import login_required, current_user
|
||||
from werkzeug.utils import secure_filename
|
||||
from shared import GenFace, GenThumb
|
||||
from face import Face, FaceRefimgLink, FaceOverrideType, FaceNoMatchOverride, FaceManualOverride
|
||||
from face import Face, FaceRefimgLink, FaceOverrideType, FaceNoMatchOverride, FaceForceMatchOverride
|
||||
from path import Path, PathType
|
||||
from job import JobExtra, NewJob
|
||||
|
||||
@@ -250,6 +250,10 @@ def person(id):
|
||||
return render_template("base.html" )
|
||||
|
||||
for r in person.refimg:
|
||||
# in case DB data gets broken, just fix it - still keeps happening
|
||||
if r.face_locn[0]=='{':
|
||||
r.face_locn[0]='['
|
||||
r.face_locn[-1]=']'
|
||||
r.tmp_locn=json.loads(r.face_locn)
|
||||
form = PersonForm(request.values, obj=person)
|
||||
return render_template("person.html", person=person, form=form, page_title = page_title)
|
||||
@@ -333,11 +337,11 @@ def add_refimg_to_person():
|
||||
return resp
|
||||
|
||||
################################################################################
|
||||
# /override_force_match -> POST
|
||||
# /add_force_match_override -> POST
|
||||
################################################################################
|
||||
@app.route("/override_force_match", methods=["POST"])
|
||||
@app.route("/add_force_match_override", methods=["POST"])
|
||||
@login_required
|
||||
def override_force_match():
|
||||
def add_force_match_override():
|
||||
person_id = request.form['person_id']
|
||||
p = Person.query.get(person_id);
|
||||
if not p:
|
||||
@@ -348,10 +352,17 @@ def override_force_match():
|
||||
if not f:
|
||||
raise Exception("could not find face to add override for!")
|
||||
|
||||
mo = FaceManualOverride( face_id=f.id, person_id=p.id )
|
||||
mo = FaceForceMatchOverride( face_id=f.id, person_id=p.id )
|
||||
db.session.add( mo )
|
||||
db.session.commit()
|
||||
|
||||
jex=[]
|
||||
jex.append( JobExtra( name="which", value="add_force_match_override" ) )
|
||||
jex.append( JobExtra( name="face_id", value=f.id ) )
|
||||
jex.append( JobExtra( name="person_id", value=p.id ) )
|
||||
# dont do status update here, the F/E is in the middle of a dbox, just send metadata through to the B/E
|
||||
NewJob( "metadata", 0, None, jex )
|
||||
|
||||
print( f"Placing an override match with face_id {face_id}, for person: {p.tag}" )
|
||||
# this will reply to the Ajax / POST, and cause the page to re-draw with new face override to person_tag
|
||||
resp={}
|
||||
@@ -359,29 +370,40 @@ def override_force_match():
|
||||
return resp
|
||||
|
||||
################################################################################
|
||||
# /remove_override_force_match -> POST
|
||||
# /remove_force_match_override -> POST
|
||||
################################################################################
|
||||
@app.route("/remove_override_force_match", methods=["POST"])
|
||||
@app.route("/remove_force_match_override", methods=["POST"])
|
||||
@login_required
|
||||
def remove_override_force_match():
|
||||
def remove_force_match_override():
|
||||
face_id = request.form['face_id']
|
||||
person_tag = request.form['person_tag']
|
||||
file_eid = request.form['file_eid']
|
||||
print( f"Remove override force match of face_id={face_id} to person_tag={person_tag}" )
|
||||
|
||||
FaceManualOverride.query.filter( FaceManualOverride.face_id==face_id ).delete()
|
||||
FaceForceMatchOverride.query.filter( FaceForceMatchOverride.face_id==face_id ).delete()
|
||||
db.session.commit()
|
||||
|
||||
print( f"person_tag={person_tag}" )
|
||||
# needed to use person_id in job below (allows consistent processing in job_mgr)
|
||||
p=Person.query.filter(Person.tag==person_tag).one()
|
||||
|
||||
jex=[]
|
||||
jex.append( JobExtra( name="which", value="remove_force_match_override" ) )
|
||||
jex.append( JobExtra( name="face_id", value=face_id ) )
|
||||
jex.append( JobExtra( name="person_id", value=p.id ) )
|
||||
# dont do status update here, the F/E is in the middle of a dbox, just send metadata through to the B/E
|
||||
NewJob( "metadata", 0, None, jex )
|
||||
|
||||
# this will reply to the Ajax / POST, and cause the page to re-draw with new face override
|
||||
resp={}
|
||||
return resp
|
||||
|
||||
################################################################################
|
||||
# /remove_override_no_match -> POST
|
||||
# /remove_no_match_override -> POST
|
||||
################################################################################
|
||||
@app.route("/remove_override_no_match", methods=["POST"])
|
||||
@app.route("/remove_no_match_override", methods=["POST"])
|
||||
@login_required
|
||||
def remove_override_no_match():
|
||||
def remove_no_match_override():
|
||||
face_id = request.form['face_id']
|
||||
type_id = request.form['type_id']
|
||||
print( f"Remove override of no match (type_id={type_id}) for face_id={face_id}" )
|
||||
@@ -389,6 +411,13 @@ def remove_override_no_match():
|
||||
FaceNoMatchOverride.query.filter( FaceNoMatchOverride.face_id==face_id, FaceNoMatchOverride.type_id==type_id ).delete()
|
||||
db.session.commit()
|
||||
|
||||
jex=[]
|
||||
jex.append( JobExtra( name="which", value="remove_no_match_override" ) )
|
||||
jex.append( JobExtra( name="face_id", value=face_id ) )
|
||||
jex.append( JobExtra( name="type_id", value=type_id ) )
|
||||
# dont do status update here, the F/E is in the middle of a dbox, just send metadata through to the B/E
|
||||
NewJob( "metadata", 0, None, jex )
|
||||
|
||||
# this will reply to the Ajax / POST, and cause the page to re-draw with new face override
|
||||
resp={}
|
||||
return resp
|
||||
@@ -414,6 +443,13 @@ def add_no_match_override():
|
||||
db.session.add( nmo )
|
||||
db.session.commit()
|
||||
|
||||
jex=[]
|
||||
jex.append( JobExtra( name="which", value="add_no_match_override" ) )
|
||||
jex.append( JobExtra( name="face_id", value=f.id ) )
|
||||
jex.append( JobExtra( name="type_id", value=t.id ) )
|
||||
# dont do status update here, the F/E is in the middle of a dbox, just send metadata through to the B/E
|
||||
NewJob( "metadata", 0, None, jex )
|
||||
|
||||
print( f"Placing an override of NO Match for face_id {face_id}" )
|
||||
# this will reply to the Ajax / POST, and cause the page to re-draw with new face override to person_tag
|
||||
resp={}
|
||||
|
||||
18
settings.py
18
settings.py
@@ -30,6 +30,7 @@ class Settings(db.Model):
|
||||
import_path = db.Column(db.String)
|
||||
storage_path = db.Column(db.String)
|
||||
recycle_bin_path = db.Column(db.String)
|
||||
metadata_path = db.Column(db.String)
|
||||
auto_rotate = db.Column(db.Boolean)
|
||||
default_refimg_model = db.Column(db.Integer,db.ForeignKey('ai_model.id'), unique=True, nullable=False)
|
||||
default_scan_model = db.Column(db.Integer,db.ForeignKey('ai_model.id'), unique=True, nullable=False)
|
||||
@@ -64,6 +65,7 @@ class SettingsForm(FlaskForm):
|
||||
import_path = StringField('Path(s) to import from:', [validators.DataRequired()])
|
||||
storage_path = StringField('Path to store sorted images to:', [validators.DataRequired()])
|
||||
recycle_bin_path = StringField('Path to temporarily store deleted images in:', [validators.DataRequired()])
|
||||
metadata_path = StringField('Path to store metadata to:', [validators.DataRequired()])
|
||||
auto_rotate = BooleanField('Automatically rotate jpegs based on exif', [validators.AnyOf([True, False])])
|
||||
default_refimg_model = SelectField( 'Default model to use for reference images', choices=[(c.id, c.name) for c in AIModel.query.order_by('id')] )
|
||||
default_scan_model = SelectField( 'Default model to use for all scanned images', choices=[(c.id, c.name) for c in AIModel.query.order_by('id')] )
|
||||
@@ -89,6 +91,7 @@ def settings():
|
||||
HELP['import_path']="Path(s) to import files from. If starting with /, then used literally, otherwise base path is prepended"
|
||||
HELP['storage_path']="Path(s) to store sorted files to. If starting with /, then used literally, otherwise base path is prepended"
|
||||
HELP['recycle_bin_path']="Path where deleted files are moved to. If starting with /, then used literally, otherwise base path is prepended"
|
||||
HELP['metadata_path']="Path where metadata (overrides) are stored. If starting with /, then used literally, otherwise base path is prepended"
|
||||
HELP['auto_rotate']="Automatically rotate jpegs based on exif to orient them so that AI matching will work. NOTE: this actually changes/rewrites the file - as it is a simple rotate, it is down without losing quality/content"
|
||||
HELP['default_refimg_model']="Default face recognition model used for reference images - cnn is slower/more accurate, hog is faster/less accurate - we scan (small) refimg once, so cnn is okay"
|
||||
HELP['default_scan_model']="Default face recognition model used for scanned images - cnn is slower/more accurate, hog is faster/less accurate - we scan (large) scanned images lots, so cnn NEEDS gpu/mem"
|
||||
@@ -109,6 +112,7 @@ def settings():
|
||||
s.import_path = request.form['import_path']
|
||||
s.storage_path = request.form['storage_path']
|
||||
s.recycle_bin_path = request.form['recycle_bin_path']
|
||||
s.metadata_path = request.form['metadata_path']
|
||||
if 'auto_rotate' in request.form:
|
||||
s.auto_rotate = True
|
||||
else:
|
||||
@@ -180,3 +184,17 @@ def SettingsIPath():
|
||||
else:
|
||||
paths.append(settings.base_path+p)
|
||||
return paths
|
||||
|
||||
##############################################################################
|
||||
# SettingsMPath(): return path to actual metadata path from settings
|
||||
##############################################################################
|
||||
def SettingsMPath():
    """Return the metadata path from settings, or None when not configured.

    An absolute metadata_path (leading '/') is used literally; otherwise it
    is anchored at settings.base_path, matching the other Settings*Path helpers.
    """
    settings = Settings.query.first()
    # 'not settings.metadata_path' also covers a NULL column (None) -
    # the original only checked == "" and then crashed on p[0]
    if not settings or not settings.metadata_path:
        print ("WARNING: no Settings for metadata path")
        return
    p = settings.metadata_path
    if p[0] == '/':
        return p
    else:
        return settings.base_path + p
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
from settings import Settings, SettingsRBPath, SettingsIPath, SettingsSPath
|
||||
from flask import request, render_template, redirect, url_for
|
||||
from flask_login import login_required, current_user
|
||||
from main import db, app, ma
|
||||
|
||||
20
tables.sql
20
tables.sql
@@ -24,7 +24,8 @@ insert into AI_MODEL values ( 1, 'hog', 'normal' );
|
||||
insert into AI_MODEL values ( 2, 'cnn', 'more accurate / much slower' );
|
||||
|
||||
create table SETTINGS(
|
||||
ID integer, BASE_PATH varchar, IMPORT_PATH varchar, STORAGE_PATH varchar, RECYCLE_BIN_PATH varchar,
|
||||
ID integer,
|
||||
BASE_PATH varchar, IMPORT_PATH varchar, STORAGE_PATH varchar, RECYCLE_BIN_PATH varchar, METADATA_PATH varchar,
|
||||
AUTO_ROTATE Boolean,
|
||||
DEFAULT_REFIMG_MODEL integer, DEFAULT_SCAN_MODEL integer, DEFAULT_THRESHOLD float,
|
||||
FACE_SIZE_LIMIT integer,
|
||||
@@ -123,21 +124,21 @@ insert into FACE_OVERRIDE_TYPE values ( (select nextval('FACE_OVERRIDE_TYPE_ID_S
|
||||
-- keep non-redundant FACE because, when we rebuild data we may have a null FACE_ID, but still want to connect to this override
|
||||
-- from a previous AI pass... (would happen if we delete a file and then reimport/scan it), OR, more likely we change (say) a threshold, etc.
|
||||
-- any reordering of faces, generates new face_ids... (but if the face data was the same, then this override should stand)
|
||||
create table FACE_NO_MATCH_OVERRIDE ( ID integer, FACE_ID integer, TYPE_ID integer, FACE bytea,
|
||||
create table FACE_NO_MATCH_OVERRIDE ( ID integer, FACE_ID integer, TYPE_ID integer,
|
||||
constraint FK_FNMO_FACE_ID foreign key (FACE_ID) references FACE(ID),
|
||||
constraint FK_FNMO_TYPE foreign key (TYPE_ID) references FACE_OVERRIDE_TYPE(ID),
|
||||
constraint PK_FNMO_ID primary key(ID) );
|
||||
|
||||
-- manual match goes to person not refimg, so on search, etc. we deal with this anomaly (via sql not ORM)
|
||||
create table FACE_MANUAL_OVERRIDE ( ID integer, FACE_ID integer, PERSON_ID integer, constraint PK_FACE_MANUAL_OVERRIDE_ID primary key(ID) );
|
||||
create table FACE_FORCE_MATCH_OVERRIDE ( ID integer, FACE_ID integer, PERSON_ID integer, constraint PK_FACE_FORCE_MATCH_OVERRIDE_ID primary key(ID) );
|
||||
|
||||
create table DISCONNECTED_NO_MATCH_OVERRIDE ( FACE bytea, TYPE_ID integer,
|
||||
constraint FK_DNMO_TYPE_ID foreign key (TYPE_ID) references FACE_OVERRIDE_TYPE(ID),
|
||||
constraint PK_DNMO_FACE primary key (FACE) );
|
||||
|
||||
create table DISCONNECTED_MANUAL_OVERRIDE ( FACE bytea, PERSON_ID integer,
|
||||
constraint FK_DMO_PERSON_ID foreign key (PERSON_ID) references PERSON(ID),
|
||||
constraint PK_DMO_FACE primary key (FACE) );
|
||||
create table DISCONNECTED_FORCE_MATCH_OVERRIDE ( FACE bytea, PERSON_ID integer,
|
||||
constraint FK_DFMO_PERSON_ID foreign key (PERSON_ID) references PERSON(ID),
|
||||
constraint PK_DFMO_FACE primary key (FACE) );
|
||||
|
||||
create table PERSON_REFIMG_LINK ( PERSON_ID integer, REFIMG_ID integer,
|
||||
constraint PK_PRL primary key(PERSON_ID, REFIMG_ID),
|
||||
@@ -165,6 +166,7 @@ create table PA_JOB_MANAGER_FE_MESSAGE ( ID integer, JOB_ID integer, ALERT varch
|
||||
insert into PATH_TYPE values ( (select nextval('PATH_TYPE_ID_SEQ')), 'Import' );
|
||||
insert into PATH_TYPE values ( (select nextval('PATH_TYPE_ID_SEQ')), 'Storage' );
|
||||
insert into PATH_TYPE values ( (select nextval('PATH_TYPE_ID_SEQ')), 'Bin' );
|
||||
insert into PATH_TYPE values ( (select nextval('PATH_TYPE_ID_SEQ')), 'Metadata' );
|
||||
|
||||
-- default data for types of files
|
||||
insert into FILE_TYPE values ( (select nextval('FILE_TYPE_ID_SEQ')), 'Image' );
|
||||
@@ -178,8 +180,8 @@ insert into FILE_TYPE values ( (select nextval('FILE_TYPE_ID_SEQ')), 'Unknown' )
|
||||
--insert into PERSON values ( (select nextval('PERSON_ID_SEQ')), 'cam', 'Cameron', 'De Paoli' );
|
||||
--insert into PERSON values ( (select nextval('PERSON_ID_SEQ')), 'mich', 'Michelle', 'De Paoli' );
|
||||
-- DEV(ddp):
|
||||
insert into SETTINGS ( id, base_path, import_path, storage_path, recycle_bin_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) values ( (select nextval('SETTINGS_ID_SEQ')), '/home/ddp/src/photoassistant/', 'images_to_process/', 'photos/', '.pa_bin/', true, 1, 1, '0.55', 43, 1, 1, 7, 30, 3 );
|
||||
insert into SETTINGS ( id, base_path, import_path, storage_path, recycle_bin_path, metadata_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) values ( (select nextval('SETTINGS_ID_SEQ')), '/home/ddp/src/photoassistant/', 'images_to_process/', 'photos/', '.pa_bin/', '.pa_metadata/', true, 1, 1, '0.55', 43, 1, 1, 7, 30, 3 );
|
||||
-- DEV(cam):
|
||||
--insert into SETTINGS ( id, base_path, import_path, storage_path, recycle_bin_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) values ( (select nextval('SETTINGS_ID_SEQ')), 'c:/Users/cam/Desktop/code/python/photoassistant/', 'c:\images_to_process', 'photos/', '.pa_bin/', true, 1, 1, '0.55', 43, 1, 1, 7, 30, 3 );
|
||||
--insert into SETTINGS ( id, base_path, import_path, storage_path, recycle_bin_path, metadata_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) values ( (select nextval('SETTINGS_ID_SEQ')), 'c:/Users/cam/Desktop/code/python/photoassistant/', 'c:\images_to_process', 'photos/', '.pa_bin/', '.pa_metadata/', true, 1, 1, '0.55', 43, 1, 1, 7, 30, 3 );
|
||||
-- PROD:
|
||||
-- insert into SETTINGS ( id, base_path, import_path, storage_path, recycle_bin_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) values ( (select nextval('SETTINGS_ID_SEQ')), '/export/docker/storage/', 'Camera_uploads/', 'photos/', '.pa_bin/', true, 1, 1, '0.55', 43, 1, 1, 7, 30, 4 );
|
||||
-- insert into SETTINGS ( id, base_path, import_path, storage_path, recycle_bin_path, metadata_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) values ( (select nextval('SETTINGS_ID_SEQ')), '/export/docker/storage/', 'Camera_uploads/', 'photos/', '.pa_bin/', '.pa_metadata/', true, 1, 1, '0.55', 43, 1, 1, 7, 30, 4 );
|
||||
|
||||
Reference in New Issue
Block a user