no longer maintain too much state per user, remove all need to update it

This commit is contained in:
2025-10-02 17:54:51 +10:00
parent 5a923359bc
commit e526d99389

View File

@@ -513,42 +513,6 @@ class PA_JobManager_FE_Message(Base):
return "<id: {}, job_id: {}, level: {}, message: {}".format(self.id, self.job_id, self.level, self.message) return "<id: {}, job_id: {}, level: {}, message: {}".format(self.id, self.job_id, self.level, self.message)
##############################################################################
# Class describing PA_UserState and in the DB (via sqlalchemy)
# the state for a User defines a series of remembered states for a user
# to optimise their viewing, etc. If we scan and find new files, we need to
# invalidate these cached values, so we have this class here just for that
##############################################################################
class PA_UserState(Base):
    # Cached per-user browsing state (paging position, view settings, etc.).
    # Rows are invalidated wholesale — deleted, not updated — whenever an
    # import job finds new files, because the stored offsets go stale.
    __tablename__ = "pa_user_state"
    # Composite primary key: surrogate id + owning user DN + the path_type the
    # state was captured for.
    id = Column(Integer, Sequence('pa_user_state_id_seq'), primary_key=True )
    pa_user_dn = Column(String, ForeignKey('pa_user.dn'), primary_key=True )
    # Timezone-aware timestamp of last use; candidate criterion for a smarter,
    # scoped invalidation later (see DeleteOldPA_UserState).
    last_used = Column(DateTime(timezone=True))
    path_type = Column(String, primary_key=True, unique=False, nullable=False )
    # noo: meaning not evident from this file — presumably an ordering flag
    # (new/old?) — TODO confirm against the code that writes it
    noo = Column(String, unique=False, nullable=False )
    grouping = Column(String, unique=False, nullable=False )
    # Pagination state: page size (how_many), starting offset, and total size —
    # these offsets are what new files invalidate. Assumption; verify callers.
    how_many = Column(Integer, unique=False, nullable=False )
    st_offset = Column(Integer, unique=False, nullable=False )
    size = Column(Integer, unique=False, nullable=False )
    # Whether folders are shown/included in the listing — TODO confirm
    folders = Column(Boolean, unique=False, nullable=False )
    # Filesystem/browse location the state applies to.
    root = Column(String, unique=False, nullable=False )
    cwd = Column(String, unique=False, nullable=False )
    ## for now being lazy and not doing a separate table until I settle on needed fields and when
    # only used if ptype == View
    view_eid = Column(Integer, unique=False, nullable=False )
    orig_ptype = Column(String, unique=False, nullable=False )
    # only used if view and orig_ptype was search
    orig_search_term = Column(String, unique=False, nullable=False )
    orig_url = Column(String, unique=False, nullable=False )
    # Position within the current result set; nullable (no nullable=False).
    current = Column(Integer)
    first_eid = Column(Integer)
    last_eid = Column(Integer)
    num_entries = Column(Integer)
    def __repr__(self):
        # Debug representation dumping every stored field.
        return f"<pa_user_dn: {self.pa_user_dn}, path_type: {self.path_type}, noo: {self.noo}, grouping: {self.grouping}, how_many: {self.how_many}, st_offset: {self.st_offset}, size: {self.size}, folders: {self.folders}, root: {self.root}, cwd: {self.cwd}, view_eid: {self.view_eid}, orig_ptype: {self.orig_ptype}, orig_search_term: {self.orig_search_term}, orig_url: {self.orig_url}, current={self.current}, first_eid={self.first_eid}, last_eid={self.last_eid}, num_entries={self.num_entries}>"
############################################################################## ##############################################################################
# PAprint(): convenience function to prepend a timestamp to a printed string # PAprint(): convenience function to prepend a timestamp to a printed string
############################################################################## ##############################################################################
@@ -1131,7 +1095,6 @@ def DisconnectAllOverrides(job):
def JobForceScan(job): def JobForceScan(job):
JobProgressState( job, "In Progress" ) JobProgressState( job, "In Progress" )
DisconnectAllOverrides(job) DisconnectAllOverrides(job)
session.query(PA_UserState).delete()
session.query(FaceFileLink).delete() session.query(FaceFileLink).delete()
session.query(FaceRefimgLink).delete() session.query(FaceRefimgLink).delete()
session.query(Face).delete() session.query(Face).delete()
@@ -1668,18 +1631,6 @@ def find_last_successful_ai_scan(job):
return ai_job.last_update.timestamp() return ai_job.last_update.timestamp()
return 0 return 0
####################################################################################################################################
# when an import job actually finds new files, then the pa_user_state caches will become invalid (offsets are now wrong)
####################################################################################################################################
def DeleteOldPA_UserState(job):
    """Drop every cached PA_UserState row for *all* users.

    Deliberately coarse for now: when an import job finds new files the cached
    paging offsets are stale, so everything is thrown away. Could later be
    narrowed to the affected path and/or rows whose last_used predates this
    delete (the only race is an import changing things while someone is
    simultaneously viewing).
    """
    # path = [jex.value for jex in job.extra if jex.name == "path"][0]
    session.query(PA_UserState).delete()
#################################################################################################################################### ####################################################################################################################################
# JobImportDir(): job that scan import dir and processes entries in there - key function that uses os.walk() to traverse the # JobImportDir(): job that scan import dir and processes entries in there - key function that uses os.walk() to traverse the
# file system and calls AddFile()/AddDir() as necessary # file system and calls AddFile()/AddDir() as necessary
@@ -1788,8 +1739,6 @@ def JobImportDir(job):
if found_new_files: if found_new_files:
job.extra.append( JobExtra( name="new_files", value=str(found_new_files) ) ) job.extra.append( JobExtra( name="new_files", value=str(found_new_files) ) )
session.add(job) session.add(job)
# this will invalidate pa_user_state for this path's contents (offsets are now wrong), clear them out
DeleteOldPA_UserState(job)
rm_cnt=HandleAnyFSDeletions(job) rm_cnt=HandleAnyFSDeletions(job)