diff --git a/pa_job_manager.py b/pa_job_manager.py
index 55679d7..bc6a10a 100644
--- a/pa_job_manager.py
+++ b/pa_job_manager.py
@@ -15,7 +15,7 @@
 
 ### SQLALCHEMY IMPORTS ###
 from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy import Column, Integer, String, Sequence, Float, ForeignKey, DateTime, LargeBinary, Boolean, func
+from sqlalchemy import Column, Integer, String, Sequence, Float, ForeignKey, DateTime, LargeBinary, Boolean, func, text
 from sqlalchemy.exc import SQLAlchemyError
 from sqlalchemy.orm import relationship
 from sqlalchemy import create_engine
@@ -2094,7 +2094,7 @@ def JobCheckForDups(job):
     AddLogForJob( job, f"Check for duplicates" )
     ClearOtherDupMessagesAndJobs()
 
-    res = session.execute( "select count(e1.id) from entry e1, file f1, dir d1, entry_dir_link edl1, path_dir_link pdl1, path p1, entry e2, file f2, dir d2, entry_dir_link edl2, path_dir_link pdl2, path p2 where e1.id = f1.eid and e2.id = f2.eid and d1.eid = edl1.dir_eid and edl1.entry_id = e1.id and edl2.dir_eid = d2.eid and edl2.entry_id = e2.id and p1.type_id != (select id from path_type where name = 'Bin') and p1.id = pdl1.path_id and pdl1.dir_eid = d1.eid and p2.type_id != (select id from path_type where name = 'Bin') and p2.id = pdl2.path_id and pdl2.dir_eid = d2.eid and f1.hash = f2.hash and e1.id != e2.id and f1.size_mb = f2.size_mb" )
+    res = session.execute( text( "select count(e1.id) from entry e1, file f1, dir d1, entry_dir_link edl1, path_dir_link pdl1, path p1, entry e2, file f2, dir d2, entry_dir_link edl2, path_dir_link pdl2, path p2 where e1.id = f1.eid and e2.id = f2.eid and d1.eid = edl1.dir_eid and edl1.entry_id = e1.id and edl2.dir_eid = d2.eid and edl2.entry_id = e2.id and p1.type_id != (select id from path_type where name = 'Bin') and p1.id = pdl1.path_id and pdl1.dir_eid = d1.eid and p2.type_id != (select id from path_type where name = 'Bin') and p2.id = pdl2.path_id and pdl2.dir_eid = d2.eid and f1.hash = f2.hash and e1.id != e2.id and f1.size_mb = f2.size_mb") )
     for row in res:
         if row.count > 0:
             AddLogForJob(job, f"Found duplicates, Creating Status message in front-end for attention")
@@ -2242,18 +2242,19 @@ def JobRestoreFiles(job):
 def CopyOverrides():
     try:
         for tbl in override_tbls:
-            session.execute( f"select * into tmp_{tbl} from {tbl}")
+            session.execute( text( f"select * into tmp_{tbl} from {tbl}") )
         # force a commit here - I want to fail before I delete override content
         session.commit()
         # now take all 4 override tables in DB and clear them out
         for tbl in override_tbls:
-            session.execute( f"delete from {tbl}" )
+            session.execute( text( f"delete from {tbl}" ) )
         session.commit()
     except Exception as ex:
         print( f"ERROR: there are existing tmp tables when processing metadata. This SHOULD NEVER HAPPEN - manual intervention needed" )
         print( f"ERROR: most likely the job manager was killed during processing metadata - you may want to manually put" )
         print( f"ERROR: the contents of the 'tmp_*' tables back into their corresponding official metadata tables " )
         print( f"ERROR: and try to restart the job manager" )
+        print( f"ERROR: orig ex: {ex}" )
         exit( 1 )
     return
 
@@ -2346,7 +2347,7 @@ def ReloadMetadata(job):
 
     # now process each of the tmp tables for anything that was in the DB but not on FS (e.g rm'd metadata)
 
-    overrides=session.execute( "select face_id, type_id from tmp_face_no_match_override" )
+    overrides=session.execute( text( "select face_id, type_id from tmp_face_no_match_override" ) )
     for o in overrides:
         print( f"F Force Match: o.face_id={o.face_id}" )
         print( f"F No Match: o.type_id={o.type_id}" )
@@ -2354,7 +2355,7 @@ def ReloadMetadata(job):
         if not nmo:
             session.add( FaceNoMatchOverride( face_id=o.face_id, type_id=o.type_id ) )
 
-    overrides=session.execute( "select face_id, person_id from tmp_face_force_match_override" )
+    overrides=session.execute( text( "select face_id, person_id from tmp_face_force_match_override" ) )
     for o in overrides:
         print( f"F Force Match: o.face_id={o.face_id}" )
         print( f"F Force Match: o.person_id={o.person_id}" )
@@ -2362,14 +2363,14 @@ def ReloadMetadata(job):
         if not fmo:
             session.add( FaceForceMatchOverride( face_id=o.face_id, person_id=o.person_id ) )
 
-    overrides=session.execute( "select face, type_id from tmp_disconnected_no_match_override" )
+    overrides=session.execute( text( "select face, type_id from tmp_disconnected_no_match_override" ) )
     for o in overrides:
         print( f"D No Match: o.type_id={o.type_id}" )
         dnmo=session.query(DisconnectedNoMatchOverride).filter(DisconnectedNoMatchOverride.face==o.face).filter(DisconnectedNoMatchOverride.type_id==o.type_id).first()
         if not dnmo:
             session.add( DisconnectedNoMatchOverride( face=o.face, type_id=o.type_id ) )
 
-    overrides=session.execute( "select face, person_id from tmp_disconnected_force_match_override" )
+    overrides=session.execute( text( "select face, person_id from tmp_disconnected_force_match_override" ) )
     for o in overrides:
         print( f"D Force Match: o.person_id={o.person_id}" )
         dfmo=session.query(DisconnectedForceMatchOverride).filter(DisconnectedForceMatchOverride.face==o.face).filter(DisconnectedForceMatchOverride.person_id==o.person_id).first()
@@ -2378,7 +2379,7 @@ def ReloadMetadata(job):
 
     # finally, drop the tmp tables
     for tbl in override_tbls:
-        session.execute( f"drop table tmp_{tbl}" )
+        session.execute( text( f"drop table tmp_{tbl}" ) )
 
     # ok, finally commit all these changes - dont do this until now. Worst case if we crash/fail, the overrides should continue to be in tmp_{tbl}
     session.commit()
@@ -2517,7 +2518,7 @@ def DelMatchesForFile( job, ent ):
     if DEBUG:
         AddLogForJob(job, f'Remove any old matches in {ent.name}')
 
-    session.execute( f"delete from face_refimg_link where face_id in (select face_id from face_file_link where file_eid = {ent.id})" )
+    session.execute( text( f"delete from face_refimg_link where face_id in (select face_id from face_file_link where file_eid = {ent.id})" ) )
     ent.file_details.last_ai_scan=0
     session.add(ent)
     return
@@ -2539,7 +2540,7 @@ def DelFacesForFile( job, eid ):
         if o:
             DisconnectSingleNoMatchOverride( job, o )
 
-    session.execute( f"delete from face where id in (select face_id from face_file_link where file_eid = {eid})" )
+    session.execute( text( f"delete from face where id in (select face_id from face_file_link where file_eid = {eid})" ) )
     session.commit()
     return
 
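
Note (not part of the patch): every hunk above applies the same mechanical change, wrapping a raw SQL string in sqlalchemy.text() before handing it to session.execute(). Passing a bare string is deprecated in SQLAlchemy 1.4 and rejected outright in 2.0, which is what this patch prepares for. The sketch below illustrates the pattern in isolation; it uses a throwaway in-memory SQLite engine and a placeholder table, not the real pa_job_manager.py schema, and additionally shows a bound parameter (:eid) as an alternative to f-string interpolation.

from sqlalchemy import create_engine, text
from sqlalchemy.orm import Session

engine = create_engine("sqlite:///:memory:")  # placeholder engine for the sketch

with Session(engine) as session:
    # Placeholder schema; the real tables are defined by the models in pa_job_manager.py.
    session.execute(text("create table face_file_link (face_id integer, file_eid integer)"))
    session.execute(text("insert into face_file_link values (1, 42), (2, 42)"))

    # SQLAlchemy 2.0 no longer accepts a plain string here; text() is the supported wrapper.
    # A bound parameter (:eid) lets the driver handle quoting instead of an f-string.
    res = session.execute(
        text("select count(face_id) as cnt from face_file_link where file_eid = :eid"),
        {"eid": 42},
    )
    print(res.scalar_one())  # -> 2
    session.commit()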