Updated for newer Postgres and SQLAlchemy: mainly use text() for explicit SQL, make sure value is a string in JobExtra, remove the incorrect use of distinct() in the GetEntries query, and rework the last/num_entry count lookups to go through the connection/execute API instead of the old raw engine.execute().
--- a/files.py
+++ b/files.py
@@ -2,7 +2,7 @@ from wtforms import SubmitField, StringField, HiddenField, validators, Form
 from flask_wtf import FlaskForm
 from flask import request, render_template, redirect, send_from_directory, url_for, jsonify, make_response
 from main import db, app, ma
-from sqlalchemy import Sequence
+from sqlalchemy import Sequence, text
 from sqlalchemy.exc import SQLAlchemyError
 import os
 import glob
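Background for the new import: SQLAlchemy 2.x drops implicit execution of plain SQL strings (and the old Engine.execute()), so textual statements have to be wrapped in text() and run on an explicit connection. A minimal sketch of the pattern the rest of this commit adopts, using a placeholder connection URL and the entry table from the queries further down:

    from sqlalchemy import create_engine, text

    engine = create_engine("postgresql+psycopg2://user:pass@localhost/photos")  # URL is a placeholder
    with engine.connect() as conn:
        # plain SQL must be wrapped in text() before it can be executed
        row = conn.execute(text("select count(1) as count from entry")).first()
        print(row.count)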
@@ -236,7 +236,7 @@ def GetEntriesInSearchView( OPT ):
     search_term=OPT.orig_search_term
     if 'AI:' in OPT.orig_search_term:
         search_term = search_term.replace('AI:','')
-    join=f"Entry.query.join(File).distinct().join(FaceFileLink).join(Face).join(FaceRefimgLink).join(Refimg).join(PersonRefimgLink).join(Person).filter(Person.tag.ilike('%{search_term}%'))"
+    join=f"Entry.query.join(File).join(FaceFileLink).join(Face).join(FaceRefimgLink).join(Refimg).join(PersonRefimgLink).join(Person).filter(Person.tag.ilike('%{search_term}%'))"
     if 'AI:' in OPT.orig_search_term:
         all_entries = eval( f"{join}.{OPT.order}.offset(OPT.offset).limit(OPT.how_many).all()")
     else:
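The query here is still assembled as a string and run through eval() so that OPT.order, offset and limit can be appended textually; written directly as an ORM expression, the same join chain (minus the dynamic ordering) would look roughly like the sketch below, which is not part of the commit:

    ai_query = (Entry.query.join(File).join(FaceFileLink).join(Face)
                .join(FaceRefimgLink).join(Refimg)
                .join(PersonRefimgLink).join(Person)
                .filter(Person.tag.ilike(f'%{search_term}%')))
    all_entries = ai_query.offset(OPT.offset).limit(OPT.how_many).all()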
@@ -280,15 +280,12 @@ def GetEntriesInSearchView( OPT ):
 
     #num_entries
     num_e_sql = f"select count(1) from ( {by_fname} union {by_dirname} union {by_ai} ) as foo"
-    num_e_result = db.engine.execute( num_e_sql )
-    for res in num_e_result:
-        OPT.num_entries=res.count
+    with db.engine.connect() as conn:
+        OPT.num_entries = conn.execute( text( num_e_sql ) ).first().count
 
     last_entry_sql= f"{sel_no_order} order by {OPT.last_order_raw} limit 1"
-    last_entry=db.engine.execute( last_entry_sql )
-    # can only be 1 due to limit above
-    for l in last_entry:
-        OPT.last_eid = l.id
+    with db.engine.connect() as conn:
+        OPT.last_eid = conn.execute( text( last_entry_sql ) ).first().id
     # store first/last eid into prefs
     pref=PA_UserState.query.filter(PA_UserState.pa_user_dn==current_user.dn,PA_UserState.path_type==OPT.path_type,PA_UserState.orig_ptype==OPT.orig_ptype,PA_UserState.orig_search_term==OPT.orig_search_term).first()
     UpdatePref( pref, OPT )
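A possible further tightening, not applied in this commit: the count query yields a single value, so Result.scalar_one() skips the intermediate row, and both lookups can share one connection:

    with db.engine.connect() as conn:
        OPT.num_entries = conn.execute( text( num_e_sql ) ).scalar_one()
        OPT.last_eid = conn.execute( text( last_entry_sql ) ).first().id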
@@ -488,7 +485,8 @@ def scan_sp():
 @app.route("/fix_dups", methods=["POST"])
 @login_required
 def fix_dups():
-    rows = db.engine.execute( "select e1.id as id1, f1.hash, d1.rel_path as rel_path1, d1.eid as did1, e1.name as fname1, p1.id as path1, p1.type_id as path_type1, e2.id as id2, d2.rel_path as rel_path2, d2.eid as did2, e2.name as fname2, p2.id as path2, p2.type_id as path_type2 from entry e1, file f1, dir d1, entry_dir_link edl1, path_dir_link pdl1, path p1, entry e2, file f2, dir d2, entry_dir_link edl2, path_dir_link pdl2, path p2 where e1.id = f1.eid and e2.id = f2.eid and d1.eid = edl1.dir_eid and edl1.entry_id = e1.id and edl2.dir_eid = d2.eid and edl2.entry_id = e2.id and p1.type_id != (select id from path_type where name = 'Bin') and p1.id = pdl1.path_id and pdl1.dir_eid = d1.eid and p2.type_id != (select id from path_type where name = 'Bin') and p2.id = pdl2.path_id and pdl2.dir_eid = d2.eid and f1.hash = f2.hash and e1.id != e2.id and f1.size_mb = f2.size_mb order by path1, rel_path1, fname1");
+    with db.engine.connect() as conn:
+        rows = conn.execute( text( "select e1.id as id1, f1.hash, d1.rel_path as rel_path1, d1.eid as did1, e1.name as fname1, p1.id as path1, p1.type_id as path_type1, e2.id as id2, d2.rel_path as rel_path2, d2.eid as did2, e2.name as fname2, p2.id as path2, p2.type_id as path_type2 from entry e1, file f1, dir d1, entry_dir_link edl1, path_dir_link pdl1, path p1, entry e2, file f2, dir d2, entry_dir_link edl2, path_dir_link pdl2, path p2 where e1.id = f1.eid and e2.id = f2.eid and d1.eid = edl1.dir_eid and edl1.entry_id = e1.id and edl2.dir_eid = d2.eid and edl2.entry_id = e2.id and p1.type_id != (select id from path_type where name = 'Bin') and p1.id = pdl1.path_id and pdl1.dir_eid = d1.eid and p2.type_id != (select id from path_type where name = 'Bin') and p2.id = pdl2.path_id and pdl2.dir_eid = d2.eid and f1.hash = f2.hash and e1.id != e2.id and f1.size_mb = f2.size_mb order by path1, rel_path1, fname1" ) )
 
     if rows.returns_rows == False:
         SetFELog(f"Err, No more duplicates? Old link followed, or something is wrong!", "warning")
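One caveat with the new connect() block, assuming rows is still consumed after the block exits (the surrounding indentation is not visible in this hunk): the connection is closed when the with block ends, so any rows needed later should be fetched while it is open. A sketch, where dup_sql is just a stand-in name for the long duplicate-finding select above:

    with db.engine.connect() as conn:
        result = conn.execute( text( dup_sql ) )
        have_rows = result.returns_rows
        rows = result.all()   # materialise rows before the connection closes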
@@ -498,7 +496,7 @@ def fix_dups():
         # default to 10, see if we have a larger value as someone reset it in the gui, rather than first time invoked
         pagesize = 10
         jexes = JobExtra.query.join(Job).filter(Job.name=='check_dups').filter(Job.pa_job_state=='New').all()
-        jexes.append( JobExtra( name="pagesize", value=pagesize ) )
+        jexes.append( JobExtra( name="pagesize", value=str(pagesize) ) )
     else:
         pagesize=int(request.form['pagesize'])
     DD=Duplicates()
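The str() wrappers added throughout this commit point at JobExtra.value being a text column, so a typed value such as pagesize (an int) has to be cast before insert; values coming from request.form are already strings, so the cast there is purely defensive. A minimal sketch of the presumed model, where every column other than name and value is a guess:

    class JobExtra(db.Model):
        id = db.Column(db.Integer, primary_key=True)              # hypothetical
        job_id = db.Column(db.Integer, db.ForeignKey('job.id'))   # hypothetical
        name = db.Column(db.String(64))
        value = db.Column(db.String(256))   # stored as text, hence value=str(...)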
@@ -523,15 +521,15 @@ def rm_dups():
         if 'kfhash-' in el:
             # get which row/number kf it is...
             _, which = el.split('-')
-            jex.append( JobExtra( name=f"kfid-{which}", value=request.form['kfid-'+which] ) )
-            jex.append( JobExtra( name=f"kfhash-{which}", value=request.form[el] ) )
+            jex.append( JobExtra( name=f"kfid-{which}", value=str(request.form['kfid-'+which] )) )
+            jex.append( JobExtra( name=f"kfhash-{which}", value=str(request.form[el] )) )
         if 'kdhash-' in el:
             # get which row/number kd it is...
             _, which = el.split('-')
-            jex.append( JobExtra( name=f"kdid-{which}", value=request.form['kdid-'+which] ) )
-            jex.append( JobExtra( name=f"kdhash-{which}", value=request.form[el] ) )
+            jex.append( JobExtra( name=f"kdid-{which}", value=str(request.form['kdid-'+which]) ) )
+            jex.append( JobExtra( name=f"kdhash-{which}", value=str(request.form[el]) ) )
 
-    jex.append( JobExtra( name="pagesize", value=10 ) )
+    jex.append( JobExtra( name="pagesize", value="10" ) )
 
     job=NewJob( name="rm_dups", num_files=0, wait_for=None, jex=jex, desc="to delete duplicate files" )
 
@@ -545,7 +543,7 @@ def rm_dups():
 def restore_files():
     jex=[]
     for el in request.form:
-        jex.append( JobExtra( name=f"{el}", value=request.form[el] ) )
+        jex.append( JobExtra( name=f"{el}", value=str(request.form[el]) ) )
 
     job=NewJob( name="restore_files", num_files=0, wait_for=None, jex=jex, desc="to restore selected file(s)" )
     return redirect("/jobs")
@@ -558,7 +556,7 @@ def restore_files():
 def delete_files():
     jex=[]
     for el in request.form:
-        jex.append( JobExtra( name=f"{el}", value=request.form[el] ) )
+        jex.append( JobExtra( name=f"{el}", value=str(request.form[el]) ) )
 
     job=NewJob( name="delete_files", num_files=0, wait_for=None, jex=jex, desc="to delete selected file(s)" )
     return redirect("/jobs")
@@ -572,7 +570,7 @@ def move_files():
 
     jex=[]
     for el in request.form:
-        jex.append( JobExtra( name=f"{el}", value=request.form[el] ) )
+        jex.append( JobExtra( name=f"{el}", value=str(request.form[el]) ) )
     job=NewJob( name="move_files", num_files=0, wait_for=None, jex=jex, desc="to move selected file(s)" )
     # data is not used, but send response to trigger CheckForJobs()
     return make_response( jsonify( job_id=job.id ) )
@@ -715,7 +713,7 @@ def transform():
 
     jex=[]
    for el in request.form:
-        jex.append( JobExtra( name=f"{el}", value=request.form[el] ) )
+        jex.append( JobExtra( name=f"{el}", value=str(request.form[el]) ) )
 
     job=NewJob( name="transform_image", num_files=0, wait_for=None, jex=jex, desc="to transform selected file(s)" )
     return make_response( jsonify( job_id=job.id ) )