Compare commits
13 Commits
master
...
5f8c48ac18
| Author | SHA1 | Date | |
|---|---|---|---|
| 5f8c48ac18 | |||
| b67f2d9dcb | |||
| 5842bf2ab8 | |||
| be218c5049 | |||
| a28f016b8a | |||
| d2db7f6184 | |||
| 9ec8195d0a | |||
| 2325dcd22a | |||
| e0b597c58c | |||
| 0895268df2 | |||
| efceef7e57 | |||
| 21059a6235 | |||
| 4d80fa4e7c |
3
BUGs
3
BUGs
@@ -1,4 +1,5 @@
|
||||
### Next: 140
|
||||
### Next: 141
|
||||
BUG-140: When db is restarted underneath PA, it crashes job mgr... It should just accept timeouts, and keep trying to reconnect every 2? mins
|
||||
BUG-139: using any large entry list and going next a few times, ends say 4 pages of 50 into 4000 matches (entries from DB < 50)...
|
||||
- confirmed this is when person has 2 or more refimgs:
|
||||
- on page "2", we get 49 pulled back in the ORM instead of the 50 expected -- b/c I use that to indicate we must be at the end of the list if not 50 found
|
||||
|
||||
5
TODO
5
TODO
@@ -8,6 +8,7 @@
|
||||
* client side always has query_id. IF DB does not have query_id, then it's really old? - just say so...
|
||||
|
||||
* client side takes query_id, entry_lst, current_eid, offset, first/last_eid, etc. as part of its first route / html creation.
|
||||
* get this data as a json blob? or ask chatgpt to see how best to take the data and turn it into jscript data
|
||||
* it then decides based on all this to GetEntryDetails( subset of entry_lst ) <- needs new route
|
||||
* IN THEORY some of the subset of entry_lst don't exist -- BUT, we can handle that on response, e.g. say my query used to have 1,2,3, and since then another user/action deleted 2:
|
||||
- I ask for details on 1,2,3 and get back details on 1,3 only.
|
||||
@@ -21,6 +22,10 @@
|
||||
- When a job that flips, rotates, or deletes completes, then let's update the query details (e.g. remove eids, or remove the amendments)
|
||||
- this actually is quite an improvement, if someone is deleting 2 as per above, I will see that as a pending change in my unrelated query, ditto flips, etc.
|
||||
|
||||
* NEED to work through how we deal with directories when we do the json data versions above?
|
||||
- e.g. does entry_list only contain files? OR filter the details in the jscript?
|
||||
- how do we do dirs in this context? (when folders=True)
|
||||
|
||||
### GENERAL
|
||||
* jobs for AI should show path name
|
||||
* rm dups job should show progress bar
|
||||
|
||||
163
files.py
163
files.py
@@ -1,11 +1,11 @@
|
||||
from wtforms import SubmitField, StringField, HiddenField, validators, Form
|
||||
from flask_wtf import FlaskForm
|
||||
from flask import request, render_template, redirect, send_from_directory, url_for, jsonify, make_response
|
||||
from main import db, app, ma
|
||||
from sqlalchemy import Sequence, text
|
||||
from sqlalchemy import Sequence, text, select
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
import os
|
||||
import glob
|
||||
import json
|
||||
from PIL import Image
|
||||
from pymediainfo import MediaInfo
|
||||
import hashlib
|
||||
@@ -20,8 +20,8 @@ import pytz
|
||||
import html
|
||||
from flask_login import login_required, current_user
|
||||
from states import States, PA_UserState
|
||||
from query import Query
|
||||
|
||||
################################################################################
|
||||
# Local Class imports
|
||||
################################################################################
|
||||
from job import Job, JobExtra, Joblog, NewJob, SetFELog
|
||||
@@ -119,7 +119,7 @@ class File(db.Model):
|
||||
eid = db.Column(db.Integer, db.ForeignKey("entry.id"), primary_key=True )
|
||||
size_mb = db.Column(db.Integer, unique=False, nullable=False)
|
||||
thumbnail = db.Column(db.String, unique=False, nullable=True)
|
||||
hash = db.Column(db.Integer)
|
||||
hash = db.Column(db.String)
|
||||
year = db.Column(db.Integer)
|
||||
month = db.Column(db.Integer)
|
||||
day = db.Column(db.Integer)
|
||||
@@ -141,6 +141,62 @@ class FileType(db.Model):
|
||||
def __repr__(self):
    """Readable representation (id and name) used in logs/debugging."""
    fmt = "<id: {}, name={}>"
    return fmt.format(self.id, self.name)
|
||||
|
||||
|
||||
################################################################################
# Ordering criteria for every query, keyed by the user's 'noo' option.
# Consumed by unpacking into order_by: stmt.order_by(*order_map.get(OPT.noo))
################################################################################
order_map = {
    # date-based orderings fall back to name to break ties
    "Newest": (File.year.desc(), File.month.desc(), File.day.desc(), Entry.name.desc()),
    "Oldest": (File.year, File.month, File.day, Entry.name),
    # single-criterion entries are still tuples (note the trailing comma)
    "Z to A": (Entry.name.desc(),),
    "A to Z": (Entry.name.asc(),),
}
|
||||
|
||||
################################################################################
|
||||
|
||||
################################################################################
# Schemas for Path, FileType, File, Dir - used in EntrySchema
################################################################################
class PathTypeSchema(ma.SQLAlchemyAutoSchema):
    """Serializes the PathType model.

    BUGFIX(review): this schema was previously named ``PathType``, which
    shadowed the PathType *model* at module level after definition and broke
    the ``*Schema`` naming convention used by every other schema here.
    """
    class Meta: model = PathType
    load_instance = True


class PathSchema(ma.SQLAlchemyAutoSchema):
    """Serializes the Path model, nesting its PathType."""
    class Meta: model = Path
    load_instance = True
    type = ma.Nested(PathTypeSchema)


class FileTypeSchema(ma.SQLAlchemyAutoSchema):
    """Serializes the FileType model."""
    class Meta: model = FileType
    load_instance = True


class FileSchema(ma.SQLAlchemyAutoSchema):
    """Serializes the File model (size, thumbnail, hash, date fields)."""
    class Meta: model = File
    load_instance = True


class DirSchema(ma.SQLAlchemyAutoSchema):
    """Serializes the Dir model, including its eid and owning Path."""
    class Meta: model = Dir
    load_instance = True
    eid = ma.auto_field()  # explicitly include eid (FK primary key)
    in_path = ma.Nested(PathSchema)
|
||||
|
||||
################################################################################
# Schema for Entry so we can json for data to the client
################################################################################
class EntrySchema(ma.SQLAlchemyAutoSchema):
    """Serializes an Entry (id, name, type_id) plus nested detail records."""
    class Meta: model = Entry
    load_instance = True

    type = ma.Nested(FileTypeSchema)
    file_details = ma.Nested(FileSchema)
    # dir_details relies on DirSchema providing in_path
    dir_details = ma.Nested(DirSchema)
    # in_dir relies on DirSchema providing in_path and in_path.type
    in_dir = ma.Nested(DirSchema)
|
||||
|
||||
################################################################################
|
||||
# util function to just update the current/first/last positions needed for
|
||||
# viewing / using pa_user_state DB table
|
||||
@@ -327,9 +383,97 @@ def SetOrderStrings( OPT ):
|
||||
OPT.last_order_raw=f"e.name desc"
|
||||
return
|
||||
|
||||
################################################################################
# /get_entries_by_ids -> route where we supply list of entry ids (for next/prev
# page of data we want to show). Returns json of all matching entries
################################################################################
@app.route('/get_entries_by_ids', methods=['POST'])
@login_required
def process_ids():
    """Return JSON entry details for the entry ids POSTed in the body."""
    payload = request.get_json()          # parse JSON body
    wanted_ids = payload.get('ids', [])   # list of entry ids requested

    # DDP: debate here, do I get query_id, do I validate whether we are asking
    # for ids not in the query? OR, dont even make/store/have query?

    # fetch the matching rows, then let marshmallow shape them for the client
    matches = Entry.query.filter(Entry.id.in_(wanted_ids)).all()
    schema = EntrySchema(many=True)
    return jsonify(schema.dump(matches))
|
||||
|
||||
|
||||
################################################################################
# /get_dir_entries -> show thumbnail view of files from import_path(s)
################################################################################
@app.route("/get_dir_entries", methods=["POST"])
@login_required
def get_dir_entries():
    """Return JSON entry details for the content of a directory.

    POST body: { dir_id: <entry id of the directory>, back: <bool> }.
    When back is true we list the parent of dir_id instead ("up" a level).
    """
    data = request.get_json()         # parse JSON body
    dir_id = data.get('dir_id')       # entry id of the directory to list
    back = data.get('back', False)    # going back/up a level?

    # if we are going back, find the parent id and use that instead
    if back:
        stmt = ( select(EntryDirLink.dir_eid).filter(EntryDirLink.entry_id == dir_id) )
        parent_id = db.session.execute(stmt).scalars().first()
        # BUGFIX: previously indexed [0] on the full result list, which raised
        # IndexError when the dir had no parent link; stay put in that case
        if parent_id is not None:
            dir_id = parent_id

    # get content of dir_id
    stmt = ( select(Entry.id).join(EntryDirLink).filter(EntryDirLink.dir_eid == dir_id) )
    # FIXME: what do we do with ordering anyway???
    # stmt = stmt.order_by(*order_map.get(OPT.noo))
    ids = db.session.execute(stmt).scalars().all()

    entries_schema = EntrySchema(many=True)
    entries = Entry.query.filter(Entry.id.in_(ids)).all()
    return jsonify(entries_schema.dump(entries))
|
||||
|
||||
################################################################################
# Call this ONCE on first menu choice of View files, or search box submission
# create the list of entry ids that match the required viewing/list
################################################################################
def GetQueryData( OPT ):
    """Build the query_data dict (query_id, entry_list, root_eid) for OPT.

    Search is not implemented yet and returns early with empty values.
    Returns early (without 'root_eid') if no root Dir exists for OPT.prefix.
    """
    query_data = {}
    query_data['query_id'] = None
    query_data['entry_list'] = None

    if OPT.path_type == 'Search':
        print ("NOT YET")
        return query_data

    # always get the top of the (OPT.prefix) Path's eid and keep it for OPT.folders toggling/use
    dir_stmt = ( select(Entry.id).join(Dir).join(PathDirLink).join(Path).filter(Dir.rel_path == '').filter(Path.path_prefix == OPT.prefix) )
    # this should return the 1 Dir (that we want to see the content of) - and with only 1, no need to worry about order
    dir_id = db.session.execute(dir_stmt).scalars().first()
    # BUGFIX: guard the "no root dir for this prefix" case instead of
    # raising IndexError on an empty result list
    if dir_id is None:
        return query_data
    query_data['root_eid'] = dir_id

    if OPT.folders:
        # start folder view with only the root folder
        stmt = ( select(Entry.id).join(EntryDirLink).filter(EntryDirLink.dir_eid == dir_id) )
        query_data['entry_list'] = db.session.execute(stmt).scalars().all()
    else:
        # get every File that is in the OPT.prefix Path
        stmt = ( select(Entry.id).join(File).join(EntryDirLink).join(Dir).join(PathDirLink).join(Path).filter(Path.path_prefix == OPT.prefix) )
        # BUGFIX: an unknown OPT.noo used to unpack None (TypeError); default to "Newest"
        stmt = stmt.order_by( *order_map.get(OPT.noo, order_map["Newest"]) )
        query_data['entry_list'] = db.session.execute(stmt).scalars().all()

    # NOTE(review): a persisted Query row used to be created here (commented
    # out); kept out until we decide whether the server must store/validate
    # query state - originals are only needed to invalidate/recreate the list.
    return query_data
|
||||
|
||||
################################################################################
|
||||
# /GetEntries -> helper function that Gets Entries for required files to show
|
||||
# for several routes (files_ip, files_sp, files_rbp, search, view_list)
|
||||
# for several routes (files_ip, files_sp, files_rbp, search, view_list)
|
||||
################################################################################
|
||||
def GetEntries( OPT ):
|
||||
entries=[]
|
||||
@@ -374,6 +518,7 @@ def GetEntries( OPT ):
|
||||
OPT.num_entries=num_entries
|
||||
pref=PA_UserState.query.filter(PA_UserState.pa_user_dn==current_user.dn,PA_UserState.path_type==OPT.path_type).first()
|
||||
UpdatePref( pref, OPT )
|
||||
|
||||
return entries
|
||||
|
||||
@app.route("/change_file_opts", methods=["POST"])
|
||||
@@ -406,10 +551,10 @@ def files_ip():
|
||||
# now we have reset the offset, etc. into the prefs, we can use a GET and this will be back/forward browser button safe
|
||||
if request.method=='POST':
|
||||
redirect("/files_ip")
|
||||
entries=GetEntries( OPT )
|
||||
people = Person.query.all()
|
||||
move_paths = MovePathDetails()
|
||||
return render_template("files.html", page_title=f"View Files ({OPT.path_type} Path)", entry_data=entries, OPT=OPT, people=people, move_paths=move_paths )
|
||||
query_data = GetQueryData( OPT )
|
||||
return render_template("files.html", page_title=f"View Files ({OPT.path_type} Path)", OPT=OPT, people=people, move_paths=move_paths, query_data=query_data )
|
||||
|
||||
################################################################################
|
||||
# /files -> show thumbnail view of files from storage_path
|
||||
@@ -421,10 +566,10 @@ def files_sp():
|
||||
# now we have reset the offset, etc. into the prefs, we can use a GET and this will be back/forward browser button safe
|
||||
if request.method=='POST':
|
||||
redirect("/files_sp")
|
||||
entries=GetEntries( OPT )
|
||||
people = Person.query.all()
|
||||
move_paths = MovePathDetails()
|
||||
return render_template("files.html", page_title=f"View Files ({OPT.path_type} Path)", entry_data=entries, OPT=OPT, people=people, move_paths=move_paths )
|
||||
query_data = GetQueryData( OPT )
|
||||
return render_template("files.html", page_title=f"View Files ({OPT.path_type} Path)", OPT=OPT, people=people, move_paths=move_paths, query_data=query_data )
|
||||
|
||||
|
||||
################################################################################
|
||||
|
||||
@@ -1,3 +1,9 @@
|
||||
// GLOBAL ICON array
|
||||
ICON={}
|
||||
ICON["Import"]="import"
|
||||
ICON["Storage"]="db"
|
||||
ICON["Bin"]="trash"
|
||||
|
||||
// grab all selected thumbnails and return a <div> containing the thumbnails
|
||||
// with extra yr and date attached as attributes so we can set the default
|
||||
// dir name for a move directory - not used in del, but no harm to include them
|
||||
@@ -316,3 +322,309 @@ function NoSel() {
|
||||
else
|
||||
return true
|
||||
}
|
||||
|
||||
/**
 * Renders a group header (Day/Week/Month) and/or the figure for one entry,
 * then appends the generated HTML to the #figures container.
 * @param {Object} obj - The object containing file/directory details.
 * @param {Object} last - Tracks the last printed group (e.g., { printed: null }).
 * @param {Object} ecnt - Entry counter (e.g., { val: 0 }).
 * @returns {undefined} - HTML is appended as a side effect.
 */
function addFigure( obj, last, ecnt)
{
    let html = "";
    const fd = obj.file_details;

    // Grouping logic: emit a header row whenever the group value changes
    if (OPT.grouping === "Day") {
        if (last.printed !== fd.day) {
            html += `<div class="row ps-3"><h6>Day: ${fd.day} of ${fd.month}/${fd.year}</h6></div>`;
            last.printed = fd.day;
        }
    } else if (OPT.grouping === "Week") {
        if (last.printed !== fd.woy) {
            html += `<div class="row ps-3"><h6>Week #: ${fd.woy} of ${fd.year}</h6></div>`;
            last.printed = fd.woy;
        }
    } else if (OPT.grouping === "Month") {
        if (last.printed !== fd.month) {
            html += `<div class="row ps-3"><h6>Month: ${fd.month} of ${fd.year}</h6></div>`;
            last.printed = fd.month;
        }
    }
    /*
    {% if not entry_data %}
    <span class="alert alert-danger p-2 col-auto"> No matches for: '{{search_term}}'</span>
    {% endif %}
    */

    const typeName = obj.type.name;

    // Image/Video/Unknown entry
    if (typeName === "Image" || typeName === "Video" || typeName === "Unknown") {
        if (!OPT.folders || isTopLevelFolder(obj.in_dir.in_path.path_prefix + '/' + obj.in_dir.rel_path + '/' + obj.name, OPT.cwd)) {
            const inDir = `${obj.in_dir.in_path.path_prefix}/${obj.in_dir.rel_path}`;
            const yr = fd.year;
            // date attr is yyyymmdd (zero padded); pretty_date is d/m/yyyy
            const date = `${yr}${String(fd.month).padStart(2, '0')}${String(fd.day).padStart(2, '0')}`;
            const prettyDate = `${fd.day}/${fd.month}/${fd.year}`;

            html += `
                <figure id="${obj.id}" ecnt="${ecnt}" class="col col-auto g-0 figure entry m-1"
                    path_type="${obj.in_dir.in_path.type.name}" size="${fd.size_mb}" hash="${fd.hash}" in_dir="${inDir}"
                    fname="${obj.name}" yr="${yr}" date="${date}" pretty_date="${prettyDate}" type="${typeName}">
                    ${renderMedia(obj)}
                </figure>
            `;
        }
    }
    // Directory entry
    else if (typeName === "Directory" && OPT.folders) {
        const dirname = obj.dir_details.rel_path.length
            ? `${obj.dir_details.in_path.path_prefix}/${obj.dir_details.rel_path}`
            : obj.dir_details.in_path.path_prefix;

        if (isTopLevelFolder(dirname, OPT.cwd)) {
            html += `
                <figure class="col col-auto g-0 dir entry m-1" id="${obj.id}" ecnt="${ecnt}" dir="${dirname}" type="Directory">
                    <svg class="svg" width="${OPT.size - 22}" height="${OPT.size - 22}" fill="currentColor">
                        <use xlink:href="/internal/icons.svg#Directory"></use>
                    </svg>
                    <figcaption class="svg_cap figure-caption text-center text-wrap text-break">${obj.name}</figcaption>
                </figure>
            `;
            // size the caption to the rendered icon width once the figure exists
            html += `<script>f=$('#${obj.id}'); w=f.find('svg').width(); f.find('figcaption').width(w);</script>`;
        }
    }

    $('#figures').append( html )
    return
}
|
||||
|
||||
// Helper function to render media (image/video/unknown): builds the
// thumbnail (or fallback icon), the hidden throbber, and any overlay icons.
function renderMedia(obj) {
    const typeName = obj.type.name;
    const isVideo = typeName === "Video";
    const isImageOrUnknown = typeName === "Image" || typeName === "Unknown";
    const path = `${obj.in_dir.in_path.path_prefix}/${obj.in_dir.rel_path}/${obj.name}`;

    // thumbnail when we have one, otherwise the generic unknown-file icon
    const thumb = obj.file_details.thumbnail
        ? `<a href="${path}"><img alt="${obj.name}" class="thumb" height="${OPT.size}" src="data:image/jpeg;base64,${obj.file_details.thumbnail}"></a>`
        : `<a href="${path}"><svg width="${OPT.size}" height="${OPT.size}" fill="white"><use xlink:href="/internal/icons.svg#unknown_ftype"/></svg></a>`;

    // small 16x16 overlay icon at the given absolute position
    const overlay = (pos, icon) => `
        <div style="position:absolute; ${pos}">
            <svg width="16" height="16" fill="white"><use xlink:href="/internal/icons.svg#${icon}"/></svg>
        </div>
    `;

    let mediaHtml = `<div style="position:relative; width:100%">${thumb}`;

    if (isImageOrUnknown) {
        // search results also show where the file lives (bottom-left icon)
        if (OPT.search_term) {
            mediaHtml += overlay("bottom: 0px; left: 2px;", getLocationIcon(obj));
        }
        // hidden throbber shown while a full-size view loads
        mediaHtml += `
            <div id="s${obj.id}" style="display:none; position:absolute; top: 50%; left:50%; transform:translate(-50%, -50%);">
                <img height="64px" src="/internal/throbber.gif">
            </div>
        `;
    } else if (isVideo) {
        // film strip badge marks videos (top-left)
        mediaHtml += overlay("top: 0px; left: 2px;", "film");
        if (OPT.search_term) {
            mediaHtml += overlay("bottom: 0px; left: 2px;", getLocationIcon(obj));
        }
    }

    mediaHtml += `</div>`;
    return mediaHtml;
}
|
||||
|
||||
// Helper: Check if path is a top-level folder of cwd
// NOTE(review): still a stub - always answers true until the real check exists
function isTopLevelFolder(path, cwd) {
    return true; // Placeholder
}
|
||||
|
||||
// Helper: map the entry's path type (Import/Storage/Bin) to its icon id
function getLocationIcon(obj) {
    const pathTypeName = obj.in_dir.in_path.type.name;
    return ICON[pathTypeName];
}
|
||||
|
||||
// POST to get entry ids, and then getPage for a specified directory.
// dir_id: entry id of the directory; back: true when navigating "up" a level.
function getDirEntries(dir_id, back)
{
    data = {}
    data.dir_id = dir_id
    data.back = back

    $.ajax({
        type: 'POST',
        url: '/get_dir_entries',
        data: JSON.stringify(data),        // Stringify the data
        contentType: 'application/json',   // Set content type
        dataType: 'json',                  // Expect JSON response
        success: function(res) {
            document.entries = res
            // remember where the back button should lead next.
            // BUGFIX: guard res.length - going back into an empty directory
            // used to throw TypeError on res[0]
            if (back && res.length)
                document.back_id = res[0].in_dir.eid
            drawPageOfFigures()
        },
        error: function(xhr, status, error) {
            console.error("Error:", error);
        }
    });
}
|
||||
|
||||
// this function draws all the figures from document.entries - called when we
// change pages, but also when we change say grouping/other OPTs
function drawPageOfFigures()
{
    $('#figures').empty()
    var last = { printed: null }
    var ecnt = 0

    if( OPT.folders )
    {
        // at the root dir there is nowhere to go back to, so gray out the icon
        const atRoot = document.entries.length && document.entries[0].in_dir.rel_path == ''
        gray = atRoot ? "_gray" : ""
        back = atRoot ? "" : "Back"
        cl   = atRoot ? "" : "back"

        // back button, if gray/back decide if we see grayed out folder and/or the name of the folder we go back to
        html = `<div class="col col-auto g-0 m-1">
            <figure id="${document.back_id}" ecnt="0" class="${cl} entry m-1" type="Directory">
                <svg class="svg" width="${OPT.size-22}" height="${OPT.size-22}">
                    <use xlink:href="internal/icons.svg#folder_back${gray}"/>
                </svg>
                <figcaption class="figure-caption text-center">${back}</figcaption>
            </figure>
        </div>`
        ecnt++
        /*
        <script>f=$('#_back'); w=f.find('svg').width(); f.find('figcaption').width(w);</script>
        */
        $('#figures').append(html)
    }

    // one figure per entry, with group headers handled inside addFigure
    for (const obj of document.entries) {
        addFigure( obj, last, ecnt )
        ecnt++
    }

    // single click toggles selection; double click opens the viewer
    $('.figure').click( function(e) { DoSel(e, this ); SetButtonState(); return false; });
    $('.figure').dblclick( CallViewRouteWrapper )
    // for dir, getDirEntries 2nd param is back (or "up" a dir)
    $(".dir").click( function(e) { document.back_id=this.id; getDirEntries(this.id,false) } )
    $(".back").click( function(e) { getDirEntries(this.id,true) } )
}
|
||||
|
||||
// Function to get the 'page' of entry ids out of entryList (1-based page
// number); updates the global pageList, then fetches those entries' details
// from the server and redraws the figures.
function getPage(pageNumber)
{
    const startIndex = (pageNumber - 1) * OPT.howMany;
    pageList = entryList.slice(startIndex, startIndex + OPT.howMany);

    // set up data to send to server to get the entry data for entries in pageList
    data = {}
    data.ids = pageList
    data.query = 99999

    $.ajax({
        type: 'POST',
        url: '/get_entries_by_ids',
        data: JSON.stringify(data),        // Stringify the data
        contentType: 'application/json',   // Set content type
        dataType: 'json',                  // Expect JSON response
        success: function(res) {
            document.entries = res
            drawPageOfFigures()
        },
        error: function(xhr, status, error) {
            console.error("Error:", error);
        }
    });

    return
}
|
||||
|
||||
// Quick Function to check if we are on the first page (page numbers are 1-based)
function isFirstPage(pageNumber)
{
    return !(pageNumber > 1);
}
|
||||
|
||||
// Function to check if we are on the last page (derived from the global
// entryList and the page size OPT.howMany)
function isLastPage(pageNumber)
{
    const pageCount = Math.ceil(entryList.length / OPT.howMany);
    return pageNumber >= pageCount;
}
|
||||
|
||||
// given an id in the list, return which page we are on (page 1 is first page);
// returns -1 when the id is not in entryList (should be impossible, but jic)
function getPageNumberForId(id) {
    const pos = entryList.indexOf(id);
    return pos === -1 ? -1 : Math.floor(pos / OPT.howMany) + 1;
}
|
||||
|
||||
// if we are on first page, disable prev, if not ensure prev is enabled
// if we are on last page, disable next, if not ensure next is enabled
function resetNextPrevButtons()
{
    const page = getPageNumberForId(pageList[0]);

    // toggle one button group on/off
    const setDisabled = function(sel, off) {
        $(sel).prop('disabled', off)[off ? 'addClass' : 'removeClass']('disabled');
    };

    setDisabled('.prev', isFirstPage(page));
    setDisabled('.next', isLastPage(page));
}
|
||||
|
||||
// get list of eids for the next page, also make sure next/prev buttons make sense for page we are on
function nextPage()
{
    // pageList[0] is the first entry on this page
    const currentPage = getPageNumberForId( pageList[0] )
    // should never happen / just return pageList unchanged
    if ( currentPage === -1 || isLastPage( currentPage ) )
    {
        // BUGFIX: the log referenced an undefined variable (firstEntryOnPage),
        // so the warning itself threw a ReferenceError; log the real id instead
        console.log( "WARNING: seems first on pg=" + pageList[0] + " of how many=" + OPT.howMany + " gives currentPage=" + currentPage + " and we cant go next page?" )
        return
    }
    getPage( currentPage+1 )
    resetNextPrevButtons()
    return
}
|
||||
|
||||
// get list of eids for the prev page, also make sure next/prev buttons make sense for page we are on
function prevPage()
{
    // pageList[0] is the first entry on this page
    const currentPage = getPageNumberForId( pageList[0] )
    // should never happen / just return pageList unchanged
    // (uses isFirstPage for consistency with nextPage's isLastPage check)
    if ( isFirstPage( currentPage ) || currentPage === -1 )
    {
        // BUGFIX: the log referenced an undefined variable (firstEntryOnPage),
        // so the warning itself threw a ReferenceError; log the real id instead
        console.log( "WARNING: seems first on pg=" + pageList[0] + " of how many=" + OPT.howMany + " gives currentPage=" + currentPage + " and we cant go prev page?" )
        return
    }
    getPage( currentPage-1 )
    resetNextPrevButtons()
    return
}
|
||||
|
||||
41
query.py
Normal file
41
query.py
Normal file
@@ -0,0 +1,41 @@
|
||||
from flask_login import UserMixin, login_required
|
||||
from main import db
|
||||
#from sqlalchemy import Sequence
|
||||
#from flask import request, redirect, make_response, jsonify
|
||||
#from main import db, app, ma
|
||||
#from sqlalchemy.exc import SQLAlchemyError
|
||||
|
||||
|
||||
# pylint: disable=no-member

################################################################################
# Class describing a stored Query in the database via sqlalchemy
# id is the unique id in the DB
# entry_list / current / q_offset capture the state of a file-browsing query
# so next/prev paging can be reproduced server-side
# (previous comment here described the Person/user model - copy-paste error)
################################################################################
class Query(UserMixin, db.Model):
    __tablename__ = "query"
    id = db.Column(db.Integer, db.Sequence('query_id_seq'), primary_key=True)
    path_type = db.Column(db.String)
    noo = db.Column(db.String)
    grouping = db.Column(db.String)
    q_offset = db.Column(db.Integer)
    folder = db.Column(db.Boolean)
    entry_list = db.Column(db.String)
    root = db.Column(db.String)
    cwd = db.Column(db.String)
    search_term = db.Column(db.String)
    current = db.Column(db.Integer)
    created = db.Column(db.DateTime(timezone=True))

    def __repr__(self):
        """Generic repr listing every instance attribute."""
        # avoid shadowing the builtin `str` (previous version did)
        parts = ", ".join(f"{k}={v!r}" for k, v in self.__dict__.items())
        return f"<{self.__class__.__name__}({parts})>"

    def get_id(self):
        """flask_login identity hook; must return a string id.

        BUGFIX: used to return self.dn (copy-pasted from the user model),
        which does not exist on Query and raised AttributeError.
        """
        return str(self.id)
|
||||
15
states.py
15
states.py
@@ -1,10 +1,12 @@
|
||||
from flask import request, render_template, redirect, url_for
|
||||
from settings import Settings, SettingsIPath, SettingsSPath, SettingsRBPath
|
||||
from flask_login import login_required, current_user
|
||||
from main import db, app, ma
|
||||
from shared import PA
|
||||
from user import PAUser
|
||||
from datetime import datetime
|
||||
from job import SetFELog
|
||||
from shared import SymlinkName
|
||||
import pytz
|
||||
import re
|
||||
|
||||
@@ -58,6 +60,7 @@ class States(PA):
|
||||
self.first_eid=0
|
||||
self.last_eid=0
|
||||
self.num_entries=0
|
||||
self.prefix=None
|
||||
|
||||
# this is any next/prev or noo, grouping, etc. change (so use referrer to work out what to do with this)
|
||||
# because this can happen on a view, or files_up, etc. change this FIRST
|
||||
@@ -273,6 +276,18 @@ class States(PA):
|
||||
self.current = int(request.form['current'])
|
||||
|
||||
last_used=datetime.now(pytz.utc)
|
||||
|
||||
# set the prefix based on path
|
||||
path=None
|
||||
if self.path_type == 'Storage':
|
||||
path = SettingsSPath()
|
||||
elif self.path_type == 'Import':
|
||||
path = SettingsIPath()
|
||||
elif self.path_type == 'Bin':
|
||||
path = SettingsRBPath()
|
||||
if path:
|
||||
self.prefix = SymlinkName(self.path_type,path,path+'/')
|
||||
|
||||
# now save pref
|
||||
if not pref:
|
||||
# insert new pref for this combo (might be a new search or view, or first time for a path)
|
||||
|
||||
309
tables.sql
309
tables.sql
@@ -1,189 +1,196 @@
|
||||
alter database PA set timezone to 'Australia/Victoria';
|
||||
ALTER DATABASE pa SET TIMEZONE TO 'aUSTRALIA/vICTORIA';
|
||||
|
||||
create sequence PA_USER_ID_SEQ;
|
||||
create sequence PA_USER_STATE_ID_SEQ;
|
||||
create sequence FACE_ID_SEQ;
|
||||
create sequence PATH_ID_SEQ;
|
||||
create sequence PATH_TYPE_ID_SEQ;
|
||||
create sequence FILE_ID_SEQ;
|
||||
create sequence FILE_TYPE_ID_SEQ;
|
||||
create sequence JOBEXTRA_ID_SEQ;
|
||||
create sequence JOBLOG_ID_SEQ;
|
||||
create sequence JOB_ID_SEQ;
|
||||
create sequence PERSON_ID_SEQ;
|
||||
create sequence REFIMG_ID_SEQ;
|
||||
create sequence SETTINGS_ID_SEQ;
|
||||
create sequence PA_JOB_MANAGER_ID_SEQ;
|
||||
create sequence PA_JOB_MANAGER_FE_MESSAGE_ID_SEQ;
|
||||
create sequence FACE_OVERRIDE_TYPE_ID_SEQ;
|
||||
create sequence FACE_OVERRIDE_ID_SEQ;
|
||||
CREATE SEQUENCE pa_user_id_seq;
|
||||
CREATE SEQUENCE pa_user_state_id_seq;
|
||||
CREATE SEQUENCE face_id_seq;
|
||||
CREATE SEQUENCE path_id_seq;
|
||||
CREATE SEQUENCE path_type_id_seq;
|
||||
CREATE SEQUENCE file_id_seq;
|
||||
CREATE SEQUENCE file_type_id_seq;
|
||||
CREATE SEQUENCE jobextra_id_seq;
|
||||
CREATE SEQUENCE joblog_id_seq;
|
||||
CREATE SEQUENCE job_id_seq;
|
||||
CREATE SEQUENCE person_id_seq;
|
||||
CREATE SEQUENCE refimg_id_seq;
|
||||
CREATE SEQUENCE settings_id_seq;
|
||||
CREATE SEQUENCE pa_job_manager_id_seq;
|
||||
CREATE SEQUENCE pa_job_manager_fe_message_id_seq;
|
||||
CREATE SEQUENCE face_override_type_id_seq;
|
||||
CREATE SEQUENCE face_override_id_seq;
|
||||
CREATE SEQUENCE query_id_seq;
|
||||
|
||||
-- these are hard-coded at present, not sure I can reflexively find models from API?
|
||||
create table AI_MODEL ( ID integer, NAME varchar(24), DESCRIPTION varchar(80), constraint PK_AI_MODEL primary key(ID) );
|
||||
insert into AI_MODEL values ( 1, 'hog', 'normal' );
|
||||
insert into AI_MODEL values ( 2, 'cnn', 'more accurate / much slower' );
|
||||
CREATE TABLE ai_model ( id INTEGER, name VARCHAR(24), description VARCHAR(80), CONSTRAINT pk_ai_model PRIMARY KEY(id) );
|
||||
INSERT INTO ai_model VALUES ( 1, 'HOG', 'NORMAL' );
|
||||
INSERT INTO ai_model VALUES ( 2, 'CNN', 'MORE ACCURATE / MUCH SLOWER' );
|
||||
|
||||
create table SETTINGS(
|
||||
ID integer,
|
||||
BASE_PATH varchar, IMPORT_PATH varchar, STORAGE_PATH varchar, RECYCLE_BIN_PATH varchar, METADATA_PATH varchar,
|
||||
AUTO_ROTATE Boolean,
|
||||
DEFAULT_REFIMG_MODEL integer, DEFAULT_SCAN_MODEL integer, DEFAULT_THRESHOLD float,
|
||||
FACE_SIZE_LIMIT integer,
|
||||
SCHEDULED_IMPORT_SCAN integer, SCHEDULED_STORAGE_SCAN integer,
|
||||
SCHEDULED_BIN_CLEANUP integer, BIN_CLEANUP_FILE_AGE integer,
|
||||
JOB_ARCHIVE_AGE integer,
|
||||
constraint PK_SETTINGS_ID primary key(ID),
|
||||
constraint FK_DEFAULT_REFIMG_MODEL foreign key (DEFAULT_REFIMG_MODEL) references AI_MODEL(ID),
|
||||
constraint FK_DEFAULT_SCAN_MODEL foreign key (DEFAULT_SCAN_MODEL) references AI_MODEL(ID) );
|
||||
CREATE TABLE settings(
|
||||
id INTEGER,
|
||||
base_path VARCHAR, import_path VARCHAR, storage_path VARCHAR, recycle_bin_path VARCHAR, metadata_path VARCHAR,
|
||||
auto_rotate bOOLEAN,
|
||||
default_refimg_model INTEGER, default_scan_model INTEGER, default_threshold FLOAT,
|
||||
face_size_limit INTEGER,
|
||||
scheduled_import_scan INTEGER, scheduled_storage_scan INTEGER,
|
||||
scheduled_bin_cleanup INTEGER, bin_cleanup_file_age INTEGER,
|
||||
job_archive_age INTEGER,
|
||||
CONSTRAINT pk_settings_id PRIMARY KEY(id),
|
||||
CONSTRAINT fk_default_refimg_model FOREIGN KEY (default_refimg_model) REFERENCES ai_model(id),
|
||||
CONSTRAINT fk_default_scan_model FOREIGN KEY (default_scan_model) REFERENCES ai_model(id) );
|
||||
|
||||
create table PA_USER(
|
||||
ID integer,
|
||||
DN varchar unique,
|
||||
DEFAULT_IMPORT_NOO varchar,
|
||||
DEFAULT_STORAGE_NOO varchar,
|
||||
DEFAULT_SEARCH_NOO varchar,
|
||||
DEFAULT_GROUPING varchar(16),
|
||||
DEFAULT_HOW_MANY integer,
|
||||
DEFAULT_SIZE integer,
|
||||
DEFAULT_IMPORT_FOLDERS Boolean,
|
||||
DEFAULT_STORAGE_FOLDERS Boolean,
|
||||
constraint PK_PA_USER_ID primary key(ID) );
|
||||
CREATE TABLE pa_user(
|
||||
id INTEGER,
|
||||
dn VARCHAR UNIQUE,
|
||||
default_import_noo VARCHAR,
|
||||
default_storage_noo VARCHAR,
|
||||
default_search_noo VARCHAR,
|
||||
default_grouping VARCHAR(16),
|
||||
default_how_many INTEGER,
|
||||
default_size INTEGER,
|
||||
default_import_folders bOOLEAN,
|
||||
default_storage_folders bOOLEAN,
|
||||
CONSTRAINT pk_pa_user_id PRIMARY KEY(id) );
|
||||
|
||||
-- this is totally not 3rd normal form, but when I made it that, it was so complex, it was stupid
|
||||
-- so for the little data here, I'm deliberately doing a redundant data structure
|
||||
create table PA_USER_STATE ( ID integer, PA_USER_DN varchar(128), PATH_TYPE varchar(16),
|
||||
NOO varchar(16), GROUPING varchar(16), HOW_MANY integer, ST_OFFSET integer, SIZE integer, FOLDERS Boolean,
|
||||
ROOT varchar, CWD varchar,
|
||||
ORIG_PTYPE varchar, ORIG_SEARCH_TERM varchar, ORIG_URL varchar,
|
||||
VIEW_EID integer, CURRENT integer, FIRST_EID integer, LAST_EID integer, NUM_ENTRIES integer, LAST_USED timestamptz,
|
||||
constraint FK_PA_USER_DN foreign key (PA_USER_DN) references PA_USER(DN),
|
||||
constraint PK_PA_USER_STATES_ID primary key(ID ) );
|
||||
CREATE TABLE pa_user_state ( id INTEGER, pa_user_dn VARCHAR(128), path_type VARCHAR(16),
|
||||
noo VARCHAR(16), grouping VARCHAR(16), how_many INTEGER, st_offset INTEGER, size INTEGER, folders bOOLEAN,
|
||||
root VARCHAR, cwd VARCHAR,
|
||||
orig_ptype VARCHAR, orig_search_term VARCHAR, orig_url VARCHAR,
|
||||
view_eid INTEGER, current INTEGER, first_eid INTEGER, last_eid INTEGER, num_entries INTEGER, last_used TIMESTAMPTZ,
|
||||
CONSTRAINT fk_pa_user_dn FOREIGN KEY (pa_user_dn) REFERENCES pa_user(dn),
|
||||
CONSTRAINT pk_pa_user_states_id PRIMARY KEY(id ) );
|
||||
|
||||
create table FILE_TYPE ( ID integer, NAME varchar(32) unique, constraint PK_FILE_TYPE_ID primary key(ID) );
|
||||
|
||||
create table PATH_TYPE ( ID integer, NAME varchar(16) unique, constraint PK_PATH_TYPE_ID primary key(ID) );
|
||||
|
||||
create table PATH ( ID integer, TYPE_ID integer, PATH_PREFIX varchar(1024), NUM_FILES integer,
|
||||
constraint PK_PATH_ID primary key(ID),
|
||||
constraint FK_PATH_TYPE_TYPE_ID foreign key (TYPE_ID) references PATH_TYPE(ID) );
|
||||
|
||||
create table ENTRY( ID integer, NAME varchar(128), TYPE_ID integer, EXISTS_ON_FS boolean,
|
||||
constraint PK_ENTRY_ID primary key(ID),
|
||||
constraint FK_FILE_TYPE_TYPE_ID foreign key (TYPE_ID) references FILE_TYPE(ID) );
|
||||
|
||||
create table FILE ( EID integer, SIZE_MB integer, HASH varchar(34), THUMBNAIL varchar, FACES_CREATED_ON float, LAST_HASH_DATE float, LAST_AI_SCAN float, YEAR integer, MONTH integer, DAY integer, WOY integer,
|
||||
constraint PK_FILE_ID primary key(EID),
|
||||
constraint FK_FILE_ENTRY_ID foreign key (EID) references ENTRY(ID) );
|
||||
|
||||
create table DEL_FILE ( FILE_EID integer, ORIG_PATH_PREFIX varchar(1024), constraint PK_DEL_FILE_FILE_EID primary key (FILE_EID),
|
||||
constraint FK_ENTRY_ID foreign key (FILE_EID) references FILE(EID) );
|
||||
|
||||
create table DIR ( EID integer, REL_PATH varchar(256), NUM_FILES integer, LAST_IMPORT_DATE float,
|
||||
constraint PK_DIR_EID primary key(EID),
|
||||
constraint FK_DIR_ENTRY_ID foreign key (EID) references ENTRY(ID) );
|
||||
|
||||
create table PATH_DIR_LINK ( path_id integer, dir_eid integer,
|
||||
constraint PK_PDL_path_id_dir_eid primary key (path_id, dir_eid),
|
||||
constraint FK_PDL_PATH_ID foreign key (PATH_ID) references PATH(ID),
|
||||
constraint FK_PDL_DIR_EID foreign key (DIR_EID) references DIR(EID) );
|
||||
|
||||
create table ENTRY_DIR_LINK ( entry_id integer, dir_eid integer,
|
||||
constraint PK_EDL_entry_id_dir_eid primary key (entry_id, dir_eid),
|
||||
constraint FK_EDL_ENTRY_ID foreign key (ENTRY_ID) references ENTRY(ID),
|
||||
constraint FK_EDL_DIR_EID foreign key (DIR_EID) references DIR(EID) );
|
||||
|
||||
create table PERSON ( ID integer default nextval('PERSON_ID_SEQ'), TAG varchar(48), FIRSTNAME varchar(48), SURNAME varchar(48),
|
||||
constraint PK_PERSON_ID primary key(ID) );
|
||||
alter sequence PERSON_ID_SEQ owned by PERSON.ID;
|
||||
CREATE TABLE query ( id INTEGER, path_type VARCHAR(16), noo VARCHAR(16), grouping VARCHAR(16), q_offset INTEGER,
|
||||
entry_list VARCHAR, folders BOOLEAN, root VARCHAR, cwd VARCHAR, search_term VARCHAR, current INTEGER,
|
||||
created TIMESTAMPTZ,
|
||||
CONSTRAINT pk_query_id PRIMARY KEY(id ) );
|
||||
|
||||
|
||||
create table REFIMG ( ID integer, FNAME varchar(128), FACE bytea, ORIG_W integer, ORIG_H integer,
|
||||
FACE_TOP integer, FACE_RIGHT integer, FACE_BOTTOM integer, FACE_LEFT integer, CREATED_ON float, THUMBNAIL varchar, MODEL_USED integer,
|
||||
constraint PK_REFIMG_ID primary key(ID),
|
||||
constraint FK_REFIMG_MODEL_USED foreign key (MODEL_USED) references AI_MODEL(ID) );
|
||||
alter sequence REFIMG_ID_SEQ owned by REFIMG.ID;
|
||||
CREATE TABLE file_type ( id INTEGER, name VARCHAR(32) UNIQUE, CONSTRAINT pk_file_type_id PRIMARY KEY(id) );
|
||||
|
||||
create table FACE( ID integer, FACE bytea, FACE_TOP integer, FACE_RIGHT integer, FACE_BOTTOM integer, FACE_LEFT integer,
|
||||
W integer, H integer, constraint PK_FACE_ID primary key(ID) );
|
||||
CREATE TABLE path_type ( id INTEGER, name VARCHAR(16) UNIQUE, CONSTRAINT pk_path_type_id PRIMARY KEY(id) );
|
||||
|
||||
create table FACE_FILE_LINK( FACE_ID integer, FILE_EID integer, MODEL_USED integer,
|
||||
constraint PK_FFL_FACE_ID_FILE_ID primary key(FACE_ID, FILE_EID),
|
||||
constraint FK_FFL_FACE_ID foreign key (FACE_ID) references FACE(ID) on delete cascade,
|
||||
constraint FK_FFL_FILE_EID foreign key (FILE_EID) references FILE(EID),
|
||||
constraint FK_FFL_MODEL_USED foreign key (MODEL_USED) references AI_MODEL(ID) );
|
||||
CREATE TABLE path ( id INTEGER, type_id INTEGER, path_prefix VARCHAR(1024), num_files INTEGER,
|
||||
CONSTRAINT pk_path_id PRIMARY KEY(id),
|
||||
CONSTRAINT fk_path_type_type_id FOREIGN KEY (type_id) REFERENCES path_type(id) );
|
||||
|
||||
create table FACE_REFIMG_LINK( FACE_ID integer, REFIMG_ID integer, FACE_DISTANCE float,
|
||||
constraint PK_FRL_FACE_ID_REFIMG_ID primary key(FACE_ID, REFIMG_ID),
|
||||
constraint FK_FRL_FACE_ID foreign key (FACE_ID) references FACE(ID) on delete cascade,
|
||||
constraint FK_FRL_REFIMG_ID foreign key (REFIMG_ID) references REFIMG(ID) );
|
||||
CREATE TABLE entry( id INTEGER, name VARCHAR(128), type_id INTEGER, exists_on_fs BOOLEAN,
|
||||
CONSTRAINT pk_entry_id PRIMARY KEY(id),
|
||||
CONSTRAINT fk_file_type_type_id FOREIGN KEY (type_id) REFERENCES file_type(id) );
|
||||
|
||||
create table FACE_OVERRIDE_TYPE ( ID integer, NAME varchar unique, constraint PK_FACE_OVERRIDE_TYPE_ID primary key(ID) );
|
||||
insert into FACE_OVERRIDE_TYPE values ( (select nextval('FACE_OVERRIDE_TYPE_ID_SEQ')), 'Manual match to existing person' );
|
||||
insert into FACE_OVERRIDE_TYPE values ( (select nextval('FACE_OVERRIDE_TYPE_ID_SEQ')), 'Not a face' );
|
||||
insert into FACE_OVERRIDE_TYPE values ( (select nextval('FACE_OVERRIDE_TYPE_ID_SEQ')), 'Too young' );
|
||||
insert into FACE_OVERRIDE_TYPE values ( (select nextval('FACE_OVERRIDE_TYPE_ID_SEQ')), 'Ignore face' );
|
||||
CREATE TABLE file ( eid INTEGER, size_mb INTEGER, hash VARCHAR(34), thumbnail VARCHAR, faces_created_on FLOAT, last_hash_date FLOAT, last_ai_scan FLOAT, year INTEGER, month INTEGER, day INTEGER, woy INTEGER,
|
||||
CONSTRAINT pk_file_id PRIMARY KEY(eid),
|
||||
CONSTRAINT fk_file_entry_id FOREIGN KEY (eid) REFERENCES entry(id) );
|
||||
|
||||
CREATE TABLE del_file ( file_eid INTEGER, orig_path_prefix VARCHAR(1024), CONSTRAINT pk_del_file_file_eid PRIMARY KEY (file_eid),
|
||||
CONSTRAINT fk_entry_id FOREIGN KEY (file_eid) REFERENCES file(eid) );
|
||||
|
||||
CREATE TABLE dir ( eid INTEGER, rel_path VARCHAR(256), num_files INTEGER, last_import_date FLOAT,
|
||||
CONSTRAINT pk_dir_eid PRIMARY KEY(eid),
|
||||
CONSTRAINT fk_dir_entry_id FOREIGN KEY (eid) REFERENCES entry(id) );
|
||||
|
||||
CREATE TABLE path_dir_link ( PATH_ID INTEGER, DIR_EID INTEGER,
|
||||
CONSTRAINT pk_pdl_PATH_ID_DIR_EID PRIMARY KEY (PATH_ID, DIR_EID),
|
||||
CONSTRAINT fk_pdl_path_id FOREIGN KEY (path_id) REFERENCES path(id),
|
||||
CONSTRAINT fk_pdl_dir_eid FOREIGN KEY (dir_eid) REFERENCES dir(eid) );
|
||||
|
||||
CREATE TABLE entry_dir_link ( ENTRY_ID INTEGER, DIR_EID INTEGER,
|
||||
CONSTRAINT pk_edl_ENTRY_ID_DIR_EID PRIMARY KEY (ENTRY_ID, DIR_EID),
|
||||
CONSTRAINT fk_edl_entry_id FOREIGN KEY (entry_id) REFERENCES entry(id),
|
||||
CONSTRAINT fk_edl_dir_eid FOREIGN KEY (dir_eid) REFERENCES dir(eid) );
|
||||
|
||||
CREATE TABLE person ( id INTEGER DEFAULT NEXTVAL('person_id_seq'), tag VARCHAR(48), firstname VARCHAR(48), surname VARCHAR(48),
|
||||
CONSTRAINT pk_person_id PRIMARY KEY(id) );
|
||||
ALTER SEQUENCE person_id_seq OWNED BY person.id;
|
||||
|
||||
|
||||
CREATE TABLE refimg ( id INTEGER, fname VARCHAR(128), face BYTEA, orig_w INTEGER, orig_h INTEGER,
|
||||
face_top INTEGER, face_right INTEGER, face_bottom INTEGER, face_left INTEGER, created_on FLOAT, thumbnail VARCHAR, model_used INTEGER,
|
||||
CONSTRAINT pk_refimg_id PRIMARY KEY(id),
|
||||
CONSTRAINT fk_refimg_model_used FOREIGN KEY (model_used) REFERENCES ai_model(id) );
|
||||
ALTER SEQUENCE refimg_id_seq OWNED BY refimg.id;
|
||||
|
||||
CREATE TABLE face( id INTEGER, face BYTEA, face_top INTEGER, face_right INTEGER, face_bottom INTEGER, face_left INTEGER,
|
||||
w INTEGER, h INTEGER, CONSTRAINT pk_face_id PRIMARY KEY(id) );
|
||||
|
||||
CREATE TABLE face_file_link( face_id INTEGER, file_eid INTEGER, model_used INTEGER,
|
||||
CONSTRAINT pk_ffl_face_id_file_id PRIMARY KEY(face_id, file_eid),
|
||||
CONSTRAINT fk_ffl_face_id FOREIGN KEY (face_id) REFERENCES face(id) ON DELETE CASCADE,
|
||||
CONSTRAINT fk_ffl_file_eid FOREIGN KEY (file_eid) REFERENCES file(eid),
|
||||
CONSTRAINT fk_ffl_model_used FOREIGN KEY (model_used) REFERENCES ai_model(id) );
|
||||
|
||||
CREATE TABLE face_refimg_link( face_id INTEGER, refimg_id INTEGER, face_distance FLOAT,
|
||||
CONSTRAINT pk_frl_face_id_refimg_id PRIMARY KEY(face_id, refimg_id),
|
||||
CONSTRAINT fk_frl_face_id FOREIGN KEY (face_id) REFERENCES face(id) ON DELETE CASCADE,
|
||||
CONSTRAINT fk_frl_refimg_id FOREIGN KEY (refimg_id) REFERENCES refimg(id) );
|
||||
|
||||
CREATE TABLE face_override_type ( id INTEGER, name VARCHAR UNIQUE, CONSTRAINT pk_face_override_type_id PRIMARY KEY(id) );
|
||||
INSERT INTO face_override_type VALUES ( (SELECT NEXTVAL('face_override_type_id_seq')), 'mANUAL MATCH TO EXISTING PERSON' );
|
||||
INSERT INTO face_override_type VALUES ( (SELECT NEXTVAL('face_override_type_id_seq')), 'nOT A FACE' );
|
||||
INSERT INTO face_override_type VALUES ( (SELECT NEXTVAL('face_override_type_id_seq')), 'tOO YOUNG' );
|
||||
INSERT INTO face_override_type VALUES ( (SELECT NEXTVAL('face_override_type_id_seq')), 'iGNORE FACE' );
|
||||
|
||||
-- keep non-redundant FACE because, when we rebuild data we may have a null FACE_ID, but still want to connect to this override
|
||||
-- from a previous AI pass... (would happen if we delete a file and then reimport/scan it), OR, more likely we change (say) a threshold, etc.
|
||||
-- any reordering of faces, generates new face_ids... (but if the face data was the same, then this override should stand)
|
||||
create table FACE_NO_MATCH_OVERRIDE ( ID integer, FACE_ID integer, TYPE_ID integer,
|
||||
constraint FK_FNMO_FACE_ID foreign key (FACE_ID) references FACE(ID),
|
||||
constraint FK_FNMO_TYPE foreign key (TYPE_ID) references FACE_OVERRIDE_TYPE(ID),
|
||||
constraint PK_FNMO_ID primary key(ID) );
|
||||
CREATE TABLE face_no_match_override ( id INTEGER, face_id INTEGER, type_id INTEGER,
|
||||
CONSTRAINT fk_fnmo_face_id FOREIGN KEY (face_id) REFERENCES face(id),
|
||||
CONSTRAINT fk_fnmo_type FOREIGN KEY (type_id) REFERENCES face_override_type(id),
|
||||
CONSTRAINT pk_fnmo_id PRIMARY KEY(id) );
|
||||
|
||||
-- manual match goes to person not refimg, so on search, etc. we deal with this anomaly (via sql not ORM)
|
||||
create table FACE_FORCE_MATCH_OVERRIDE ( ID integer, FACE_ID integer, PERSON_ID integer, constraint PK_FACE_FORCE_MATCH_OVERRIDE_ID primary key(ID) );
|
||||
CREATE TABLE face_force_match_override ( id INTEGER, face_id INTEGER, person_id INTEGER, CONSTRAINT pk_face_force_match_override_id PRIMARY KEY(id) );
|
||||
|
||||
create table DISCONNECTED_NO_MATCH_OVERRIDE ( FACE bytea, TYPE_ID integer,
|
||||
constraint FK_DNMO_TYPE_ID foreign key (TYPE_ID) references FACE_OVERRIDE_TYPE(ID),
|
||||
constraint PK_DNMO_FACE primary key (FACE) );
|
||||
CREATE TABLE disconnected_no_match_override ( face BYTEA, type_id INTEGER,
|
||||
CONSTRAINT fk_dnmo_type_id FOREIGN KEY (type_id) REFERENCES face_override_type(id),
|
||||
CONSTRAINT pk_dnmo_face PRIMARY KEY (face) );
|
||||
|
||||
create table DISCONNECTED_FORCE_MATCH_OVERRIDE ( FACE bytea, PERSON_ID integer,
|
||||
constraint FK_DFMO_PERSON_ID foreign key (PERSON_ID) references PERSON(ID),
|
||||
constraint PK_DFMO_FACE primary key (FACE) );
|
||||
CREATE TABLE disconnected_force_match_override ( face BYTEA, person_id INTEGER,
|
||||
CONSTRAINT fk_dfmo_person_id FOREIGN KEY (person_id) REFERENCES person(id),
|
||||
CONSTRAINT pk_dfmo_face PRIMARY KEY (face) );
|
||||
|
||||
create table PERSON_REFIMG_LINK ( PERSON_ID integer, REFIMG_ID integer,
|
||||
constraint PK_PRL primary key(PERSON_ID, REFIMG_ID),
|
||||
constraint FK_PRL_PERSON_ID foreign key (PERSON_ID) references PERSON(ID),
|
||||
constraint FK_PRL_REFIMG_ID foreign key (REFIMG_ID) references REFIMG(ID),
|
||||
constraint U_PRL_REFIMG_ID unique(REFIMG_ID) );
|
||||
CREATE TABLE person_refimg_link ( person_id INTEGER, refimg_id INTEGER,
|
||||
CONSTRAINT pk_prl PRIMARY KEY(person_id, refimg_id),
|
||||
CONSTRAINT fk_prl_person_id FOREIGN KEY (person_id) REFERENCES person(id),
|
||||
CONSTRAINT fk_prl_refimg_id FOREIGN KEY (refimg_id) REFERENCES refimg(id),
|
||||
CONSTRAINT u_prl_refimg_id UNIQUE(refimg_id) );
|
||||
|
||||
create table JOB (
|
||||
ID integer, START_TIME timestamptz, LAST_UPDATE timestamptz, NAME varchar(64), STATE varchar(128),
|
||||
NUM_FILES integer, CURRENT_FILE_NUM integer, CURRENT_FILE varchar(256), WAIT_FOR integer, PA_JOB_STATE varchar(48),
|
||||
constraint PK_JOB_ID primary key(ID) );
|
||||
CREATE TABLE job (
|
||||
id INTEGER, start_time TIMESTAMPTZ, last_update TIMESTAMPTZ, name VARCHAR(64), state VARCHAR(128),
|
||||
num_files INTEGER, current_file_num INTEGER, current_file VARCHAR(256), wait_for INTEGER, pa_job_state VARCHAR(48),
|
||||
CONSTRAINT pk_job_id PRIMARY KEY(id) );
|
||||
|
||||
-- used to pass / keep extra values, e.g. num_files for jobs that have sets of files, or out* for adding output from jobs that you want to pass to next job in the chain
|
||||
create table JOBEXTRA ( ID integer, JOB_ID integer, NAME varchar(32), VALUE varchar,
|
||||
constraint PK_JOBEXTRA_ID primary key(ID), constraint FK_JOBEXTRA_JOB_ID foreign key(JOB_ID) references JOB(ID) );
|
||||
CREATE TABLE jobextra ( id INTEGER, job_id INTEGER, name VARCHAR(32), value VARCHAR,
|
||||
CONSTRAINT pk_jobextra_id PRIMARY KEY(id), CONSTRAINT fk_jobextra_job_id FOREIGN KEY(job_id) REFERENCES job(id) );
|
||||
|
||||
create table JOBLOG ( ID integer, JOB_ID integer, LOG_DATE timestamptz, LOG varchar,
|
||||
constraint PK_JL_ID primary key(ID), constraint FK_JL_JOB_ID foreign key(JOB_ID) references JOB(ID) );
|
||||
CREATE TABLE joblog ( id INTEGER, job_id INTEGER, log_date TIMESTAMPTZ, log VARCHAR,
|
||||
CONSTRAINT pk_jl_id PRIMARY KEY(id), CONSTRAINT fk_jl_job_id FOREIGN KEY(job_id) REFERENCES job(id) );
|
||||
|
||||
create table PA_JOB_MANAGER_FE_MESSAGE ( ID integer, JOB_ID integer, LEVEL varchar(16), MESSAGE varchar(8192), PERSISTENT boolean, CANT_CLOSE boolean,
|
||||
constraint PA_JOB_MANAGER_FE_ACKS_ID primary key(ID),
|
||||
constraint FK_PA_JOB_MANAGER_FE_MESSAGE_JOB_ID foreign key(JOB_ID) references JOB(ID) );
|
||||
CREATE TABLE pa_job_manager_fe_message ( id INTEGER, job_id INTEGER, level VARCHAR(16), message VARCHAR(8192), persistent BOOLEAN, cant_close BOOLEAN,
|
||||
CONSTRAINT pa_job_manager_fe_acks_id PRIMARY KEY(id),
|
||||
CONSTRAINT fk_pa_job_manager_fe_message_job_id FOREIGN KEY(job_id) REFERENCES job(id) );
|
||||
|
||||
-- default data for types of paths
|
||||
insert into PATH_TYPE values ( (select nextval('PATH_TYPE_ID_SEQ')), 'Import' );
|
||||
insert into PATH_TYPE values ( (select nextval('PATH_TYPE_ID_SEQ')), 'Storage' );
|
||||
insert into PATH_TYPE values ( (select nextval('PATH_TYPE_ID_SEQ')), 'Bin' );
|
||||
insert into PATH_TYPE values ( (select nextval('PATH_TYPE_ID_SEQ')), 'Metadata' );
|
||||
INSERT INTO path_type VALUES ( (SELECT NEXTVAL('path_type_id_seq')), 'iMPORT' );
|
||||
INSERT INTO path_type VALUES ( (SELECT NEXTVAL('path_type_id_seq')), 'sTORAGE' );
|
||||
INSERT INTO path_type VALUES ( (SELECT NEXTVAL('path_type_id_seq')), 'bIN' );
|
||||
INSERT INTO path_type VALUES ( (SELECT NEXTVAL('path_type_id_seq')), 'mETADATA' );
|
||||
|
||||
-- default data for types of files
|
||||
insert into FILE_TYPE values ( (select nextval('FILE_TYPE_ID_SEQ')), 'Image' );
|
||||
insert into FILE_TYPE values ( (select nextval('FILE_TYPE_ID_SEQ')), 'Video' );
|
||||
insert into FILE_TYPE values ( (select nextval('FILE_TYPE_ID_SEQ')), 'Directory' );
|
||||
insert into FILE_TYPE values ( (select nextval('FILE_TYPE_ID_SEQ')), 'Unknown' );
|
||||
INSERT INTO file_type VALUES ( (SELECT NEXTVAL('file_type_id_seq')), 'iMAGE' );
|
||||
INSERT INTO file_type VALUES ( (SELECT NEXTVAL('file_type_id_seq')), 'vIDEO' );
|
||||
INSERT INTO file_type VALUES ( (SELECT NEXTVAL('file_type_id_seq')), 'dIRECTORY' );
|
||||
INSERT INTO file_type VALUES ( (SELECT NEXTVAL('file_type_id_seq')), 'uNKNOWN' );
|
||||
|
||||
-- fake data only for making testing easier
|
||||
--insert into PERSON values ( (select nextval('PERSON_ID_SEQ')), 'dad', 'Damien', 'De Paoli' );
|
||||
--insert into PERSON values ( (select nextval('PERSON_ID_SEQ')), 'mum', 'Mandy', 'De Paoli' );
|
||||
--insert into PERSON values ( (select nextval('PERSON_ID_SEQ')), 'cam', 'Cameron', 'De Paoli' );
|
||||
--insert into PERSON values ( (select nextval('PERSON_ID_SEQ')), 'mich', 'Michelle', 'De Paoli' );
|
||||
--INSERT INTO person VALUES ( (SELECT NEXTVAL('person_id_seq')), 'dad', 'Damien', 'De Paoli' );
|
||||
--INSERT INTO person VALUES ( (SELECT NEXTVAL('person_id_seq')), 'mum', 'Mandy', 'De Paoli' );
|
||||
--INSERT INTO person VALUES ( (SELECT NEXTVAL('person_id_seq')), 'cam', 'Cameron', 'De Paoli' );
|
||||
--INSERT INTO person VALUES ( (SELECT NEXTVAL('person_id_seq')), 'mich', 'Michelle', 'De Paoli' );
|
||||
-- DEV(ddp):
|
||||
insert into SETTINGS ( id, base_path, import_path, storage_path, recycle_bin_path, metadata_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) values ( (select nextval('SETTINGS_ID_SEQ')), '/home/ddp/src/photoassistant/', 'images_to_process/', 'photos/', '.pa_bin/', '.pa_metadata/', true, 1, 1, '0.55', 43, 1, 1, 7, 30, 3 );
|
||||
INSERT INTO settings ( ID, BASE_PATH, IMPORT_PATH, STORAGE_PATH, RECYCLE_BIN_PATH, METADATA_PATH, AUTO_ROTATE, DEFAULT_REFIMG_MODEL, DEFAULT_SCAN_MODEL, DEFAULT_THRESHOLD, FACE_SIZE_LIMIT, SCHEDULED_IMPORT_SCAN, SCHEDULED_STORAGE_SCAN, SCHEDULED_BIN_CLEANUP, BIN_CLEANUP_FILE_AGE, JOB_ARCHIVE_AGE ) VALUES ( (SELECT NEXTVAL('settings_id_seq')), '/HOME/DDP/SRC/PHOTOASSISTANT/', 'IMAGES_TO_PROCESS/', 'PHOTOS/', '.PA_BIN/', '.PA_METADATA/', TRUE, 1, 1, '0.55', 43, 1, 1, 7, 30, 3 );
|
||||
-- DEV(cam):
|
||||
--insert into SETTINGS ( id, base_path, import_path, storage_path, recycle_bin_path, metadata_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) values ( (select nextval('SETTINGS_ID_SEQ')), 'c:/Users/cam/Desktop/code/python/photoassistant/', 'c:\images_to_process', 'photos/', '.pa_bin/', '.pa_metadata/', true, 1, 1, '0.55', 43, 1, 1, 7, 30, 3 );
|
||||
--INSERT INTO settings ( id, base_path, import_path, storage_path, recycle_bin_path, metadata_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) VALUES ( (select nextval('SETTINGS_ID_SEQ')), 'c:/Users/cam/Desktop/code/python/photoassistant/', 'c:\images_to_process', 'photos/', '.pa_bin/', '.pa_metadata/', TRUE, 1, 1, '0.55', 43, 1, 1, 7, 30, 3 );
|
||||
-- PROD:
|
||||
--insert into SETTINGS ( id, base_path, import_path, storage_path, recycle_bin_path, metadata_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) values ( (select nextval('SETTINGS_ID_SEQ')), '/export/docker/storage/', 'Camera_uploads/', 'photos/', '.pa_bin/', '.pa_metadata/', true, 1, 1, '0.55', 43, 1, 1, 7, 30, 4 );
|
||||
--INSERT INTO settings ( id, base_path, import_path, storage_path, recycle_bin_path, metadata_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) VALUES ( (SELECT NEXTVAL('settings_id_seq')), '/export/docker/storage/', 'Camera_uploads/', 'photos/', '.pa_bin/', '.pa_metadata/', TRUE, 1, 1, '0.55', 43, 1, 1, 7, 30, 4 );
|
||||
|
||||
@@ -16,14 +16,30 @@
|
||||
move_paths.push(p)
|
||||
{% endfor %}
|
||||
|
||||
document.OPT = '{{OPT}}'
|
||||
document.entries = '{{entry_data}}'
|
||||
document.how_many = '{{OPT.how_many}}'
|
||||
document.entries_len = '{{entry_data|length}}'
|
||||
// GLOBALS
|
||||
// OPTions set via GUI, will change if we alter drop-downs, etc. in GUI
|
||||
// TODO: reference these from GUI, so we can finally ditch the form to submit/change them.
|
||||
// BUT -- must handle noo changing with a form/post as it requires a new ordering
|
||||
|
||||
var OPT={}
|
||||
OPT.grouping='{{OPT.grouping}}'
|
||||
OPT.cwd='{{OPT.cwd}}'
|
||||
OPT.root_eid={{query_data.root_eid}}
|
||||
OPT.search_term='{{OPT.orig_search_term}}'
|
||||
OPT.folders="{{OPT.folders}}" === "True"
|
||||
OPT.howMany={{OPT.how_many}}
|
||||
OPT.size={{OPT.size}}
|
||||
|
||||
// this is the list of entry ids for the images for ALL matches for this query
|
||||
var entryList={{query_data.entry_list}}
|
||||
|
||||
// pageList is just those entries shown on this page from the full entryList
|
||||
var pageList=[]
|
||||
// force pageList to set pageList for & render the first page
|
||||
getPage(1)
|
||||
</script>
|
||||
|
||||
<div class="container-fluid">
|
||||
<form id="main_form" method="POST" action="/change_file_opts">
|
||||
<input type="hidden" name="cwd" id="cwd" value="{{OPT.cwd}}">
|
||||
{% if search_term is defined %}
|
||||
<input type="hidden" name="search_term" id="view_term" value="{{search_term}}">
|
||||
@@ -55,7 +71,7 @@
|
||||
{% else %}
|
||||
{{CreateFoldersSelect( OPT.folders )|safe }}
|
||||
<span class="sm-txt my-auto btn btn-outline-info disabled border-top border-bottom">grouped by:</span>
|
||||
{{CreateSelect( "grouping", OPT.grouping, ["None", "Day", "Week", "Month"], "", "rounded-end")|safe }}
|
||||
{{CreateSelect( "grouping", OPT.grouping, ["None", "Day", "Week", "Month"], "OPT.grouping=$('#grouping').val();drawPageOfFigures();return false", "rounded-end")|safe }}
|
||||
{% endif %}
|
||||
</div class="input-group">
|
||||
</div class="col">
|
||||
@@ -68,15 +84,11 @@
|
||||
</script>
|
||||
{% endif %}
|
||||
<div class="col flex-grow-1 my-auto d-flex justify-content-center w-100">
|
||||
<button aria-label="prev" id="prev" name="prev" class="prev sm-txt btn btn-outline-secondary">
|
||||
<button aria-label="prev" id="prev" name="prev" class="prev sm-txt btn btn-outline-secondary disabled" onClick="prevPage()" disabled>
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#prev"/></svg>
|
||||
</button>
|
||||
<span class="sm-txt my-auto"> {{OPT.how_many}} files </span>
|
||||
{% set nxt_disabled="" %}
|
||||
{% if not entry_data or entry_data|length < OPT.how_many|int %}
|
||||
{% set nxt_disabled="disabled" %}
|
||||
{% endif %}
|
||||
<button aria-label="next" id="next" {{nxt_disabled}} name="next" class="next sm-txt btn btn-outline-secondary">
|
||||
<button aria-label="next" id="next" name="next" class="next sm-txt btn btn-outline-secondary" onClick="nextPage()">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#next"/></svg>
|
||||
</button>
|
||||
<button aria-label="move" id="move" disabled name="move" class="sm-txt btn btn-outline-primary ms-4" onClick="MoveDBox(move_paths,'{{url_for('internal', filename='icons.svg')}}'); return false;">
|
||||
@@ -132,16 +144,11 @@
|
||||
</div class="form-row">
|
||||
{% set eids=namespace( str="" ) %}
|
||||
{# gather all the file eids and collect them in case we go gallery mode #}
|
||||
{% for obj in entry_data %}
|
||||
{% if obj.type.name != "Directory" %}
|
||||
{% set eids.str = eids.str + obj.id|string +"," %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
<input name="eids" id="eids" type="hidden" value="{{eids.str}}">
|
||||
</form>
|
||||
<input name="eids" id="eids" type="hidden" value="{{query_data.entry_list}}">
|
||||
</div>
|
||||
{% set ecnt=namespace( val=0 ) %}
|
||||
<div class="row ms-2">
|
||||
<div id="figures" class="row ms-2">
|
||||
<!--
|
||||
{% set last = namespace(printed=0) %}
|
||||
{# rare event of empty folder, still need to show back button #}
|
||||
{% if OPT.folders and entry_data|length == 0 %}
|
||||
@@ -263,29 +270,28 @@
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
-->
|
||||
|
||||
</div>
|
||||
<div class="container-fluid">
|
||||
<form id="nav_form" method="POST" action="/change_file_opts">
|
||||
<input type="hidden" name="cwd" id="cwd" value="{{OPT.cwd}}">
|
||||
<div class="row">
|
||||
<div class="col my-auto d-flex justify-content-center">
|
||||
<button aria-label="prev" id="prev" name="prev" class="prev sm-txt btn btn-outline-secondary">
|
||||
<button aria-label="prev" id="prev" name="prev" class="prev sm-txt btn btn-outline-secondary disabled" onClick="prevPage()" disabled>
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#prev"/></svg>
|
||||
</button>
|
||||
<span class="sm-txt my-auto"> {{OPT.how_many}} files </span>
|
||||
<button aria-label="next" id="next" {{nxt_disabled}} name="next" class="next sm-txt btn btn-outline-secondary">
|
||||
<button aria-label="next" id="next" name="next" class="next sm-txt btn btn-outline-secondary" onClick="nextPage()">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#next"/></svg>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</div class="container">
|
||||
{% endblock main_content %}
|
||||
{% block script_content %}
|
||||
|
||||
<script>
|
||||
|
||||
$('.figure').click( function(e) { DoSel(e, this ); SetButtonState(); return false; });
|
||||
$(document).on('click', function(e) { $('.highlight').removeClass('highlight') ; SetButtonState() });
|
||||
|
||||
function CallViewRouteWrapper()
|
||||
@@ -310,8 +316,6 @@ function CallViewRoute(id)
|
||||
$(s).appendTo('body').submit();
|
||||
}
|
||||
|
||||
$('.figure').dblclick( CallViewRouteWrapper )
|
||||
|
||||
// different context menu on files
|
||||
$.contextMenu({
|
||||
selector: '.entry',
|
||||
|
||||
Reference in New Issue
Block a user