Compare commits
209 Commits
v1.0
...
refactor-e
| Author | SHA1 | Date | |
|---|---|---|---|
| 74647bcdfb | |||
| 0ee55ee73d | |||
| 89e8c5d9f7 | |||
| 76b0745cc3 | |||
| bc2d4384c9 | |||
| d247ecec54 | |||
| bb43cc5623 | |||
| a6edbd184b | |||
| 09e7b8eea7 | |||
| f2ef55a58a | |||
| 4d7f3dfed9 | |||
| 4421da0d1d | |||
| 6eba21e028 | |||
| 9bdd9d5b78 | |||
| d3ae9b788f | |||
| 1396cbab78 | |||
| b7500e5170 | |||
| 392708bad9 | |||
| 90b3fe4c2f | |||
| bd6c9c1fbd | |||
| 56771308a6 | |||
| 905910ecf0 | |||
| a38c54812c | |||
| dc6b831481 | |||
| 8969cd452e | |||
| d65f3b32d3 | |||
| 0b0035d1d2 | |||
| 80ceb7aaed | |||
| 9cf47f4582 | |||
| a683da13cc | |||
| 9ffb704648 | |||
| 27e06a9462 | |||
| 4556afb3bb | |||
| 0eee594206 | |||
| 78b112d050 | |||
| 97e738dc13 | |||
| b61f048dec | |||
| e3f6b416ce | |||
| 0ac0eedef9 | |||
| cb5ff7e985 | |||
| 517b5c6167 | |||
| 16d28bc02e | |||
| da0019ecdc | |||
| e4bf9606b9 | |||
| 3a053bea49 | |||
| 1e421c3f22 | |||
| 346defde8b | |||
| 6419e20d7e | |||
| b51d9e1776 | |||
| fa2197adbe | |||
| 66344e146e | |||
| ee1c9b5494 | |||
| 846bdc4e52 | |||
| 541cbec0de | |||
| 071164d381 | |||
| 1adb1bc7af | |||
| 747f524343 | |||
| 4feae96511 | |||
| e6c9429466 | |||
| 005b5be2b9 | |||
| f369b6d796 | |||
| eb7bb84e09 | |||
| 7f13d78700 | |||
| e5d6ce9b73 | |||
| e0654edd21 | |||
| 24c2922f74 | |||
| 24c3762c61 | |||
| 40f0b5d369 | |||
| 2f5c6ec949 | |||
| 81ebf6fa01 | |||
| 7a4f5ddb17 | |||
| 00e42447fc | |||
| 50f9fbee20 | |||
| 1b9b4c9c4f | |||
| 86761994df | |||
| 4138f392d3 | |||
| 525b823632 | |||
| d019dc7960 | |||
| a15fbd74d5 | |||
| 3c8babb619 | |||
| cd73c16545 | |||
| efaec00127 | |||
| 0777bbe237 | |||
| b0c738fcc1 | |||
| 2b9e0e19a2 | |||
| e526d99389 | |||
| 5a923359bc | |||
| a147308b64 | |||
| 9e943c7e1f | |||
| 87651e80a0 | |||
| 2e952deda0 | |||
| b9b7a24326 | |||
| a7ce8e66b5 | |||
| 6199c042e3 | |||
| c32b99f53e | |||
| 175e43c9bb | |||
| 4bb99ce589 | |||
| 70ca93b14e | |||
| a0e06717ac | |||
| 0851b79e16 | |||
| 8e6342b627 | |||
| 5b6dc1b3e9 | |||
| 59bd2af15e | |||
| 1ca5ca192c | |||
| 5f8c48ac18 | |||
| b67f2d9dcb | |||
| 5842bf2ab8 | |||
| be218c5049 | |||
| a28f016b8a | |||
| d2db7f6184 | |||
| 9ec8195d0a | |||
| 2325dcd22a | |||
| e0b597c58c | |||
| 0895268df2 | |||
| efceef7e57 | |||
| 21059a6235 | |||
| 4d80fa4e7c | |||
| a4526e2b70 | |||
| cf9b0996be | |||
| 57e736a495 | |||
| aaf04e7f9b | |||
| eb9e7f2ce1 | |||
| df7a81df09 | |||
| 4742509d56 | |||
| 391fd52332 | |||
| 1831c49b15 | |||
| fa63e08b59 | |||
| e36fc170f1 | |||
| e3c94bf5eb | |||
| db8cf68d4b | |||
| 1a182d30b5 | |||
| c7c08c1c32 | |||
| 5fa35b90eb | |||
| ae718fe249 | |||
| 48ca6c5e64 | |||
| 6ceb4c4604 | |||
| b7b2eaeb1f | |||
| 0660ca55d7 | |||
| e6e621d6f7 | |||
| 0a49c2a627 | |||
| e460164d83 | |||
| f6ddb66f3f | |||
| 1555cd5ad6 | |||
| 62acd34ae0 | |||
| 5448e0a7a4 | |||
| 70908d60f5 | |||
| 58715491ea | |||
| f5dfb92db1 | |||
| eb819e37d7 | |||
| c6a0352c7d | |||
| f4aeadc019 | |||
| 79437477f7 | |||
| 989fc7c6db | |||
| febdfbbd32 | |||
| 37c0125fbd | |||
| b34b71bf32 | |||
| bd3ee98328 | |||
| 47190a8829 | |||
| f97653c4f1 | |||
| 8157292df4 | |||
| a0b4b80ca9 | |||
| cbfd00fe53 | |||
| 61b3edf5fa | |||
| fdba16c004 | |||
| 9a359a1a4c | |||
| 15cc101bbe | |||
| 9dac54ae97 | |||
| aba96f6c52 | |||
| 0895bc5983 | |||
| 46eb87bc31 | |||
| cbea1f09ab | |||
| 5fa9b5e386 | |||
| 28d4f8d998 | |||
| 29ea2ca833 | |||
| 06ceefda97 | |||
| 974d96d6eb | |||
| 030eb785f3 | |||
| 1277558b0e | |||
| d7f028f3d1 | |||
| d53bffcab9 | |||
| 4885acea80 | |||
| fed78672dc | |||
| bc23c9a3c6 | |||
| ffbedc3bca | |||
| d39f1734a3 | |||
| d86893875d | |||
| 2665ab086e | |||
| 063c75809a | |||
| 8b89d845db | |||
| 6ef12d7636 | |||
| 42eeed7c45 | |||
| 096251f8ce | |||
| 33c86f49fb | |||
| 98d33b4880 | |||
| 448b635d5a | |||
| 53ef671d34 | |||
| 7e25c33f1a | |||
| 4db3d27cbc | |||
| a3350b9316 | |||
| a8ee7b4f89 | |||
| 478a7e5add | |||
| aad0389d77 | |||
| 262efd89d4 | |||
| 9c4da5ec6f | |||
| 3f39f74716 | |||
| 48353f0486 | |||
| 0ed56d6a1a | |||
| 1005acb339 | |||
| 62cfe6305e |
@@ -2,10 +2,7 @@ images_to_process
|
||||
new_img_dir
|
||||
photos
|
||||
reference_images
|
||||
static/Bin/*
|
||||
static/Import/*
|
||||
static/Storage/*
|
||||
static/Metadata/*
|
||||
static
|
||||
.pa_metadata
|
||||
.pa_bin
|
||||
.python
|
||||
|
||||
33
BUGs
@@ -1,30 +1,9 @@
|
||||
### Next: 121
|
||||
BUG-100: I managed to get 2 photos matching mich in the NOT_WORKING photo (probably dif refimgs but same p.tag?)
|
||||
= /photos/2012/20120414-damien/IMG_8467.JPG
|
||||
### Next: 146
|
||||
BUG-140: When db is restarted underneath PA, it crashes job mgr... It should just accept timeouts, and keep trying to reconnect every 2? mins
|
||||
BUG-118: can move files from Bin path, but it leaves the del_file entry for it - need to remove it
|
||||
BUG-117: when search returns files that can be deleted and/or restored, the icon stays as delete and tries to delete!
|
||||
BUG-106: cant add trudy /pat? as refimgs via FaceDBox
|
||||
- seems the cropped trudy face is not sufficient to find a face, how odd...
|
||||
(it came from a face bbox, BUT, I have grown the face seln by 10%?)
|
||||
BUG-117: when search returns files that can be deleted and/or restored, the icon stays as delete and tries to delete!
|
||||
BUG-118: can move files from Bin path, but it leaves the del_file entry for it - need to remove it
|
||||
BUG-119: "Uncaught (in promise) Error: A listener indicated an asynchronous
|
||||
response by returning true, but the message channel closed before a response
|
||||
was received"
|
||||
investigate this (possible I'm calling check_for_jobs and maybe not doing the async right?)
|
||||
|
||||
BUG-120: on tablet:
|
||||
[2023-04-09 12:21:21,214] ERROR in app: Exception on /view/16978 [GET]
|
||||
Traceback (most recent call last):
|
||||
File "/usr/local/lib/python3.10/dist-packages/flask/app.py", line 2525, in wsgi_app
|
||||
response = self.full_dispatch_request()
|
||||
File "/usr/local/lib/python3.10/dist-packages/flask/app.py", line 1822, in full_dispatch_request
|
||||
rv = self.handle_user_exception(e)
|
||||
File "/usr/local/lib/python3.10/dist-packages/flask/app.py", line 1820, in full_dispatch_request
|
||||
rv = self.dispatch_request()
|
||||
File "/usr/local/lib/python3.10/dist-packages/flask/app.py", line 1796, in dispatch_request
|
||||
return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args)
|
||||
File "/code/files.py", line 655, in view
|
||||
OPT=States( request )
|
||||
File "/code/states.py", line 92, in __init__
|
||||
self.url = pref.orig_url
|
||||
AttributeError: 'NoneType' object has no attribute 'orig_url'
|
||||
|
||||
BUG-100: I managed to get 2 photos matching mich in the NOT_WORKING photo (probably dif refimgs but same p.tag?)
|
||||
= /photos/2012/20120414-damien/IMG_8467.JPG
|
||||
|
||||
74
DEEPFACE
Normal file
@@ -0,0 +1,74 @@
|
||||
#
|
||||
# https://pypi.org/project/deepface/
|
||||
#
|
||||
|
||||
from deepface import DeepFace
|
||||
import os
|
||||
import cv2
|
||||
import numpy as np
|
||||
|
||||
# Function to compute embeddings for a given image path
|
||||
def compute_embeddings(image_path, model_name='VGG-Face'):
|
||||
try:
|
||||
# This returns a list of embeddings for each face found in the image
|
||||
embeddings = DeepFace.represent(img_path=image_path, model_name=model_name)
|
||||
return [res['embedding'] for res in embeddings]
|
||||
except Exception as e:
|
||||
print(f"Error processing image {image_path}: {e}")
|
||||
return []
|
||||
|
||||
# Function to find the best matches between reference and target images
|
||||
def find_best_matches(reference_images, target_images, model_name='VGG-Face', metric='cosine'):
|
||||
# Store embeddings for reference images
|
||||
reference_embeddings = {}
|
||||
|
||||
for ref_img in reference_images:
|
||||
ref_embeddings = compute_embeddings(ref_img, model_name=model_name)
|
||||
if ref_embeddings:
|
||||
reference_embeddings[ref_img] = ref_embeddings
|
||||
|
||||
# Store best matches
|
||||
best_matches = []
|
||||
|
||||
# Loop through each target image
|
||||
for target_img in target_images:
|
||||
target_embeddings = compute_embeddings(target_img, model_name=model_name)
|
||||
|
||||
for target_emb in target_embeddings:
|
||||
for ref_img, ref_emb_list in reference_embeddings.items():
|
||||
# Compare each reference embedding with the target embedding
|
||||
for ref_emb in ref_emb_list:
|
||||
# Compute the distance between embeddings using DeepFace's distance functions
|
||||
distance = DeepFace.find_distance(ref_emb, target_emb, distance_metric=metric)
|
||||
|
||||
# Store match details (target image, reference image, and distance)
|
||||
best_matches.append({
|
||||
'target_image': target_img,
|
||||
'reference_image': ref_img,
|
||||
'distance': distance
|
||||
})
|
||||
|
||||
# Sort matches by the distance (smallest distances indicate best matches)
|
||||
best_matches = sorted(best_matches, key=lambda x: x['distance'])
|
||||
|
||||
return best_matches
|
||||
|
||||
# Example usage
|
||||
reference_images = [
|
||||
"path_to_reference_image1.jpg",
|
||||
"path_to_reference_image2.jpg"
|
||||
]
|
||||
|
||||
target_images = [
|
||||
"path_to_target_image1.jpg",
|
||||
"path_to_target_image2.jpg",
|
||||
# Add more target images here...
|
||||
]
|
||||
|
||||
# Find the best matches
|
||||
matches = find_best_matches(reference_images, target_images, model_name='VGG-Face', metric='cosine')
|
||||
|
||||
# Display the best match
|
||||
for match in matches[:5]: # Display top 5 matches
|
||||
print(f"Reference Image: {match['reference_image']}, Target Image: {match['target_image']}, Distance: {match['distance']}")
|
||||
|
||||
54
Dockerfile
@@ -1,35 +1,27 @@
|
||||
FROM ubuntu:22.04
|
||||
FROM python:latest
|
||||
|
||||
# declare var
|
||||
ARG PA_ID
|
||||
|
||||
WORKDIR /code
|
||||
USER root
|
||||
ENV PJM_UID=500
|
||||
ENV PJM_GID=500
|
||||
RUN groupadd -g ${PJM_GID} mythtv && useradd -r -u ${PJM_UID} -g ${PJM_GID} mythtv
|
||||
ENV TZ=Australia/Melbourne
|
||||
RUN truncate -s0 /tmp/preseed.cfg && \
|
||||
(echo "tzdata tzdata/Areas select Australia" >> /tmp/preseed.cfg) && \
|
||||
(echo "tzdata tzdata/Zones/Australia select Melbourne" >> /tmp/preseed.cfg) && \
|
||||
debconf-set-selections /tmp/preseed.cfg && \
|
||||
rm -f /etc/timezone /etc/localtime && \
|
||||
apt-get update && \
|
||||
DEBIAN_FRONTEND=noninteractive DEBCONF_NONINTERACTIVE_SEEN=true \
|
||||
apt-get install -y tzdata
|
||||
RUN apt-get update && apt-get -y dist-upgrade && apt-get -y install python3-pip libpq-dev gunicorn mediainfo cmake libgl1-mesa-glx libglib2.0-0 python3-ldap libjpeg-turbo-progs ffmpeg git
|
||||
COPY requirements.txt requirements.txt
|
||||
RUN pip3 install -r requirements.txt
|
||||
RUN pip3 install --upgrade pillow --user
|
||||
EXPOSE 443
|
||||
EXPOSE 55432
|
||||
# do this first, allowing layer / build efficiencies if only code changes...
|
||||
# sudo used in dev container
|
||||
RUN apt-get update && apt-get -y install libpq-dev mediainfo cmake libglib2.0-0 libjpeg-turbo-progs ffmpeg git sudo
|
||||
COPY requirements.txt .
|
||||
RUN pip3 install --upgrade pip && pip3 install -r requirements.txt && pip3 install --upgrade pillow
|
||||
|
||||
RUN groupadd -g ${PA_ID} pauser && useradd -r -u ${PA_ID} -g ${PA_ID} pauser
|
||||
|
||||
# NOW, deal with any code changes, and reset the container for 'first run'...
|
||||
COPY . .
|
||||
RUN echo $RANDOM | md5sum | head -c 30 > /code/.sk
|
||||
RUN chmod 600 .sk
|
||||
RUN date > internal/build-date.txt
|
||||
RUN git log -n 15 > internal/git-log.txt
|
||||
RUN ln -s /code/TODO /code/internal/TODO
|
||||
RUN ln -s /code/BUGs /code/internal/BUGs
|
||||
|
||||
# set up static dir and subdirs
|
||||
RUN mkdir -p static/Bin && mkdir -p static/Import && mkdir -p static/Storage && mkdir -p static/Metadata
|
||||
|
||||
EXPOSE 80
|
||||
RUN cat /dev/urandom | head -c 50 | md5sum | head -c 32 > /code/.sk && chmod 600 .sk && chown pauser:pauser .sk
|
||||
RUN TZ="Australia/Melbourne" date > internal/build-date.txt && git log -n 15 > internal/git-log.txt && ln -s /code/TODO /code/internal/TODO && ln -s /code/BUGs /code/internal/BUGs
|
||||
RUN rm -rf .git
|
||||
RUN chown mythtv:mythtv /code
|
||||
RUN chown mythtv:mythtv ./static
|
||||
RUN chown mythtv:mythtv ./static/*
|
||||
## cleanup of files from setup
|
||||
RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
RUN chown pauser:pauser /code && chown pauser:pauser ./static && chown pauser:pauser ./static/*
|
||||
|
||||
CMD ["./wrapper.sh"]
|
||||
|
||||
37
README
@@ -3,34 +3,22 @@ In here we can put instructions on how to run this / any general info
|
||||
to edit src:
|
||||
|
||||
git....
|
||||
CAM: fill this in pls
|
||||
cd ~/src ; # or wherever you want the code to live
|
||||
git clone https://git.depaoli.id.au/ddp/photoassistant.git
|
||||
|
||||
|
||||
ubuntu packages:
|
||||
sudo apt-get install -y mediainfo cmake python3-flask
|
||||
sudo apt-get install -y cmake libpq-dev
|
||||
|
||||
|
||||
REDO THIS SOON (good to test on a clean install):
|
||||
AFTER a fresh ubuntu, need to remove/make a new .python env and get pip-based python packages installed into .python:
|
||||
cd ~/src/photoassistant
|
||||
rm .python
|
||||
# use a venv (to get consistent python versions with Prod container)
|
||||
python3 -m venv .python
|
||||
source ./.python/bin/activate
|
||||
pip3 install -r requirements.txt
|
||||
|
||||
|
||||
pip packages:
|
||||
* pymediainfo
|
||||
* PIL (should be there by default)
|
||||
- on ubuntu, need to do: pip3 install --upgrade pillow --user
|
||||
* ExifRead
|
||||
* opencv-python
|
||||
* datetime
|
||||
* pytz
|
||||
* face_recognition
|
||||
* flask_login
|
||||
* flask-ldap3-login
|
||||
#### dlib (might need to install this before face_recognitioin, but it might not be needed, cmake clearly was)
|
||||
|
||||
|
||||
upstream packages...
|
||||
mkdir static/upstream
|
||||
cd static/upstream
|
||||
@@ -60,14 +48,11 @@ pip3 list | tail -n +3 | grep -v mysqlclient | grep -v duplicity | grep -v gp
|
||||
|
||||
To run debug version of web server (gunicorn deprecates FLASK_ENV, so co-exist for now):
|
||||
|
||||
# older flask:
|
||||
FLASK_APP=main ENV=development FLASK_ENV=development flask run --host=192.168.0.2
|
||||
|
||||
# flask 2.2.2+ (in venv .python)
|
||||
FLASK_APP=main ENV=development ./.python/bin/flask --debug run --host=192.168.0.2
|
||||
FLASK_APP=main ENV=development ./.python/bin/flask --debug run --host=mara.ddp.net
|
||||
|
||||
to run prod version of web server:
|
||||
gunicorn --bind="192.168.0.2:5000" --threads=2 --workers=2 main:app
|
||||
gunicorn --bind="mara.ddp.net:5000" --threads=2 --workers=2 main:app
|
||||
|
||||
Also have to run the job manager for jobs to work:
|
||||
ENV="development" python3 pa_job_manager.py
|
||||
@@ -110,13 +95,13 @@ To get back a 'working' but scanned set of data:
|
||||
# pg_dump --user=pa -a -t person -t refimg -t person_refimg_link > /docker-entrypoint-initdb.d/users.sql
|
||||
|
||||
# export all content so we can upgrade versions of postgres
|
||||
sudo docker exec -it padb bash
|
||||
docker exec -it padb bash
|
||||
# pg_dump --user=pa pa > /docker-entrypoint-initdb.d/bkup.sql
|
||||
### check sql looks good
|
||||
sudo mv /srv/docker/container/padb/docker-entrypoint-initdb.d/bkup.sql /srv/docker/container/padb/docker-entrypoint-initdb.d/tables.sql
|
||||
sudo rm /srv/docker/container/padb/docker-entrypoint-initdb.d/users.sql
|
||||
sudo docker-compose -f /srv/docker/config/docker-compose.yaml build padb
|
||||
( cd /srv/docker/config/ ; sudo docker-compose stop padb ; yes | sudo docker-compose rm padb ; sudo rm -rf /srv/docker/container/padb/data/ ; sudo docker-compose up -d padb ; sudo docker-compose restart paweb )
|
||||
docker-compose -f /srv/docker/config/docker-compose.yaml build padb
|
||||
( cd /srv/docker/config/ ; docker-compose stop padb ; yes | docker-compose rm padb ; sudo rm -rf /srv/docker/container/padb/data/ ; docker-compose up -d padb ; docker-compose restart paweb )
|
||||
|
||||
|
||||
HANDY SQLs/commands:
|
||||
|
||||
232
TODO
@@ -1,21 +1,42 @@
|
||||
### GENERAL
|
||||
* change the rotation code to use that jpeg util to reduce/remove compression loss?
|
||||
* jobs for AI should show path name
|
||||
* rm dups job should show progress bar
|
||||
* in viewer, there is no move button (maybe add one?)
|
||||
* think I killed pa_job_manager without passing an eid to a transform job, shouldn't crash
|
||||
- SHOULD JUST get AI to help clean-up and write defensive code here...
|
||||
* consider doing duplicates before AI, and if there are say 100s+, then maybe pause the AI work
|
||||
- had 5000+ new photos, took 8 hours to finish, for me to just delete them anyway
|
||||
* consider how to better version jscript - across all html files, consistently
|
||||
- mtime, didnt work anyway, my phone still wont pick up the change, it was adding any ?v= changed this (once)
|
||||
* optimisation:
|
||||
- keep track of just new files since scan (even if we did this from the DB),
|
||||
then we could just feed those eid's explicitly into a 'get_file_details_on_new_files'.
|
||||
- ALSO use new_eids list IF no new refimgs to do a 'run_ai_on_new_files' :)
|
||||
|
||||
* allow changing dates in move dbox and then re-scan for existing folders OR just have a browse for existing...
|
||||
- for use on scanned photos, they register as 2010, but are datestamped in visuals for 95
|
||||
(is there a library for this???)
|
||||
|
||||
* sqlalchemy 2 migration:
|
||||
* fix unmapped (in fact make all the code properly sqlachemy 2.0 compliant)
|
||||
-- path.py has the __allow_unmapped__ = True
|
||||
* remove all '.execute' from *.py
|
||||
- get AI to help
|
||||
|
||||
* allow actions for wrong person:
|
||||
-> someone else? OR override no match for this person ever for this image?
|
||||
* groups of persons (non-exclusive, recursive), so:
|
||||
- fam (ddp, mandy, cam, mich)
|
||||
- mandy_fam (mandy, kirsty, tony, anne, tenille)
|
||||
- uncle_peters_fam (peter, joan, jarrod, aaron, alana)
|
||||
- cousin_aarons_fam (aaron, karly, kai, etc)
|
||||
- storviks (mandy_fam, uncle_peters_fam, cousin_aarons_fam, etc)
|
||||
|
||||
* birthdates in PA:
|
||||
- M was interested in birthdate appearing in hass, so need an API /
|
||||
sensor in hass too - maybe could store (optional) birthdate and
|
||||
heads up (of X days, e.g. so can get present) or just come up on the day so you can remember to text them
|
||||
* search logic (AND vs OR)
|
||||
|
||||
* read this: https://flask.palletsprojects.com/en/2.2.x/testing/#faking-resources-and-context
|
||||
|
||||
* could get better AI optim, by keeping track of just new files since scan (even if we did this from the DB),
|
||||
then we could just feed those eid's explicitly into a 'run_ai_on_new_files' :) -- maybe particularly
|
||||
if count('new files') < say 1000 do eids, otherwise do path AND no new refimgs
|
||||
|
||||
* ignore face should ignore ALL matching faces (re: Declan)
|
||||
|
||||
* should allow context menu from View thumbs (particularly useful on search) to show other files around this one by date (maybe that folder or something?)
|
||||
@@ -26,10 +47,6 @@
|
||||
- rename (does this work already somehow? see issue below)
|
||||
- dont allow me to stupidly move a folder to itself
|
||||
|
||||
* browser back/forward buttons dont work -- use POST -> redirect to GET
|
||||
- need some sort of clean-up of pa_user_state -- I spose its triggered by browser session, so maybe just after a week is lazy/good enough
|
||||
-- pa_user_state has last_used as a timestamp so can be used to delete old entries
|
||||
|
||||
* back button will fail if we do these POSTs:
|
||||
job.py:@app.route("/jobs", methods=["GET", "POST"])
|
||||
job.py:@app.route("/job/<id>", methods=["GET","POST"])
|
||||
@@ -37,10 +54,8 @@
|
||||
* if on jobs page and jobs increase, then 'rebuild' the content of the page to show new jobs, and potentially do that every 5 seconds...
|
||||
- THINK: could also 'refresh' /job/id via Ajax not a reload, to avoid the POST issue above needing to remember job prefs somewhere?
|
||||
|
||||
files.py:@app.route("/fix_dups", methods=["POST"])
|
||||
???
|
||||
|
||||
* allow user to choose default log level to show
|
||||
* files.py:@app.route("/fix_dups", methods=["POST"])
|
||||
- ???
|
||||
|
||||
* GUI overhaul?
|
||||
* on a phone, the files.html page header is a mess "Oldest.." line is too large to fit on 1 line (make it a hamburger?)
|
||||
@@ -59,9 +74,6 @@
|
||||
* fix up logging in general
|
||||
ProcessFileForJob --> really need to better handle log levels and counting
|
||||
|
||||
* metadata at folder level with file level to add more richness
|
||||
- store in DB? or store in hidden file (or both)... IF it is outside the DB, then I can 'rebuild' the DB at anytime from scratch
|
||||
|
||||
* video player cannot handle non mp4 formats... do I care? (could just offer a download link and hope the client deals with it)
|
||||
--> OR? https://jsmpeg.com/
|
||||
--> OR? convert all videos to mp4/webm
|
||||
@@ -78,7 +90,6 @@
|
||||
* revisit SymlinkName() and make it simpler (see comment in shared.py)
|
||||
|
||||
*** Need to use thread-safe sessions per Thread, half-assed version did not work
|
||||
|
||||
Admin
|
||||
-> do I want to have admin roles/users?
|
||||
-> purge deleted files (and associated DB data) needs a dbox or privs
|
||||
@@ -91,6 +102,8 @@
|
||||
* viewer needs to allow toggle to scan_model (and prob. right-click on file... AI (with CNN) AI (with hog)
|
||||
- make the form-select AI_Model actually do the change (but need more mem on mara really -- even mem is not enough
|
||||
need graphic support --> need to allow pa_job_manager run on borric with acceleration)
|
||||
- test this with new CPU in mara
|
||||
- test this on borric for comparison
|
||||
|
||||
For AI / rescan:
|
||||
way to override per file:
|
||||
@@ -209,3 +222,182 @@ def AddRefimgToPerson(person_id: int, ref_images: List[str]) -> bool:
|
||||
>>> AddRefimgToPerson(123, ['http://example.com/img1.jpg', '/path/to/img2.png'])
|
||||
True
|
||||
"""
|
||||
|
||||
|
||||
|
||||
|
||||
checkout deepface:
|
||||
|
||||
|
||||
User: I have this code I wrote using pythons face_recognition library, but I would like to alter it to work with deepface. My code is: im = face_recognition.load_image_file(e.FullPathOnFS())
|
||||
face_locations = face_recognition.face_locations(im, model=model.name )
|
||||
unknown_encodings = face_recognition.face_encodings(im, known_face_locations=face_locations)
|
||||
|
||||
Assistant: To alter your existing code to work with **DeepFace**, the changes involve replacing the methods of the `face_recognition` library with similar functionalities provided by DeepFace.
|
||||
|
||||
In **DeepFace**, the equivalent functionalities include:
|
||||
|
||||
1. **Detecting faces**: This is automatically handled by the `analyze` method in DeepFace. You don't need to manually specify the face locations.
|
||||
|
||||
2. **Encoding faces**: DeepFace internally computes embeddings for faces through its supported models (like VGG-Face, Facenet, OpenFace, DeepID, etc.).
|
||||
|
||||
Here's how you can modify your code to use DeepFace instead of `face_recognition`:
|
||||
|
||||
### Original Code (using `face_recognition`):
|
||||
```python
|
||||
im = face_recognition.load_image_file(e.FullPathOnFS())
|
||||
face_locations = face_recognition.face_locations(im, model=model.name)
|
||||
unknown_encodings = face_recognition.face_encodings(im, known_face_locations=face_locations)
|
||||
```
|
||||
|
||||
### Modified Code (using `DeepFace`):
|
||||
```python
|
||||
from deepface import DeepFace
|
||||
import cv2
|
||||
|
||||
# Load the image
|
||||
im = cv2.imread(e.FullPathOnFS())
|
||||
|
||||
# Analyze the image to get face embeddings (encodings) and other details
|
||||
results = DeepFace.represent(img_path=e.FullPathOnFS(), model_name=model.name)
|
||||
|
||||
# `results` will contain a list of dictionaries where each dictionary contains the embedding (encoding) of a detected face.
|
||||
# Example of accessing the embeddings (face encodings)
|
||||
unknown_encodings = [res['embedding'] for res in results]
|
||||
```
|
||||
|
||||
### Explanation:
|
||||
|
||||
- `DeepFace.represent()` takes the path to the image (`e.FullPathOnFS()`) and the name of the model (`model.name`). It processes the image and extracts the embeddings (encodings) for each face detected.
|
||||
|
||||
- **Note**: In DeepFace, the model name can be one of several options: `VGG-Face`, `Facenet`, `OpenFace`, `DeepFace`, `DeepID`, etc. Make sure that `model.name` corresponds to one of these supported models.
|
||||
|
||||
- The result is a list of dictionaries. Each dictionary contains a detected face's encoding (embedding), which you can access using the `'embedding'` key.
|
||||
|
||||
DeepFace abstracts away a lot of the manual face detection work that you were handling with `face_recognition`. Let me know if you'd like to handle anything else, like visualizing the detected faces or working with other DeepFace models.
|
||||
|
||||
|
||||
User: I want to use Deepface to take a set of reference images and match against all the images I have to find the best matches, how would you go about that?
|
||||
|
||||
Assistant: To use **DeepFace** to match a set of reference images against other images to find the best matches, you can follow these general steps:
|
||||
|
||||
1. **Compute embeddings (encodings) for the reference images**.
|
||||
2. **Compute embeddings for the target images** (the ones you're searching through).
|
||||
3. **Compare the embeddings** of the reference images with those of the target images to find the best matches based on a distance metric.
|
||||
4. **Rank the matches** by similarity and return the best ones.
|
||||
|
||||
Here's a step-by-step guide on how you can achieve this with **DeepFace**:
|
||||
|
||||
### Step 1: Compute embeddings for reference images
|
||||
You will load the reference images, compute their embeddings using DeepFace, and store them.
|
||||
|
||||
### Step 2: Compute embeddings for all target images
|
||||
Do the same for the target images you want to match against.
|
||||
|
||||
### Step 3: Compare embeddings
|
||||
You will compare the embeddings from the reference images to the embeddings of each target image. The smaller the distance between the embeddings, the more similar the faces are.
|
||||
|
||||
### Step 4: Return the best matches
|
||||
|
||||
### Example Code
|
||||
|
||||
```python
|
||||
from deepface import DeepFace
|
||||
import os
|
||||
import cv2
|
||||
import numpy as np
|
||||
|
||||
# Function to compute embeddings for a given image path
|
||||
def compute_embeddings(image_path, model_name='VGG-Face'):
|
||||
try:
|
||||
# This returns a list of embeddings for each face found in the image
|
||||
embeddings = DeepFace.represent(img_path=image_path, model_name=model_name)
|
||||
return [res['embedding'] for res in embeddings]
|
||||
except Exception as e:
|
||||
print(f"Error processing image {image_path}: {e}")
|
||||
return []
|
||||
|
||||
# Function to find the best matches between reference and target images
|
||||
def find_best_matches(reference_images, target_images, model_name='VGG-Face', metric='cosine'):
|
||||
# Store embeddings for reference images
|
||||
reference_embeddings = {}
|
||||
|
||||
for ref_img in reference_images:
|
||||
ref_embeddings = compute_embeddings(ref_img, model_name=model_name)
|
||||
if ref_embeddings:
|
||||
reference_embeddings[ref_img] = ref_embeddings
|
||||
|
||||
# Store best matches
|
||||
best_matches = []
|
||||
|
||||
# Loop through each target image
|
||||
for target_img in target_images:
|
||||
target_embeddings = compute_embeddings(target_img, model_name=model_name)
|
||||
|
||||
for target_emb in target_embeddings:
|
||||
for ref_img, ref_emb_list in reference_embeddings.items():
|
||||
# Compare each reference embedding with the target embedding
|
||||
for ref_emb in ref_emb_list:
|
||||
# Compute the distance between embeddings using DeepFace's distance functions
|
||||
distance = DeepFace.find_distance(ref_emb, target_emb, distance_metric=metric)
|
||||
|
||||
# Store match details (target image, reference image, and distance)
|
||||
best_matches.append({
|
||||
'target_image': target_img,
|
||||
'reference_image': ref_img,
|
||||
'distance': distance
|
||||
})
|
||||
|
||||
# Sort matches by the distance (smallest distances indicate best matches)
|
||||
best_matches = sorted(best_matches, key=lambda x: x['distance'])
|
||||
|
||||
return best_matches
|
||||
|
||||
# Example usage
|
||||
reference_images = [
|
||||
"path_to_reference_image1.jpg",
|
||||
"path_to_reference_image2.jpg"
|
||||
]
|
||||
|
||||
target_images = [
|
||||
"path_to_target_image1.jpg",
|
||||
"path_to_target_image2.jpg",
|
||||
# Add more target images here...
|
||||
]
|
||||
|
||||
# Find the best matches
|
||||
matches = find_best_matches(reference_images, target_images, model_name='VGG-Face', metric='cosine')
|
||||
|
||||
# Display the best match
|
||||
for match in matches[:5]: # Display top 5 matches
|
||||
print(f"Reference Image: {match['reference_image']}, Target Image: {match['target_image']}, Distance: {match['distance']}")
|
||||
```
|
||||
|
||||
### Explanation:
|
||||
|
||||
1. **`compute_embeddings(image_path, model_name)`**: This function uses DeepFace's `represent` function to compute face embeddings for each face found in the image.
|
||||
|
||||
2. **`find_best_matches()`**:
|
||||
- It first computes and stores embeddings for all reference images in the `reference_embeddings` dictionary.
|
||||
- For each target image, it computes the embeddings, then compares those embeddings with all the reference embeddings using the chosen distance metric (default is `'cosine'`).
|
||||
- The function then stores the matches and sorts them by distance (the smaller the distance, the closer the match).
|
||||
|
||||
3. **Distance metrics**: You can choose between several distance metrics like `'cosine'`, `'euclidean'`, or `'euclidean_l2'`, depending on the model and your preference.
|
||||
|
||||
4. **Matching logic**: It finds the best matches by comparing each face embedding in the target images with each face embedding in the reference images and storing the distances.
|
||||
|
||||
### DeepFace Distance Metrics:
|
||||
|
||||
- **Cosine**: Measures the cosine of the angle between two vectors (common for face embeddings).
|
||||
- **Euclidean**: Measures the straight-line distance between two vectors.
|
||||
- **Euclidean L2**: A variant of Euclidean where embeddings are normalized before distance calculation.
|
||||
|
||||
### Example of top match output:
|
||||
```bash
|
||||
Reference Image: path_to_reference_image1.jpg, Target Image: path_to_target_image1.jpg, Distance: 0.256
|
||||
Reference Image: path_to_reference_image2.jpg, Target Image: path_to_target_image2.jpg, Distance: 0.310
|
||||
...
|
||||
```
|
||||
|
||||
This approach will give you the best matching images based on facial similarity. You can adjust the model used (`VGG-Face`, `Facenet`, etc.) and the distance metric to fine-tune results. Let me know if you'd like to visualize the results or handle larger datasets more efficiently!
|
||||
|
||||
|
||||
65
amend.py
Normal file
@@ -0,0 +1,65 @@
|
||||
from sqlalchemy import select
|
||||
from flask import request, jsonify
|
||||
from flask_login import login_required
|
||||
|
||||
from shared import PA
|
||||
from main import db, app
|
||||
|
||||
################################################################################
|
||||
# Amendments are used to define types of changes being made to an entry (e.g.
|
||||
# rotate, flip) should contain relatively transient content (e.g. we might be
|
||||
# processing a long-running job now, and then add a rotate, the rotate wont
|
||||
# finish for minutes, so these classes allow the UI to handle that gracefully
|
||||
################################################################################
|
||||
|
||||
################################################################################
|
||||
# Class describing AmendmentType in the DB (via sqlalchemy)
|
||||
################################################################################
|
||||
class AmendmentType(PA,db.Model):
|
||||
__tablename__ = "amendment_type"
|
||||
id = db.Column(db.Integer, db.Sequence('file_type_id_seq'), primary_key=True )
|
||||
job_name = db.Column(db.String, nullable=False )
|
||||
which = db.Column(db.String, nullable=False )
|
||||
what = db.Column(db.String, nullable=False )
|
||||
colour = db.Column(db.String, nullable=False )
|
||||
|
||||
################################################################################
|
||||
# Class describing which Entry has a pending Amendment in the DB (via sqlalchemy)
|
||||
################################################################################
|
||||
class EntryAmendment(PA,db.Model):
|
||||
__tablename__ = "entry_amendment"
|
||||
eid = db.Column(db.Integer, db.ForeignKey("entry.id"), primary_key=True )
|
||||
job_id = db.Column(db.Integer, db.ForeignKey("job.id"), primary_key=True )
|
||||
amend_type = db.Column(db.Integer, db.ForeignKey("amendment_type.id"))
|
||||
type = db.relationship("AmendmentType", backref="entry_amendment")
|
||||
job = db.relationship("Job", back_populates="amendments")
|
||||
|
||||
|
||||
################################################################################
|
||||
# check if this job is something we need to log an EntryAmendment for, based on
|
||||
# job name and potentially amt in extras, to find the type of amendment
|
||||
################################################################################
|
||||
def inAmendmentTypes(job):
|
||||
if not hasattr(job, 'extra' ) or not job.extra:
|
||||
return None
|
||||
amt=None
|
||||
for jex in job.extra:
|
||||
if jex.name == "amt":
|
||||
amt=jex.value
|
||||
|
||||
# FIXME: should just cache this once per build, only would change with code updates
|
||||
for at in getAmendments():
|
||||
# for transform_image, amt=flip*, 90/180/270 - so amt will be set, use it, otherwise just use job.name
|
||||
if (amt and f"{job.name}:{amt}" == at.job_name) or (at.job_name == job.name):
|
||||
return at.id
|
||||
return None
|
||||
|
||||
|
||||
################################################################################
|
||||
# Class describing which Entry has a pending Amendment in the DB (via sqlalchemy)
|
||||
################################################################################
|
||||
def getAmendments():
|
||||
# get Amend types (get EAT data once - used in inAmendmentTypes()
|
||||
stmt=select(AmendmentType)
|
||||
eat=db.session.execute(stmt).scalars().all()
|
||||
return eat
|
||||
12
face.py
@@ -28,9 +28,11 @@ class Face(PA,db.Model):
|
||||
face_left = db.Column( db.Integer )
|
||||
w = db.Column( db.Integer )
|
||||
h = db.Column( db.Integer )
|
||||
refimg_lnk = db.relationship("FaceRefimgLink", uselist=False, viewonly=True)
|
||||
facefile_lnk = db.relationship("FaceFileLink", uselist=False, viewonly=True)
|
||||
refimg_lnk = db.relationship("FaceRefimgLink", uselist=False, viewonly=True )
|
||||
facefile_lnk = db.relationship("FaceFileLink", uselist=False, viewonly=True )
|
||||
refimg =db.relationship("Refimg", secondary="face_refimg_link", uselist=False)
|
||||
fnmo = db.relationship("FaceNoMatchOverride", back_populates="face")
|
||||
ffmo = db.relationship("FaceForceMatchOverride", back_populates="face")
|
||||
|
||||
|
||||
################################################################################
|
||||
@@ -62,13 +64,13 @@ class FaceRefimgLink(PA, db.Model):
|
||||
Attributes:
|
||||
face_id (int): face id of row in Face table / foreign key - part primary key
|
||||
refimg_id (int): face id of row in Face table / foreign key - part primary key
|
||||
face_distance (int): distance value (how similar matched Face was)
|
||||
face_distance (float): distance value (how similar matched Face was)
|
||||
"""
|
||||
|
||||
__tablename__ = "face_refimg_link"
|
||||
face_id = db.Column(db.Integer, db.ForeignKey("face.id"), primary_key=True )
|
||||
refimg_id = db.Column(db.Integer, db.ForeignKey("refimg.id"), primary_key=True )
|
||||
face_distance = db.Column(db.Integer)
|
||||
face_distance = db.Column(db.Float)
|
||||
|
||||
|
||||
################################################################################
|
||||
@@ -104,6 +106,7 @@ class FaceNoMatchOverride(PA, db.Model):
|
||||
face_id = db.Column(db.Integer, db.ForeignKey("face.id"), primary_key=True )
|
||||
type_id = db.Column(db.Integer, db.ForeignKey("face_override_type.id"))
|
||||
type = db.relationship("FaceOverrideType")
|
||||
face = db.relationship("Face", back_populates="fnmo")
|
||||
|
||||
|
||||
################################################################################
|
||||
@@ -123,3 +126,4 @@ class FaceForceMatchOverride(PA, db.Model):
|
||||
face_id = db.Column(db.Integer, db.ForeignKey("face.id"), primary_key=True )
|
||||
person_id = db.Column(db.Integer, db.ForeignKey("person.id"), primary_key=True )
|
||||
person = db.relationship("Person")
|
||||
face = db.relationship("Face", back_populates="ffmo")
|
||||
|
||||
@@ -161,39 +161,18 @@
|
||||
c4.142,0,7.5-3.357,7.5-7.5S339.642,328,335.5,328z"/>
|
||||
<g style="fill:#00000025;" transform="matrix(16, 0, 0, 16, 120, 115)"><path d="M4.502 9a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3z"/><path d="M14.002 13a2 2 0 0 1-2 2h-10a2 2 0 0 1-2-2V5A2 2 0 0 1 2 3a2 2 0 0 1 2-2h10a2 2 0 0 1 2 2v8a2 2 0 0 1-1.998 2zM14 2H4a1 1 0 0 0-1 1h9.002a2 2 0 0 1 2 2v7A1 1 0 0 0 15 11V3a1 1 0 0 0-1-1zM2.002 4a1 1 0 0 0-1 1v8l2.646-2.354a.5.5 0 0 1 .63-.062l2.66 1.773 3.71-3.71a.5.5 0 0 1 .577-.094l1.777 1.947V5a1 1 0 0 0-1-1h-10z"/></g>
|
||||
</svg>
|
||||
<svg id="flip_h" fill="currentColor" viewBox='0 0 512 512'>
|
||||
<svg id="flip_h" viewBox='0 0 512 512'>
|
||||
<path fill='none' stroke='currentColor' stroke-linecap='round' stroke-linejoin='round' stroke-width='32' d='M304 48l112 112-112 112M398.87 160H96M208 464L96 352l112-112M114 352h302'/>
|
||||
</svg>
|
||||
<svg id="flip_v" fill="currentColor" viewBox='0 0 512 512'>
|
||||
<svg id="flip_v" viewBox='0 0 512 512'>
|
||||
<path fill='none' stroke='currentColor' stroke-linecap='round' stroke-linejoin='round' stroke-width='32' d='M464 208L352 96 240 208M352 113.13V416M48 304l112 112 112-112M160 398V96'/>
|
||||
</svg>
|
||||
<svg id="fullscreen" fill="currentColor" viewBox="0 0 16 16">
|
||||
<svg id="fullscreen" viewBox="0 0 16 16">
|
||||
<path fill-rule="evenodd" d="M5.828 10.172a.5.5 0 0 0-.707 0l-4.096 4.096V11.5a.5.5 0 0 0-1 0v3.975a.5.5 0 0 0 .5.5H4.5a.5.5 0 0 0 0-1H1.732l4.096-4.096a.5.5 0 0 0 0-.707zm4.344 0a.5.5 0 0 1 .707 0l4.096 4.096V11.5a.5.5 0 1 1 1 0v3.975a.5.5 0 0 1-.5.5H11.5a.5.5 0 0 1 0-1h2.768l-4.096-4.096a.5.5 0 0 1 0-.707zm0-4.344a.5.5 0 0 0 .707 0l4.096-4.096V4.5a.5.5 0 1 0 1 0V.525a.5.5 0 0 0-.5-.5H11.5a.5.5 0 0 0 0 1h2.768l-4.096 4.096a.5.5 0 0 0 0 .707zm-4.344 0a.5.5 0 0 1-.707 0L1.025 1.732V4.5a.5.5 0 0 1-1 0V.525a.5.5 0 0 1 .5-.5H4.5a.5.5 0 0 1 0 1H1.732l4.096 4.096a.5.5 0 0 1 0 .707z"/>
|
||||
</svg>
|
||||
<svg id="unknown_ftype" fill="grey" viewBox="0 0 16 16">
|
||||
<path d="M2 0a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V2a2 2 0 0 0-2-2H2zm3.496 6.033a.237.237 0 0 1-.24-.247C5.35 4.091 6.737 3.5 8.005 3.5c1.396 0 2.672.73 2.672 2.24 0 1.08-.635 1.594-1.244 2.057-.737.559-1.01.768-1.01 1.486v.105a.25.25 0 0 1-.25.25h-.81a.25.25 0 0 1-.25-.246l-.004-.217c-.038-.927.495-1.498 1.168-1.987.59-.444.965-.736.965-1.371 0-.825-.628-1.168-1.314-1.168-.803 0-1.253.478-1.342 1.134-.018.137-.128.25-.266.25h-.825zm2.325 6.443c-.584 0-1.009-.394-1.009-.927 0-.552.425-.94 1.01-.94.609 0 1.028.388 1.028.94 0 .533-.42.927-1.029.927z"/>
|
||||
</svg>
|
||||
<svg id="rot90" viewBox="0 0 225 225" xmlns="http://www.w3.org/2000/svg" xmlns:bx="https://boxy-svg.com">
|
||||
<g transform="matrix(1, 0, 0, 1, 7, 20)">
|
||||
<path style="fill: rgba(216, 216, 216, 0); stroke: rgb(13, 202, 240); stroke-width: 15px;" d="M 100.468 0.428 C 155.454 0.428 200.022 45.014 200 100"/>
|
||||
<path d="M -100.468 -5.428 H -100.468 L -100.468 -20.428 L -60.468 -0.428 L -100.468 19.572 L -100.468 4.572 H -100.468 V -5.428 Z" style="fill: rgb(13, 202, 240);" transform="matrix(-1, 0, 0, -1, 0, 0)" bx:shape="arrow -100.468 -20.428 40 40 10 40 0 1@18b8b13c"/>
|
||||
<text style="fill: rgb(13, 202, 240); font-family: Arial, sans-serif; font-size: 96px; font-weight: 700; white-space: pre;" transform="matrix(1, 0, 0, 1, -50.667503, 38.507301)"><tspan x="97.106" y="94.348">90</tspan><tspan x="97.106" dy="1em"></tspan></text>
|
||||
</g>
|
||||
</svg>
|
||||
|
||||
<svg id="rot180" viewBox="0 0 225 225" xmlns:bx="https://boxy-svg.com">
|
||||
<path style="fill: rgba(216, 216, 216, 0); paint-order: stroke; stroke-width: 15px; stroke: rgb(13, 202, 240);" d="M 70.624 -41.413 C 147.233 -41.507 195.215 41.367 156.992 107.76 C 139.252 138.572 106.422 157.581 70.867 157.624" transform="matrix(0, -1, 1, 0, 62.412816, 178.623736)"/>
|
||||
<path d="M 0 95 H 0 L 0 80 L 40 100 L 0 120 L 0 105 H 0 V 95 Z" style="fill: rgb(13, 202, 240);" transform="matrix(0.000048, 1, -1, 0.000048, 120.999475, 107.995157)" bx:shape="arrow 0 80 40 40 10 40 0 1@de4046a6"/>
|
||||
<text style="fill: rgb(13, 202, 240); font-family: Arial, sans-serif; font-size: 96px; font-weight: 700; white-space: pre;" transform="matrix(1, 0, 0, 1, -13.373604, 45.416005)"><tspan x="54.39" y="96.53">180</tspan><tspan x="54.39" dy="1em"></tspan></text>
|
||||
</svg>
|
||||
|
||||
<svg id="rot270" viewBox="0 0 225 225" xmlns:bx="https://boxy-svg.com">
|
||||
<g transform="matrix(1, 0, 0, 1, 8, 9)">
|
||||
<path style="fill: rgba(13, 202, 240, 0); paint-order: fill; stroke: rgb(13, 202, 240); stroke-width: 15px;" d="M 100.308 199.384 C 177.051 199.384 225.015 116.307 186.644 49.846 C 148.272 -16.615 52.344 -16.615 13.972 49.846 C 5.371 64.744 0.771 81.615 0.62 98.817" transform="matrix(0, -1, 1, 0, 0.692227, 200.07649)"/>
|
||||
<path d="M 99 195 H 99 L 99 180 L 139 200 L 99 220 L 99 205 H 99 V 195 Z" style="fill: rgb(13, 202, 240);" bx:shape="arrow 99 180 40 40 10 40 0 1@077af6d0"/>
|
||||
<text style="fill: rgb(13, 202, 240); font-family: Arial, sans-serif; font-size: 96px; font-weight: 700; white-space: pre;" x="20.59" y="133.396">270</text>
|
||||
</g>
|
||||
</svg>
|
||||
<svg id="sort-num-asc" viewBox="0 0 16 16">
|
||||
<path d="M12.438 1.668V7H11.39V2.684h-.051l-1.211.859v-.969l1.262-.906h1.046z"/>
|
||||
<path fill-rule="evenodd" d="M11.36 14.098c-1.137 0-1.708-.657-1.762-1.278h1.004c.058.223.343.45.773.45.824 0 1.164-.829 1.133-1.856h-.059c-.148.39-.57.742-1.261.742-.91 0-1.72-.613-1.72-1.758 0-1.148.848-1.835 1.973-1.835 1.09 0 2.063.636 2.063 2.687 0 1.867-.723 2.848-2.145 2.848zm.062-2.735c.504 0 .933-.336.933-.972 0-.633-.398-1.008-.94-1.008-.52 0-.927.375-.927 1 0 .64.418.98.934.98z"/>
|
||||
@@ -229,4 +208,7 @@
|
||||
<path d="M4.406 1.342A5.53 5.53 0 0 1 8 0c2.69 0 4.923 2 5.166 4.579C14.758 4.804 16 6.137 16 7.773 16 9.569 14.502 11 12.687 11H10a.5.5 0 0 1 0-1h2.688C13.979 10 15 8.988 15 7.773c0-1.216-1.02-2.228-2.313-2.228h-.5v-.5C12.188 2.825 10.328 1 8 1a4.53 4.53 0 0 0-2.941 1.1c-.757.652-1.153 1.438-1.153 2.055v.448l-.445.049C2.064 4.805 1 5.952 1 7.318 1 8.785 2.23 10 3.781 10H6a.5.5 0 0 1 0 1H3.781C1.708 11 0 9.366 0 7.318c0-1.763 1.266-3.223 2.942-3.593.143-.863.698-1.723 1.464-2.383z"/>
|
||||
<path d="M7.646 15.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 14.293V5.5a.5.5 0 0 0-1 0v8.793l-2.146-2.147a.5.5 0 0 0-.708.708l3 3z"/>
|
||||
</svg>
|
||||
<svg id="back" viewBox="0 0 16 16">
|
||||
<path d="m7.247 4.86-4.796 5.481c-.566.647-.106 1.659.753 1.659h9.592a1 1 0 0 0 .753-1.659l-4.796-5.48a1 1 0 0 0-1.506 0z"/>
|
||||
</svg>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 27 KiB After Width: | Height: | Size: 25 KiB |
@@ -1,3 +1,16 @@
|
||||
// GLOBAL ICON array
|
||||
ICON={}
|
||||
ICON["Import"]="import"
|
||||
ICON["Storage"]="db"
|
||||
ICON["Bin"]="trash"
|
||||
|
||||
// function called when we get another page from inside the files view
|
||||
function getPageFigures(res, viewingIdx)
|
||||
{
|
||||
// add all the figures to files_div
|
||||
drawPageOfFigures()
|
||||
}
|
||||
|
||||
// grab all selected thumbnails and return a <div> containing the thumbnails
|
||||
// with extra yr and date attached as attributes so we can set the default
|
||||
// dir name for a move directory - not used in del, but no harm to include them
|
||||
@@ -74,21 +87,9 @@ function GetExistingDirsAsDiv( dt, divname, ptype )
|
||||
} )
|
||||
}
|
||||
|
||||
// wrapper to do some clean up before POST to /move_files or /delete_files
|
||||
// used to remove the highlighted item(s) && reset the numbering so highlighting continues to work
|
||||
function MoveOrDelCleanUpUI()
|
||||
{
|
||||
// remove the images being moved (so UI immediately 'sees' the move)
|
||||
$("[name^=eid-]").each( function() { $('#'+$(this).attr('value')).remove() } )
|
||||
// reorder the images via ecnt again, so future highlighting can work
|
||||
document.mf_id=0; $('.figure').each( function() { $(this).attr('ecnt', document.mf_id ); document.mf_id++ } )
|
||||
$('#dbox').modal('hide')
|
||||
}
|
||||
|
||||
|
||||
// show the DBox for a move file, includes all thumbnails of selected files to move
|
||||
// and a pre-populated folder to move them into, with text field to add a suffix
|
||||
function MoveDBox(path_details, db_url)
|
||||
function MoveDBox()
|
||||
{
|
||||
$('#dbox-title').html('Move Selected File(s) to new directory in Storage Path')
|
||||
div =`
|
||||
@@ -98,21 +99,21 @@ function MoveDBox(path_details, db_url)
|
||||
<form id="mv_fm" class="form form-control-inline col-12">
|
||||
<input id="move_path_type" name="move_path_type" type="hidden"
|
||||
`
|
||||
div += ' value="' + path_details[0].type + '"></input>'
|
||||
div += ' value="' + move_paths[0].type.name + '"></input>'
|
||||
div+=GetSelnAsDiv()
|
||||
yr=$('.highlight').first().attr('yr')
|
||||
dt=$('.highlight').first().attr('date')
|
||||
div+='<div class="row">Use Existing Directory (in the chosen path):</div><div id="existing"></div>'
|
||||
GetExistingDirsAsDiv( dt, "existing", path_details[0].type )
|
||||
GetExistingDirsAsDiv( dt, "existing", 'Storage' )
|
||||
div+=`
|
||||
<div class="input-group my-3">
|
||||
<alert class="alert alert-primary my-auto py-1">
|
||||
`
|
||||
// NB: alert-primary here is a hack to get the bg the same color as the alert primary by
|
||||
div+= '<svg id="move_path_icon" width="20" height="20" fill="currentColor"><use xlink:href="' + path_details[0].icon_url + '"></svg>'
|
||||
div+= '<svg id="move_path_icon" width="20" height="20" fill="currentColor"><use xlink:href="' + move_paths[0].icon_url + '"></svg>'
|
||||
div+= '<select id="rp_sel" name="rel_path" class="text-primary alert-primary py-1 border border-primary rounded" onChange="change_rp_sel()">'
|
||||
for(p of path_details) {
|
||||
div+= '<option path_type="'+p.type+'" icon_url="'+p.icon_url+'">'+p.path+'</option>'
|
||||
for(p of move_paths) {
|
||||
div+= `<option path_type="${p.type.name}" icon_url="${p.icon_url}">${p.root_dir}</option>`
|
||||
}
|
||||
div+= '</select>'
|
||||
div+=`
|
||||
@@ -126,11 +127,26 @@ function MoveDBox(path_details, db_url)
|
||||
</div>
|
||||
<div class="form-row col-12 mt-2">
|
||||
<button onClick="$('#dbox').modal('hide'); return false;" class="btn btn-outline-secondary offset-1 col-2">Cancel</button>
|
||||
<button id="move_submit" onClick="MoveOrDelCleanUpUI(); $.ajax({ type: 'POST', data: $('#mv_fm').serialize(), url: '/move_files', success: function(data) {
|
||||
if( $(location).attr('pathname').match('search') !== null ) { window.location='/' }; CheckForJobs() } }); return false" class="btn btn-outline-primary col-2">Ok</button>
|
||||
<button onClick="
|
||||
$.ajax({ type: 'POST', data: $('#mv_fm').serialize(), url: '/move_files',
|
||||
success: function(data) {
|
||||
processAmendments( data.job.amendments )
|
||||
checkForAmendmentJobToComplete(data.job.id)
|
||||
}
|
||||
});
|
||||
$('#dbox').modal('hide')
|
||||
return false"
|
||||
class="btn btn-outline-secondary col-2">Ok</button>
|
||||
</div>
|
||||
</form>
|
||||
`
|
||||
// force to Storage always - if in Import, liekly storing, if in Storage, likely moving, user can always override
|
||||
div+=`
|
||||
<script>
|
||||
storage_rp = move_paths.find(item => item.type.name === "Storage")?.root_dir;
|
||||
$('#rp_sel').val(storage_rp);change_rp_sel()
|
||||
</script>
|
||||
`
|
||||
|
||||
$('#dbox-content').html(div)
|
||||
$('#dbox').modal('show')
|
||||
@@ -138,36 +154,71 @@ function MoveDBox(path_details, db_url)
|
||||
$("#suffix").keypress(function (e) { if (e.which == 13) { $("#move_submit").click(); return false; } } )
|
||||
}
|
||||
|
||||
// This function is called anytime we have a job that returns amendments
|
||||
// (visually we want to show this entry is being amended by a job)
|
||||
// as we check for a job to end every second, we can call this multiple times
|
||||
// during the runtime of a job, so only redraw/react to a new amendment
|
||||
// NOTE: we update all views, as we might go into one via jscript before the job ends
|
||||
function processAmendments( ams )
|
||||
{
|
||||
for (const am of ams)
|
||||
{
|
||||
// if we return anything here, we already have this amendment, so continue to next
|
||||
if( document.amendments.filter(obj => obj.eid === am.eid).length > 0 )
|
||||
continue
|
||||
|
||||
document.amendments.push(am)
|
||||
|
||||
if( document.viewing && document.viewing.id == am.eid )
|
||||
{
|
||||
im.src=im.src + '?t=' + new Date().getTime();
|
||||
DrawImg()
|
||||
}
|
||||
|
||||
// find where in the page this image is being viewed
|
||||
idx = pageList.indexOf(am.eid)
|
||||
// createFigureHtml uses matching document.amendments to show thobber, etc
|
||||
html = createFigureHtml( document.entries[idx] )
|
||||
$('#'+am.eid).replaceWith( html )
|
||||
}
|
||||
}
|
||||
|
||||
// function to add data for document.amendment based on id and amt
|
||||
// used when we transform several images in files_*, or single image in viewer
|
||||
// show the DBox for a delete/restore file, includes all thumbnails of selected files
|
||||
// with appropriate coloured button to Delete or Restore files`
|
||||
// with appropriate coloured button to Delete or Restore files
|
||||
function DelDBox(del_or_undel)
|
||||
{
|
||||
to_del = GetSelnAsData()
|
||||
$('#dbox-title').html(del_or_undel+' Selected File(s)')
|
||||
div ='<div class="row col-12"><p class="col">' + del_or_undel + ' the following files?</p></div>'
|
||||
div+=GetSelnAsDiv()
|
||||
if( del_or_undel == "Delete" )
|
||||
{
|
||||
which="delete"
|
||||
col="danger"
|
||||
}
|
||||
else
|
||||
{
|
||||
which="restore"
|
||||
col="success"
|
||||
}
|
||||
|
||||
document.ents_to_del=[]
|
||||
$('.highlight').each(function( cnt ) { document.ents_to_del[cnt]=parseInt($(this).attr('id')) } )
|
||||
div+=`<div class="row col-12 mt-3">
|
||||
<button onClick="$('#dbox').modal('hide')" class="btn btn-outline-secondary col-2">Cancel</button>
|
||||
`
|
||||
div+=`
|
||||
<button onClick="MoveOrDelCleanUpUI(); $.ajax({ type: 'POST', data: to_del, url:
|
||||
`
|
||||
if( del_or_undel == "Delete" )
|
||||
div+=`
|
||||
'/delete_files',
|
||||
success: function(data){
|
||||
if( $(location).attr('pathname').match('search') !== null ) { window.location='/' }; CheckForJobs() } }); return false" class="btn btn-outline-danger col-2">Ok</button>
|
||||
</div>
|
||||
`
|
||||
else
|
||||
// just force page reload to / for now if restoring files from a search path -- a search (by name)
|
||||
// would match the deleted/restored file, so it would be complex to clean up the UI (and can't reload, as DB won't be changed yet)
|
||||
div+=`
|
||||
'/restore_files',
|
||||
success: function(data){
|
||||
if( $(location).attr('pathname').match('search') !== null ) { window.location='/' }; CheckForJobs() } }); return false" class="btn btn-outline-success col-2">Ok</button>
|
||||
</div>
|
||||
`
|
||||
<button onClick="
|
||||
$.ajax({ type: 'POST', data: to_del, url: '/${which}_files',
|
||||
success: function(data) {
|
||||
processAmendments( data.job.amendments )
|
||||
checkForAmendmentJobToComplete(data.job.id)
|
||||
}
|
||||
});
|
||||
$('#dbox').modal('hide')
|
||||
return false"
|
||||
class="btn btn-outline-${col} col-2">Ok</button>
|
||||
</div>`
|
||||
$('#dbox-content').html(div)
|
||||
$('#dbox').modal('show')
|
||||
}
|
||||
@@ -200,53 +251,82 @@ function DetailsDBox()
|
||||
}
|
||||
|
||||
|
||||
// function to change the size of thumbnails (and resets button bar to newly
|
||||
// selected size)
|
||||
function ChangeSize(clicked_button,sz)
|
||||
{
|
||||
$('.sz-but.btn-info').removeClass('btn-info text-white').addClass('btn-outline-info')
|
||||
$(clicked_button).addClass('btn-info text-white').removeClass('btn-outline-info')
|
||||
$('.thumb').attr( {height: sz, style: 'font-size:'+sz+'px' } )
|
||||
$('#size').val(sz)
|
||||
sz=sz-22
|
||||
$('.svg').height(sz);
|
||||
$('.svg').width(sz);
|
||||
$('.svg_cap').width(sz);
|
||||
}
|
||||
|
||||
// DoSel is called when a click event occurs, and sets the selection by adding
// 'highlight' to the class of the appropriate thumbnails
// e  == event (so we can see if shift/ctrl was held down while left-clicking)
// el == element the click is on
// this allows single-click to select, ctrl-click to (de)select 1 item, and
// shift-click to add all elements between the highlighted area and the clicked el,
// whether you click before the highlight or after, or inside a gap and then back
// or forward to the closest highlighted entry - also, it only works on the entry class,
// so it ignores figures that we take the entry class off while we transform them, etc.
function DoSel(e, el) {
    const id = $(el).attr('id');
    const entries = $('.entry');

    // Collect currently highlighted entries
    const currentHighlights = $('.highlight');
    const highlighted = new Set();
    currentHighlights.each(function() {
        highlighted.add($(this).attr('id'));
    });

    // Ctrl+click: toggle highlight for the clicked entry
    if (e.ctrlKey || document.fake_ctrl === 1) {
        $(el).toggleClass('highlight');
        if (highlighted.has(id)) {
            highlighted.delete(id);
        } else {
            highlighted.add(id);
        }
        if (document.fake_ctrl === 1) {
            document.fake_ctrl = 0;
        }
        return;
    }
    // Shift+click: select a range
    else if (e.shiftKey || document.fake_shift === 1) {
        if (currentHighlights.length === 0) {
            // If no highlights, just highlight the clicked entry
            $(el).addClass('highlight');
            highlighted.add(id);
        } else {
            // Find the nearest highlighted entry
            const clickedIndex = entries.index($(el));
            let nearestHighlightIndex = -1;
            let minDistance = Infinity;

            currentHighlights.each(function() {
                const highlightIndex = entries.index($(this));
                const distance = Math.abs(highlightIndex - clickedIndex);
                if (distance < minDistance) {
                    minDistance = distance;
                    nearestHighlightIndex = highlightIndex;
                }
            });

            // Highlight the range between the nearest highlighted entry and the clicked entry
            const from = Math.min(clickedIndex, nearestHighlightIndex);
            const to = Math.max(clickedIndex, nearestHighlightIndex);

            for (let i = from; i <= to; i++) {
                const entryId = entries.eq(i).attr('id');
                highlighted.add(entryId);
                entries.eq(i).addClass('highlight');
            }
        }
        if (document.fake_shift === 1) {
            document.fake_shift = 0;
        }
        return;
    }
    // Single click: clear all highlights and highlight the clicked entry
    else {
        $('.highlight').removeClass('highlight');
        highlighted.clear();
        $(el).addClass('highlight');
        highlighted.add(id);
    }
}
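
// Illustrative only (not part of the handlers above): DoSel is normally wired up per
// figure inside createFigureHtml(), with a binding equivalent to:
//
//     $('#' + obj.id).click( function(e) { DoSel(e, this); SetButtonState(); return false; } );
//
// A plain click then selects just that figure, ctrl-click toggles it in or out of the
// selection, and shift-click extends the selection towards the nearest highlighted entry.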

// if a selection exists, enable move & del/restore buttons otherwise disable them
@@ -311,3 +391,623 @@ function NoSel() {
    else
        return true
}

// quick wrapper to add a single <figure> to the #figures div
function addFigure( obj )
{
    html=createFigureHtml( obj )
    $('#figures').append( html )
}
|
||||
|
||||
/**
 * Renders a single <figure> entry (Image/Video/Unknown file, or a Directory when
 * folder view is enabled) based on the object and options.
 * obj - The object containing file/directory details.
 * returns {string} - Generated HTML string.
 */
function createFigureHtml( obj )
{
|
||||
// if am is null, no amendment for this obj, otherwise we have one
|
||||
var am=null
|
||||
for (const tmp of document.amendments)
|
||||
if( tmp.eid == obj.id )
|
||||
am=tmp
|
||||
|
||||
let html = "";
|
||||
|
||||
// Image/Video/Unknown entry
|
||||
if (obj.type.name === "Image" || obj.type.name === "Video" || obj.type.name === "Unknown") {
|
||||
const pathType = obj.in_dir.in_path.type.name;
|
||||
const size = obj.file_details.size_mb;
|
||||
const hash = obj.file_details.hash;
|
||||
const inDir = `${obj.in_dir.in_path.path_prefix}/${obj.in_dir.rel_path}`;
|
||||
const fname = obj.name;
|
||||
const yr = obj.file_details.year;
|
||||
const date = `${yr}${String(obj.file_details.month).padStart(2, '0')}${String(obj.file_details.day).padStart(2, '0')}`;
|
||||
const prettyDate = `${obj.file_details.day}/${obj.file_details.month}/${obj.file_details.year}`;
|
||||
const type = obj.type.name;
|
||||
|
||||
// if amendment for this obj, do not add entry class - prevents highlighting
|
||||
if( am ) {
|
||||
ent=""
|
||||
gs="style='filter: grayscale(100%);'"
|
||||
am_html ='<img class="position-absolute top-50 start-50 translate-middle" height="60" src="/internal/white-circle.png">'
|
||||
am_html+='<img class="position-absolute top-50 start-50 translate-middle" height="64" src="/internal/throbber.gif">'
|
||||
if( am.type.which == 'icon' )
|
||||
am_html+=`<svg class="position-absolute top-50 start-50 translate-middle" height="32" style="color:${am.type.colour}" fill="${am.type.colour}"><use xlink:href="/internal/icons.svg#${am.type.what}"></use></svg>`
|
||||
else
|
||||
am_html+=`<img class="position-absolute top-50 start-50 translate-middle" src="/internal/${am.type.what}?v={{js_vers['r270']}}" height="32">`
|
||||
} else {
|
||||
ent="entry"
|
||||
gs=""
|
||||
am_html=""
|
||||
}
|
||||
html += `
|
||||
<figure id="${obj.id}" class="col col-auto g-0 figure ${ent} m-1"
|
||||
path_type="${pathType}" size="${size}" hash="${hash}" in_dir="${inDir}"
|
||||
fname="${fname}" yr="${yr}" date="${date}" pretty_date="${prettyDate}" type="${type}">
|
||||
${renderMedia(obj,gs,am_html)}
|
||||
`
|
||||
}
|
||||
// Directory entry
|
||||
else if (obj.type.name === "Directory" && OPT.folders) {
|
||||
const dirname = obj.dir_details.rel_path.length
|
||||
? `${obj.dir_details.in_path.path_prefix}/${obj.dir_details.rel_path}`
|
||||
: obj.dir_details.in_path.path_prefix;
|
||||
|
||||
html += `
|
||||
<figure class="col col-auto g-0 dir entry m-1" id="${obj.id}" dir="${dirname}" type="Directory">
|
||||
<svg class="svg" width="${OPT.size - 22}" height="${OPT.size - 22}" fill="currentColor">
|
||||
<use xlink:href="/internal/icons.svg#Directory"></use>
|
||||
</svg>
|
||||
<figcaption class="svg_cap figure-caption text-center text-wrap text-break">${obj.name}</figcaption>
|
||||
`;
|
||||
html += `<script>f=$('#${obj.id}'); w=f.find('svg').width(); f.find('figcaption').width(w);</script>`;
|
||||
}
|
||||
// moved the bindings to here as we need to reset them if we recreate this Figure (after a transform job)
|
||||
html += `<script>
|
||||
if( "${obj.type.name}" === "Directory" ) {
|
||||
$("#${obj.id}").click( function(e) { document.back_id=this.id; getDirEntries(this.id,false) } )
|
||||
} else {
|
||||
$('#${obj.id}').click( function(e) { DoSel(e, this ); SetButtonState(); return false; });
|
||||
$('#${obj.id}').dblclick( function(e) { startViewing( $(this).attr('id') ) } )
|
||||
}
|
||||
</script>
|
||||
</figure>`
|
||||
return html
|
||||
}
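
// Sketch of the minimal `obj` shape createFigureHtml() reads for a file entry - the field
// names are taken from the code above, the values are made-up examples, not real data:
//
//     {
//       id: 1234, name: "IMG_0001.jpg",
//       type: { name: "Image" },
//       file_details: { size_mb: 2.1, hash: "ab12cd34", year: 2024, month: 7, day: 9, thumbnail: "<base64>" },
//       in_dir: { rel_path: "holiday", in_path: { path_prefix: "static/photos", type: { name: "Bin" } } }
//     }
//
// Directory entries instead carry dir_details.rel_path and dir_details.in_path.path_prefix.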
|
||||
|
||||
// Helper function to render media (image/video/unknown)
|
||||
function renderMedia(obj,gs,am_html) {
|
||||
const isImageOrUnknown = obj.type.name === "Image" || obj.type.name === "Unknown";
|
||||
const isVideo = obj.type.name === "Video";
|
||||
const path = `${obj.in_dir.in_path.path_prefix}/${obj.in_dir.rel_path}/${obj.name}`;
|
||||
const thumb = obj.file_details.thumbnail
|
||||
? `<a href="${path}"><img alt="${obj.name}" ${gs} class="thumb" height="${OPT.size}" src="data:image/jpeg;base64,${obj.file_details.thumbnail}"></a>`
|
||||
: `<a href="${path}"><svg width="${OPT.size}" height="${OPT.size}" fill="white"><use xlink:href="/internal/icons.svg#unknown_ftype"/></svg></a>`;
|
||||
|
||||
let mediaHtml = `<div style="position:relative; width:100%">${thumb}${am_html}`;
|
||||
|
||||
if (isVideo) {
|
||||
mediaHtml += `
|
||||
<div style="position:absolute; top: 0px; left: 2px;">
|
||||
<svg width="16" height="16" fill="white"><use xlink:href="/internal/icons.svg#film"/></svg>
|
||||
</div>
|
||||
`;
|
||||
}
|
||||
if (OPT.search_term) {
|
||||
mediaHtml += `
|
||||
<div style="position:absolute; bottom: 0px; left: 2px;">
|
||||
<svg width="16" height="16" fill="white"><use xlink:href="/internal/icons.svg#${getLocationIcon(obj)}"/></svg>
|
||||
</div>
|
||||
`;
|
||||
}
|
||||
|
||||
mediaHtml += `</div>`;
|
||||
return mediaHtml;
|
||||
}
|
||||
|
||||
// Helper: Get location icon (placeholder)
|
||||
function getLocationIcon(obj) {
|
||||
return ICON[obj.in_dir.in_path.type.name]
|
||||
}
|
||||
|
||||
// POST to get entry ids, and then getPage for a specified directory
|
||||
function getDirEntries(dir_id, back)
|
||||
{
|
||||
data={}
|
||||
data.dir_id=dir_id
|
||||
data.back=back
|
||||
data.noo=OPT.noo
|
||||
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
url: '/get_dir_eids',
|
||||
data: JSON.stringify(data),
|
||||
contentType: 'application/json',
|
||||
dataType: 'json',
|
||||
success: function(res) {
|
||||
if( res.valid === false )
|
||||
{
|
||||
$('#figures').html( "<alert class='alert alert-danger'>ERROR! directory has changed since you loaded this view. You have to reload and reset your view (probably someone deleted the directory or its parent since you loaded this page)" )
|
||||
return
|
||||
}
|
||||
entryList=res.entry_list
|
||||
pageList=entryList.slice(0, OPT.how_many)
|
||||
// now go get actual data/entries
|
||||
getPage(1,getPageFigures)
|
||||
},
|
||||
error: function(xhr, status, error) {
|
||||
console.error("Error:", error);
|
||||
}
|
||||
});
|
||||
}
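
// For reference (derived from the call above, not a separate spec): the POST body sent to
// /get_dir_eids is { dir_id, back, noo }, and the handler only uses two response fields -
// res.valid (false when the directory has changed or vanished since the view was loaded)
// and res.entry_list (the ordered list of entry ids used to build pageList).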
|
||||
|
||||
// this function draws all the figures from document.entries - called when we
|
||||
// change pages, but also when we change say grouping/other OPTs
|
||||
function drawPageOfFigures()
|
||||
{
|
||||
$('#figures').empty()
|
||||
var last = { printed: null }
|
||||
|
||||
// something is up, let the user know
|
||||
if( document.alert )
|
||||
$('#figures').append( document.alert )
|
||||
|
||||
if( OPT.folders )
|
||||
{
|
||||
// if root_eid is 0, then there are no entries in this path - can't go up
|
||||
if( OPT.root_eid == 0 || (document.entries.length && document.entries[0].in_dir.eid == OPT.root_eid ) )
|
||||
{
|
||||
gray="_gray"
|
||||
back=""
|
||||
cl=""
|
||||
back_id=0
|
||||
}
|
||||
else
|
||||
{
|
||||
gray=""
|
||||
back="Back"
|
||||
cl="back"
|
||||
if( document.entries.length > 0 )
|
||||
back_id = document.entries[0].in_dir.eid
|
||||
else
|
||||
back_id = document.back_id
|
||||
}
|
||||
// back button, if gray/back decide if we see grayed out folder and/or the name of the folder we go back to
|
||||
// with class "back" this gets a different click handler which flags the server to return data by 'going back/up' in the dir tree
|
||||
// we give the server the id of the first item on the page so it can work out how to go back
|
||||
html=`<div class="col col-auto g-0 m-1">
|
||||
<figure id="${back_id}" class="${cl} entry m-1" type="Directory">
|
||||
<svg class="svg" width="${OPT.size-22}" height="${OPT.size-22}">
|
||||
<use xlink:href="internal/icons.svg#folder_back${gray}"/>
|
||||
</svg>
|
||||
<figcaption class="figure-caption text-center">${back}</figcaption>
|
||||
</figure>
|
||||
</div>`
|
||||
$('#figures').append(html)
|
||||
}
|
||||
for (const obj of document.entries) {
|
||||
// Grouping logic
|
||||
if (OPT.grouping === "Day") {
|
||||
if (last.printed !== obj.file_details.day) {
|
||||
$('#figures').append(`<div class="row ps-3"><h6>Day: ${obj.file_details.day} of ${obj.file_details.month}/${obj.file_details.year}</h6></div>` );
|
||||
last.printed = obj.file_details.day;
|
||||
}
|
||||
} else if (OPT.grouping === "Week") {
|
||||
if (last.printed !== obj.file_details.woy) {
|
||||
$('#figures').append(`<div class="row ps-3"><h6>Week #: ${obj.file_details.woy} of ${obj.file_details.year}</h6></div>` );
|
||||
last.printed = obj.file_details.woy;
|
||||
}
|
||||
} else if (OPT.grouping === "Month") {
|
||||
if (last.printed !== obj.file_details.month) {
|
||||
$('#figures').append(`<div class="row ps-3"><h6>Month: ${obj.file_details.month} of ${obj.file_details.year}</h6></div>` );
|
||||
last.printed = obj.file_details.month;
|
||||
}
|
||||
}
|
||||
addFigure( obj )
|
||||
}
|
||||
$(".back").click( function(e) { getDirEntries(this.id,true) } )
|
||||
if( document.entries.length == 0 )
|
||||
if( OPT.search_term )
|
||||
$('#figures').append( `<span class="alert alert-danger p-2 col-auto"> No matches for: '${OPT.search_term}'</span>` )
|
||||
else if( OPT.root_eid == 0 )
|
||||
$('#figures').append( `<span class="alert alert-danger p-2 col-auto d-flex align-items-center">No files in Path!</span>` )
|
||||
}
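
// Example of the grouping above (illustrative): with OPT.grouping === "Month", a header row
// such as <h6>Month: 7 of 2024</h6> is appended the first time an entry with
// file_details.month === 7 is seen, and a new header only appears when the month changes.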
|
||||
|
||||
// empty out file_list_div, and repopulate it with a new page of content
|
||||
function getPageFileList(res, viewingIdx)
|
||||
{
|
||||
$('#file_list_div').empty()
|
||||
|
||||
// something is up, let the user know
|
||||
if( document.alert )
|
||||
$('#file_list_div').append( '<div class="row">' + document.alert + '</div>' )
|
||||
|
||||
if( OPT.root_eid == 0 )
|
||||
{
|
||||
$('#file_list_div').append( `<span class="alert alert-danger p-2">No files in Path!</span>` )
|
||||
return
|
||||
}
|
||||
html='<table class="table table-striped table-sm col-12">'
|
||||
html+='<thead><tr class="table-primary"><th>Name</th><th>Size (MB)</th><th>Path Prefix</th><th>Hash</th></tr></thead><tbody>'
|
||||
for (const obj of res) {
|
||||
html+=`<tr>
|
||||
<td>
|
||||
<div class="d-flex align-items-center">
|
||||
<a href="${obj.in_dir.in_path.path_prefix}/${obj.in_dir.rel_path}/${obj.name}">
|
||||
<img class="img-fluid me-2" style="max-width: 100px;"
|
||||
src="data:image/jpeg;base64,${obj.file_details.thumbnail}"></img>
|
||||
</a>
|
||||
<span>${obj.name}</span>
|
||||
</div>
|
||||
<td>${obj.file_details.size_mb}</td>
|
||||
<td>${obj.in_dir.in_path.path_prefix.replace("static/","")}/${obj.in_dir.rel_path}</td>
|
||||
<td>${obj.file_details.hash}</td>
|
||||
</tr>`
|
||||
}
|
||||
html+='</tbody></table>'
|
||||
$('#file_list_div').append(html)
|
||||
}
|
||||
|
||||
// wrapper function as we want to handle real DB query success, but also do the
|
||||
// same when we just use cache
|
||||
function getEntriesByIdSuccessHandler(res,pageNumber,successCallback,viewingIdx)
|
||||
{
|
||||
if( res.length != pageList.length )
|
||||
document.alert="<alert class='alert alert-warning'>WARNING: something has changed since viewing this page (likely someone deleted content in another view), strongly suggest a page reload to get the latest data</alert>"
|
||||
|
||||
document.entries=res;
|
||||
// cache this
|
||||
document.page[pageNumber]=res
|
||||
// FIXME: I want to remove successCallback, instead: if viewing, or files_*, or file_list, then call relevant draw routine
|
||||
successCallback(res,viewingIdx)
|
||||
resetNextPrevButtons()
|
||||
// if search, disable folders
|
||||
if( OPT.search_term )
|
||||
$('#folders').prop('disabled', 'disabled').removeClass('border-info').addClass('border-secondary').removeClass('text-info').addClass('text-secondary');
|
||||
else if( document.entries.length == 0 )
|
||||
{
|
||||
html=`<span class="alert alert-danger p-2 col-auto">No files in Path</span>`
|
||||
$('#file_list_div').append(html)
|
||||
$('#figures').append(html)
|
||||
}
|
||||
}
|
||||
|
||||
// Function to get the 'page' of entry ids out of entryList
|
||||
function getPage(pageNumber, successCallback, viewingIdx=0)
|
||||
{
|
||||
// before we do anything, disabled left/right arrows on viewer to stop
|
||||
// getting another event before we have the data for the page back
|
||||
$('#la').prop('disabled', true)
|
||||
$('#ra').prop('disabled', true)
|
||||
const startIndex = (pageNumber - 1) * OPT.how_many;
|
||||
const endIndex = startIndex + OPT.how_many;
|
||||
pageList = entryList.slice(startIndex, endIndex);
|
||||
|
||||
// set up data to send to server to get the entry data for entries in pageList
|
||||
data={}
|
||||
data.ids = pageList
|
||||
|
||||
// assume nothing wrong, but if the data goes odd, then this will be non-null and displayed later (cant add here, as later code does .empty() of file divs)
|
||||
document.alert=null
|
||||
// see if we can use cache, and dont reload from DB
|
||||
if( !OPT.folders && document.page.length && document.page[pageNumber] )
|
||||
{
|
||||
getEntriesByIdSuccessHandler( document.page[pageNumber], pageNumber, successCallback, viewingIdx )
|
||||
return
|
||||
}
|
||||
|
||||
$.ajax({
|
||||
type: 'POST', url: '/get_entries_by_ids',
|
||||
data: JSON.stringify(data), contentType: 'application/json',
|
||||
dataType: 'json',
|
||||
success: function(res) {
|
||||
document.amendments=res.amendments;
|
||||
// only called when an amendment is pending & we are viewing a page in files/list view
|
||||
// so check for amendment job(s) ending...
|
||||
for (const tmp of document.amendments)
|
||||
checkForAmendmentJobToComplete(tmp.job_id)
|
||||
getEntriesByIdSuccessHandler( res.entries, pageNumber, successCallback, viewingIdx )
|
||||
},
|
||||
error: function(xhr, status, error) { console.error("Error:", error); } });
|
||||
return
|
||||
}
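
// Cache behaviour sketch (assuming a flat, non-folder view): the first getPage(2, getPageFigures)
// POSTs to /get_entries_by_ids and stores the result in document.page[2]; a later call for the
// same page is served from that cache and never hits the server. changeOPT() and finished
// amendment jobs clear the cache again by setting document.page.length = 0.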
|
||||
|
||||
// Quick Function to check if we are on the first page
|
||||
function isFirstPage(pageNumber)
|
||||
{
|
||||
return pageNumber <= 1;
|
||||
}
|
||||
|
||||
// Function to check if we are on the last page
|
||||
function isLastPage(pageNumber)
|
||||
{
|
||||
const totalPages = Math.ceil(entryList.length / OPT.how_many);
|
||||
return pageNumber >= totalPages;
|
||||
}
|
||||
|
||||
// given an id in the list, return which page we are on (page 1 is first page)
|
||||
function getPageNumberForId(id) {
|
||||
const idx = entryList.indexOf(id);
|
||||
// should be impossible but jic
|
||||
if (idx === -1) { return -1 }
|
||||
return Math.floor(idx / OPT.how_many) + 1;
|
||||
}
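
// Worked example (illustrative): with OPT.how_many = 100, an id found at index 0..99 of
// entryList is on page 1, index 100..199 on page 2, and so on - e.g. an id at index 150
// gives Math.floor(150 / 100) + 1 === 2. An id that is not in entryList returns -1.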
|
||||
|
||||
// if we are on the first page, disable prev, if not ensure prev is enabled
// if we are on the last page, disable next, if not ensure next is enabled
|
||||
function resetNextPrevButtons()
|
||||
{
|
||||
// no data, so disabled both
|
||||
if( getPageNumberForId(pageList[0]) == -1 )
|
||||
{
|
||||
$('.prev').prop('disabled', true).addClass('disabled');
|
||||
$('.next').prop('disabled', true).addClass('disabled');
|
||||
return
|
||||
}
|
||||
if ( isFirstPage( getPageNumberForId(pageList[0]) ) )
|
||||
$('.prev').prop('disabled', true).addClass('disabled');
|
||||
else
|
||||
$('.prev').prop('disabled', false).removeClass('disabled');
|
||||
|
||||
if ( isLastPage( getPageNumberForId(pageList[0]) ) )
|
||||
$('.next').prop('disabled', true).addClass('disabled');
|
||||
else
|
||||
$('.next').prop('disabled', false).removeClass('disabled');
|
||||
}
|
||||
|
||||
// get list of eids for the next page, also make sure next/prev buttons make sense for page we are on
|
||||
function nextPage(successCallback)
|
||||
{
|
||||
// start with disabling more next presses until we are ready to process them
|
||||
$('.prev').prop('disabled', true).addClass('disabled');
|
||||
$('.next').prop('disabled', true).addClass('disabled');
|
||||
|
||||
// pageList[0] is the first entry on this page
|
||||
const currentPage=getPageNumberForId( pageList[0] )
|
||||
// should never happen / just return pageList unchanged
|
||||
if ( currentPage === -1 || isLastPage( currentPage ) )
|
||||
{
|
||||
console.error( "WARNING: seems first on pg=" + pageList[0] + " of how many=" + OPT.how_many + " gives currentPage=" + currentPage + " and we cant go next page?" )
|
||||
return
|
||||
}
|
||||
getPage( currentPage+1, successCallback )
|
||||
return
|
||||
}
|
||||
|
||||
// get list of eids for the prev page, also make sure next/prev buttons make sense for page we are on
|
||||
function prevPage(successCallback)
|
||||
{
|
||||
// start with disabling more prev presses until we are ready to process them
|
||||
$('.prev').prop('disabled', true).addClass('disabled');
|
||||
$('.next').prop('disabled', true).addClass('disabled');
|
||||
|
||||
// pageList[0] is the first entry on this page
|
||||
const currentPage=getPageNumberForId( pageList[0] )
|
||||
// should never happen / just return pageList unchanged
|
||||
if (currentPage === 1 || currentPage === -1 )
|
||||
{
|
||||
console.error( "WARNING: seems first on pg=" + pageList[0] + " of how many=" + OPT.how_many + " gives currentPage=" + currentPage + " and we cant go prev page?" )
|
||||
return
|
||||
}
|
||||
getPage( currentPage-1, successCallback )
|
||||
return
|
||||
}
|
||||
|
||||
// function to see if we are on a phone or tablet (where we dont have ctrl or shift keys - helps to display fake buttons to allow multiselect on mobiles)
|
||||
function isMobile() {
|
||||
try{ document.createEvent("TouchEvent"); return true; }
|
||||
catch(e){ return false; }
|
||||
}
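
// Illustrative sketch (the button id is hypothetical): on touch devices the ctrl/shift
// modifiers can be simulated by setting the flags DoSel() already checks, e.g.
//
//     if( isMobile() )
//         $('#fake_ctrl_btn').click( function() { document.fake_ctrl = 1 } )
//
// The flag is consumed (reset to 0) by the next DoSel() call.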
|
||||
|
||||
// when we change one of the options (noo, how_many, folders) - then update '{how_many} files' str,
|
||||
// tweak noo menu for folders/flat view then reset the page contents based on current OPT values
|
||||
function changeOPT(successCallback) {
|
||||
OPT.how_many=$('#how_many').val()
|
||||
// changes invalidate page cache so clear it out
|
||||
document.page.length=0
|
||||
new_f=$('#folders').val()
|
||||
new_f=( new_f == 'True' )
|
||||
// if change to/from folders, also fix the noo menu
|
||||
if( new_f != OPT.folders )
|
||||
{
|
||||
if( new_f )
|
||||
{
|
||||
$('#noo option:lt(2)').prop('disabled', true);
|
||||
$('#noo').val(OPT.default_folder_noo)
|
||||
}
|
||||
else
|
||||
{
|
||||
$('#noo option:lt(2)').prop('disabled', false);
|
||||
$('#noo').val(OPT.default_flat_noo)
|
||||
}
|
||||
}
|
||||
OPT.noo=$('#noo').val()
|
||||
OPT.folders=new_f
|
||||
OPT.folders=$('#folders').val()
|
||||
OPT.grouping=$('#grouping').val()
|
||||
OPT.size=$('input[name="size"]:checked').val();
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
url: '/change_file_opts',
|
||||
data: JSON.stringify(OPT),
|
||||
contentType: 'application/json',
|
||||
success: function(resp) {
|
||||
entryList=resp.query_data.entry_list
|
||||
OPT.how_many=parseInt(OPT.how_many)
|
||||
pageList=entryList.slice(0, OPT.how_many)
|
||||
// put data back into booleans, ints, etc
|
||||
OPT.folders=( OPT.folders == 'True' )
|
||||
$('.how_many_text').html( ` ${OPT.how_many} files ` )
|
||||
OPT.size=parseInt(OPT.size)
|
||||
getPage(1,successCallback)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// function to change the size of thumbnails when user clicks xs/s/m/l/xl buttons
|
||||
function changeSize()
|
||||
{
|
||||
sz=$('input[name="size"]:checked').val();
|
||||
OPT.size=sz
|
||||
$('.thumb').attr( {height: sz, style: 'font-size:'+sz+'px' } )
|
||||
$('#size').val(sz)
|
||||
sz=sz-22
|
||||
$('.svg').height(sz);
|
||||
$('.svg').width(sz);
|
||||
$('.svg_cap').width(sz);
|
||||
}
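
// Example with illustrative numbers: picking size 256 sets thumbnails to height 256 while
// the directory/back svgs and their captions get 256 - 22 = 234 px, matching the
// OPT.size - 22 used when the figures are first rendered.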
|
||||
|
||||
// when a delete or restore files job has completed successfully, then get ids
|
||||
// find the page we are on, remove amendments & ids from entryList and re-get page
|
||||
// which will reset pageList and the UI of images for that page
|
||||
function handleMoveOrDeleteOrRestoreFileJobCompleted(job)
|
||||
{
|
||||
// this grabs the values from the object attributes of eid-0, eid-1, etc.
|
||||
const ids = job.extra.filter(item => item.name.startsWith("eid-")).map(item => item.value);
|
||||
|
||||
// find page number of first element to delete (this is the page we will return to)
|
||||
pnum=getPageNumberForId( parseInt(ids[0]) )
|
||||
|
||||
// remove amendment data
|
||||
for (const ent of ids)
|
||||
{
|
||||
id=parseInt(ent)
|
||||
removeAmendment( id )
|
||||
// remove the item in the entryList
|
||||
index=entryList.indexOf(id);
|
||||
if( index != -1 )
|
||||
entryList.splice(index, 1); // Remove the element
|
||||
else
|
||||
{
|
||||
return; // have to get out of here, or calling getPage() below will loop forever
|
||||
}
|
||||
}
|
||||
|
||||
// re-create pageList by reloading the page
|
||||
getPage(pnum,getPageFigures)
|
||||
}
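
// Example of the job.extra filtering above (made-up values): extra entries like
//     [ { name: "eid-0", value: "123" }, { name: "eid-1", value: "456" }, { name: "dest", value: "x" } ]
// yield ids = ["123", "456"]; anything not named eid-* is ignored.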
|
||||
|
||||
// POST to a check URL, that will tell us if the amendment job has completed,
|
||||
// it also calls CheckForJobs() which will fix up the Active Jobs badge,
|
||||
function checkForAmendmentJobToComplete(job_id)
|
||||
{
|
||||
CheckForJobs()
|
||||
$.ajax( { type: 'POST', data: '&job_id='+job_id, url: '/check_amend_job_status',
|
||||
success: function(res) { handleCheckAmendmentJobStatus(res); } } )
|
||||
}
|
||||
|
||||
// the status of an Amendment Job has been returned, finished is True/False
// if not finished try again in 1 second... If finished then invalidate the page
// cache and, based on job type, call the correct func to update the UI appropriately
|
||||
function handleCheckAmendmentJobStatus(data)
|
||||
{
|
||||
if( data.finished )
|
||||
{
|
||||
// invalidate the cache
|
||||
document.page.length=0
|
||||
|
||||
// transforms contain the single transformed entry data for convenience
|
||||
if( data.job.name == 'transform_image' )
|
||||
handleTransformImageJobCompleted(data.job, data.entry)
|
||||
else if ( data.job.name == 'delete_files' || data.job.name == 'restore_files' || data.job.name == 'move_files' )
|
||||
handleMoveOrDeleteOrRestoreFileJobCompleted(data.job)
|
||||
// if we are viewing this file, then just go up / back,b/c this file is "gone" from this view
|
||||
if( document.viewing )
|
||||
goOutOfViewer()
|
||||
}
|
||||
else { setTimeout( function() { checkForAmendmentJobToComplete(data.job.id) }, 1000 ); }
|
||||
}
|
||||
|
||||
// different context menu on files
|
||||
$.contextMenu({
|
||||
selector: '.entry',
|
||||
itemClickEvent: "click",
|
||||
build: function($triggerElement, e) {
|
||||
// if we are not in the highlight set, then move the highlight to this element
|
||||
if( ! $(e.currentTarget).is('.highlight') )
|
||||
{
|
||||
$('.highlight').removeClass('highlight');
|
||||
$(e.currentTarget).addClass('highlight')
|
||||
}
|
||||
// when right-clicking with no selection add one OR deal with ctrl/shift right-click as it always changes the seln
|
||||
if( NoSel() || e.ctrlKey || e.shiftKey )
|
||||
{
|
||||
DoSel(e, e.currentTarget )
|
||||
SetButtonState();
|
||||
}
|
||||
|
||||
if( FiguresOrDirsOrBoth() == "figure" )
|
||||
{
|
||||
item_list = {
|
||||
details: { name: "Details..." },
|
||||
view: { name: "View File" },
|
||||
sep: "---",
|
||||
}
|
||||
if( e.currentTarget.getAttribute('type') == 'Image' )
|
||||
{
|
||||
item_list['transform'] = {
|
||||
name: "Transform",
|
||||
items: {
|
||||
"r90": { "name" : "Rotate 90 degrees" },
|
||||
"r180": { "name" : "Rotate 180 degrees" },
|
||||
"r270": { "name" : "Rotate 270 degrees" },
|
||||
"fliph": { "name" : "Flip horizontally" },
|
||||
"flipv": { "name" : "Flip vertically" }
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
item_list['move'] = { name: "Move selected file(s) to new folder" }
|
||||
item_list['sep2'] = { sep: "---" }
|
||||
}
|
||||
else
|
||||
item_list = {
|
||||
move: { name: "Move selection(s) to new folder" }
|
||||
}
|
||||
|
||||
item_list['ai'] = {
|
||||
name: "Scan file for faces",
|
||||
items: {
|
||||
"ai-all": { name: "all" }
|
||||
}
|
||||
};
|
||||
|
||||
// Dynamically add entries for each person in the `people` array
|
||||
people.forEach(person => {
|
||||
item_list['ai'].items[`ai-${person.tag}`] = { name: person.tag };
|
||||
});
|
||||
|
||||
if( SelContainsBinAndNotBin() ) {
|
||||
item_list['both']= { name: 'Cannot delete and restore at same time', disabled: true }
|
||||
} else {
|
||||
if (e.currentTarget.getAttribute('path_type') == 'Bin' )
|
||||
item_list['undel']= { name: "Restore selected file(s)" }
|
||||
else if( e.currentTarget.getAttribute('type') != 'Directory' )
|
||||
item_list['del']= { name: "Delete Selected file(s)" }
|
||||
}
|
||||
|
||||
return {
|
||||
callback: function( key, options) {
|
||||
if( key == "details" ) { DetailsDBox() }
|
||||
if( key == "view" ) { startViewing( $(this).attr('id') ) }
|
||||
if( key == "move" ) { MoveDBox() }
|
||||
if( key == "del" ) { DelDBox('Delete') }
|
||||
if( key == "undel") { DelDBox('Restore') }
|
||||
if( key == "r90" ) { Transform(90) }
|
||||
if( key == "r180" ) { Transform(180) }
|
||||
if( key == "r270" ) { Transform(270) }
|
||||
if( key == "fliph" ) { Transform("fliph") }
|
||||
if( key == "flipv" ) { Transform("flipv") }
|
||||
if( key.startsWith("ai")) { RunAIOnSeln(key) }
|
||||
// dont flow this event through the dom
|
||||
e.stopPropagation()
|
||||
},
|
||||
items: item_list
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
// finally, for files_ip/files_sp/files_rbp - set click inside document (NOT an entry) to remove seln
|
||||
$(document).on('click', function(e) { $('.highlight').removeClass('highlight') ; SetButtonState() });
|
||||
document.page=[]
|
||||
|
||||
@@ -1,27 +1,31 @@
// This function will remove the matching amendment for this entry (id)
// can only have 1 amendment per image - it's grayed out for any other changes
function removeAmendment( id )
{
    document.amendments=document.amendments.filter(obj => obj.eid !== id)
}

// If a Transform job has finished then reset the relevant document.entries entry
// with the updated data from the DB, remove the amendment and redraw the image
function handleTransformImageJobCompleted(job, entry)
{
    removeAmendment( entry.id )
    // update viewer if we are viewing an image
    if( document.viewing )
    {
        // force reload with timestamped version of im.src
        im.src=im.src + '?t=' + new Date().getTime();
        DrawImg()
    }

    // ALWAYS update the files* div as we could go back to it from the viewer, and
    // the thumbnail needs the updated data
    idx = entryList.indexOf(entry.id)
    // replace data for this entry now it's been transformed
    document.entries[idx]=entry
    // redraw into figure html in dom
    html = createFigureHtml( entry )
    $('#'+entry.id).replaceWith( html )
}

// for each highlighted image, POST the transform with amt (90, 180, 270,
@@ -31,9 +35,25 @@ function CheckTransformJob(id,job_id)
// to finish
function Transform(amt)
{
    // we are in the viewer with 1 image only...
    if( $('#viewer_div').length && ! $('#viewer_div').hasClass('d-none') )
    {
        post_data = '&amt='+amt+'&id='+document.viewing.id
        // POST /transform for the image, grayscale the image, add throbber, & start checking for end of job
        $.ajax({ type: 'POST', data: post_data, url: '/transform', success: function(data) {
            processAmendments(data.job.amendments)
            checkForAmendmentJobToComplete(data.job.id)
        } })
    }
    else
    {
        $('.highlight').each(function( cnt, e ) {
            post_data = '&amt='+amt+'&id='+e.id
            // POST /transform for this image, grayscale the thumbnail, add throbber, & start checking for end of job
            $.ajax({ type: 'POST', data: post_data, url: '/transform', success: function(data){
                processAmendments(data.job.amendments)
                checkForAmendmentJobToComplete(data.job.id)
            } })
        } )
    }
}

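
// Flow sketch for a transform (function names as used above, behaviour inferred from this file):
// Transform(90) POSTs to /transform, processAmendments() records the pending amendments
// returned with the job, and checkForAmendmentJobToComplete() polls /check_amend_job_status
// once a second; when the job finishes, handleTransformImageJobCompleted() removes the
// amendment and redraws the entry via createFigureHtml().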
@@ -71,21 +71,14 @@ function SetActiveJobsBadge(num_jobs)
// after a 1 second timeout
function CheckForJobs()
{
    $.ajax( {
        type: 'POST', url: '/check_for_jobs',
        success: function(data) {
            // for each status, handle it/make toast in UI
            data.sts.forEach( function(el) { StatusMsg(el) } )
            SetActiveJobsBadge(data.num_active_jobs)
            // still active job(s), keep checking for them to end
            if( data.num_active_jobs > 0 ) { setTimeout( function() { CheckForJobs() }, 1000 ); }
        },
    } )
    return false;

@@ -25,6 +25,8 @@ function NewHeight()
|
||||
return im.height*gap / (im.width/window.innerWidth)
|
||||
}
|
||||
|
||||
// draw 'str' as a label above the bounding box of the face (with a white
|
||||
// transparent background to enhance readability of str)
|
||||
function DrawLabelOnFace(str)
|
||||
{
|
||||
// finish face box, need to clear out new settings for // transparent backed-name tag
|
||||
@@ -60,6 +62,11 @@ function DrawImg()
|
||||
if( im.width == 0 )
|
||||
return
|
||||
|
||||
// find any matching ammendment
|
||||
am=document.amendments.filter(obj => obj.eid === document.viewing.id)
|
||||
if( am.length )
|
||||
am=am[0]
|
||||
|
||||
canvas.width=NewWidth(im)
|
||||
canvas.height=NewHeight(im)
|
||||
|
||||
@@ -67,14 +74,32 @@ function DrawImg()
|
||||
$('#img-cap').width(canvas.width)
|
||||
|
||||
// actually draw the pixel images to the canvas at the right size
|
||||
if( grayscale )
|
||||
if (!Array.isArray(am))
|
||||
context.filter='grayscale(1)'
|
||||
context.drawImage(im, 0, 0, canvas.width, canvas.height )
|
||||
// -50 is a straight up hack, no idea why this works, but its good enough for me
|
||||
if( throbber )
|
||||
$('#throbber').attr('style', 'display:show; position:absolute; left:'+canvas.width/2+'px; top:'+(canvas.height/2-50)+'px' )
|
||||
if (!Array.isArray(am))
|
||||
{
|
||||
$('#throbber').show()
|
||||
$('#white-circle').show()
|
||||
if(am.type.which == 'img' )
|
||||
{
|
||||
$('#inside-img').attr('src', '/internal/'+am.type.what );
|
||||
$('#inside-img').show()
|
||||
}
|
||||
else
|
||||
$('#throbber').hide();
|
||||
{
|
||||
$('#inside-icon').attr('style', `color:${am.type.colour};height:64px` )
|
||||
$('#inside-icon').attr('fill', am.type.colour )
|
||||
$('#inside-icon use').attr('xlink:href', `/internal/icons.svg#${am.type.what}`);
|
||||
$('#inside-icon').show()
|
||||
}
|
||||
} else {
|
||||
$('#throbber').hide()
|
||||
$('#white-circle').hide()
|
||||
$('#inside-img').hide()
|
||||
$('#inside-icon').hide()
|
||||
}
|
||||
|
||||
// show (or not) the whole figcaption with fname in it - based on state of fname_toggle
|
||||
if( $('#fname_toggle').prop('checked' ) )
|
||||
@@ -85,13 +110,13 @@ function DrawImg()
|
||||
else
|
||||
$('.figcaption').hide()
|
||||
|
||||
// if we have faces, the enable the toggles, otherwise disable them
|
||||
// and reset model select too
|
||||
if( objs[current].faces )
|
||||
// if we have faces, the enable the toggles, otherwise disable them and reset model select too
|
||||
if( document.viewing.file_details.faces.length )
|
||||
{
|
||||
$('#faces').attr('disabled', false)
|
||||
$('#distance').attr('disabled', false)
|
||||
$('#model').val( Number(objs[current].face_model) )
|
||||
// first face is good enough as whole file has to have used same model
|
||||
$('#model').val( document.viewing.file_details.faces[0].facefile_lnk.model_used )
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -102,33 +127,37 @@ function DrawImg()
|
||||
}
|
||||
|
||||
// okay, we want faces drawn so lets do it
|
||||
if( $('#faces').prop('checked') && objs[current].faces )
|
||||
if( $('#faces').prop('checked') && document.viewing.file_details.faces )
|
||||
{
|
||||
faces=document.viewing.file_details.faces
|
||||
// draw rect on each face
|
||||
for( i=0; i<objs[current].faces.length; i++ )
|
||||
for( i=0; i<faces.length; i++ )
|
||||
{
|
||||
x = objs[current].faces[i].x / ( im.width/canvas.width )
|
||||
y = objs[current].faces[i].y / ( im.height/canvas.height )
|
||||
w = objs[current].faces[i].w / ( im.width/canvas.width )
|
||||
h = objs[current].faces[i].h / ( im.height/canvas.height )
|
||||
x = faces[i].face_left / ( im.width/canvas.width )
|
||||
y = faces[i].face_top / ( im.height/canvas.height )
|
||||
w = faces[i].w / ( im.width/canvas.width )
|
||||
h = faces[i].h / ( im.height/canvas.height )
|
||||
context.beginPath()
|
||||
context.rect( x, y, w, h )
|
||||
context.lineWidth = 2
|
||||
|
||||
// this face has an override so diff colour
|
||||
if( objs[current].faces[i].override )
|
||||
if( faces[i].fnmo.length || faces[i].ffmo.length )
|
||||
{
|
||||
context.strokeStyle = 'blue'
|
||||
DrawLabelOnFace( objs[current].faces[i].override.who )
|
||||
if( faces[i].ffmo.length )
|
||||
DrawLabelOnFace( faces[i].ffmo[0].person.tag )
|
||||
else
|
||||
DrawLabelOnFace( faces[i].fnmo[0].type.name )
|
||||
}
|
||||
else
|
||||
{
|
||||
context.strokeStyle = 'green'
|
||||
if( objs[current].faces[i].who )
|
||||
if( faces[i].refimg )
|
||||
{
|
||||
str=objs[current].faces[i].who
|
||||
str=faces[i].refimg.person.tag
|
||||
if( $('#distance').prop('checked') )
|
||||
str += "("+objs[current].faces[i].distance+")"
|
||||
str += "("+faces[i].refimg_lnk.face_distance.toFixed(2)+")"
|
||||
DrawLabelOnFace( str )
|
||||
}
|
||||
}
|
||||
@@ -156,13 +185,15 @@ function FaceToggle()
|
||||
// also deals with fullscreen if needed
|
||||
function ViewImageOrVideo()
|
||||
{
|
||||
if( objs[current].type == 'Image' )
|
||||
// can happen if no content to display
|
||||
if( ! document.viewing ) return
|
||||
if( document.viewing.type.name == 'Image' )
|
||||
{
|
||||
im.src='../' + objs[current].url
|
||||
im.src='../' + document.viewing.FullPathOnFS + '?t=' + new Date().getTime();
|
||||
$('#video_div').hide()
|
||||
if( $('#fname_toggle').prop('checked' ) )
|
||||
$('#img-cap').show()
|
||||
$('#fname_i').html(PrettyFname(objs[current].url))
|
||||
$('#fname_i').html(PrettyFname(document.viewing.FullPathOnFS))
|
||||
$('#figure').show()
|
||||
if( fullscreen )
|
||||
$('#canvas').get(0).requestFullscreen()
|
||||
@@ -170,11 +201,11 @@ function ViewImageOrVideo()
|
||||
if( document.fullscreen )
|
||||
document.exitFullscreen()
|
||||
}
|
||||
if( objs[current].type == 'Video' )
|
||||
if( document.viewing.type.name == 'Video' )
|
||||
{
|
||||
$('#figure').hide()
|
||||
$('#video').prop('src', '../' + objs[current].url )
|
||||
$('#fname_v').html(PrettyFname(objs[current].url))
|
||||
$('#video').prop('src', '../' + document.viewing.FullPathOnFS )
|
||||
$('#fname_v').html(PrettyFname(document.viewing.FullPathOnFS))
|
||||
if( $('#fname_toggle').prop('checked' ) )
|
||||
$('#img-cap').hide()
|
||||
ResizeVideo()
|
||||
@@ -189,6 +220,8 @@ function ViewImageOrVideo()
|
||||
|
||||
var offsetX,offsetY;
|
||||
|
||||
// find the edge of the canvas, so when we have a PAGE event with x,y we can see
|
||||
// where we clicked in it (PAGE.x - canvas.x to see where in canvas, etc)
|
||||
function reOffset()
|
||||
{
|
||||
var BB=$('#canvas').get(0).getBoundingClientRect();
|
||||
@@ -196,23 +229,27 @@ function reOffset()
|
||||
offsetY=BB.top;
|
||||
}
|
||||
|
||||
window.onscroll=function(e){ reOffset(); }
|
||||
window.onresize=function(e){ reOffset(); }
|
||||
|
||||
// when we are ready,
|
||||
$(document).ready( function()
|
||||
{
|
||||
var cw=$('#canvas').width;
|
||||
var ch=$('#canvas').height;
|
||||
reOffset();
|
||||
// if we scroll or resize the window, the canvas moves on the page, reset the offsets
|
||||
window.onscroll=function(e){ reOffset(); }
|
||||
window.onresize=function(e){ reOffset(); }
|
||||
|
||||
// clicking in the viewer canvas gets its own handlers to handle faces (or not)
|
||||
$.contextMenu({
|
||||
selector: '#canvas',
|
||||
trigger: 'left',
|
||||
// trigger: 'none',
|
||||
hideOnSecondTrigger: true,
|
||||
|
||||
// go through each face, and add appropriate 'left-click' menu.
|
||||
// e.g if known face, say name, offer add refimg to person, etc.
|
||||
// this is a bit complex, the item_list var has a key (which is what we
|
||||
// will do if we are chosen from the menu), and data to process the action
|
||||
build: function($triggerElement, e) {
|
||||
reOffset();
|
||||
// get mouse position relative to the canvas (left-click uses page*)
|
||||
@@ -221,30 +258,33 @@ $(document).ready( function()
|
||||
|
||||
item_list = { not_a_face: { name: "Not a face", which_face: '-1' } }
|
||||
|
||||
for( i=0; i<objs[current].faces.length; i++ )
|
||||
faces=document.viewing.file_details.faces
|
||||
for( i=0; i<faces.length; i++ )
|
||||
{
|
||||
fx = objs[current].faces[i].x / ( im.width/canvas.width )
|
||||
fy = objs[current].faces[i].y / ( im.height/canvas.height )
|
||||
fw = objs[current].faces[i].w / ( im.width/canvas.width )
|
||||
fh = objs[current].faces[i].h / ( im.height/canvas.height )
|
||||
fx = faces[i].face_left / ( im.width/canvas.width )
|
||||
fy = faces[i].face_top / ( im.height/canvas.height )
|
||||
fw = faces[i].w / ( im.width/canvas.width )
|
||||
fh = faces[i].h / ( im.height/canvas.height )
|
||||
|
||||
if( x >= fx && x <= fx+fw && y >= fy && y <= fy+fh )
|
||||
{
|
||||
if( objs[current].faces[i].override )
|
||||
if( faces[i].ffmo.length || faces[i].fnmo.length )
|
||||
{
|
||||
item_list['remove_force_match_override']={ 'name': 'Remove override for this face', 'which_face': i, 'id': objs[current].faces[i].id }
|
||||
item_list['remove_force_match_override']={ 'name': 'Remove override for this face', 'which_face': i, 'id': faces[i].id }
|
||||
}
|
||||
else if( objs[current].faces[i].who )
|
||||
else if( faces[i].refimg )
|
||||
{
|
||||
item_list['match']={ 'name': objs[current].faces[i].who, 'which_face': i, 'id': objs[current].faces[i].id }
|
||||
item_list['wrong_person']={ 'name': 'wrong person', 'which_face': i, 'id': objs[current].faces[i].id }
|
||||
item_list['match']={ 'name': faces[i].refimg.person.tag, 'which_face': i, 'id': faces[i].id }
|
||||
item_list['match_add_refimg']={ 'name': 'Add this as refimg for ' + faces[i].refimg.person.tag,
|
||||
'person_id': faces[i].refimg.person.id, 'who': faces[i].refimg.person.tag, 'which_face': i, 'id': faces[i].id, }
|
||||
item_list['wrong_person']={ 'name': 'wrong person', 'which_face': i, 'id': faces[i].id }
|
||||
}
|
||||
else
|
||||
{
|
||||
item_list['no_match_new_person']={ 'name': 'Add as reference image to NEW person', 'which_face': i, 'id': objs[current].faces[i].id }
|
||||
item_list['no_match_new_refimg']={ 'name': 'Add as reference image to EXISTING person', 'which_face': i, 'id': objs[current].faces[i].id }
|
||||
item_list['no_match_new_person']={ 'name': 'Add as reference image to NEW person', 'which_face': i, 'id': faces[i].id }
|
||||
item_list['no_match_new_refimg']={ 'name': 'Add as reference image to EXISTING person', 'which_face': i, 'id': faces[i].id }
|
||||
for( var el in NMO ) {
|
||||
item_list['NMO_'+el]={'type_id': NMO[el].type_id, 'name': 'Override: ' + NMO[el].name, 'which_face': i, 'id': objs[current].faces[i].id }
|
||||
item_list['NMO_'+el]={'type_id': NMO[el].id, 'name': 'Override: ' + NMO[el].name, 'which_face': i, 'id': faces[i].id }
|
||||
}
|
||||
}
|
||||
delete item_list['not_a_face']
|
||||
@@ -264,26 +304,15 @@ $(document).ready( function()
|
||||
} )
|
||||
} );
|
||||
|
||||
// quick wrapper function to make calling this ajax code simpler in SearchForPerson
|
||||
// POST to the server to force a match for this face to person_id
|
||||
// FIXME: could I not pass person_id, and use // ...[item[key].which_face].refimg.person.id
|
||||
function OverrideForceMatch( person_id, key )
|
||||
{
|
||||
// we have type_id passed in, so dig the NMO out, and use that below (its really just for name, but in case we change that in the DB)
|
||||
for( el in NMO )
|
||||
{
|
||||
if( NMO[el].type_id == item[key].type_id )
|
||||
{
|
||||
fm_idx=el
|
||||
break
|
||||
}
|
||||
}
|
||||
ofm='&person_id='+person_id+'&face_id='+item[key].id
|
||||
$.ajax({ type: 'POST', data: ofm, url: '/add_force_match_override', success: function(data) {
|
||||
objs[current].faces[item[key].which_face].override={}
|
||||
objs[current].faces[item[key].which_face].override.who=data.person_tag
|
||||
objs[current].faces[item[key].which_face].override.distance='N/A'
|
||||
objs[current].faces[item[key].which_face].override.type_id=NMO[fm_idx].id
|
||||
objs[current].faces[item[key].which_face].override.type_name=NMO[fm_idx].name
|
||||
|
||||
document.viewing.file_details.faces[item[key].which_face].ffmo=[]
|
||||
document.viewing.file_details.faces[item[key].which_face].ffmo[0]={}
|
||||
document.viewing.file_details.faces[item[key].which_face].ffmo[0].person=data.person
|
||||
$('#dbox').modal('hide')
|
||||
$('#faces').prop('checked',true)
|
||||
DrawImg()
|
||||
@@ -292,6 +321,23 @@ function OverrideForceMatch( person_id, key )
|
||||
} )
|
||||
}
|
||||
|
||||
// function that handles the POSTed data that comes back when we add a
|
||||
// reference image to a new or existing person (right-click on a face)
|
||||
// used in success callbacks from CreatePersonAndRefimg() and AddRefimgTo()
|
||||
function handleAddRefimgData(key, data)
|
||||
{
|
||||
document.viewing.file_details.faces[item[key].which_face].refimg=data.refimg
|
||||
document.viewing.file_details.faces[item[key].which_face].refimg_lnk={}
|
||||
// if we used this img, for now set distance to 0 - it is an exact match!
|
||||
document.viewing.file_details.faces[item[key].which_face].refimg_lnk.face_distance=0.0
|
||||
$('#dbox').modal('hide')
|
||||
$('#faces').prop('checked',true)
|
||||
DrawImg()
|
||||
CheckForJobs()
|
||||
}
|
||||
|
||||
// when we right-click a face and make a new person, this code creates and
|
||||
// associates the face
|
||||
function CreatePersonAndRefimg( key )
|
||||
{
|
||||
d='&face_id='+item[key].id
|
||||
@@ -300,29 +346,17 @@ function CreatePersonAndRefimg( key )
|
||||
+'&surname='+$('#surname').val()
|
||||
+'&refimg_data='+item[key].refimg_data
|
||||
$.ajax({ type: 'POST', data: d, url: '/match_with_create_person',
|
||||
success: function(data) {
|
||||
objs[current].faces[item[key].which_face].who=data.who
|
||||
objs[current].faces[item[key].which_face].distance=data.distance
|
||||
$('#dbox').modal('hide')
|
||||
$('#faces').prop('checked',true)
|
||||
DrawImg()
|
||||
CheckForJobs()
|
||||
}
|
||||
success: function(data) { handleAddRefimgData(key, data ) },
|
||||
})
|
||||
}
|
||||
|
||||
// when we right-click a face and connect to an existing person, this connects
|
||||
// the refimg and associates the face
|
||||
function AddRefimgTo( person_id, key, search )
|
||||
{
|
||||
d='&face_id='+item[key].id+'&person_id='+person_id+'&refimg_data='+item[key].refimg_data+'&search='+search
|
||||
$.ajax({ type: 'POST', data: d, url: '/add_refimg_to_person',
|
||||
success: function(data) {
|
||||
objs[current].faces[item[key].which_face].who=data.who
|
||||
objs[current].faces[item[key].which_face].distance=data.distance
|
||||
$('#dbox').modal('hide')
|
||||
$('#faces').prop('checked',true)
|
||||
DrawImg()
|
||||
CheckForJobs()
|
||||
}
|
||||
success: function(data) { handleAddRefimgData(key, data ) },
|
||||
})
|
||||
}
|
||||
|
||||
@@ -339,8 +373,7 @@ function SearchForPerson(content, key, face_id, face_pos, type_id)
|
||||
for( var el in data ) {
|
||||
content+='<div class="row">'
|
||||
var person = data[el];
|
||||
// NMO_1 is a non-match-override type_id==1 (or force match to existing person)
|
||||
if( key == "NMO_1" )
|
||||
if( item[key].name == "Override: Manual match to existing person" )
|
||||
{
|
||||
func='OverrideForceMatch('+person.id+',\''+key+'\' )'
|
||||
content+= '<div class="col">' + person.tag + ' (' + person.firstname+' '+person.surname+ ') </div>'
|
||||
@@ -363,17 +396,19 @@ function SearchForPerson(content, key, face_id, face_pos, type_id)
|
||||
return false
|
||||
}
|
||||
|
||||
// if we force a match, this func allows us to POST to the server to remove the override
|
||||
function RemoveOverrideForceMatch(face_pos)
|
||||
{
|
||||
if( objs[current].faces[face_pos].override )
|
||||
who=objs[current].faces[face_pos].override.who
|
||||
if( document.viewing.file_details.faces[face_pos].ffmo.length )
|
||||
who=document.viewing.file_details.faces[face_pos].ffmo[0].person.tag
|
||||
else
|
||||
who=objs[current].faces[face_pos].who
|
||||
who=document.viewing.file_details.faces[face_pos].refimg.person.tag
|
||||
|
||||
d='&face_id='+objs[current].faces[face_pos].id+'&person_tag='+who+'&file_eid='+current
|
||||
d='&face_id='+document.viewing.file_details.faces[face_pos].id+'&person_tag='+who+'&file_eid='+document.viewing.id
|
||||
$.ajax({ type: 'POST', data: d, url: '/remove_force_match_override',
|
||||
success: function(data) {
|
||||
delete objs[current].faces[face_pos].override
|
||||
// force/delete the ffmo cleanly
|
||||
document.viewing.file_details.faces[face_pos].ffmo.length=0
|
||||
$('#dbox').modal('hide')
|
||||
DrawImg()
|
||||
CheckForJobs()
|
||||
@@ -383,12 +418,13 @@ function RemoveOverrideForceMatch(face_pos)
|
||||
return false
|
||||
}
|
||||
|
||||
// if we force NO match, this func allows us to POST to the server to remove the override
|
||||
function RemoveOverrideNoMatch(face_pos, type_id)
|
||||
{
|
||||
d='&face_id='+objs[current].faces[face_pos].id+'&type_id='+type_id
|
||||
d='&face_id='+document.viewing.file_details.faces[face_pos].id+'&type_id='+type_id
|
||||
$.ajax({ type: 'POST', data: d, url: '/remove_no_match_override',
|
||||
success: function(data) {
|
||||
delete objs[current].faces[face_pos].override
|
||||
document.viewing.file_details.faces[face_pos].fnmo.length=0
|
||||
$('#dbox').modal('hide')
|
||||
DrawImg()
|
||||
CheckForJobs()
|
||||
@@ -398,16 +434,13 @@ function RemoveOverrideNoMatch(face_pos, type_id)
|
||||
return false
|
||||
}
|
||||
|
||||
// POST to the server to force NO match for this face
|
||||
function AddNoMatchOverride(type_id, face_id, face_pos, type_id)
|
||||
{
|
||||
d='&type_id='+type_id+'&face_id='+face_id
|
||||
$.ajax({ type: 'POST', data: d, url: '/add_no_match_override',
|
||||
success: function(data) {
|
||||
objs[current].faces[face_pos].override={}
|
||||
objs[current].faces[face_pos].override.who=NMO[type_id].name
|
||||
objs[current].faces[face_pos].override.distance='N/A'
|
||||
objs[current].faces[face_pos].override.type_id=type_id
|
||||
objs[current].faces[face_pos].override.type_name=NMO[type_id].name
|
||||
document.viewing.file_details.faces[face_pos].fnmo[0]=data
|
||||
$('#dbox').modal('hide')
|
||||
$('#faces').prop('checked',true)
|
||||
DrawImg()
|
||||
@@ -416,6 +449,9 @@ function AddNoMatchOverride(type_id, face_id, face_pos, type_id)
|
||||
} )
|
||||
}
|
||||
|
||||
// generate html for the appropriate content to search for a person when adding
|
||||
// override DBox. It has a button that when clicked calls SearchForPerson() which
|
||||
// POSTs to the server, and fills in the 'search_person_results' div with content
|
||||
function AddSearch( content, key, face_pos )
|
||||
{
|
||||
html='<h5>search for existing person:</h5>'
|
||||
@@ -455,17 +491,17 @@ function FaceDBox(key, item)
|
||||
div+='</div><div class="col-6">'
|
||||
if ( key == 'remove_force_match_override' )
|
||||
{
|
||||
if( objs[current].faces[face_pos].override.type_name == 'Manual match to existing person' )
|
||||
div+='<div class="row col-12">remove this override (force match to: ' + objs[current].faces[face_pos].override.who + ')</div>'
|
||||
if( document.viewing.file_details.faces[face_pos].ffmo.length )
|
||||
div+='<div class="row col-12">remove this override (force match to: ' + document.viewing.file_details.faces[face_pos].ffmo[0].person.tag + ')</div>'
|
||||
else
|
||||
div+='<div class="row col-12">remove this override (no match)</div>'
|
||||
div+='<div class="row col-12">remove this override (' + document.viewing.file_details.faces[face_pos].fnmo[0].type.name + ')</div>'
|
||||
div+='<div class="row">'
|
||||
div+='<button class="btn btn-outline-info col-6" type="button" onClick="$(\'#dbox\').modal(\'hide\'); return false">Cancel</button>'
|
||||
div+='<button class="btn btn-outline-danger col-6" type="button" '
|
||||
if( objs[current].faces[face_pos].override.type_name == 'Manual match to existing person' )
|
||||
if( document.viewing.file_details.faces[face_pos].ffmo.length )
|
||||
div+='onClick="RemoveOverrideForceMatch(' +face_pos+ ')">Remove</button>'
|
||||
else
|
||||
div+='onClick="RemoveOverrideNoMatch(' +face_pos+','+objs[current].faces[face_pos].override.type_id+ ')">Remove</button>'
|
||||
div+='onClick="RemoveOverrideNoMatch(' +face_pos+','+document.viewing.file_details.faces[face_pos].fnmo[0].type.id+ ')">Remove</button>'
|
||||
div+='</div>'
|
||||
}
|
||||
if ( key == 'no_match_new_person' )
|
||||
@@ -494,10 +530,22 @@ function FaceDBox(key, item)
|
||||
{
|
||||
div+=AddSearch( 'Click one of the link(s) below to add this face as a reference image to the person:<br><br>', key, face_pos );
|
||||
}
|
||||
if ( key == 'match_add_refimg' )
|
||||
{
|
||||
func='AddRefimgTo('+item[key]['person_id']+',\''+key+'\''
|
||||
func_sn=func+ ', true )'
|
||||
func_ao=func+ ', false )'
|
||||
div+="Confirm you wish to add this face as a reference image for " + item[key]['who']
|
||||
div+= '<div class="col">' + item[key]['who'] + '</div><div class="col input-group">'
|
||||
div+= '<button onClick="'+func_sn+'" class="btn btn-success py-1 input-group-prepend">Add & search now</button> '
|
||||
div+= '<button onClick="'+func_ao+'" class="btn btn-outline-success py-1 input-group-append">Add only</button></div>'
|
||||
|
||||
}
|
||||
if ( key == 'wrong_person' )
|
||||
{
|
||||
div+='<br>wrong person, so mark this as the wrong person/refimg connection, for face#' + item[key]['which_face']
|
||||
div+='<br>face db id: ' + item[key]['id']
|
||||
div += '<br>not yet'
|
||||
}
|
||||
if( /NMO_/.test(key) )
|
||||
{
|
||||
@@ -527,7 +575,7 @@ function FaceDBox(key, item)
|
||||
// pops results up in a dbox
|
||||
function JoblogSearch()
|
||||
{
|
||||
data="eid="+current
|
||||
data="eid="+document.viewing.id
|
||||
$.ajax({ type: 'POST', data: data, url: '/joblog_search', success: function(res) {
|
||||
data = JSON.parse(res)
|
||||
div ='<div><table class="table table-striped table-sm sm-txt">'
|
||||
@@ -544,3 +592,168 @@ function JoblogSearch()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// helper func to reset the src on the video div
|
||||
function setVideoSource(newSrc) {
|
||||
$('#videoSource').attr('src', newSrc);
|
||||
$('#video')[0].load();
|
||||
}
|
||||
|
||||
// function called when we get another page from inside the viewer
|
||||
function getPageViewer(res, viewingIdx)
|
||||
{
|
||||
document.viewing=document.entries[viewingIdx]
|
||||
// update viewing, arrows and image/video too
|
||||
ViewImageOrVideo()
|
||||
}
|
||||
|
||||
// handler used when we double click an entry to show it in the viewer
|
||||
function dblClickToViewEntry(id) {
|
||||
$('#files_div').addClass('d-none')
|
||||
$('#viewer_div').removeClass('d-none')
|
||||
setEntryById( id )
|
||||
ViewImageOrVideo()
|
||||
}
|
||||
|
||||
// quick function that allows us to go out of the viewer and back; the viewer comes from files_ip/sp
|
||||
// so just redraw the page with drawPageOfFigures() as we have all the data
|
||||
function goOutOfViewer()
|
||||
{
|
||||
// if this returns -1, we have used arrows to go onto a new page(s)
|
||||
if( getPageNumberForId( $('#figures').find('.figure').first().prop('id') ) == -1 )
|
||||
drawPageOfFigures()
|
||||
|
||||
// hide viewer div, then show files_div
|
||||
$('#viewer_div').addClass('d-none')
|
||||
$('#files_div').removeClass('d-none')
|
||||
// no longer viewing an image too
|
||||
document.viewing=null
|
||||
}
|
||||
|
||||
// change the viewer to the previous entry (handle page change too)
|
||||
function getPreviousEntry() {
|
||||
var currentIndex = entryList.indexOf(document.viewing.id);
|
||||
|
||||
oldPageOffset=Math.floor(currentIndex / OPT.how_many)
|
||||
if (currentIndex > 0) {
|
||||
currentIndex--;
|
||||
pageOffset=Math.floor(currentIndex / OPT.how_many)
|
||||
currentIndex=currentIndex-(pageOffset*OPT.how_many)
|
||||
// prev page, load it
if( oldPageOffset != pageOffset )
// prev page is pageOffset+1 now
|
||||
getPage(pageOffset+1,getPageViewer,currentIndex)
|
||||
else
|
||||
document.viewing=document.entries[currentIndex]
|
||||
}
|
||||
}
|
||||
|
||||
// change the viewer to the next entry (handle page change too)
|
||||
function getNextEntry() {
|
||||
var currentIndex = entryList.indexOf(document.viewing.id);
|
||||
|
||||
oldPageOffset=Math.floor(currentIndex / OPT.how_many)
|
||||
if (currentIndex < entryList.length - 1) {
|
||||
currentIndex++
|
||||
pageOffset=Math.floor(currentIndex / OPT.how_many)
|
||||
currentIndex=currentIndex-(pageOffset*OPT.how_many)
|
||||
// next page, load it
|
||||
if( oldPageOffset != pageOffset )
|
||||
// next page is pageOffset+1 now
|
||||
getPage(pageOffset+1,getPageViewer,currentIndex)
|
||||
else
|
||||
document.viewing=document.entries[currentIndex]
|
||||
}
|
||||
}
|
||||
|
||||
// check if we are viewing the very first entry (helps to disable la)
|
||||
function entryIsAtStart() {
|
||||
return document.viewing.id === entryList[0];
|
||||
}
|
||||
|
||||
// check if we are viewing the very last entry (helps to disable ra)
|
||||
function entryIsAtEnd() {
|
||||
return document.viewing.id === entryList[entryList.length - 1];
|
||||
}
|
||||
|
||||
// helper func to ensure document.viewing is the right entry from document.entries array
|
||||
function setEntryById(id) {
|
||||
var currentIndex = entryList.indexOf(parseInt(id));
|
||||
// if we are on a different page, adjust as document.entries only has <= how_many
|
||||
pageOffset=Math.floor(currentIndex / OPT.how_many)
|
||||
currentIndex = currentIndex-(pageOffset*OPT.how_many)
|
||||
document.viewing=document.entries[currentIndex]
|
||||
}
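
// Worked example (illustrative): with OPT.how_many = 100 and the clicked id at position 250
// in entryList, pageOffset is 2 and document.viewing becomes document.entries[50], i.e. the
// 51st entry of the currently loaded page.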
|
||||
|
||||
// disable la button if we are viewing first entry and/or ra button if we are viewing last entry
|
||||
function setDisabledForViewingNextPrevBttons()
|
||||
{
|
||||
$('#la').attr('disabled', entryIsAtStart());
|
||||
$('#ra').attr('disabled', entryIsAtEnd());
|
||||
}
|
||||
|
||||
// when we go into the viewer, the keybindings are set here for items like 'f' for face box/name
|
||||
function addViewerKeyHandler() {
|
||||
// allow a keypress on the viewer_div
|
||||
$(document).keydown(function(event) {
|
||||
// if dbox is visible, dont process this hot-key, we are inputting text into inputs instead
|
||||
if( $("#dbox").is(':visible') )
|
||||
return
|
||||
switch (event.key)
|
||||
{
|
||||
case "Left": // IE/Edge specific value
|
||||
case "ArrowLeft":
|
||||
$('#la').click()
|
||||
break;
|
||||
case "Right": // IE/Edge specific value
|
||||
case "ArrowRight":
|
||||
$('#ra').click()
|
||||
break;
|
||||
case "d":
|
||||
$('#distance').click()
|
||||
break;
|
||||
case "f":
|
||||
$('#faces').click()
|
||||
break;
|
||||
case "n":
|
||||
$('#fname_toggle').click()
|
||||
break;
|
||||
case "F":
|
||||
fullscreen=!document.fullscreen
|
||||
ViewImageOrVideo()
|
||||
break;
|
||||
case "l":
|
||||
JoblogSearch()
|
||||
break;
|
||||
case "Delete":
|
||||
$('#del').click()
|
||||
default:
|
||||
return; // Quit when this doesn't handle the key event.
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// left arrow onclick handler to go to prev image from inside the viewer
|
||||
function prevImageInViewer()
|
||||
{
|
||||
getPreviousEntry()
|
||||
setDisabledForViewingNextPrevBttons()
|
||||
ViewImageOrVideo()
|
||||
}
|
||||
|
||||
// right arrow onclick handler to go to next image from inside the viewer
|
||||
function nextImageInViewer()
|
||||
{
|
||||
getNextEntry()
|
||||
setDisabledForViewingNextPrevBttons()
|
||||
ViewImageOrVideo()
|
||||
}
|
||||
|
||||
// wrapper func to start the viewer - needed as we have a dbl-click & View file
|
||||
// to start the viewer
|
||||
function startViewing(eid)
|
||||
{
|
||||
dblClickToViewEntry( eid );
|
||||
setDisabledForViewingNextPrevBttons();
|
||||
addViewerKeyHandler()
|
||||
}
|
||||
|
||||
@@ -1,37 +0,0 @@
// POST to a check URL, that will tell us if the transformation has completed,
// if not, try again in 1 second... If it has finished then reset the thumbnail
// to full colour, put it back to being an entry and reset the thumbnail to the
// newly created one that was sent back in the response to the POST
function CheckTransformJob(id,job_id)
{
  CheckForJobs()
  $.ajax(
  {
    type: 'POST', data: '&job_id='+job_id, url: '/check_transform_job', success: function(data) {
      if( data.finished )
      {
        // stop throbber, remove grayscale & then force reload with timestamped version of im.src
        grayscale=0
        throbber=0
        im.src=im.src + '?t=' + new Date().getTime();
        return false;
      }
      else
      {
        setTimeout( function() { CheckTransformJob(id,job_id) }, 1000,id, job_id );
      }
    },
  } )
}

// for each highlighted image, POST the transform with amt (90, 180, 270,
// fliph, flipv) which will let the job manager know what to do to this file.
// we also grayscale the thumbnail out, remove the entry class for now, show
// the spinning wheel, and finally kick off the checking for the transform job
// to finish
function Transform(amt)
{
  post_data = '&amt='+amt+'&id='+current
  // send /transform for this image, grayscale the thumbnail, add color spinning wheel overlay, and start checking for job end
  $.ajax({ type: 'POST', data: post_data, url: '/transform', success: function(data) { grayscale=1; throbber=1; DrawImg(); CheckTransformJob(current,data.job_id); return false; } })
}
BIN  internal/pa-logo.png  (new file, 18 KiB)
BIN  internal/rot180-invert.png  (new file, 34 KiB)
BIN  internal/rot180.png  (new file, 9.3 KiB)
BIN  internal/rot270-invert.png  (new file, 36 KiB)
BIN  internal/rot270.png  (new file, 9.2 KiB)
BIN  internal/rot90-invert.png  (new file, 30 KiB)
BIN  internal/rot90.png  (new file, 8.4 KiB)
107  internal/rot90.svg  (new file)
@@ -0,0 +1,107 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||
|
||||
<svg
|
||||
width="265mm"
|
||||
height="297mm"
|
||||
viewBox="0 0 265 297"
|
||||
version="1.1"
|
||||
id="svg5"
|
||||
inkscape:version="1.2.2 (b0a8486541, 2022-12-01)"
|
||||
sodipodi:docname="rot90.svg"
|
||||
inkscape:export-filename="rot90.svg"
|
||||
inkscape:export-xdpi="96"
|
||||
inkscape:export-ydpi="96"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<sodipodi:namedview
|
||||
id="namedview7"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#000000"
|
||||
borderopacity="0.25"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:deskcolor="#d1d1d1"
|
||||
inkscape:document-units="mm"
|
||||
showgrid="false"
|
||||
inkscape:zoom="0.914906"
|
||||
inkscape:cx="222.97373"
|
||||
inkscape:cy="261.22902"
|
||||
inkscape:window-width="3840"
|
||||
inkscape:window-height="2128"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="0"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="svg5" />
|
||||
<defs
|
||||
id="defs2">
|
||||
<marker
|
||||
style="overflow:visible"
|
||||
id="Arrow2"
|
||||
refX="0"
|
||||
refY="0"
|
||||
orient="95"
|
||||
inkscape:stockid="Arrow2"
|
||||
markerWidth="2.7"
|
||||
markerHeight="3.964"
|
||||
viewBox="0 0 7.7 5.6"
|
||||
inkscape:isstock="true"
|
||||
inkscape:collect="always"
|
||||
preserveAspectRatio="none">
|
||||
<path
|
||||
transform="scale(0.7)"
|
||||
d="M -2,-4 9,0 -2,4 c 2,-2.33 2,-5.66 0,-8 z"
|
||||
style="fill:context-stroke;fill-rule:evenodd;stroke:none"
|
||||
id="arrow2L" />
|
||||
</marker>
|
||||
</defs>
|
||||
<g
|
||||
inkscape:label="Layer 1"
|
||||
inkscape:groupmode="layer"
|
||||
id="layer1">
|
||||
<ellipse
|
||||
style="font-variation-settings:normal;display:inline;opacity:1;fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:10;stroke-linecap:round;stroke-linejoin:bevel;stroke-miterlimit:4;stroke-dasharray:20,50;stroke-dashoffset:2;stroke-opacity:1;paint-order:markers stroke fill;stop-color:#000000;stop-opacity:1"
|
||||
id="path2244"
|
||||
cx="131.0041"
|
||||
cy="137.873"
|
||||
rx="98.982185"
|
||||
ry="108.3931"
|
||||
inkscape:label="dashes" />
|
||||
</g>
|
||||
<g
|
||||
inkscape:groupmode="layer"
|
||||
id="layer2"
|
||||
inkscape:label="270-arc" />
|
||||
<path
|
||||
style="font-variation-settings:normal;display:inline;fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:12;stroke-linecap:round;stroke-linejoin:bevel;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker-end:url(#Arrow2);paint-order:markers stroke fill;stop-color:#000000"
|
||||
id="path2244-3"
|
||||
inkscape:label="arc"
|
||||
sodipodi:type="arc"
|
||||
sodipodi:cx="131.08505"
|
||||
sodipodi:cy="137.8826"
|
||||
sodipodi:rx="98.982185"
|
||||
sodipodi:ry="108.3931"
|
||||
sodipodi:start="4.7127986"
|
||||
sodipodi:end="0"
|
||||
sodipodi:arc-type="arc"
|
||||
d="M 131.1256,29.489511 A 98.982185,108.3931 0 0 1 230.06724,137.8826"
|
||||
inkscape:export-filename="rot90.svg"
|
||||
inkscape:export-xdpi="96"
|
||||
inkscape:export-ydpi="96"
|
||||
sodipodi:open="true" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:90.3111px;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;fill:#000000;stroke:none;stroke-width:4;stroke-linecap:round;stroke-linejoin:bevel;stroke-dasharray:none;stroke-dashoffset:0;paint-order:markers stroke fill"
|
||||
x="74.990334"
|
||||
y="174.16937"
|
||||
id="text2468"
|
||||
inkscape:label="270-text"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan2466"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:95.25px;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;fill:#000000;stroke:none;stroke-width:4;stroke-dasharray:none"
|
||||
x="74.990334"
|
||||
y="174.16937">90</tspan></text>
|
||||
</svg>
|
||||
|
After size: 4.2 KiB (internal/rot90.svg)
BIN  internal/white-circle.png  (new file, 6.1 KiB)
66  job.py
@@ -3,12 +3,14 @@ from flask_wtf import FlaskForm
from flask import request, render_template, redirect, make_response, jsonify, url_for
from settings import Settings
from main import db, app, ma
from sqlalchemy import Sequence, func
from sqlalchemy import Sequence, func, select
from sqlalchemy.orm import joinedload
from sqlalchemy.exc import SQLAlchemyError
from datetime import datetime, timedelta
import pytz
import socket
from shared import PA, PA_JOB_MANAGER_HOST, PA_JOB_MANAGER_PORT, NEWEST_LOG_LIMIT, OLDEST_LOG_LIMIT
from amend import EntryAmendment, inAmendmentTypes
from flask_login import login_required, current_user
from sqlalchemy.dialects.postgresql import INTERVAL
from sqlalchemy.sql.functions import concat
@@ -57,10 +59,12 @@ class Job(db.Model):

    extra = db.relationship( "JobExtra")
    logs = db.relationship( "Joblog")
    amendments = db.relationship("EntryAmendment", back_populates="job")

    def __repr__(self):
        return "<id: {}, start_time: {}, last_update: {}, name: {}, state: {}, num_files: {}, current_file_num: {}, current_file: {}, pa_job_state: {}, wait_for: {}, extra: {}, logs: {}>".format(self.id, self.start_time, self.last_update, self.name, self.state, self.num_files, self.current_file_num, self.current_file, self.pa_job_state, self.wait_for, self.extra, self.logs)


################################################################################
# Class describing PA_JobManager_Message and in the DB (via sqlalchemy)
# the job manager can send a message back to the front end (this code) via the
@@ -78,18 +82,11 @@ class PA_JobManager_Message(PA,db.Model):
    job = db.relationship ("Job" )


################################################################################
# GetJM_Message: used in html to display any message for this front-end
################################################################################
def GetJM_Message():
    msg=PA_JobManager_Message.query.first()
    return msg

################################################################################
# Used in main html to show a red badge of # jobs to draw attention there are
# active jobs being processed in the background
################################################################################
def GetNumActiveJobs():
def getNumActiveJobs():
    ret=Job.query.filter(Job.pa_job_state != 'Completed').with_entities(func.count(Job.id).label('count') ).first()
    return ret[0]

@@ -121,8 +118,23 @@ def NewJob(name, num_files="0", wait_for=None, jex=None, desc="No description pr

    db.session.add(job)
    db.session.commit()
    SetFELog( message=f'Created <a class="link-light" href="/job/{job.id}">Job #{job.id}</a> to {desc}', level="success" )

    # if this job changes an eid we store that in DB and client shows until it finishes the job
    at_id = inAmendmentTypes(job)
    if at_id:
        if job.name == 'transform_image':
            id=[jex.value for jex in job.extra if jex.name == "id"][0]
            ea=EntryAmendment( eid=id, job_id=job.id, amend_type=at_id )
            db.session.add(ea)
            job.amendments.append(ea)
        elif job.name == 'delete_files' or job.name == 'restore_files' or job.name == 'move_files':
            for j in jex:
                if 'eid-' in j.name:
                    ea=EntryAmendment( eid=j.value, job_id=job.id, amend_type=at_id )
                    db.session.add(ea)
                    job.amendments.append(ea)

    SetFELog( message=f'Created <a class="link-light" href="/job/{job.id}">Job #{job.id}</a> to {desc}', level="success" )
    WakePAJobManager(job.id)
    return job

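For context, the EntryAmendment rows created above are what lets the client keep an entry marked as "changing" until the job that touches it finishes. A minimal sketch of how a caller might look up the pending amendments for one entry; the helper name pending_amendments_for is illustrative and not part of this change, the session handle is assumed:

from sqlalchemy import select

def pending_amendments_for(db_session, eid):
    # return any EntryAmendment rows still recorded against this entry id
    stmt = select(EntryAmendment).where(EntryAmendment.eid == eid)
    return db_session.execute(stmt).scalars().all()

# e.g. the front end could keep the thumbnail greyed out while this list is non-empty:
#   pending = pending_amendments_for(db.session, eid=42)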
@@ -178,7 +190,9 @@ def jobs():

        jobs = Job.query.order_by(Job.id.desc()).all()
    else:
        page_title='Job list (recent)'
        jobs = Job.query.filter( Job.last_update >= (func.now() - func.cast(concat(settings.job_archive_age, 'DAYS'), INTERVAL)) ).order_by(Job.id.desc()).all()
        # work out cutoff in python (used to do this in sql and it was too slow)
        cutoff = datetime.now() - timedelta(days=settings.job_archive_age)
        jobs = Job.query.filter( Job.last_update >= cutoff ).order_by(Job.id.desc()).all()
    return render_template("jobs.html", jobs=jobs, page_title=page_title)

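The hunk above swaps a Postgres-side interval cast for a cutoff computed once in Python, so the database only has to evaluate a simple "last_update >= constant" comparison. A rough sketch of the idea in isolation, using the Job model from this file; the session wiring is assumed:

from datetime import datetime, timedelta

def recent_jobs(db_session, archive_age_days):
    # compute the boundary once in Python ...
    cutoff = datetime.now() - timedelta(days=archive_age_days)
    # ... so the filter becomes a plain comparison against a constant
    return (db_session.query(Job)
            .filter(Job.last_update >= cutoff)
            .order_by(Job.id.desc())
            .all())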
@@ -188,7 +202,7 @@ def jobs():

@app.route("/job/<id>", methods=["GET","POST"])
@login_required
def joblog(id):
    joblog = Job.query.get(id)
    joblog = db.session.get(Job,id)

    if request.method == 'POST':
        logs=Joblog.query.filter(Joblog.job_id==id).order_by(Joblog.log_date).all()
@@ -285,7 +299,8 @@ def joblog_search():

    from sqlalchemy import text

    eid=request.form['eid']
    ent=Entry.query.get(eid)
    stmt = select(Entry).where(Entry.id == eid)
    ent = db.session.scalars(stmt).one_or_none()
    logs=Joblog.query.join(Job).filter(Joblog.log.ilike(text(f"'%%{ent.name}%%'"))).with_entities(Joblog.log, Job.id, Job.name, Job.state, Joblog.log_date).all()

    # turn DB output into json and return it to the f/e
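Several hunks in this file move from the legacy Query.get() pattern to the SQLAlchemy 2.0 style calls, db.session.get() and select(). A side-by-side sketch of the two replacements, using the Entry model already referenced here; the function names are illustrative and the session handle is assumed:

from sqlalchemy import select

def fetch_entry_by_pk(db_session, eid):
    # replaces the legacy Entry.query.get(eid) primary-key lookup
    return db_session.get(Entry, eid)

def fetch_entry_by_criteria(db_session, eid):
    # explicit about expecting zero or one matching row
    stmt = select(Entry).where(Entry.id == eid)
    return db_session.scalars(stmt).one_or_none()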
@@ -313,14 +328,22 @@ def joblog_search():

@app.route("/check_for_jobs", methods=["POST"])
@login_required
def check_for_jobs():
    num=GetNumActiveJobs()
    from files import job_schemas

    num=getNumActiveJobs()
    messages = PA_JobManager_Message.query.all()
    sts=[]
    for msg in PA_JobManager_Message.query.all():
    for msg in messages:
        u=''
        if 'Job #' not in msg.message and msg.job_id:
            u='<a class="link-light" href="' + url_for('joblog', id=msg.job_id) + '">Job #' + str(msg.job_id) + '</a>: '
        sts.append( { 'id': msg.id, 'message': u+msg.message, 'level': msg.level, 'job_id': msg.job_id, 'persistent': msg.persistent, 'cant_close': msg.cant_close } )
    return make_response( jsonify( num_active_jobs=num, sts=sts ) )

    # get jobs mentioned in messages, as the client may need to process them for the UI
    job_list=[obj.job_id for obj in messages]
    stmt = select(Job).options(joinedload(Job.amendments)).where(Job.id.in_(job_list))
    jobs=db.session.execute(stmt).unique().scalars().all()
    return make_response( jsonify( num_active_jobs=num, sts=sts, jobs=job_schemas.dump(jobs) ) )

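The joinedload(Job.amendments) option above pulls each job's amendment rows in the same SELECT, so serialising the response does not trigger one extra query per job. A minimal sketch of the same pattern in isolation; the session handle is assumed:

from sqlalchemy import select
from sqlalchemy.orm import joinedload

def jobs_with_amendments(db_session, job_ids):
    # one round trip: jobs joined to their amendments; unique() collapses the
    # duplicate Job rows the join produces (one per amendment)
    stmt = select(Job).options(joinedload(Job.amendments)).where(Job.id.in_(job_ids))
    return db_session.execute(stmt).unique().scalars().all()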
###############################################################################
# /clear_msg -> POST -> clears out a F/E message based on passed in <id>
@@ -344,3 +367,14 @@ def _jinja2_filter_datetime(date, fmt=None):

        return date.strftime("%d/%m/%Y %I:%M:%S %p")
    else:
        return "N/A"

################################################################################
# allow a way to force the messages to be deleted if really needed - it's a bit
# lame, but a quick fix
################################################################################
@app.route('/force_clear')
@login_required
def force_clear():
    PA_JobManager_Message.query.delete()
    db.session.commit()
    return redirect("/")

24  main.py
@@ -11,7 +11,7 @@ from datetime import datetime
import os
import re
import socket
from shared import CreateSelect, CreateFoldersSelect, LocationIcon, DB_URL, PROD_HOST, OLDEST_LOG_LIMIT
from shared import CreateSelect, CreateFoldersSelect, LocationIcon, DB_URL, OLDEST_LOG_LIMIT

# for ldap auth
from flask_ldap3_login import LDAP3LoginManager
@@ -60,15 +60,16 @@ except Exception:
    app.config['SECRET_KEY'] = b'my_insecure_PA_token_with_random_2134876adsfjhlkasdf87'

# ldap config vars: (the last one is required, or python ldap freaks out)
app.config['LDAP_HOST'] = 'mara.ddp.net'
app.config['LDAP_HOST'] = 'ldap.ddp.net'
app.config['LDAP_BASE_DN'] = 'dc=depaoli,dc=id,dc=au'
app.config['LDAP_USER_DN'] = 'ou=users'
app.config['LDAP_GROUP_DN'] = 'ou=groups'
app.config['LDAP_USER_RDN_ATTR'] = 'cn'
app.config['LDAP_USER_RDN_ATTR'] = 'uid'
app.config['LDAP_USER_LOGIN_ATTR'] = 'uid'
app.config['LDAP_GROUP_OBJECT_FILTER'] = '(objectclass=posixGroup)'
app.config['LDAP_BIND_USER_DN'] = None
app.config['LDAP_BIND_USER_PASSWORD'] = None
app.config['LDAP_GROUP_OBJECT_FILTER'] = '(objectclass=posixGroup)'


db = SQLAlchemy(app) # create the (flask) sqlalchemy connection
ma = Marshmallow(app) # set up Marshmallow - data marshalling / serialising
@@ -86,6 +87,7 @@ from files import Entry
from person import Person
from settings import Settings
from user import PAUser
from states import PA_UserState

####################################### GLOBALS #######################################
# allow jinja2 to call these python functions directly
@@ -142,12 +144,17 @@ def login():
    # ldap-injecting our username, so send them back to the login page instead
    if request.method == 'POST' and re.search( r'[()\\*&!]', request.form['username']):
        from job import SetFELog
        SetFELog( message=f"ERROR: Detected special LDAP chars in username: {request.form['username']}", log_level="error", persistent=True, cant_close=True )
        SetFELog( message=f"ERROR: Detected special LDAP chars in username: {request.form['username']}", level="danger", persistent=True, cant_close=True )
        return redirect(url_for('login'))
    if form.validate_on_submit():
        # Successfully logged in, we can now access the saved user object via form.user.
        login_user(form.user, remember=True) # Tell flask-login to log them in.
        next = request.args.get("next")

        # just (re)-authenticated, so clear old state from UserState, to avoid re-using old data that is no longer valid
        PA_UserState.query.filter(PA_UserState.pa_user_dn==current_user.dn).delete()
        db.session.commit()

        if next:
            return redirect(next) # Send them back where they came from
        else:
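As a note on the username check in the login hunk above: it is a blunt but effective guard against LDAP filter injection, since anything that could alter the search filter bounces the user back to the login form. A standalone sketch of the same idea; the character set mirrors the regex in this hunk, and the function name is illustrative:

import re

# characters with special meaning inside an LDAP search filter
LDAP_SPECIALS = re.compile(r'[()\\*&!]')

def username_is_safe(username):
    # reject rather than escape: a uid should never contain these characters
    return not LDAP_SPECIALS.search(username)

# username_is_safe("ddp")       -> True
# username_is_safe("*)(uid=*")  -> False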
@@ -237,14 +244,15 @@ def logout():

    logout_user()
    return redirect('/login')

# quick health route so traefik knows we are up
@app.route('/health')
def health():
    return {"status": "ok"}, 200

###############################################################################
# main to be called via Flask/Gunicorn
###############################################################################
def main():
    if hostname == PROD_HOST:
        app.run(ssl_context=('/etc/letsencrypt/live/pa.depaoli.id.au/cert.pem', '/etc/letsencrypt/live/pa.depaoli.id.au/privkey.pem'), host="0.0.0.0", debug=False)
    else:
        app.run(host="0.0.0.0", debug=True)

###############################################################################

@@ -1,5 +1,4 @@
|
||||
#
|
||||
#
|
||||
# This file controls the 'external' job control manager, that (periodically
|
||||
# looks, or is somehow pushed an event?) picks up new jobs and processes them.
|
||||
#
|
||||
@@ -15,7 +14,7 @@
|
||||
|
||||
### SQLALCHEMY IMPORTS ###
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy import Column, Integer, String, Sequence, Float, ForeignKey, DateTime, LargeBinary, Boolean, func
|
||||
from sqlalchemy import Column, Integer, String, Sequence, Float, ForeignKey, DateTime, LargeBinary, Boolean, func, text, select
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy import create_engine
|
||||
@@ -23,7 +22,7 @@ from sqlalchemy.orm import sessionmaker
|
||||
from sqlalchemy.orm import scoped_session
|
||||
|
||||
### LOCAL FILE IMPORTS ###
|
||||
from shared import DB_URL, PA_JOB_MANAGER_HOST, PA_JOB_MANAGER_PORT, THUMBSIZE, SymlinkName, GenThumb, SECS_IN_A_DAY
|
||||
from shared import DB_URL, PA_JOB_MANAGER_HOST, PA_JOB_MANAGER_PORT, THUMBSIZE, SymlinkName, GenThumb, SECS_IN_A_DAY, PA_EXIF_ROTATER, PA
|
||||
from datetime import datetime, timedelta, date
|
||||
|
||||
### PYTHON LIB IMPORTS ###
|
||||
@@ -45,12 +44,20 @@ import face_recognition
|
||||
import re
|
||||
import sys
|
||||
import ffmpeg
|
||||
import subprocess
|
||||
# FIXME: remove this
|
||||
import time
|
||||
|
||||
|
||||
# global debug setting
|
||||
if 'ENV' not in os.environ or os.environ['ENV'] != "production":
|
||||
if 'ENV' not in os.environ:
|
||||
DEBUG=True
|
||||
print("PA job manager starting as non-production -- ENV not set! Please set ENV, I'd rather not guess what to do")
|
||||
elif os.environ['ENV'] != "production":
|
||||
DEBUG=True
|
||||
print(f"PA job manager starting as non-production -- ENV={os.environ['ENV']}")
|
||||
else:
|
||||
print("PA job manager starting as Production")
|
||||
DEBUG=False
|
||||
|
||||
# global list of override tables to allow enumeration over them ...
|
||||
@@ -506,36 +513,28 @@ class PA_JobManager_FE_Message(Base):
|
||||
def __repr__(self):
|
||||
return "<id: {}, job_id: {}, level: {}, message: {}".format(self.id, self.job_id, self.level, self.message)
|
||||
|
||||
################################################################################
|
||||
# Class describing which Entry has a pending Amendment in the DB (via sqlalchemy)
|
||||
################################################################################
|
||||
class EntryAmendment(PA,Base):
|
||||
__tablename__ = "entry_amendment"
|
||||
eid = Column(Integer, ForeignKey("entry.id"), primary_key=True )
|
||||
job_id = Column(Integer, ForeignKey("job.id"), primary_key=True )
|
||||
# don't over think this, we just use eid to delete this entry anyway
|
||||
amend_type = Column(Integer)
|
||||
|
||||
class PA_UserState(Base):
|
||||
__tablename__ = "pa_user_state"
|
||||
id = Column(Integer, Sequence('pa_user_state_id_seq'), primary_key=True )
|
||||
pa_user_dn = Column(String, ForeignKey('pa_user.dn'), primary_key=True )
|
||||
last_used = Column(DateTime(timezone=True))
|
||||
path_type = Column(String, primary_key=True, unique=False, nullable=False )
|
||||
noo = Column(String, unique=False, nullable=False )
|
||||
grouping = Column(String, unique=False, nullable=False )
|
||||
how_many = Column(Integer, unique=False, nullable=False )
|
||||
st_offset = Column(Integer, unique=False, nullable=False )
|
||||
size = Column(Integer, unique=False, nullable=False )
|
||||
folders = Column(Boolean, unique=False, nullable=False )
|
||||
root = Column(String, unique=False, nullable=False )
|
||||
cwd = Column(String, unique=False, nullable=False )
|
||||
## for now being lazy and not doing a separate table until I settle on needed fields and when
|
||||
# only used if ptype == View
|
||||
view_eid = Column(Integer, unique=False, nullable=False )
|
||||
orig_ptype = Column(String, unique=False, nullable=False )
|
||||
# only used if view and orig_ptype was search
|
||||
orig_search_term = Column(String, unique=False, nullable=False )
|
||||
orig_url = Column(String, unique=False, nullable=False )
|
||||
current = Column(Integer)
|
||||
first_eid = Column(Integer)
|
||||
last_eid = Column(Integer)
|
||||
num_entries = Column(Integer)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<pa_user_dn: {self.pa_user_dn}, path_type: {self.path_type}, noo: {self.noo}, grouping: {self.grouping}, how_many: {self.how_many}, st_offset: {self.st_offset}, size: {self.size}, folders: {self.folders}, root: {self.root}, cwd: {self.cwd}, view_eid: {self.view_eid}, orig_ptype: {self.orig_ptype}, orig_search_term: {self.orig_search_term}, orig_url: {self.orig_url}, current={self.current}, first_eid={self.first_eid}, last_eid={self.last_eid}, num_entries={self.num_entries}>"
|
||||
|
||||
##############################################################################
|
||||
# PAprint(): convenience function to prepend a timestamp to a printed string
|
||||
##############################################################################
|
||||
def PAprint( msg ):
|
||||
now=datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')
|
||||
if 'INFO:' in msg or 'WARN:' in msg:
|
||||
msg = ' '+msg
|
||||
if 'DEBUG:' in msg or 'ERROR:' in msg or 'WARN:' in msg or 'INFO:' in msg:
|
||||
print( f"{now} {msg}" )
|
||||
else:
|
||||
print( f"{now} DEBUG: {msg}" )
|
||||
return
|
||||
|
||||
##############################################################################
|
||||
# NewJob(): convenience function to create a job, appropriately
|
||||
@@ -576,7 +575,7 @@ def MessageToFE( job_id, message, level, persistent, cant_close ):
|
||||
def SettingsRBPath():
|
||||
settings = session.query(Settings).first()
|
||||
if settings == None:
|
||||
print("ERROR: Cannot create file data with no settings / recycle bin path is missing")
|
||||
PAprint("ERROR: Cannot create file data with no settings / recycle bin path is missing")
|
||||
return None
|
||||
# path setting is an absolute path, just use it, otherwise prepend base_path first
|
||||
if settings.recycle_bin_path[0] == '/':
|
||||
@@ -614,7 +613,7 @@ def ProcessRecycleBinDir(job):
|
||||
def SettingsSPath():
|
||||
settings = session.query(Settings).first()
|
||||
if settings == None or settings.storage_path == "":
|
||||
print("ERROR: Cannot create file data with no settings / storage path is missing")
|
||||
PAprint("ERROR: Cannot create file data with no settings / storage path is missing")
|
||||
return None
|
||||
if settings.storage_path[0] == '/':
|
||||
path=settings.storage_path
|
||||
@@ -641,10 +640,9 @@ def ProcessStorageDirs(parent_job):
|
||||
# SettingsIPath(): return import path (abs or add base_path if needed)
|
||||
##############################################################################
|
||||
def SettingsIPath():
|
||||
paths=[]
|
||||
settings = session.query(Settings).first()
|
||||
if not settings or settings.import_path == "":
|
||||
print("ERROR: Cannot create file data with no settings / import path is missing")
|
||||
PAprint("ERROR: Cannot create file data with no settings / import path is missing")
|
||||
return None
|
||||
if settings.import_path[0] == '/':
|
||||
path=settings.import_path
|
||||
@@ -838,7 +836,7 @@ def JobMetadata(job):
|
||||
def AddLogForJob(job, message):
|
||||
now=datetime.now(pytz.utc)
|
||||
log=Joblog( job_id=job.id, log=message, log_date=now )
|
||||
job.last_update=datetime.now(pytz.utc)
|
||||
job.last_update=now
|
||||
session.add(log)
|
||||
# some logs have DEBUG: in front, so clean that up
|
||||
message = message.replace("DEBUG:", "" )
|
||||
@@ -850,7 +848,7 @@ def AddLogForJob(job, message):
|
||||
else:
|
||||
job.last_commit = now
|
||||
if DEBUG:
|
||||
print( f"DEBUG: {message}" )
|
||||
PAprint( f"{message}" )
|
||||
return
|
||||
|
||||
##############################################################################
|
||||
@@ -919,7 +917,7 @@ def FinishJob(job, last_log, state="Completed", pa_job_state="Completed", level=
|
||||
session.commit()
|
||||
MessageToFE( job_id=job.id, message=last_log, level=level, persistent=persistent, cant_close=cant_close )
|
||||
if DEBUG:
|
||||
print( f"DEBUG: {last_log}" )
|
||||
PAprint( f"DEBUG: {last_log}" )
|
||||
return
|
||||
|
||||
##############################################################################
|
||||
@@ -930,13 +928,13 @@ def FinishJob(job, last_log, state="Completed", pa_job_state="Completed", level=
|
||||
##############################################################################
|
||||
def HandleJobs(first_run=False):
|
||||
if first_run:
|
||||
print("INFO: PA job manager is starting up - check for stale jobs" )
|
||||
PAprint("INFO: PA job manager is starting up - check for stale jobs" )
|
||||
else:
|
||||
if DEBUG:
|
||||
print("DEBUG: PA job manager is scanning for new jobs to process")
|
||||
PAprint("DEBUG: PA job manager is scanning for new jobs to process")
|
||||
for job in session.query(Job).filter(Job.pa_job_state != 'Complete').all():
|
||||
if first_run and job.pa_job_state == 'In Progress':
|
||||
print( f"INFO: Found stale job#{job.id} - {job.name}" )
|
||||
PAprint( f"INFO: Found stale job#{job.id} - {job.name}" )
|
||||
job.pa_job_state = 'Stale'
|
||||
session.add(job)
|
||||
AddLogForJob( job, "ERROR: Job has been marked stale as it did not complete" )
|
||||
@@ -948,17 +946,17 @@ def HandleJobs(first_run=False):
|
||||
j2 = session.query(Job).get(job.wait_for)
|
||||
if not j2:
|
||||
AddLogForJob( job, f"ERROR: waiting for a job#({job.wait_for}) that does not exist? ")
|
||||
print(f"ERROR: job.wait_for ({job.wait_for}) does not exist in below? " )
|
||||
PAprint(f"ERROR: job.wait_for ({job.wait_for}) does not exist in below? " )
|
||||
for j in session.query(Job).all():
|
||||
print(f"ERROR: j={j.id}")
|
||||
PAprint(f"ERROR: j={j.id}")
|
||||
continue
|
||||
if j2.pa_job_state != 'Completed':
|
||||
continue
|
||||
|
||||
# use this to remove threads for easier debugging, and errors will stacktrace to the console
|
||||
if DEBUG:
|
||||
print("*************************************")
|
||||
print("RUNNING job: id={} name={} wait_for={}".format(job.id, job.name, job.wait_for ))
|
||||
PAprint("*************************************")
|
||||
PAprint("RUNNING job: id={} name={} wait_for={}".format(job.id, job.name, job.wait_for ))
|
||||
RunJob(job)
|
||||
else:
|
||||
try:
|
||||
@@ -968,8 +966,8 @@ def HandleJobs(first_run=False):
|
||||
try:
|
||||
MessageToFE( job_id=job.id, level="danger", message="Failed with: {} (try job log for details)".format(e), persistent=True, cant_close=False )
|
||||
except Exception as e2:
|
||||
print("ERROR: Failed to let front-end know, but back-end Failed to run job (id: {}, name: {} -- orig exep was: {}, this exception was: {})".format( job.id, job.name, e, e2) )
|
||||
print("INFO: PA job manager is waiting for a job")
|
||||
PAprint("ERROR: Failed to let front-end know, but back-end Failed to run job (id: {}, name: {} -- orig exep was: {}, this exception was: {})".format( job.id, job.name, e, e2) )
|
||||
PAprint("INFO: PA job manager is waiting for a job")
|
||||
return
|
||||
|
||||
##############################################################################
|
||||
@@ -1107,7 +1105,6 @@ def DisconnectAllOverrides(job):
|
||||
def JobForceScan(job):
|
||||
JobProgressState( job, "In Progress" )
|
||||
DisconnectAllOverrides(job)
|
||||
session.query(PA_UserState).delete()
|
||||
session.query(FaceFileLink).delete()
|
||||
session.query(FaceRefimgLink).delete()
|
||||
session.query(Face).delete()
|
||||
@@ -1132,12 +1129,12 @@ def JobForceScan(job):
|
||||
def CreateSymlink(job,ptype,path):
|
||||
path_type = session.query(PathType).get(ptype)
|
||||
symlink=SymlinkName(path_type.name, path, path)
|
||||
if not os.path.exists(symlink):
|
||||
print( f"INFO: symlink does not exist, actually creating it -- s={symlink}" )
|
||||
try:
|
||||
if not os.path.exists(symlink):
|
||||
PAprint( f"INFO: symlink does not exist, actually creating it -- s={symlink}" )
|
||||
# SAFE: SafePath() on init forces symlink to be safe
|
||||
os.makedirs( os.path.dirname(symlink), mode=0o777, exist_ok=True )
|
||||
os.symlink(path, symlink)
|
||||
r=os.makedirs( os.path.dirname(symlink), mode=0o777, exist_ok=True )
|
||||
r=os.symlink(path, symlink)
|
||||
except Exception as e:
|
||||
AddLogForJob( job, f"ERROR: Failed to create symlink - tried to link {symlink} -> {path}: {e}")
|
||||
return symlink
|
||||
@@ -1251,25 +1248,25 @@ def RemoveEmptyDirFromDB( job, del_me ):
|
||||
####################################################################################################################################
|
||||
def CleanUpDirInDB(job, e):
|
||||
session.commit()
|
||||
print( f"CleanUpDirInDB(): checking dir: {e.FullPathOnFS()} ({e.id})" )
|
||||
PAprint( f"CleanUpDirInDB(): checking dir: {e.FullPathOnFS()} ({e.id})" )
|
||||
content = session.query(Entry).join(EntryDirLink).filter(EntryDirLink.dir_eid==e.id).first()
|
||||
if not content:
|
||||
print( f" Dir {e.FullPathOnFS()} - {e.id} is empty - removing it" )
|
||||
print( f" Entry {e}" )
|
||||
PAprint( f" Dir {e.FullPathOnFS()} - {e.id} is empty - removing it" )
|
||||
PAprint( f" Entry {e}" )
|
||||
# if no in_dir, we are at the root of the path, STOP
|
||||
if not e.in_dir:
|
||||
print( " Parent is empty, so NEVER delete this entry, returning" )
|
||||
PAprint( " Parent is empty, so NEVER delete this entry, returning" )
|
||||
return
|
||||
# okay remove this empty dir
|
||||
RemoveEmtpyDirFromFS( job, e )
|
||||
RemoveEmptyDirFromDB( job, e )
|
||||
# get an Entry from DB (in_dir is a Dir/we need the ORM entry for code to work)
|
||||
parent_dir = session.query(Entry).get(e.in_dir.eid)
|
||||
print( f" Dir {e.FullPathOnFS()} is in {parent_dir.FullPathOnFS()} ({parent_dir.id}) -> check next" )
|
||||
PAprint( f" Dir {e.FullPathOnFS()} is in {parent_dir.FullPathOnFS()} ({parent_dir.id}) -> check next" )
|
||||
# check to see if removing the empty dir has left the parent dir empty
|
||||
CleanUpDirInDB(job, parent_dir)
|
||||
else:
|
||||
print( f"There is content (first entry: {content.name}) in {e.FullPathOnFS()} - finished for this dir" )
|
||||
PAprint( f"There is content (first entry: {content.name}) in {e.FullPathOnFS()} - finished for this dir" )
|
||||
return
|
||||
|
||||
####################################################################################################################################
|
||||
@@ -1359,7 +1356,7 @@ def MoveFileToRecycleBin(job,del_me):
|
||||
# SAFE: as SafePaths(rbpath) combined with data I control in this func (explicit remove of 'static/' + DB entry path)
|
||||
os.replace( src, dst )
|
||||
if DEBUG:
|
||||
print( f"MoveFileToRecycleBin({job.id},{del_me.name}): os.replace {src} with {dst} " )
|
||||
PAprint( f"MoveFileToRecycleBin({job.id},{del_me.name}): os.replace {src} with {dst} " )
|
||||
except Exception as e:
|
||||
AddLogForJob( job, f"ERROR: Failed to remove file from filesystem - which={src}, err: {e}")
|
||||
|
||||
@@ -1413,7 +1410,7 @@ def MoveFileToRecycleBin(job,del_me):
|
||||
####################################################################################################################################
|
||||
def MoveEntriesToOtherFolder(job, move_me, dst_storage_path, dst_rel_path):
|
||||
if DEBUG:
|
||||
print( f"DEBUG: MoveEntriesToOtherFolder( job={job.id}, move_me={move_me.name}, dst_storage_path={dst_storage_path.id}, dst_rel_path={dst_rel_path})")
|
||||
PAprint( f"DEBUG: MoveEntriesToOtherFolder( job={job.id}, move_me={move_me.name}, dst_storage_path={dst_storage_path.id}, dst_rel_path={dst_rel_path})")
|
||||
orig_name=move_me.name
|
||||
orig_fs_pos=move_me.FullPathOnFS()
|
||||
|
||||
@@ -1513,16 +1510,16 @@ def CreateFSLocation( job, dst_path, dst_locn ):
|
||||
####################################################################################################################################
|
||||
def ResetAnySubdirPaths( moving_dir, dst_storage_path, parent_rel_path ):
|
||||
if DEBUG:
|
||||
print( f"ResetAnySubdirPaths( {moving_dir.name}, {dst_storage_path.path_prefix}, {parent_rel_path} )" )
|
||||
PAprint( f"ResetAnySubdirPaths( {moving_dir.name}, {dst_storage_path.path_prefix}, {parent_rel_path} )" )
|
||||
sub_dirs = session.query(Entry).join(FileType).join(EntryDirLink).filter(EntryDirLink.dir_eid==moving_dir.id).filter(FileType.name=='Directory').all()
|
||||
for sub in sub_dirs:
|
||||
if DEBUG:
|
||||
print( f"ResetAnySubdirPaths: WAS sub={sub.name}, ip={sub.in_dir.in_path.path_prefix}, rp={sub.dir_details.rel_path}" )
|
||||
PAprint( f"ResetAnySubdirPaths: WAS sub={sub.name}, ip={sub.in_dir.in_path.path_prefix}, rp={sub.dir_details.rel_path}" )
|
||||
sub.in_path = dst_storage_path
|
||||
sub.dir_details.in_path = dst_storage_path
|
||||
sub.dir_details.rel_path = parent_rel_path + '/' + sub.name
|
||||
if DEBUG:
|
||||
print( f"ResetAnySubdirPaths: NOW sub={sub.name}, ip={sub.in_dir.in_path.path_prefix}, rp={sub.dir_details.rel_path}" )
|
||||
PAprint( f"ResetAnySubdirPaths: NOW sub={sub.name}, ip={sub.in_dir.in_path.path_prefix}, rp={sub.dir_details.rel_path}" )
|
||||
ResetAnySubdirPaths( sub, dst_storage_path, sub.dir_details.rel_path )
|
||||
return
|
||||
|
||||
@@ -1644,18 +1641,6 @@ def find_last_successful_ai_scan(job):
|
||||
return ai_job.last_update.timestamp()
|
||||
return 0
|
||||
|
||||
####################################################################################################################################
|
||||
# when an import job actually finds new files, then the pa_user_state caches will become invalid (offsets are now wrong)
|
||||
####################################################################################################################################
|
||||
def DeleteOldPA_UserState(job):
|
||||
# clear them out for now - this is 'dumb', just delete ALL. Eventually, can do this based on just the path &/or whether the last_used is
|
||||
# newer than this delete moment (only would be a race condition between an import changing things and someone simultaneously viewing)
|
||||
# path=[jex.value for jex in job.extra if jex.name == "path"][0]
|
||||
session.query(PA_UserState).delete()
|
||||
return
|
||||
|
||||
|
||||
|
||||
####################################################################################################################################
|
||||
# JobImportDir(): job that scan import dir and processes entries in there - key function that uses os.walk() to traverse the
|
||||
# file system and calls AddFile()/AddDir() as necessary
|
||||
@@ -1668,7 +1653,7 @@ def JobImportDir(job):
|
||||
ptype = session.query(PathType).get(path_type)
|
||||
AddLogForJob(job, f"Checking {ptype.name} Directory: {path}" )
|
||||
if DEBUG:
|
||||
print( f"DEBUG: Checking Directory: {path}" )
|
||||
PAprint( f"DEBUG: Checking Directory: {path}" )
|
||||
if not os.path.exists( path ):
|
||||
WithdrawDependantJobs( job, job.id, "scan job found no new files to process" )
|
||||
FinishJob( job, f"Finished Importing: {path} -- Path does not exist", "Failed" )
|
||||
@@ -1739,7 +1724,7 @@ def JobImportDir(job):
|
||||
# use ctime as even a metadata change (mv'd file on the fs, or a perms change) needs to be checked
|
||||
if stat.st_ctime > dir.last_import_date:
|
||||
if DEBUG:
|
||||
print("DEBUG: {} - {} is newer than {}".format( basename, stat.st_ctime, dir.last_import_date ) )
|
||||
PAprint("DEBUG: {} - {} is newer than {}".format( basename, stat.st_ctime, dir.last_import_date ) )
|
||||
if isImage(fname):
|
||||
type_str = 'Image'
|
||||
elif isVideo(fname):
|
||||
@@ -1753,7 +1738,7 @@ def JobImportDir(job):
|
||||
found_new_files += 1
|
||||
else:
|
||||
if DEBUG:
|
||||
print( f"DEBUG: { basename} - {stat.st_ctime} is OLDER than {dir.last_import_date}" )
|
||||
PAprint( f"DEBUG: { basename} - {stat.st_ctime} is OLDER than {dir.last_import_date}" )
|
||||
e=session.query(Entry).join(EntryDirLink).join(Dir).filter(Entry.name==basename,Dir.eid==dir.eid).first()
|
||||
e.exists_on_fs=True
|
||||
job.current_file=basename
|
||||
@@ -1764,8 +1749,6 @@ def JobImportDir(job):
|
||||
if found_new_files:
|
||||
job.extra.append( JobExtra( name="new_files", value=str(found_new_files) ) )
|
||||
session.add(job)
|
||||
# this will invalidate pa_user_state for this path's contents (offsets are now wrong), clear them out
|
||||
DeleteOldPA_UserState(job)
|
||||
|
||||
rm_cnt=HandleAnyFSDeletions(job)
|
||||
|
||||
@@ -1890,9 +1873,22 @@ def JobRunAIOn(job):
|
||||
FinishJob(job, "Finished Processing AI")
|
||||
return
|
||||
|
||||
################################################################################
|
||||
# removeEntryAmendment(): helper routine to remove an Entry Amendment for a
|
||||
# given job and eid (called after Transform or Delete/Restore/Move files)
|
||||
################################################################################
|
||||
def removeEntryAmendment( job, eid ):
|
||||
# now remove the matching amendment for the transform job
|
||||
stmt=select(EntryAmendment).where(EntryAmendment.eid==eid)
|
||||
ea=session.execute(stmt).scalars().one_or_none()
|
||||
if ea:
|
||||
session.delete(ea)
|
||||
else:
|
||||
AddLogForJob( job, f"ERROR: failed to remove entry amendment in DB for this transformation? (eid={eid})" )
|
||||
PAprint( f"ERROR: failed to remove entry amendment in DB for this transformation? (eid={eid}, job={job})" )
|
||||
|
||||
####################################################################################################################################
|
||||
# JobTransformImage(): transform an image by the amount requested (can also flip horizontal or vertical)
|
||||
# TODO: should be JobTransformImage() ;)
|
||||
####################################################################################################################################
|
||||
def JobTransformImage(job):
|
||||
JobProgressState( job, "In Progress" )
|
||||
@@ -1900,27 +1896,39 @@ def JobTransformImage(job):
|
||||
id=[jex.value for jex in job.extra if jex.name == "id"][0]
|
||||
amt=[jex.value for jex in job.extra if jex.name == "amt"][0]
|
||||
e=session.query(Entry).join(File).filter(Entry.id==id).first()
|
||||
print( f"JobTransformImage: job={job.id}, id={id}, amt={amt}" )
|
||||
im = Image.open( e.FullPathOnFS() )
|
||||
PAprint( f"JobTransformImage: job={job.id}, id={id}, amt={amt}" )
|
||||
# can't transform a non-image, but may get here if multi-select includes non-Image
|
||||
if e.type.name != 'Image':
|
||||
removeEntryAmendment( job, id )
|
||||
FinishJob(job, "Cannot rotate file as it is not an Image","Failed")
|
||||
return
|
||||
|
||||
if amt == "fliph":
|
||||
AddLogForJob(job, f"INFO: Flipping {e.FullPathOnFS()} horizontally" )
|
||||
im = Image.open( e.FullPathOnFS() )
|
||||
out = im.transpose(Image.FLIP_LEFT_RIGHT)
|
||||
out.save( e.FullPathOnFS() )
|
||||
elif amt == "flipv":
|
||||
AddLogForJob(job, f"INFO: Flipping {e.FullPathOnFS()} vertically" )
|
||||
im = Image.open( e.FullPathOnFS() )
|
||||
out = im.transpose(Image.FLIP_TOP_BOTTOM)
|
||||
out.save( e.FullPathOnFS() )
|
||||
else:
|
||||
AddLogForJob(job, f"INFO: Rotating {e.FullPathOnFS()} by {amt} degrees" )
|
||||
if im.format == 'JPEG':
|
||||
im=ImageOps.exif_transpose(im)
|
||||
out = im.rotate(int(amt), expand=True)
|
||||
out.save( e.FullPathOnFS() )
|
||||
print( f"JobTransformImage DONE transform: job={job.id}, id={id}, amt={amt}" )
|
||||
p = subprocess.run([PA_EXIF_ROTATER, e.FullPathOnFS(), amt ], capture_output=True, text=True )
|
||||
if p.stderr:
|
||||
PAprint(p.stderr)
|
||||
else:
|
||||
PAprint(p.stdout)
|
||||
settings = session.query(Settings).first()
|
||||
e.file_details.thumbnail, _ , _ = GenThumb( e.FullPathOnFS(), settings.auto_rotate )
|
||||
e.file_details.hash = md5( job, e )
|
||||
print( f"JobTransformImage DONE thumb: job={job.id}, id={id}, amt={amt}" )
|
||||
PAprint( f"JobTransformImage DONE thumb: job={job.id}, id={id}, amt={amt}" )
|
||||
session.add(e)
|
||||
# any faces in this file are no longer valid, remove them
|
||||
session.query(FaceFileLink).filter(FaceFileLink.file_eid==e.id).delete()
|
||||
removeEntryAmendment( job, id )
|
||||
|
||||
FinishJob(job, "Finished Processing image rotation/flip")
|
||||
return
|
||||
|
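The rotate/flip branch above leans entirely on Pillow, then regenerates the thumbnail and hash afterwards. A stripped-down sketch of just the image operation; the amt values mirror the job extras used in this hunk, while error handling, the external EXIF rotater and the thumbnail/hash refresh are omitted:

from PIL import Image, ImageOps

def transform_on_disk(path, amt):
    # amt is one of "90", "180", "270", "fliph", "flipv" - the same values the job carries
    im = Image.open(path)
    if amt == "fliph":
        out = im.transpose(Image.FLIP_LEFT_RIGHT)
    elif amt == "flipv":
        out = im.transpose(Image.FLIP_TOP_BOTTOM)
    else:
        # normalise any EXIF orientation first so the rotation matches what the user sees
        if im.format == 'JPEG':
            im = ImageOps.exif_transpose(im)
        out = im.rotate(int(amt), expand=True)
    # overwrite the file in place, as the job does
    out.save(path)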
||||
@@ -1940,7 +1948,7 @@ def GenHashAndThumb(job, e):
|
||||
# use mtime as only if the content is different do we need to redo the hash
|
||||
if stat.st_mtime < e.file_details.last_hash_date:
|
||||
if DEBUG:
|
||||
print(f"OPTIM: GenHashAndThumb {e.name} file is older than last hash, skip this")
|
||||
PAprint(f"OPTIM: GenHashAndThumb {e.name} file is older than last hash, skip this")
|
||||
job.current_file_num+=1
|
||||
return
|
||||
|
||||
@@ -1948,7 +1956,7 @@ def GenHashAndThumb(job, e):
|
||||
# same hash and we already have a thumbnail-> just return
|
||||
if new_hash == e.file_details.hash and e.file_details.thumbnail:
|
||||
if DEBUG:
|
||||
print(f"OPTIM: GenHashAndThumb {e.name} md5 is same - likely a mv on filesystem so skip md5/thumb")
|
||||
PAprint(f"OPTIM: GenHashAndThumb {e.name} md5 is same - likely a mv on filesystem so skip md5/thumb")
|
||||
job.current_file_num+=1
|
||||
return
|
||||
e.file_details.hash = new_hash
|
||||
@@ -1966,7 +1974,7 @@ def GenHashAndThumb(job, e):
|
||||
####################################################################################################################################
|
||||
def ProcessFilesInDir(job, e, file_func, count_dirs):
|
||||
if DEBUG:
|
||||
print( f"DEBUG: ProcessFilesInDir: {e.FullPathOnFS()}")
|
||||
PAprint( f"DEBUG: ProcessFilesInDir: {e.FullPathOnFS()}")
|
||||
if e.type.name != 'Directory':
|
||||
file_func(job, e)
|
||||
else:
|
||||
@@ -1986,7 +1994,7 @@ def JobGetFileDetails(job):
|
||||
path=[jex.value for jex in job.extra if jex.name == "path"][0]
|
||||
path_prefix=[jex.value for jex in job.extra if jex.name == "path_prefix"][0]
|
||||
if DEBUG:
|
||||
print( f"DEBUG: JobGetFileDetails for path={path_prefix}" )
|
||||
PAprint( f"DEBUG: JobGetFileDetails for path={path_prefix}" )
|
||||
p=session.query(Path).filter(Path.path_prefix==path_prefix).first()
|
||||
job.current_file_num = 0
|
||||
job.num_files = p.num_files
|
||||
@@ -2094,7 +2102,7 @@ def JobCheckForDups(job):
|
||||
AddLogForJob( job, f"Check for duplicates" )
|
||||
ClearOtherDupMessagesAndJobs()
|
||||
|
||||
res = session.execute( "select count(e1.id) from entry e1, file f1, dir d1, entry_dir_link edl1, path_dir_link pdl1, path p1, entry e2, file f2, dir d2, entry_dir_link edl2, path_dir_link pdl2, path p2 where e1.id = f1.eid and e2.id = f2.eid and d1.eid = edl1.dir_eid and edl1.entry_id = e1.id and edl2.dir_eid = d2.eid and edl2.entry_id = e2.id and p1.type_id != (select id from path_type where name = 'Bin') and p1.id = pdl1.path_id and pdl1.dir_eid = d1.eid and p2.type_id != (select id from path_type where name = 'Bin') and p2.id = pdl2.path_id and pdl2.dir_eid = d2.eid and f1.hash = f2.hash and e1.id != e2.id and f1.size_mb = f2.size_mb" )
|
||||
res = session.execute( text( "select count(e1.id) from entry e1, file f1, dir d1, entry_dir_link edl1, path_dir_link pdl1, path p1, entry e2, file f2, dir d2, entry_dir_link edl2, path_dir_link pdl2, path p2 where e1.id = f1.eid and e2.id = f2.eid and d1.eid = edl1.dir_eid and edl1.entry_id = e1.id and edl2.dir_eid = d2.eid and edl2.entry_id = e2.id and p1.type_id != (select id from path_type where name = 'Bin') and p1.id = pdl1.path_id and pdl1.dir_eid = d1.eid and p2.type_id != (select id from path_type where name = 'Bin') and p2.id = pdl2.path_id and pdl2.dir_eid = d2.eid and f1.hash = f2.hash and e1.id != e2.id and f1.size_mb = f2.size_mb") )
|
||||
for row in res:
|
||||
if row.count > 0:
|
||||
AddLogForJob(job, f"Found duplicates, Creating Status message in front-end for attention")
|
||||
@@ -2198,6 +2206,7 @@ def JobMoveFiles(job):
|
||||
if 'eid-' in jex.name:
|
||||
move_me=session.query(Entry).get(jex.value)
|
||||
MoveEntriesToOtherFolder( job, move_me, dst_storage_path, f"{prefix}{suffix}" )
|
||||
removeEntryAmendment( job, move_me.id )
|
||||
NewJob( name="check_dups", num_files=0, wait_for=None, jex=None, parent_job=None, desc="check for duplicate files" )
|
||||
FinishJob(job, f"Finished move selected file(s)")
|
||||
return
|
||||
@@ -2212,6 +2221,7 @@ def JobDeleteFiles(job):
|
||||
if 'eid-' in jex.name:
|
||||
del_me=session.query(Entry).join(File).filter(Entry.id==jex.value).first()
|
||||
MoveFileToRecycleBin(job,del_me)
|
||||
removeEntryAmendment(job,del_me.id)
|
||||
NewJob( name="check_dups", num_files=0, wait_for=None, jex=None, parent_job=None, desc="check for duplicate files" )
|
||||
FinishJob(job, f"Finished deleting selected file(s)")
|
||||
return
|
||||
@@ -2226,6 +2236,7 @@ def JobRestoreFiles(job):
|
||||
if 'eid-' in jex.name:
|
||||
restore_me=session.query(Entry).join(File).filter(Entry.id==jex.value).first()
|
||||
RestoreFile(job,restore_me)
|
||||
removeEntryAmendment(job,restore_me.id)
|
||||
NewJob( name="check_dups", num_files=0, wait_for=None, jex=None, parent_job=None, desc="check for duplicate files" )
|
||||
FinishJob(job, f"Finished restoring selected file(s)")
|
||||
return
|
||||
@@ -2242,18 +2253,19 @@ def JobRestoreFiles(job):
|
||||
def CopyOverrides():
|
||||
try:
|
||||
for tbl in override_tbls:
|
||||
session.execute( f"select * into tmp_{tbl} from {tbl}")
|
||||
session.execute( text( f"select * into tmp_{tbl} from {tbl}") )
|
||||
# force a commit here - I want to fail before I delete override content
|
||||
session.commit()
|
||||
# now take all 4 override tables in DB and clear them out
|
||||
for tbl in override_tbls:
|
||||
session.execute( f"delete from {tbl}" )
|
||||
session.execute( text( f"delete from {tbl}" ) )
|
||||
session.commit()
|
||||
except Exception as ex:
|
||||
print( f"ERROR: there are existing tmp tables when processing metadata. This SHOULD NEVER HAPPEN - manual intervention needed" )
|
||||
print( f"ERROR: most likely the job manager was killed during processing metadata - you may want to manually put" )
|
||||
print( f"ERROR: the contents of the 'tmp_*' tables back into their corresponding official metadata tables " )
|
||||
print( f"ERROR: and try to restart the job manager" )
|
||||
PAprint( f"ERROR: there are existing tmp tables when processing metadata. This SHOULD NEVER HAPPEN - manual intervention needed" )
|
||||
PAprint( f"ERROR: most likely the job manager was killed during processing metadata - you may want to manually put" )
|
||||
PAprint( f"ERROR: the contents of the 'tmp_*' tables back into their corresponding official metadata tables " )
|
||||
PAprint( f"ERROR: and try to restart the job manager" )
|
||||
PAprint( f"ERROR: orig ex: {ex}" )
|
||||
exit( 1 )
|
||||
return
|
||||
|
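The commit placement in CopyOverrides() is the important part of the pattern above: the copies into the tmp_* tables are committed before anything is deleted, so a crash part-way through leaves the overrides recoverable from tmp_*. A condensed sketch of the same shape; the table names are the ones enumerated in override_tbls, and the session object is the module-level one used throughout this file:

from sqlalchemy import text

def snapshot_then_clear(session, tables):
    # 1. copy each table aside and make that durable first
    for tbl in tables:
        session.execute(text(f"select * into tmp_{tbl} from {tbl}"))
    session.commit()   # fail *before* any override content is deleted
    # 2. only now clear the live tables; tmp_* still holds everything
    for tbl in tables:
        session.execute(text(f"delete from {tbl}"))
    session.commit()

# later, once the reload has re-inserted whatever is still valid:
#   for tbl in tables: session.execute(text(f"drop table tmp_{tbl}"))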
||||
@@ -2267,8 +2279,8 @@ def GetFaceInMetadata(fname):
|
||||
face_data=file_h.read(-1)
|
||||
file_h.close()
|
||||
except Exception as ex:
|
||||
print( f"ERROR: FATAL tried to read in override data and cant read content" )
|
||||
print( f"ERROR: manual intervention needed - exc={ex}" )
|
||||
PAprint( f"ERROR: FATAL tried to read in override data and cant read content" )
|
||||
PAprint( f"ERROR: manual intervention needed - exc={ex}" )
|
||||
exit(1)
|
||||
return face_data
|
||||
|
||||
@@ -2294,13 +2306,13 @@ def ReloadMetadata(job):
|
||||
fnames = glob.glob( f'{mpath}/no_match_overrides/*' )
|
||||
for fname in fnames:
|
||||
# type derived from fname (e.g. 0_Too Young_md5*, 1_Too Young, 2_Ignore Face, etc.)
|
||||
match=re.search( '(\d+)_([^_\.]+)', fname )
|
||||
match=re.search( r'(\d+)_([^_\.]+)', fname )
|
||||
face_id=match.group(1)
|
||||
type_name=match.group(2)
|
||||
otype = session.query(FaceOverrideType).filter(FaceOverrideType.name==type_name).one()
|
||||
face_data=GetFaceInMetadata(fname)
|
||||
if DEBUG:
|
||||
print( f"Found metadata showing Override of type: {type_name}" )
|
||||
PAprint( f"DEBUG: Found metadata showing Override of type: {type_name}" )
|
||||
|
||||
# check that both the id and data match - if so make new FaceNoMatch otherwise Disco*FaceNoMatch
|
||||
face=session.query( Face ).filter( Face.id==face_id ).filter( Face.face == face_data ). first()
|
||||
@@ -2313,22 +2325,22 @@ def ReloadMetadata(job):
|
||||
# SAFE: as SafePaths(mpath) combined with data I control in this func
|
||||
os.replace( fname, f'{mpath}no_match_overrides/0_{otype.name}_{md5face(face_data)}' )
|
||||
except Exception as ex:
|
||||
print( f"ERROR: renaming no-match metadata on filesystem failed: {ex}" )
|
||||
PAprint( f"ERROR: renaming no-match metadata on filesystem failed: {ex}" )
|
||||
|
||||
# process Metadata on FS for force_match_overrides (disco ones, will have 0 as face_id)
|
||||
fnames = glob.glob( f'{mpath}force_match_overrides/*' )
|
||||
for fname in fnames:
|
||||
# person derived from fname (e.g. 0_ddp_md5*, 1_ddp, 2_mich, etc.)
|
||||
match=re.search( '(\d+)_([^_]+)', fname )
|
||||
match=re.search( r'(\d+)_([^_]+)', fname )
|
||||
face_id=match.group(1)
|
||||
person_tag=match.group(2)
|
||||
p = session.query(Person).filter(Person.tag==person_tag).first()
|
||||
if not p:
|
||||
print( f"There is a metadata override on the file system for person: {person_tag} - but they are no longer in the DB - skip" )
|
||||
PAprint( f"INFO: There is a metadata override on the file system for person: {person_tag} - but they are no longer in the DB - skip" )
|
||||
continue
|
||||
face_data=GetFaceInMetadata(fname)
|
||||
if DEBUG:
|
||||
print( f"Found metadata showing Override match for person: {person_tag}" )
|
||||
PAprint( f"DEBUG: Found metadata showing Override match for person: {person_tag}" )
|
||||
|
||||
# check that both the id and data match - if so make new FaceNoMatch otherwise Disco*FaceNoMatch
|
||||
face=session.query( Face ).filter( Face.id==face_id ).filter( Face.face == face_data ).first()
|
||||
@@ -2342,43 +2354,46 @@ def ReloadMetadata(job):
|
||||
# SAFE: as SafePaths(mpath) combined with data I control in this func
|
||||
os.replace( fname, f'{mpath}force_match_overrides/0_{p.tag}_{md5face(face_data)}' )
|
||||
except Exception as ex:
|
||||
print( f"ERROR: renaming force-match metadata on filesystem failed: {ex}" )
|
||||
PAprint( f"ERROR: renaming force-match metadata on filesystem failed: {ex}" )
|
||||
|
||||
|
||||
# now process each of the tmp tables for anything that was in the DB but not on FS (e.g rm'd metadata)
|
||||
overrides=session.execute( "select face_id, type_id from tmp_face_no_match_override" )
|
||||
overrides=session.execute( text( "select face_id, type_id from tmp_face_no_match_override" ) )
|
||||
for o in overrides:
|
||||
print( f"F Force Match: o.face_id={o.face_id}" )
|
||||
PAprint( f"F Force Match: o.face_id={o.face_id}" )
|
||||
print( f"F No Match: o.type_id={o.type_id}" )
|
||||
nmo=session.query(FaceNoMatchOverride).filter(FaceNoMatchOverride.face_id==o.face_id).filter(FaceNoMatchOverride.type_id==o.type_id).first()
|
||||
if not nmo:
|
||||
session.add( FaceNoMatchOverride( face_id=o.face_id, type_id=o.type_id ) )
|
||||
|
||||
overrides=session.execute( "select face_id, person_id from tmp_face_force_match_override" )
|
||||
overrides=session.execute( text( "select face_id, person_id from tmp_face_force_match_override" ) )
|
||||
for o in overrides:
|
||||
print( f"F Force Match: o.face_id={o.face_id}" )
|
||||
print( f"F Force Match: o.person_id={o.person_id}" )
|
||||
if DEBUG:
|
||||
PAprint( f"F Force Match: o.face_id={o.face_id}" )
|
||||
PAprint( f"F Force Match: o.person_id={o.person_id}" )
|
||||
fmo=session.query(FaceForceMatchOverride).filter(FaceForceMatchOverride.face_id==o.face_id,FaceForceMatchOverride.person_id==o.person_id).first()
|
||||
if not fmo:
|
||||
session.add( FaceForceMatchOverride( face_id=o.face_id, person_id=o.person_id ) )
|
||||
|
||||
overrides=session.execute( "select face, type_id from tmp_disconnected_no_match_override" )
|
||||
overrides=session.execute( text( "select face, type_id from tmp_disconnected_no_match_override" ) )
|
||||
for o in overrides:
|
||||
print( f"D No Match: o.type_id={o.type_id}" )
|
||||
if DEBUG:
|
||||
PAprint( f"D No Match: o.type_id={o.type_id}" )
|
||||
dnmo=session.query(DisconnectedNoMatchOverride).filter(DisconnectedNoMatchOverride.face==o.face).filter(DisconnectedNoMatchOverride.type_id==o.type_id).first()
|
||||
if not dnmo:
|
||||
session.add( DisconnectedNoMatchOverride( face=o.face, type_id=o.type_id ) )
|
||||
|
||||
overrides=session.execute( "select face, person_id from tmp_disconnected_force_match_override" )
|
||||
overrides=session.execute( text( "select face, person_id from tmp_disconnected_force_match_override" ) )
|
||||
for o in overrides:
|
||||
print( f"D Force Match: o.person_id={o.person_id}" )
|
||||
if DEBUG:
|
||||
PAprint( f"D Force Match: o.person_id={o.person_id}" )
|
||||
dfmo=session.query(DisconnectedForceMatchOverride).filter(DisconnectedForceMatchOverride.face==o.face).filter(DisconnectedForceMatchOverride.person_id==o.person_id).first()
|
||||
if not dfmo:
|
||||
session.add( DisconnectedForceMatchOverride( face=o.face, person_id=o.person_id ) )
|
||||
|
||||
# finally, drop the tmp tables
|
||||
for tbl in override_tbls:
|
||||
session.execute( f"drop table tmp_{tbl}" )
|
||||
session.execute( text( f"drop table tmp_{tbl}" ) )
|
||||
|
||||
# ok, finally commit all these changes - don't do this until now. Worst case, if we crash/fail, the overrides are still sitting in tmp_{tbl}
|
||||
session.commit()
|
||||
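These hunks wrap every raw SQL string in text(), which SQLAlchemy 2.x requires before Session.execute() will run plain SQL. A minimal sketch of the pattern, using an illustrative in-memory SQLite engine and a made-up tmp table name rather than anything from this branch:

```python
from sqlalchemy import create_engine, text
from sqlalchemy.orm import Session

engine = create_engine("sqlite:///:memory:")   # illustrative; the real code uses DB_URL

with Session(engine) as session:
    session.execute(text("create table tmp_demo_override (face_id integer, type_id integer)"))
    # bound parameters (:fid/:tid) are optional here, but they avoid f-string interpolation
    session.execute(text("insert into tmp_demo_override values (:fid, :tid)"),
                    {"fid": 1, "tid": 2})
    # rows from a text() select still expose columns as attributes, as the loops above rely on
    for o in session.execute(text("select face_id, type_id from tmp_demo_override")):
        print(o.face_id, o.type_id)
    session.execute(text("drop table tmp_demo_override"))
    session.commit()
```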
@@ -2392,7 +2407,6 @@ def ReloadMetadata(job):
|
||||
# If there is content in the Bin already, it logs this - mostly useful when testing
|
||||
####################################################################################################################################
|
||||
def InitialValidationChecks():
|
||||
now=datetime.now(pytz.utc)
|
||||
job=NewJob( name="init", num_files=0, wait_for=None, jex=None, parent_job=None, desc="initialise photo assistant" )
|
||||
job.start_time=datetime.now(pytz.utc)
|
||||
JobProgressState( job, "In Progress" )
|
||||
@@ -2405,14 +2419,14 @@ def InitialValidationChecks():
|
||||
if len(dirs) + len(files) > 0:
|
||||
AddLogForJob(job, "INFO: the bin path contains content, cannot process to know where original deletes were form - skipping content!" )
|
||||
AddLogForJob(job, "TODO: could be smart about what is known in the DB vs on the FS, and change below to an ERROR if it is one")
|
||||
AddLogForJob(job, "WARNING: IF the files in the bin are in the DB (succeeded from GUI deletes) then this is okay, otherwise you should delete contents form the recycle bin and restart the job manager)" )
|
||||
AddLogForJob(job, "WARN: IF the files in the bin are in the DB (succeeded from GUI deletes) then this is okay, otherwise you should delete contents form the recycle bin and restart the job manager)" )
|
||||
# create symlink and Path/Dir if needed
|
||||
ProcessRecycleBinDir(job)
|
||||
rbp_exists=1
|
||||
except Exception as ex:
|
||||
print( f"FATAL ERROR: Failed to walk the recycle bin at {path} Err:{ex}" )
|
||||
PAprint( f"FATAL ERROR: Failed to walk the recycle bin at {path} Err:{ex}" )
|
||||
else:
|
||||
AddLogForJob(job, "ERROR: The bin path in settings does not exist - Please fix now");
|
||||
AddLogForJob(job, f"ERROR: The bin path {path} in settings does not exist - Please fix now");
|
||||
sp_exists=0
|
||||
path = SettingsSPath()
|
||||
if os.path.exists(path):
|
||||
@@ -2481,7 +2495,7 @@ def AddFaceToFile( locn_data, face_data, file_eid, model_id, settings ):
|
||||
# SAFE: as SafePaths(mpath) combined with data I control in this func
|
||||
os.replace( fname, new_fname )
|
||||
except Exception as ex:
|
||||
print( f"ERROR: AddFaceToFile-face connects to 'disconnected-force-match' metadata, but fixing the filesystem metadata failed: {ex}" )
|
||||
PAprint( f"ERROR: AddFaceToFile-face connects to 'disconnected-force-match' metadata, but fixing the filesystem metadata failed: {ex}" )
|
||||
|
||||
dnmo=session.query(DisconnectedNoMatchOverride).filter(DisconnectedNoMatchOverride.face==face.face).first()
|
||||
if dnmo:
|
||||
@@ -2497,7 +2511,7 @@ def AddFaceToFile( locn_data, face_data, file_eid, model_id, settings ):
|
||||
# SAFE: as SafePaths(mpath) combined with data I control in this func
|
||||
os.replace( fname, new_fname )
|
||||
except Exception as ex:
|
||||
print( f"ERROR: AddFaceToFile-face connects to 'disconnected-no-match' metadata, but fixing the filesystem metadata failed: {ex}" )
|
||||
PAprint( f"ERROR: AddFaceToFile-face connects to 'disconnected-no-match' metadata, but fixing the filesystem metadata failed: {ex}" )
|
||||
|
||||
return
|
||||
|
||||
@@ -2517,7 +2531,7 @@ def DelMatchesForFile( job, ent ):
|
||||
if DEBUG:
|
||||
AddLogForJob(job, f'Remove any old matches in {ent.name}')
|
||||
|
||||
session.execute( f"delete from face_refimg_link where face_id in (select face_id from face_file_link where file_eid = {ent.id})" )
|
||||
session.execute( text( f"delete from face_refimg_link where face_id in (select face_id from face_file_link where file_eid = {ent.id})" ) )
|
||||
ent.file_details.last_ai_scan=0
|
||||
session.add(ent)
|
||||
return
|
||||
@@ -2539,7 +2553,7 @@ def DelFacesForFile( job, eid ):
|
||||
if o:
|
||||
DisconnectSingleNoMatchOverride( job, o )
|
||||
|
||||
session.execute( f"delete from face where id in (select face_id from face_file_link where file_eid = {eid})" )
|
||||
session.execute( text( f"delete from face where id in (select face_id from face_file_link where file_eid = {eid})" ) )
|
||||
|
||||
session.commit()
|
||||
return
|
||||
@@ -2697,7 +2711,7 @@ def CheckAndRunBinClean():
|
||||
|
||||
now=datetime.now(pytz.utc)
|
||||
if not j or (now-j.last_update).days >= settings.scheduled_bin_cleanup:
|
||||
print( f"INFO: Should force clean up bin path, del files older than {settings.bin_cleanup_file_age} days old" )
|
||||
PAprint( f"INFO: Should force clean up bin path, del files older than {settings.bin_cleanup_file_age} days old" )
|
||||
NewJob( name="clean_bin", num_files=0, wait_for=None, jex=None, parent_job=None, desc="periodic clean up on Bin path" )
|
||||
created_jobs=True
|
||||
return created_jobs
|
||||
@@ -2707,7 +2721,7 @@ def CheckAndRunBinClean():
|
||||
# based jobs that should run (e.g. last scanned a path X day(s) ago, then scan now), etc. X is defined in settings
|
||||
####################################################################################################################################
|
||||
def ScheduledJobs():
|
||||
print("DEBUG: Time to check for any scheduled jobs needing to be run" )
|
||||
PAprint("DEBUG: Time to check for any scheduled jobs needing to be run" )
|
||||
|
||||
created_jobs=False
|
||||
|
||||
@@ -2717,24 +2731,23 @@ def ScheduledJobs():
|
||||
settings = session.query(Settings).first()
|
||||
now=datetime.now(pytz.utc)
|
||||
if ndays_since_last_im_scan >= settings.scheduled_import_scan:
|
||||
print( f"INFO: Time to force an import scan, last scan was {ndays_since_last_im_scan} days ago" )
|
||||
PAprint( f"INFO: Time to force an import scan, last scan was {ndays_since_last_im_scan} days ago" )
|
||||
NewJob( name="scan_ip", num_files=0, wait_for=None, jex=None, parent_job=None, desc="periodic clean scan for new files in Import path" )
|
||||
created_jobs=True
|
||||
if ndays_since_last_st_scan >= settings.scheduled_storage_scan:
|
||||
print( f"INFO: Time to force a storage scan, last scan was {ndays_since_last_st_scan}" )
|
||||
PAprint( f"INFO: Time to force a storage scan, last scan was {ndays_since_last_st_scan}" )
|
||||
NewJob( name="scan_sp", num_files=0, wait_for=None, jex=None, parent_job=None, desc="periodic clean scan for new files in Storage path" )
|
||||
created_jobs=True
|
||||
if CheckAndRunBinClean():
|
||||
created_jobs=True
|
||||
return created_jobs
|
||||
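Both CheckAndRunBinClean() and ScheduledJobs() boil down to the same check: a timezone-aware "days since the last run" value compared against a threshold from Settings. A rough, self-contained sketch of that arithmetic (the 7-day threshold is illustrative, not a value from this diff):

```python
from datetime import datetime, timedelta
import pytz

def days_since(last_run):
    """Whole days elapsed since a timezone-aware timestamp."""
    return (datetime.now(pytz.utc) - last_run).days

# pretend the last import scan ran 9 days ago and the setting is 7 days
last_scan = datetime.now(pytz.utc) - timedelta(days=9)
if days_since(last_scan) >= 7:
    print("time to queue a periodic scan job")
```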
|
||||
|
||||
####################################################################################################################################
|
||||
# MAIN - start with validation, then grab any jobs in the DB to process, then
|
||||
# go into waiting on a socket to be woken up (and, if woken, go back into HandleJobs())
|
||||
####################################################################################################################################
|
||||
if __name__ == "__main__":
|
||||
print("INFO: PA job manager starting - listening on {}:{}".format( PA_JOB_MANAGER_HOST, PA_JOB_MANAGER_PORT) )
|
||||
PAprint("INFO: PA job manager starting - listening on {}:{}".format( PA_JOB_MANAGER_HOST, PA_JOB_MANAGER_PORT) )
|
||||
|
||||
InitialValidationChecks()
|
||||
|
||||
@@ -2748,11 +2761,11 @@ if __name__ == "__main__":
|
||||
try:
|
||||
conn, addr = s.accept()
|
||||
if DEBUG:
|
||||
print( f"accept finished, tout={s.timeout}" )
|
||||
PAprint( f"accept finished, tout={s.timeout}" )
|
||||
|
||||
except socket.timeout:
|
||||
if DEBUG:
|
||||
print( f"timeout occurred, tout={s.timeout}" )
|
||||
PAprint( f"timeout occurred, tout={s.timeout}" )
|
||||
if ScheduledJobs():
|
||||
HandleJobs(False)
|
||||
continue
|
||||
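The main loop above leans on the listening socket's timeout: accept() raising socket.timeout is the signal to go look for scheduled jobs even when no client has poked the job manager. A cut-down sketch of that wake-up pattern (host, port and the 30-second timeout are placeholders):

```python
import socket

srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.bind(("localhost", 0))        # placeholder; the real code binds PA_JOB_MANAGER_HOST/PORT
srv.listen()
srv.settimeout(30)                # accept() gives up every 30s so periodic work can run

while True:
    try:
        conn, addr = srv.accept() # a client connected: handle whatever jobs were queued
        conn.close()
    except socket.timeout:
        # nobody connected within the timeout: check for scheduled jobs instead
        print("accept() timed out - checking scheduled jobs")
        break                     # break only so this sketch terminates
```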
|
||||
47
path.py
@@ -42,50 +42,3 @@ class Path(db.Model):
|
||||
|
||||
def __repr__(self):
|
||||
return f"<id: {self.id}, path_prefix: {self.path_prefix}, num_files={self.num_files}, type={self.type}>"
|
||||
|
||||
|
||||
################################################################################
|
||||
# Class describing PathDetail (quick convenience class for MovePathDetails())
|
||||
################################################################################
|
||||
class PathDetail(PA):
|
||||
"""Class describing details of a Path [internal class used in MovePathDetais()]"""
|
||||
|
||||
def __init__(self,ptype,path):
|
||||
"""Initialisation function for PathDetail class
|
||||
|
||||
Args:
|
||||
ptype: type of the path (e.g. "Storage" or "Import") - used to pick the icon from icons.svg
|
||||
path (str): path prefix (relative to the static/<type>/ root) shown as a move destination
|
||||
"""
|
||||
|
||||
self.type:int=ptype
|
||||
self.path:str=path
|
||||
# construct icon_url based on type of storage path (icons.svg contains icons for each)
|
||||
self.icon_url:str=url_for("internal", filename="icons.svg") + "#" + ICON[self.type]
|
||||
|
||||
################################################################################
|
||||
# helper function to find path details for move destinations - used in html
|
||||
# for move DBox to show potential storage paths to move files into
|
||||
################################################################################
|
||||
def MovePathDetails():
|
||||
"""helper function to find path details for move destinations
|
||||
|
||||
used in html/javascript for move Dialog Box to show potential storage paths to move files into
|
||||
|
||||
Args:
|
||||
None
|
||||
|
||||
Returns:
|
||||
ret (List[PathDetail]): a list of Path Details for where files can be moved
|
||||
|
||||
"""
|
||||
ret=[]
|
||||
sps=Path.query.join(PathType).filter(PathType.name=="Storage").all()
|
||||
for p in sps:
|
||||
obj = PathDetail( ptype="Storage", path=p.path_prefix.replace("static/Storage/","") )
|
||||
ret.append( obj )
|
||||
ips=Path.query.join(PathType).filter(PathType.name=="Import").all()
|
||||
for p in ips:
|
||||
obj = PathDetail( ptype="Import", path=p.path_prefix.replace("static/Import/","") )
|
||||
ret.append( obj )
|
||||
return ret
|
||||
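MovePathDetails() is only ever walked attribute-by-attribute from the move dialog's template, so a quick way to see what it produces is to call it inside a request context (url_for is needed for icon_url). A hedged sketch, assuming the same module layout the other imports in this repo use:

```python
from main import app                 # the Flask app, as imported by the other modules
from path import MovePathDetails     # assumes this file is importable as 'path'

with app.test_request_context():     # url_for() needs an active request/app context
    for d in MovePathDetails():
        # d.type is "Storage" or "Import", d.path is the prefix-stripped folder,
        # d.icon_url points at the matching symbol inside icons.svg
        print(d.type, d.path, d.icon_url)
```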
|
||||
79
person.py
@@ -3,8 +3,9 @@ from flask_wtf import FlaskForm
|
||||
from flask import request, render_template, redirect, url_for, make_response, jsonify
|
||||
from main import db, app, ma
|
||||
from settings import Settings, AIModel
|
||||
from sqlalchemy import Sequence, func
|
||||
from sqlalchemy import Sequence, func, select
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from sqlalchemy.orm import joinedload
|
||||
from flask_login import login_required, current_user
|
||||
from werkzeug.utils import secure_filename
|
||||
from shared import GenFace, GenThumb, PA
|
||||
@@ -87,14 +88,14 @@ def AddRefimgToPerson( filename, person ):
|
||||
# False == don't autorotate, it's not needed on this image
|
||||
refimg.thumbnail, refimg.orig_w, refimg.orig_h = GenThumb( filename, False )
|
||||
settings = Settings.query.first()
|
||||
model=AIModel.query.get(settings.default_refimg_model)
|
||||
model=db.session.get(AIModel,settings.default_refimg_model)
|
||||
refimg.face, face_locn = GenFace( filename, model=model.name )
|
||||
try:
|
||||
os.remove(filename)
|
||||
except Exception as e:
|
||||
# can fail "silently" here, if the face_locn worked, great, its only
|
||||
# a tmp file in /tmp - if not, the next if will send a msg to the front-end
|
||||
SetFELog( message=f"Failed to delete tmp file for refimg addition: {e}", log_level="danger", persistent=True, cant_close=True )
|
||||
SetFELog( message=f"Failed to delete tmp file for refimg addition: {e}", level="danger", persistent=True, cant_close=True )
|
||||
|
||||
if not face_locn:
|
||||
SetFELog( f"<b>Failed to find face in Refimg:</b>", "danger" )
|
||||
@@ -114,7 +115,7 @@ def AddRefimgToPerson( filename, person ):
|
||||
SetFELog( f"<b>Failed to add Refimg:</b> {e.orig}", "danger" )
|
||||
except Exception as e:
|
||||
SetFELog( f"<b>Failed to modify Refimg:</b> {e}", "danger" )
|
||||
return
|
||||
return refimg
|
||||
|
||||
################################################################################
|
||||
# TempRefimgFile: helper function that takes data POST'd (from dialog box to
|
||||
@@ -182,9 +183,12 @@ def match_with_create_person():
|
||||
p = Person( tag=request.form["tag"], surname=request.form["surname"], firstname=request.form["firstname"] )
|
||||
# add this fname (of temp refimg) to person
|
||||
fname=TempRefimgFile( request.form['refimg_data'], p.tag )
|
||||
AddRefimgToPerson( fname, p )
|
||||
r=AddRefimgToPerson( fname, p )
|
||||
SetFELog( f"Created person: {p.tag}" )
|
||||
return make_response( jsonify( who=p.tag, distance='0.0' ) )
|
||||
refimg_schema=RefimgSchema(many=False)
|
||||
r_data=refimg_schema.dump(r)
|
||||
|
||||
return make_response( jsonify( refimg=r_data, who=p.tag, distance='0.0' ) )
|
||||
|
||||
################################################################################
|
||||
# /person/<id> -> GET/POST(save or delete) -> shows/edits/deletes a single person
|
||||
@@ -197,7 +201,7 @@ def person(id):
|
||||
|
||||
if request.method == 'POST':
|
||||
try:
|
||||
person = Person.query.get(id)
|
||||
person = db.session.get(Person, id)
|
||||
if 'delete' in request.form:
|
||||
SetFELog( f"Successfully deleted Person: ({person.tag})" )
|
||||
|
||||
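Person.query.get(id) is the legacy Flask-SQLAlchemy/1.x Query API; db.session.get(Model, pk) is the SQLAlchemy 2.x replacement this branch switches to, and it keeps the same "None if missing" behaviour the code below relies on. A self-contained sketch with a stand-in model (not one from this diff):

```python
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"   # illustrative DB
db = SQLAlchemy(app)

class Demo(db.Model):                    # stand-in model, just for this sketch
    id = db.Column(db.Integer, primary_key=True)

with app.app_context():
    db.create_all()
    db.session.add(Demo(id=1))
    db.session.commit()
    # legacy style (deprecated under SQLAlchemy 2.x):  Demo.query.get(1)
    found = db.session.get(Demo, 1)      # returns the row
    missing = db.session.get(Demo, 999)  # returns None, same as .get() did
    print(found.id, missing)
```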
@@ -237,7 +241,7 @@ def person(id):
|
||||
SetFELog( f"<b>Failed to modify Person:</b> {e}", "danger" )
|
||||
return redirect( url_for( 'persons' ) )
|
||||
else:
|
||||
person = Person.query.get(id)
|
||||
person = db.session.get(Person, id)
|
||||
if not person:
|
||||
SetFELog( f"No such person with id: {id}", "danger" )
|
||||
return redirect("/")
|
||||
@@ -267,7 +271,7 @@ def add_refimg():
|
||||
except Exception as e:
|
||||
SetFELog( f"<b>Failed to load reference image:</b> {e}", "danger" )
|
||||
|
||||
AddRefimgToPerson( fname, person )
|
||||
r=AddRefimgToPerson( fname, person )
|
||||
return redirect( url_for( 'person', id=person.id) )
|
||||
|
||||
################################################################################
|
||||
@@ -289,6 +293,29 @@ def find_persons(who):
|
||||
|
||||
return make_response( resp )
|
||||
|
||||
class FaceRefimgLinkSchema(ma.SQLAlchemyAutoSchema):
|
||||
class Meta: model = FaceRefimgLink
|
||||
face_distance = ma.auto_field() # Explicitly include face_distance
|
||||
load_instance = True
|
||||
|
||||
class PersonSchema(ma.SQLAlchemyAutoSchema):
|
||||
class Meta: model=Person
|
||||
load_instance = True
|
||||
|
||||
class RefimgSchema(ma.SQLAlchemyAutoSchema):
|
||||
class Meta:
|
||||
model = Refimg
|
||||
exclude = ('face',)
|
||||
load_instance = True
|
||||
person = ma.Nested(PersonSchema)
|
||||
|
||||
class FaceSchema(ma.SQLAlchemyAutoSchema):
|
||||
class Meta:
|
||||
model=Face
|
||||
exclude = ('face',)
|
||||
load_instance = True
|
||||
refimg = ma.Nested(RefimgSchema,allow_none=True)
|
||||
refimg_lnk = ma.Nested(FaceRefimgLinkSchema,allow_none=True)
|
||||
|
||||
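These auto-schemas exist so the Ajax handlers further down can serialise whole objects (a refimg plus its nested person, a face plus its refimg link) straight into jsonify(). A cut-down sketch of the nested-dump idea using plain marshmallow schemas and made-up data, rather than the SQLAlchemy-backed ones above:

```python
from marshmallow import Schema, fields

class PersonSketch(Schema):            # stand-in for the auto-generated PersonSchema
    tag = fields.Str()

class RefimgSketch(Schema):            # stand-in for RefimgSchema (face bytes excluded)
    id = fields.Int()
    fname = fields.Str()
    person = fields.Nested(PersonSketch)

refimg = {"id": 7, "fname": "ref_0007.jpg", "person": {"tag": "alice"}}   # made-up data
r_data = RefimgSketch().dump(refimg)
print(r_data)   # {'id': 7, 'fname': 'ref_0007.jpg', 'person': {'tag': 'alice'}}
# the routes below hand a dict like this straight to jsonify(refimg=r_data, ...)
```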
################################################################################
|
||||
# /add_refimg_to_person/ -> POST
|
||||
@@ -296,12 +323,19 @@ def find_persons(who):
|
||||
@app.route("/add_refimg_to_person", methods=["POST"])
|
||||
@login_required
|
||||
def add_refimg_to_person():
|
||||
f = Face.query.get( request.form['face_id'] )
|
||||
p = Person.query.get( request.form['person_id'] )
|
||||
stmt = select(Face).options( joinedload(Face.refimg_lnk) ).where(Face.id == request.form['face_id'])
|
||||
f=db.session.execute(stmt).scalars().first()
|
||||
stmt = select(Person).options( joinedload(Person.refimg) ).where(Person.id == request.form['person_id'])
|
||||
p=db.session.execute(stmt).scalars().first()
|
||||
|
||||
# add this fname (of temp refimg) to person
|
||||
fname=TempRefimgFile( request.form['refimg_data'], p.tag )
|
||||
AddRefimgToPerson( fname, p )
|
||||
r=AddRefimgToPerson( fname, p )
|
||||
|
||||
# connect the refimg to the face in the db, now we have added this refimg to the person
|
||||
frl=FaceRefimgLink( face_id=f.id, refimg_id=r.id, face_distance=0 )
|
||||
db.session.add(frl)
|
||||
db.session.commit()
|
||||
|
||||
if request.form['search'] == "true":
|
||||
jex=[]
|
||||
@@ -316,7 +350,12 @@ def add_refimg_to_person():
|
||||
jex.append( JobExtra( name=f"path_type", value=str(ptype.id) ) )
|
||||
job=NewJob( name="run_ai_on_path", num_files=0, wait_for=None, jex=jex, desc="Look for face(s) in storage path(s)" )
|
||||
|
||||
return make_response( jsonify( who=p.tag, distance='0.0' ) )
|
||||
refimg_schema=RefimgSchema(many=False)
|
||||
r_data=refimg_schema.dump(r)
|
||||
frl_schema=FaceRefimgLinkSchema(many=False)
|
||||
frl_data=frl_schema.dump(frl)
|
||||
|
||||
return make_response( jsonify( refimg=r_data, frl=frl_data ) )
|
||||
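The select(...).options(joinedload(...)) / scalars().first() chain replaces the old Query.get calls so the relationship needed for the JSON reply is loaded in the same round trip. A self-contained sketch of that 2.0-style pattern with illustrative models (not the ones in this diff):

```python
from sqlalchemy import ForeignKey, create_engine, select
from sqlalchemy.orm import (DeclarativeBase, Mapped, Session, joinedload,
                            mapped_column, relationship)

class Base(DeclarativeBase):
    pass

class Owner(Base):                       # stand-in for Person
    __tablename__ = "owner"
    id: Mapped[int] = mapped_column(primary_key=True)

class Item(Base):                        # stand-in for Face
    __tablename__ = "item"
    id: Mapped[int] = mapped_column(primary_key=True)
    owner_id: Mapped[int] = mapped_column(ForeignKey("owner.id"))
    owner: Mapped[Owner] = relationship()

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Item(id=10, owner=Owner(id=1)))
    session.commit()
    stmt = select(Item).options(joinedload(Item.owner)).where(Item.id == 10)
    item = session.execute(stmt).scalars().first()   # owner already loaded, no lazy query
    print(item.id, item.owner.id)
```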
|
||||
################################################################################
|
||||
# /add_force_match_override -> POST
|
||||
@@ -346,7 +385,9 @@ def add_force_match_override():
|
||||
NewJob( "metadata", num_files=0, wait_for=None, jex=jex, desc="create metadata for adding forced match" )
|
||||
|
||||
# this will reply to the Ajax / POST, and cause the page to re-draw with new face override to person_tag
|
||||
return make_response( jsonify( person_tag=p.tag ) )
|
||||
person_schema = PersonSchema(many=False)
|
||||
p_data = person_schema.dump(p)
|
||||
return make_response( jsonify( person=p_data ) )
|
||||
|
||||
################################################################################
|
||||
# /remove_force_match_override -> POST
|
||||
@@ -397,6 +438,11 @@ def remove_no_match_override():
|
||||
return make_response( jsonify( face_id=face_id ) )
|
||||
|
||||
|
||||
class FaceOverrideTypeSchema(ma.SQLAlchemyAutoSchema):
|
||||
class Meta:
|
||||
model = FaceOverrideType
|
||||
load_instance = True
|
||||
|
||||
################################################################################
|
||||
# /add_no_match_override -> POST
|
||||
################################################################################
|
||||
@@ -424,5 +470,6 @@ def add_no_match_override():
|
||||
# don't do a status update here, the F/E is in the middle of a dbox, just send the metadata through to the B/E
|
||||
NewJob( "metadata", num_files=0, wait_for=None, jex=jex, desc="create metadata for adding forced non-match" )
|
||||
|
||||
# this will reply to the Ajax / POST, and cause the page to re-draw with new face override to person_tag
|
||||
return make_response( jsonify( type=t.name ) )
|
||||
fot_schema = FaceOverrideTypeSchema(many=False)
|
||||
t_data=fot_schema.dump(t)
|
||||
return make_response( jsonify( type_id=t.id, type=t_data ) )
|
||||
|
||||
41
query.py
Normal file
@@ -0,0 +1,41 @@
|
||||
from flask_login import UserMixin, login_required
|
||||
from main import db
|
||||
#from sqlalchemy import Sequence
|
||||
#from flask import request, redirect, make_response, jsonify
|
||||
#from main import db, app, ma
|
||||
#from sqlalchemy.exc import SQLAlchemyError
|
||||
|
||||
|
||||
# pylint: disable=no-member
|
||||
|
||||
################################################################################
|
||||
# Class describing a saved Query / browse state in the database via sqlalchemy
|
||||
# id is unique id in DB
|
||||
# the remaining columns capture a stored query/browse state
# (path_type, noo, grouping, q_offset, folder, entry_list, root, cwd, search_term, current, created)
|
||||
################################################################################
|
||||
class Query(UserMixin,db.Model):
|
||||
__tablename__ = "query"
|
||||
id = db.Column(db.Integer, db.Sequence('query_id_seq'), primary_key=True)
|
||||
path_type = db.Column(db.String)
|
||||
noo = db.Column(db.String)
|
||||
grouping = db.Column(db.String)
|
||||
q_offset = db.Column(db.Integer)
|
||||
folder = db.Column(db.Boolean)
|
||||
entry_list = db.Column(db.String)
|
||||
root = db.Column(db.String)
|
||||
cwd = db.Column(db.String)
|
||||
search_term = db.Column(db.String)
|
||||
current = db.Column(db.Integer)
|
||||
created = db.Column(db.DateTime(timezone=True))
|
||||
|
||||
def __repr__(self):
|
||||
str=f"<{self.__class__.__name__}("
|
||||
for k, v in self.__dict__.items():
|
||||
str += f"{k}={v!r}, "
|
||||
str=str.rstrip(", ") + ")>"
|
||||
return str
|
||||
|
||||
def get_id(self):
|
||||
return str(self.id)   # Query has no dn column; use the primary key for flask_login
|
||||
@@ -1,7 +1,9 @@
|
||||
numpy==1.26.4
|
||||
setuptools
|
||||
flask
|
||||
flask_login
|
||||
flask-ldap3-login
|
||||
sqlalchemy
|
||||
sqlalchemy>=2.0.36
|
||||
flask-sqlalchemy
|
||||
SQLAlchemy-serializer
|
||||
marshmallow-sqlalchemy
|
||||
@@ -19,3 +21,5 @@ Werkzeug
|
||||
flask-compress
|
||||
ffmpeg-python
|
||||
psycopg2
|
||||
python3-ldap
|
||||
gunicorn
|
||||
|
||||
@@ -137,7 +137,7 @@ def SettingsRBPath():
|
||||
settings = Settings.query.first()
|
||||
if settings == None:
|
||||
from job import SetFELog
|
||||
SetFELog( message="Cannot create file data with no settings / recycle bin path is missing", log_level="error", persistent=True, cant_close=False)
|
||||
SetFELog( message="Cannot create file data with no settings / recycle bin path is missing", level="danger", persistent=True, cant_close=False)
|
||||
return
|
||||
# path setting is an absolute path, just use it, otherwise prepend base_path first
|
||||
if settings.recycle_bin_path[0] == '/':
|
||||
@@ -155,7 +155,7 @@ def SettingsSPath():
|
||||
settings = Settings.query.first()
|
||||
if settings == None:
|
||||
from job import SetFELog
|
||||
SetFELog( message="Cannot create file data with no settings / storage path is missing", log_level="error", persistent=True, cant_close=False)
|
||||
SetFELog( message="Cannot create file data with no settings / storage path is missing", level="danger", persistent=True, cant_close=False)
|
||||
return
|
||||
if settings.storage_path[0] == '/':
|
||||
path=settings.storage_path
|
||||
|
||||
30
shared.py
@@ -5,6 +5,7 @@ import io
|
||||
import base64
|
||||
import subprocess
|
||||
from PIL import Image, ImageOps
|
||||
import numpy as np
|
||||
|
||||
class PA:
|
||||
def __repr__(self):
|
||||
@@ -24,7 +25,6 @@ class PA:
|
||||
|
||||
|
||||
hostname = socket.gethostname()
|
||||
PROD_HOST="pa_web"
|
||||
|
||||
# dict to store name of icon in icons.svg so we can use by referece in html
|
||||
ICON={}
|
||||
@@ -40,19 +40,27 @@ OLDEST_LOG_LIMIT = 5
|
||||
if hostname == "lappy":
|
||||
PA_JOB_MANAGER_HOST="localhost"
|
||||
DB_URL = 'postgresql+psycopg2://pa:for_now_pa@localhost:5432/pa'
|
||||
PA_EXIF_ROTATER = './utils/pa_exifautotran'
|
||||
PA_EXIF_AUTOROTATE = './utils/pa_exifautotran'
|
||||
PA_EXIF_ROTATER = './utils/pa_rotate'
|
||||
# if we don't set the env, or we are explicitly DEV (or container), run the web server on localhost & db on the dev database
|
||||
elif 'ENV' not in os.environ or os.environ['ENV'] == "development":
|
||||
elif 'ENV' not in os.environ or os.environ['ENV'] == "development" or os.environ['ENV'] == "container":
|
||||
PA_JOB_MANAGER_HOST="localhost"
|
||||
DB_URL = 'postgresql+psycopg2://pa:for_now_pa@mara.ddp.net:65432/pa'
|
||||
PA_EXIF_ROTATER = './utils/pa_exifautotran'
|
||||
# DB_URL = 'postgresql+psycopg2://pa:for_now_pa@mara.ddp.net:65432/pa'
|
||||
DB_URL = 'postgresql+psycopg2://pa:for_now_pa@padb_dev/pa'
|
||||
PA_EXIF_AUTOROTATE = './utils/pa_exifautotran'
|
||||
PA_EXIF_ROTATER = './utils/pa_rotate'
|
||||
# if we are explicitly on PROD, run web server on localhost (pa_web container) & db on mara (port 5432 on the padb container, only accessed via internal docker ports)
|
||||
elif os.environ['ENV'] == "production":
|
||||
PA_JOB_MANAGER_HOST="localhost"
|
||||
DB_URL = 'postgresql+psycopg2://pa:for_now_pa@padb/pa'
|
||||
PA_EXIF_ROTATER = '/code/utils/pa_exifautotran'
|
||||
PA_EXIF_AUTOROTATE = '/code/utils/pa_exifautotran'
|
||||
PA_EXIF_ROTATER = '/code/utils/pa_rotate'
|
||||
else:
|
||||
print( "ERROR: I do not know which environment (development, etc.) and which DB (on which host to use)" )
|
||||
if 'ENV' not in os.environ:
|
||||
print( f"ERROR: no ENV variable set in the environment" )
|
||||
else:
|
||||
print( f"ERROR: ENV is {os.environ['ENV']}" )
|
||||
exit( -1 )
|
||||
|
||||
# PORT number we connect to the pa_job_manager on - by default it runs on the
|
||||
@@ -85,8 +93,8 @@ def CreateSelect(name, selected, list, js="", add_class="", vals={} ):
|
||||
# TODO: can this be collapsed into using CreateSelect() above - probably, if the 'selected' passed in were 'In Folders' or 'Flat View' -- but I don't think that value is in a var???
|
||||
# Helper function used in html files to create a bootstrap'd select with options. Same as CreateSelect() really, except it
|
||||
# hard-codes the True/False handling around the 'if selected' part (string-based "True"/"False" values, with the list being "In Folders" / "Flat View")
|
||||
def CreateFoldersSelect(selected, add_class=""):
|
||||
str = f'<select id="folders" name="folders" class="{add_class} sm-txt bg-white text-info border-info border-1 p-1" onChange="this.form.submit()">'
|
||||
def CreateFoldersSelect(selected, js="", add_class=""):
|
||||
str = f'<select id="folders" name="folders" class="{add_class} sm-txt bg-white text-info border-info border-1 p-1" onChange="{js};this.form.submit()">'
|
||||
# if selected is true, then folders == true, so make this the selected option
|
||||
if( selected ):
|
||||
str += '<option selected value="True">In Folders</option>'
|
||||
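The new js argument simply gets spliced in front of this.form.submit() in the onChange handler, so a caller can run a page-side snippet before the form posts. A tiny, hypothetical usage sketch (save_scroll() is an assumed client-side helper, not something defined in this branch):

```python
from shared import CreateFoldersSelect   # same import style the other modules use

# renders an onChange of "save_scroll();this.form.submit()"
html = CreateFoldersSelect(selected=True, js="save_scroll()")
print(html)
```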
@@ -132,7 +140,7 @@ def GenThumb(fname,auto_rotate):
|
||||
im_orig = Image.open(fname)
|
||||
if im_orig.format == 'JPEG':
|
||||
# run cmdline util to re-orient jpeg (only changes if needed, and does it losslessly)
|
||||
p = subprocess.run([PA_EXIF_ROTATER,fname] )
|
||||
p = subprocess.run([PA_EXIF_AUTOROTATE,fname] )
|
||||
im=Image.open(fname)
|
||||
else:
|
||||
im=im_orig
|
||||
@@ -161,10 +169,12 @@ def GenThumb(fname,auto_rotate):
|
||||
# generate Face data (and location) - wrapper func of face_recognition library
|
||||
# used to store refimg data into the DB
|
||||
def GenFace(fname, model):
|
||||
try:
|
||||
img = face_recognition.load_image_file(fname)
|
||||
location = face_recognition.face_locations(img, model=model)
|
||||
encodings = face_recognition.face_encodings(img, known_face_locations=location)
|
||||
if len(encodings) and len(location):
|
||||
return encodings[0].tobytes(), location[0]
|
||||
else:
|
||||
except Exception as e:
|
||||
print( f"GenFace failed: {e}" )
|
||||
return None, None
|
||||
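GenFace() stores encodings[0].tobytes(), so anything that later compares faces has to turn those bytes back into the 128-value float64 vector that face_recognition produces. A hedged sketch of that round trip using synthetic vectors instead of real DB rows:

```python
import numpy as np

stored = np.random.rand(128)        # pretend this is an encoding GenFace() produced
candidate = np.random.rand(128)     # and this is a freshly detected face

blob = stored.tobytes()                            # what ends up in the face/refimg BYTEA column
restored = np.frombuffer(blob, dtype=np.float64)   # decode straight back to the vector

# face_recognition.face_distance() is essentially a Euclidean norm, so the
# comparison can be sketched without the library:
distance = np.linalg.norm(restored - candidate)
print(distance < 0.6)   # 0.6 is the library's conventional default tolerance
```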
|
||||
249
states.py
@@ -1,10 +1,12 @@
|
||||
from flask import request, render_template, redirect, url_for
|
||||
from settings import Settings, SettingsIPath, SettingsSPath, SettingsRBPath
|
||||
from flask_login import login_required, current_user
|
||||
from main import db, app, ma
|
||||
from shared import PA
|
||||
from user import PAUser
|
||||
from datetime import datetime
|
||||
from job import SetFELog
|
||||
from shared import SymlinkName
|
||||
import pytz
|
||||
import re
|
||||
|
||||
@@ -17,30 +19,17 @@ class PA_UserState(db.Model):
|
||||
__tablename__ = "pa_user_state"
|
||||
id = db.Column(db.Integer, db.Sequence('pa_user_state_id_seq'), primary_key=True )
|
||||
pa_user_dn = db.Column(db.String, db.ForeignKey('pa_user.dn'), primary_key=True )
|
||||
last_used = db.Column(db.DateTime(timezone=True))
|
||||
path_type = db.Column(db.String, primary_key=True, unique=False, nullable=False )
|
||||
noo = db.Column(db.String, unique=False, nullable=False )
|
||||
grouping = db.Column(db.String, unique=False, nullable=False )
|
||||
how_many = db.Column(db.Integer, unique=False, nullable=False )
|
||||
st_offset = db.Column(db.Integer, unique=False, nullable=False )
|
||||
size = db.Column(db.Integer, unique=False, nullable=False )
|
||||
folders = db.Column(db.Boolean, unique=False, nullable=False )
|
||||
root = db.Column(db.String, unique=False, nullable=False )
|
||||
cwd = db.Column(db.String, unique=False, nullable=False )
|
||||
## for now being lazy and not doing a separate table until I settle on the needed fields (and when they are needed)
|
||||
# only used if ptype == View
|
||||
view_eid = db.Column(db.Integer, unique=False, nullable=False )
|
||||
orig_ptype = db.Column(db.String, unique=False, nullable=False )
|
||||
# only used if view and orig_ptype was search
|
||||
orig_search_term = db.Column(db.String, unique=False, nullable=False )
|
||||
orig_url = db.Column(db.String, unique=False, nullable=False )
|
||||
current = db.Column(db.Integer)
|
||||
first_eid = db.Column(db.Integer)
|
||||
last_eid = db.Column(db.Integer)
|
||||
num_entries = db.Column(db.Integer)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<pa_user_dn: {self.pa_user_dn}, path_type: {self.path_type}, noo: {self.noo}, grouping: {self.grouping}, how_many: {self.how_many}, st_offset: {self.st_offset}, size: {self.size}, folders: {self.folders}, root: {self.root}, cwd: {self.cwd}, view_eid: {self.view_eid}, orig_ptype: {self.orig_ptype}, orig_search_term: {self.orig_search_term}, orig_url: {self.orig_url}, current={self.current}, first_eid={self.first_eid}, last_eid={self.last_eid}, num_entries={self.num_entries}>"
|
||||
return f"<pa_user_dn: {self.pa_user_dn}, path_type: {self.path_type}, noo: {self.noo}, grouping: {self.grouping}, how_many: {self.how_many}, size: {self.size}, folders: {self.folders}, root: {self.root}, cwd: {self.cwd}>"
|
||||
|
||||
|
||||
################################################################################
|
||||
@@ -50,143 +39,35 @@ class PA_UserState(db.Model):
|
||||
################################################################################
|
||||
class States(PA):
|
||||
def __init__(self, request):
|
||||
self.path_type=''
|
||||
self.orig_search_term = ''
|
||||
self.url = request.path
|
||||
self.view_eid = None
|
||||
self.current=0
|
||||
self.first_eid=0
|
||||
self.last_eid=0
|
||||
self.num_entries=0
|
||||
|
||||
# this is any next/prev or noo, grouping, etc. change (so use referrer to work out what to do with this)
|
||||
# because this can happen on a view, or files_up, etc. change this FIRST
|
||||
if 'change_file_opts' in request.path:
|
||||
ref=request.referrer
|
||||
base=request.base_url
|
||||
base=base.replace("change_file_opts", "")
|
||||
self.url = "/"+ref.replace(base, "" )
|
||||
|
||||
# if view_list, then we really are a view, and view_eid should be in the form
|
||||
if 'view_list' in request.path:
|
||||
self.path_type = 'View'
|
||||
self.view_eid = request.form['view_eid']
|
||||
self.url = request.form['orig_url']
|
||||
# this occurs ONLY when a POST to /view/<id> occurs (at this stage orig_url will be from an import, storage, bin or search)
|
||||
elif 'view' in request.path:
|
||||
self.path_type = 'View'
|
||||
self.view_eid = self.url[6:]
|
||||
# use orig url to define defaults/look up states for 'last' import/storage/bin/search
|
||||
if request.method == "POST":
|
||||
self.url = request.form['orig_url']
|
||||
else:
|
||||
# GET's occur on redirect, and we don't have a form, so get it from pref
|
||||
st=self.url[8:]
|
||||
ref=request.referrer
|
||||
if 'search' in ref:
|
||||
st=re.sub( '.+/search/', '', ref )
|
||||
else:
|
||||
st=''
|
||||
pref=PA_UserState.query.filter(PA_UserState.pa_user_dn==current_user.dn,PA_UserState.path_type==self.path_type,PA_UserState.view_eid==self.view_eid,PA_UserState.orig_search_term==st).first()
|
||||
if not pref:
|
||||
SetFELog( message=f"ERROR: pref not found - DONT CLOSE THIS!!! dn={current_user.dn}, st={st}, s={self}????" , log_level="error", persistent=True, cant_close=True )
|
||||
redirect("/")
|
||||
else:
|
||||
self.url = pref.orig_url
|
||||
|
||||
# set the prefix based on path
|
||||
path=None
|
||||
if 'files_ip' in self.url or 'file_list_ip' in self.url:
|
||||
if self.path_type == "View":
|
||||
self.orig_ptype = 'Import'
|
||||
self.orig_url = self.url
|
||||
else:
|
||||
self.path_type = 'Import'
|
||||
path = SettingsIPath()
|
||||
elif 'files_sp' in self.url:
|
||||
if self.path_type == "View":
|
||||
self.orig_ptype = 'Storage'
|
||||
self.orig_url = self.url
|
||||
else:
|
||||
self.path_type = 'Storage'
|
||||
path = SettingsSPath()
|
||||
elif 'files_rbp' in self.url:
|
||||
if self.path_type == "View":
|
||||
self.orig_ptype = 'Bin'
|
||||
self.orig_url = self.url
|
||||
else:
|
||||
self.path_type = 'Bin'
|
||||
path = SettingsRBPath()
|
||||
elif 'search' in self.url:
|
||||
# okay if we are a search, but came from a view then get last_search_state form prefs and use it
|
||||
self.orig_search_term = self.url[8:]
|
||||
if self.path_type == "View":
|
||||
self.orig_ptype = 'Search'
|
||||
self.orig_url = self.url
|
||||
else:
|
||||
self.path_type = 'Search'
|
||||
elif 'view' in self.url:
|
||||
# use url to get eid of viewed entry
|
||||
self.view_eid = self.url[6:]
|
||||
self.path_type="View"
|
||||
self.orig_url=self.url
|
||||
elif 'change_file_opts' not in self.url:
|
||||
SetFELog( message=f"ERROR: DDP messed up, failed to match URL {self.url} for settings this will fail, redirecting to home" , log_level="error", persistent=True, cant_close=True )
|
||||
SetFELog( message=f"referrer={request.referrer}" , log_level="error",
|
||||
persistent=True, cant_close=True )
|
||||
return
|
||||
self.search_term = ''
|
||||
else:
|
||||
self.path_type=''
|
||||
|
||||
if self.path_type == 'View':
|
||||
pref=PA_UserState.query.filter(PA_UserState.pa_user_dn==current_user.dn,PA_UserState.path_type==self.path_type,PA_UserState.view_eid==self.view_eid,PA_UserState.orig_search_term==self.orig_search_term).first()
|
||||
# should find original path or search for this view (if not a search, search_term='')
|
||||
orig_pref=PA_UserState.query.filter(PA_UserState.pa_user_dn==current_user.dn,PA_UserState.path_type==self.orig_ptype,PA_UserState.orig_search_term==self.orig_search_term).first()
|
||||
if not orig_pref:
|
||||
SetFELog( message=f"ERROR: DDP messed up 2, failed to find orig_pref for a view pt={self.path_type} for search={self.orig_search_term}" , log_level="error", persistent=True, cant_close=True )
|
||||
SetFELog( message=f"referrer={request.referrer}" , log_level="error",
|
||||
persistent=True, cant_close=True )
|
||||
return
|
||||
elif self.path_type == 'Search':
|
||||
pref=PA_UserState.query.filter(PA_UserState.pa_user_dn==current_user.dn,PA_UserState.path_type==self.path_type,PA_UserState.orig_search_term==self.orig_search_term).first()
|
||||
if path:
|
||||
self.prefix = SymlinkName(self.path_type,path,path+'/')
|
||||
else:
|
||||
pref=PA_UserState.query.filter(PA_UserState.pa_user_dn==current_user.dn,PA_UserState.path_type==self.path_type).first()
|
||||
self.prefix=None
|
||||
|
||||
if pref:
|
||||
self.grouping=pref.grouping
|
||||
self.how_many=pref.how_many
|
||||
self.offset=pref.st_offset
|
||||
self.size=pref.size
|
||||
self.cwd=pref.cwd
|
||||
self.orig_ptype=pref.orig_ptype
|
||||
self.orig_search_term=pref.orig_search_term
|
||||
self.orig_url = pref.orig_url
|
||||
self.view_eid = pref.view_eid
|
||||
self.current = pref.current
|
||||
if self.path_type == "View":
|
||||
self.root='static/' + self.orig_ptype
|
||||
self.first_eid=orig_pref.first_eid
|
||||
self.last_eid=orig_pref.last_eid
|
||||
self.num_entries=orig_pref.num_entries
|
||||
self.noo=orig_pref.noo
|
||||
self.folders=orig_pref.folders
|
||||
self.orig_search_term=orig_pref.orig_search_term
|
||||
else:
|
||||
self.root=pref.root
|
||||
self.first_eid = pref.first_eid
|
||||
self.last_eid = pref.last_eid
|
||||
self.num_entries = pref.num_entries
|
||||
self.noo=pref.noo
|
||||
self.folders=pref.folders
|
||||
else:
|
||||
# retrieve defaults from 'PAUser' where the defaults are stored
|
||||
u=PAUser.query.filter(PAUser.dn==current_user.dn).one()
|
||||
self.grouping=u.default_grouping
|
||||
self.how_many=u.default_how_many
|
||||
self.offset=0
|
||||
self.size=u.default_size
|
||||
if self.path_type == "View":
|
||||
self.root='static/' + self.orig_ptype
|
||||
self.first_eid=orig_pref.first_eid
|
||||
self.last_eid=orig_pref.last_eid
|
||||
self.num_entries=orig_pref.num_entries
|
||||
self.noo=orig_pref.noo
|
||||
self.folders=orig_pref.folders
|
||||
self.orig_search_term=orig_pref.orig_search_term
|
||||
else:
|
||||
self.root='static/' + self.path_type
|
||||
if self.path_type == 'Import':
|
||||
self.noo = u.default_import_noo
|
||||
@@ -199,103 +80,17 @@ class States(PA):
|
||||
self.noo=u.default_search_noo
|
||||
self.folders=False
|
||||
|
||||
self.default_flat_noo=u.default_import_noo
|
||||
self.default_folder_noo=u.default_storage_noo
|
||||
self.default_search_noo=u.default_search_noo
|
||||
self.cwd=self.root
|
||||
if not hasattr(self, 'orig_ptype'):
|
||||
self.orig_ptype=None
|
||||
if not hasattr(self, 'orig_search_term'):
|
||||
self.orig_search_term=None
|
||||
self.orig_url = self.url
|
||||
|
||||
# the above are defaults; if we are here then we have current values, so use them instead if they are set -- AI: searches don't set them, so then we use the ones in the DB first
|
||||
if request.method=="POST":
|
||||
if self.path_type != "View" and 'noo' in request.form:
|
||||
# we are changing values based on a POST to the form, if we changed the noo option, we need to reset things
|
||||
if 'change_file_opts' in request.path and self.noo != request.form['noo']:
|
||||
self.noo=request.form['noo']
|
||||
self.first_eid=0
|
||||
self.last_eid=0
|
||||
self.offset=0
|
||||
if 'how_many' in request.form:
|
||||
self.how_many=request.form['how_many']
|
||||
if 'offset' in request.form:
|
||||
self.offset=int(request.form['offset'])
|
||||
if 'grouping' in request.form:
|
||||
self.grouping=request.form['grouping']
|
||||
# this can be null if we come from view by details
|
||||
if 'size' in request.form:
|
||||
self.size = request.form['size']
|
||||
# seems HTML can't do booleans, it uses strings, so convert
|
||||
if self.path_type != "View" and 'folders' in request.form:
|
||||
# we are changing values based on a POST to the form, if we are in folder view and we changed the folders option, we need to reset things
|
||||
if 'change_file_opts' in request.path:
|
||||
if self.folders and self.folders != request.form['folders']:
|
||||
self.num_entries=0
|
||||
self.first_eid=0
|
||||
self.last_eid=0
|
||||
if request.form['folders'] == "False":
|
||||
self.folders=False
|
||||
else:
|
||||
self.folders=True
|
||||
# have to force grouping to None if we flick to folders from a flat view with grouping (otherwise we print out
|
||||
# group headings for child content that is not in the CWD)
|
||||
self.grouping=None
|
||||
if 'orig_url' in request.form:
|
||||
self.orig_url = request.form['orig_url']
|
||||
|
||||
# possible to not be set for an AI: search
|
||||
if 'cwd' in request.form:
|
||||
self.cwd = request.form['cwd']
|
||||
if 'prev' in request.form:
|
||||
self.offset -= int(self.how_many)
|
||||
# just in case we hit prev too fast, stop this...
|
||||
if self.offset < 0:
|
||||
self.offset=0
|
||||
if 'next' in request.form:
|
||||
if (self.offset + int(self.how_many)) < self.num_entries:
|
||||
self.offset += int(self.how_many)
|
||||
else:
|
||||
# should be impossible now, but leave jic
|
||||
SetFELog( message=f"WARNING: next image requested, but would go past end of list? - ignore this" , log_level="warning", persistent=True, cant_close=True )
|
||||
SetFELog( message=f"DDP - offset={self.offset} + how_many={self.how_many} > num_entries={self.num_entries}" , log_level="error", persistent=True, cant_close=True )
|
||||
if 'current' in request.form:
|
||||
self.current = int(request.form['current'])
|
||||
|
||||
last_used=datetime.now(pytz.utc)
|
||||
# now save pref
|
||||
if not pref:
|
||||
# insert new pref for this combo (might be a new search or view, or first time for a path)
|
||||
pref=PA_UserState( pa_user_dn=current_user.dn, last_used=last_used, path_type=self.path_type, view_eid=self.view_eid,
|
||||
noo=self.noo, grouping=self.grouping, how_many=self.how_many, st_offset=self.offset, size=self.size,
|
||||
folders=self.folders, root=self.root, cwd=self.cwd, orig_ptype=self.orig_ptype, orig_search_term=self.orig_search_term,
|
||||
orig_url=self.orig_url, current=self.current, first_eid=self.first_eid, last_eid=self.last_eid, num_entries=self.num_entries )
|
||||
else:
|
||||
# update this pref with the values calculated above (most likely from POST to form)
|
||||
pref.pa_user_dn=current_user.dn
|
||||
pref.path_type=self.path_type
|
||||
pref.view_eid=self.view_eid
|
||||
pref.noo=self.noo
|
||||
pref.grouping=self.grouping
|
||||
pref.how_many=self.how_many
|
||||
pref.st_offset=self.offset
|
||||
pref.size=self.size
|
||||
pref.folders=self.folders
|
||||
pref.root = self.root
|
||||
pref.cwd = self.cwd
|
||||
pref.orig_ptype = self.orig_ptype
|
||||
pref.orig_search_term = self.orig_search_term
|
||||
pref.orig_url = self.orig_url
|
||||
pref.last_used = last_used
|
||||
pref.first_eid = self.first_eid
|
||||
pref.last_eid = self.last_eid
|
||||
pref.num_entries = self.num_entries
|
||||
# only passed in (at the moment) in view_list
|
||||
pref.current = self.current
|
||||
|
||||
db.session.add(pref)
|
||||
db.session.commit()
|
||||
|
||||
return
|
||||
|
||||
def to_dict(self):
|
||||
# Automatically include all instance attributes
|
||||
return {key: value for key, value in vars(self).items()}
|
||||
|
||||
|
||||
################################################################################
|
||||
# /states -> GET only -> prints out list of all prefs (simple for now)
|
||||
################################################################################
|
||||
|
||||
326
tables.sql
@@ -1,189 +1,209 @@
|
||||
alter database PA set timezone to 'Australia/Victoria';
|
||||
ALTER DATABASE pa SET TIMEZONE TO 'Australia/Victoria';
|
||||
|
||||
create sequence PA_USER_ID_SEQ;
|
||||
create sequence PA_USER_STATE_ID_SEQ;
|
||||
create sequence FACE_ID_SEQ;
|
||||
create sequence PATH_ID_SEQ;
|
||||
create sequence PATH_TYPE_ID_SEQ;
|
||||
create sequence FILE_ID_SEQ;
|
||||
create sequence FILE_TYPE_ID_SEQ;
|
||||
create sequence JOBEXTRA_ID_SEQ;
|
||||
create sequence JOBLOG_ID_SEQ;
|
||||
create sequence JOB_ID_SEQ;
|
||||
create sequence PERSON_ID_SEQ;
|
||||
create sequence REFIMG_ID_SEQ;
|
||||
create sequence SETTINGS_ID_SEQ;
|
||||
create sequence PA_JOB_MANAGER_ID_SEQ;
|
||||
create sequence PA_JOB_MANAGER_FE_MESSAGE_ID_SEQ;
|
||||
create sequence FACE_OVERRIDE_TYPE_ID_SEQ;
|
||||
create sequence FACE_OVERRIDE_ID_SEQ;
|
||||
CREATE SEQUENCE pa_user_id_seq;
|
||||
CREATE SEQUENCE pa_user_state_id_seq;
|
||||
CREATE SEQUENCE face_id_seq;
|
||||
CREATE SEQUENCE path_id_seq;
|
||||
CREATE SEQUENCE path_type_id_seq;
|
||||
CREATE SEQUENCE file_id_seq;
|
||||
CREATE SEQUENCE file_type_id_seq;
|
||||
CREATE SEQUENCE jobextra_id_seq;
|
||||
CREATE SEQUENCE joblog_id_seq;
|
||||
CREATE SEQUENCE job_id_seq;
|
||||
CREATE SEQUENCE person_id_seq;
|
||||
CREATE SEQUENCE refimg_id_seq;
|
||||
CREATE SEQUENCE settings_id_seq;
|
||||
CREATE SEQUENCE pa_job_manager_id_seq;
|
||||
CREATE SEQUENCE pa_job_manager_fe_message_id_seq;
|
||||
CREATE SEQUENCE face_override_type_id_seq;
|
||||
CREATE SEQUENCE face_override_id_seq;
|
||||
CREATE SEQUENCE query_id_seq;
|
||||
|
||||
-- these are hard-coded at present, not sure I can discover the models from the API reflectively?
|
||||
create table AI_MODEL ( ID integer, NAME varchar(24), DESCRIPTION varchar(80), constraint PK_AI_MODEL primary key(ID) );
|
||||
insert into AI_MODEL values ( 1, 'hog', 'normal' );
|
||||
insert into AI_MODEL values ( 2, 'cnn', 'more accurate / much slower' );
|
||||
CREATE TABLE ai_model ( id INTEGER, name VARCHAR(24), description VARCHAR(80), CONSTRAINT pk_ai_model PRIMARY KEY(id) );
|
||||
INSERT INTO ai_model VALUES ( 1, 'hog', 'normal' );
|
||||
INSERT INTO ai_model VALUES ( 2, 'cnn', 'more accurate / much slower' );
|
||||
|
||||
create table SETTINGS(
|
||||
ID integer,
|
||||
BASE_PATH varchar, IMPORT_PATH varchar, STORAGE_PATH varchar, RECYCLE_BIN_PATH varchar, METADATA_PATH varchar,
|
||||
AUTO_ROTATE Boolean,
|
||||
DEFAULT_REFIMG_MODEL integer, DEFAULT_SCAN_MODEL integer, DEFAULT_THRESHOLD float,
|
||||
FACE_SIZE_LIMIT integer,
|
||||
SCHEDULED_IMPORT_SCAN integer, SCHEDULED_STORAGE_SCAN integer,
|
||||
SCHEDULED_BIN_CLEANUP integer, BIN_CLEANUP_FILE_AGE integer,
|
||||
JOB_ARCHIVE_AGE integer,
|
||||
constraint PK_SETTINGS_ID primary key(ID),
|
||||
constraint FK_DEFAULT_REFIMG_MODEL foreign key (DEFAULT_REFIMG_MODEL) references AI_MODEL(ID),
|
||||
constraint FK_DEFAULT_SCAN_MODEL foreign key (DEFAULT_SCAN_MODEL) references AI_MODEL(ID) );
|
||||
CREATE TABLE settings(
|
||||
id INTEGER,
|
||||
base_path VARCHAR, import_path VARCHAR, storage_path VARCHAR, recycle_bin_path VARCHAR, metadata_path VARCHAR,
|
||||
auto_rotate BOOLEAN,
|
||||
default_refimg_model INTEGER, default_scan_model INTEGER, default_threshold FLOAT,
|
||||
face_size_limit INTEGER,
|
||||
scheduled_import_scan INTEGER, scheduled_storage_scan INTEGER,
|
||||
scheduled_bin_cleanup INTEGER, bin_cleanup_file_age INTEGER,
|
||||
job_archive_age INTEGER,
|
||||
CONSTRAINT pk_settings_id PRIMARY KEY(id),
|
||||
CONSTRAINT fk_default_refimg_model FOREIGN KEY (default_refimg_model) REFERENCES ai_model(id),
|
||||
CONSTRAINT fk_default_scan_model FOREIGN KEY (default_scan_model) REFERENCES ai_model(id) );
|
||||
|
||||
create table PA_USER(
|
||||
ID integer,
|
||||
DN varchar unique,
|
||||
DEFAULT_IMPORT_NOO varchar,
|
||||
DEFAULT_STORAGE_NOO varchar,
|
||||
DEFAULT_SEARCH_NOO varchar,
|
||||
DEFAULT_GROUPING varchar(16),
|
||||
DEFAULT_HOW_MANY integer,
|
||||
DEFAULT_SIZE integer,
|
||||
DEFAULT_IMPORT_FOLDERS Boolean,
|
||||
DEFAULT_STORAGE_FOLDERS Boolean,
|
||||
constraint PK_PA_USER_ID primary key(ID) );
|
||||
CREATE TABLE pa_user(
|
||||
id INTEGER,
|
||||
dn VARCHAR UNIQUE,
|
||||
default_import_noo VARCHAR,
|
||||
default_storage_noo VARCHAR,
|
||||
default_search_noo VARCHAR,
|
||||
default_grouping VARCHAR(16),
|
||||
default_how_many INTEGER,
|
||||
default_size INTEGER,
|
||||
default_import_folders BOOLEAN,
|
||||
default_storage_folders BOOLEAN,
|
||||
CONSTRAINT pk_pa_user_id PRIMARY KEY(id) );
|
||||
|
||||
-- this is totally not 3rd normal form, but when I made it that, it was so complex, it was stupid
|
||||
-- so for the little data here, I'm deliberately doing a redundant data structure
|
||||
create table PA_USER_STATE ( ID integer, PA_USER_DN varchar(128), PATH_TYPE varchar(16),
|
||||
NOO varchar(16), GROUPING varchar(16), HOW_MANY integer, ST_OFFSET integer, SIZE integer, FOLDERS Boolean,
|
||||
ROOT varchar, CWD varchar,
|
||||
ORIG_PTYPE varchar, ORIG_SEARCH_TERM varchar, ORIG_URL varchar,
|
||||
VIEW_EID integer, CURRENT integer, FIRST_EID integer, LAST_EID integer, NUM_ENTRIES integer, LAST_USED timestamptz,
|
||||
constraint FK_PA_USER_DN foreign key (PA_USER_DN) references PA_USER(DN),
|
||||
constraint PK_PA_USER_STATES_ID primary key(ID ) );
|
||||
-- FIXME: NEED TO RETHINK THIS, not sure this even needs to be in the DB
|
||||
CREATE TABLE pa_user_state ( id INTEGER, pa_user_dn VARCHAR(128), path_type VARCHAR(16),
|
||||
noo VARCHAR(16), grouping VARCHAR(16), how_many INTEGER, size INTEGER, folders BOOLEAN,
|
||||
root VARCHAR, cwd VARCHAR,
|
||||
CONSTRAINT fk_pa_user_dn FOREIGN KEY (pa_user_dn) REFERENCES pa_user(dn),
|
||||
CONSTRAINT pk_pa_user_states_id PRIMARY KEY(id ) );
|
||||
|
||||
create table FILE_TYPE ( ID integer, NAME varchar(32) unique, constraint PK_FILE_TYPE_ID primary key(ID) );
|
||||
|
||||
create table PATH_TYPE ( ID integer, NAME varchar(16) unique, constraint PK_PATH_TYPE_ID primary key(ID) );
|
||||
|
||||
create table PATH ( ID integer, TYPE_ID integer, PATH_PREFIX varchar(1024), NUM_FILES integer,
|
||||
constraint PK_PATH_ID primary key(ID),
|
||||
constraint FK_PATH_TYPE_TYPE_ID foreign key (TYPE_ID) references PATH_TYPE(ID) );
|
||||
|
||||
create table ENTRY( ID integer, NAME varchar(128), TYPE_ID integer, EXISTS_ON_FS boolean,
|
||||
constraint PK_ENTRY_ID primary key(ID),
|
||||
constraint FK_FILE_TYPE_TYPE_ID foreign key (TYPE_ID) references FILE_TYPE(ID) );
|
||||
|
||||
create table FILE ( EID integer, SIZE_MB integer, HASH varchar(34), THUMBNAIL varchar, FACES_CREATED_ON float, LAST_HASH_DATE float, LAST_AI_SCAN float, YEAR integer, MONTH integer, DAY integer, WOY integer,
|
||||
constraint PK_FILE_ID primary key(EID),
|
||||
constraint FK_FILE_ENTRY_ID foreign key (EID) references ENTRY(ID) );
|
||||
|
||||
create table DEL_FILE ( FILE_EID integer, ORIG_PATH_PREFIX varchar(1024), constraint PK_DEL_FILE_FILE_EID primary key (FILE_EID),
|
||||
constraint FK_ENTRY_ID foreign key (FILE_EID) references FILE(EID) );
|
||||
|
||||
create table DIR ( EID integer, REL_PATH varchar(256), NUM_FILES integer, LAST_IMPORT_DATE float,
|
||||
constraint PK_DIR_EID primary key(EID),
|
||||
constraint FK_DIR_ENTRY_ID foreign key (EID) references ENTRY(ID) );
|
||||
|
||||
create table PATH_DIR_LINK ( path_id integer, dir_eid integer,
|
||||
constraint PK_PDL_path_id_dir_eid primary key (path_id, dir_eid),
|
||||
constraint FK_PDL_PATH_ID foreign key (PATH_ID) references PATH(ID),
|
||||
constraint FK_PDL_DIR_EID foreign key (DIR_EID) references DIR(EID) );
|
||||
|
||||
create table ENTRY_DIR_LINK ( entry_id integer, dir_eid integer,
|
||||
constraint PK_EDL_entry_id_dir_eid primary key (entry_id, dir_eid),
|
||||
constraint FK_EDL_ENTRY_ID foreign key (ENTRY_ID) references ENTRY(ID),
|
||||
constraint FK_EDL_DIR_EID foreign key (DIR_EID) references DIR(EID) );
|
||||
|
||||
create table PERSON ( ID integer default nextval('PERSON_ID_SEQ'), TAG varchar(48), FIRSTNAME varchar(48), SURNAME varchar(48),
|
||||
constraint PK_PERSON_ID primary key(ID) );
|
||||
alter sequence PERSON_ID_SEQ owned by PERSON.ID;
|
||||
CREATE TABLE query ( id INTEGER, path_type VARCHAR(16), noo VARCHAR(16), grouping VARCHAR(16), q_offset INTEGER,
|
||||
entry_list VARCHAR, folders BOOLEAN, root VARCHAR, cwd VARCHAR, search_term VARCHAR, current INTEGER,
|
||||
created TIMESTAMPTZ,
|
||||
CONSTRAINT pk_query_id PRIMARY KEY(id ) );
|
||||
|
||||
|
||||
create table REFIMG ( ID integer, FNAME varchar(128), FACE bytea, ORIG_W integer, ORIG_H integer,
|
||||
FACE_TOP integer, FACE_RIGHT integer, FACE_BOTTOM integer, FACE_LEFT integer, CREATED_ON float, THUMBNAIL varchar, MODEL_USED integer,
|
||||
constraint PK_REFIMG_ID primary key(ID),
|
||||
constraint FK_REFIMG_MODEL_USED foreign key (MODEL_USED) references AI_MODEL(ID) );
|
||||
alter sequence REFIMG_ID_SEQ owned by REFIMG.ID;
|
||||
CREATE TABLE file_type ( id INTEGER, name VARCHAR(32) UNIQUE, CONSTRAINT pk_file_type_id PRIMARY KEY(id) );
|
||||
|
||||
create table FACE( ID integer, FACE bytea, FACE_TOP integer, FACE_RIGHT integer, FACE_BOTTOM integer, FACE_LEFT integer,
|
||||
W integer, H integer, constraint PK_FACE_ID primary key(ID) );
|
||||
CREATE TABLE path_type ( id INTEGER, name VARCHAR(16) UNIQUE, CONSTRAINT pk_path_type_id PRIMARY KEY(id) );
|
||||
|
||||
create table FACE_FILE_LINK( FACE_ID integer, FILE_EID integer, MODEL_USED integer,
|
||||
constraint PK_FFL_FACE_ID_FILE_ID primary key(FACE_ID, FILE_EID),
|
||||
constraint FK_FFL_FACE_ID foreign key (FACE_ID) references FACE(ID) on delete cascade,
|
||||
constraint FK_FFL_FILE_EID foreign key (FILE_EID) references FILE(EID),
|
||||
constraint FK_FFL_MODEL_USED foreign key (MODEL_USED) references AI_MODEL(ID) );
|
||||
CREATE TABLE path ( id INTEGER, type_id INTEGER, path_prefix VARCHAR(1024), num_files INTEGER,
|
||||
CONSTRAINT pk_path_id PRIMARY KEY(id),
|
||||
CONSTRAINT fk_path_type_type_id FOREIGN KEY (type_id) REFERENCES path_type(id) );
|
||||
|
||||
create table FACE_REFIMG_LINK( FACE_ID integer, REFIMG_ID integer, FACE_DISTANCE float,
|
||||
constraint PK_FRL_FACE_ID_REFIMG_ID primary key(FACE_ID, REFIMG_ID),
|
||||
constraint FK_FRL_FACE_ID foreign key (FACE_ID) references FACE(ID) on delete cascade,
|
||||
constraint FK_FRL_REFIMG_ID foreign key (REFIMG_ID) references REFIMG(ID) );
|
||||
CREATE TABLE entry( id INTEGER, name VARCHAR(128), type_id INTEGER, exists_on_fs BOOLEAN,
|
||||
CONSTRAINT pk_entry_id PRIMARY KEY(id),
|
||||
CONSTRAINT fk_file_type_type_id FOREIGN KEY (type_id) REFERENCES file_type(id) );
|
||||
|
||||
create table FACE_OVERRIDE_TYPE ( ID integer, NAME varchar unique, constraint PK_FACE_OVERRIDE_TYPE_ID primary key(ID) );
|
||||
insert into FACE_OVERRIDE_TYPE values ( (select nextval('FACE_OVERRIDE_TYPE_ID_SEQ')), 'Manual match to existing person' );
|
||||
insert into FACE_OVERRIDE_TYPE values ( (select nextval('FACE_OVERRIDE_TYPE_ID_SEQ')), 'Not a face' );
|
||||
insert into FACE_OVERRIDE_TYPE values ( (select nextval('FACE_OVERRIDE_TYPE_ID_SEQ')), 'Too young' );
|
||||
insert into FACE_OVERRIDE_TYPE values ( (select nextval('FACE_OVERRIDE_TYPE_ID_SEQ')), 'Ignore face' );
|
||||
CREATE TABLE file ( eid INTEGER, size_mb INTEGER, hash VARCHAR(34), thumbnail VARCHAR, faces_created_on FLOAT, last_hash_date FLOAT, last_ai_scan FLOAT, year INTEGER, month INTEGER, day INTEGER, woy INTEGER,
|
||||
CONSTRAINT pk_file_id PRIMARY KEY(eid),
|
||||
CONSTRAINT fk_file_entry_id FOREIGN KEY (eid) REFERENCES entry(id) );
|
||||
|
||||
CREATE TABLE del_file ( file_eid INTEGER, orig_path_prefix VARCHAR(1024), CONSTRAINT pk_del_file_file_eid PRIMARY KEY (file_eid),
|
||||
CONSTRAINT fk_entry_id FOREIGN KEY (file_eid) REFERENCES file(eid) );
|
||||
|
||||
CREATE TABLE dir ( eid INTEGER, rel_path VARCHAR(256), num_files INTEGER, last_import_date FLOAT,
|
||||
CONSTRAINT pk_dir_eid PRIMARY KEY(eid),
|
||||
CONSTRAINT fk_dir_entry_id FOREIGN KEY (eid) REFERENCES entry(id) );
|
||||
|
||||
CREATE TABLE path_dir_link ( PATH_ID INTEGER, DIR_EID INTEGER,
|
||||
CONSTRAINT pk_pdl_PATH_ID_DIR_EID PRIMARY KEY (PATH_ID, DIR_EID),
|
||||
CONSTRAINT fk_pdl_path_id FOREIGN KEY (path_id) REFERENCES path(id),
|
||||
CONSTRAINT fk_pdl_dir_eid FOREIGN KEY (dir_eid) REFERENCES dir(eid) );
|
||||
|
||||
CREATE TABLE entry_dir_link ( ENTRY_ID INTEGER, DIR_EID INTEGER,
|
||||
CONSTRAINT pk_edl_ENTRY_ID_DIR_EID PRIMARY KEY (ENTRY_ID, DIR_EID),
|
||||
CONSTRAINT fk_edl_entry_id FOREIGN KEY (entry_id) REFERENCES entry(id),
|
||||
CONSTRAINT fk_edl_dir_eid FOREIGN KEY (dir_eid) REFERENCES dir(eid) );
|
||||
|
||||
CREATE TABLE person ( id INTEGER DEFAULT NEXTVAL('person_id_seq'), tag VARCHAR(48), firstname VARCHAR(48), surname VARCHAR(48),
|
||||
CONSTRAINT pk_person_id PRIMARY KEY(id) );
|
||||
ALTER SEQUENCE person_id_seq OWNED BY person.id;
|
||||
|
||||
|
||||
CREATE TABLE refimg ( id INTEGER, fname VARCHAR(128), face BYTEA, orig_w INTEGER, orig_h INTEGER,
|
||||
face_top INTEGER, face_right INTEGER, face_bottom INTEGER, face_left INTEGER, created_on FLOAT, thumbnail VARCHAR, model_used INTEGER,
|
||||
CONSTRAINT pk_refimg_id PRIMARY KEY(id),
|
||||
CONSTRAINT fk_refimg_model_used FOREIGN KEY (model_used) REFERENCES ai_model(id) );
|
||||
ALTER SEQUENCE refimg_id_seq OWNED BY refimg.id;
|
||||
|
||||
CREATE TABLE face( id INTEGER, face BYTEA, face_top INTEGER, face_right INTEGER, face_bottom INTEGER, face_left INTEGER,
|
||||
w INTEGER, h INTEGER, CONSTRAINT pk_face_id PRIMARY KEY(id) );
|
||||
|
||||
CREATE TABLE face_file_link( face_id INTEGER, file_eid INTEGER, model_used INTEGER,
|
||||
CONSTRAINT pk_ffl_face_id_file_id PRIMARY KEY(face_id, file_eid),
|
||||
CONSTRAINT fk_ffl_face_id FOREIGN KEY (face_id) REFERENCES face(id) ON DELETE CASCADE,
|
||||
CONSTRAINT fk_ffl_file_eid FOREIGN KEY (file_eid) REFERENCES file(eid),
|
||||
CONSTRAINT fk_ffl_model_used FOREIGN KEY (model_used) REFERENCES ai_model(id) );
|
||||
|
||||
CREATE TABLE face_refimg_link( face_id INTEGER, refimg_id INTEGER, face_distance FLOAT,
|
||||
CONSTRAINT pk_frl_face_id_refimg_id PRIMARY KEY(face_id, refimg_id),
|
||||
CONSTRAINT fk_frl_face_id FOREIGN KEY (face_id) REFERENCES face(id) ON DELETE CASCADE,
|
||||
CONSTRAINT fk_frl_refimg_id FOREIGN KEY (refimg_id) REFERENCES refimg(id) );
|
||||
|
||||
CREATE TABLE face_override_type ( id INTEGER, name VARCHAR UNIQUE, CONSTRAINT pk_face_override_type_id PRIMARY KEY(id) );
|
||||
INSERT INTO face_override_type VALUES ( (SELECT NEXTVAL('face_override_type_id_seq')), 'Manual match to existing person' );
|
||||
INSERT INTO face_override_type VALUES ( (SELECT NEXTVAL('face_override_type_id_seq')), 'Not a face' );
|
||||
INSERT INTO face_override_type VALUES ( (SELECT NEXTVAL('face_override_type_id_seq')), 'Too young' );
|
||||
INSERT INTO face_override_type VALUES ( (SELECT NEXTVAL('face_override_type_id_seq')), 'Ignore face' );
|
||||
|
||||
-- keep non-redundant FACE because, when we rebuild data we may have a null FACE_ID, but still want to connect to this override
|
||||
-- from a previous AI pass... (would happen if we delete a file and then reimport/scan it), OR, more likely we change (say) a threshold, etc.
|
||||
-- any reordering of faces, generates new face_ids... (but if the face data was the same, then this override should stand)
|
||||
create table FACE_NO_MATCH_OVERRIDE ( ID integer, FACE_ID integer, TYPE_ID integer,
|
||||
constraint FK_FNMO_FACE_ID foreign key (FACE_ID) references FACE(ID),
|
||||
constraint FK_FNMO_TYPE foreign key (TYPE_ID) references FACE_OVERRIDE_TYPE(ID),
|
||||
constraint PK_FNMO_ID primary key(ID) );
|
||||
CREATE TABLE face_no_match_override ( id INTEGER, face_id INTEGER, type_id INTEGER,
|
||||
CONSTRAINT fk_fnmo_face_id FOREIGN KEY (face_id) REFERENCES face(id),
|
||||
CONSTRAINT fk_fnmo_type FOREIGN KEY (type_id) REFERENCES face_override_type(id),
|
||||
CONSTRAINT pk_fnmo_id PRIMARY KEY(id) );
|
||||
|
||||
-- manual match goes to person not refimg, so on search, etc. we deal with this anomaly (via sql not ORM)
|
||||
create table FACE_FORCE_MATCH_OVERRIDE ( ID integer, FACE_ID integer, PERSON_ID integer, constraint PK_FACE_FORCE_MATCH_OVERRIDE_ID primary key(ID) );
|
||||
CREATE TABLE face_force_match_override ( id INTEGER, face_id INTEGER, person_id INTEGER, CONSTRAINT pk_face_force_match_override_id PRIMARY KEY(id) );
|
||||
|
||||
create table DISCONNECTED_NO_MATCH_OVERRIDE ( FACE bytea, TYPE_ID integer,
|
||||
constraint FK_DNMO_TYPE_ID foreign key (TYPE_ID) references FACE_OVERRIDE_TYPE(ID),
|
||||
constraint PK_DNMO_FACE primary key (FACE) );
|
||||
CREATE TABLE disconnected_no_match_override ( face BYTEA, type_id INTEGER,
|
||||
CONSTRAINT fk_dnmo_type_id FOREIGN KEY (type_id) REFERENCES face_override_type(id),
|
||||
CONSTRAINT pk_dnmo_face PRIMARY KEY (face) );
|
||||
|
||||
create table DISCONNECTED_FORCE_MATCH_OVERRIDE ( FACE bytea, PERSON_ID integer,
|
||||
constraint FK_DFMO_PERSON_ID foreign key (PERSON_ID) references PERSON(ID),
|
||||
constraint PK_DFMO_FACE primary key (FACE) );
|
||||
CREATE TABLE disconnected_force_match_override ( face BYTEA, person_id INTEGER,
|
||||
CONSTRAINT fk_dfmo_person_id FOREIGN KEY (person_id) REFERENCES person(id),
|
||||
CONSTRAINT pk_dfmo_face PRIMARY KEY (face) );
|
||||
|
||||
create table PERSON_REFIMG_LINK ( PERSON_ID integer, REFIMG_ID integer,
|
||||
constraint PK_PRL primary key(PERSON_ID, REFIMG_ID),
|
||||
constraint FK_PRL_PERSON_ID foreign key (PERSON_ID) references PERSON(ID),
|
||||
constraint FK_PRL_REFIMG_ID foreign key (REFIMG_ID) references REFIMG(ID),
|
||||
constraint U_PRL_REFIMG_ID unique(REFIMG_ID) );
|
||||
CREATE TABLE person_refimg_link ( person_id INTEGER, refimg_id INTEGER,
|
||||
CONSTRAINT pk_prl PRIMARY KEY(person_id, refimg_id),
|
||||
CONSTRAINT fk_prl_person_id FOREIGN KEY (person_id) REFERENCES person(id),
|
||||
CONSTRAINT fk_prl_refimg_id FOREIGN KEY (refimg_id) REFERENCES refimg(id),
|
||||
CONSTRAINT u_prl_refimg_id UNIQUE(refimg_id) );
|
||||
|
||||
create table JOB (
|
||||
ID integer, START_TIME timestamptz, LAST_UPDATE timestamptz, NAME varchar(64), STATE varchar(128),
|
||||
NUM_FILES integer, CURRENT_FILE_NUM integer, CURRENT_FILE varchar(256), WAIT_FOR integer, PA_JOB_STATE varchar(48),
|
||||
constraint PK_JOB_ID primary key(ID) );
|
||||
CREATE TABLE job (
|
||||
id INTEGER, start_time TIMESTAMPTZ, last_update TIMESTAMPTZ, name VARCHAR(64), state VARCHAR(128),
|
||||
num_files INTEGER, current_file_num INTEGER, current_file VARCHAR(256), wait_for INTEGER, pa_job_state VARCHAR(48),
|
||||
CONSTRAINT pk_job_id PRIMARY KEY(id) );
|
||||
|
||||
-- used to pass / keep extra values, e.g. num_files for jobs that have sets of files, or out* for adding output from jobs that you want to pass to next job in the chain
|
||||
create table JOBEXTRA ( ID integer, JOB_ID integer, NAME varchar(32), VALUE varchar,
|
||||
constraint PK_JOBEXTRA_ID primary key(ID), constraint FK_JOBEXTRA_JOB_ID foreign key(JOB_ID) references JOB(ID) );
|
||||
CREATE TABLE jobextra ( id INTEGER, job_id INTEGER, name VARCHAR(32), value VARCHAR,
|
||||
CONSTRAINT pk_jobextra_id PRIMARY KEY(id), CONSTRAINT fk_jobextra_job_id FOREIGN KEY(job_id) REFERENCES job(id) );
|
||||
|
||||
create table JOBLOG ( ID integer, JOB_ID integer, LOG_DATE timestamptz, LOG varchar,
|
||||
constraint PK_JL_ID primary key(ID), constraint FK_JL_JOB_ID foreign key(JOB_ID) references JOB(ID) );
|
||||
CREATE TABLE joblog ( id INTEGER, job_id INTEGER, log_date TIMESTAMPTZ, log VARCHAR,
|
||||
CONSTRAINT pk_jl_id PRIMARY KEY(id), CONSTRAINT fk_jl_job_id FOREIGN KEY(job_id) REFERENCES job(id) );
|
||||
|
||||
create table PA_JOB_MANAGER_FE_MESSAGE ( ID integer, JOB_ID integer, LEVEL varchar(16), MESSAGE varchar(1024), PERSISTENT boolean, CANT_CLOSE boolean,
|
||||
constraint PA_JOB_MANAGER_FE_ACKS_ID primary key(ID),
|
||||
constraint FK_PA_JOB_MANAGER_FE_MESSAGE_JOB_ID foreign key(JOB_ID) references JOB(ID) );
|
||||
CREATE TABLE pa_job_manager_fe_message ( id INTEGER, job_id INTEGER, level VARCHAR(16), message VARCHAR(8192), persistent BOOLEAN, cant_close BOOLEAN,
|
||||
CONSTRAINT pk_pa_job_manager_fe_acks_id PRIMARY KEY(id),
|
||||
CONSTRAINT fk_pa_job_manager_fe_message_job_id FOREIGN KEY(job_id) REFERENCES job(id) );
|
||||
|
||||
CREATE TABLE amendment_type ( id INTEGER, job_name VARCHAR(64), which VARCHAR(8), what VARCHAR(32), colour VARCHAR(32),
|
||||
CONSTRAINT pk_amendment_type_id PRIMARY KEY(id) );
|
||||
INSERT INTO amendment_type ( id, job_name, which, what, colour ) VALUES ( 1, 'delete_files', 'icon', 'trash', 'var(--bs-danger)' );
|
||||
INSERT INTO amendment_type ( id, job_name, which, what, colour ) VALUES ( 2, 'restore_files', 'icon', 'trash', 'var(--bs-success)' );
|
||||
INSERT INTO amendment_type ( id, job_name, which, what, colour ) VALUES ( 3, 'transform_image:90', 'img', 'rot90.png', '#009EFF' );
|
||||
INSERT INTO amendment_type ( id, job_name, which, what, colour ) VALUES ( 4, 'transform_image:180', 'img', 'rot180.png', '#009EFF' );
|
||||
INSERT INTO amendment_type ( id, job_name, which, what, colour ) VALUES ( 5, 'transform_image:270', 'img', 'rot270.png', '#009EFF' );
|
||||
INSERT INTO amendment_type ( id, job_name, which, what, colour ) VALUES ( 6, 'transform_image:fliph', 'icon', 'flip_h', '#009EFF' );
|
||||
INSERT INTO amendment_type ( id, job_name, which, what, colour ) VALUES ( 7, 'transform_image:flipv', 'icon', 'flip_v', '#009EFF' );
|
||||
INSERT INTO amendment_type ( id, job_name, which, what, colour ) VALUES ( 8, 'move_files', 'icon', 'folder_plus', 'var(--bs-primary)' );
|
||||
|
||||
CREATE TABLE entry_amendment ( amend_type INTEGER, eid INTEGER, job_id INTEGER,
|
||||
CONSTRAINT pk_entry_amendment_eid_job_id PRIMARY KEY(eid,job_id),
|
||||
CONSTRAINT fk_entry_amendment_amendment_type FOREIGN KEY(amend_type) REFERENCES amendment_type(id),
|
||||
CONSTRAINT fk_entry_amendment_job_id FOREIGN KEY(job_id) REFERENCES job(id) );
|
||||
|
||||
-- default data for types of paths
|
||||
insert into PATH_TYPE values ( (select nextval('PATH_TYPE_ID_SEQ')), 'Import' );
|
||||
insert into PATH_TYPE values ( (select nextval('PATH_TYPE_ID_SEQ')), 'Storage' );
|
||||
insert into PATH_TYPE values ( (select nextval('PATH_TYPE_ID_SEQ')), 'Bin' );
|
||||
insert into PATH_TYPE values ( (select nextval('PATH_TYPE_ID_SEQ')), 'Metadata' );
|
||||
INSERT INTO path_type VALUES ( (SELECT NEXTVAL('path_type_id_seq')), 'Import' );
|
||||
INSERT INTO path_type VALUES ( (SELECT NEXTVAL('path_type_id_seq')), 'Storage' );
|
||||
INSERT INTO path_type VALUES ( (SELECT NEXTVAL('path_type_id_seq')), 'Bin' );
|
||||
INSERT INTO path_type VALUES ( (SELECT NEXTVAL('path_type_id_seq')), 'Metadata' );
|
||||
|
||||
-- default data for types of files
|
||||
insert into FILE_TYPE values ( (select nextval('FILE_TYPE_ID_SEQ')), 'Image' );
|
||||
insert into FILE_TYPE values ( (select nextval('FILE_TYPE_ID_SEQ')), 'Video' );
|
||||
insert into FILE_TYPE values ( (select nextval('FILE_TYPE_ID_SEQ')), 'Directory' );
|
||||
insert into FILE_TYPE values ( (select nextval('FILE_TYPE_ID_SEQ')), 'Unknown' );
|
||||
INSERT INTO file_type VALUES ( (SELECT NEXTVAL('file_type_id_seq')), 'Image' );
|
||||
INSERT INTO file_type VALUES ( (SELECT NEXTVAL('file_type_id_seq')), 'Video' );
|
||||
INSERT INTO file_type VALUES ( (SELECT NEXTVAL('file_type_id_seq')), 'Directory' );
|
||||
INSERT INTO file_type VALUES ( (SELECT NEXTVAL('file_type_id_seq')), 'Unknown' );
|
||||
|
||||
-- fake data only for making testing easier
|
||||
--insert into PERSON values ( (select nextval('PERSON_ID_SEQ')), 'dad', 'Damien', 'De Paoli' );
|
||||
--insert into PERSON values ( (select nextval('PERSON_ID_SEQ')), 'mum', 'Mandy', 'De Paoli' );
|
||||
--insert into PERSON values ( (select nextval('PERSON_ID_SEQ')), 'cam', 'Cameron', 'De Paoli' );
|
||||
--insert into PERSON values ( (select nextval('PERSON_ID_SEQ')), 'mich', 'Michelle', 'De Paoli' );
|
||||
--INSERT INTO person VALUES ( (SELECT NEXTVAL('person_id_seq')), 'dad', 'Damien', 'De Paoli' );
|
||||
--INSERT INTO person VALUES ( (SELECT NEXTVAL('person_id_seq')), 'mum', 'Mandy', 'De Paoli' );
|
||||
--INSERT INTO person VALUES ( (SELECT NEXTVAL('person_id_seq')), 'cam', 'Cameron', 'De Paoli' );
|
||||
--INSERT INTO person VALUES ( (SELECT NEXTVAL('person_id_seq')), 'mich', 'Michelle', 'De Paoli' );
|
||||
-- DEV(ddp):
|
||||
insert into SETTINGS ( id, base_path, import_path, storage_path, recycle_bin_path, metadata_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) values ( (select nextval('SETTINGS_ID_SEQ')), '/home/ddp/src/photoassistant/', 'images_to_process/', 'photos/', '.pa_bin/', '.pa_metadata/', true, 1, 1, '0.55', 43, 1, 1, 7, 30, 3 );
|
||||
INSERT INTO settings ( id, base_path, import_path, storage_path, recycle_bin_path, metadata_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) VALUES ( (SELECT NEXTVAL('settings_id_seq')), '/home/ddp/src/photoassistant/', 'images_to_process/', 'photos/', '.pa_bin/', '.pa_metadata/', true, 1, 1, '0.55', 43, 1, 1, 7, 30, 3 );
|
||||
-- DEV(cam):
|
||||
--insert into SETTINGS ( id, base_path, import_path, storage_path, recycle_bin_path, metadata_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) values ( (select nextval('SETTINGS_ID_SEQ')), 'c:/Users/cam/Desktop/code/python/photoassistant/', 'c:\images_to_process', 'photos/', '.pa_bin/', '.pa_metadata/', true, 1, 1, '0.55', 43, 1, 1, 7, 30, 3 );
|
||||
--INSERT INTO settings ( id, base_path, import_path, storage_path, recycle_bin_path, metadata_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) VALUES ( (select nextval('SETTINGS_ID_SEQ')), 'c:/Users/cam/Desktop/code/python/photoassistant/', 'c:\images_to_process', 'photos/', '.pa_bin/', '.pa_metadata/', TRUE, 1, 1, '0.55', 43, 1, 1, 7, 30, 3 );
|
||||
-- PROD:
|
||||
--insert into SETTINGS ( id, base_path, import_path, storage_path, recycle_bin_path, metadata_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) values ( (select nextval('SETTINGS_ID_SEQ')), '/export/docker/storage/', 'Camera_uploads/', 'photos/', '.pa_bin/', '.pa_metadata/', true, 1, 1, '0.55', 43, 1, 1, 7, 30, 4 );
|
||||
--INSERT INTO settings ( id, base_path, import_path, storage_path, recycle_bin_path, metadata_path, auto_rotate, default_refimg_model, default_scan_model, default_threshold, face_size_limit, scheduled_import_scan, scheduled_storage_scan, scheduled_bin_cleanup, bin_cleanup_file_age, job_archive_age ) VALUES ( (SELECT NEXTVAL('settings_id_seq')), '/export/docker/storage/', 'Camera_uploads/', 'photos/', '.pa_bin/', '.pa_metadata/', TRUE, 1, 1, '0.55', 43, 1, 1, 7, 30, 4 );
|
||||
|
||||
@@ -4,10 +4,9 @@
|
||||
<div class="container">
|
||||
<div class="row px-3 my-5 offset-lg-3 col-lg-6 col-sm-12" style="border: 3px solid #5bc0de; border-radius: 15px;">
|
||||
<h3 class="my-3 text-center" style="color: #5bc0de">
|
||||
{# <svg width="64" height="64" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#pa_logo" /></svg> About Photo Assistant</h3> #}
|
||||
<svg width="64" height="64" fill="currentColor"><use xlink:href="internal/icons.svg#pa_logo" /></svg> Photo Assistant - About</h3>
|
||||
<img width="64" height="64" src="internal/pa-logo.png"/></img> Photo Assistant - About</h3>
|
||||
<div class="alert alert-info">
|
||||
{{build_info}}
|
||||
<center>{{build_info}}</center>
|
||||
</div>
|
||||
</div>
|
||||
<nav>
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
<script>
|
||||
function HandleSearch()
|
||||
{
|
||||
st=$('#search_term').val();
|
||||
st=encodeURIComponent( $('#search_term').val() )
|
||||
if( st != '' ) {
|
||||
document.location.href='/search/'+st
|
||||
}
|
||||
@@ -101,6 +101,7 @@
|
||||
{% if config.ENV != "production" %}
|
||||
<a class="dropdown-item" href="{{url_for('force_scan')}}">Force Scan (delete data & rebuild)</a>
|
||||
{% endif %}
|
||||
<a class="dropdown-item" href="{{url_for('force_clear')}}">Clear Messages (Force)</a>
|
||||
<a class="dropdown-item" href="{{url_for('wake_up')}}">Force wake the job manager</a>
|
||||
</div class="dropdow-menu">
|
||||
</div class="nav-item dropdown">
|
||||
@@ -135,7 +136,7 @@
|
||||
|
||||
{% if not InDBox %}
|
||||
{%block script_content %}{% endblock script_content %}
|
||||
<div id="status_container" class="position-fixed top-0 end-0 p-0 my-5" "z-index: 11"> </div>
|
||||
<div id="status_container" class="position-fixed top-0 end-0 p-0 my-5" style="z-index: 9999"> </div>
|
||||
<!-- CheckForJobs(), will see if there are any messages/jobs and keep doing this until there are 0 more and then stop -->
|
||||
<script>
|
||||
$(document).ready(function() { CheckForJobs() } )
|
||||
|
||||
@@ -1,64 +1,52 @@
|
||||
{% extends "base.html" %} {% block main_content %}
|
||||
<script src="{{ url_for( 'internal', filename='js/files_support.js')}}?v={{js_vers['fs']}}"></script>
|
||||
|
||||
<div class="container-fluid">
|
||||
<h3 class="offset-2">{{page_title}}</h3>
|
||||
<form id="main_form" method="POST">
|
||||
<input id="offset" type="hidden" name="offset" value="{{OPT.offset}}">
|
||||
<input id="grouping" type="hidden" name="grouping" value="">
|
||||
<input id="folders" type="hidden" name="folders" value="False">
|
||||
<div class="col col-auto">
|
||||
<div class="input-group">
|
||||
{{CreateSelect( "noo", OPT.noo, ["Oldest", "Newest","A to Z", "Z to A"], "$('#offset').val(0)", "rounded-start py-1 my-1")|safe }}
|
||||
{{CreateSelect( "how_many", OPT.how_many|string, ["10", "25", "50", "75", "100", "150", "200", "500"], "", "rounded-end py-1 my-1" )|safe }}
|
||||
{{CreateSelect( "noo", OPT.noo, ["Oldest", "Newest","A to Z", "Z to A"], "changeOPT(getPageFileList); return false", "rounded-start py-1 my-1")|safe }}
|
||||
{{CreateSelect( "how_many", OPT.how_many|string, ["10", "25", "50", "75", "100", "150", "200", "500"], "changeOPT(getPageFileList); return false", "rounded-end py-1 my-1" )|safe }}
|
||||
<div class="mb-1 col my-auto d-flex justify-content-center">
|
||||
{% set prv_disabled="" %}
|
||||
{% if OPT.offset|int == 0 %}
|
||||
{% set prv_disabled="disabled" %}
|
||||
{% endif %}
|
||||
<button id="prev" {{prv_disabled}} name="prev" class="prev sm-txt btn btn-outline-secondary">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#prev"/></svg>
|
||||
<button id="prev" name="prev" class="prev sm-txt btn btn-outline-secondary" onClick="prevPage(getPageFileList)">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#prev"/></svg>
|
||||
</button>
|
||||
<span class="sm-txt my-auto"> {{OPT.how_many}} files </span>
|
||||
{% set nxt_disabled="" %}
|
||||
{% if entry_data|length < OPT.how_many|int %}
|
||||
{% set nxt_disabled="disabled" %}
|
||||
{% endif %}
|
||||
<button id="next" {{nxt_disabled}} name="next" class="next sm-txt btn btn-outline-secondary">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#next"/></svg>
|
||||
<span class="how_many_text sm-txt my-auto"> {{OPT.how_many}} files </span>
|
||||
<button id="next" name="next" class="next sm-txt btn btn-outline-secondary" onClick="nextPage(getPageFileList)">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#next"/></svg>
|
||||
</button>
|
||||
</div class="col...">
|
||||
</div class="input-group...">
|
||||
</div class="col col-auto">
|
||||
</form
|
||||
<div class="row">
|
||||
<table class="table table-striped table-sm col-xl-12">
|
||||
<thead><tr class="table-primary"><th>Name</th><th>Size (MB)</th><th>Path Prefix</th><th>Hash</th></tr></thead><tbody>
|
||||
{% for obj in entry_data %}
|
||||
<tr><td>
|
||||
{% if obj.type.name == "Image" or obj.type.name == "Video" %}
|
||||
<figure class="figure" font-size: 24px;>
|
||||
<div style="position:relative; width:100%">
|
||||
{% if obj.type.name=="Image" %}
|
||||
<a href="{{obj.in_dir.in_path.path_prefix}}/{{obj.in_dir.rel_path}}/{{obj.name}}">
|
||||
{% elif obj.type.name == "Video" %}
|
||||
<a href="{{obj.in_dir.in_path.path_prefix}}/{{obj.in_dir.rel_path}}/{{obj.name}}">
|
||||
{% endif %}
|
||||
<img class="thumb" style="display:block" height="48" src="data:image/jpeg;base64,{{obj.file_details.thumbnail}}"></img>
|
||||
{% if obj.type.name=="Image" or obj.type.name == "Video" %}
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
<figcaption class="figure-caption">{{obj.name}}</figcaption>
|
||||
</figure>
|
||||
{% endif %}
|
||||
</td>
|
||||
{% if obj.type.name != "Directory" %}
|
||||
<td>{{obj.file_details.size_mb}}</td><td>{{obj.in_dir.in_path.path_prefix.replace("static/","")}}/{{obj.in_dir.rel_path}}</td><td>{{obj.file_details.hash}}</td>
|
||||
{% else %}
|
||||
<td></td><td></td><td></td>
|
||||
{% endif %}
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody></table>
|
||||
</div class="row">
|
||||
</div class="container">
|
||||
<div id="file_list_div" class="container-fluid pt-2">
|
||||
</div class="container">
|
||||
<div class="container-fluid">
|
||||
<input type="hidden" name="cwd" id="cwd" value="{{OPT.cwd}}">
|
||||
<div class="row">
|
||||
<div class="col my-auto d-flex justify-content-center">
|
||||
<button aria-label="prev" id="prev" name="prev" class="prev sm-txt btn btn-outline-secondary disabled" onClick="prevPage(getPageFileList)" disabled>
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#prev"/></svg>
|
||||
</button>
|
||||
<span class="how_many_text sm-txt my-auto"> {{OPT.how_many}} files </span>
|
||||
<button aria-label="next" id="next" name="next" class="next sm-txt btn btn-outline-secondary" onClick="nextPage(getPageFileList)">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#next"/></svg>
|
||||
</button>
|
||||
</div class="col my-auto"> </div class="row">
|
||||
</div class="container-fluid">
|
||||
{% endblock main_content %}
|
||||
{% block script_content %}
|
||||
<script>
|
||||
// this is the list of entry ids for the images for ALL matches for this query
|
||||
var entryList={{query_data.entry_list}}
|
||||
var OPT = {{ OPT.to_dict()|tojson }};
|
||||
// set from query data and stored in OPT for convenience. It can be 0 -
|
||||
// this implies no content in the Path at all
|
||||
OPT.root_eid = {{ query_data.root_eid }};
|
||||
|
||||
// pageList is just those entries shown on this page from the full entryList
|
||||
var pageList=[]
|
||||
// force pageList to set pageList for & render the first page
|
||||
getPage( 1, getPageFileList )
|
||||
</script>
|
||||
{% endblock script_content %}
|
||||
|
||||
@@ -1,38 +1,43 @@
|
||||
{% extends "base.html" %}
|
||||
{% block main_content %}
|
||||
|
||||
<script src="{{ url_for( 'internal', filename='js/files_support.js')}}"></script>
|
||||
<script src="{{ url_for( 'internal', filename='js/files_transform.js')}}"></script>
|
||||
|
||||
<script>
|
||||
var move_paths=[]
|
||||
{% for p in move_paths %}
|
||||
p = new Object()
|
||||
p.type = '{{p.type}}'
|
||||
p.path = '{{p.path}}'
|
||||
p.icon_url = '{{p.icon_url}}'
|
||||
move_paths.push(p)
|
||||
{% endfor %}
|
||||
</script>
|
||||
<style>
|
||||
@media (max-width: 576px) {
|
||||
#la, #ra {
|
||||
padding: 5% !important;
|
||||
}
|
||||
}
|
||||
.norm-txt { font-size: 1.0rem }
|
||||
.form-check-input:checked {
|
||||
background-color: #39C0ED;
|
||||
border-color: #CFF4FC;
|
||||
}
|
||||
.form-switch .form-check-input {
|
||||
background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='%2339C0ED'/%3e%3c/svg%3e");
|
||||
}
|
||||
.form-switch .form-check-input:focus {
|
||||
background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='%23CFF4FC'/%3e%3c/svg%3e");
|
||||
}
|
||||
#tst90:hover,#tst90:focus { filter: invert(73%) sepia(27%) saturate(3970%) hue-rotate(146deg) brightness(94%) contrast(100%); }
|
||||
</style>
|
||||
<script src="{{ url_for( 'internal', filename='js/files_transform.js')}}?v={{ js_vers['ft'] }}"></script>
|
||||
<script src="{{ url_for( 'internal', filename='js/files_support.js')}}?v={{ js_vers['fs'] }}"></script>
|
||||
<script src="{{ url_for( 'internal', filename='js/view_support.js')}}?v={{ js_vers['vs'] }}"></script>
|
||||
|
||||
<div id="files_div">
|
||||
<div class="container-fluid">
|
||||
<form id="main_form" method="POST" action="/change_file_opts">
|
||||
<input type="hidden" name="cwd" id="cwd" value="{{OPT.cwd}}">
|
||||
{% if search_term is defined %}
|
||||
<input type="hidden" name="search_term" id="view_term" value="{{search_term}}">
|
||||
{% endif %}
|
||||
<div class="d-flex row mb-2">
|
||||
{% if OPT.folders %}
|
||||
<div class="my-auto col col-auto">
|
||||
<span class="alert alert-primary py-2">
|
||||
{% if "files_ip" in request.url %}
|
||||
<svg width="20" height="20" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#import"/></svg>
|
||||
<svg width="20" height="20" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#import"/></svg>
|
||||
{% set tmp_path=OPT.cwd | replace( "static/Import", "" ) + "/" %}
|
||||
{% elif "files_sp" in request.url %}
|
||||
<svg width="20" height="20" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#db"/></svg>
|
||||
<svg width="20" height="20" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#db"/></svg>
|
||||
{% set tmp_path=OPT.cwd | replace( "static/Storage", "" ) + "/" %}
|
||||
{% elif "files_rbp" in request.url %}
|
||||
<svg width="20" height="20" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#trash"/></svg>
|
||||
<svg width="20" height="20" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#trash"/></svg>
|
||||
{% set tmp_path=OPT.cwd | replace( "static/Bin", "" ) + "/" %}
|
||||
{% endif %}
|
||||
{{tmp_path}}</span>
|
||||
@@ -40,15 +45,14 @@
|
||||
{% endif %}
|
||||
<div class="col col-auto">
|
||||
<div class="input-group">
|
||||
{{CreateSelect( "noo", OPT.noo, ["Oldest", "Newest","A to Z", "Z to A"], "$('#offset').val(0)", "rounded-start py-2")|safe }}
|
||||
{{CreateSelect( "how_many", OPT.how_many|string, ["10", "25", "50", "75", "100", "150", "200", "500"])|safe }}
|
||||
{{CreateSelect( "noo", OPT.noo, ["Oldest", "Newest","A to Z", "Z to A"], "changeOPT(getPageFigures); return false", "rounded-start py-2")|safe }}
|
||||
{{CreateSelect( "how_many", OPT.how_many|string, ["10", "25", "50", "75", "100", "150", "200", "500"], "changeOPT(getPageFigures); return false" )|safe }}
|
||||
{% if OPT.folders %}
|
||||
<input type="hidden" name="grouping" id="grouping" value="{{OPT.grouping}}">
|
||||
{{CreateFoldersSelect( OPT.folders, "rounded-end" )|safe }}
|
||||
{{CreateFoldersSelect( OPT.folders, "changeOPT(getPageFigures); return false", "rounded-end" )|safe }}
|
||||
{% else %}
|
||||
{{CreateFoldersSelect( OPT.folders )|safe }}
|
||||
{{CreateFoldersSelect( OPT.folders, "changeOPT(getPageFigures); return false" )|safe }}
|
||||
<span class="sm-txt my-auto btn btn-outline-info disabled border-top border-bottom">grouped by:</span>
|
||||
{{CreateSelect( "grouping", OPT.grouping, ["None", "Day", "Week", "Month"], "", "rounded-end")|safe }}
|
||||
{{CreateSelect( "grouping", OPT.grouping, ["None", "Day", "Week", "Month"], "OPT.grouping=$('#grouping').val();drawPageOfFigures();return false", "rounded-end")|safe }}
|
||||
{% endif %}
|
||||
</div class="input-group">
|
||||
</div class="col">
|
||||
@@ -56,349 +60,278 @@
|
||||
<div class="col col-auto my-auto">
|
||||
<span class="alert alert-primary p-2">Searched for: '{{search_term}}'</span>
|
||||
</div class="col my-auto">
|
||||
<script>
|
||||
$('#folders').prop('disabled', 'disabled').removeClass('border-info').addClass('border-secondary').removeClass('text-info').addClass('text-secondary');
|
||||
</script>
|
||||
{% endif %}
|
||||
<div class="col flex-grow-1 my-auto d-flex justify-content-center w-100">
|
||||
<button aria-label="prev" id="prev" name="prev" class="prev sm-txt btn btn-outline-secondary">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#prev"/></svg>
|
||||
<button aria-label="prev" id="prev" name="prev" class="prev sm-txt btn btn-outline-secondary disabled" onClick="prevPage(getPageFigures)" disabled>
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#prev"/></svg>
|
||||
</button>
|
||||
<span class="sm-txt my-auto"> {{OPT.how_many}} files </span>
|
||||
{% set nxt_disabled="" %}
|
||||
{% if entry_data|length < OPT.how_many|int %}
|
||||
{% set nxt_disabled="disabled" %}
|
||||
{% endif %}
|
||||
<button aria-label="next" id="next" {{nxt_disabled}} name="next" class="next sm-txt btn btn-outline-secondary">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#next"/></svg>
|
||||
<span class="how_many_text sm-txt my-auto"> {{OPT.how_many}} files </span>
|
||||
<button aria-label="next" id="next" name="next" class="next sm-txt btn btn-outline-secondary" onClick="nextPage(getPageFigures)">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#next"/></svg>
|
||||
</button>
|
||||
<button aria-label="move" id="move" disabled name="move" class="sm-txt btn btn-outline-primary ms-4" onClick="MoveDBox(move_paths,'{{url_for('internal', filename='icons.svg')}}'); return false;">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#folder_plus"/></svg>
|
||||
<button aria-label="move" id="move" disabled name="move" class="sm-txt btn btn-outline-primary ms-4" onClick="MoveDBox(move_paths); return false;">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#folder_plus"/></svg>
|
||||
</button>
|
||||
{% if "files_rbp" in request.url %}
|
||||
<button aria-label="delete" id="del" disabled name="del" class="sm-txt btn btn-outline-success mx-1" onClick="DelDBox('Restore'); return false;">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#trash-fill"/></svg>
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#trash-fill"/></svg>
|
||||
{% else %}
|
||||
<button aria-label="delete" id="del" disabled name="del" class="sm-txt btn btn-outline-danger mx-1" onClick="DelDBox('Delete'); return false;">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#trash-fill"/></svg>
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#trash-fill"/></svg>
|
||||
{% endif %}
|
||||
</button>
|
||||
</div>
|
||||
<button style="visibility:hidden" class="btn btn-outline-secondary" aria-label="shift-key" id="shift-key" onclick="document.fake_shift=1-document.fake_shift; event.stopPropagation(); return false">shift</button>
|
||||
<button style="visibility:hidden" class="btn btn-outline-secondary" aria-label="ctrl-key" id="ctrl-key" onclick="document.fake_ctrl=1-document.fake_ctrl; event.stopPropagation(); return false">ctrl</button>
|
||||
</div class="col flex-grow-1">
|
||||
<div class="d-flex col col-auto justify-content-end">
|
||||
<div class="btn-group">
|
||||
{% if OPT.size == 64 %}
|
||||
{% set bt="btn-info text-white" %}
|
||||
{% else %}
|
||||
{% set bt="btn-outline-info" %}
|
||||
{% endif %}
|
||||
<button aria-label="extra small" id="64" class="px-2 sm-txt sz-but btn {{bt}}" onClick="$('#size').val(64)">XS</button>
|
||||
{% if OPT.size == 96 %}
|
||||
{% set bt="btn-info text-white" %}
|
||||
{% else %}
|
||||
{% set bt="btn-outline-info" %}
|
||||
{% endif %}
|
||||
<button aria-label="small" id="96" class="px-2 sm-txt sz-but btn {{bt}}" onClick="$('#size').val(96)">S</button>
|
||||
{% if OPT.size == 128 %}
|
||||
{% set bt="btn-info text-white" %}
|
||||
{% else %}
|
||||
{% set bt="btn-outline-info" %}
|
||||
{% endif %}
|
||||
<button aria-label="medium" id="128" class="px-2 sm-txt sz-but btn {{bt}}" onClick="$('#size').val(128)">M</button>
|
||||
{% if OPT.size == 192 %}
|
||||
{% set bt="btn-info text-white" %}
|
||||
{% else %}
|
||||
{% set bt="btn-outline-info" %}
|
||||
{% endif %}
|
||||
<button aria-label="large" id="192" class="px-2 sm-txt sz-but btn {{bt}}" onClick="$('#size').val(192)">L</button>
|
||||
{% if OPT.size == 256 %}
|
||||
{% set bt="btn-info text-white" %}
|
||||
{% else %}
|
||||
{% set bt="btn-outline-info" %}
|
||||
{% endif %}
|
||||
<button aria-label="extra large" id="256" class="px-2 sm-txt sz-but btn {{bt}}" onClick="$('#size').val(256)">XL</button>
|
||||
</div class="btn-group">
|
||||
</div class="col">
|
||||
<input id="offset" type="hidden" name="offset" value="{{OPT.offset}}">
|
||||
<input id="size" type="hidden" name="size" value="{{OPT.size}}">
|
||||
</div class="form-row">
|
||||
{% set eids=namespace( str="" ) %}
|
||||
{# gather all the file eids and collect them in case we go gallery mode #}
|
||||
{% for obj in entry_data %}
|
||||
{% if obj.type.name != "Directory" %}
|
||||
{% set eids.str = eids.str + obj.id|string +"," %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
<input name="eids" id="eids" type="hidden" value="{{eids.str}}">
|
||||
</form>
|
||||
</div>
|
||||
{% set ecnt=namespace( val=0 ) %}
|
||||
<div class="row ms-2">
|
||||
{% set last = namespace(printed=0) %}
|
||||
{# rare event of empty folder, still need to show back button #}
|
||||
{% if OPT.folders and entry_data|length == 0 %}
|
||||
{% if OPT.cwd != OPT.root %}
|
||||
<figure id="_back" class="dir entry m-1" ecnt="{{ecnt.val}}" dir="{{OPT.cwd|ParentPath}}" type="Directory">
|
||||
<svg class="svg" width="{{OPT.size|int-22}}" height="{{OPT.size|int-22}}"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#folder_back"/></svg>
|
||||
<figcaption class="figure-caption text-center">Back</figcaption>
|
||||
</figure class="figure">
|
||||
{% set ecnt.val=ecnt.val+1 %}
|
||||
<script>f=$('#_back'); w=f.find('svg').width(); f.find('figcaption').width(w);</script>
|
||||
{% else %}
|
||||
<div class="col col-auto g-0 m-1">
|
||||
<svg class="svg" width="{{OPT.size|int-22}}" height="{{OPT.size|int-22}}"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#folder_back_gray"/></svg>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% for obj in entry_data %}
|
||||
{% if loop.index==1 and OPT.folders %}
|
||||
{% if OPT.cwd != OPT.root %}
|
||||
<figure class="col col-auto g-0 dir entry m-1" ecnt="{{ecnt.val}}" dir="{{OPT.cwd|ParentPath}}" type="Directory">
|
||||
<svg class="svg" width="{{OPT.size|int-22}}" height="{{OPT.size|int-22}}" fill="currentColor">
|
||||
<use xlink:href="{{url_for('internal', filename='icons.svg')}}#folder_back"/></svg>
|
||||
<figcaption class="svg_cap figure-caption text-center">Back</figcaption>
|
||||
</figure class="figure">
|
||||
{% set ecnt.val=ecnt.val+1 %}
|
||||
{% else %}
|
||||
{# create an even lighter-grey, unclickable back button - so folders dont jump around when you go into them #}
|
||||
<div class="col col-auto g-0 m-1">
|
||||
<svg class="svg" width="{{OPT.size|int-22}}" height="{{OPT.size|int-22}}"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#folder_back_gray"/></svg>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% if not OPT.folders and obj.type.name == "Directory" %}
|
||||
{% continue %}
|
||||
{% endif %}
|
||||
{% if OPT.grouping == "Day" %}
|
||||
{% if last.printed != obj.file_details.day %}
|
||||
<div class="row ps-3"><h6>Day: {{obj.file_details.day}} of {{obj.file_details.month}}/{{obj.file_details.year}}</h6></div>
|
||||
{% set last.printed = obj.file_details.day %}
|
||||
{% endif %}
|
||||
{% elif OPT.grouping == "Week" %}
|
||||
{% if last.printed != obj.file_details.woy %}
|
||||
<div class="row ps-3"><h6>Week #: {{obj.file_details.woy}} of {{obj.file_details.year}}</h6></div>
|
||||
{% set last.printed = obj.file_details.woy %}
|
||||
{% endif %}
|
||||
{% elif OPT.grouping == "Month" %}
|
||||
{% if last.printed != obj.file_details.month %}
|
||||
<div class="row ps-3"><h6>Month: {{obj.file_details.month}} of {{obj.file_details.year}}</h6></div>
|
||||
{% set last.printed = obj.file_details.month %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% if obj.type.name == "Image" or obj.type.name == "Video" or obj.type.name == "Unknown" %}
|
||||
{% if (not OPT.folders) or ((obj.in_dir.in_path.path_prefix+'/'+obj.in_dir.rel_path+'/'+obj.name) | TopLevelFolderOf(OPT.cwd)) %}
|
||||
<figure id="{{obj.id}}" ecnt="{{ecnt.val}}" class="col col-auto g-0 figure entry m-1" path_type="{{obj.in_dir.in_path.type.name}}" size="{{obj.file_details.size_mb}}" hash="{{obj.file_details.hash}}" in_dir="{{obj.in_dir.in_path.path_prefix}}/{{obj.in_dir.rel_path}}" fname="{{obj.name}}" yr="{{obj.file_details.year}}" date="{{obj.file_details.year}}{{"%02d" % obj.file_details.month}}{{"%02d" % obj.file_details.day}}" pretty_date="{{obj.file_details.day}}/{{obj.file_details.month}}/{{obj.file_details.year}}" type="{{obj.type.name}}">
|
||||
{% if obj.type.name=="Image" or obj.type.name=="Unknown" %}
|
||||
<div style="position:relative; width:100%">
|
||||
{% if obj.file_details.thumbnail %}
|
||||
<a href="{{obj.in_dir.in_path.path_prefix}}/{{obj.in_dir.rel_path}}/{{obj.name}}">
|
||||
<img alt="{{obj.name}}" class="thumb" height="{{OPT.size}}" src="data:image/jpeg;base64,{{obj.file_details.thumbnail}}"></img></a>
|
||||
{% else %}
|
||||
<a href="{{obj.in_dir.in_path.path_prefix}}/{{obj.in_dir.rel_path}}/{{obj.name}}">
|
||||
<svg width="{{OPT.size}}" height="{{OPT.size}}" fill="white"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#unknown_ftype"/></svg>
|
||||
</a>
|
||||
{% endif %}
|
||||
{% if search_term is defined %}
|
||||
<div style="position:absolute; bottom: 0px; left: 2px;">
|
||||
<svg width="16" height="16" fill="white"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#{{LocationIcon(obj)}}"/></svg>
|
||||
</div>
|
||||
{% endif %}
|
||||
<div id="s{{obj.id}}" style="display:none; position:absolute; top: 50%; left:50%; transform:translate(-50%, -50%);">
|
||||
<img height="64px" src="{{url_for('internal', filename='throbber.gif')}}"></img>
|
||||
</div>
|
||||
</div>
|
||||
{% elif obj.type.name == "Video" %}
|
||||
<div style="position:relative; width:100%">
|
||||
{% if obj.file_details.thumbnail %}
|
||||
<a href="{{obj.in_dir.in_path.path_prefix}}/{{obj.in_dir.rel_path}}/{{obj.name}}">
|
||||
<img alt="{{obj.name}}" class="thumb" height="{{OPT.size}}" src="data:image/jpeg;base64,{{obj.file_details.thumbnail}}"></img></a>
|
||||
{% else %}
|
||||
<a href="{{obj.in_dir.in_path.path_prefix}}/{{obj.in_dir.rel_path}}/{{obj.name}}">
|
||||
<svg width="{{OPT.size}}" height="{{OPT.size}}" fill="white"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#unknown_ftype"/></svg>
|
||||
</a>
|
||||
{% endif %}
|
||||
<div class="btn-group" role="group" aria-label="Size radio button group">
|
||||
<input type="radio" class="btn-check" name="size" id="size-xs" onCLick="changeSize()" autocomplete="off" value="64">
|
||||
<label class="btn btn-outline-info btn-radio" for="size-xs">XS</label>
|
||||
|
||||
<div style="position:absolute; top: 0px; left: 2px;">
|
||||
<svg width="16" height="16" fill="white"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#film"/></svg>
|
||||
</div>
|
||||
{% if search_term is defined %}
|
||||
<div style="position:absolute; bottom: 0px; left: 2px;">
|
||||
<svg width="16" height="16" fill="white"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#{{LocationIcon(obj)}}"/></svg>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
</figure>
|
||||
{% set ecnt.val=ecnt.val+1 %}
|
||||
{% endif %}
|
||||
{% elif obj.type.name == "Directory" %}
|
||||
{% if OPT.folders %}
|
||||
{% if obj.dir_details.rel_path | length %}
|
||||
{% set dirname=obj.dir_details.in_path.path_prefix+'/'+obj.dir_details.rel_path %}
|
||||
{% else %}
|
||||
{% set dirname=obj.dir_details.in_path.path_prefix %}
|
||||
{% endif %}
|
||||
{# if this dir is the toplevel of the cwd, show the folder icon #}
|
||||
{% if dirname| TopLevelFolderOf(OPT.cwd) %}
|
||||
<figure class="col col-auto g-0 dir entry m-1" id={{obj.id}} ecnt={{ecnt.val}} dir="{{dirname}}" type="Directory">
|
||||
<svg class="svg" width="{{OPT.size|int-22}}" height="{{OPT.size|int-22}}" fill="currentColor">
|
||||
<use xlink:href="{{url_for('internal', filename='icons.svg')}}#Directory"/></svg>
|
||||
<figcaption class="svg_cap figure-caption text-center text-wrap text-break">{{obj.name}}</figcaption>
|
||||
</figure class="figure">
|
||||
{% set ecnt.val=ecnt.val+1 %}
|
||||
<script>f=$('#{{obj.id}}'); w=f.find('svg').width(); f.find('figcaption').width(w);</script>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
<input type="radio" class="btn-check" name="size" id="size-s" onCLick="changeSize()" autocomplete="off" value="96">
|
||||
<label class="btn btn-outline-info btn-radio" for="size-s">S</label>
|
||||
|
||||
<input type="radio" class="btn-check" name="size" id="size-m" onCLick="changeSize()" autocomplete="off" value="128">
|
||||
<label class="btn btn-outline-info btn-radio" for="size-m">M</label>
|
||||
|
||||
<input type="radio" class="btn-check" name="size" id="size-l" onCLick="changeSize()" autocomplete="off" value="192">
|
||||
<label class="btn btn-outline-info btn-radio" for="size-l">L</label>
|
||||
|
||||
<input type="radio" class="btn-check" name="size" id="size-xl" onCLick="changeSize()" autocomplete="off" value="256">
|
||||
<label class="btn btn-outline-info btn-radio" for="size-xl">XL</label>
|
||||
</div>
|
||||
</div class="d-flex col">
|
||||
</div class="d-flex row mb-2">
|
||||
</div container="fluid">
|
||||
<div id="figures" class="row ms-2">
|
||||
</div>
|
||||
<div class="container-fluid">
|
||||
<form id="nav_form" method="POST" action="/change_file_opts">
|
||||
<input type="hidden" name="cwd" id="cwd" value="{{OPT.cwd}}">
|
||||
<div class="row">
|
||||
<div class="col my-auto d-flex justify-content-center">
|
||||
<button aria-label="prev" id="prev" name="prev" class="prev sm-txt btn btn-outline-secondary">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#prev"/></svg>
|
||||
<button aria-label="prev" id="prev" name="prev" class="prev sm-txt btn btn-outline-secondary disabled" onClick="prevPage(getPageFigures)" disabled>
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#prev"/></svg>
|
||||
</button>
|
||||
<span class="sm-txt my-auto"> {{OPT.how_many}} files </span>
|
||||
<button aria-label="next" id="next" {{nxt_disabled}} name="next" class="next sm-txt btn btn-outline-secondary">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#next"/></svg>
|
||||
<span class="how_many_text sm-txt my-auto"> {{OPT.how_many}} files </span>
|
||||
<button aria-label="next" id="next" name="next" class="next sm-txt btn btn-outline-secondary" onClick="nextPage(getPageFigures)">
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#next"/></svg>
|
||||
</button>
|
||||
</div class="col my-auto">
|
||||
</div class="row">
|
||||
</div class="container-fluid">
|
||||
</div id="files_div">
|
||||
<div id="viewer_div" class="d-none">
|
||||
<div id="viewer" class="container-fluid">
|
||||
<div class="row flex-nowrap">
|
||||
<!-- Left Buttons Column -->
|
||||
<div class="col-auto d-flex flex-column min-width-0">
|
||||
<!-- Up Button (Small) -->
|
||||
<button title="Back to list" class="btn btn-outline-info btn-sm p-1 mb-1" onclick="goOutOfViewer()">
|
||||
<svg width="16" height="16" fill="currentColor">
|
||||
<use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#back"></use>
|
||||
</svg>
|
||||
</button>
|
||||
<!-- Left Button (large/flex-grow-1) -->
|
||||
<button title="Show previous image" class="btn btn-outline-info px-2 flex-grow-1 overflow-hidden"
|
||||
style="padding: 10%" id="la" onClick="prevImageInViewer()">
|
||||
<svg width="16" height="16" fill="currentColor">
|
||||
<use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#prev"/></svg>
|
||||
</button>
|
||||
</div>
|
||||
<figure style="position: relative;" class="col col-auto border border-info rounded m-0 p-1" id="figure">
|
||||
<canvas id="canvas"></canvas>
|
||||
<!-- next 4 are placeholders and called on during amendments only in viewer code -->
|
||||
<img id="throbber" src="{{url_for('internal', filename='throbber.gif')}}?v={{js_vers[th]}}" style="display:none;height:96px"
|
||||
class="position-absolute top-50 start-50 translate-middle">
|
||||
<img id="white-circle" src="{{url_for('internal', filename='white-circle.png')}}?v={{js_vers[th]}}" style="display:none;height:72px"
|
||||
class="position-absolute top-50 start-50 translate-middle">
|
||||
<img id="inside-img" style="display:none;height:64px" class="position-absolute top-50 start-50 translate-middle">
|
||||
<svg id="inside-icon" style="display:none;height:64px" class="position-absolute top-50 start-50 translate-middle">
|
||||
<use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#flip_v">
|
||||
</use></svg>
|
||||
<script>
|
||||
var im=new Image();
|
||||
im.onload=DrawImg
|
||||
var context = canvas.getContext('2d')
|
||||
</script>
|
||||
<figcaption id="img-cap" class="figure-caption text-center text-wrap text-break">
|
||||
<span id="fname_i"></span></figcaption>
|
||||
</figure>
|
||||
<div id="video_div" class="col col-auto">
|
||||
<video id="video" class="col col-auto" controls>
|
||||
<source id="videoSource" src="" type="video/mp4">
|
||||
Your browser does not support the video tag.
|
||||
</video>
|
||||
<figcaption id="vid-cap" class="figure-caption text-center text-wrap text-break">
|
||||
<span id="fname_v"></span></figcaption>
|
||||
</div>
|
||||
</form>
|
||||
</div class="container">
|
||||
<!-- Right-hand Buttons Column -->
|
||||
<div class="col-auto d-flex flex-column min-width-0">
|
||||
<!-- Up Button (Small) -->
|
||||
<button title="Back to list" class="btn btn-outline-info btn-sm p-1 mb-1" onclick="goOutOfViewer()">
|
||||
<svg width="16" height="16" fill="currentColor">
|
||||
<use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#back"></use>
|
||||
</svg>
|
||||
</button>
|
||||
<!-- Right Button (large/flex-grow-1) -->
|
||||
<button title="Show next image" class="btn btn-outline-info px-2 flex-grow-1 overflow-hidden"
|
||||
style="padding: 10%" id="ra" onClick="nextImageInViewer()">
|
||||
<svg width="16" height="16" fill="currentColor">
|
||||
<use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#next"/></svg>
|
||||
</button>
|
||||
</div>
|
||||
</div class="row">
|
||||
<div class="row">
|
||||
{# this whole div, just takes up the same space as the left button and is hidden for alignment only #}
|
||||
<div class="col-auto px-0">
|
||||
<button class="btn btn-outline-info px-2 invisible" disabled>
|
||||
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#next"/></svg>
|
||||
</button>
|
||||
</div>
|
||||
<span class="col-auto my-auto">Show:</span>
|
||||
<div title="Toggle showing filename (hotkey: n)" class="d-flex form-check form-switch border border-info rounded col col-auto my-auto py-1 justify-content-center ps-5">
|
||||
<input class="form-check-input" type="checkbox" id="fname_toggle" onChange="$('.figure-caption').toggle()" checked>
|
||||
<label class="form-check-label ps-1" for="fname_toggle">Filename</label>
|
||||
</div>
|
||||
<div title="Toggle showing matched faces (hotkey: f)" class="d-flex form-check form-switch border border-info rounded col col-auto my-auto py-1 justify-content-center ps-5">
|
||||
<input class="form-check-input" type="checkbox" onChange="FaceToggle()" id="faces">
|
||||
<label class="form-check-label ps-1" for="faces">Faces</label>
|
||||
</div>
|
||||
<div title="Toggle showing 'distance' on matched faces (hotkey: d)" class="d-flex form-check form-switch border border-info rounded col col-auto my-auto py-1 justify-content-center ps-5">
|
||||
<input class="form-check-input" type="checkbox" onChange="DrawImg()" id="distance">
|
||||
<label class="form-check-label ps-1" for="distance">Distance</label>
|
||||
</div>
|
||||
<div title="Change the model used to detect faces" class="col col-auto my-auto">
|
||||
AI Model:
|
||||
{# can use 0 as default, it will be (re)set correctly in DrawImg() anyway #}
|
||||
{{CreateSelect( "model", 0, ["N/A", "normal", "slow/accurate"], "", "rounded norm-txt", [0,1,2])|safe }}
|
||||
</div>
|
||||
<div class="col col-auto pt-1">
|
||||
<button class="btn btn-outline-info p-1" title="Rotate by 90 degrees" onClick="Transform(90)">
|
||||
<img src="{{url_for('internal', filename='rot90.png')}}?v={{js_vers['r90']}}" width="32" height="32" onMouseOver="this.src='{{url_for('internal', filename='rot90-invert.png')}}'"
|
||||
onMouseOut="this.src='{{url_for('internal', filename='rot90.png')}}?v={{js_vers['r90']}}'" />
|
||||
</button>
|
||||
<button class="btn btn-outline-info p-1" title="Rotate by 180 degrees" onClick="Transform(180)">
|
||||
<img src="{{url_for('internal', filename='rot180.png')}}?v={{js_vers['r180']}}" width="32" height="32" onMouseOver="this.src='{{url_for('internal', filename='rot180-invert.png')}}'"
|
||||
onMouseOut="this.src='{{url_for('internal', filename='rot180.png')}}?v={{js_vers['r180']}}'" />
|
||||
</button>
|
||||
<button class="btn btn-outline-info p-1" title="Rotate by 270 degrees" onClick="Transform(270)">
|
||||
<img src="{{url_for('internal', filename='rot270.png')}}?v={{js_vers['r270']}}" width="32" height="32" onMouseOver="this.src='{{url_for('internal', filename='rot270-invert.png')}}'"
|
||||
onMouseOut="this.src='{{url_for('internal', filename='rot270.png')}}?v={{js_vers['r270']}}'" />
|
||||
</button>
|
||||
<button class="btn btn-outline-info p-1" title="Flip horizontally" onClick="Transform('fliph')">
|
||||
<svg width="32" height="32" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#flip_h"/></svg>
|
||||
</button>
|
||||
<button class="btn btn-outline-info p-1" title="Flip vertically" onClick="Transform('flipv')">
|
||||
<svg width="32" height="32" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#flip_v"/></svg>
|
||||
</button>
|
||||
<button class="btn btn-outline-info p-1" title="View in Fullscreen mode (hotkey: F)" onClick="fullscreen=true; ViewImageOrVideo()">
|
||||
<svg width="32" height="32" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#fullscreen"/></svg>
|
||||
</button>
|
||||
<button class="btn btn-outline-info p-1" title="Show logs relating to this filename (hotkey: l)" onClick="JoblogSearch()">
|
||||
<svg width="32" height="32" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#log"/></svg>
|
||||
</button>
|
||||
<button class="btn btn-outline-info p-1" title="View Original" onClick="window.location='/'+document.viewing.FullPathOnFS">
|
||||
<svg width="32" height="32" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#download"/></svg>
|
||||
</button>
|
||||
<button id="viewer_del" class="btn btn-outline-danger p-1" title="Delete (hotkey: Del)" onClick="DelDBox('Delete')">
|
||||
<svg id="viewer_bin" width="32" height="32" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#trash"/></svg>
|
||||
</button>
|
||||
</div>
|
||||
</div class="row">
|
||||
</div id="viewer">
|
||||
</div id="viewer_div">
|
||||
{% endblock main_content %}
|
||||
|
||||
{% block script_content %}
|
||||
|
||||
<script>
|
||||
// GLOBALS
|
||||
document.fake_shift=0
|
||||
document.fake_ctrl=0
|
||||
|
||||
$('.figure').click( function(e) { DoSel(e, this ); SetButtonState(); return false; });
|
||||
$(document).on('click', function(e) { $('.highlight').removeClass('highlight') ; SetButtonState() });
|
||||
// FIXME: used by viewer code - should probably get rid of this?
|
||||
var fullscreen=false;
|
||||
|
||||
function CallViewRouteWrapper()
|
||||
{
|
||||
CallViewRoute( $(this).attr("id") )
|
||||
}
|
||||
// this is the current entry (object) we are viewing - an image/video (used when we dbl-click to view & then in next/prev in view)
|
||||
document.viewing=null;
|
||||
|
||||
function CallViewRoute(id)
|
||||
{
|
||||
s='<form id="_fm" method="POST" action="/view/' + id + '">'
|
||||
s+='<input type="hidden" name="eids" value="'+$("#eids").val() + '">'
|
||||
s+='<input type="hidden" name="cwd" value="{{OPT.cwd}}">'
|
||||
s+='<input type="hidden" name="root" value="{{OPT.root}}">'
|
||||
s+='<input type="hidden" name="offset" value="{{OPT.offset}}">'
|
||||
s+='<input type="hidden" name="how_many" value="{{OPT.how_many}}">'
|
||||
s+='<input type="hidden" name="orig_url" value="{{request.path}}">'
|
||||
s+='<input type="hidden" name="view_eid" value="'+id+'">'
|
||||
{% if search_term is defined %}
|
||||
s+='<input type="hidden" name="search_term" value="{{search_term}}">'
|
||||
{% endif %}
|
||||
s+='</form>'
|
||||
$(s).appendTo('body').submit();
|
||||
}
|
||||
var OPT = {{ OPT.to_dict()|tojson }};
|
||||
// set from query data and stored in OPT for convenience. It can be 0 -
|
||||
// this implies no content in the Path at all
|
||||
OPT.root_eid = {{ query_data.root_eid }};
|
||||
|
||||
$('.figure').dblclick( CallViewRouteWrapper )
|
||||
// amendment types are stable per code release, store them once and use as
|
||||
// needed when we amend entrys in Transforms, removes, etc.
|
||||
document.amendTypes = {{ query_data.amendTypes|tojson }};
|
||||
|
||||
// different context menu on files
|
||||
$.contextMenu({
|
||||
selector: '.entry',
|
||||
build: function($triggerElement, e) {
|
||||
// when right-clicking & no selection add one OR deal with ctrl/shift right-lick as it always changes seln
|
||||
if( NoSel() || e.ctrlKey || e.shiftKey )
|
||||
// get items out of query_data into convenience javascript vars...
|
||||
var move_paths = {{ query_data.move_paths|tojson }};
|
||||
var NMO={{query_data.NMO|tojson}}
|
||||
var people={{query_data.people|tojson}}
|
||||
|
||||
// this is the list of entry ids for the images for ALL matches for this query
|
||||
var entryList={{query_data.entry_list}}
|
||||
|
||||
// pageList is just those entries shown on this page from the full entryList
|
||||
var pageList=[]
|
||||
// force pageList to set pageList for & render the first page
|
||||
getPage(1,getPageFigures)
|
||||
|
||||
// gap is used to keep some space around video in viewer - tbh, not sure why anymore
|
||||
var gap=0.8
|
||||
|
||||
function PrettyFname(fname)
|
||||
{
|
||||
DoSel(e, e.currentTarget )
|
||||
SetButtonState();
|
||||
}
|
||||
|
||||
if( FiguresOrDirsOrBoth() == "figure" )
|
||||
s='<span class="alert alert-secondary py-2">'
|
||||
if( fname.indexOf( "static/Import" ) == 0 )
|
||||
{
|
||||
item_list = {
|
||||
details: { name: "Details..." },
|
||||
view: { name: "View File" },
|
||||
sep: "---",
|
||||
s+='<svg width="20" height="20" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#import"/></svg>'
|
||||
tmp_path=fname.replace("statuc/Import","" )
|
||||
}
|
||||
if( e.currentTarget.getAttribute('type') == 'Image' )
|
||||
if( fname.indexOf( "static/Storage" ) == 0 )
|
||||
{
|
||||
item_list['transform'] = {
|
||||
name: "Transform",
|
||||
items: {
|
||||
"r90": { "name" : "Rotate 90 degrees" },
|
||||
"r180": { "name" : "Rotate 180 degrees" },
|
||||
"r270": { "name" : "Rotate 270 degrees" },
|
||||
"fliph": { "name" : "Flip horizontally" },
|
||||
"flipv": { "name" : "Flip vertically" }
|
||||
s+='<svg width="20" height="20" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#db"/></svg>'
|
||||
tmp_path=fname.replace("static/Storage","" )
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
item_list['move'] = { name: "Move selected file(s) to new folder" }
|
||||
item_list['sep2'] = { sep: "---" }
|
||||
}
|
||||
else
|
||||
item_list = {
|
||||
move: { name: "Move selection(s) to new folder" }
|
||||
}
|
||||
|
||||
item_list['ai'] = {
|
||||
name: "Scan file for faces",
|
||||
items: {
|
||||
"ai-all": {"name": "all"},
|
||||
{% for p in people %}
|
||||
"ai-{{p.tag}}": {"name": "{{p.tag}}"},
|
||||
{% endfor %}
|
||||
}
|
||||
}

if( SelContainsBinAndNotBin() ) {
item_list['both']= { name: 'Cannot delete and restore at same time', disabled: true }
} else {
if (e.currentTarget.getAttribute('path_type') == 'Bin' )
item_list['undel']= { name: "Restore selected file(s)" }
else if( e.currentTarget.getAttribute('type') != 'Directory' )
item_list['del']= { name: "Delete selected file(s)" }
}

return {
callback: function( key, options) {
if( key == "details" ) { DetailsDBox() }
if( key == "view" ) { CallViewRoute( $(this).attr('id') ) }
if( key == "move" ) { MoveDBox(move_paths, "{{url_for('internal', filename='icons.svg')}}") }
if( key == "del" ) { DelDBox('Delete') }
if( key == "undel") { DelDBox('Restore') }
if( key == "r90" ) { Transform(90) }
if( key == "r180" ) { Transform(180) }
if( key == "r270" ) { Transform(270) }
if( key == "fliph" ) { Transform("fliph") }
if( key == "flipv" ) { Transform("flipv") }
if( key.startsWith("ai")) { RunAIOnSeln(key) }
},
items: item_list
};
}
});
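The return value above, an object with callback and items, matches the build-function form of the jQuery contextMenu plugin. The registration call itself sits outside this hunk, so the wrapper below (including the trigger selector) is only an assumed sketch of how such a build function is typically hooked up.

```js
// Assumed registration; the actual $.contextMenu() call is not visible in this hunk.
$.contextMenu({
    selector: '.figure',                       // hypothetical trigger selector
    build: function($trigger, e) {
        // item_list would be assembled per-target, as in the code above
        var item_list = { details: { name: "Details..." } };
        return {
            callback: function(key, options) { console.log('clicked', key); },
            items: item_list
        };
    }
});
```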

$(document).ready(function() {
if( {{OPT.offset}} == 0 )
if( fname.indexOf( "static/Bin" ) == 0 )
{
$('.prev').addClass('disabled')
$('.prev').prop('disabled', true)
s+='<svg width="20" height="20" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}?v={{js_vers['ic']}}#trash-fill"/></svg>'
tmp_path=fname.replace("static/Bin","" )
}
s+=tmp_path+'</span>'
return s
}
$(".dir").click( function(e) { $('#offset').val(0) ; $('#cwd').val( $(this).attr('dir') ) ; $('#main_form').submit() } )
} )

$( document ).keydown(function(event) {
switch (event.key)
// check the size radiobutton
$(`input[name="size"][value="${OPT.size}"]`).prop('checked', true)

window.addEventListener('resize', DrawImg, false);
window.addEventListener('resize', ResizeVideo, false);

// when we are in recycle bin, change colours to green & func to restore
if( window.location.href.includes('files_rbp') )
{
case "Delete":
{% if "files_rbp" in request.url %}
if( ! NoSel() ) DelDBox('Restore');
{% else %}
if( ! NoSel() ) DelDBox('Delete');
{% endif %}
break;
$('#viewer_bin').attr('fill', 'var(--bs-success)')
// fill with bg-success colour
$('#viewer_bin use').attr('fill', 'var(--bs-success)')
$('#viewer_del').removeClass('btn-outline-danger').addClass('btn-outline-success')
$('#viewer_del').on('mouseenter', function() {
// Set the SVG fill to white
$('#viewer_bin use').attr('fill', 'white');
});

// When mouse leaves the button
$('#viewer_del').on('mouseleave', function() {
// Revert the SVG fill to the bg-success colour
$('#viewer_bin use').attr('fill', 'var(--bs-success)');
});
$('#viewer_del').on('click', function() { DelDBox('Restore') } )
}
});
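The case "Delete" block above relies on a Jinja conditional that is resolved when the page is rendered, so each page ships only one branch of it. The two possible rendered forms are simply:

```js
// On the recycle-bin page ("files_rbp" is in request.url) the server renders:
if( ! NoSel() ) DelDBox('Restore');

// On any other files page it renders:
if( ! NoSel() ) DelDBox('Delete');
```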

if( isMobile() )
{
$('#shift-key').css('visibility', 'visible');
$('#ctrl-key').css('visibility', 'visible');
}

</script>
{% endblock script_content %}

@@ -46,8 +46,7 @@

<div class="row px-3 my-5 offset-lg-3 col-lg-6 col-sm-12" style="border: 3px solid #5bc0de; border-radius: 15px;">
<h3 class="my-3 text-center" style="color: #5bc0de">
{# <svg width="64" height="64" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#pa_logo" /></svg> Photo Assistant Login</h3> #}
<svg width="64" height="64" fill="currentColor"><use xlink:href="internal/icons.svg#pa_logo" /></svg> Photo Assistant Login</h3>
<img width="64" height="64" src="internal/pa-logo.png"> Photo Assistant Login</h3>
<form class="" method="POST">
<div class="input-group">
<label for="username" class="text-right input-group-text col-4 text-info">Username:</label>

@@ -34,9 +34,7 @@
<!-- browsers can put the fakepath in for security, remove it -->
function DoMagic() {
str=$("#new_file_chooser").val()
console.log(str)
str=str.replace('C:\\fakepath\\', '' )
console.log(str)
$("#fname").val(str)
}
</script>
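The comment in the hunk above explains the point of DoMagic: browsers deliberately report a chosen file as C:\fakepath\<name> rather than its real client-side path, so the prefix has to be stripped before the name is reused. A small usage sketch, assuming DoMagic is bound to the file input's change event (the binding itself is not shown in this hunk):

```js
// Assumed wiring; the actual event binding is outside this hunk.
$("#new_file_chooser").on('change', DoMagic);

// Example: a raw value of "C:\fakepath\IMG_0042.jpg" ends up as "IMG_0042.jpg" in #fname.
```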

@@ -56,44 +56,6 @@
</div>
</div>
</div class="col-7">

<div class="row pt-5">
<alert class="alert alert-warning">The following values are based on the defaults above plus any changes made as you navigate the application; they are not set by hand. This content is for checking/debugging only.</alert>
</div class="row">

<div class="row">
<table id="pa_user_state_tbl" class="table table-striped table-sm" data-toolbar="#toolbar" data-search="true">
<thead>
<tr class="table-primary"><th>Path</th><th>New or Oldest</th><th>How Many</th><th>Folders?</th><th>Group by</th><th>Thumb size</th><th>DB retrieve offset</th><th>Root</th><th>cwd</th></tr>
</thead>
<tbody>
{% for st in states %}
<tr>
<td>{{st.path_type}}
{% if st.path_type == 'Search' %}
"{{st.orig_search_term}}"
{% endif %}
{% if st.path_type == 'View' %}
(orig: id={{st.view_eid}} in {{st.orig_ptype}})
{% if st.orig_ptype == 'Search' %}
"{{st.orig_search_term}}"
{% endif %}
{% endif %}

</td>
<td>{{st.noo}}</td>
<td>{{st.how_many}}</td>
<td>{{st.folders}}</td>
<td>{{st.grouping}}</td>
<td>{{st.size}}</td>
<td>{{st.st_offset}}</td>
<td>{{st.root}}</td>
<td>{{st.cwd}}</td>
</tr>
{% endfor %}
</tbody>
</table>
</div class="row">
</div class="container-fluid">
{% endblock main_content %}
{% block script_content %}
@@ -1,323 +0,0 @@
{% extends "base.html" %} {% block main_content %}
{# make the form-switch / toggle info color set, give or take #}
<style>
.norm-txt { font-size: 1.0rem }
.form-check-input:checked {
background-color: #39C0ED;
border-color: #CFF4FC;
}
.form-switch .form-check-input {
background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='%2339C0ED'/%3e%3c/svg%3e");
}
.form-switch .form-check-input:focus {
background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='%23CFF4FC'/%3e%3c/svg%3e");
}
</style>

<script src="{{ url_for( 'internal', filename='js/view_transform.js')}}"></script>
<script src="{{ url_for( 'internal', filename='js/view_support.js')}}"></script>

<script>
var gap=0.8
var grayscale=0
var throbber=0

var objs=[]
var NMO=[]
var current={{current}}
var eids="{{eids}}"
var eid_lst=eids.split(",")
var offset={{OPT.offset}}
var first_eid={{OPT.first_eid}}
var last_eid={{OPT.last_eid}}
var imp_path="static/Import/{{imp_path}}"
var st_path="static/Storage/{{st_path}}"
var bin_path="static/Bin/{{bin_path}}"

{% for id in objs %}
e=new Object()
e.url = "{{objs[id].FullPathOnFS()|safe}}"
e.type = "{{objs[id].type.name}}"
{% if objs[id].file_details.faces %}
e.face_model="{{objs[id].file_details.faces[0].facefile_lnk.model_used}}"
{% endif %}
e.faces=[]
{% for face in objs[id].file_details.faces %}
data = { 'id': '{{face.id}}', 'x': '{{face.face_left}}', 'y': '{{face.face_top}}', 'w': '{{face.w}}', 'h':'{{face.h}}' }
{% if face.refimg %}
data['who']='{{face.refimg.person.tag}}'
data['distance']="{{face.refimg_lnk.face_distance|round(2)}}"
{% endif %}
{% if face.no_match_override %}
data['override'] = {
'face_id' : '{{face.no_match_override.face_id}}',
'type_id' : '{{face.no_match_override.type.id}}',
'type_name': '{{face.no_match_override.type.name}}',
'who' : '{{face.no_match_override.type.name}}',
'distance' : 'N/A'
}
{% endif %}
{% if face.manual_override %}
data['override'] = {
'face_id' : '{{face.manual_override.face_id}}',
'type_id' : '{{face.manual_override.type.id}}',
'type_name': '{{face.manual_override.type.name}}',
'who' : '{{face.manual_override.person.tag}}',
'distance' : 'N/A'
}
{% endif %}
e.faces.push( data )
{% endfor %}
objs[{{id}}]=e
{% endfor %}

{% for el in NMO_data %}
NMO[{{el.id}}] = { 'type_id': {{el.id}}, 'name': '{{el.name}}' }
{% endfor %}
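The two Jinja loops above serialize the server-side entries into plain JavaScript at render time. For one hypothetical entry (every id, path, tag and distance below is invented for illustration), the browser would end up holding roughly:

```js
// Illustrative rendered result only; the values are made up, the shape follows the template above.
objs[1234] = {
    url: "static/Storage/2021/07/IMG_0042.jpg",
    type: "Image",
    face_model: "normal",
    faces: [
        { id: '17', x: '120', y: '80', w: '96', h: '96', who: 'alice', distance: '0.41' },
        { id: '18', x: '300', y: '60', w: '90', h: '90',
          override: { face_id: '18', type_id: '2', type_name: 'NotAFace',
                      who: 'NotAFace', distance: 'N/A' } }
    ]
};
NMO[2] = { 'type_id': 2, 'name': 'NotAFace' };
```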

function PrettyFname(fname)
{
s='<span class="alert alert-secondary py-2">'
if( fname.indexOf( "static/Import" ) == 0 )
{
s+='<svg width="20" height="20" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#import"/></svg>'
tmp_path=fname.replace(imp_path,"" )
}
if( fname.indexOf( "static/Storage" ) == 0 )
{
s+='<svg width="20" height="20" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#db"/></svg>'
tmp_path=fname.replace("static/Storage","" )
}
if( fname.indexOf( "static/Bin" ) == 0 )
{
s+='<svg width="20" height="20" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#trash-fill"/></svg>'
tmp_path=fname.replace("static/Bin","" )
}
s+=tmp_path+'</span>'
return s
}

function CallViewListRoute(dir)
{
data="eids="+$("#eids").val()
data+="&cwd={{OPT.cwd}}"
data+="&root={{OPT.root}}"
data+="&orig_url={{OPT.orig_url}}"
data+="&view_eid={{OPT.view_eid}}"
// direction (next/prev)
data+="&"+dir+ "=1"
{% if search_term is defined %}
data+="&search_term={{search_term}}"
{% endif %}
$.ajax({ type: 'POST', data: data, url: '/view_list', success: function(res){
current=res.current
eids=res.eids
objs=res.objs
eid_lst=eids.split(",")
offset=res.offset
if( current == first_eid )
$("#la").prop("disabled", true)
if( current == last_eid )
$("#ra").prop("disabled", true)
ViewImageOrVideo()
}
})
}
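From the fields the success handler reads, the /view_list route evidently returns JSON with the new current entry id, the comma-separated eids window, a refreshed objs map and the new offset. A hypothetical response (values invented) would therefore look something like:

```js
// Hypothetical /view_list response, inferred from the success handler above; values are examples only.
var exampleResponse = {
    current: 1301,
    eids: "1298,1299,1300,1301,1302",
    objs: { /* same per-entry shape as the objs[] built in the template above */ },
    offset: 250
};
```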

</script>

<div id="viewer" class="container-fluid">

{% set max=eids.split(',')|length %}
<input type="hidden" name="eids" value={{eids}}>
<div class="row">
<button title="Show previous image" class="col-auto btn btn-outline-info px-2" style="padding: 10%" id="la"
{% if OPT.first_eid == current %}
disabled
{% endif %}
onClick="
cidx = eid_lst.indexOf(current.toString())
prev=cidx-1
if( prev < 0 )
{
if( offset )
{
CallViewListRoute('prev')
return
}
else
{
$('#la').attr('disabled', true )
prev=0
}
}
$('#ra').attr('disabled', false )
current=eid_lst[prev]
ViewImageOrVideo()
if( current == first_eid )
$('#la').attr('disabled', true )
">
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#prev"/></svg>
</button>
<figure class="col col-auto border border-info rounded m-0 p-1" id="figure">
<canvas id="canvas"></canvas>
<img id="throbber" src="{{url_for('internal', filename='throbber.gif')}}" style="display:none;">
<script>
var im=new Image();
im.onload=DrawImg
im.src="../" + objs[current].url
var context = canvas.getContext('2d')
window.addEventListener('resize', DrawImg, false);
</script>
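DrawImg and ResizeVideo live in view_support.js, which is not part of this diff. Given the gap variable and the resize listeners above, a DrawImg of roughly the following shape would fit; this is purely an assumed sketch, not the file's actual code.

```js
// Assumed sketch only; the real DrawImg is defined in view_support.js and is not shown here.
function DrawImg() {
    var canvas = document.getElementById('canvas');
    // scale the image to fit the window, keeping `gap` (0.8) worth of breathing room
    var scale = Math.min((window.innerWidth * gap) / im.width,
                         (window.innerHeight * gap) / im.height);
    canvas.width = im.width * scale;
    canvas.height = im.height * scale;
    canvas.getContext('2d').drawImage(im, 0, 0, canvas.width, canvas.height);
    // face boxes and distances would be overlaid here when the corresponding toggles are on
}
```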
<figcaption id="img-cap" class="figure-caption text-center text-wrap text-break"><span id="fname_i"></span></figcaption>
</figure>
<script>$('#fname_i').html(PrettyFname(objs[current].url))</script>
{% if objs[current].type.name != "Image" %}
<script>$('#figure').hide()</script>
{% endif %}
<div id="video_div" class="col col-auto">
<video id="video" class="col col-auto" controls>
<source src="../{{objs[current].FullPathOnFS()}}" type="video/mp4">
Your browser does not support the video tag.
</video>
<figcaption id="vid-cap" class="figure-caption text-center text-wrap text-break"><span id="fname_v"></span></figcaption>
<script>$('#fname_v').html(PrettyFname(objs[current].url))</script>
</div>
<script>
window.addEventListener('resize', ResizeVideo, false);
ResizeVideo()
{% if objs[current].type.name != "Video" %}
$('#video_div').hide()
{% endif %}
</script>

<button title="Show next image" class="col-auto btn btn-outline-info px-2" style="padding: 10%" id="ra"
{% if OPT.last_eid == current %}
disabled
{% endif %}
onClick="
cidx = eid_lst.indexOf(current.toString())
if( cidx < eid_lst.length-1 )
{
current=eid_lst[cidx+1]
ViewImageOrVideo()
if( current != first_eid )
$('#la').attr('disabled', false )
}
else
CallViewListRoute('next')

if( current == last_eid )
{
$('#ra').attr('disabled', true )
return
}
">
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#next"/></svg>
</button>
</div id="/form-row">
{# use this for color of toggles: https://www.codeply.com/p/4sL9uhevwJ #}
<div class="row">
{# this whole div just takes up the same space as the left button and is hidden for alignment only #}
<div class="col-auto px-0">
<button class="btn btn-outline-info px-2" disabled style="visibility:hidden">
<svg width="16" height="16" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#next"/></svg>
</button>
</div>
<span class="col-auto my-auto">Show:</span>
<div title="Toggle showing filename (hotkey: n)" class="d-flex form-check form-switch border border-info rounded col col-auto my-auto py-1 justify-content-center ps-5">
<input class="form-check-input" type="checkbox" id="fname_toggle" onChange="$('.figure-caption').toggle()" checked>
<label class="form-check-label ps-1" for="fname_toggle">Filename</label>
</div>
<div title="Toggle showing matched faces (hotkey: f)" class="d-flex form-check form-switch border border-info rounded col col-auto my-auto py-1 justify-content-center ps-5">
<input class="form-check-input" type="checkbox" onChange="FaceToggle()" id="faces">
<label class="form-check-label ps-1" for="faces">Faces</label>
</div>
<div title="Toggle showing 'distance' on matched faces (hotkey: d)" class="d-flex form-check form-switch border border-info rounded col col-auto my-auto py-1 justify-content-center ps-5">
<input class="form-check-input" type="checkbox" onChange="DrawImg()" id="distance">
<label class="form-check-label ps-1" for="distance">Distance</label>
</div>
<div title="Change the model used to detect faces" class="col col-auto my-auto">
AI Model:
{# can use 0 as default, it will be (re)set correctly in DrawImg() anyway #}
{{CreateSelect( "model", 0, ["N/A", "normal", "slow/accurate"], "", "rounded norm-txt", [0,1,2])|safe }}
</div>
<div class="col col-auto pt-1">
<button class="btn btn-outline-info p-1" title="Rotate by 90 degrees" onClick="Transform(90)">
<svg width="28" height="28" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#rot90"/></svg>
</button>
<button class="btn btn-outline-info p-1" title="Rotate by 180 degrees" onClick="Transform(180)">
<svg width="28" height="28" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#rot180"/></svg>
</button>
<button class="btn btn-outline-info p-1" title="Rotate by 270 degrees" onClick="Transform(270)">
<svg width="28" height="28" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#rot270"/></svg>
</button>
<button class="btn btn-outline-info p-1" title="Flip horizontally" onClick="Transform('fliph')">
<svg width="28" height="28" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#flip_h"/></svg>
</button>
<button class="btn btn-outline-info p-1" title="Flip vertically" onClick="Transform('flipv')">
<svg width="28" height="28" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#flip_v"/></svg>
</button>
<button class="btn btn-outline-info p-1" title="View in Fullscreen mode (hotkey: F)" onClick="fullscreen=true; ViewImageOrVideo()">
<svg width="28" height="28" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#fullscreen"/></svg>
</button>
<button class="btn btn-outline-info p-1" title="Show logs relating to this filename (hotkey: l)" onClick="JoblogSearch()">
<svg width="28" height="28" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#log"/></svg>
</button>
<button class="btn btn-outline-info p-1" title="View Original" onClick="window.location='/'+objs[current].url">
<svg width="28" height="28" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#download"/></svg>
</button>
<button id="del" class="btn btn-outline-danger p-1" title="Delete (hotkey: Del)"
onClick="$.ajax({ type: 'POST', data: '&eid-0={{current}}', url: '/delete_files', success: function(data){ window.location='/'; return false; } })">
<svg width="28" height="28" fill="currentColor"><use xlink:href="{{url_for('internal', filename='icons.svg')}}#trash"/></svg>
</button>
</div>
</div class="row">
{% endblock main_content %}
{% block script_content %}
<script>
$( document ).keydown(function(event) {
// if dbox is visible, don't process this hot-key, we are inputting text
// into inputs instead
if( $("#dbox").is(':visible') )
return
switch (event.key)
{
case "Left": // IE/Edge specific value
case "ArrowLeft":
if( $('#la').prop('disabled') == false )
$('#la').click()
break;
case "Right": // IE/Edge specific value
case "ArrowRight":
if( $('#ra').prop('disabled') == false )
$('#ra').click()
break;
case "d":
$('#distance').click()
break;
case "f":
$('#faces').click()
break;
case "n":
$('#fname_toggle').click()
break;
case "F":
fullscreen=!document.fullscreen
ViewImageOrVideo()
break;
case "l":
JoblogSearch()
break;
case "Delete":
$('#del').click()
default:
return; // Quit when this doesn't handle the key event.
}
});

var fullscreen=false;
</script>
{% endblock script_content %}
utils/pa_rotate (new executable file, 9 lines)
@@ -0,0 +1,9 @@
#!/bin/sh

echo "start"
b=`basename "$1"`
echo "b=$b"
mv "$1" "/tmp/$b"
echo "mv '$1' to /tmp/$b"
jpegtran -outfile "$1" -rotate "$2" "/tmp/$b"
echo "done jpegtran"
wrapper.sh (18 changed lines)
@@ -1,7 +1,21 @@
#!/bin/bash

su mythtv -g mythtv -c 'ENV="production" python3 -u /code/pa_job_manager.py' &> /var/log/pa_job_manager.out &
gunicorn --bind=0.0.0.0:443 --workers=4 --threads=16 --certfile /etc/letsencrypt/live/pa.depaoli.id.au/fullchain.pem --keyfile /etc/letsencrypt/live/pa.depaoli.id.au/privkey.pem main:app --env ENV="production" --error-logfile gunicorn.error.log --access-logfile gunicorn.log --capture-output
sudo -u pauser ENV=${ENV} python3 -u pa_job_manager.py &> /var/log/pa_job_manager.out &
if [ "$ENV" == "production" ]; then
sudo -u pauser gunicorn --bind=0.0.0.0:80 --workers=4 --threads=16 main:app --env ENV="production" --error-logfile gunicorn.error.log --access-logfile gunicorn.log --capture-output
elif [ "$ENV" == "container" ]; then
# move into src tree so padev responds to code changes
cd /home/ddp/src/photoassistant
# push container logs/errors to stdout so docker logs works
sudo -u pauser gunicorn --bind=0.0.0.0:80 --workers=1 --threads=1 main:app --env ENV="container" --reload --capture-output --log-file - --error-logfile -
else
echo "Not sure which ENV ($ENV) we are running, set up for DEV and don't run job manager (jic)"
sudo -u pauser gunicorn --bind=0.0.0.0:80 --workers=1 --threads=1 main:app --env ENV="development" --error-logfile gunicorn.error.log --access-logfile gunicorn.log --capture-output --enable-stdio-inheritance --reload
fi

# warm the cache to see if this helps with odd restart 404s
curl -sf http://localhost/health

# this should never be reached unless gunicorn exits -- in that case, at least
# it keeps the container alive so we can log in by hand and check the issue/error
sleep 99999