fixed up a few issues found by the linter
 files.py | 13
 files.py | 13
@@ -13,7 +13,7 @@ import hashlib
 import exifread
 import base64
 import numpy
-import cv2
+from cv2 import cv2
 import time
 import re

@@ -27,6 +27,8 @@ from settings import Settings
 from shared import SymlinkName
 from dups import Duplicates

+# pylint: disable=no-member
+
 ################################################################################
 # Class describing File in the database, and via sqlalchemy, connected to the DB as well
 # This has to match one-for-one the DB table
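Both changes above target pylint's no-member false positives: cv2 is a compiled extension module, so pylint cannot introspect its attributes and flags calls such as cv2.imread. A minimal sketch of the two workarounds, assuming an opencv-python build that still exposes the cv2.cv2 submodule; the load_image helper and its use of numpy are illustrative only, not code from this repository:

    # pylint: disable=no-member      # file-level disable, as added in this commit
    from cv2 import cv2              # re-import trick so pylint can resolve cv2 members
    import numpy

    def load_image(path):
        """Illustrative helper: read an image and return its shape."""
        img = cv2.imread(path)       # flagged as no-member under a plain `import cv2`
        if img is None:
            raise FileNotFoundError(path)
        return numpy.asarray(img).shape

Either measure alone is usually enough; this commit applies both.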
@@ -110,7 +112,6 @@ def GetJM_Message():
     return msg

 def ClearJM_Message(id):
-    msg=PA_JobManager_Message.query.get(id)
     PA_JobManager_Message.query.filter(PA_JobManager_Message.id==id).delete()
     db.session.commit()
     return
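The dropped line in ClearJM_Message was an unused-variable hit: the bulk filter(...).delete() that follows never needs the object loaded first. A rough Flask-SQLAlchemy sketch of the same delete-by-id pattern; Message, db, and the SQLite URI are stand-ins, not this app's real names:

    from flask import Flask
    from flask_sqlalchemy import SQLAlchemy

    app = Flask(__name__)
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"   # illustrative URI
    db = SQLAlchemy(app)

    class Message(db.Model):            # stand-in for PA_JobManager_Message
        id = db.Column(db.Integer, primary_key=True)
        text = db.Column(db.String)

    def clear_message(msg_id):
        # Delete by primary key without fetching the row first.
        Message.query.filter(Message.id == msg_id).delete()
        db.session.commit()

Called inside an application context, this issues a single DELETE rather than a SELECT followed by a DELETE.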
@@ -270,10 +271,8 @@ def fix_dups():
         st.SetMessage(f"Err, no dups - should now clear the FE 'danger' message?")
         return render_template("base.html")

     jexes = JobExtra.query.join(Job).join(PA_JobManager_Message).filter(PA_JobManager_Message.id==request.form['fe_msg_id']).all()
     path=[jex.value for jex in jexes if jex.name == "path"][0]
     prefix = SymlinkName(path,path+'/')
     if 'pagesize' not in request.form:
         jexes = JobExtra.query.join(Job).join(PA_JobManager_Message).filter(PA_JobManager_Message.id==request.form['fe_msg_id']).all()
         pagesize=int([jex.value for jex in jexes if jex.name == "pagesize"][0])
     else:
         pagesize=int(request.form['pagesize'])
@@ -293,12 +292,12 @@ def rm_dups():
     for el in request.form:
         if 'kfhash-' in el:
             # get which row/number kf it is...
-            pfx, which = el.split('-')
+            _, which = el.split('-')
             jex.append( JobExtra( name=f"kfid-{which}", value=request.form['kfname-'+which] ) )
             jex.append( JobExtra( name=f"kfhash-{which}", value=request.form[el] ) )
         if 'kdhash-' in el:
             # get which row/number kd it is...
-            pfx, which = el.split('-')
+            _, which = el.split('-')
             jex.append( JobExtra( name=f"kdid-{which}", value=request.form['kdid-'+which] ) )
             jex.append( JobExtra( name=f"kdhash-{which}", value=request.form[el] ) )
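The pfx to _ renames are the standard fix for pylint's unused-variable warning when only part of an unpacked value is needed: binding the throwaway prefix to _ signals intent and silences the check. A standalone illustration with made-up form data; only the kfhash-/kdhash- key shape is taken from the code above:

    form = {"kfhash-3": "abc123", "kdhash-7": "def456"}   # illustrative form data

    for key, value in form.items():
        _, which = key.split("-")      # the prefix is irrelevant here, so discard it
        print(f"row {which}: hash {value}")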
@@ -12,6 +12,7 @@
 ###

 ### SQLALCHEMY IMPORTS ###
+# pylint: disable=no-member

 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy import Column, Integer, String, Sequence, Float, ForeignKey, DateTime, LargeBinary, Boolean
@@ -37,7 +38,7 @@ import hashlib
 import exifread
 import base64
 import numpy
-import cv2
+from cv2 import cv2
 import socket
 import threading
 import io
@@ -507,7 +508,7 @@ def GetDateFromFile(file, stat):
     print(f"trying exif read of {file}")
     f = open(file, 'rb')
     tags = exifread.process_file(f)
-    date_str, time_str = str(tags["EXIF DateTimeOriginal"]).split(" ")
+    date_str, _ = str(tags["EXIF DateTimeOriginal"]).split(" ")
     print(date_str)
     year, month, day = date_str.split(":")
     year=int(year)
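Same idea in GetDateFromFile: EXIF DateTimeOriginal renders in the form YYYY:MM:DD HH:MM:SS, the time half is unused, so it is bound to _. A minimal exifread sketch of the same extraction, with the file handle wrapped in a with block; the function name and path handling are illustrative, not the project's code:

    import exifread

    def exif_date(path):
        """Return (year, month, day) parsed from EXIF DateTimeOriginal."""
        with open(path, "rb") as f:
            tags = exifread.process_file(f)
        # Tag renders as e.g. "2021:06:15 14:30:05"; keep only the date part.
        date_str, _ = str(tags["EXIF DateTimeOriginal"]).split(" ")
        year, month, day = (int(x) for x in date_str.split(":"))
        return year, month, day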
@@ -868,7 +869,7 @@ def RemoveDups(job):
     dup_cnt=0
     for jex in job.extra:
         if 'kfid-' in jex.name:
-            pfx, which = jex.name.split('-')
+            _, which = jex.name.split('-')
             hash=[jex.value for jex in job.extra if jex.name == f"kfhash-{which}"][0]
             AddLogForJob(job, f"deleting duplicate files with hash: {hash} but keeping file with DB id={jex.value}" )
             files=session.query(Entry).join(File).filter(File.hash==hash).all()
@@ -892,7 +893,7 @@
             dup_cnt += 1

         if 'kdid-' in jex.name:
-            pfx, which = jex.name.split('-')
+            _, which = jex.name.split('-')
             hashes=[jex.value for jex in job.extra if jex.name == f"kdhash-{which}"][0]
             keeping=jex.value
             tmp=session.query(Dir).filter(Dir.eid==keeping).first()
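RemoveDups applies the same split to job-extra names: entries are keyed kfid-<n>/kfhash-<n> (and kdid-<n>/kdhash-<n>), so the row number recovered from the name is used to find the matching hash entry. A simplified pairing sketch using plain tuples in place of the JobExtra rows; the ids and hashes are made up:

    extras = [("kfid-1", "42"), ("kfhash-1", "abc123"),
              ("kfid-2", "57"), ("kfhash-2", "def456")]    # stand-in for job.extra

    for name, value in extras:
        if name.startswith("kfid-"):
            _, which = name.split("-")
            file_hash = next(v for n, v in extras if n == f"kfhash-{which}")
            print(f"keep DB id {value}; delete other files with hash {file_hash}")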