put summary counts on dups

2021-03-02 22:44:00 +11:00
parent 42e00d0aea
commit 689081ef0b
2 changed files with 18 additions and 1 deletion

@@ -265,17 +265,30 @@ def fix_dups():
     per_file_dups=[]
     per_path_dups=[]
     hashes=""
+    overall_dup_cnt=0
+    overall_dup_sets=0
     for hash in dups:
+        # more than 2 files (just ask per file)
         if len(dups[hash]) > 2:
             per_file_dups.append(dups[hash])
+            overall_dup_cnt += len(dups[hash])
+            overall_dup_sets += 1
+        # only 2 copies, and files are in same dir (so must be diff name, so just ask)
         elif dups[hash][0]['d'] == dups[hash][1]['d']:
             per_file_dups.append(dups[hash])
+            overall_dup_cnt += len(dups[hash])
+            overall_dup_sets += 1
+        # content same, filename different (just ask per file)
        elif dups[hash][0]['f'] != dups[hash][1]['f']:
             per_file_dups.append(dups[hash])
+            overall_dup_cnt += len(dups[hash])
+            overall_dup_sets += 1
         # by here we have only 2 files, with the same name, different path
         # (MOST COMMON, and I think we dont care per file, just per path)
         elif d1 != dups[hash][0]['d']:
             if d1 != '':
+                overall_dup_cnt += dup_cnt
+                overall_dup_sets += 1
                 per_path_dups.append({'count': dup_cnt, 'd1': d1, 'd2': d2, 'did1': did1, 'did2': did2, 'hashes' : hashes })
             dup_cnt=1
             d1 = dups[hash][0]['d']
@@ -289,8 +302,11 @@ def fix_dups():
             hashes += f"{hash},"
     if d1 != '':
+        overall_dup_cnt += dup_cnt
+        overall_dup_sets += 1
         per_path_dups.append({'count': dup_cnt, 'd1': d1, 'd2': d2, 'did1': did1, 'did2': did2, 'hashes' : hashes })
-    return render_template("dups.html", per_file_dups=per_file_dups, per_path_dups=per_path_dups, fe_msg_id=request.form['fe_msg_id'] )
+    return render_template("dups.html", per_file_dups=per_file_dups, per_path_dups=per_path_dups, fe_msg_id=request.form['fe_msg_id'], overall_dup_cnt=overall_dup_cnt, overall_dup_sets=overall_dup_sets )

 @app.route("/rm_dups", methods=["POST"])
 def rm_dups():
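
As a sanity check on the new counters, here is a minimal standalone sketch, assuming (as the diff suggests) that dups maps a content hash to a list of {'f': filename, 'd': directory} records. The per-directory-pair grouping state (dup_cnt, d1, d2, did1, did2) is collapsed to one set per hash, and the sample data is invented, so this is illustrative only:

# Illustrative sketch only -- the dups data below is made up for the example.
dups = {
    "a1b2": [{'f': 'cat.jpg', 'd': '/photos/2019'}, {'f': 'cat.jpg', 'd': '/photos/old'}],
    "c3d4": [{'f': 'dog.jpg', 'd': '/photos/2019'}, {'f': 'dog-copy.jpg', 'd': '/photos/2019'}],
    "e5f6": [{'f': 'sun.jpg', 'd': '/a'}, {'f': 'sun.jpg', 'd': '/b'}, {'f': 'sun.jpg', 'd': '/c'}],
}

overall_dup_cnt = 0   # total number of files involved in duplicate sets
overall_dup_sets = 0  # number of duplicate sets (or directory pairs)

for hash in dups:
    overall_dup_cnt += len(dups[hash])
    overall_dup_sets += 1

print(f"{overall_dup_sets} sets/dirs of files containing {overall_dup_cnt} files")
# prints: 3 sets/dirs of files containing 7 files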

@@ -7,6 +7,7 @@
 'Delete Duplicates', the files / directories in red will be deleted from the file
 system, those in green will remain</div>
+<h5>{{overall_dup_sets}} sets/dirs of files containing {{overall_dup_cnt}} files</h5>
 <script>
 let D=[]
 let F=[]
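
The new heading reads the two context variables added to render_template() above. A quick way to preview what it produces is to render just that line with Jinja2 (which Flask uses under the hood) and sample numbers; this snippet is only an illustration:

from jinja2 import Template

# Render only the added <h5> line with sample counts; in the app the values
# come from render_template(..., overall_dup_cnt=..., overall_dup_sets=...).
line = Template("<h5>{{overall_dup_sets}} sets/dirs of files containing {{overall_dup_cnt}} files</h5>")
print(line.render(overall_dup_sets=3, overall_dup_cnt=7))
# prints: <h5>3 sets/dirs of files containing 7 files</h5>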