From cc9e827474ce82191d60f8b214f0002cca69818d Mon Sep 17 00:00:00 2001
From: Damien De Paoli
Date: Sun, 15 Jan 2023 13:31:04 +1100
Subject: [PATCH] fixed bug where directories with the same name but different
 path were being lost due to the distinct query in the DB. I don't really
 remember why I decided I needed distinct, so this might introduce other
 issues, but there is an array merge later that should remove any redundant
 data, so I think this is better/good for now

---
 files.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/files.py b/files.py
index dbd365a..20f4684 100644
--- a/files.py
+++ b/files.py
@@ -793,9 +793,9 @@ def GetExistingPathsAsDiv(dt):
         return make_response( '[]' )
     new_dt=new_dtime.strftime('%Y%m%d')
     # find dirs named with this date
-    dirs_arr+=Dir.query.distinct(Dir.rel_path).filter(Dir.rel_path.ilike('%'+new_dt+'%')).all();
+    dirs_arr+=Dir.query.filter(Dir.rel_path.ilike('%'+new_dt+'%')).all();
     # find dirs with non-dirs (files) with this date
-    dirs_arr+=Dir.query.distinct(Dir.rel_path).join(EntryDirLink).join(Entry).filter(Entry.type_id!=dir_ft.id).filter(Entry.name.ilike('%'+new_dt+'%')).all()
+    dirs_arr+=Dir.query.join(EntryDirLink).join(Entry).filter(Entry.type_id!=dir_ft.id).filter(Entry.name.ilike('%'+new_dt+'%')).all()
     # remove duplicates from array
     dirs = set(dirs_arr)
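
Context for the "array merge later" claim in the message: within a single session,
SQLAlchemy's identity map returns the same Python object for a given primary key, so
set(dirs_arr) collapses rows that both queries return. The sketch below is a minimal,
self-contained illustration of that behaviour only; it uses a toy Dir model and an
in-memory SQLite database, not the project's actual models, filters, or session setup.

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Dir(Base):
        __tablename__ = 'dir'
        id = Column(Integer, primary_key=True)
        rel_path = Column(String)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        # Two dirs with the same date in their paths.
        session.add_all([Dir(rel_path='photos/20230115'),
                         Dir(rel_path='backup/20230115')])
        session.commit()

        # Two overlapping queries, roughly like GetExistingPathsAsDiv (filters simplified).
        by_path = session.query(Dir).filter(Dir.rel_path.ilike('%20230115%')).all()
        by_date = session.query(Dir).filter(Dir.rel_path.ilike('%2023%')).all()

        merged = by_path + by_date   # four references, but only two distinct rows
        # The identity map hands back one instance per primary key, so set() dedupes.
        print(len(set(merged)))      # -> 2

Note that this relies on both queries running in the same session; detached objects
from separate sessions hash by object identity and would not be collapsed by set().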