Remove duplicates from search results — fixes BUG-70

This commit is contained in:
2021-10-10 21:58:58 +11:00
parent 91f0f10767
commit d40f2de2a1

View File

@@ -247,7 +247,9 @@ def GetEntries( OPT ):
dir_data=Entry.query.join(File).join(EntryDirLink).join(Dir).filter(Dir.rel_path.ilike(f"%{search_term}%")).order_by(File.year.desc(),File.month.desc(),File.day.desc(),Entry.name).offset(OPT.offset).limit(OPT.how_many).all()
ai_data=Entry.query.join(File).join(FaceFileLink).join(Face).join(FaceRefimgLink).join(Refimg).join(PersonRefimgLink).join(Person).filter(Person.tag.ilike(f"%{search_term}%")).order_by(File.year.desc(),File.month.desc(),File.day.desc(),Entry.name).offset(OPT.offset).limit(OPT.how_many).all()
# remove any duplicates from combined data
all_entries = file_data
all_entries = []
for f in file_data:
all_entries.append(f)
for d in dir_data:
add_it=1
for f in file_data:
@@ -255,7 +257,7 @@ def GetEntries( OPT ):
add_it=0
break
if add_it:
all_entries = all_entries + d
all_entries.append(d)
for a in ai_data:
add_it=1
for f in file_data:
@@ -263,7 +265,7 @@ def GetEntries( OPT ):
add_it=0
break
if add_it:
all_entries = all_entries + a
all_entries.append(a)
return all_entries
for path in OPT.paths: