mirror of
https://github.com/KingDuckZ/dindexer.git
synced 2025-02-21 12:34:56 +00:00
I forgot to check the tags...
This commit is contained in:
parent
417e7105d3
commit
0d16e4005e
1 changed file with 26 additions and 6 deletions
|
@ -21,10 +21,28 @@
|
||||||
#include "dindexerConfig.h"
|
#include "dindexerConfig.h"
|
||||||
#include "dindexer-core/split_tags.hpp"
|
#include "dindexer-core/split_tags.hpp"
|
||||||
#include <boost/regex.hpp>
|
#include <boost/regex.hpp>
|
||||||
|
#include <ciso646>
|
||||||
|
#include <algorithm>
|
||||||
|
|
||||||
namespace dindb {
|
namespace dindb {
|
||||||
namespace {
|
namespace {
|
||||||
void store_matching_paths (redis::Batch& parBatch, std::vector<LocatedItem>& parOut, std::vector<FileIDType>& parIDs, const boost::regex& parSearch) {
|
bool all_tags_match (const TagList& parTags, const std::string& parTaglist) {
|
||||||
|
const auto tags = dincore::split_tags(parTaglist);
|
||||||
|
|
||||||
|
if (tags.size() >= parTags.size()) {
|
||||||
|
for (const auto& required_tag : parTags) {
|
||||||
|
if (std::find(tags.begin(), tags.end(), required_tag) == tags.end()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
void store_matching_paths (redis::Batch& parBatch, std::vector<LocatedItem>& parOut, std::vector<FileIDType>& parIDs, const boost::regex& parSearch, const TagList& parTags) {
|
||||||
using dinhelp::lexical_cast;
|
using dinhelp::lexical_cast;
|
||||||
assert(parIDs.size() == parBatch.replies().size());
|
assert(parIDs.size() == parBatch.replies().size());
|
||||||
|
|
||||||
|
@ -35,9 +53,11 @@ namespace dindb {
|
||||||
const auto& path = redis::get_string(reply[0]);
|
const auto& path = redis::get_string(reply[0]);
|
||||||
|
|
||||||
if (boost::regex_search(path, parSearch)) {
|
if (boost::regex_search(path, parSearch)) {
|
||||||
|
if (parTags.empty() or all_tags_match(parTags, redis::get_string(reply[2]))) {
|
||||||
const auto group_id = lexical_cast<GroupIDType>(redis::get_string(reply[1]));
|
const auto group_id = lexical_cast<GroupIDType>(redis::get_string(reply[1]));
|
||||||
parOut.push_back(LocatedItem{path, parIDs[id_index], group_id});
|
parOut.push_back(LocatedItem{path, parIDs[id_index], group_id});
|
||||||
}
|
}
|
||||||
|
}
|
||||||
assert(id_index < parIDs.size());
|
assert(id_index < parIDs.size());
|
||||||
++id_index;
|
++id_index;
|
||||||
}
|
}
|
||||||
|
@ -70,18 +90,18 @@ namespace dindb {
|
||||||
auto batch = parRedis.make_batch();
|
auto batch = parRedis.make_batch();
|
||||||
for (const auto& itm : parRedis.scan(PROGRAM_NAME ":file:*")) {
|
for (const auto& itm : parRedis.scan(PROGRAM_NAME ":file:*")) {
|
||||||
++curr_count;
|
++curr_count;
|
||||||
batch.run("HMGET", itm, "path", "group_id");
|
batch.run("HMGET", itm, "path", "group_id", "tags");
|
||||||
ids.push_back(lexical_cast<FileIDType>(split_and_trim(itm, ':').back()));
|
ids.push_back(lexical_cast<FileIDType>(split_and_trim(itm, ':').back()));
|
||||||
|
|
||||||
if (curr_count == prefetch_count) {
|
if (curr_count == prefetch_count) {
|
||||||
store_matching_paths(batch, retval, ids, search);
|
store_matching_paths(batch, retval, ids, search, parTags);
|
||||||
batch.reset();
|
batch.reset();
|
||||||
curr_count = 0;
|
curr_count = 0;
|
||||||
ids.clear();
|
ids.clear();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (curr_count)
|
if (curr_count)
|
||||||
store_matching_paths(batch, retval, ids, search);
|
store_matching_paths(batch, retval, ids, search, parTags);
|
||||||
return retval;
|
return retval;
|
||||||
}
|
}
|
||||||
} //namespace dindb
|
} //namespace dindb
|
||||||
|
|
Loading…
Add table
Reference in a new issue