@@ -132,6 +132,17 @@ public Object visit(ASTOrNode node, Object data) {
        SortedSet<String> fields = new TreeSet<>(eqNodesByField.keySet());
        fields.addAll(rangeNodesByField.keySet());

+        // capture the thresholds
+        int maxOrExpansionFstThreshold = config.getMaxOrExpansionFstThreshold();
+        int maxOrExpansionThreshold = config.getMaxOrExpansionThreshold();
+        int maxOrRangeThreshold = config.getMaxOrRangeThreshold();
+
+        // if specific fields were requested, then reduce the thresholds
+        if (this.fields != null) {
+            maxOrExpansionThreshold = 2;
+            maxOrRangeThreshold = 2;
+        }
+
        for (String field : fields) {
            // if fields is not specified or the current field is in fields it can be reduced
            boolean canReduce = (this.fields == null || this.fields.contains(field));
@@ -145,9 +156,9 @@ public Object visit(ASTOrNode node, Object data) {
            if (canReduce &&
                    !Constants.ANY_FIELD.equals(field) &&
                    !Constants.NO_FIELD.equals(field) &&
-                    (eqNodes.size() >= config.getMaxOrExpansionFstThreshold() ||
-                            eqNodes.size() >= config.getMaxOrExpansionThreshold() ||
-                            rangeNodes.size() >= config.getMaxOrRangeThreshold()
+                    (eqNodes.size() >= maxOrExpansionFstThreshold ||
+                            eqNodes.size() >= maxOrExpansionThreshold ||
+                            rangeNodes.size() >= maxOrRangeThreshold
                    ) &&
                    isIndexed(field)) {
                // @formatter:on
@@ -164,17 +175,17 @@ public Object visit(ASTOrNode node, Object data) {

                try {
                    // if we have an hdfs cache directory and if past the fst/list threshold, then create the fst/list and replace the list with an assignment
-                    if (fstHdfsUri != null && (eqNodes.size() >= config.getMaxOrExpansionFstThreshold())) {
+                    if (fstHdfsUri != null && (eqNodes.size() >= maxOrExpansionFstThreshold)) {
                        URI fstPath = createFst(values);
                        markers.add(QueryPropertyMarker.create(new ExceededOr(field, fstPath).getJexlNode(), EXCEEDED_OR));
                        eqNodes = null;
-                    } else if (eqNodes.size() >= config.getMaxOrExpansionThreshold()) {
+                    } else if (eqNodes.size() >= maxOrExpansionThreshold) {
                        markers.add(QueryPropertyMarker.create(new ExceededOr(field, values).getJexlNode(), EXCEEDED_OR));
                        eqNodes = null;
                    }

                    // handle range nodes separately
-                    if (rangeNodes.size() >= config.getMaxOrRangeThreshold()) {
+                    if (rangeNodes.size() >= maxOrRangeThreshold) {
                        TreeMap<Range,JexlNode> ranges = new TreeMap<>();
                        rangeNodes.forEach(rangeNode -> ranges.put(rangeNodeToRange(rangeNode), rangeNode));

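Taken together, the hunks above replace direct config lookups with locally captured thresholds that drop to 2 whenever the visitor was handed an explicit field set. A minimal standalone sketch of that selection rule, using hypothetical names and threshold values rather than the visitor's actual state:

import java.util.Set;

// Sketch only: mirrors the idea of lowering the value/range thresholds to 2
// when specific fields were requested, instead of mutating the shared config.
public class ThresholdSketch {

    static boolean shouldPushDown(String field, Set<String> requestedFields,
                    int eqNodeCount, int rangeNodeCount,
                    int maxOrExpansionThreshold, int maxOrRangeThreshold) {
        // when the caller named specific fields, lower the bar to the minimum
        // that still makes a pushdown worthwhile (two terms)
        if (requestedFields != null) {
            maxOrExpansionThreshold = 2;
            maxOrRangeThreshold = 2;
        }
        boolean fieldSelected = requestedFields == null || requestedFields.contains(field);
        return fieldSelected && (eqNodeCount >= maxOrExpansionThreshold || rangeNodeCount >= maxOrRangeThreshold);
    }

    public static void main(String[] args) {
        // with example default thresholds, a 3-value field is left alone...
        System.out.println(shouldPushDown("FOO", null, 3, 0, 100, 50));          // false
        // ...but once FOO is explicitly requested, 3 values is enough
        System.out.println(shouldPushDown("FOO", Set.of("FOO"), 3, 0, 100, 50)); // true
    }
}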
@@ -649,53 +649,40 @@ protected ASTJexlScript pushdownLargeFieldedLists(ShardQueryConfiguration config
        }

        if (termCount - capacitySum <= config.getFinalMaxTermThreshold()) {
-            // preserve the original config and set minimum thresholds for creating Value and Range ivarators
-            int originalMaxOrExpansionThreshold = config.getMaxOrExpansionThreshold();
-            int originalMaxOrRangeThreshold = config.getMaxOrRangeThreshold();
-
-            config.setMaxOrExpansionThreshold(2);
-            config.setMaxOrRangeThreshold(2);
-
-            try {
-                // invert pushdownCapacity to get the largest payoffs first
-                SortedMap<Integer,List<String>> sortedMap = new TreeMap<>();
-                for (String fieldName : pushdownCapacity.keySet()) {
-                    Integer reduction = pushdownCapacity.get(fieldName);
-                    List<String> fields = sortedMap.computeIfAbsent(reduction, k -> new ArrayList<>());
-                    fields.add(fieldName);
-                }
-
-                // sort from largest to smallest reductions and make reductions until under the threshold
-                Set<String> fieldsToReduce = new HashSet<>();
-                int toReduce = termCount - config.getFinalMaxTermThreshold();
-                while (toReduce > 0) {
-                    // get the highest value field out of the map
-                    Integer reduction = sortedMap.lastKey();
-                    List<String> fields = sortedMap.get(reduction);
-
-                    // take the first field
-                    String field = fields.remove(0);
-                    fieldsToReduce.add(field);
-                    toReduce -= reduction;
-
-                    // if there are no more reductions of this size remove the reduction from pushdown capacity
-                    if (fields.size() == 0) {
-                        sortedMap.remove(reduction);
-                    }
-                }
-
-                // execute the reduction
-                if (hdfsQueryCacheUri != null) {
-                    FileSystem fs = VisitorFunction.fileSystemCache.getFileSystem(hdfsQueryCacheUri);
-                    // Find large lists of values against the same field and push down into an Ivarator
-                    script = PushdownLargeFieldedListsVisitor.pushdown(config, script, fs, hdfsQueryCacheUri.toString(), null, fieldsToReduce);
-                } else {
-                    script = PushdownLargeFieldedListsVisitor.pushdown(config, script, null, null, null, fieldsToReduce);
-                }
-            } finally {
-                // reset config thresholds
-                config.setMaxOrExpansionThreshold(originalMaxOrExpansionThreshold);
-                config.setMaxOrRangeThreshold(originalMaxOrRangeThreshold);
-            }
+            // invert pushdownCapacity to get the largest payoffs first
+            SortedMap<Integer,List<String>> sortedMap = new TreeMap<>();
+            for (String fieldName : pushdownCapacity.keySet()) {
+                Integer reduction = pushdownCapacity.get(fieldName);
+                List<String> fields = sortedMap.computeIfAbsent(reduction, k -> new ArrayList<>());
+                fields.add(fieldName);
+            }
+
+            // sort from largest to smallest reductions and make reductions until under the threshold
+            Set<String> fieldsToReduce = new HashSet<>();
+            int toReduce = termCount - config.getFinalMaxTermThreshold();
+            while (toReduce > 0) {
+                // get the highest value field out of the map
+                Integer reduction = sortedMap.lastKey();
+                List<String> fields = sortedMap.get(reduction);
+
+                // take the first field
+                String field = fields.remove(0);
+                fieldsToReduce.add(field);
+                toReduce -= reduction;
+
+                // if there are no more reductions of this size remove the reduction from pushdown capacity
+                if (fields.size() == 0) {
+                    sortedMap.remove(reduction);
+                }
+            }
+
+            // execute the reduction
+            if (hdfsQueryCacheUri != null) {
+                FileSystem fs = VisitorFunction.fileSystemCache.getFileSystem(hdfsQueryCacheUri);
+                // Find large lists of values against the same field and push down into an Ivarator
+                script = PushdownLargeFieldedListsVisitor.pushdown(config, script, fs, hdfsQueryCacheUri.toString(), null, fieldsToReduce);
+            } else {
+                script = PushdownLargeFieldedListsVisitor.pushdown(config, script, null, null, null, fieldsToReduce);
+            }
        }
    }
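For reference, the relocated block above is a greedy selection: invert the field-to-reduction map, then repeatedly pull a field from the largest remaining bucket until enough terms have been trimmed. A self-contained sketch of that step with made-up capacities (the real code builds pushdownCapacity earlier in this method and hands the chosen fields to PushdownLargeFieldedListsVisitor.pushdown):

import java.util.*;

// Greedy selection sketch: pick fields offering the largest term reduction
// first until the requested number of terms has been trimmed. Inputs are hypothetical.
public class GreedyReductionSketch {

    static Set<String> selectFields(Map<String,Integer> pushdownCapacity, int toReduce) {
        // invert capacity -> fields so the largest payoffs can be taken first
        SortedMap<Integer,List<String>> byReduction = new TreeMap<>();
        pushdownCapacity.forEach((field, reduction) ->
                        byReduction.computeIfAbsent(reduction, k -> new ArrayList<>()).add(field));

        Set<String> fieldsToReduce = new HashSet<>();
        // guard against an exhausted map is added here for the sketch
        while (toReduce > 0 && !byReduction.isEmpty()) {
            Integer reduction = byReduction.lastKey();          // biggest remaining payoff
            List<String> fields = byReduction.get(reduction);
            fieldsToReduce.add(fields.remove(0));
            toReduce -= reduction;
            if (fields.isEmpty()) {
                byReduction.remove(reduction);                  // bucket exhausted
            }
        }
        return fieldsToReduce;
    }

    public static void main(String[] args) {
        Map<String,Integer> capacity = Map.of("FOO", 40, "BAR", 10, "BAZ", 25);
        // need to shed 50 terms: FOO (40) then BAZ (25) are chosen, BAR is untouched
        System.out.println(selectFields(capacity, 50));
    }
}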