Commit d8b44ff

Use bloom filter for evaluating dynamic filters on strings
BenchmarkDynamicPageFilter.filterPages
(inputDataSet = VARCHAR_RANDOM, inputNullChance = 0.05, nonNullsSelectivity = 0.2, nullsAllowed = false, Mode = thrpt, Cnt = 20)

filterSize   Before Score          After Score           Units
100          145.858 ±  4.541      590.506 ± 28.510      ops/s
1000         136.995 ±  2.395      596.036 ± 22.694      ops/s
10000        136.990 ±  5.284      594.118 ± 15.764      ops/s
100000       114.591 ±  7.307      587.445 ±  9.818      ops/s
1000000       43.234 ±  1.621      578.800 ± 15.694      ops/s
5000000       40.018 ±  2.245      464.153 ± 20.914      ops/s
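In other words, throughput after the change stays roughly flat (about 590 ops/s) as the filter grows, where it previously degraded: roughly a 4x improvement at filterSize = 100 (145.9 → 590.5 ops/s) and about a 13x improvement at 1,000,000 values (43.2 → 578.8 ops/s).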
1 parent a99d96e commit d8b44ff

File tree

5 files changed: +355 -39 lines changed

core/trino-main/src/main/java/io/trino/sql/gen/columnar/BloomFilter.java

Lines changed: 181 additions & 0 deletions
@@ -0,0 +1,181 @@
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.sql.gen.columnar;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import io.airlift.slice.Slice;
import io.airlift.slice.XxHash64;
import io.trino.operator.project.InputChannels;
import io.trino.spi.Page;
import io.trino.spi.block.Block;
import io.trino.spi.block.ValueBlock;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.predicate.Domain;
import io.trino.spi.type.CharType;
import io.trino.spi.type.Type;
import io.trino.spi.type.VarbinaryType;
import io.trino.spi.type.VarcharType;

import java.util.List;
import java.util.function.Supplier;

import static com.google.common.base.Verify.verify;
import static java.util.Objects.requireNonNull;

public class BloomFilter
{
    private BloomFilter() {}

    public static boolean canUseBloomFilter(Domain domain)
    {
        Type type = domain.getType();
        if (type instanceof VarcharType || type instanceof CharType || type instanceof VarbinaryType) {
            verify(type.getJavaType() == Slice.class, "Type is not backed by Slice");
            return !domain.isNone() && !domain.isAll() && domain.isNullableDiscreteSet();
        }
        return false;
    }

    public static Supplier<FilterEvaluator> createBloomFilterEvaluator(Domain domain, int inputChannel)
    {
        return () -> new ColumnarFilterEvaluator(
                new DictionaryAwareColumnarFilter(
                        new ColumnarBloomFilter(domain.getNullableDiscreteSet(), inputChannel, domain.getType())));
    }

    private static final class ColumnarBloomFilter
            implements ColumnarFilter
    {
        private final SliceBloomFilter filter;
        private final boolean isNullAllowed;
        private final InputChannels inputChannels;

        public ColumnarBloomFilter(Domain.DiscreteSet discreteSet, int inputChannel, Type type)
        {
            this.isNullAllowed = discreteSet.containsNull();
            this.filter = new SliceBloomFilter((List<Slice>) (List<?>) discreteSet.getNonNullValues(), type);
            this.inputChannels = new InputChannels(ImmutableList.of(inputChannel), ImmutableList.of(inputChannel));
        }

        @Override
        public int filterPositionsRange(ConnectorSession session, int[] outputPositions, int offset, int size, Page page)
        {
            ValueBlock block = (ValueBlock) page.getBlock(0);
            int selectedPositionsCount = 0;
            for (int position = offset; position < offset + size; position++) {
                boolean result = block.isNull(position) ? isNullAllowed : filter.test(block, position);
                outputPositions[selectedPositionsCount] = position;
                selectedPositionsCount += result ? 1 : 0;
            }
            return selectedPositionsCount;
        }

        @Override
        public int filterPositionsList(ConnectorSession session, int[] outputPositions, int[] activePositions, int offset, int size, Page page)
        {
            ValueBlock block = (ValueBlock) page.getBlock(0);
            int selectedPositionsCount = 0;
            for (int index = offset; index < offset + size; index++) {
                int position = activePositions[index];
                boolean result = block.isNull(position) ? isNullAllowed : filter.test(block, position);
                outputPositions[selectedPositionsCount] = position;
                selectedPositionsCount += result ? 1 : 0;
            }
            return selectedPositionsCount;
        }

        @Override
        public InputChannels getInputChannels()
        {
            return inputChannels;
        }
    }

    public static final class SliceBloomFilter
    {
        private final long[] bloom;
        private final int bloomSizeMask;
        private final Type type;

        /**
         * A Bloom filter for a set of Slice values.
         * This is approx 2X faster than the Bloom filter implementations in ORC and parquet because
         * it uses a single hash function and uses that to set 3 bits within a 64 bit word.
         * The memory footprint is up to (4 * values.size()) bytes, which is much smaller than maintaining a hash set of strings.
         *
         * @param values List of values used for filtering
         */
        public SliceBloomFilter(List<Slice> values, Type type)
        {
            this.type = requireNonNull(type, "type is null");
            int bloomSize = getBloomFilterSize(values.size());
            bloom = new long[bloomSize];
            bloomSizeMask = bloomSize - 1;
            for (Slice value : values) {
                long hashCode = XxHash64.hash(value);
                // Set 3 bits in a 64 bit word
                bloom[bloomIndex(hashCode)] |= bloomMask(hashCode);
            }
        }

        private static int getBloomFilterSize(int valuesCount)
        {
            // Linear hash table size is the highest power of two less than or equal to number of values * 4. This means that the
            // table is under half full, e.g. 127 elements gets 256 slots.
            int hashTableSize = Integer.highestOneBit(valuesCount * 4);
            // We will allocate 8 bits in the bloom filter for every slot in a comparable hash table.
            // The bloomSize is a count of longs, hence / 8.
            return Math.max(1, hashTableSize / 8);
        }

        public boolean test(Block block, int position)
        {
            return contains(type.getSlice(block, position));
        }

        public boolean contains(Slice data)
        {
            long hashCode = XxHash64.hash(data);
            long mask = bloomMask(hashCode);
            return mask == (bloom[bloomIndex(hashCode)] & mask);
        }

        @VisibleForTesting
        public boolean contains(Slice data, int offset, int length)
        {
            long hashCode = XxHash64.hash(data, offset, length);
            long mask = bloomMask(hashCode);
            return mask == (bloom[bloomIndex(hashCode)] & mask);
        }

        private int bloomIndex(long hashCode)
        {
            // Lower 21 bits are not used by bloomMask
            // These are enough for the maximum size array that will be used here
            return (int) (hashCode & bloomSizeMask);
        }

        private static long bloomMask(long hashCode)
        {
            // returned mask sets 3 bits based on portions of given hash
            // Extract 38th to 43rd bits
            return (1L << ((hashCode >> 21) & 63))
                    // Extract 32nd to 37th bits
                    | (1L << ((hashCode >> 27) & 63))
                    // Extract 26th to 31st bits
                    | (1L << ((hashCode >> 33) & 63));
        }
    }
}
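For readers exploring the new class in isolation, here is a minimal usage sketch; the example values, class name, and expected console output are illustrative assumptions, not part of the commit.

// Hypothetical standalone example exercising the SliceBloomFilter added above.
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import io.trino.sql.gen.columnar.BloomFilter.SliceBloomFilter;

import java.util.List;

import static io.trino.spi.type.VarcharType.VARCHAR;

public class SliceBloomFilterExample
{
    public static void main(String[] args)
    {
        // Build-side values that a dynamic filter might have collected (illustrative)
        List<Slice> values = List.of(
                Slices.utf8Slice("alpha"),
                Slices.utf8Slice("bravo"),
                Slices.utf8Slice("charlie"));
        SliceBloomFilter filter = new SliceBloomFilter(values, VARCHAR);

        // Inserted values always pass: a Bloom filter has no false negatives
        System.out.println(filter.contains(Slices.utf8Slice("alpha"))); // true
        // Other values are rejected, modulo a small false-positive rate
        System.out.println(filter.contains(Slices.utf8Slice("delta"))); // false with high probability
    }
}

False positives are safe in this context because a dynamic filter only needs to be conservative: letting an extra row through merely defers its elimination to the join, whereas dropping a matching row would be incorrect, and a Bloom filter never does that.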

core/trino-main/src/main/java/io/trino/sql/gen/columnar/DynamicPageFilter.java

Lines changed: 21 additions & 14 deletions
@@ -41,6 +41,8 @@
 
 import static com.google.common.collect.ImmutableList.toImmutableList;
 import static com.google.common.collect.ImmutableMap.toImmutableMap;
+import static io.trino.sql.gen.columnar.BloomFilter.canUseBloomFilter;
+import static io.trino.sql.gen.columnar.BloomFilter.createBloomFilterEvaluator;
 import static io.trino.sql.gen.columnar.FilterEvaluator.createColumnarFilterEvaluator;
 import static io.trino.sql.ir.optimizer.IrExpressionOptimizer.newOptimizer;
 import static io.trino.sql.relational.SqlToRowExpressionTranslator.translate;
@@ -107,28 +109,33 @@ public synchronized Supplier<FilterEvaluator> createDynamicPageFilterEvaluator(C
             isBlocked = dynamicFilter.isBlocked();
             boolean isAwaitable = dynamicFilter.isAwaitable();
             TupleDomain<Symbol> currentPredicate = dynamicFilter.getCurrentPredicate().transformKeys(columnHandles::get);
-            List<Expression> expressionConjuncts = domainTranslator.toPredicateConjuncts(currentPredicate)
-                    .stream()
-                    // Run the expression derived from TupleDomain through IR optimizer to simplify predicates. E.g. SimplifyContinuousInValues
-                    .map(expression -> irExpressionOptimizer.process(expression, session, ImmutableMap.of()).orElse(expression))
-                    .collect(toImmutableList());
-            // We translate each conjunct into separate RowExpression to make it easy to profile selectivity
-            // of dynamic filter per column and drop them if they're ineffective
-            List<RowExpression> rowExpression = expressionConjuncts.stream()
-                    .map(expression -> translate(expression, sourceLayout, metadata, typeManager))
-                    .collect(toImmutableList());
-            compiledDynamicFilter = createDynamicFilterEvaluator(rowExpression, compiler, selectivityThreshold);
+            compiledDynamicFilter = createDynamicFilterEvaluator(compiler, currentPredicate);
             if (!isAwaitable) {
                 isBlocked = null; // Dynamic filter will not narrow down anymore
             }
         }
         return compiledDynamicFilter;
     }
 
-    private static Supplier<FilterEvaluator> createDynamicFilterEvaluator(List<RowExpression> rowExpressions, ColumnarFilterCompiler compiler, double selectivityThreshold)
+    private Supplier<FilterEvaluator> createDynamicFilterEvaluator(ColumnarFilterCompiler compiler, TupleDomain<Symbol> currentPredicate)
     {
-        List<Supplier<FilterEvaluator>> subExpressionEvaluators = rowExpressions.stream()
-                .map(expression -> createColumnarFilterEvaluator(expression, compiler))
+        if (currentPredicate.isNone()) {
+            return SelectNoneEvaluator::new;
+        }
+        // We translate each conjunct into separate FilterEvaluator to make it easy to profile selectivity
+        // of dynamic filter per column and drop them if they're ineffective
+        List<Supplier<FilterEvaluator>> subExpressionEvaluators = currentPredicate.getDomains().orElseThrow()
+                .entrySet().stream()
+                .map(entry -> {
+                    if (canUseBloomFilter(entry.getValue())) {
+                        return Optional.of(createBloomFilterEvaluator(entry.getValue(), sourceLayout.get(entry.getKey())));
+                    }
+                    Expression expression = domainTranslator.toPredicate(entry.getValue(), entry.getKey().toSymbolReference());
+                    // Run the expression derived from TupleDomain through IR optimizer to simplify predicates. E.g. SimplifyContinuousInValues
+                    expression = irExpressionOptimizer.process(expression, session, ImmutableMap.of()).orElse(expression);
+                    RowExpression rowExpression = translate(expression, sourceLayout, metadata, typeManager);
+                    return createColumnarFilterEvaluator(rowExpression, compiler);
+                })
                 .filter(Optional::isPresent)
                 .map(Optional::get)
                 .collect(toImmutableList());
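To make the dispatch above concrete, here is a sketch of which domains satisfy canUseBloomFilter; the example domains are assumptions chosen for illustration, not taken from the commit.

// Illustrative eligibility check for the Bloom filter path added in this commit.
import io.trino.spi.predicate.Domain;
import io.trino.spi.predicate.Range;
import io.trino.spi.predicate.ValueSet;

import java.util.List;

import static io.airlift.slice.Slices.utf8Slice;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static io.trino.sql.gen.columnar.BloomFilter.canUseBloomFilter;

public class CanUseBloomFilterExample
{
    public static void main(String[] args)
    {
        // Discrete set of VARCHAR values: Slice-backed, so it takes the Bloom filter path
        Domain varcharValues = Domain.multipleValues(VARCHAR, List.of(utf8Slice("a"), utf8Slice("b")));
        // Discrete set of BIGINT values: not Slice-backed, keeps the compiled columnar filter path
        Domain bigintValues = Domain.multipleValues(BIGINT, List.of(1L, 2L));
        // VARCHAR range predicate: not a discrete set, also keeps the compiled path
        Domain varcharRange = Domain.create(ValueSet.ofRanges(Range.greaterThan(VARCHAR, utf8Slice("a"))), false);

        System.out.println(canUseBloomFilter(varcharValues)); // true
        System.out.println(canUseBloomFilter(bigintValues));  // false
        System.out.println(canUseBloomFilter(varcharRange));  // false
    }
}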

core/trino-main/src/main/java/io/trino/sql/planner/DomainTranslator.java

Lines changed: 2 additions & 2 deletions
@@ -119,7 +119,7 @@ public Expression toPredicate(TupleDomain<Symbol> tupleDomain)
         return IrUtils.combineConjuncts(toPredicateConjuncts(tupleDomain));
     }
 
-    public List<Expression> toPredicateConjuncts(TupleDomain<Symbol> tupleDomain)
+    private List<Expression> toPredicateConjuncts(TupleDomain<Symbol> tupleDomain)
     {
         if (tupleDomain.isNone()) {
             return ImmutableList.of(FALSE);
@@ -132,7 +132,7 @@ public List<Expression> toPredicateConjuncts(TupleDomain<Symbol> tupleDomain)
                 .collect(toImmutableList());
     }
 
-    private Expression toPredicate(Domain domain, Reference reference)
+    public Expression toPredicate(Domain domain, Reference reference)
     {
         if (domain.getValues().isNone()) {
             return domain.isNullAllowed() ? new IsNull(reference) : FALSE;

core/trino-main/src/test/java/io/trino/sql/gen/BenchmarkDynamicPageFilter.java

Lines changed: 17 additions & 13 deletions
@@ -15,6 +15,7 @@
 
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.airlift.slice.Slices;
 import io.trino.FullConnectorSession;
 import io.trino.operator.project.SelectedPositions;
 import io.trino.spi.Page;
@@ -43,15 +44,14 @@
 import java.util.Random;
 import java.util.concurrent.TimeUnit;
 
-import static com.google.common.collect.ImmutableList.toImmutableList;
 import static io.trino.jmh.Benchmarks.benchmark;
 import static io.trino.operator.project.SelectedPositions.positionsRange;
-import static io.trino.spi.predicate.Domain.DiscreteSet;
 import static io.trino.spi.type.BigintType.BIGINT;
 import static io.trino.spi.type.IntegerType.INTEGER;
 import static io.trino.spi.type.RealType.REAL;
 import static io.trino.spi.type.TypeUtils.readNativeValue;
 import static io.trino.spi.type.TypeUtils.writeNativeValue;
+import static io.trino.spi.type.VarcharType.VARCHAR;
 import static io.trino.testing.TestingSession.testSessionBuilder;
 import static io.trino.util.DynamicFiltersTestUtil.createDynamicFilterEvaluator;
 import static java.lang.Float.floatToIntBits;
@@ -65,18 +65,19 @@
 @Measurement(iterations = 10, time = 500, timeUnit = TimeUnit.MILLISECONDS)
 public class BenchmarkDynamicPageFilter
 {
-    private static final int MAX_ROWS = 200_000;
+    private static final int MAX_ROWS = 400_000;
     private static final FullConnectorSession FULL_CONNECTOR_SESSION = new FullConnectorSession(
            testSessionBuilder().build(),
            ConnectorIdentity.ofUser("test"));
+    private static final ColumnHandle COLUMN_HANDLE = new TestingColumnHandle("dummy");
 
     @Param("0.05")
     public double inputNullChance = 0.05;
 
     @Param("0.2")
     public double nonNullsSelectivity = 0.2;
 
-    @Param({"100", "1000", "5000"})
+    @Param({"100", "1000", "10000"})
     public int filterSize = 100;
 
     @Param("false")
@@ -87,6 +88,7 @@ public class BenchmarkDynamicPageFilter
            "INT64_RANDOM",
            "INT64_FIXED_32K", // LongBitSetFilter
            "REAL_RANDOM",
+            "VARCHAR_RANDOM", // BloomFilter
    })
    public DataSet inputDataSet;
 
@@ -99,6 +101,11 @@ public enum DataSet
        INT64_RANDOM(BIGINT, (block, r) -> BIGINT.writeLong(block, r.nextLong())),
        INT64_FIXED_32K(BIGINT, (block, r) -> BIGINT.writeLong(block, r.nextLong() % 32768)),
        REAL_RANDOM(REAL, (block, r) -> REAL.writeLong(block, floatToIntBits(r.nextFloat()))),
+        VARCHAR_RANDOM(VARCHAR, (block, r) -> {
+            byte[] buffer = new byte[r.nextInt(10, 15)];
+            r.nextBytes(buffer);
+            VARCHAR.writeSlice(block, Slices.wrappedBuffer(buffer, 0, buffer.length));
+        }),
        /**/;
 
        private final Type type;
@@ -121,7 +128,7 @@ public TupleDomain<ColumnHandle> createFilterTupleDomain(int filterSize, boolean
                }
            }
            return TupleDomain.withColumnDomains(ImmutableMap.of(
-                    new TestingColumnHandle("dummy"),
+                    COLUMN_HANDLE,
                    Domain.create(ValueSet.copyOf(type, valuesBuilder.build()), nullsAllowed)));
        }
 
@@ -132,12 +139,9 @@ private List<Page> createInputTestData(
                long inputRows)
        {
            List<Object> nonNullValues = filter.getDomains().orElseThrow()
-                    .values().stream()
-                    .flatMap(domain -> {
-                        DiscreteSet nullableDiscreteSet = domain.getNullableDiscreteSet();
-                        return nullableDiscreteSet.getNonNullValues().stream();
-                    })
-                    .collect(toImmutableList());
+                    .get(COLUMN_HANDLE)
+                    .getNullableDiscreteSet()
+                    .getNonNullValues();
 
            // pick a random value from the filter
            return createSingleColumnData(
@@ -163,7 +167,7 @@ public void setup()
        inputData = inputDataSet.createInputTestData(filterPredicate, inputNullChance, nonNullsSelectivity, MAX_ROWS);
        filterEvaluator = createDynamicFilterEvaluator(
                filterPredicate,
-                ImmutableMap.of(new TestingColumnHandle("dummy"), 0),
+                ImmutableMap.of(COLUMN_HANDLE, 0),
                1);
    }
 
@@ -199,7 +203,7 @@ private static List<Page> createSingleColumnData(ValueWriter valueWriter, Type t
            if (blockBuilder.getPositionCount() >= batchSize) {
                Block block = blockBuilder.build();
                pages.add(new Page(new LazyBlock(block.getPositionCount(), () -> block)));
-                batchSize = Math.min(1024, batchSize * 2);
+                batchSize = Math.min(8192, batchSize * 2);
                blockBuilder = type.createBlockBuilder(null, batchSize);
            }
        }
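The benchmark class already imports io.trino.jmh.Benchmarks.benchmark, so it is presumably launched the same way as other Trino JMH benchmarks; a sketch of such a launcher, under that assumption, looks like this.

// Hypothetical launcher; assumes the usual Benchmarks.benchmark(...).run() helper used by Trino benchmarks.
public static void main(String[] args) throws Exception
{
    benchmark(BenchmarkDynamicPageFilter.class).run();
}

With the parameters in this diff, the VARCHAR_RANDOM data set generates random strings of 10 to 14 bytes, and the default filterSize sweep now goes up to 10000 instead of 5000.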
