-
Notifications
You must be signed in to change notification settings - Fork 2.3k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Use compatible column name to set Parquet bloom filter #11799
base: main
Are you sure you want to change the base?
Changes from 1 commit
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change | ||||
---|---|---|---|---|---|---|
|
@@ -55,6 +55,7 @@ | |||||
import java.util.Locale; | ||||||
import java.util.Map; | ||||||
import java.util.Objects; | ||||||
import java.util.function.BiConsumer; | ||||||
import java.util.function.Function; | ||||||
import java.util.stream.Collectors; | ||||||
import java.util.stream.IntStream; | ||||||
|
@@ -95,6 +96,7 @@ | |||||
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; | ||||||
import org.apache.iceberg.relocated.com.google.common.collect.Maps; | ||||||
import org.apache.iceberg.relocated.com.google.common.collect.Sets; | ||||||
import org.apache.iceberg.types.Types; | ||||||
import org.apache.iceberg.util.ArrayUtil; | ||||||
import org.apache.iceberg.util.ByteBuffers; | ||||||
import org.apache.iceberg.util.PropertyUtil; | ||||||
|
@@ -115,8 +117,12 @@ | |||||
import org.apache.parquet.hadoop.api.WriteSupport; | ||||||
import org.apache.parquet.hadoop.metadata.CompressionCodecName; | ||||||
import org.apache.parquet.schema.MessageType; | ||||||
import org.slf4j.Logger; | ||||||
import org.slf4j.LoggerFactory; | ||||||
|
||||||
public class Parquet { | ||||||
private static final Logger LOG = LoggerFactory.getLogger(Parquet.class); | ||||||
|
||||||
private Parquet() {} | ||||||
|
||||||
private static final Collection<String> READ_PROPERTIES_TO_REMOVE = | ||||||
|
@@ -266,6 +272,43 @@ private WriteBuilder createContextFunc( | |||||
return this; | ||||||
} | ||||||
|
||||||
private <T> void setBloomFilterConfig( | ||||||
Context context, | ||||||
MessageType parquetSchema, | ||||||
BiConsumer<String, Boolean> withBloomFilterEnabled, | ||||||
BiConsumer<String, Double> withBloomFilterFPP) { | ||||||
|
||||||
Map<Integer, String> fieldIdToParquetPath = | ||||||
parquetSchema.getColumns().stream() | ||||||
.collect( | ||||||
Collectors.toMap( | ||||||
col -> col.getPrimitiveType().getId().intValue(), | ||||||
col -> String.join(".", col.getPath()))); | ||||||
|
||||||
context | ||||||
.columnBloomFilterEnabled() | ||||||
.forEach( | ||||||
(colPath, isEnabled) -> { | ||||||
Types.NestedField fieldId = schema.caseInsensitiveFindField(colPath); | ||||||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. [doubt] Does case sensitivity matter? Can this:
be applied to Parquet files whose schema contains mixed-case column names? If not, should we explicitly lowercase the column names after deriving the configs? There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Sensitivity matters. I changed to caseInsensitiveFindField. |
||||||
if (fieldId == null) { | ||||||
LOG.warn("Skipping bloom filter config for missing field: {}", colPath); | ||||||
return; | ||||||
} | ||||||
|
||||||
String parquetColumnPath = fieldIdToParquetPath.get(fieldId.fieldId()); | ||||||
if (parquetColumnPath == null) { | ||||||
LOG.warn("Skipping bloom filter config for missing field: {}", fieldId); | ||||||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Should we update this message to say something like:
Suggested change
This suggestion mostly comes from the two log lines above being nearly identical, except that one logs colPath and the other logs fieldId. There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Fixed. Thanks! |
||||||
return; | ||||||
} | ||||||
|
||||||
withBloomFilterEnabled.accept(parquetColumnPath, Boolean.valueOf(isEnabled)); | ||||||
String fpp = context.columnBloomFilterFpp().get(colPath); | ||||||
if (fpp != null) { | ||||||
withBloomFilterFPP.accept(parquetColumnPath, Double.parseDouble(fpp)); | ||||||
} | ||||||
}); | ||||||
} | ||||||
|
||||||
public <D> FileAppender<D> build() throws IOException { | ||||||
Preconditions.checkNotNull(schema, "Schema is required"); | ||||||
Preconditions.checkNotNull(name, "Table name is required and cannot be null"); | ||||||
|
@@ -285,8 +328,6 @@ public <D> FileAppender<D> build() throws IOException { | |||||
int rowGroupCheckMinRecordCount = context.rowGroupCheckMinRecordCount(); | ||||||
int rowGroupCheckMaxRecordCount = context.rowGroupCheckMaxRecordCount(); | ||||||
int bloomFilterMaxBytes = context.bloomFilterMaxBytes(); | ||||||
Map<String, String> columnBloomFilterFpp = context.columnBloomFilterFpp(); | ||||||
Map<String, String> columnBloomFilterEnabled = context.columnBloomFilterEnabled(); | ||||||
boolean dictionaryEnabled = context.dictionaryEnabled(); | ||||||
|
||||||
if (compressionLevel != null) { | ||||||
|
@@ -343,17 +384,8 @@ public <D> FileAppender<D> build() throws IOException { | |||||
.withMaxRowCountForPageSizeCheck(rowGroupCheckMaxRecordCount) | ||||||
.withMaxBloomFilterBytes(bloomFilterMaxBytes); | ||||||
|
||||||
for (Map.Entry<String, String> entry : columnBloomFilterEnabled.entrySet()) { | ||||||
String colPath = entry.getKey(); | ||||||
String bloomEnabled = entry.getValue(); | ||||||
propsBuilder.withBloomFilterEnabled(colPath, Boolean.parseBoolean(bloomEnabled)); | ||||||
} | ||||||
|
||||||
for (Map.Entry<String, String> entry : columnBloomFilterFpp.entrySet()) { | ||||||
String colPath = entry.getKey(); | ||||||
String fpp = entry.getValue(); | ||||||
propsBuilder.withBloomFilterFPP(colPath, Double.parseDouble(fpp)); | ||||||
} | ||||||
setBloomFilterConfig( | ||||||
context, type, propsBuilder::withBloomFilterEnabled, propsBuilder::withBloomFilterFPP); | ||||||
|
||||||
ParquetProperties parquetProperties = propsBuilder.build(); | ||||||
|
||||||
|
@@ -386,17 +418,11 @@ public <D> FileAppender<D> build() throws IOException { | |||||
.withDictionaryPageSize(dictionaryPageSize) | ||||||
.withEncryption(fileEncryptionProperties); | ||||||
|
||||||
for (Map.Entry<String, String> entry : columnBloomFilterEnabled.entrySet()) { | ||||||
String colPath = entry.getKey(); | ||||||
String bloomEnabled = entry.getValue(); | ||||||
parquetWriteBuilder.withBloomFilterEnabled(colPath, Boolean.parseBoolean(bloomEnabled)); | ||||||
} | ||||||
|
||||||
for (Map.Entry<String, String> entry : columnBloomFilterFpp.entrySet()) { | ||||||
String colPath = entry.getKey(); | ||||||
String fpp = entry.getValue(); | ||||||
parquetWriteBuilder.withBloomFilterFPP(colPath, Double.parseDouble(fpp)); | ||||||
} | ||||||
setBloomFilterConfig( | ||||||
context, | ||||||
type, | ||||||
parquetWriteBuilder::withBloomFilterEnabled, | ||||||
parquetWriteBuilder::withBloomFilterFPP); | ||||||
|
||||||
return new ParquetWriteAdapter<>(parquetWriteBuilder.build(), metricsConfig); | ||||||
} | ||||||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
[question] Do we need to do anything when isEnabled is false? Or can Parquet proactively decide whether a column should have a bloom filter, with isEnabled being false serving as an explicit deny?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
If isEnabled is true, Iceberg will call withBloomFilterEnabled(String columnPath, boolean enabled). If isEnabled is false, we don't need to do anything.