Skip to content

Commit

Permalink
Fixed apache#4 - Obtain ORC stripe offsets from the writer instead of opening the written file for the offsets
Browse files Browse the repository at this point in the history
  • Loading branch information
pavibhai committed Sep 16, 2022
1 parent 0dd66f1 commit fb5a22c
Show file tree
Hide file tree
Showing 2 changed files with 14 additions and 4 deletions.
7 changes: 3 additions & 4 deletions orc/src/main/java/org/apache/iceberg/orc/OrcFileAppender.java
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,6 @@
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.orc.OrcFile;
import org.apache.orc.Reader;
import org.apache.orc.StripeInformation;
import org.apache.orc.TypeDescription;
import org.apache.orc.Writer;
Expand Down Expand Up @@ -147,11 +146,11 @@ public long length() {
@Override
public List<Long> splitOffsets() {
Preconditions.checkState(isClosed, "File is not yet closed");
try (Reader reader = ORC.newFileReader(file.toInputFile(), conf)) {
List<StripeInformation> stripes = reader.getStripes();
try {
List<StripeInformation> stripes = writer.getStripes();
return Collections.unmodifiableList(Lists.transform(stripes, StripeInformation::getOffset));
} catch (IOException e) {
throw new RuntimeIOException(e, "Can't close ORC reader %s", file.location());
throw new RuntimeIOException(e, "Cannot receive stripe information from writer for %s", file.location());
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,11 +22,17 @@

import java.io.File;
import java.io.IOException;
import java.util.stream.Collectors;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.iceberg.Files;
import org.apache.iceberg.Schema;
import org.apache.iceberg.io.FileAppender;
import org.apache.iceberg.orc.ORC;
import org.apache.iceberg.types.Types;
import org.apache.orc.OrcFile;
import org.apache.orc.Reader;
import org.apache.orc.StripeInformation;
import org.apache.spark.sql.catalyst.InternalRow;
import org.junit.Assert;
import org.junit.Rule;
Expand Down Expand Up @@ -55,5 +61,10 @@ public void splitOffsets() throws IOException {
writer.addAll(rows);
writer.close();
Assert.assertNotNull("Split offsets not present", writer.splitOffsets());
// writer offsets are the same as the ORC reader offsets
Reader reader = OrcFile.createReader(new Path(testFile.toURI()), OrcFile.readerOptions(new Configuration()));
Assert.assertEquals(reader.getStripes().stream().map(StripeInformation::getOffset).collect(Collectors.toList()),
writer.splitOffsets());
reader.close();
}
}

0 comments on commit fb5a22c

Please sign in to comment.