From 1a6026654ca216f5b01b80a48d30cae1314d9fe0 Mon Sep 17 00:00:00 2001
From: chinmay-bhat <12948588+chinmay-bhat@users.noreply.github.com>
Date: Wed, 10 Jan 2024 00:09:08 +0530
Subject: [PATCH] small fixes

---
 .../spark/source/TestCompressionSettings.java      |  2 +-
 .../iceberg/spark/source/TestFilteredScan.java     |  3 +--
 .../spark/source/TestPositionDeletesTable.java     | 16 +++++++++++-----
 .../iceberg/spark/source/TestReadProjection.java   |  4 ++++
 .../spark/source/TestSparkReadProjection.java      |  3 ---
 5 files changed, 17 insertions(+), 11 deletions(-)

diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestCompressionSettings.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestCompressionSettings.java
index 2cafea827a95..34315bc8a991 100644
--- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestCompressionSettings.java
+++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestCompressionSettings.java
@@ -94,7 +94,7 @@ public class TestCompressionSettings extends CatalogTestBase {
   private FileFormat format;
 
   @Parameter(index = 4)
-  private ImmutableMap<String, String> properties;
+  private Map<String, String> properties;
 
   @TempDir private java.nio.file.Path temp;
 
diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java
index 05f5ce0c1539..348173596e46 100644
--- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java
+++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java
@@ -287,7 +287,6 @@ public void testBucketPartitionedIDFilters() {
       InputPartition[] tasks = scan.planInputPartitions();
 
       // validate predicate push-down
-
       assertThat(tasks).as("Should only create one task for a single bucket").hasSize(1);
 
       // validate row filtering
@@ -317,7 +316,7 @@ public void testDayPartitionedTimestampFilters() {
       Batch scan = builder.build().toBatch();
 
       InputPartition[] tasks = scan.planInputPartitions();
-      assertThat(tasks).as("Should only create one task for 2017-12-21").hasSize(1);
+      assertThat(tasks).as("Should create one task for 2017-12-21").hasSize(1);
 
       assertEqualsSafe(
           SCHEMA.asStruct(),
diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPositionDeletesTable.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPositionDeletesTable.java
index 0b152939e7ec..35d628a3832e 100644
--- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPositionDeletesTable.java
+++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPositionDeletesTable.java
@@ -1098,7 +1098,14 @@ public void testWritePartitionEvolutionDisallowed() throws Exception {
                 .writeTo(posDeletesTableName)
                 .option(SparkWriteOptions.REWRITTEN_FILE_SCAN_TASK_SET_ID, fileSetID)
                 .append())
-        .isInstanceOf(AnalysisException.class);
+        .isInstanceOf(AnalysisException.class)
+        .hasMessage(
+            "[INCOMPATIBLE_DATA_FOR_TABLE.CANNOT_FIND_DATA] Cannot write incompatible data for the table `"
+                + catalogName
+                + "`.`default`.`"
+                + tableName
+                + "`.`position_deletes`"
+                + ": Cannot find data for the output column `partition`.");
 
     dropTable(tableName);
   }
@@ -1321,10 +1328,9 @@ public void testNormalWritesNotAllowed() throws IOException {
 
     Dataset<Row> scanDF = spark.read().format("iceberg").load(posDeletesTableName);
 
-    assertThatThrownBy(
-            () -> scanDF.writeTo(posDeletesTableName).append(),
-            "position_deletes table can only be written by RewriteDeleteFiles")
-        .isInstanceOf(IllegalArgumentException.class);
+    assertThatThrownBy(() -> scanDF.writeTo(posDeletesTableName).append())
+        .isInstanceOf(IllegalArgumentException.class)
+        .hasMessage("Can only write to " + posDeletesTableName + " via actions");
 
     dropTable(tableName);
   }
diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestReadProjection.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestReadProjection.java
index 5743fcc7ee8f..5f59c8eef4ba 100644
--- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestReadProjection.java
+++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestReadProjection.java
@@ -27,6 +27,8 @@
 import java.nio.file.Path;
 import java.util.List;
 import java.util.Map;
+import org.apache.iceberg.FileFormat;
+import org.apache.iceberg.Parameter;
 import org.apache.iceberg.ParameterizedTestExtension;
 import org.apache.iceberg.Schema;
 import org.apache.iceberg.data.GenericRecord;
@@ -44,6 +46,8 @@
 
 @ExtendWith(ParameterizedTestExtension.class)
 public abstract class TestReadProjection {
+  @Parameter(index = 0)
+  protected FileFormat format;
 
   protected abstract Record writeAndRead(
       String desc, Schema writeSchema, Schema readSchema, Record record) throws IOException;
diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java
index 9870f071250b..99a327402d97 100644
--- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java
+++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java
@@ -75,9 +75,6 @@ public static Object[][] parameters() {
     };
   }
 
-  @Parameter(index = 0)
-  private FileFormat format;
-
   @Parameter(index = 1)
   private boolean vectorized;
 
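
Note (not part of the patch): the TestPositionDeletesTable changes above replace the two-argument
assertThatThrownBy(callable, description) form with an explicit isInstanceOf().hasMessage() chain.
In AssertJ, the String in the two-argument overload is only a failure *description* shown when the
assertion fails; it is never compared against the thrown exception's message, so the old test
compiled but did not actually verify the message. A minimal stand-alone sketch of the difference,
assuming assertj-core is on the classpath; the class name and message strings here are made up for
illustration and are not Iceberg code:

import static org.assertj.core.api.Assertions.assertThatThrownBy;

public class ThrownByDemo {
  public static void main(String[] args) {
    // Before: the second argument is an AssertJ description, reported only on
    // failure; the exception message itself is left unchecked.
    assertThatThrownBy(
            () -> {
              throw new IllegalArgumentException("Can only write via actions");
            },
            "this string is a description, not an expected message")
        .isInstanceOf(IllegalArgumentException.class);

    // After: hasMessage() actually asserts on the exception message, so a
    // wording change in the error surfaces as a test failure.
    assertThatThrownBy(
            () -> {
              throw new IllegalArgumentException("Can only write via actions");
            })
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Can only write via actions");

    System.out.println("both assertions passed");
  }
}

The same reasoning applies to the testWritePartitionEvolutionDisallowed change, which pins the full
Spark AnalysisException message instead of checking only the exception type.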