Commit 1a60266: small fixes

chinmay-bhat committed Jan 9, 2024 · 1 parent e7d407b
Showing 5 changed files with 17 additions and 11 deletions.
@@ -94,7 +94,7 @@ public class TestCompressionSettings extends CatalogTestBase {
   private FileFormat format;

   @Parameter(index = 4)
-  private ImmutableMap<String, String> properties;
+  private Map<String, String> properties;

   @TempDir private java.nio.file.Path temp;

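Widening the field from Guava's ImmutableMap to the Map interface means any map supplied by the parameter source can be injected, mutable or not. A minimal sketch of the idea, assuming Iceberg's ParameterizedTestExtension conventions; the parameter layout and class name here are hypothetical, not the real ones from TestCompressionSettings:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.iceberg.Parameter;
    import org.apache.iceberg.ParameterizedTestExtension;
    import org.apache.iceberg.Parameters;
    import org.junit.jupiter.api.extension.ExtendWith;

    @ExtendWith(ParameterizedTestExtension.class)
    class PropertiesSketch {
      @Parameter(index = 0)
      private Map<String, String> properties; // interface type: any Map is injectable

      @Parameters(name = "properties = {0}")
      public static Object[][] parameters() {
        Map<String, String> mutable = new HashMap<>();
        mutable.put("write.parquet.compression-codec", "zstd");
        return new Object[][] {
          {Map.of("write.parquet.compression-codec", "gzip")}, // immutable, as before
          {mutable}                                            // mutable, also fine now
        };
      }
    }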
@@ -287,7 +287,6 @@ public void testBucketPartitionedIDFilters() {
     InputPartition[] tasks = scan.planInputPartitions();

     // validate predicate push-down
-
     assertThat(tasks).as("Should only create one task for a single bucket").hasSize(1);

     // validate row filtering
@@ -317,7 +316,7 @@ public void testDayPartitionedTimestampFilters() {
     Batch scan = builder.build().toBatch();

     InputPartition[] tasks = scan.planInputPartitions();
-    assertThat(tasks).as("Should only create one task for 2017-12-21").hasSize(1);
+    assertThat(tasks).as("Should create one task for 2017-12-21").hasSize(1);

     assertEqualsSafe(
         SCHEMA.asStruct(),
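Both hunks in this file only adjust AssertJ failure descriptions; as(...) sets the text that prefixes the error message when the assertion fails, so it should state the expectation plainly. A small self-contained illustration of where that text surfaces (class and values are made up):

    import static org.assertj.core.api.Assertions.assertThat;

    import java.util.List;

    class DescriptionSketch {
      public static void main(String[] args) {
        List<String> tasks = List.of("task-for-2017-12-21");
        // Passes here; had the planner produced extra tasks, the failure would read:
        // [Should create one task for 2017-12-21] Expected size: 1 but was: 2 ...
        assertThat(tasks).as("Should create one task for 2017-12-21").hasSize(1);
      }
    }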
@@ -1098,7 +1098,14 @@ public void testWritePartitionEvolutionDisallowed() throws Exception {
             .writeTo(posDeletesTableName)
             .option(SparkWriteOptions.REWRITTEN_FILE_SCAN_TASK_SET_ID, fileSetID)
             .append())
-        .isInstanceOf(AnalysisException.class);
+        .isInstanceOf(AnalysisException.class)
+        .hasMessage(
+            "[INCOMPATIBLE_DATA_FOR_TABLE.CANNOT_FIND_DATA] Cannot write incompatible data for the table `"
+                + catalogName
+                + "`.`default`.`"
+                + tableName
+                + "`.`position_deletes`"
+                + ": Cannot find data for the output column `partition`.");

     dropTable(tableName);
   }
@@ -1321,10 +1328,9 @@ public void testNormalWritesNotAllowed() throws IOException {

     Dataset<Row> scanDF = spark.read().format("iceberg").load(posDeletesTableName);

-    assertThatThrownBy(
-            () -> scanDF.writeTo(posDeletesTableName).append(),
-            "position_deletes table can only be written by RewriteDeleteFiles")
-        .isInstanceOf(IllegalArgumentException.class);
+    assertThatThrownBy(() -> scanDF.writeTo(posDeletesTableName).append())
+        .isInstanceOf(IllegalArgumentException.class)
+        .hasMessage("Can only write to " + posDeletesTableName + " via actions");

     dropTable(tableName);
   }
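The second hunk here fixes a classic AssertJ trap: in the two-argument form of assertThatThrownBy, the String is only a description printed if the assertion fails; it never checks the exception's message, so any IllegalArgumentException would have passed. Chaining hasMessage(...) performs the real check. A self-contained sketch of the difference (exception and wording are illustrative, not the production code):

    import static org.assertj.core.api.Assertions.assertThatThrownBy;

    class ThrownBySketch {
      static void write() {
        throw new IllegalArgumentException("Can only write to t via actions");
      }

      public static void main(String[] args) {
        // Before: the String is a failure description, not an expected message,
        // so the thrown message is never verified.
        assertThatThrownBy(ThrownBySketch::write, "table can only be written via actions")
            .isInstanceOf(IllegalArgumentException.class);

        // After: the message itself is asserted.
        assertThatThrownBy(ThrownBySketch::write)
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessage("Can only write to t via actions");
      }
    }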
@@ -27,6 +27,8 @@
 import java.nio.file.Path;
 import java.util.List;
 import java.util.Map;
+import org.apache.iceberg.FileFormat;
+import org.apache.iceberg.Parameter;
 import org.apache.iceberg.ParameterizedTestExtension;
 import org.apache.iceberg.Schema;
 import org.apache.iceberg.data.GenericRecord;
@@ -44,6 +46,8 @@

 @ExtendWith(ParameterizedTestExtension.class)
 public abstract class TestReadProjection {
+  @Parameter(index = 0)
+  protected FileFormat format;

   protected abstract Record writeAndRead(
       String desc, Schema writeSchema, Schema readSchema, Record record) throws IOException;
@@ -75,9 +75,6 @@ public static Object[][] parameters() {
     };
   }

-  @Parameter(index = 0)
-  private FileFormat format;
-
   @Parameter(index = 1)
   private boolean vectorized;

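The last two files are two halves of one refactor: the format parameter is pulled up into the abstract TestReadProjection base class, and the concrete class drops its now-duplicated declaration, keeping only the parameters it owns. Schematically, under the same assumed extension conventions (class names and the example method below are hypothetical; only format and vectorized come from the diff):

    import org.apache.iceberg.FileFormat;
    import org.apache.iceberg.Parameter;
    import org.apache.iceberg.ParameterizedTestExtension;
    import org.junit.jupiter.api.extension.ExtendWith;

    @ExtendWith(ParameterizedTestExtension.class)
    abstract class BaseProjectionSketch {
      @Parameter(index = 0)
      protected FileFormat format; // protected, so every subclass sees the injected value
    }

    class VectorizedProjectionSketch extends BaseProjectionSketch {
      @Parameter(index = 1)
      private boolean vectorized; // the subclass declares only its own parameters

      void example() {
        System.out.println(format + ", vectorized=" + vectorized);
      }
    }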
