diff --git a/fe/fe-core/pom.xml b/fe/fe-core/pom.xml
index 4216d1ba819eb..09db604c87356 100644
--- a/fe/fe-core/pom.xml
+++ b/fe/fe-core/pom.xml
@@ -39,7 +39,7 @@ under the License.
${env.FE_UT_PARALLEL}
0.8.7
1.6.0
- <paimon.version>0.8.2</paimon.version>
+ <paimon.version>1.0.0</paimon.version>
4.0.0rc1
3.4-rc2
python
diff --git a/fe/fe-core/src/main/java/com/starrocks/connector/paimon/PaimonMetadata.java b/fe/fe-core/src/main/java/com/starrocks/connector/paimon/PaimonMetadata.java
index 80a7cb06fae15..ee38b78268c8d 100644
--- a/fe/fe-core/src/main/java/com/starrocks/connector/paimon/PaimonMetadata.java
+++ b/fe/fe-core/src/main/java/com/starrocks/connector/paimon/PaimonMetadata.java
@@ -190,11 +190,13 @@ public Database getDb(String dbName) {
if (databases.containsKey(dbName)) {
return databases.get(dbName);
}
- if (paimonNativeCatalog.databaseExists(dbName)) {
+ try {
+ // get database from paimon catalog to see if the database exists
+ paimonNativeCatalog.getDatabase(dbName);
Database db = new Database(CONNECTOR_ID_GENERATOR.getNextId().asInt(), dbName);
databases.put(dbName, db);
return db;
- } else {
+ } catch (Catalog.DatabaseNotExistException e) {
LOG.error("Paimon database {}.{} does not exist.", catalogName, dbName);
return null;
}
@@ -235,7 +237,12 @@ public Table getTable(String dbName, String tblName) {
@Override
public boolean tableExists(String dbName, String tableName) {
- return paimonNativeCatalog.tableExists(Identifier.create(dbName, tableName));
+ try {
+ paimonNativeCatalog.getTable(Identifier.create(dbName, tableName));
+ return true;
+ } catch (Catalog.TableNotExistException e) {
+ return false;
+ }
}
@Override
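Note: with this upgrade the existence checks switch from the old boolean Catalog methods (databaseExists/tableExists) to getDatabase/getTable plus the corresponding *NotExistException handlers, as the hunks above show. A minimal sketch of the pattern against a Paimon 1.0 Catalog, with illustrative helper names that are not part of PaimonMetadata:

    import org.apache.paimon.catalog.Catalog;
    import org.apache.paimon.catalog.Identifier;

    // Sketch only: resolve the object and treat the not-exist exception as "absent".
    static boolean databaseExists(Catalog catalog, String dbName) {
        try {
            catalog.getDatabase(dbName);
            return true;
        } catch (Catalog.DatabaseNotExistException e) {
            return false;
        }
    }

    static boolean tableExists(Catalog catalog, String dbName, String tableName) {
        try {
            catalog.getTable(Identifier.create(dbName, tableName));
            return true;
        } catch (Catalog.TableNotExistException e) {
            return false;
        }
    }
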
diff --git a/fe/fe-core/src/test/java/com/starrocks/connector/paimon/PaimonMetadataTest.java b/fe/fe-core/src/test/java/com/starrocks/connector/paimon/PaimonMetadataTest.java
index 71d777ae4293f..07c1802c572b1 100644
--- a/fe/fe-core/src/test/java/com/starrocks/connector/paimon/PaimonMetadataTest.java
+++ b/fe/fe-core/src/test/java/com/starrocks/connector/paimon/PaimonMetadataTest.java
@@ -84,9 +84,9 @@
import java.util.Map;
import static org.apache.paimon.io.DataFileMeta.DUMMY_LEVEL;
-import static org.apache.paimon.io.DataFileMeta.EMPTY_KEY_STATS;
import static org.apache.paimon.io.DataFileMeta.EMPTY_MAX_KEY;
import static org.apache.paimon.io.DataFileMeta.EMPTY_MIN_KEY;
+import static org.apache.paimon.stats.SimpleStats.EMPTY_STATS;
import static org.junit.Assert.assertEquals;
public class PaimonMetadataTest {
@@ -113,14 +113,14 @@ public void setUp() {
writer.complete();
List<DataFileMeta> meta1 = new ArrayList<>();
- meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
- 1, 1, 1, DUMMY_LEVEL, 0L, null));
- meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
- 1, 1, 1, DUMMY_LEVEL, 0L, null));
+ meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
+ 1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));
+ meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
+ 1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));
List<DataFileMeta> meta2 = new ArrayList<>();
- meta2.add(new DataFileMeta("file3", 100, 400, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
- 1, 1, 1, DUMMY_LEVEL, 0L, null));
+ meta2.add(new DataFileMeta("file3", 100, 400, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
+ 1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));
this.splits.add(DataSplit.builder().withSnapshot(1L).withPartition(row1).withBucket(1)
.withBucketPath("not used").withDataFiles(meta1).isStreaming(false).build());
this.splits.add(DataSplit.builder().withSnapshot(1L).withPartition(row2).withBucket(1)
@@ -157,6 +157,7 @@ public long getTableCreateTime(String dbName, String tblName) {
}
};
com.starrocks.catalog.Table table = metadata.getTable("db1", "tbl1");
+ Assert.assertTrue(metadata.tableExists("db1", "tbl1"));
PaimonTable paimonTable = (PaimonTable) table;
Assert.assertEquals("db1", paimonTable.getCatalogDBName());
Assert.assertEquals("tbl1", paimonTable.getCatalogTableName());
@@ -170,17 +171,6 @@ public long getTableCreateTime(String dbName, String tblName) {
Assert.assertEquals("paimon_catalog.db1.tbl1.0", paimonTable.getUUID());
}
- @Test
- public void testTableExists(@Mocked FileStoreTable paimonNativeTable) {
- new Expectations() {
- {
- paimonNativeCatalog.tableExists((Identifier) any);
- result = true;
- }
- };
- Assert.assertTrue(metadata.tableExists("db1", "tbl1"));
- }
-
@Test
public void testListPartitionNames(@Mocked FileStoreTable mockPaimonTable,
@Mocked PartitionsTable mockPartitionTable,
diff --git a/fe/fe-core/src/test/java/com/starrocks/planner/PaimonScanNodeTest.java b/fe/fe-core/src/test/java/com/starrocks/planner/PaimonScanNodeTest.java
index a737202869196..1d23e7c78f03d 100644
--- a/fe/fe-core/src/test/java/com/starrocks/planner/PaimonScanNodeTest.java
+++ b/fe/fe-core/src/test/java/com/starrocks/planner/PaimonScanNodeTest.java
@@ -27,11 +27,9 @@
import com.starrocks.server.GlobalStateMgr;
import mockit.Expectations;
import mockit.Mocked;
-import org.apache.paimon.data.BinaryArray;
import org.apache.paimon.data.BinaryRow;
import org.apache.paimon.data.BinaryRowWriter;
import org.apache.paimon.io.DataFileMeta;
-import org.apache.paimon.stats.BinaryTableStats;
import org.apache.paimon.table.source.DataSplit;
import org.apache.paimon.table.source.DeletionFile;
import org.apache.paimon.table.source.RawFile;
@@ -43,9 +41,9 @@
import java.util.List;
import static org.apache.paimon.io.DataFileMeta.DUMMY_LEVEL;
-import static org.apache.paimon.io.DataFileMeta.EMPTY_KEY_STATS;
import static org.apache.paimon.io.DataFileMeta.EMPTY_MAX_KEY;
import static org.apache.paimon.io.DataFileMeta.EMPTY_MIN_KEY;
+import static org.apache.paimon.stats.SimpleStats.EMPTY_STATS;
public class PaimonScanNodeTest {
@Test
@@ -78,10 +76,10 @@ public void testTotalFileLength(@Mocked PaimonTable table) {
writer.complete();
List<DataFileMeta> meta1 = new ArrayList<>();
- meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
- 1, 1, 1, DUMMY_LEVEL, 0L, null));
- meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
- 1, 1, 1, DUMMY_LEVEL, 0L, null));
+ meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
+ 1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));
+ meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
+ 1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));
DataSplit split = DataSplit.builder().withSnapshot(1L).withPartition(row1).withBucket(1)
.withBucketPath("not used").withDataFiles(meta1).isStreaming(false).build();
@@ -103,10 +101,10 @@ public void testEstimatedLength(@Mocked PaimonTable table) {
writer.complete();
List<DataFileMeta> meta1 = new ArrayList<>();
- meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
- 1, 1, 1, DUMMY_LEVEL, 0L, null));
- meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
- 1, 1, 1, DUMMY_LEVEL, 0L, null));
+ meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
+ 1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));
+ meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
+ 1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));
DataSplit split = DataSplit.builder().withSnapshot(1L).withPartition(row1).withBucket(1)
.withBucketPath("not used").withDataFiles(meta1).isStreaming(false).build();
@@ -134,12 +132,10 @@ public void testSplitRawFileScanRange(@Mocked PaimonTable table, @Mocked RawFile
List<DataFileMeta> meta1 = new ArrayList<>();
- BinaryTableStats dataTableStats
- = new BinaryTableStats(BinaryRow.EMPTY_ROW, BinaryRow.EMPTY_ROW, BinaryArray.fromLongArray(new Long[]{0L}));
- meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, dataTableStats,
- 1, 1, 1, DUMMY_LEVEL, 0L, null));
- meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, dataTableStats,
- 1, 1, 1, DUMMY_LEVEL, 0L, null));
+ meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
+ 1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));
+ meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
+ 1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));
DataSplit split = DataSplit.builder().withSnapshot(1L).withPartition(row1).withBucket(1)
.withBucketPath("not used").withDataFiles(meta1).isStreaming(false).build();
@@ -152,7 +148,7 @@ public void testSplitRawFileScanRange(@Mocked PaimonTable table, @Mocked RawFile
};
desc.setTable(table);
PaimonScanNode scanNode = new PaimonScanNode(new PlanNodeId(0), desc, "XXX");
- DeletionFile deletionFile = new DeletionFile("dummy", 1, 22);
+ DeletionFile deletionFile = new DeletionFile("dummy", 1, 22, 0L);
scanNode.splitRawFileScanRangeLocations(rawFile, deletionFile);
scanNode.splitScanRangeLocations(rawFile, 0, 256 * 1024 * 1024, 64 * 1024 * 1024, null);
scanNode.addSplitScanRangeLocations(split, null, 256 * 1024 * 1024);
diff --git a/java-extensions/paimon-reader/pom.xml b/java-extensions/paimon-reader/pom.xml
index 6f99c82e17802..4b858f7fedf86 100644
--- a/java-extensions/paimon-reader/pom.xml
+++ b/java-extensions/paimon-reader/pom.xml
@@ -13,7 +13,7 @@
${basedir}/../
- <paimon.version>0.8.2</paimon.version>
+ <paimon.version>1.0.0</paimon.version>