Skip to content

Commit

Permalink
[Enhancement] Upgrade paimon to 1.0
Browse files Browse the repository at this point in the history
Signed-off-by: Jiao Mingye <[email protected]>
  • Loading branch information
mxdzs0612 committed Jan 8, 2025
1 parent bce3ff8 commit 7ebadaa
Show file tree
Hide file tree
Showing 5 changed files with 34 additions and 41 deletions.
2 changes: 1 addition & 1 deletion fe/fe-core/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ under the License.
<fe_ut_parallel>${env.FE_UT_PARALLEL}</fe_ut_parallel>
<jacoco.version>0.8.7</jacoco.version>
<iceberg.version>1.6.0</iceberg.version>
<paimon.version>0.8.2</paimon.version>
<paimon.version>1.0.0</paimon.version>
<delta-kernel.version>4.0.0rc1</delta-kernel.version>
<staros.version>3.4-rc2</staros.version>
<python>python</python>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -190,11 +190,13 @@ public Database getDb(String dbName) {
if (databases.containsKey(dbName)) {
return databases.get(dbName);
}
if (paimonNativeCatalog.databaseExists(dbName)) {
try {
// get database from paimon catalog to see if the database exists
paimonNativeCatalog.getDatabase(dbName);
Database db = new Database(CONNECTOR_ID_GENERATOR.getNextId().asInt(), dbName);
databases.put(dbName, db);
return db;
} else {
} catch (Catalog.DatabaseNotExistException e) {
LOG.error("Paimon database {}.{} does not exist.", catalogName, dbName);
return null;
}
Expand Down Expand Up @@ -235,7 +237,12 @@ public Table getTable(String dbName, String tblName) {

@Override
public boolean tableExists(String dbName, String tableName) {
return paimonNativeCatalog.tableExists(Identifier.create(dbName, tableName));
try {
paimonNativeCatalog.getTable(Identifier.create(dbName, tableName));
return true;
} catch (Catalog.TableNotExistException e) {
return false;
}
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -84,9 +84,9 @@
import java.util.Map;

import static org.apache.paimon.io.DataFileMeta.DUMMY_LEVEL;
import static org.apache.paimon.io.DataFileMeta.EMPTY_KEY_STATS;
import static org.apache.paimon.io.DataFileMeta.EMPTY_MAX_KEY;
import static org.apache.paimon.io.DataFileMeta.EMPTY_MIN_KEY;
import static org.apache.paimon.stats.SimpleStats.EMPTY_STATS;
import static org.junit.Assert.assertEquals;

public class PaimonMetadataTest {
Expand All @@ -113,14 +113,14 @@ public void setUp() {
writer.complete();

List<DataFileMeta> meta1 = new ArrayList<>();
meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
1, 1, 1, DUMMY_LEVEL, 0L, null));
meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
1, 1, 1, DUMMY_LEVEL, 0L, null));
meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));
meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));

List<DataFileMeta> meta2 = new ArrayList<>();
meta2.add(new DataFileMeta("file3", 100, 400, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
1, 1, 1, DUMMY_LEVEL, 0L, null));
meta2.add(new DataFileMeta("file3", 100, 400, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));
this.splits.add(DataSplit.builder().withSnapshot(1L).withPartition(row1).withBucket(1)
.withBucketPath("not used").withDataFiles(meta1).isStreaming(false).build());
this.splits.add(DataSplit.builder().withSnapshot(1L).withPartition(row2).withBucket(1)
Expand Down Expand Up @@ -157,6 +157,7 @@ public long getTableCreateTime(String dbName, String tblName) {
}
};
com.starrocks.catalog.Table table = metadata.getTable("db1", "tbl1");
Assert.assertTrue(metadata.tableExists("db1", "tbl1"));
PaimonTable paimonTable = (PaimonTable) table;
Assert.assertEquals("db1", paimonTable.getCatalogDBName());
Assert.assertEquals("tbl1", paimonTable.getCatalogTableName());
Expand All @@ -170,17 +171,6 @@ public long getTableCreateTime(String dbName, String tblName) {
Assert.assertEquals("paimon_catalog.db1.tbl1.0", paimonTable.getUUID());
}

@Test
public void testTableExists(@Mocked FileStoreTable paimonNativeTable) {
new Expectations() {
{
paimonNativeCatalog.tableExists((Identifier) any);
result = true;
}
};
Assert.assertTrue(metadata.tableExists("db1", "tbl1"));
}

@Test
public void testListPartitionNames(@Mocked FileStoreTable mockPaimonTable,
@Mocked PartitionsTable mockPartitionTable,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,11 +27,9 @@
import com.starrocks.server.GlobalStateMgr;
import mockit.Expectations;
import mockit.Mocked;
import org.apache.paimon.data.BinaryArray;
import org.apache.paimon.data.BinaryRow;
import org.apache.paimon.data.BinaryRowWriter;
import org.apache.paimon.io.DataFileMeta;
import org.apache.paimon.stats.BinaryTableStats;
import org.apache.paimon.table.source.DataSplit;
import org.apache.paimon.table.source.DeletionFile;
import org.apache.paimon.table.source.RawFile;
Expand All @@ -43,9 +41,9 @@
import java.util.List;

import static org.apache.paimon.io.DataFileMeta.DUMMY_LEVEL;
import static org.apache.paimon.io.DataFileMeta.EMPTY_KEY_STATS;
import static org.apache.paimon.io.DataFileMeta.EMPTY_MAX_KEY;
import static org.apache.paimon.io.DataFileMeta.EMPTY_MIN_KEY;
import static org.apache.paimon.stats.SimpleStats.EMPTY_STATS;

public class PaimonScanNodeTest {
@Test
Expand Down Expand Up @@ -78,10 +76,10 @@ public void testTotalFileLength(@Mocked PaimonTable table) {
writer.complete();

List<DataFileMeta> meta1 = new ArrayList<>();
meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
1, 1, 1, DUMMY_LEVEL, 0L, null));
meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
1, 1, 1, DUMMY_LEVEL, 0L, null));
meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));
meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));

DataSplit split = DataSplit.builder().withSnapshot(1L).withPartition(row1).withBucket(1)
.withBucketPath("not used").withDataFiles(meta1).isStreaming(false).build();
Expand All @@ -103,10 +101,10 @@ public void testEstimatedLength(@Mocked PaimonTable table) {
writer.complete();

List<DataFileMeta> meta1 = new ArrayList<>();
meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
1, 1, 1, DUMMY_LEVEL, 0L, null));
meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
1, 1, 1, DUMMY_LEVEL, 0L, null));
meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));
meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));

DataSplit split = DataSplit.builder().withSnapshot(1L).withPartition(row1).withBucket(1)
.withBucketPath("not used").withDataFiles(meta1).isStreaming(false).build();
Expand Down Expand Up @@ -134,12 +132,10 @@ public void testSplitRawFileScanRange(@Mocked PaimonTable table, @Mocked RawFile

List<DataFileMeta> meta1 = new ArrayList<>();

BinaryTableStats dataTableStats
= new BinaryTableStats(BinaryRow.EMPTY_ROW, BinaryRow.EMPTY_ROW, BinaryArray.fromLongArray(new Long[]{0L}));
meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, dataTableStats,
1, 1, 1, DUMMY_LEVEL, 0L, null));
meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, dataTableStats,
1, 1, 1, DUMMY_LEVEL, 0L, null));
meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));
meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_STATS, EMPTY_STATS,
1, 1, 1, DUMMY_LEVEL, 0L, null, null, null));

DataSplit split = DataSplit.builder().withSnapshot(1L).withPartition(row1).withBucket(1)
.withBucketPath("not used").withDataFiles(meta1).isStreaming(false).build();
Expand All @@ -152,7 +148,7 @@ public void testSplitRawFileScanRange(@Mocked PaimonTable table, @Mocked RawFile
};
desc.setTable(table);
PaimonScanNode scanNode = new PaimonScanNode(new PlanNodeId(0), desc, "XXX");
DeletionFile deletionFile = new DeletionFile("dummy", 1, 22);
DeletionFile deletionFile = new DeletionFile("dummy", 1, 22, 0L);
scanNode.splitRawFileScanRangeLocations(rawFile, deletionFile);
scanNode.splitScanRangeLocations(rawFile, 0, 256 * 1024 * 1024, 64 * 1024 * 1024, null);
scanNode.addSplitScanRangeLocations(split, null, 256 * 1024 * 1024);
Expand Down
2 changes: 1 addition & 1 deletion java-extensions/paimon-reader/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@

<properties>
<java-extensions.home>${basedir}/../</java-extensions.home>
<paimon.version>0.8.2</paimon.version>
<paimon.version>1.0.0</paimon.version>
</properties>

<dependencies>
Expand Down

0 comments on commit 7ebadaa

Please sign in to comment.