[BugFix] Reject table create request with time type in complex type (backport #54601) (#54701)

Co-authored-by: zhangqiang <[email protected]>
mergify[bot] and sevev authored Jan 6, 2025
1 parent ff4ef72 commit 71ce361
Showing 4 changed files with 150 additions and 6 deletions.
49 changes: 43 additions & 6 deletions fe/fe-core/src/main/java/com/starrocks/common/util/Util.java
@@ -36,8 +36,13 @@

import com.google.common.base.Strings;
import com.google.common.collect.Lists;
+ import com.starrocks.catalog.ArrayType;
import com.starrocks.catalog.Column;
+ import com.starrocks.catalog.MapType;
import com.starrocks.catalog.PrimitiveType;
+ import com.starrocks.catalog.ScalarType;
+ import com.starrocks.catalog.StructField;
+ import com.starrocks.catalog.StructType;
import com.starrocks.catalog.Type;
import com.starrocks.common.AnalysisException;
import com.starrocks.common.TimeoutException;
@@ -241,22 +246,22 @@ public static List<String> shellSplit(CharSequence string) {
return tokens;
}

- private static String columnHashString(Column column) {
- Type type = column.getType();
+ private static String columnHashString(Type type) {
if (type.isScalarType()) {
PrimitiveType primitiveType = type.getPrimitiveType();
switch (primitiveType) {
case CHAR:
case VARCHAR:
return String.format(
- TYPE_STRING_MAP.get(primitiveType), column.getStrLen());
+ TYPE_STRING_MAP.get(primitiveType), ((ScalarType) type).getLength());
case DECIMALV2:
case DECIMAL32:
case DECIMAL64:
case DECIMAL128:
return String.format(
- TYPE_STRING_MAP.get(primitiveType), column.getPrecision(),
- column.getScale());
+ TYPE_STRING_MAP.get(primitiveType),
+ ((ScalarType) type).getScalarPrecision(),
+ ((ScalarType) type).getScalarScale());
default:
return TYPE_STRING_MAP.get(primitiveType);
}
@@ -273,7 +278,7 @@ public static int schemaHash(int schemaVersion, List<Column> columns, Set<String
// columns
for (Column column : columns) {
adler32.update(column.getName().getBytes(StandardCharsets.UTF_8));
- String typeString = columnHashString(column);
+ String typeString = columnHashString(column.getType());
if (typeString == null) {
throw new SemanticException("Type:%s of column:%s does not support",
column.getType().toString(), column.getName());
@@ -314,6 +319,38 @@ public static int generateSchemaHash() {
return Math.abs(ThreadLocalRandom.current().nextInt(Integer.MAX_VALUE));
}

public static boolean checkTypeSupported(Type type) {
if (type.isScalarType()) {
if (TYPE_STRING_MAP.get(type.getPrimitiveType()) == null) {
return false;
}
} else if (type.isArrayType()) {
return checkTypeSupported(((ArrayType) type).getItemType());
} else if (type.isMapType()) {
Type keyType = ((MapType) type).getKeyType();
Type valueType = ((MapType) type).getValueType();
return checkTypeSupported(keyType) && checkTypeSupported(valueType);
} else if (type.isStructType()) {
for (StructField field : ((StructType) type).getFields()) {
if (!checkTypeSupported(field.getType())) {
return false;
}
}
}
return true;
}

public static boolean checkColumnSupported(List<Column> columns) {
for (Column column : columns) {
Type type = column.getType();
if (!checkTypeSupported(type)) {
throw new SemanticException("Type:%s of column:%s does not support",
column.getType().toString(), column.getName());
}
}
return true;
}

// get response body as a string from the given url.
// "encodedAuthInfo", the base64 encoded auth info. like:
// Base64.encodeBase64String("user:passwd".getBytes());
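The new helpers can be exercised directly. Below is a minimal, hypothetical usage sketch that is not part of this commit; it assumes the Type.TIME and Type.BIGINT constants and the ArrayType/MapType constructors from the imports above, and shows how the check recurses into complex types.

import com.starrocks.catalog.ArrayType;
import com.starrocks.catalog.MapType;
import com.starrocks.catalog.Type;
import com.starrocks.common.util.Util;

public class TimeTypeCheckSketch {
    public static void main(String[] args) {
        // Scalar TIME has no entry in TYPE_STRING_MAP, so it is rejected.
        System.out.println(Util.checkTypeSupported(Type.TIME));                           // expected: false
        // The check recurses into item/key/value types, so nested TIME is rejected too.
        System.out.println(Util.checkTypeSupported(new ArrayType(Type.TIME)));            // expected: false
        System.out.println(Util.checkTypeSupported(new MapType(Type.BIGINT, Type.TIME))); // expected: false
        // Complex types without TIME still pass.
        System.out.println(Util.checkTypeSupported(new ArrayType(Type.BIGINT)));          // expected: true
    }
}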
@@ -588,6 +588,7 @@ public Table createTable(LocalMetastore metastore, Database db, CreateTableStmt
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
+ Util.checkColumnSupported(baseSchema);
int schemaHash = Util.schemaHash(schemaVersion, baseSchema, bfColumns, bfFpp);

if (stmt.getSortKeys() != null) {
57 changes: 57 additions & 0 deletions test/sql/test_create_table/R/test_create_table_with_time
@@ -0,0 +1,57 @@
-- name: test_create_table_with_time
CREATE TABLE dup_test (
id bigint,
city varchar(100) not null,
time TIME not null
)
DUPLICATE KEY(id)
PARTITION BY (city)
DISTRIBUTED BY HASH(`id`)
PROPERTIES (
"replication_num" = "1"
);
-- result:
E: (1064, 'Getting analyzing error. Detail message: Type:TIME of column:time does not support.')
-- !result
CREATE TABLE dup_test (
id bigint,
city varchar(100) not null,
time array<TIME> not null
)
DUPLICATE KEY(id)
PARTITION BY (city)
DISTRIBUTED BY HASH(`id`)
PROPERTIES (
"replication_num" = "1"
);
-- result:
E: (1064, 'Getting analyzing error. Detail message: Type:ARRAY<TIME> of column:time does not support.')
-- !result
CREATE TABLE dup_test (
id bigint,
city varchar(100) not null,
time map<bigint, TIME> not null
)
DUPLICATE KEY(id)
PARTITION BY (city)
DISTRIBUTED BY HASH(`id`)
PROPERTIES (
"replication_num" = "1"
);
-- result:
E: (1064, 'Getting analyzing error. Detail message: Type:MAP<BIGINT,TIME> of column:time does not support.')
-- !result
CREATE TABLE dup_test (
id bigint,
city varchar(100) not null,
time struct<c1 bigint, c2 TIME> not null
)
DUPLICATE KEY(id)
PARTITION BY (city)
DISTRIBUTED BY HASH(`id`)
PROPERTIES (
"replication_num" = "1"
);
-- result:
E: (1064, 'Getting analyzing error. Detail message: Type:struct<c1 bigint(20), c2 TIME> of column:time does not support.')
-- !result
49 changes: 49 additions & 0 deletions test/sql/test_create_table/T/test_create_table_with_time
@@ -0,0 +1,49 @@
-- name: test_create_table_with_time

CREATE TABLE dup_test (
id bigint,
city varchar(100) not null,
time TIME not null
)
DUPLICATE KEY(id)
PARTITION BY (city)
DISTRIBUTED BY HASH(`id`)
PROPERTIES (
"replication_num" = "1"
);

CREATE TABLE dup_test (
id bigint,
city varchar(100) not null,
time array<TIME> not null
)
DUPLICATE KEY(id)
PARTITION BY (city)
DISTRIBUTED BY HASH(`id`)
PROPERTIES (
"replication_num" = "1"
);

CREATE TABLE dup_test (
id bigint,
city varchar(100) not null,
time map<bigint, TIME> not null
)
DUPLICATE KEY(id)
PARTITION BY (city)
DISTRIBUTED BY HASH(`id`)
PROPERTIES (
"replication_num" = "1"
);

CREATE TABLE dup_test (
id bigint,
city varchar(100) not null,
time struct<c1 bigint, c2 TIME> not null
)
DUPLICATE KEY(id)
PARTITION BY (city)
DISTRIBUTED BY HASH(`id`)
PROPERTIES (
"replication_num" = "1"
);

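For contrast, a hypothetical counterpart that is not included in the test files: the same table definition with DATETIME in place of TIME passes the new column check, so the CREATE TABLE succeeds. The table and column names below are illustrative only.

CREATE TABLE dup_test_datetime (
id bigint,
city varchar(100) not null,
event_time DATETIME not null
)
DUPLICATE KEY(id)
PARTITION BY (city)
DISTRIBUTED BY HASH(`id`)
PROPERTIES (
"replication_num" = "1"
);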