Skip to content

Commit

Permalink
Hive: Add Hive 4 support and remove Hive 3
Browse files Browse the repository at this point in the history
  • Loading branch information
manuzhang committed Dec 13, 2024
1 parent d402f83 commit 44cd3cb
Show file tree
Hide file tree
Showing 71 changed files with 118 additions and 2,690 deletions.
3 changes: 0 additions & 3 deletions .github/labeler.yml
Original file line number Diff line number Diff line change
Expand Up @@ -100,10 +100,7 @@ ORC:
HIVE:
- changed-files:
- any-glob-to-any-file: [
'hive3/**/*',
'hive-metastore/**/*',
'hive-runtime/**/*',
'hive3-orc-bundle/**/*'
]

DATA:
Expand Down
3 changes: 0 additions & 3 deletions .github/workflows/delta-conversion-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -48,9 +48,6 @@ on:
- '.asf.yml'
- 'dev/**'
- 'mr/**'
- 'hive3/**'
- 'hive3-orc-bundle/**'
- 'hive-runtime/**'
- 'flink/**'
- 'kafka-connect/**'
- 'docs/**'
Expand Down
3 changes: 0 additions & 3 deletions .github/workflows/flink-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -48,9 +48,6 @@ on:
- '.asf.yml'
- 'dev/**'
- 'mr/**'
- 'hive3/**'
- 'hive3-orc-bundle/**'
- 'hive-runtime/**'
- 'kafka-connect/**'
- 'spark/**'
- 'docs/**'
Expand Down
33 changes: 2 additions & 31 deletions .github/workflows/hive-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -87,39 +87,10 @@ jobs:
key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
restore-keys: ${{ runner.os }}-gradle-
- run: echo -e "$(ip addr show eth0 | grep "inet\b" | awk '{print $2}' | cut -d/ -f1)\t$(hostname -f) $(hostname -s)" | sudo tee -a /etc/hosts
- run: ./gradlew -DsparkVersions= -DhiveVersions=2 -DflinkVersions= -DkafkaVersions= -Pquick=true :iceberg-mr:check :iceberg-hive-runtime:check -x javadoc
- run: ./gradlew -DsparkVersions= -DhiveVersions=2 -DflinkVersions= -DkafkaVersions= -Pquick=true :iceberg-mr:check -x javadoc
- uses: actions/upload-artifact@v4
if: failure()
with:
name: test logs
path: |
**/build/testlogs
hive3-tests:
runs-on: ubuntu-22.04
strategy:
matrix:
jvm: [11, 17, 21]
env:
SPARK_LOCAL_IP: localhost
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4
with:
distribution: zulu
java-version: ${{ matrix.jvm }}
- uses: actions/cache@v4
with:
path: |
~/.gradle/caches
~/.gradle/wrapper
key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
restore-keys: ${{ runner.os }}-gradle-
- run: echo -e "$(ip addr show eth0 | grep "inet\b" | awk '{print $2}' | cut -d/ -f1)\t$(hostname -f) $(hostname -s)" | sudo tee -a /etc/hosts
- run: ./gradlew -DsparkVersions= -DhiveVersions=3 -DflinkVersions= -DkafkaVersions= -Pquick=true :iceberg-hive3-orc-bundle:check :iceberg-hive3:check :iceberg-hive-runtime:check -x javadoc
- uses: actions/upload-artifact@v4
if: failure()
with:
name: test logs
path: |
**/build/testlogs
**/build/testlogs
3 changes: 0 additions & 3 deletions .github/workflows/kafka-connect-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -49,9 +49,6 @@ on:
- 'dev/**'
- 'mr/**'
- 'flink/**'
- 'hive3/**'
- 'hive3-orc-bundle/**'
- 'hive-runtime/**'
- 'spark/**'
- 'docs/**'
- 'site/**'
Expand Down
3 changes: 0 additions & 3 deletions .github/workflows/spark-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -49,9 +49,6 @@ on:
- 'dev/**'
- 'site/**'
- 'mr/**'
- 'hive3/**'
- 'hive3-orc-bundle/**'
- 'hive-runtime/**'
- 'flink/**'
- 'kafka-connect/**'
- 'docs/**'
Expand Down
10 changes: 6 additions & 4 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -675,7 +675,7 @@ project(':iceberg-hive-metastore') {

compileOnly libs.avro.avro

compileOnly(libs.hive2.metastore) {
compileOnly(libs.hive4.metastore) {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.pentaho' // missing dependency
Expand All @@ -695,7 +695,7 @@ project(':iceberg-hive-metastore') {
// that's really old. We use the core classifier to be able to override our guava
// version. Luckily, hive-exec seems to work okay so far with this version of guava
// See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
testImplementation("${libs.hive4.exec.get().module}:${libs.hive4.exec.get().getVersion()}:core") {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.pentaho' // missing dependency
Expand All @@ -707,7 +707,7 @@ project(':iceberg-hive-metastore') {
exclude group: 'com.google.code.findbugs', module: 'jsr305'
}

testImplementation(libs.hive2.metastore) {
testImplementation(libs.hive4.metastore) {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.pentaho' // missing dependency
Expand All @@ -723,7 +723,9 @@ project(':iceberg-hive-metastore') {
exclude group: 'com.zaxxer', module: 'HikariCP'
}

compileOnly(libs.hadoop2.client) {
testImplementation(libs.hive4.standalone.metastore.server)

compileOnly(libs.hadoop3.client) {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,8 @@ public static void startMetastoreAndSpark() {
.master("local[2]")
.config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic")
.config(
"spark.hadoop." + HiveConf.ConfVars.METASTOREURIS.varname,
hiveConf.get(HiveConf.ConfVars.METASTOREURIS.varname))
"spark.hadoop." + HiveConf.ConfVars.METASTORE_URIS.varname,
hiveConf.get(HiveConf.ConfVars.METASTORE_URIS.varname))
.config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true")
.config("spark.databricks.delta.retentionDurationCheck.enabled", "false")
.config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
Expand Down
4 changes: 2 additions & 2 deletions flink/v1.18/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
// that's really old. We use the core classifier to be able to override our guava
// version. Luckily, hive-exec seems to work okay so far with this version of guava
// See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
testImplementation("${libs.hive4.exec.get().module}:${libs.hive4.exec.get().getVersion()}:core") {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.pentaho' // missing dependency
Expand All @@ -100,7 +100,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
exclude group: 'com.google.code.findbugs', module: 'jsr305'
}

testImplementation(libs.hive2.metastore) {
testImplementation(libs.hive4.metastore) {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.pentaho' // missing dependency
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,6 @@ protected String getFullQualifiedTableName(String tableName) {
}

static String getURI(HiveConf conf) {
return conf.get(HiveConf.ConfVars.METASTOREURIS.varname);
return conf.get(HiveConf.ConfVars.METASTORE_URIS.varname);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ public void testCreateCatalogWithHiveConfDir() throws IOException {
Configuration newConf = new Configuration(hiveConf);
// Set another new directory which is different from the hive metastore's warehouse path.
newConf.set(
HiveConf.ConfVars.METASTOREWAREHOUSE.varname, "file://" + warehouseDir.getAbsolutePath());
HiveConf.ConfVars.METASTORE_WAREHOUSE.varname, "file://" + warehouseDir.getAbsolutePath());
newConf.writeXml(fos);
}
assertThat(hiveSiteXML.toPath()).exists();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,8 @@ protected StructLikeSet rowSet(String tableName, Table testTable, String... colu
Map<String, String> properties = Maps.newHashMap();
properties.put(
CatalogProperties.WAREHOUSE_LOCATION,
hiveConf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname));
properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTOREURIS.varname));
hiveConf.get(HiveConf.ConfVars.METASTORE_WAREHOUSE.varname));
properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTORE_URIS.varname));
properties.put(
CatalogProperties.CLIENT_POOL_SIZE,
Integer.toString(hiveConf.getInt("iceberg.hive.client-pool-size", 5)));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,8 +62,8 @@ protected StructLikeSet rowSet(String tableName, Table testTable, String... colu
Map<String, String> properties = Maps.newHashMap();
properties.put(
CatalogProperties.WAREHOUSE_LOCATION,
hiveConf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname));
properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTOREURIS.varname));
hiveConf.get(HiveConf.ConfVars.METASTORE_WAREHOUSE.varname));
properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTORE_URIS.varname));
properties.put(
CatalogProperties.CLIENT_POOL_SIZE,
Integer.toString(hiveConf.getInt("iceberg.hive.client-pool-size", 5)));
Expand Down
8 changes: 4 additions & 4 deletions flink/v1.19/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
// that's really old. We use the core classifier to be able to override our guava
// version. Luckily, hive-exec seems to work okay so far with this version of guava
// See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
testImplementation("${libs.hive4.exec.get().module}:${libs.hive4.exec.get().getVersion()}:core") {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.pentaho' // missing dependency
Expand All @@ -100,7 +100,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
exclude group: 'com.google.code.findbugs', module: 'jsr305'
}

testImplementation(libs.hive2.metastore) {
testImplementation(libs.hive4.metastore) {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.pentaho' // missing dependency
Expand Down Expand Up @@ -193,7 +193,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
exclude group: 'org.apache.avro', module: 'avro'
}

integrationImplementation(libs.hive2.metastore) {
integrationImplementation(libs.hive4.metastore) {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.pentaho' // missing dependency
Expand All @@ -210,7 +210,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
exclude group: 'org.slf4j'
}

integrationImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
integrationImplementation("${libs.hive4.exec.get().module}:${libs.hive4.exec.get().getVersion()}:core") {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.pentaho' // missing dependency
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,6 @@ protected String getFullQualifiedTableName(String tableName) {
}

static String getURI(HiveConf conf) {
return conf.get(HiveConf.ConfVars.METASTOREURIS.varname);
return conf.get(HiveConf.ConfVars.METASTORE_URIS.varname);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ public void testCreateCatalogWithHiveConfDir() throws IOException {
Configuration newConf = new Configuration(hiveConf);
// Set another new directory which is different from the hive metastore's warehouse path.
newConf.set(
HiveConf.ConfVars.METASTOREWAREHOUSE.varname, "file://" + warehouseDir.getAbsolutePath());
HiveConf.ConfVars.METASTORE_WAREHOUSE.varname, "file://" + warehouseDir.getAbsolutePath());
newConf.writeXml(fos);
}
assertThat(hiveSiteXML.toPath()).exists();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,8 @@ protected StructLikeSet rowSet(String tableName, Table testTable, String... colu
Map<String, String> properties = Maps.newHashMap();
properties.put(
CatalogProperties.WAREHOUSE_LOCATION,
hiveConf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname));
properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTOREURIS.varname));
hiveConf.get(HiveConf.ConfVars.METASTORE_WAREHOUSE.varname));
properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTORE_URIS.varname));
properties.put(
CatalogProperties.CLIENT_POOL_SIZE,
Integer.toString(hiveConf.getInt("iceberg.hive.client-pool-size", 5)));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,8 +62,8 @@ protected StructLikeSet rowSet(String tableName, Table testTable, String... colu
Map<String, String> properties = Maps.newHashMap();
properties.put(
CatalogProperties.WAREHOUSE_LOCATION,
hiveConf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname));
properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTOREURIS.varname));
hiveConf.get(HiveConf.ConfVars.METASTORE_WAREHOUSE.varname));
properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTORE_URIS.varname));
properties.put(
CatalogProperties.CLIENT_POOL_SIZE,
Integer.toString(hiveConf.getInt("iceberg.hive.client-pool-size", 5)));
Expand Down
8 changes: 4 additions & 4 deletions flink/v1.20/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
// that's really old. We use the core classifier to be able to override our guava
// version. Luckily, hive-exec seems to work okay so far with this version of guava
// See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
testImplementation("${libs.hive4.exec.get().module}:${libs.hive4.exec.get().getVersion()}:core") {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.pentaho' // missing dependency
Expand All @@ -100,7 +100,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
exclude group: 'com.google.code.findbugs', module: 'jsr305'
}

testImplementation(libs.hive2.metastore) {
testImplementation(libs.hive4.metastore) {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.pentaho' // missing dependency
Expand Down Expand Up @@ -193,7 +193,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
exclude group: 'org.apache.avro', module: 'avro'
}

integrationImplementation(libs.hive2.metastore) {
integrationImplementation(libs.hive4.metastore) {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.pentaho' // missing dependency
Expand All @@ -210,7 +210,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
exclude group: 'org.slf4j'
}

integrationImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
integrationImplementation("${libs.hive4.exec.get().module}:${libs.hive4.exec.get().getVersion()}:core") {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.pentaho' // missing dependency
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,6 @@ protected String getFullQualifiedTableName(String tableName) {
}

static String getURI(HiveConf conf) {
return conf.get(HiveConf.ConfVars.METASTOREURIS.varname);
return conf.get(HiveConf.ConfVars.METASTORE_URIS.varname);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,8 @@ public void testCreateCatalogWithHiveConfDir() throws IOException {
Configuration newConf = new Configuration(hiveConf);
// Set another new directory which is different from the hive metastore's warehouse path.
newConf.set(
HiveConf.ConfVars.METASTOREWAREHOUSE.varname, "file://" + warehouseDir.getAbsolutePath());
HiveConf.ConfVars.METASTORE_WAREHOUSE.varname,
"file://" + warehouseDir.getAbsolutePath());
newConf.writeXml(fos);
}
assertThat(hiveSiteXML.toPath()).exists();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,8 @@ protected StructLikeSet rowSet(String tableName, Table testTable, String... colu
Map<String, String> properties = Maps.newHashMap();
properties.put(
CatalogProperties.WAREHOUSE_LOCATION,
hiveConf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname));
properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTOREURIS.varname));
hiveConf.get(HiveConf.ConfVars.METASTORE_WAREHOUSE.varname));
properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTORE_URIS.varname));
properties.put(
CatalogProperties.CLIENT_POOL_SIZE,
Integer.toString(hiveConf.getInt("iceberg.hive.client-pool-size", 5)));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,8 +62,8 @@ protected StructLikeSet rowSet(String tableName, Table testTable, String... colu
Map<String, String> properties = Maps.newHashMap();
properties.put(
CatalogProperties.WAREHOUSE_LOCATION,
hiveConf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname));
properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTOREURIS.varname));
hiveConf.get(HiveConf.ConfVars.METASTORE_WAREHOUSE.varname));
properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTORE_URIS.varname));
properties.put(
CatalogProperties.CLIENT_POOL_SIZE,
Integer.toString(hiveConf.getInt("iceberg.hive.client-pool-size", 5)));
Expand Down
2 changes: 1 addition & 1 deletion gradle.properties
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ jmhIncludeRegex=.*
systemProp.defaultFlinkVersions=1.20
systemProp.knownFlinkVersions=1.18,1.19,1.20
systemProp.defaultHiveVersions=2
systemProp.knownHiveVersions=2,3
systemProp.knownHiveVersions=2,4
systemProp.defaultSparkVersions=3.5
systemProp.knownSparkVersions=3.3,3.4,3.5
systemProp.defaultKafkaVersions=3
Expand Down
11 changes: 6 additions & 5 deletions gradle/libs.versions.toml
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ hadoop2 = "2.7.3"
hadoop3 = "3.4.1"
httpcomponents-httpclient5 = "5.4.1"
hive2 = { strictly = "2.3.9"} # see rich version usage explanation above
hive3 = "3.1.3"
hive4 = "4.0.1"
immutables-value = "2.10.1"
jackson-bom = "2.18.2"
jackson211 = { strictly = "2.11.4"} # see rich version usage explanation above
Expand Down Expand Up @@ -139,10 +139,11 @@ hive2-exec = { module = "org.apache.hive:hive-exec", version.ref = "hive2" }
hive2-metastore = { module = "org.apache.hive:hive-metastore", version.ref = "hive2" }
hive2-serde = { module = "org.apache.hive:hive-serde", version.ref = "hive2" }
hive2-service = { module = "org.apache.hive:hive-service", version.ref = "hive2" }
hive3-exec = { module = "org.apache.hive:hive-exec", version.ref = "hive3" }
hive3-metastore = { module = "org.apache.hive:hive-metastore", version.ref = "hive3" }
hive3-serde = { module = "org.apache.hive:hive-serde", version.ref = "hive3" }
hive3-service = { module = "org.apache.hive:hive-service", version.ref = "hive3" }
hive4-exec = { module = "org.apache.hive:hive-exec", version.ref = "hive4" }
hive4-metastore = { module = "org.apache.hive:hive-metastore", version.ref = "hive4" }
hive4-standalone-metastore-server = { module = "org.apache.hive:hive-standalone-metastore-server", version.ref = "hive4" }
hive4-serde = { module = "org.apache.hive:hive-serde", version.ref = "hive4" }
hive4-service = { module = "org.apache.hive:hive-service", version.ref = "hive4" }
httpcomponents-httpclient5 = { module = "org.apache.httpcomponents.client5:httpclient5", version.ref = "httpcomponents-httpclient5" }
immutables-value = { module = "org.immutables:value", version.ref = "immutables-value" }
jackson-bom = { module = "com.fasterxml.jackson:jackson-bom", version.ref = "jackson-bom" }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ public <R> R run(Action<R, IMetaStoreClient, TException> action, boolean retry)
static Key extractKey(String cacheKeys, Configuration conf) {
// generate key elements in a certain order, so that the Key instances are comparable
List<Object> elements = Lists.newArrayList();
elements.add(conf.get(HiveConf.ConfVars.METASTOREURIS.varname, ""));
elements.add(conf.get(HiveConf.ConfVars.METASTORE_URIS.varname, ""));
elements.add(conf.get(HiveCatalog.HIVE_CONF_CATALOG, "hive"));
if (cacheKeys == null || cacheKeys.isEmpty()) {
return Key.of(elements);
Expand Down
Loading

0 comments on commit 44cd3cb

Please sign in to comment.