diff --git a/build.gradle b/build.gradle
index 298d690e6c9d..08f6f8a3ad4f 100644
--- a/build.gradle
+++ b/build.gradle
@@ -374,10 +374,7 @@ project(':iceberg-data') {
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
     }

-    implementation(libs.orc.core) {
-      artifact {
-        classifier = 'nohive'
-      }
+    implementation("${libs.orc.core.get().module}:${libs.versions.orc.get()}:nohive") {
       exclude group: 'org.apache.hadoop'
       exclude group: 'commons-lang'
       // These artifacts are shaded and included in the orc-core fat jar
@@ -681,10 +678,7 @@ project(':iceberg-hive-metastore') {
     // that's really old. We use the core classifier to be able to override our guava
     // version. Luckily, hive-exec seems to work okay so far with this version of guava
     // See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
-    testImplementation(libs.hive2.exec) {
-      artifact {
-        classifier = 'core'
-      }
+    testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -734,10 +728,7 @@ project(':iceberg-orc') {
       exclude group: 'org.tukaani' // xz compression is not supported
     }

-    implementation(libs.orc.core) {
-      artifact {
-        classifier = 'nohive'
-      }
+    implementation("${libs.orc.core.get().module}:${libs.versions.orc.get()}:nohive") {
       exclude group: 'org.apache.hadoop'
       exclude group: 'commons-lang'
       // These artifacts are shaded and included in the orc-core fat jar
diff --git a/flink/v1.15/build.gradle b/flink/v1.15/build.gradle
index ff8a8e8fe34a..febc678c2bec 100644
--- a/flink/v1.15/build.gradle
+++ b/flink/v1.15/build.gradle
@@ -36,11 +36,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
     // for dropwizard histogram metrics implementation
     compileOnly libs.flink115.metrics.dropwizard
     compileOnly libs.flink115.streaming.java
-    compileOnly(libs.flink115.streaming.java) {
-      artifact {
-        classifier = 'tests'
-      }
-    }
+    compileOnly "${libs.flink115.streaming.java.get().module}:${libs.flink115.streaming.java.get().getVersion()}:tests"
     compileOnly libs.flink115.table.api.java.bridge
     compileOnly "org.apache.flink:flink-table-planner_${scalaVersion}:${libs.versions.flink115.get()}"
     compileOnly libs.flink115.connector.base
@@ -61,10 +57,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {

     compileOnly libs.avro.avro

-    implementation(libs.orc.core) {
-      artifact {
-        classifier = 'nohive'
-      }
+    implementation("${libs.orc.core.get().module}:${libs.versions.orc.get()}:nohive") {
       exclude group: 'org.apache.hadoop'
       exclude group: 'commons-lang'
       // These artifacts are shaded and included in the orc-core fat jar
@@ -92,10 +85,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
     // that's really old. We use the core classifier to be able to override our guava
     // version. Luckily, hive-exec seems to work okay so far with this version of guava
     // See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
-    testImplementation(libs.hive2.exec) {
-      artifact {
-        classifier = 'core'
-      }
+    testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -209,10 +199,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
       exclude group: 'com.zaxxer', module: 'HikariCP'
     }

-    integrationImplementation(libs.hive2.exec) {
-      artifact {
-        classifier = 'core'
-      }
+    integrationImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
diff --git a/flink/v1.16/build.gradle b/flink/v1.16/build.gradle
index 260e7c5bd12b..bdddc8bf3e74 100644
--- a/flink/v1.16/build.gradle
+++ b/flink/v1.16/build.gradle
@@ -36,11 +36,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
     // for dropwizard histogram metrics implementation
     compileOnly libs.flink116.metrics.dropwizard
     compileOnly libs.flink116.streaming.java
-    compileOnly(libs.flink116.streaming.java) {
-      artifact {
-        classifier = 'tests'
-      }
-    }
+    compileOnly "${libs.flink116.streaming.java.get().module}:${libs.flink116.streaming.java.get().getVersion()}:tests"
     compileOnly libs.flink116.table.api.java.bridge
     compileOnly "org.apache.flink:flink-table-planner_${scalaVersion}:${libs.versions.flink116.get()}"
     compileOnly libs.flink116.connector.base
@@ -61,10 +57,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {

     compileOnly libs.avro.avro

-    implementation(libs.orc.core) {
-      artifact {
-        classifier = 'nohive'
-      }
+    implementation("${libs.orc.core.get().module}:${libs.versions.orc.get()}:nohive") {
       exclude group: 'org.apache.hadoop'
       exclude group: 'commons-lang'
       // These artifacts are shaded and included in the orc-core fat jar
@@ -92,10 +85,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
     // that's really old. We use the core classifier to be able to override our guava
     // version. Luckily, hive-exec seems to work okay so far with this version of guava
     // See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
-    testImplementation(libs.hive2.exec) {
-      artifact {
-        classifier = 'core'
-      }
+    testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -209,10 +199,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
       exclude group: 'com.zaxxer', module: 'HikariCP'
     }

-    integrationImplementation(libs.hive2.exec) {
-      artifact {
-        classifier = 'core'
-      }
+    integrationImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
diff --git a/flink/v1.17/build.gradle b/flink/v1.17/build.gradle
index b597a1bcb0b9..fdd766449a56 100644
--- a/flink/v1.17/build.gradle
+++ b/flink/v1.17/build.gradle
@@ -36,11 +36,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
     // for dropwizard histogram metrics implementation
     compileOnly libs.flink117.metrics.dropwizard
     compileOnly libs.flink117.streaming.java
-    compileOnly(libs.flink117.streaming.java) {
-      artifact {
-        classifier = 'tests'
-      }
-    }
+    compileOnly "${libs.flink117.streaming.java.get().module}:${libs.flink117.streaming.java.get().getVersion()}:tests"
     compileOnly libs.flink117.table.api.java.bridge
     compileOnly "org.apache.flink:flink-table-planner_${scalaVersion}:${libs.versions.flink117.get()}"
     compileOnly libs.flink117.connector.base
@@ -61,10 +57,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {

     compileOnly libs.avro.avro

-    implementation(libs.orc.core) {
-      artifact {
-        classifier = 'nohive'
-      }
+    implementation("${libs.orc.core.get().module}:${libs.versions.orc.get()}:nohive") {
       exclude group: 'org.apache.hadoop'
       exclude group: 'commons-lang'
       // These artifacts are shaded and included in the orc-core fat jar
@@ -92,10 +85,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
     // that's really old. We use the core classifier to be able to override our guava
     // version. Luckily, hive-exec seems to work okay so far with this version of guava
     // See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
-    testImplementation(libs.hive2.exec) {
-      artifact {
-        classifier = 'core'
-      }
+    testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -209,10 +199,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
       exclude group: 'com.zaxxer', module: 'HikariCP'
     }

-    integrationImplementation(libs.hive2.exec) {
-      artifact {
-        classifier = 'core'
-      }
+    integrationImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
diff --git a/hive3/build.gradle b/hive3/build.gradle
index 67fe1090c5b0..fe0d3a9b45fa 100644
--- a/hive3/build.gradle
+++ b/hive3/build.gradle
@@ -50,10 +50,7 @@ project(':iceberg-hive3') {
       exclude group: 'org.apache.avro', module: 'avro'
     }

-    compileOnly(libs.hive3.exec) {
-      artifact {
-        classifier = 'core'
-      }
+    compileOnly("${libs.hive3.exec.get().module}:${libs.hive3.exec.get().getVersion()}:core") {
       exclude group: 'com.google.code.findbugs', module: 'jsr305'
       exclude group: 'com.google.guava'
       exclude group: 'com.google.protobuf', module: 'protobuf-java'
@@ -65,10 +62,7 @@ project(':iceberg-hive3') {
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
     }

-    compileOnly(libs.orc.core) {
-      artifact {
-        classifier = 'nohive'
-      }
+    compileOnly("${libs.orc.core.get().module}:${libs.versions.orc.get()}:nohive") {
       exclude group: 'org.apache.hadoop'
       exclude group: 'commons-lang'
       // These artifacts are shaded and included in the orc-core fat jar
diff --git a/mr/build.gradle b/mr/build.gradle
index a7ff1b73f869..848f6dee3c43 100644
--- a/mr/build.gradle
+++ b/mr/build.gradle
@@ -38,10 +38,7 @@ project(':iceberg-mr') {
       exclude group: 'org.apache.avro', module: 'avro'
     }

-    compileOnly(libs.hive2.exec) {
-      artifact {
-        classifier = 'core'
-      }
+    compileOnly("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
       exclude group: 'com.google.code.findbugs', module: 'jsr305'
       exclude group: 'com.google.guava'
       exclude group: 'com.google.protobuf', module: 'protobuf-java'
diff --git a/spark/v3.1/build.gradle b/spark/v3.1/build.gradle
index ffea62918a9c..8ab31995ef17 100644
--- a/spark/v3.1/build.gradle
+++ b/spark/v3.1/build.gradle
@@ -77,10 +77,7 @@ project(':iceberg-spark:iceberg-spark-3.1_2.12') {
     implementation libs.parquet.column
     implementation libs.parquet.hadoop

-    implementation(libs.orc.core) {
-      artifact {
-        classifier = 'nohive'
-      }
+    implementation("${libs.orc.core.get().module}:${libs.versions.orc.get()}:nohive") {
       exclude group: 'org.apache.hadoop'
       exclude group: 'commons-lang'
       // These artifacts are shaded and included in the orc-core fat jar
diff --git a/spark/v3.2/build.gradle b/spark/v3.2/build.gradle
index c21f1e18bad7..632652f13ef1 100644
--- a/spark/v3.2/build.gradle
+++ b/spark/v3.2/build.gradle
@@ -78,10 +78,7 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") {
     implementation libs.parquet.column
     implementation libs.parquet.hadoop

-    implementation(libs.orc.core) {
-      artifact {
-        classifier = 'nohive'
-      }
+    implementation("${libs.orc.core.get().module}:${libs.versions.orc.get()}:nohive") {
       exclude group: 'org.apache.hadoop'
       exclude group: 'commons-lang'
       // These artifacts are shaded and included in the orc-core fat jar
diff --git a/spark/v3.3/build.gradle b/spark/v3.3/build.gradle
index abd614923325..a2c905443b02 100644
--- a/spark/v3.3/build.gradle
+++ b/spark/v3.3/build.gradle
@@ -75,10 +75,7 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") {
     implementation libs.parquet.column
     implementation libs.parquet.hadoop

-    implementation(libs.orc.core) {
-      artifact {
-        classifier = 'nohive'
-      }
+    implementation("${libs.orc.core.get().module}:${libs.versions.orc.get()}:nohive") {
       exclude group: 'org.apache.hadoop'
       exclude group: 'commons-lang'
       // These artifacts are shaded and included in the orc-core fat jar
diff --git a/spark/v3.4/build.gradle b/spark/v3.4/build.gradle
index 3ec10c3b6b31..09e39b96f837 100644
--- a/spark/v3.4/build.gradle
+++ b/spark/v3.4/build.gradle
@@ -75,10 +75,7 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") {
     implementation libs.parquet.column
     implementation libs.parquet.hadoop

-    implementation(libs.orc.core) {
-      artifact {
-        classifier = 'nohive'
-      }
+    implementation("${libs.orc.core.get().module}:${libs.versions.orc.get()}:nohive") {
       exclude group: 'org.apache.hadoop'
       exclude group: 'commons-lang'
       // These artifacts are shaded and included in the orc-core fat jar
diff --git a/spark/v3.5/build.gradle b/spark/v3.5/build.gradle
index e4ef5919e7a8..d73a50b2e7c6 100644
--- a/spark/v3.5/build.gradle
+++ b/spark/v3.5/build.gradle
@@ -75,10 +75,7 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") {
     implementation libs.parquet.column
     implementation libs.parquet.hadoop

-    implementation(libs.orc.core) {
-      artifact {
-        classifier = 'nohive'
-      }
+    implementation("${libs.orc.core.get().module}:${libs.versions.orc.get()}:nohive") {
       exclude group: 'org.apache.hadoop'
       exclude group: 'commons-lang'
       // These artifacts are shaded and included in the orc-core fat jar
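
Every hunk above applies the same mechanical substitution: the deprecated `artifact { classifier = '...' }` block on a version-catalog dependency is replaced with plain `"group:name:version:classifier"` string notation, with the coordinates rebuilt from the catalog accessor. The sketch below is a hypothetical standalone build.gradle (not part of the patch) illustrating that pattern; it assumes the same `libs.orc.core` / `libs.versions.orc` aliases this repository declares in its version catalog.

```groovy
// Minimal sketch, assuming a gradle/libs.versions.toml along these lines (as in this repo):
//   [versions]  orc = "…"   // actual version elided
//   [libraries] orc-core = { module = "org.apache.orc:orc-core", version.ref = "orc" }
dependencies {
  // Before: deprecated artifact/classifier block on the catalog accessor.
  // implementation(libs.orc.core) {
  //   artifact {
  //     classifier = 'nohive'
  //   }
  //   exclude group: 'org.apache.hadoop'
  // }

  // After: rebuild the "group:name:version:classifier" string from the catalog.
  //   libs.orc.core.get().module -> ModuleIdentifier, interpolates as "org.apache.orc:orc-core"
  //   libs.versions.orc.get()    -> the version declared under [versions]
  implementation("${libs.orc.core.get().module}:${libs.versions.orc.get()}:nohive") {
    exclude group: 'org.apache.hadoop'
  }
}
```

Where no per-dependency configuration is needed (the Flink `streaming-java` test artifacts above), the closure is dropped entirely, and when the version is declared directly on the library rather than in the `[versions]` block, it is read back from the accessor with `getVersion()` instead of `libs.versions.<alias>.get()`.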