From 221d5438f5a2234669c075fed5b746e0b09653a8 Mon Sep 17 00:00:00 2001 From: Sebastian Nehls <10393149+snehlsen@users.noreply.github.com> Date: Wed, 8 May 2019 12:50:42 +0200 Subject: [PATCH] Feature/152 enable join pushdown for hive (#181) * #152 Hive JOIN push-down --- doc/dialects/athena.md | 2 +- doc/dialects/db2.md | 2 +- doc/dialects/exasol.md | 2 +- doc/dialects/hive.md | 2 +- doc/dialects/impala.md | 2 +- doc/dialects/oracle.md | 2 +- doc/dialects/postgresql.md | 2 +- doc/dialects/redshift.md | 2 +- doc/dialects/sql_server.md | 2 +- doc/dialects/sybase.md | 2 +- doc/dialects/teradata.md | 2 +- .../deploying_the_virtual_schema_adapter.md | 8 +- doc/user-guide/dialects/db2.md | 2 +- doc/user-guide/dialects/exasol.md | 2 +- doc/user-guide/dialects/hive.md | 2 +- doc/user-guide/dialects/impala.md | 2 +- doc/user-guide/dialects/oracle.md | 2 +- doc/user-guide/dialects/postgresql.md | 2 +- doc/user-guide/dialects/redshift.md | 2 +- doc/user-guide/dialects/sql_server.md | 2 +- doc/user-guide/dialects/sybase.md | 2 +- doc/user-guide/dialects/teradata.md | 2 +- .../integration-test-db2.yaml | 2 +- .../integration-test-sample.yaml | 2 +- .../integration-test-travis.yaml | 2 +- .../local/integration-test-config.yaml | 2 +- jdbc-adapter/pom.xml | 2 +- .../adapter/dialects/hive/HiveSqlDialect.java | 49 +++--- .../dialects/hive/HiveSqlDialectIT.java | 154 +++++++++++------- .../dialects/hive/HiveSqlDialectTest.java | 42 ++--- 30 files changed, 178 insertions(+), 127 deletions(-) diff --git a/doc/dialects/athena.md b/doc/dialects/athena.md index 09120068d..73a0624b2 100644 --- a/doc/dialects/athena.md +++ b/doc/dialects/athena.md @@ -43,7 +43,7 @@ You install the adapter script via the special SQL command `CREATE JAVA ADAPTER ```sql CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - %jar /buckets/bucketfs1/jdbc/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/jdbc/virtualschema-jdbc-adapter-dist-1.16.0.jar; %jar /buckets/bucketfs1/jdbc/AthenaJDBC42-.jar; / ``` diff --git a/doc/dialects/db2.md b/doc/dialects/db2.md index a86fff45c..023f84d3c 100644 --- a/doc/dialects/db2.md +++ b/doc/dialects/db2.md @@ -46,7 +46,7 @@ CREATE or replace JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS // This will add the adapter jar to the classpath so that it can be used inside the adapter script // Replace the names of the bucketfs and the bucket with the ones you used. 
- %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.16.0.jar; // DB2 Driver files %jar /buckets/bucketfs1/bucket1/db2jcc4.jar; diff --git a/doc/dialects/exasol.md b/doc/dialects/exasol.md index a4db2d0f1..c86295978 100644 --- a/doc/dialects/exasol.md +++ b/doc/dialects/exasol.md @@ -17,7 +17,7 @@ After uploading the adapter jar, the adapter script can be created as follows: CREATE SCHEMA adapter; CREATE JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.16.0.jar; / ``` diff --git a/doc/dialects/hive.md b/doc/dialects/hive.md index d7622cbbd..29eb6e12e 100644 --- a/doc/dialects/hive.md +++ b/doc/dialects/hive.md @@ -23,7 +23,7 @@ CREATE SCHEMA adapter; CREATE JAVA ADAPTER SCRIPT jdbc_adapter AS %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.16.0.jar; %jar /buckets/bucketfs1/bucket1/HiveJDBC41.jar; / diff --git a/doc/dialects/impala.md b/doc/dialects/impala.md index 6ec5a45f9..e886d70cf 100644 --- a/doc/dialects/impala.md +++ b/doc/dialects/impala.md @@ -22,7 +22,7 @@ CREATE SCHEMA adapter; CREATE JAVA ADAPTER SCRIPT jdbc_adapter AS %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.16.0.jar; %jar /buckets/bucketfs1/bucket1/hive_metastore.jar; %jar /buckets/bucketfs1/bucket1/hive_service.jar; diff --git a/doc/dialects/oracle.md b/doc/dialects/oracle.md index 16a6d2c92..f71f64b53 100644 --- a/doc/dialects/oracle.md +++ b/doc/dialects/oracle.md @@ -32,7 +32,7 @@ CREATE JAVA ADAPTER SCRIPT adapter.jdbc_oracle AS // You need to replace `your-bucket-fs` and `your-bucket` to match the actual location // of the adapter jar. - %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.16.0.jar; // Add the oracle jdbc driver to the classpath %jar /buckets/bucketfs1/bucket1/ojdbc7-12.1.0.2.jar diff --git a/doc/dialects/postgresql.md b/doc/dialects/postgresql.md index 5cb7e1120..acd5abf22 100644 --- a/doc/dialects/postgresql.md +++ b/doc/dialects/postgresql.md @@ -15,7 +15,7 @@ CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter // This will add the adapter jar to the classpath so that it can be used inside the adapter script // Replace the names of the bucketfs and the bucket with the ones you used. - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.16.0.jar; // You have to add all files of the data source jdbc driver here (e.g. 
MySQL or Hive) %jar /buckets/bucketfs1/bucket1/postgresql-42.0.0.jar; diff --git a/doc/dialects/redshift.md b/doc/dialects/redshift.md index 910a89ac9..b928f3c4a 100644 --- a/doc/dialects/redshift.md +++ b/doc/dialects/redshift.md @@ -45,7 +45,7 @@ You install the adapter script via the special SQL command `CREATE JAVA ADAPTER ```sql CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - %jar /buckets/bucketfs1/jdbc/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/jdbc/virtualschema-jdbc-adapter-dist-1.16.0.jar; %jar /buckets/bucketfs1/jdbc/RedshiftJDBC42-.jar; / ``` diff --git a/doc/dialects/sql_server.md b/doc/dialects/sql_server.md index 78a8c927d..023846972 100644 --- a/doc/dialects/sql_server.md +++ b/doc/dialects/sql_server.md @@ -17,7 +17,7 @@ CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.sql_server_jdbc_adapter // This will add the adapter jar to the classpath so that it can be used inside the adapter script // Replace the names of the bucketfs and the bucket with the ones you used. - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.16.0.jar; // You have to add all files of the data source jdbc driver here %jar /buckets/bucketfs1/bucket1/jtds.jar; diff --git a/doc/dialects/sybase.md b/doc/dialects/sybase.md index bdbd4fa6a..4eec7371e 100644 --- a/doc/dialects/sybase.md +++ b/doc/dialects/sybase.md @@ -18,7 +18,7 @@ CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - %jar /buckets/bucketfs1/virtualschema/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/virtualschema/virtualschema-jdbc-adapter-dist-1.16.0.jar; %jar /buckets/bucketfs1/virtualschema/jtds-1.3.1.jar; / ``` diff --git a/doc/dialects/teradata.md b/doc/dialects/teradata.md index 209e31429..fbc8b03a5 100644 --- a/doc/dialects/teradata.md +++ b/doc/dialects/teradata.md @@ -22,7 +22,7 @@ CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter // This will add the adapter jar to the classpath so that it can be used inside the adapter script // Replace the names of the bucketfs and the bucket with the ones you used. - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.16.0.jar; // You have to add all files of the data source jdbc driver here (e.g. MySQL or Hive) %jar /buckets/bucketfs1/bucket1/terajdbc4.jar; diff --git a/doc/user-guide/deploying_the_virtual_schema_adapter.md b/doc/user-guide/deploying_the_virtual_schema_adapter.md index ecb7a9801..8916ef00c 100644 --- a/doc/user-guide/deploying_the_virtual_schema_adapter.md +++ b/doc/user-guide/deploying_the_virtual_schema_adapter.md @@ -23,7 +23,7 @@ cd virtual-schemas/jdbc-adapter/ mvn clean -DskipTests package ``` -The resulting fat JAR is stored in `virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-1.15.0.jar`. +The resulting fat JAR is stored in `virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-1.16.0.jar`. ## Uploading the Adapter JAR Archive @@ -42,8 +42,8 @@ Following steps are required to upload a file to a bucket: 1. Now upload the file into this bucket, e.g. using curl (adapt the hostname, BucketFS port, bucket name and bucket write password). 
```bash -curl -X PUT -T virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-1.15.0.jar \ - http://w:write-password@your.exasol.host.com:2580/bucket1/virtualschema-jdbc-adapter-dist-1.15.0.jar +curl -X PUT -T virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-1.16.0.jar \ + http://w:write-password@your.exasol.host.com:2580/bucket1/virtualschema-jdbc-adapter-dist-1.16.0.jar ``` See chapter 3.6.4. "The synchronous cluster file system BucketFS" in the EXASolution User Manual for more details about BucketFS. @@ -75,7 +75,7 @@ CREATE JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS // This will add the adapter jar to the classpath so that it can be used inside the adapter script // Replace the names of the bucketfs and the bucket with the ones you used. - %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.16.0.jar; // You have to add all files of the data source jdbc driver here (e.g. Hive JDBC driver files) %jar /buckets/your-bucket-fs/your-bucket/name-of-data-source-jdbc-driver.jar; diff --git a/doc/user-guide/dialects/db2.md b/doc/user-guide/dialects/db2.md index a86fff45c..023f84d3c 100644 --- a/doc/user-guide/dialects/db2.md +++ b/doc/user-guide/dialects/db2.md @@ -46,7 +46,7 @@ CREATE or replace JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS // This will add the adapter jar to the classpath so that it can be used inside the adapter script // Replace the names of the bucketfs and the bucket with the ones you used. - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.16.0.jar; // DB2 Driver files %jar /buckets/bucketfs1/bucket1/db2jcc4.jar; diff --git a/doc/user-guide/dialects/exasol.md b/doc/user-guide/dialects/exasol.md index a4db2d0f1..c86295978 100644 --- a/doc/user-guide/dialects/exasol.md +++ b/doc/user-guide/dialects/exasol.md @@ -17,7 +17,7 @@ After uploading the adapter jar, the adapter script can be created as follows: CREATE SCHEMA adapter; CREATE JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.16.0.jar; / ``` diff --git a/doc/user-guide/dialects/hive.md b/doc/user-guide/dialects/hive.md index d7622cbbd..29eb6e12e 100644 --- a/doc/user-guide/dialects/hive.md +++ b/doc/user-guide/dialects/hive.md @@ -23,7 +23,7 @@ CREATE SCHEMA adapter; CREATE JAVA ADAPTER SCRIPT jdbc_adapter AS %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.16.0.jar; %jar /buckets/bucketfs1/bucket1/HiveJDBC41.jar; / diff --git a/doc/user-guide/dialects/impala.md b/doc/user-guide/dialects/impala.md index 6ec5a45f9..e886d70cf 100644 --- a/doc/user-guide/dialects/impala.md +++ b/doc/user-guide/dialects/impala.md @@ -22,7 +22,7 @@ CREATE SCHEMA adapter; CREATE JAVA ADAPTER SCRIPT jdbc_adapter AS %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.16.0.jar; %jar /buckets/bucketfs1/bucket1/hive_metastore.jar; %jar /buckets/bucketfs1/bucket1/hive_service.jar; diff --git 
a/doc/user-guide/dialects/oracle.md b/doc/user-guide/dialects/oracle.md index 16a6d2c92..f71f64b53 100644 --- a/doc/user-guide/dialects/oracle.md +++ b/doc/user-guide/dialects/oracle.md @@ -32,7 +32,7 @@ CREATE JAVA ADAPTER SCRIPT adapter.jdbc_oracle AS // You need to replace `your-bucket-fs` and `your-bucket` to match the actual location // of the adapter jar. - %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.16.0.jar; // Add the oracle jdbc driver to the classpath %jar /buckets/bucketfs1/bucket1/ojdbc7-12.1.0.2.jar diff --git a/doc/user-guide/dialects/postgresql.md b/doc/user-guide/dialects/postgresql.md index 5cb7e1120..acd5abf22 100644 --- a/doc/user-guide/dialects/postgresql.md +++ b/doc/user-guide/dialects/postgresql.md @@ -15,7 +15,7 @@ CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter // This will add the adapter jar to the classpath so that it can be used inside the adapter script // Replace the names of the bucketfs and the bucket with the ones you used. - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.16.0.jar; // You have to add all files of the data source jdbc driver here (e.g. MySQL or Hive) %jar /buckets/bucketfs1/bucket1/postgresql-42.0.0.jar; diff --git a/doc/user-guide/dialects/redshift.md b/doc/user-guide/dialects/redshift.md index 6268966e5..6e3f4a337 100644 --- a/doc/user-guide/dialects/redshift.md +++ b/doc/user-guide/dialects/redshift.md @@ -21,7 +21,7 @@ CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter // This will add the adapter jar to the classpath so that it can be used inside the adapter script // Replace the names of the bucketfs and the bucket with the ones you used. - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.16.0.jar; // You have to add all files of the data source jdbc driver here (e.g. MySQL or Hive) diff --git a/doc/user-guide/dialects/sql_server.md b/doc/user-guide/dialects/sql_server.md index 78a8c927d..023846972 100644 --- a/doc/user-guide/dialects/sql_server.md +++ b/doc/user-guide/dialects/sql_server.md @@ -17,7 +17,7 @@ CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.sql_server_jdbc_adapter // This will add the adapter jar to the classpath so that it can be used inside the adapter script // Replace the names of the bucketfs and the bucket with the ones you used. 
- %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.16.0.jar; // You have to add all files of the data source jdbc driver here %jar /buckets/bucketfs1/bucket1/jtds.jar; diff --git a/doc/user-guide/dialects/sybase.md b/doc/user-guide/dialects/sybase.md index bdbd4fa6a..4eec7371e 100644 --- a/doc/user-guide/dialects/sybase.md +++ b/doc/user-guide/dialects/sybase.md @@ -18,7 +18,7 @@ CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - %jar /buckets/bucketfs1/virtualschema/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/virtualschema/virtualschema-jdbc-adapter-dist-1.16.0.jar; %jar /buckets/bucketfs1/virtualschema/jtds-1.3.1.jar; / ``` diff --git a/doc/user-guide/dialects/teradata.md b/doc/user-guide/dialects/teradata.md index 209e31429..fbc8b03a5 100644 --- a/doc/user-guide/dialects/teradata.md +++ b/doc/user-guide/dialects/teradata.md @@ -22,7 +22,7 @@ CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter // This will add the adapter jar to the classpath so that it can be used inside the adapter script // Replace the names of the bucketfs and the bucket with the ones you used. - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.15.0.jar; + %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.16.0.jar; // You have to add all files of the data source jdbc driver here (e.g. MySQL or Hive) %jar /buckets/bucketfs1/bucket1/terajdbc4.jar; diff --git a/jdbc-adapter/integration-test-data/integration-test-db2.yaml b/jdbc-adapter/integration-test-data/integration-test-db2.yaml index a290248b4..cc2cf13a8 100644 --- a/jdbc-adapter/integration-test-data/integration-test-db2.yaml +++ b/jdbc-adapter/integration-test-data/integration-test-db2.yaml @@ -5,7 +5,7 @@ general: debugAddress: '192.168.0.12:3000' # Address which will be defined as DEBUG_ADDRESS in the virtual schemas bucketFsUrl: http://exasol-host:2580/bucket1 bucketFsPassword: bucket1 - jdbcAdapterPath: /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.15.0.jar + jdbcAdapterPath: /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.16.0.jar exasol: runIntegrationTests: true diff --git a/jdbc-adapter/integration-test-data/integration-test-sample.yaml b/jdbc-adapter/integration-test-data/integration-test-sample.yaml index 4c074a361..4f0022376 100644 --- a/jdbc-adapter/integration-test-data/integration-test-sample.yaml +++ b/jdbc-adapter/integration-test-data/integration-test-sample.yaml @@ -5,7 +5,7 @@ general: debugAddress: '192.168.0.12:3000' # Address which will be defined as DEBUG_ADDRESS in the virtual schemas bucketFsUrl: http://exasol-host:2580/bucket1 bucketFsPassword: bucket1 - jdbcAdapterPath: /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.15.0.jar + jdbcAdapterPath: /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.16.0.jar exasol: runIntegrationTests: true diff --git a/jdbc-adapter/integration-test-data/integration-test-travis.yaml b/jdbc-adapter/integration-test-data/integration-test-travis.yaml index 7fc25e226..5685dcbd9 100644 --- a/jdbc-adapter/integration-test-data/integration-test-travis.yaml +++ b/jdbc-adapter/integration-test-data/integration-test-travis.yaml @@ -4,7 +4,7 @@ general: debug: false debugAddress: '' bucketFsUrl: http://127.0.0.1:6594/default - jdbcAdapterPath: /buckets/bfsdefault/default/virtualschema-jdbc-adapter-dist-1.15.0.jar + jdbcAdapterPath: 
/buckets/bfsdefault/default/virtualschema-jdbc-adapter-dist-1.16.0.jar additionalJDBCDriverDir: /vagrant/drivers/ exasol: diff --git a/jdbc-adapter/local/integration-test-config.yaml b/jdbc-adapter/local/integration-test-config.yaml index f61ab62fb..88a19866f 100644 --- a/jdbc-adapter/local/integration-test-config.yaml +++ b/jdbc-adapter/local/integration-test-config.yaml @@ -5,7 +5,7 @@ general: debugAddress: '10.44.1.228:3000' # Address which will be defined as DEBUG_ADDRESS in the virtual schemas bucketFsUrl: http://localhost:2580/jars bucketFsPassword: public - jdbcAdapterPath: /buckets/bfsdefault/jars/virtualschema-jdbc-adapter-dist-1.15.0.jar + jdbcAdapterPath: /buckets/bfsdefault/jars/virtualschema-jdbc-adapter-dist-1.16.0.jar exasol: runIntegrationTests: true diff --git a/jdbc-adapter/pom.xml b/jdbc-adapter/pom.xml index fbe844929..fa73f8b4c 100644 --- a/jdbc-adapter/pom.xml +++ b/jdbc-adapter/pom.xml @@ -10,7 +10,7 @@ virtualschema-jdbc-adapter-dist - 1.15.0 + 1.16.0 UTF-8 UTF-8 1.8 diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/hive/HiveSqlDialect.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/hive/HiveSqlDialect.java index b54c70615..6b60047c8 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/hive/HiveSqlDialect.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/hive/HiveSqlDialect.java @@ -17,15 +17,16 @@ import com.exasol.adapter.sql.ScalarFunction; /** - * Dialect for Hive, using the Cloudera Hive JDBC Driver/Connector (developed by Simba). Only supports Hive 2.1.0 and - * later because of the order by (nulls first/last option) + * Dialect for Hive, using the Cloudera Hive JDBC Driver/Connector (developed by + * Simba). 
Only supports Hive 2.1.0 and later because of the order by (nulls + * first/last option) */ public class HiveSqlDialect extends AbstractSqlDialect { private static final String NAME = "HIVE"; private static final List SUPPORTED_PROPERTIES = Arrays.asList(SQL_DIALECT_PROPERTY, - CONNECTION_NAME_PROPERTY, CONNECTION_STRING_PROPERTY, USERNAME_PROPERTY, PASSWORD_PROPERTY, - CATALOG_NAME_PROPERTY, SCHEMA_NAME_PROPERTY, TABLE_FILTER_PROPERTY, EXCLUDED_CAPABILITIES_PROPERTY, - DEBUG_ADDRESS_PROPERTY, LOG_LEVEL_PROPERTY); + CONNECTION_NAME_PROPERTY, CONNECTION_STRING_PROPERTY, USERNAME_PROPERTY, + PASSWORD_PROPERTY, CATALOG_NAME_PROPERTY, SCHEMA_NAME_PROPERTY, TABLE_FILTER_PROPERTY, + EXCLUDED_CAPABILITIES_PROPERTY, DEBUG_ADDRESS_PROPERTY, LOG_LEVEL_PROPERTY); public HiveSqlDialect(final Connection connection, final AdapterProperties properties) { super(connection, properties); @@ -38,27 +39,33 @@ public static String getPublicName() { @Override public Capabilities getCapabilities() { final Capabilities.Builder builder = Capabilities.builder(); - builder.addMain(SELECTLIST_PROJECTION, SELECTLIST_EXPRESSIONS, FILTER_EXPRESSIONS, AGGREGATE_SINGLE_GROUP, - AGGREGATE_GROUP_BY_COLUMN, AGGREGATE_HAVING, ORDER_BY_COLUMN, ORDER_BY_EXPRESSION, LIMIT); - builder.addPredicate(AND, OR, NOT, EQUAL, NOTEQUAL, LESS, LESSEQUAL, LIKE, REGEXP_LIKE, BETWEEN, IN_CONSTLIST, - IS_NULL, IS_NOT_NULL); + builder.addMain(SELECTLIST_PROJECTION, SELECTLIST_EXPRESSIONS, FILTER_EXPRESSIONS, + AGGREGATE_SINGLE_GROUP, AGGREGATE_GROUP_BY_COLUMN, AGGREGATE_HAVING, + ORDER_BY_COLUMN, ORDER_BY_EXPRESSION, LIMIT, JOIN, JOIN_TYPE_INNER, + JOIN_TYPE_LEFT_OUTER, JOIN_TYPE_RIGHT_OUTER, JOIN_TYPE_FULL_OUTER, + JOIN_CONDITION_EQUI); + builder.addPredicate(AND, OR, NOT, EQUAL, NOTEQUAL, LESS, LESSEQUAL, LIKE, REGEXP_LIKE, + BETWEEN, IN_CONSTLIST, IS_NULL, IS_NOT_NULL); builder.addLiteral(NULL, BOOL, DATE, TIMESTAMP, DOUBLE, EXACTNUMERIC, STRING); - builder.addAggregateFunction(COUNT, COUNT_STAR, COUNT_DISTINCT, SUM, SUM_DISTINCT, MIN, MAX, AVG, AVG_DISTINCT, - STDDEV_POP, STDDEV_POP_DISTINCT, STDDEV_SAMP, STDDEV_SAMP_DISTINCT, VAR_POP, VAR_POP_DISTINCT, VAR_SAMP, - VAR_SAMP_DISTINCT); - builder.addScalarFunction(ADD, SUB, MULT, FLOAT_DIV, NEG, ABS, ACOS, ASIN, ATAN, CEIL, COS, DEGREES, DIV, EXP, - FLOOR, LN, LOG, MOD, POWER, RADIANS, SIGN, SIN, SQRT, TAN, ASCII, CONCAT, LENGTH, LOWER, LPAD, REPEAT, - REVERSE, RPAD, SOUNDEX, SPACE, SUBSTR, TRANSLATE, UPPER, ADD_DAYS, ADD_MONTHS, CURRENT_DATE, - CURRENT_TIMESTAMP, DATE_TRUNC, DAY, DAYS_BETWEEN, MINUTE, MONTH, MONTHS_BETWEEN, SECOND, WEEK, CAST, - BIT_AND, BIT_OR, BIT_XOR, CURRENT_USER); + builder.addAggregateFunction(COUNT, COUNT_STAR, COUNT_DISTINCT, SUM, SUM_DISTINCT, MIN, MAX, + AVG, AVG_DISTINCT, STDDEV_POP, STDDEV_POP_DISTINCT, STDDEV_SAMP, + STDDEV_SAMP_DISTINCT, VAR_POP, VAR_POP_DISTINCT, VAR_SAMP, VAR_SAMP_DISTINCT); + builder.addScalarFunction(ADD, SUB, MULT, FLOAT_DIV, NEG, ABS, ACOS, ASIN, ATAN, CEIL, COS, + DEGREES, DIV, EXP, FLOOR, LN, LOG, MOD, POWER, RADIANS, SIGN, SIN, SQRT, TAN, ASCII, + CONCAT, LENGTH, LOWER, LPAD, REPEAT, REVERSE, RPAD, SOUNDEX, SPACE, SUBSTR, + TRANSLATE, UPPER, ADD_DAYS, ADD_MONTHS, CURRENT_DATE, CURRENT_TIMESTAMP, DATE_TRUNC, + DAY, DAYS_BETWEEN, MINUTE, MONTH, MONTHS_BETWEEN, SECOND, WEEK, CAST, BIT_AND, + BIT_OR, BIT_XOR, CURRENT_USER); return builder.build(); } /** - * Quote from user manual The Cloudera JDBC Driver for Apache Hive supports both catalogs and schemas to make it - * easy for the driver to work with various JDBC applications. 
Since Hive only organizes tables into - * schemas/databases, the driver provides a synthetic catalog called “HIVE” under which all of the schemas/databases - * are organized. The driver also maps the JDBC schema to the Hive schema/database. + * Quote from user manual The Cloudera JDBC Driver for Apache Hive supports both + * catalogs and schemas to make it easy for the driver to work with various JDBC + * applications. Since Hive only organizes tables into schemas/databases, the + * driver provides a synthetic catalog called “HIVE” under which all of the + * schemas/databases are organized. The driver also maps the JDBC schema to the + * Hive schema/database. */ @Override public StructureElementSupport supportsJdbcCatalogs() { diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/hive/HiveSqlDialectIT.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/hive/HiveSqlDialectIT.java index 32aeffb41..1329ab088 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/hive/HiveSqlDialectIT.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/hive/HiveSqlDialectIT.java @@ -1,6 +1,7 @@ package com.exasol.adapter.dialects.hive; import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertAll; import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.FileNotFoundException; @@ -42,16 +43,17 @@ static void beforeAll() throws FileNotFoundException, SQLException, ClassNotFoun createHiveJDBCAdapter(); createHiveConnection(); - createVirtualSchema(VIRTUAL_SCHEMA, HiveSqlDialect.getPublicName(), "", HIVE_SCHEMA, HIVE_CONNECTION, "", "", - "ADAPTER.JDBC_ADAPTER", "", IS_LOCAL, getConfig().debugAddress(), - "", null,""); + createVirtualSchema(VIRTUAL_SCHEMA, HiveSqlDialect.getPublicName(), "", HIVE_SCHEMA, + HIVE_CONNECTION, "", "", "ADAPTER.JDBC_ADAPTER", "", IS_LOCAL, + getConfig().debugAddress(), "", null, ""); } - private static void createTestSchema() throws SQLException, ClassNotFoundException, FileNotFoundException { + private static void createTestSchema() + throws SQLException, ClassNotFoundException, FileNotFoundException { final String hiveConnectionString = getConfig().getHiveJdbcConnectionString(); Class.forName("org.apache.hive.jdbc.HiveDriver"); - try (final Connection conn = DriverManager.getConnection(hiveConnectionString, "hive", "")) - { + try (final Connection conn = DriverManager.getConnection(hiveConnectionString, "hive", + "")) { final Statement stmt = conn.createStatement(); stmt.execute("create table t(x int)"); stmt.execute("truncate table t"); @@ -64,9 +66,11 @@ private static void createTestSchema() throws SQLException, ClassNotFoundExcepti stmt.execute("truncate table t2"); stmt.execute("insert into t2 values (2,'bbb'), (3,'ccc')"); - stmt.execute("CREATE TABLE ALL_HIVE_DATA_TYPES(ARRAYCOL ARRAY, BIGINTEGER BIGINT, BOOLCOLUMN BOOLEAN, CHARCOLUMN CHAR(1), DECIMALCOL DECIMAL(10,0), DOUBLECOL DOUBLE, FLOATCOL FLOAT, INTCOL INT, MAPCOL MAP, SMALLINTEGER SMALLINT, STRINGCOL STRING, STRUCTCOL struct, TIMESTAMPCOL TIMESTAMP, TINYINTEGER TINYINT, VARCHARCOL VARCHAR(10), BINARYCOL BINARY, DATECOL DATE)"); + stmt.execute( + "CREATE TABLE ALL_HIVE_DATA_TYPES(ARRAYCOL ARRAY, BIGINTEGER BIGINT, BOOLCOLUMN BOOLEAN, CHARCOLUMN CHAR(1), DECIMALCOL DECIMAL(10,0), DOUBLECOL DOUBLE, FLOATCOL FLOAT, INTCOL INT, MAPCOL MAP, SMALLINTEGER SMALLINT, STRINGCOL STRING, STRUCTCOL struct, TIMESTAMPCOL 
TIMESTAMP, TINYINTEGER TINYINT, VARCHARCOL VARCHAR(10), BINARYCOL BINARY, DATECOL DATE)"); stmt.execute("truncate table ALL_HIVE_DATA_TYPES"); - stmt.execute("insert into all_hive_data_types(arraycol,biginteger,boolcolumn,charcolumn,decimalcol,doublecol,floatcol,intcol,mapcol,smallinteger,stringcol,structcol,timestampcol,tinyinteger,varcharcol,binarycol,datecol) select array('etet','ettee'), 56, true, '2', 53, 56.3, 5.199999809265137, 85, map('jkljj',5), 2, 'tshg', named_struct('a',2,'b',4), timestamp '2017-01-02 13:32:50.744', 1, 'tytu', 'MTAxMA==', date '1970-01-01' from t"); + stmt.execute( + "insert into all_hive_data_types(arraycol,biginteger,boolcolumn,charcolumn,decimalcol,doublecol,floatcol,intcol,mapcol,smallinteger,stringcol,structcol,timestampcol,tinyinteger,varcharcol,binarycol,datecol) select array('etet','ettee'), 56, true, '2', 53, 56.3, 5.199999809265137, 85, map('jkljj',5), 2, 'tshg', named_struct('a',2,'b',4), timestamp '2017-01-02 13:32:50.744', 1, 'tytu', 'MTAxMA==', date '1970-01-01' from t"); } } @@ -79,54 +83,79 @@ public void testSetup() throws SQLException { // Join Tests ------------------------------------------------------------- @Test - public void innerJoin() throws SQLException { - final String query = String.format("SELECT * FROM %1$s.t1 a INNER JOIN %1$s.t2 b ON a.x=b.x", VIRTUAL_SCHEMA); + void testInnerJoin() throws SQLException { + final String query = "SELECT * FROM " + VIRTUAL_SCHEMA + ".t1 a INNER JOIN " + + VIRTUAL_SCHEMA + ".t2 b ON a.x=b.x"; final ResultSet result = executeQuery(query); - matchNextRow(result, (long) 2, "bbb", (long) 2 ,"bbb"); - assertFalse(result.next()); + assertAll(() -> matchNextRow(result, 2L, "bbb", 2L, "bbb"), + () -> assertFalse(result.next())); } @Test - public void innerJoinWithProjection() throws SQLException { - final String query = String.format("SELECT b.y || %1$s.t1.y FROM %1$s.t1 INNER JOIN %1$s.t2 b ON %1$s.t1.x=b.x", VIRTUAL_SCHEMA); + void testInnerJoinWithProjection() throws SQLException { + final String query = "SELECT b.y || " + VIRTUAL_SCHEMA + ".t1.y FROM " + VIRTUAL_SCHEMA + + ".t1 INNER JOIN " + VIRTUAL_SCHEMA + ".t2 b ON " + VIRTUAL_SCHEMA + ".t1.x=b.x"; final ResultSet result = executeQuery(query); - matchNextRow(result, "bbbbbb"); - assertFalse(result.next()); + assertAll(() -> matchNextRow(result, "bbbbbb"), () -> assertFalse(result.next())); } @Test - public void leftJoin() throws SQLException { - final String query = String.format("SELECT * FROM %1$s.t1 a LEFT OUTER JOIN %1$s.t2 b ON a.x=b.x ORDER BY a.x", VIRTUAL_SCHEMA); + void testLeftJoin() throws SQLException { + final String query = "SELECT * FROM " + VIRTUAL_SCHEMA + ".t1 a LEFT OUTER JOIN " + + VIRTUAL_SCHEMA + ".t2 b ON a.x=b.x ORDER BY a.x"; final ResultSet result = executeQuery(query); - matchNextRow(result, (long) 1, "aaa", null ,null); - matchNextRow(result, (long) 2, "bbb", (long) 2 ,"bbb"); - assertFalse(result.next()); + assertAll(() -> matchNextRow(result, 1L, "aaa", null, null), + () -> matchNextRow(result, 2L, "bbb", 2L, "bbb"), () -> assertFalse(result.next())); } @Test - public void rightJoin() throws SQLException { - final String query = String.format("SELECT * FROM %1$s.t1 a RIGHT OUTER JOIN %1$s.t2 b ON a.x=b.x ORDER BY a.x", VIRTUAL_SCHEMA); + void testRightJoin() throws SQLException { + final String query = "SELECT * FROM " + VIRTUAL_SCHEMA + ".t1 a RIGHT OUTER JOIN " + + VIRTUAL_SCHEMA + ".t2 b ON a.x=b.x ORDER BY a.x"; final ResultSet result = executeQuery(query); - matchNextRow(result, (long) 2, "bbb", (long) 2 ,"bbb"); - 
matchNextRow(result, null, null, (long) 3 ,"ccc"); - assertFalse(result.next()); + assertAll(() -> matchNextRow(result, 2L, "bbb", 2L, "bbb"), + () -> matchNextRow(result, null, null, 3L, "ccc"), + () -> assertFalse(result.next())); } @Test - public void fullOuterJoin() throws SQLException { - final String query = String.format("SELECT * FROM %1$s.t1 a FULL OUTER JOIN %1$s.t2 b ON a.x=b.x ORDER BY a.x", VIRTUAL_SCHEMA); + void testFullOuterJoin() throws SQLException { + final String query = "SELECT * FROM " + VIRTUAL_SCHEMA + ".t1 a FULL OUTER JOIN " + + VIRTUAL_SCHEMA + ".t2 b ON a.x=b.x ORDER BY a.x"; final ResultSet result = executeQuery(query); - matchNextRow(result, (long) 1, "aaa", null ,null); - matchNextRow(result, (long) 2, "bbb", (long) 2 ,"bbb"); - matchNextRow(result, null, null, (long) 3 ,"ccc"); - assertFalse(result.next()); + assertAll(() -> matchNextRow(result, 1L, "aaa", null, null), + () -> matchNextRow(result, 2L, "bbb", 2L, "bbb"), + () -> matchNextRow(result, null, null, 3L, "ccc"), + () -> assertFalse(result.next())); + } + + @Test + void testRightJoinWithComplexCondition() throws SQLException { + final String query = "SELECT * FROM " + VIRTUAL_SCHEMA + ".t1 a RIGHT OUTER JOIN " + + VIRTUAL_SCHEMA + ".t2 b ON a.x||a.y=b.x||b.y ORDER BY a.x"; + final ResultSet result = executeQuery(query); + assertAll(() -> matchNextRow(result, 2L, "bbb", 2L, "bbb"), + () -> matchNextRow(result, null, null, 3L, "ccc"), + () -> assertFalse(result.next())); + } + + @Test + void testFullOuterJoinWithComplexCondition() throws SQLException { + final String query = "SELECT * FROM " + VIRTUAL_SCHEMA + ".t1 a FULL OUTER JOIN " + + VIRTUAL_SCHEMA + ".t2 b ON a.x-b.x=0 ORDER BY a.x"; + final ResultSet result = executeQuery(query); + assertAll(() -> matchNextRow(result, 1L, "aaa", null, null), + () -> matchNextRow(result, 2L, "bbb", 2L, "bbb"), + () -> matchNextRow(result, null, null, 3L, "ccc"), + () -> assertFalse(result.next())); } @Test void testTypeMapping() throws SQLException { final ResultSet result = executeQuery( "SELECT COLUMN_NAME, COLUMN_TYPE, COLUMN_MAXSIZE, COLUMN_NUM_PREC, COLUMN_NUM_SCALE, COLUMN_DEFAULT FROM EXA_DBA_COLUMNS WHERE COLUMN_SCHEMA = '" - + VIRTUAL_SCHEMA + "' AND COLUMN_TABLE='ALL_HIVE_DATA_TYPES' ORDER BY COLUMN_ORDINAL_POSITION"); + + VIRTUAL_SCHEMA + + "' AND COLUMN_TABLE='ALL_HIVE_DATA_TYPES' ORDER BY COLUMN_ORDINAL_POSITION"); matchNextRow(result, "ARRAYCOL", "VARCHAR(255) ASCII", (long) 255, null, null, null); matchNextRow(result, "BIGINTEGER", "DECIMAL(19,0)", (long) 19, (long) 19, (long) 0, null); matchNextRow(result, "BOOLCOLUMN", "BOOLEAN", (long) 1, null, null, null); @@ -142,16 +171,19 @@ void testTypeMapping() throws SQLException { matchNextRow(result, "TIMESTAMPCOL", "TIMESTAMP", (long) 29, null, null, null); matchNextRow(result, "TINYINTEGER", "DECIMAL(3,0)", (long) 3, (long) 3, (long) 0, null); matchNextRow(result, "VARCHARCOL", "VARCHAR(10) ASCII", (long) 10, null, null, null); - matchNextRow(result, "BINARYCOL", "VARCHAR(2000000) UTF8", (long) 2000000, null, null, null); + matchNextRow(result, "BINARYCOL", "VARCHAR(2000000) UTF8", (long) 2000000, null, null, + null); matchLastRow(result, "DATECOL", "DATE", (long) 10, null, null, null); } @Test void testSelectWithAllTypes() throws SQLException { - final ResultSet result = executeQuery("SELECT * from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"); - matchNextRow(result, "[\"etet\",\"ettee\"]", new BigDecimal("56"), true, "2", (long) 53, 56.3, - 5.199999809265137, (long) 85, "{\"jkljj\":5}", 2, "tshg", 
"{\"a\":2,\"b\":4}", - getSqlTimestamp(2017, 1, 2, 13, 32, 50, 744), (short) 1, "tytu", "TVRBeE1BPT0=", getSqlDate(1970, 1, 1)); + final ResultSet result = executeQuery( + "SELECT * from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"); + matchNextRow(result, "[\"etet\",\"ettee\"]", new BigDecimal("56"), true, "2", (long) 53, + 56.3, 5.199999809265137, (long) 85, "{\"jkljj\":5}", 2, "tshg", "{\"a\":2,\"b\":4}", + getSqlTimestamp(2017, 1, 2, 13, 32, 50, 744), (short) 1, "tytu", "TVRBeE1BPT0=", + getSqlDate(1970, 1, 1)); } @Test @@ -159,7 +191,8 @@ void testProjection() throws SQLException { final String query = "SELECT BIGINTEGER FROM " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; final ResultSet result = executeQuery(query); matchNextRow(result, new BigDecimal("56")); - matchSingleRowExplain(query, "SELECT `ALL_HIVE_DATA_TYPES`.`BIGINTEGER` FROM `default`.`ALL_HIVE_DATA_TYPES`"); + matchSingleRowExplain(query, + "SELECT `ALL_HIVE_DATA_TYPES`.`BIGINTEGER` FROM `default`.`ALL_HIVE_DATA_TYPES`"); } @Test @@ -167,7 +200,8 @@ void testRewrittenProjection() throws SQLException { final String query = "SELECT BINARYCOL FROM " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; final ResultSet result = executeQuery(query); matchNextRow(result, "TVRBeE1BPT0="); - matchSingleRowExplain(query, "SELECT base64(`ALL_HIVE_DATA_TYPES`.`BINARYCOL`) FROM `default`.`ALL_HIVE_DATA_TYPES`"); + matchSingleRowExplain(query, + "SELECT base64(`ALL_HIVE_DATA_TYPES`.`BINARYCOL`) FROM `default`.`ALL_HIVE_DATA_TYPES`"); } @Test @@ -209,15 +243,16 @@ void testLogicalPredicates() throws SQLException { + ".ALL_HIVE_DATA_TYPES where (biginteger < 56 or biginteger > 56) and not (biginteger is null)"; final ResultSet result = executeQuery(query); assertEquals(false, result.next()); - matchSingleRowExplain(query, "SELECT `ALL_HIVE_DATA_TYPES`.`BIGINTEGER` FROM `default`.`ALL_HIVE_DATA_TYPES` " - + "WHERE ((`ALL_HIVE_DATA_TYPES`.`BIGINTEGER` < 56 OR 56 < `ALL_HIVE_DATA_TYPES`.`BIGINTEGER`) AND NOT (`ALL_HIVE_DATA_TYPES`.`BIGINTEGER` IS NULL))"); + matchSingleRowExplain(query, + "SELECT `ALL_HIVE_DATA_TYPES`.`BIGINTEGER` FROM `default`.`ALL_HIVE_DATA_TYPES` " + + "WHERE ((`ALL_HIVE_DATA_TYPES`.`BIGINTEGER` < 56 OR 56 < `ALL_HIVE_DATA_TYPES`.`BIGINTEGER`) AND NOT (`ALL_HIVE_DATA_TYPES`.`BIGINTEGER` IS NULL))"); } @Test void testLikePredicates() throws SQLException { // LIKE, LIKE ESCAPE (not pushed down) - final String query = "select varcharcol, varcharcol like 't%' escape 't' from " + VIRTUAL_SCHEMA - + ".ALL_HIVE_DATA_TYPES where (varcharcol like 't%')"; + final String query = "select varcharcol, varcharcol like 't%' escape 't' from " + + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES where (varcharcol like 't%')"; final ResultSet result = executeQuery(query); matchNextRow(result, "tytu", false); matchSingleRowExplain(query, @@ -242,24 +277,27 @@ void testMiscPredicates() throws SQLException { + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES WHERE biginteger between 51 and 60"; final ResultSet result = executeQuery(query); matchNextRow(result, new BigDecimal("56"), true, false, true); - matchSingleRowExplain(query, "SELECT `ALL_HIVE_DATA_TYPES`.`BIGINTEGER`, `ALL_HIVE_DATA_TYPES`.`BIGINTEGER` IN (56, 61), `ALL_HIVE_DATA_TYPES`.`BIGINTEGER` IS NULL, " - + "`ALL_HIVE_DATA_TYPES`.`BIGINTEGER` IS NOT NULL FROM `default`.`ALL_HIVE_DATA_TYPES` WHERE `ALL_HIVE_DATA_TYPES`.`BIGINTEGER` BETWEEN 51 AND 60"); + matchSingleRowExplain(query, + "SELECT `ALL_HIVE_DATA_TYPES`.`BIGINTEGER`, `ALL_HIVE_DATA_TYPES`.`BIGINTEGER` IN (56, 61), `ALL_HIVE_DATA_TYPES`.`BIGINTEGER` 
IS NULL, " + + "`ALL_HIVE_DATA_TYPES`.`BIGINTEGER` IS NOT NULL FROM `default`.`ALL_HIVE_DATA_TYPES` WHERE `ALL_HIVE_DATA_TYPES`.`BIGINTEGER` BETWEEN 51 AND 60"); } - //This does not work with the current Hive version, since datatypes for the SUM columns dffer in the prepare and execute phases + // This does not work with the current Hive version, since datatypes for the SUM + // columns dffer in the prepare and execute phases public void testCountSumAggregateFunction() throws SQLException { final String query = "SELECT COUNT(biginteger), COUNT(*), COUNT(DISTINCT biginteger), SUM(biginteger), SUM(DISTINCT biginteger) from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; final ResultSet result = executeQuery(query); - matchNextRow(result, new BigDecimal("1"), new BigDecimal("1"), new BigDecimal("1"), 56.0, 56.0); + matchNextRow(result, new BigDecimal("1"), new BigDecimal("1"), new BigDecimal("1"), 56.0, + 56.0); matchSingleRowExplain(query, "SELECT COUNT(`ALL_HIVE_DATA_TYPES`.`BIGINTEGER`), COUNT(*), COUNT(DISTINCT `ALL_HIVE_DATA_TYPES`.`BIGINTEGER`), SUM(`ALL_HIVE_DATA_TYPES`.`BIGINTEGER`), SUM(DISTINCT `ALL_HIVE_DATA_TYPES`.`BIGINTEGER`) FROM `default`.`ALL_HIVE_DATA_TYPES`"); } @Test void testAvgMinMaxAggregateFunction() throws SQLException { - final String query = "SELECT AVG(biginteger), MIN(biginteger), MAX(biginteger) from " + VIRTUAL_SCHEMA - + ".ALL_HIVE_DATA_TYPES"; + final String query = "SELECT AVG(biginteger), MIN(biginteger), MAX(biginteger) from " + + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; final ResultSet result = executeQuery(query); matchNextRow(result, 56.0, new BigDecimal("56"), new BigDecimal("56")); matchSingleRowExplain(query, @@ -268,8 +306,8 @@ void testAvgMinMaxAggregateFunction() throws SQLException { @Test void testCastedStringFunctions() throws SQLException { - final String query = "select concat(upper(varcharcol),lower(repeat(varcharcol,2))) from " + VIRTUAL_SCHEMA - + ".ALL_HIVE_DATA_TYPES"; + final String query = "select concat(upper(varcharcol),lower(repeat(varcharcol,2))) from " + + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; final ResultSet result = executeQuery(query); matchNextRow(result, "TYTUtytutytu"); matchSingleRowExplain(query, @@ -279,8 +317,8 @@ void testCastedStringFunctions() throws SQLException { @Test void testRewrittenDivAndModFunctions() throws SQLException { - final String query = "select DIV(biginteger,biginteger), mod(biginteger,biginteger) from " + VIRTUAL_SCHEMA - + ".ALL_HIVE_DATA_TYPES"; + final String query = "select DIV(biginteger,biginteger), mod(biginteger,biginteger) from " + + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; final ResultSet result = executeQuery(query); matchNextRow(result, new BigDecimal("1"), new BigDecimal("0")); matchSingleRowExplain(query, @@ -289,10 +327,12 @@ void testRewrittenDivAndModFunctions() throws SQLException { @Test void testRewrittenSubStringFunction() throws SQLException { - final String query = "select substring(stringcol FROM 1 FOR 2) from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; + final String query = "select substring(stringcol FROM 1 FOR 2) from " + VIRTUAL_SCHEMA + + ".ALL_HIVE_DATA_TYPES"; final ResultSet result = executeQuery(query); matchNextRow(result, "ts"); - matchSingleRowExplain(query, "SELECT SUBSTR(`ALL_HIVE_DATA_TYPES`.`STRINGCOL`, 1, 2) FROM `default`.`ALL_HIVE_DATA_TYPES`"); + matchSingleRowExplain(query, + "SELECT SUBSTR(`ALL_HIVE_DATA_TYPES`.`STRINGCOL`, 1, 2) FROM `default`.`ALL_HIVE_DATA_TYPES`"); } @Test @@ -330,7 +370,7 @@ private static void createHiveJDBCAdapter() throws 
SQLException, FileNotFoundExc createJDBCAdapter(hiveIncludes); } - private static void createHiveConnection() throws SQLException, FileNotFoundException { + private static void createHiveConnection() throws SQLException, FileNotFoundException { createConnection(HIVE_CONNECTION, getConfig().getHiveDockerJdbcConnectionString(), "", ""); } } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/hive/HiveSqlDialectTest.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/hive/HiveSqlDialectTest.java index 7b4f015b3..256e4ef66 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/hive/HiveSqlDialectTest.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/hive/HiveSqlDialectTest.java @@ -38,27 +38,31 @@ void beforeEach() { @Test void testGetCapabilities() { final Capabilities capabilities = this.dialect.getCapabilities(); - assertAll( - () -> assertThat(capabilities.getMainCapabilities(), - containsInAnyOrder(SELECTLIST_PROJECTION, SELECTLIST_EXPRESSIONS, FILTER_EXPRESSIONS, - AGGREGATE_SINGLE_GROUP, AGGREGATE_GROUP_BY_COLUMN, AGGREGATE_HAVING, ORDER_BY_COLUMN, - ORDER_BY_EXPRESSION, LIMIT)), // + assertAll(() -> assertThat(capabilities.getMainCapabilities(), + containsInAnyOrder(SELECTLIST_PROJECTION, SELECTLIST_EXPRESSIONS, + FILTER_EXPRESSIONS, AGGREGATE_SINGLE_GROUP, AGGREGATE_GROUP_BY_COLUMN, + AGGREGATE_HAVING, ORDER_BY_COLUMN, ORDER_BY_EXPRESSION, LIMIT, JOIN, + JOIN_TYPE_INNER, JOIN_TYPE_LEFT_OUTER, JOIN_TYPE_RIGHT_OUTER, + JOIN_TYPE_FULL_OUTER, JOIN_CONDITION_EQUI)), // () -> assertThat(capabilities.getLiteralCapabilities(), - containsInAnyOrder(NULL, BOOL, DATE, TIMESTAMP, DOUBLE, EXACTNUMERIC, STRING)), + containsInAnyOrder(NULL, BOOL, DATE, TIMESTAMP, DOUBLE, EXACTNUMERIC, + STRING)), () -> assertThat(capabilities.getPredicateCapabilities(), - containsInAnyOrder(AND, OR, NOT, EQUAL, NOTEQUAL, LESS, LESSEQUAL, LIKE, REGEXP_LIKE, BETWEEN, - IN_CONSTLIST, IS_NULL, IS_NOT_NULL)), + containsInAnyOrder(AND, OR, NOT, EQUAL, NOTEQUAL, LESS, LESSEQUAL, LIKE, + REGEXP_LIKE, BETWEEN, IN_CONSTLIST, IS_NULL, IS_NOT_NULL)), () -> assertThat(capabilities.getAggregateFunctionCapabilities(), - containsInAnyOrder(COUNT, COUNT_STAR, COUNT_DISTINCT, SUM, SUM_DISTINCT, MIN, MAX, AVG, - AVG_DISTINCT, STDDEV_POP, STDDEV_POP_DISTINCT, STDDEV_SAMP, STDDEV_SAMP_DISTINCT, - VAR_POP, VAR_POP_DISTINCT, VAR_SAMP, VAR_SAMP_DISTINCT)), + containsInAnyOrder(COUNT, COUNT_STAR, COUNT_DISTINCT, SUM, SUM_DISTINCT, + MIN, MAX, AVG, AVG_DISTINCT, STDDEV_POP, STDDEV_POP_DISTINCT, + STDDEV_SAMP, STDDEV_SAMP_DISTINCT, VAR_POP, VAR_POP_DISTINCT, + VAR_SAMP, VAR_SAMP_DISTINCT)), () -> assertThat(capabilities.getScalarFunctionCapabilities(), - containsInAnyOrder(ADD, SUB, MULT, FLOAT_DIV, NEG, ABS, ACOS, ASIN, ATAN, CEIL, COS, DEGREES, - DIV, EXP, FLOOR, LN, LOG, MOD, POWER, RADIANS, SIGN, SIN, SQRT, TAN, ASCII, CONCAT, - LENGTH, LOWER, LPAD, REPEAT, REVERSE, RPAD, SOUNDEX, SPACE, SUBSTR, TRANSLATE, UPPER, - ADD_DAYS, ADD_MONTHS, CURRENT_DATE, CURRENT_TIMESTAMP, DATE_TRUNC, DAY, DAYS_BETWEEN, - MINUTE, MONTH, MONTHS_BETWEEN, SECOND, WEEK, CAST, BIT_AND, BIT_OR, BIT_XOR, - CURRENT_USER))); + containsInAnyOrder(ADD, SUB, MULT, FLOAT_DIV, NEG, ABS, ACOS, ASIN, ATAN, + CEIL, COS, DEGREES, DIV, EXP, FLOOR, LN, LOG, MOD, POWER, RADIANS, + SIGN, SIN, SQRT, TAN, ASCII, CONCAT, LENGTH, LOWER, LPAD, REPEAT, + REVERSE, RPAD, SOUNDEX, SPACE, SUBSTR, TRANSLATE, UPPER, ADD_DAYS, + 
ADD_MONTHS, CURRENT_DATE, CURRENT_TIMESTAMP, DATE_TRUNC, DAY, + DAYS_BETWEEN, MINUTE, MONTH, MONTHS_BETWEEN, SECOND, WEEK, CAST, + BIT_AND, BIT_OR, BIT_XOR, CURRENT_USER))); } @Test @@ -75,8 +79,8 @@ void testValidateDialectNameProperty() { setMandatoryProperties("ORACLE"); final AdapterProperties adapterProperties = new AdapterProperties(this.rawProperties); final SqlDialect sqlDialect = new HiveSqlDialect(null, adapterProperties); - final PropertyValidationException exception = assertThrows(PropertyValidationException.class, - sqlDialect::validateProperties); + final PropertyValidationException exception = assertThrows( + PropertyValidationException.class, sqlDialect::validateProperties); MatcherAssert.assertThat(exception.getMessage(), containsString( "The dialect HIVE cannot have the name ORACLE. You specified the wrong dialect name or created the wrong dialect class.")); }
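With `JOIN`, the `JOIN_TYPE_*` variants, and `JOIN_CONDITION_EQUI` added to the capability list in `HiveSqlDialect.getCapabilities()`, Exasol pushes equi-joins between tables of a Hive virtual schema down to Hive instead of fetching both tables and joining them locally. A minimal sketch of the effect, modeled on the `testInnerJoin` integration test above; the virtual schema name `VS_HIVE` is hypothetical, and the generated Hive SQL is shown only in rough shape, not as the adapter's exact output:

```sql
-- Hypothetical Hive virtual schema VS_HIVE with tables t1(x, y) and t2(x, y),
-- matching the tables created in HiveSqlDialectIT.
-- With the new JOIN capabilities, this inner equi-join is sent to Hive as a
-- single query instead of being executed locally in Exasol.
SELECT *
  FROM VS_HIVE.t1 a
 INNER JOIN VS_HIVE.t2 b
    ON a.x = b.x;

-- EXPLAIN VIRTUAL on the statement above should show pushed-down Hive SQL
-- roughly of this shape (backtick quoting and schema `default`, as in the tests):
-- SELECT ... FROM `default`.`t1` INNER JOIN `default`.`t2`
--   ON `t1`.`x` = `t2`.`x`
```

The same applies to the LEFT OUTER, RIGHT OUTER, and FULL OUTER variants exercised by the other join tests in `HiveSqlDialectIT`.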