diff --git a/.gitignore b/.gitignore index 1213f6978..5594217f5 100644 --- a/.gitignore +++ b/.gitignore @@ -15,4 +15,7 @@ dependency-reduced-pom.xml # Others .DS_Store -*.swp +*.swp +**/local +Scripts +.dbeaver* \ No newline at end of file diff --git a/.travis.yml b/.travis.yml index e8112099a..bebeeea41 100644 --- a/.travis.yml +++ b/.travis.yml @@ -11,4 +11,6 @@ matrix: include: - jdk: "oraclejdk8" +before_script: ./jdbc-adapter/tools/version.sh verify + script: ./jdbc-adapter/integration-test-data/run_integration_tests.sh diff --git a/README.md b/README.md index c57721a79..d08debe70 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![Build Status](https://travis-ci.org/EXASOL/virtual-schemas.svg?branch=master)](https://travis-ci.org/EXASOL/virtual-schemas) -###### Please note that this is an open source project which is officially supported by Exasol. For any question, you can contact our support team. +

Please note that this is an open source project which is officially supported by Exasol. For any questions, you can contact our support team.

Virtual schemas provide a powerful abstraction to conveniently access arbitrary data sources. Virtual schemas are a kind of read-only link to an external source and contain virtual tables which look like regular tables except that the actual data are not stored locally. @@ -14,16 +14,14 @@ Please note that virtual schemas are part of the Advanced Edition of Exasol. For further details about the concept, usage and examples, please see the corresponding chapter in our Exasol User Manual. - ## API Specification The subdirectory [doc](doc) contains the API specification for virtual schema adapters. - ## JDBC Adapter The subdirectory [jdbc-adapter](jdbc-adapter) contains the JDBC adapter, which allows you to integrate any kind of JDBC data source that provides a JDBC driver. ## Python Redis Demo Adapter -The subdirectory [python-redis-demo-adapter](python-redis-demo-adapter) contains a demo adapter for Redis written in Python. This adapter was created to easily demonstrate the key concepts in a real, but very simple implementation. If you want to write your own adapter, this might be the right code to get a first impression what you'll have to develop. +The subdirectory [python-redis-demo-adapter](python-redis-demo-adapter) contains a demo adapter for Redis written in Python. This adapter was created to easily demonstrate the key concepts in a real, but very simple implementation. If you want to write your own adapter, this might be the right code to get a first impression of what you'll have to develop. \ No newline at end of file diff --git a/jdbc-adapter/README.md b/jdbc-adapter/README.md index 7dc389ab4..cd912a583 100644 --- a/jdbc-adapter/README.md +++ b/jdbc-adapter/README.md @@ -1,41 +1,52 @@ # JDBC Adapter for Virtual Schemas -[![Build Status](https://travis-ci.org/EXASOL/virtual-schemas.svg?branch=master)](https://travis-ci.org/EXASOL/virtual-schemas) +[![Build Status](https://travis-ci.org/EXASOL/virtual-schemas.svg)](https://travis-ci.org/EXASOL/virtual-schemas) + +## Supported Dialects + +1. [EXASOL](doc/sql_dialects/exasol.md) +1. [Hive](doc/sql_dialects/hive.md) +1. [Impala](doc/sql_dialects/impala.md) +1. [DB2](doc/sql_dialects/db2.md) +1. [Oracle](doc/sql_dialects/oracle.md) +1. [Teradata](doc/sql_dialects/teradata.md) +1. [Redshift](doc/sql_dialects/redshift.md) +1. [SQL Server](doc/sql_dialects/sql_server.md) +1. [Sybase ASE](doc/sql_dialects/sybase.md) +1. [PostgreSQL](doc/sql_dialects/postgresql.md) +1. Generic ## Overview The JDBC adapter for virtual schemas allows you to connect to JDBC data sources like Hive, Oracle, Teradata, Exasol or any other data source supporting JDBC. It uses the well-proven ```IMPORT FROM JDBC``` Exasol statement behind the scenes to obtain the requested data when running a query on a virtual table. The JDBC adapter also serves as the reference adapter for the Exasol virtual schema framework. -The JDBC adapter currently supports the following SQL dialects and data sources. This list will be continuously extended based on the feedback from our users: -* Exasol -* Hive -* Impala -* Oracle -* Teradata -* Redshift -* DB2 -* SQL Server -* PostgreSQL +Check the [SQL dialect list](doc/supported_sql_dialects.md) to learn which SQL dialects the JDBC adapter currently supports. + +This list will be continuously extended based on the feedback from our users. Each such implementation of a dialect handles three major aspects: + * How to **map the tables** in the source systems to virtual tables in Exasol, including how to **map the data types** to Exasol data types.
-* How is the **SQL syntax** of the data source, including identifier quoting, case-sensitivity, function names, or special syntax like `LIMIT`/`TOP`. +* What the **SQL syntax** of the data source looks like, including identifier quoting, case-sensitivity, function names, or special syntax like `LIMIT` / `TOP`. * Which **capabilities** are supported by the data source, e.g. whether it supports filters, select list expressions, aggregation or scalar functions, or ordering and limiting the result. In addition to the aforementioned dialects there is the so-called `GENERIC` dialect, which is designed to work with any JDBC driver. It derives the SQL dialect from the JDBC driver metadata. However, it does not support any capabilities and might fail if the data source has special syntax or data types, so it should only be used for evaluation purposes. -If you are interested in a introduction to virtual schemas please refer to the Exasol user manual. You can find it in the [download area of the Exasol user portal](https://www.exasol.com/portal/display/DOWNLOAD/6.0). +If you are interested in an introduction to virtual schemas, please refer to the Exasol user manual. You can find it in the [download area of the Exasol user portal](https://www.exasol.com/portal/display/DOC/Database+User+Manual). +## Before You Start + +Please note that the syntax for creating adapter scripts is not recognized by all SQL clients, [DBeaver](https://dbeaver.io/) for example. If you encounter such a problem, try a different client. ## Getting Started Before you can start using the JDBC adapter for virtual schemas you have to deploy the adapter and the JDBC driver of your data source in your Exasol database. -Please follow the [step-by-step deployment guide](doc/deploy-adapter.md). - +Please follow the [step-by-step deployment guide](doc/deploying_the_virtual_schema_adapter.md). ## Using the Adapter The following statements demonstrate how you can use virtual schemas with the JDBC adapter to connect to a Hive system. Please scroll down to see a list of all properties supported by the JDBC adapter. First we create a virtual schema using the JDBC adapter. The adapter will retrieve the metadata via JDBC and map them to virtual tables. The metadata (virtual tables, columns and data types) are then cached in Exasol. + ```sql CREATE CONNECTION hive_conn TO 'jdbc:hive2://localhost:10000/default' USER 'hive-usr' IDENTIFIED BY 'hive-pwd'; @@ -46,6 +57,7 @@ CREATE VIRTUAL SCHEMA hive USING adapter.jdbc_adapter WITH ``` We can now explore the tables in the virtual schema, just like for a regular schema: + ```sql OPEN SCHEMA hive; SELECT * FROM cat; @@ -53,40 +65,45 @@ DESCRIBE clicks; ``` And we can run arbitrary queries on the virtual tables: + ```sql SELECT count(*) FROM clicks; SELECT DISTINCT USER_ID FROM clicks; ``` -Behind the scenes the Exasol command `IMPORT FROM JDBC` will be executed to obtain the data needed from the data source to fulfil the query. The Exasol database interacts with the adapter to pushdown as much as possible to the data source (e.g. filters, aggregations or `ORDER BY`/`LIMIT`), while considering the capabilities of the data source. +Behind the scenes the Exasol command `IMPORT FROM JDBC` will be executed to obtain the data needed from the data source to fulfil the query. The Exasol database interacts with the adapter to push down as much as possible to the data source (e.g. filters, aggregations or `ORDER BY` / `LIMIT`), while considering the capabilities of the data source.
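If you want to see what actually gets pushed down for a given query, you can prepend `EXPLAIN VIRTUAL` to it, the same technique the dialect development guide uses for its automatic tests. Instead of executing the query, Exasol then shows the pushdown statement the adapter generated:

```sql
-- Shows the generated pushdown statement instead of executing the query
EXPLAIN VIRTUAL SELECT count(*) FROM clicks;
```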
Let's combine virtual and native tables in a query: -``` + +```sql SELECT * from clicks JOIN native_schema.users on clicks.userid = users.id; ``` You can refresh the schema's metadata, e.g. if tables were added in the remote system: + ```sql ALTER VIRTUAL SCHEMA hive REFRESH; ALTER VIRTUAL SCHEMA hive REFRESH TABLES t1 t2; -- refresh only these tables ``` -Or set properties. Depending on the adapter and the property you set this might update the metadata or not. In our example the metadata are affected, because afterwards the virtual schema will only expose two virtul tables. +Or set properties. Depending on the adapter and the property you set this might update the metadata or not. In our example the metadata are affected, because afterwards the virtual schema will only expose two virtual tables. + ```sql ALTER VIRTUAL SCHEMA hive SET TABLE_FILTER='CUSTOMERS, CLICKS'; ``` Finally you can unset properties: + ```sql ALTER VIRTUAL SCHEMA hive SET TABLE_FILTER=null; ``` Or drop the virtual schema: + ```sql DROP VIRTUAL SCHEMA hive CASCADE; ``` - ### Adapter Properties The following properties can be used to control the behavior of the JDBC adapter. As you see above, these properties can be defined in `CREATE VIRTUAL SCHEMA` or changed afterwards via `ALTER VIRTUAL SCHEMA SET`. Note that properties are always strings, like `TABLE_FILTER='T1,T2'`. @@ -129,14 +146,17 @@ Property | Value ## Debugging + To see all communication between the database and the adapter you can use the Python script `udf_debug.py` located in the [tools](tools) directory. First, start the `udf_debug.py` script, which will listen on the specified address and print all incoming text. -``` + +```sh python tools/udf_debug.py -s myhost -p 3000 ``` Then run the following SQL statement in your session to redirect all stdout and stderr from the adapter script to the `udf_debug.py` script we started before. + ```sql ALTER SESSION SET SCRIPT_OUTPUT_ADDRESS='host-where-udf-debug-script-runs:3000' ``` @@ -145,12 +165,23 @@ You have to make sure that Exasol can connect to the host running the `udf_debug ## Frequent Issues -* **Error: No suitable driver found for JDBC...**: The JDBC driver class was not discovered automatically. Either you have to add a `META-INF/services/java.sql.Driver` file with the classname to your jar, or you have to load the driver manually (see `JdbcMetadataReader.readRemoteMetadata()`). + +### Error: No suitable driver found for JDBC... + +The JDBC driver class was not discovered automatically. Either you have to add a `META-INF/services/java.sql.Driver` file with the class name to your JAR, or you have to load the driver manually (see `JdbcMetadataReader.readRemoteMetadata()`). + See https://docs.oracle.com/javase/7/docs/api/java/sql/DriverManager.html -* **Very slow execution of queries with SCRIPT_OUTPUT_ADDRESS**: If `SCRIPT_OUTPUT_ADDRESS` is set as explained in the [debugging section](#debugging), verify that a service is actually listening at that address. Otherwise, if Exasol can not establish a connection, repeated connection attempts can be the cause for slowdowns. -* **Very slow execution of queries**: Depending on which JDK version Exasol uses to execute Java user-defined functions, a blocking randomness source may be used by default. Especially cryptographic operations do not complete until the operating system has collected a sufficient amount of entropy. This problem seems to occur most often when Exasol is run in an isolated environment, e.g., a virtual machine or a container.
A solution is to use a non-blocking randomness source. - To do so, log in to EXAOperation and shutdown the database. Append `-etlJdbcJavaEnv -Djava.security.egd=/dev/./urandom` to the "Extra Database Parameters" input field and power the database on again. + +### Very Slow Execution of Queries With SCRIPT_OUTPUT_ADDRESS + +If `SCRIPT_OUTPUT_ADDRESS` is set as explained in the [debugging section](#debugging), verify that a service is actually listening at that address. Otherwise, if Exasol cannot establish a connection, repeated connection attempts can be the cause of slowdowns. + +### Very Slow Execution of Queries + +Depending on which JDK version Exasol uses to execute Java user-defined functions, a blocking random-number source may be used by default. Especially cryptographic operations do not complete until the operating system has collected a sufficient amount of entropy. This problem seems to occur most often when Exasol is run in an isolated environment, e.g., a virtual machine or a container. A solution is to use a non-blocking random-number source. + +To do so, log in to EXAOperation and shut down the database. Append `-etlJdbcJavaEnv -Djava.security.egd=/dev/./urandom` to the "Extra Database Parameters" input field and power the database on again. ## Developing New Dialects -If you want to contribute a new dialect please visit the guide [how to develop and test a dialect](doc/develop-dialect.md). +If you want to contribute a new dialect, please visit the guide [how to develop and test a dialect](doc/developing_an_sql_dialect.md). \ No newline at end of file diff --git a/jdbc-adapter/doc/deploy-adapter.md b/jdbc-adapter/doc/deploying_the_virtual_schema_adapter.md similarity index 51% rename from jdbc-adapter/doc/deploy-adapter.md rename to jdbc-adapter/doc/deploying_the_virtual_schema_adapter.md index f6eeb8e9f..2aefc8948 100644 --- a/jdbc-adapter/doc/deploy-adapter.md +++ b/jdbc-adapter/doc/deploying_the_virtual_schema_adapter.md @@ -1,56 +1,70 @@ -## Deploying the Adapter Step By Step +# Deploying the Adapter Step By Step Run the following steps to deploy your adapter: -### 1. Prerequisites: -* EXASOL >= 6.0 +## Prerequisites + +* Exasol Version 6.0 or later * Advanced edition (which includes the ability to execute adapter scripts), or Free Small Business Edition -* EXASOL must be able to connect to the host and port specified in the JDBC connection string. In case of problems you can use a [UDF to test the connectivity](https://www.exasol.com/support/browse/SOL-307). -* If the JDBC driver requires Kerberos authentication (e.g. for Hive or Impala), the EXASOL database will authenticate using a keytab file. Each EXASOL node needs access to port 88 of the the Kerberos KDC (key distribution center). +* Exasol must be able to connect to the host and port specified in the JDBC connection string. In case of problems you can use a [UDF to test the connectivity](https://www.exasol.com/support/browse/SOL-307). +* If the JDBC driver requires Kerberos authentication (e.g. for Hive or Impala), the Exasol database will authenticate using a keytab file. Each Exasol node needs access to port 88 of the Kerberos KDC (key distribution center). -### 2. Obtain Jar: ## Obtaining JAR Archives -First you have to obtain the so called fat jar (including all dependencies). +First you have to obtain the so-called fat JAR (including all dependencies). -The easiest way is to download the jar from the last [Release](https://github.com/EXASOL/virtual-schemas/releases).
+The easiest way is to download the JAR from the latest [Release](https://github.com/Exasol/virtual-schemas/releases). -Alternatively you can clone the repository and build the jar as follows: -``` -git clone https://github.com/EXASOL/virtual-schemas.git +Alternatively you can clone the repository and build the JAR as follows: + +```bash +git clone https://github.com/Exasol/virtual-schemas.git cd virtual-schemas/jdbc-adapter/ mvn clean -DskipTests package ``` -The resulting fat jar is stored in `virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar`. +The resulting fat JAR is stored in `virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-1.1.0.jar`. -### 3. Upload Adapter Jar ## Uploading the Adapter JAR Archive -You have to upload the jar of the adapter to a bucket of your choice in the EXASOL bucket file system (BucketFS). This will allow using the jar in the adapter script. +You have to upload the JAR of the adapter to a bucket of your choice in the Exasol bucket file system (BucketFS). This will allow using the JAR in the adapter script. The following steps are required to upload a file to a bucket: -* Make sure you have a bucket file system (BucketFS) and you know the port for either http or https. This can be done in EXAOperation under "EXABuckets". E.g. the id could be `bucketfs1` and the http port 2580. -* Check if you have a bucket in the BucketFS. Simply click on the name of the BucketFS in EXAOperation and add a bucket there, e.g. `bucket1`. Also make sure you know the write password. For simplicity we assume that the bucket is defined as a public bucket, i.e. it can be read by any script. -* Now upload the file into this bucket, e.g. using curl (adapt the hostname, BucketFS port, bucket name and bucket write password). -``` -curl -X PUT -T virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar \ - http://w:write-password@your.exasol.host.com:2580/bucket1/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar + +1. Make sure you have a bucket file system (BucketFS) and you know the port for either HTTP or HTTPS. + + This can be done in EXAOperation under "EXABuckets". E.g. the id could be `bucketfs1` and the HTTP port 2580. + +1. Check if you have a bucket in the BucketFS. Simply click on the name of the BucketFS in EXAOperation and add a bucket there, e.g. `bucket1`. + + Also make sure you know the write password. For simplicity we assume that the bucket is defined as a public bucket, i.e. it can be read by any script. + +1. Now upload the file into this bucket, e.g. using curl (adapt the hostname, BucketFS port, bucket name and bucket write password). + +```bash +curl -X PUT -T virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-1.1.0.jar \ + http://w:write-password@your.exasol.host.com:2580/bucket1/virtualschema-jdbc-adapter-dist-1.1.0.jar ``` See chapter 3.6.4. "The synchronous cluster file system BucketFS" in the EXASolution User Manual for more details about BucketFS. -### 4. Upload JDBC Driver Files ## Deploying JDBC Driver Files + +You have to upload the JDBC driver files of your remote database **twice**: -You have to upload the JDBC driver files of your remote database **two times**: -* Upload all files of the JDBC driver into a bucket of your choice, so that they can be accessed from the adapter script. This happens the same way as described above for the adapter jar. You can use the same bucket.
+* Upload all files of the JDBC driver into a bucket of your choice, so that they can be accessed from the adapter script. + This happens the same way as described above for the adapter JAR. You can use the same bucket. * Upload all files of the JDBC driver as a JDBC driver in EXAOperation - In EXAOperation go to Software -> JDBC Drivers - - Add the JDBC driver by specifying the jdbc main class and the prefix of the JDBC connection string + - Add the JDBC driver by specifying the JDBC main class and the prefix of the JDBC connection string - Upload all files (one by one) to the newly added JDBC driver. -Note that some JDBC drivers consist of several files and that you have to upload all of them. To find out which jar you need, consult the [supported dialects page](supported-dialects.md). +Note that some JDBC drivers consist of several files and that you have to upload all of them. To find out which JAR you need, consult the [supported dialects page](supported_sql_dialects.md). + +## Deploying the Adapter Script -### 5. Deploy Adapter Script Then run the following SQL commands to deploy the adapter in the database: + ```sql -- The adapter is simply a script. It has to be stored in any regular schema. CREATE SCHEMA adapter; @@ -61,7 +75,7 @@ CREATE JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS // This will add the adapter jar to the classpath so that it can be used inside the adapter script // Replace the names of the bucketfs and the bucket with the ones you used. - %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar; + %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.1.0.jar; // You have to add all files of the data source jdbc driver here (e.g. Hive JDBC driver files) %jar /buckets/your-bucket-fs/your-bucket/name-of-data-source-jdbc-driver.jar; diff --git a/jdbc-adapter/doc/develop-dialect.md b/jdbc-adapter/doc/develop-dialect.md deleted file mode 100644 index 99792f768..000000000 --- a/jdbc-adapter/doc/develop-dialect.md +++ /dev/null @@ -1,111 +0,0 @@ -# How To Develop and Test a Dialect -This page describes how you can develop and semi-automatically test a dialect for the JDBC adapter. The framework for testing a dialect is still work in progress. - -# Content -* [How To Develop a Dialect](#how-to-develop-a-dialect) -* [How To Start Integration Tests](#how-to-start-integration-tests) - -## How To Develop a Dialect -You can implement a dialect by implementing the interface `com.exasol.adapter.dialects.SqlDialect`. -We recommend to look at the following ressources to get started: -* First have a look at the [SqlDialect interface source code](../virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlDialect.java). You can start with the comments of the interface and have a look at the methods you can override. -* Second you can review the source code of one of the [dialect implementations](../virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl) as an inspiration. Ideally you should look at the dialect which is closest to your data source. -To implement a full dialect for a typical data source you have to run all of the following steps. We recommend to follow the order proposed here.
- -### Setup Data Source -* Setup and start the database -* Testdata: Create a test schema with a simple table (simple data types) - -### Setup EXASOL -* Setup and start an EXASOL database with virtual schemas feature -* Upload the JDBC drivers of the data source via EXAOperation -* Manual test: query data from the data source via `IMPORT FROM JDBC` - -### Catalog, Schema & Table Mapping -* Override the `SqlDialect` methods for catalog, schema and table mapping -* Manual test: create a virtual schema by specifying the catalog and/or schema. - -### Data Type Mapping -* Testdata: Create a table with all data types and at least one row of data -* Override the `SqlDialect` method for data type mapping -* Automatic test: sys tables show virtual table and columns with correctly mapped type -* Automatic test: running `SELECT` on the virtual table returns the expected result - -### Identifier Case Handling & Quoting -* Testdata: Create a schema/table/column with mixed case (if supported) -* Automatic test: sys tables correct -* Automatic test: `SELECT` works as expected - -### Projection Capability -* Add capability -* Automatic test: pushed down & correct result (incl. `EXPLAIN VIRTUAL`). Also test with mixed case columns. - -### Predicates and Literal Capabilities -* Add capabilities for supported literals and predicates (e.g. `c1='foo'`) -* Automatic test: pushed down & correct result (incl. `EXPLAIN VIRTUAL`) for all predicates & literals - -### Aggregation & Set Function Capabilities -* Add capabilities for aggregations and aggregation functions -* Automatic test: pushed down & correct result (incl. `EXPLAIN VIRTUAL`) for all set functions - -### Order By / Limit Capabilities -* Testdata: Create a table with null values and non-null values, to check null collation. -* Add capabilities for order by and/or limit -* Automatic test: pushed down & correct result (incl. `EXPLAIN VIRTUAL`) -* Automatic test: default null collation, explicit `NULLS FIRST/LAST` - -### Scalar Function Capabilities -* Add capabilities for scalar functions -* Automatic test: pushed down & correct result (incl. `EXPLAIN VIRTUAL`) - -### Views -* Testdata: Create a simple view, e.g. joining two existing tables -* Automatic test: Query the view, optionally e.g. with a filter. - - -## How To Start Integration Tests -We assume that you have a running EXASOL and data source database with all required test tables. - -We use following Maven phases for our integration tests: -* pre-integration-test phase is used to automatically deploy the latest jdbc adapter jar (based on your latest code modifications) -* integration-test phase is used to execute the actual integration tests - -Note that to check whether the integration-tests were successful, you have to run the verify Maven phase. - -You can start the integration tests as follows: -``` -mvn clean package && mvn verify -Pit -Dintegrationtest.configfile=/path/to/your/integration-test-config.yaml -``` - -This will run all integration tests, i.e. all junit tests with the suffix "IT" in the filename. The yaml configuration file stores the information for your test environment like jdbc connection strings, paths and credentials. - -## Java Remote Debugging of Adapter script - -When developing a new dialect it's sometimes really helpful to debug the deployed adapter script inside the database. -In a one node EXASOL environment setting up remote debugging is straight forward. 
-First define the following env directive in your adapter script: - -```sql -CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter - AS - - %env JAVA_TOOL_OPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,address=8000,suspend=y"; - - // This is the class implementing the callback method of the adapter script - %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - - // This will add the adapter jar to the classpath so that it can be used inside the adapter script - // Replace the names of the bucketfs and the bucket with the ones you used. - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-1.0.2-SNAPSHOT.jar; - - // You have to add all files of the data source jdbc driver here (e.g. MySQL or Hive) - - %jar /buckets/bucketfs1/bucket1/RedshiftJDBC42-1.2.1.1001.jar; - -/ -``` - -In eclipse (or any other Java IDE) you can then attach remotely to the Java Adapter using the IP of your one node EXASOL environment and the port 8000. -With `suspend=y` the Java-process will wait until the debugger connects to the Java UDF. - diff --git a/jdbc-adapter/doc/developing_an_sql_dialect.md b/jdbc-adapter/doc/developing_an_sql_dialect.md new file mode 100644 index 000000000..609d86af0 --- /dev/null +++ b/jdbc-adapter/doc/developing_an_sql_dialect.md @@ -0,0 +1,270 @@ +# How To Develop and Test a Dialect +This page describes how you can develop and semi-automatically test a dialect for the JDBC adapter. The framework for testing a dialect is still a work in progress. + +## Content + +* [Developing a Dialect](#developing-a-dialect) +* [Integration Testing](#integration-testing) + +## Developing a Dialect + +You can implement a dialect by implementing the interface `com.exasol.adapter.dialects.SqlDialect`. +We recommend looking at the following resources to get started: + +* First have a look at the [SqlDialect interface source code](../virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlDialect.java). You can start with the comments of the interface and have a look at the methods you can override. +* Second you can review the source code of one of the [dialect implementations](../virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl) as an inspiration. Ideally you should look at the dialect which is closest to your data source. + +To implement a full dialect for a typical data source you have to run all of the following steps. We recommend following the order proposed here. + +### Registering the Dialect + +The Virtual Schema adapter creates an instance of an SQL dialect on demand. You can pick any dialect that is listed in the `SqlDialects` registry. + +To register your new dialect, add it to the list in [sql_dialects.properties](../virtualschema-jdbc-adapter/src/main/resources/sql_dialects.properties). + +```properties +com.exasol.adapter.dialects.supported=\ +... +com.exasol.adapter.dialects.impl.MyAweSomeSqlDialect +``` + +For tests or in case you want to exclude existing dialects in certain scenarios you can override the contents of this file by setting the system property `com.exasol.adapter.dialects.supported`. + +Please also remember to [list the supported dialect in the documentation](../README.md).
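A minimal sketch of such an override, assuming you pass the system property via `JAVA_TOOL_OPTIONS` the same way the remote-debugging example further down does; the dialect class name is a placeholder:

```sql
CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS
  // Hypothetical override: only the dialect listed in the property gets registered
  %env JAVA_TOOL_OPTIONS="-Dcom.exasol.adapter.dialects.supported=com.exasol.adapter.dialects.impl.MyAweSomeSqlDialect";
  %scriptclass com.exasol.adapter.jdbc.JdbcAdapter;
  %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.1.0.jar;
/
```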
+ +### Setup Data Source + +* Setup and start the database +* Testdata: Create a test schema with a simple table (simple data types) + +### Setup Exasol + +* Setup and start an Exasol database with virtual schemas feature +* Upload the JDBC drivers of the data source via EXAOperation +* Manual test: query data from the data source via `IMPORT FROM JDBC` + +### Catalog, Schema & Table Mapping + +* Override the `SqlDialect` methods for catalog, schema and table mapping +* Manual test: create a virtual schema by specifying the catalog and/or schema. + +### Data Type Mapping + +* Testdata: Create a table with all data types and at least one row of data +* Override the `SqlDialect` method for data type mapping +* Automatic test: sys tables show virtual table and columns with correctly mapped type +* Automatic test: running `SELECT` on the virtual table returns the expected result + +### Identifier Case Handling & Quoting + +* Testdata: Create a schema/table/column with mixed case (if supported) +* Automatic test: sys tables correct +* Automatic test: `SELECT` works as expected + +### Projection Capability + +* Add capability +* Automatic test: pushed down & correct result (incl. `EXPLAIN VIRTUAL`). Also test with mixed case columns. + +### Predicates and Literal Capabilities + +* Add capabilities for supported literals and predicates (e.g. `c1='foo'`) +* Automatic test: pushed down & correct result (incl. `EXPLAIN VIRTUAL`) for all predicates & literals + +### Aggregation & Set Function Capabilities + +* Add capabilities for aggregations and aggregation functions +* Automatic test: pushed down & correct result (incl. `EXPLAIN VIRTUAL`) for all set functions + +### Order By / Limit Capabilities + +* Testdata: Create a table with null values and non-null values, to check null collation. +* Add capabilities for order by and/or limit +* Automatic test: pushed down & correct result (incl. `EXPLAIN VIRTUAL`) +* Automatic test: default null collation, explicit `NULLS FIRST/LAST` + +### Scalar Function Capabilities + +* Add capabilities for scalar functions +* Automatic test: pushed down & correct result (incl. `EXPLAIN VIRTUAL`) + +### Views + +* Testdata: Create a simple view, e.g. joining two existing tables +* Automatic test: Query the view, optionally e.g. with a filter. + +## Integration Testing + +### Security Considerations + +Please note that in the course of the integration tests you need to provide the test framework with access rights and credentials to the source database. + +In order not to create security issues: + +* Make sure the data in the source database is not confidential (demo data only) +* Don't reuse credentials +* Don't check in credentials + +### Prerequisites + +* Exasol running +* Exasol accessible from within integration test environment +* Source database running +* Source database accessible from within integration test environment +* Test data loaded into source database +* [BucketFS HTTP port listening and reachable](https://www.exasol.com/support/browse/SOL-503?src=confmacro) (e.g. on port 2580) + + ![BucketFS on port 2580](images/Screenshot_BucketFS_default_service.png) + +* Bucket on BucketFS prepared for holding JDBC drivers and virtual schema adapter + + ![Integration test bucket](images/Screenshot_bucket_for_JARs.png) + +* JDBC driver JAR archives available for databases against which to run integration tests + +If BucketFS is new to you, there are nice [training videos on BucketFS](https://www.exasol.com/portal/display/TRAINING/BucketFS) available. 
+ +### Preparing Integration Test + +1. Create a dedicated user in the source database that has the necessary access privileges +2. Create credentials for the user under which the integration tests run at the source +3. Make a local copy of the [sample integration test configuration file](../integration-test-data/integration-test-sample.yaml) in a place where you don't accidentally check this file in. +4. Edit the credentials information +5. [Deploy the JDBC driver(s)](deploying_the_virtual_schema_adapter.md#deploying-jdbc-driver-files) to the prepared bucket in Exasol's BucketFS + +#### Creating Your own Integration Test Configuration + +Directories called `local` are ignored by Git, so you can place your configuration there and avoid having it checked in. + +In the root directory of the adapter sources execute the following commands: + +```bash +mkdir jdbc-adapter/local +cp jdbc-adapter/integration-test-data/integration-test-sample.yaml jdbc-adapter/local/integration-test-config.yaml +``` + +Now edit the file `jdbc-adapter/local/integration-test-config.yaml` to adapt the settings to your local installation. + +### Executing Integration Tests + +We use following [Maven lifecycle phases](https://maven.apache.org/guides/introduction/introduction-to-the-lifecycle.html) for our integration tests: + +* `pre-integration-test` phase is used to **automatically deploy the latest [JDBC](https://www.exasol.com/support/secure/attachment/66315/EXASOL_JDBC-6.1.rc1.tar.gz) adapter JAR** (based on your latest code modifications) +* `integration-test` phase is used to execute the actual integration tests + +Note that to check whether the integration-tests were successful, you have to run the verify Maven phase. + +You can start the integration tests as follows: + +```bash +mvn clean package && mvn verify -Pit -Dintegrationtest.configfile=/path/to/your/integration-test-config.yaml +``` + +This will run all integration tests, i.e. all JUnit tests with the suffix `IT` in the filename. + +The YAML configuration file stores the information for your test environment like JDBC connection strings, paths and credentials. + +## Observing Adapter Output + +You can either use [netcat](http://netcat.sourceforge.net/) or `exaoutput.py` from the [EXASolution Python Package](https://github.com/EXASOL/python-exasol). Since netcat is available on most Linux machines anyway, we will use this in the description here. + +First start netcat in listen-mode on a free TCP port on your machine. + +```bash +nc -lkp 3000 +``` + +The `-l` switch puts netcat into listen-mode. `-k` tells it to stay open after the peer closed a connection. `-p 3000` set the number of the TCP port netcat listens on. + +Next find out your IP address. + +Linux: + +```bash +ip -br address +``` + +Windows: + +```cmd +ipconfig /all +``` + +The next SQL command shows an example of declaring a virtual schema. Notice the IP address and port in the last line. This tells the adapter script where to direct the output to. + +```sql +CREATE VIRTUAL SCHEMA VS_EXA_IT +USING ADAPTER.JDBC_ADAPTER +WITH CONNECTION_STRING='jdbc:exa:localhost:8563' USERNAME='sys' PASSWORD='exasol' + SCHEMA_NAME='NATIVE_EXA_IT' SQL_DIALECT='EXASOL' IS_LOCAL='true' + DEBUG_ADDRESS='10.44.1.228:3000' LOG_LEVEL='ALL'; +``` + +The parameter LOG_LEVEL lets you pick a log level as defined in [java.util.logging.Level](https://docs.oracle.com/javase/8/docs/api/java/util/logging/Level.html). 
The recommended standard log levels are: * `INFO` in production * `ALL` for in-depth debugging You can tell that the connection works if you see the following message after executing the SQL command that installs a virtual schema: Attached to output service ## Java Remote Debugging of the Adapter Script When developing a new dialect it is sometimes really helpful to debug the deployed adapter script inside the database. In a single-node Exasol environment, setting up remote debugging is straightforward. First define the following `env` directive in your adapter script: ```sql CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS %env JAVA_TOOL_OPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,address=8000,suspend=y"; // This is the class implementing the callback method of the adapter script %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; // This will add the adapter jar to the classpath so that it can be used inside the adapter script // Replace the names of the bucketfs and the bucket with the ones you used. %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.1.0.jar; // You have to add all files of the data source jdbc driver here (e.g. MySQL or Hive) %jar /buckets/bucketfs1/bucket1/RedshiftJDBC42-1.2.1.1001.jar; / ``` In Eclipse (or any other Java IDE) you can then attach remotely to the Java adapter using the IP of your single-node Exasol environment and port 8000. The switch `suspend=y` tells the Java process to wait until the debugger connects to the Java UDF. ## Version Management All dialects have the same version as the master project. In the master `pom.xml` file a property called `product-version` is set. Use this as the artifact version number in the JDBC adapter and all dialects. Run the script ```bash jdbc-adapter/tools/version.sh verify ``` to check that all documentation and templates reference the same version number. This script is also used as a build breaker in the continuous integration script. To update documentation files run ```bash jdbc-adapter/tools/version.sh unify ``` Note that the script must be run from the root directory of the virtual schema project. ## Troubleshooting ### Setting the Right IP Addresses for Database Connections Keep in mind that the adapter script is deployed in the Exasol database. If you want it to be able to make connections to other databases, you need to make sure that the IP addresses or host names are the ones that the database sees, not your local machine. This is easily forgotten in case of automated integration tests since it feels like they run on your machine -- which is only partially true. So a common source of error would be to specify `localhost` or `127.0.0.1` as the address of the remote database in case you have it running in Docker or a VM on your local machine. But the Exasol database cannot reach the other database there unless it is running on the same machine directly (i.e. not behind a virtual network device).
\ No newline at end of file diff --git a/jdbc-adapter/doc/images/Screenshot_BucketFS_default_service.png b/jdbc-adapter/doc/images/Screenshot_BucketFS_default_service.png new file mode 100644 index 000000000..24b249da7 Binary files /dev/null and b/jdbc-adapter/doc/images/Screenshot_BucketFS_default_service.png differ diff --git a/jdbc-adapter/doc/images/Screenshot_bucket_for_JARs.png b/jdbc-adapter/doc/images/Screenshot_bucket_for_JARs.png new file mode 100644 index 000000000..a9b02f2f8 Binary files /dev/null and b/jdbc-adapter/doc/images/Screenshot_bucket_for_JARs.png differ diff --git a/jdbc-adapter/doc/sql_dialects/db2.md b/jdbc-adapter/doc/sql_dialects/db2.md new file mode 100644 index 000000000..7f27bf9b1 --- /dev/null +++ b/jdbc-adapter/doc/sql_dialects/db2.md @@ -0,0 +1,77 @@ +# DB2 SQL Dialect DB2 was tested with the IBM DB2 JCC Drivers that come with DB2 LUW V10.1 and V11. As these drivers didn't have any major changes in the past years, any DB2 driver should work (back to V9.1). The driver comes with two different implementations, `db2jcc.jar` and `db2jcc4.jar`. All tests were made with the `db2jcc4.jar`. Additionally, there are two license files for the DB2 driver: * `db2jcc_license_cu.jar` - License file for DB2 on Linux, Unix and Windows * `db2jcc_license_cisuz.jar` - License file for DB2 on z/OS (mainframe) Make sure that you upload the necessary license file for the target platform you want to connect to. ## Supported Capabilities The DB2 dialect handles some casts with regard to time data types and functions. Casting of Data Types * `TIMESTAMP` and `TIMESTAMP(x)` will be cast to `VARCHAR` to not lose precision. * `VARCHAR` and `CHAR` for bit data will be cast to a hex string with double the original size * `TIME` will be cast to `VARCHAR(8)` * `XML` will be cast to `VARCHAR(DB2_MAX_LENGTH)` * `BLOB` is not supported Casting of Functions * `LIMIT` will be replaced by `FETCH FIRST x ROWS ONLY` * `OFFSET` is currently not supported as only DB2 V11 supports this natively * `ADD_DAYS`, `ADD_WEEKS` ... will be replaced by `COLUMN + DAYS`, `COLUMN + ....` ## JDBC Driver You have to specify the following settings when adding the JDBC driver via EXAOperation: * Name: `DB2` * Main: `com.ibm.db2.jcc.DB2Driver` * Prefix: `jdbc:db2:` ## Adapter Script ```sql CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS // This is the class implementing the callback method of the adapter script %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; // This will add the adapter jar to the classpath so that it can be used inside the adapter script // Replace the names of the bucketfs and the bucket with the ones you used. %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.1.0.jar; // DB2 Driver files %jar /buckets/bucketfs1/bucket1/db2jcc4.jar; %jar /buckets/bucketfs1/bucket1/db2jcc_license_cu.jar; // uncomment for mainframe connection and upload db2jcc_license_cisuz.jar; //%jar /buckets/bucketfs1/bucket1/db2jcc_license_cisuz.jar; / ``` ## Creating a Virtual Schema You can now create a virtual schema as follows: ```sql CREATE OR REPLACE CONNECTION DB2_CON TO 'jdbc:db2://host:port/database' USER 'db2-usr' IDENTIFIED BY 'db2-pwd'; CREATE VIRTUAL SCHEMA db2 USING adapter.jdbc_adapter WITH SQL_DIALECT = 'DB2' CONNECTION_NAME = 'DB2_CON' SCHEMA_NAME = '<SCHEMA>' ; ``` `<SCHEMA>` has to be replaced by the actual DB2 schema you want to connect to.
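Since the dialect replaces `LIMIT` with `FETCH FIRST x ROWS ONLY` (see the capability notes above), you can observe the rewrite without executing the query; `my_table` is a placeholder for one of your virtual tables:

```sql
-- The pushdown statement shown should contain FETCH FIRST 10 ROWS ONLY instead of LIMIT
EXPLAIN VIRTUAL SELECT * FROM db2.my_table LIMIT 10;
```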
+ +## Running the DB2 Integration Tests + +A how to has been included in the [setup sql file](../../integration-test-data/db2-testdata.sql) \ No newline at end of file diff --git a/jdbc-adapter/doc/sql_dialects/exasol.md b/jdbc-adapter/doc/sql_dialects/exasol.md new file mode 100644 index 000000000..1dab6e236 --- /dev/null +++ b/jdbc-adapter/doc/sql_dialects/exasol.md @@ -0,0 +1,48 @@ +# Exasol SQL Dialect + +## Supported Capabilities + +The Exasol SQL dialect supports all capabilities that are supported by the virtual schema framework. + +## JDBC Driver + +Connecting to an Exasol database is the simplest way to start with virtual schemas. +You don't have to install any JDBC driver, because it is already installed in the Exasol database and also included in the jar of the JDBC adapter. + +## Adapter Script + +After uploading the adapter jar, the adapter script can be created as follows: + +```sql +CREATE SCHEMA adapter; +CREATE JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS + %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; + %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.1.0.jar; +/ +``` + +## Creating a Virtual Schema + +```sql +CREATE CONNECTION exasol_conn TO 'jdbc:exa:exasol-host:1234' USER 'user' IDENTIFIED BY 'pwd'; + +CREATE VIRTUAL SCHEMA virtual_exasol USING adapter.jdbc_adapter WITH + SQL_DIALECT = 'EXASOL' + CONNECTION_NAME = 'EXASOL_CONN' + SCHEMA_NAME = 'default'; +``` + +## Using IMPORT FROM EXA Instead of IMPORT FROM JDBC + +Exasol provides the faster and parallel `IMPORT FROM EXA` command for loading data from Exasol. You can tell the adapter to use this command instead of `IMPORT FROM JDBC` by setting the `IMPORT_FROM_EXA` property. In this case you have to provide the additional `EXA_CONNECTION_STRING` which is the connection string used for the internally used `IMPORT FROM EXA` command (it also supports ranges like `192.168.6.11..14:8563`). Please note, that the `CONNECTION` object must still have the JDBC connection string in `AT`, because the Adapter Script uses a JDBC connection to obtain the metadata when a schema is created or refreshed. For the internally used `IMPORT FROM EXA` statement, the address from `EXA_CONNECTION_STRING` and the user name and password from the connection will be used. + +```sql +CREATE CONNECTION exasol_conn TO 'jdbc:exa:exasol-host:1234' USER 'user' IDENTIFIED BY 'pwd'; + +CREATE VIRTUAL SCHEMA virtual_exasol USING adapter.jdbc_adapter WITH + SQL_DIALECT = 'EXASOL' + CONNECTION_NAME = 'EXASOL_CONN' + SCHEMA_NAME = 'default' + IMPORT_FROM_EXA = 'true' + EXA_CONNECTION_STRING = 'exasol-host:1234'; +``` \ No newline at end of file diff --git a/jdbc-adapter/doc/sql_dialects/hive.md b/jdbc-adapter/doc/sql_dialects/hive.md new file mode 100644 index 000000000..1062584f7 --- /dev/null +++ b/jdbc-adapter/doc/sql_dialects/hive.md @@ -0,0 +1,106 @@ +# Hive SQL Dialect + +## JDBC Driver + +The dialect was tested with the Cloudera Hive JDBC driver available on the [Cloudera downloads page](http://www.cloudera.com/downloads). The driver is also available directly from [Simba technologies](http://www.simba.com/), who developed the driver. + +When you unpack the JDBC driver archive you will see that there are two variants, JDBC 4.0 and 4.1. We tested with the JDBC 4.1 variant. 
You have to specify the following settings when adding the JDBC driver via EXAOperation: * Name: `Hive` * Main: `com.cloudera.hive.jdbc41.HS2Driver` * Prefix: `jdbc:hive2:` Make sure you upload **all files** of the JDBC driver (over 10 at the time of writing) in EXAOperation **and** to the bucket. ## Adapter Script You have to add all files of the JDBC driver to the classpath using `%jar` as follows (filenames may vary): ```sql CREATE SCHEMA adapter; CREATE JAVA ADAPTER SCRIPT jdbc_adapter AS %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.1.0.jar; %jar /buckets/bucketfs1/bucket1/hive_metastore.jar; %jar /buckets/bucketfs1/bucket1/hive_service.jar; %jar /buckets/bucketfs1/bucket1/HiveJDBC41.jar; %jar /buckets/bucketfs1/bucket1/libfb303-0.9.0.jar; %jar /buckets/bucketfs1/bucket1/libthrift-0.9.0.jar; %jar /buckets/bucketfs1/bucket1/log4j-1.2.14.jar; %jar /buckets/bucketfs1/bucket1/ql.jar; %jar /buckets/bucketfs1/bucket1/slf4j-api-1.5.11.jar; %jar /buckets/bucketfs1/bucket1/slf4j-log4j12-1.5.11.jar; %jar /buckets/bucketfs1/bucket1/TCLIServiceClient.jar; %jar /buckets/bucketfs1/bucket1/zookeeper-3.4.6.jar; / ``` ### Creating a Virtual Schema ```sql CREATE CONNECTION hive_conn TO 'jdbc:hive2://hive-host:10000' USER 'hive-usr' IDENTIFIED BY 'hive-pwd'; CREATE VIRTUAL SCHEMA hive_default USING adapter.jdbc_adapter WITH SQL_DIALECT = 'HIVE' CONNECTION_NAME = 'HIVE_CONN' SCHEMA_NAME = 'default'; ``` ### Connecting to a Kerberos Secured Hadoop Connecting to a Kerberos secured Impala or Hive service only differs in one aspect: You have to create a `CONNECTION` object which contains all the relevant information for the Kerberos authentication. This section describes how Kerberos authentication works and how to create such a `CONNECTION`. #### Understanding How It Works (Optional) Both the adapter script and the internally used `IMPORT FROM JDBC` statement support Kerberos authentication. They detect that the connection is a Kerberos connection by a special prefix in the `IDENTIFIED BY` field. In that case, the authentication will happen using a Kerberos keytab and Kerberos config file (using the JAAS Java API). The `CONNECTION` object stores all relevant information and files in its fields: * The `TO` field contains the JDBC connection string * The `USER` field contains the Kerberos principal * The `IDENTIFIED BY` field contains the Kerberos configuration file and keytab file (base64 encoded) along with an internal prefix `ExaAuthType=Kerberos;` to identify the `CONNECTION` as a Kerberos `CONNECTION`. #### Generating the CREATE CONNECTION Statement In order to simplify the creation of Kerberos `CONNECTION` objects, the [`create_kerberos_conn.py`](https://github.com/EXASOL/hadoop-etl-udfs/blob/master/tools/create_kerberos_conn.py) Python script has been provided.
The script requires 5 arguments: * `CONNECTION` name (arbitrary name for the new `CONNECTION`) * Kerberos principal for Hadoop (i.e., Hadoop user) * Kerberos configuration file path (e.g., `krb5.conf`) * Kerberos keytab file path, which contains keys for the Kerberos principal * JDBC connection string Example command: ```sh python tools/create_kerberos_conn.py krb_conn krbuser@EXAMPLE.COM /etc/krb5.conf ./krbuser.keytab \ 'jdbc:hive2://hive-host.example.com:10000;AuthMech=1;KrbRealm=EXAMPLE.COM;KrbHostFQDN=hive-host.example.com;KrbServiceName=hive' ``` Output: ```sql CREATE CONNECTION krb_conn TO 'jdbc:hive2://hive-host.example.com:10000;AuthMech=1;KrbRealm=EXAMPLE.COM;KrbHostFQDN=hive-host.example.com;KrbServiceName=hive' USER 'krbuser@EXAMPLE.COM' IDENTIFIED BY 'ExaAuthType=Kerberos;enp6Cg==;YWFhCg==' ``` #### Creating the CONNECTION You have to execute the generated `CREATE CONNECTION` statement directly in Exasol to actually create the Kerberos `CONNECTION` object. For more detailed information about the script, use the help option: ```sh python tools/create_kerberos_conn.py -h ``` #### Using the Connection When Creating a Virtual Schema You can now create a virtual schema using the Kerberos connection created before. ```sql CREATE VIRTUAL SCHEMA hive_default USING adapter.jdbc_adapter WITH SQL_DIALECT = 'HIVE' CONNECTION_NAME = 'KRB_CONN' SCHEMA_NAME = 'default'; ``` \ No newline at end of file diff --git a/jdbc-adapter/doc/sql_dialects/impala.md b/jdbc-adapter/doc/sql_dialects/impala.md new file mode 100644 index 000000000..18aa2daed --- /dev/null +++ b/jdbc-adapter/doc/sql_dialects/impala.md @@ -0,0 +1,54 @@ +# Impala SQL Dialect The Impala dialect is similar to the Hive dialect in most aspects. For this reason we only highlight the differences in this section. ## JDBC Driver You have to specify the following settings when adding the JDBC driver via EXAOperation: * Name: `Impala` * Main: `com.cloudera.impala.jdbc41.Driver` * Prefix: `jdbc:impala:` Make sure you upload **all files** of the JDBC driver (over 10 at the time of writing) in EXAOperation and to the bucket.
+ +## Adapter script + +The adapter can be created similar to Hive: + +```sql + +CREATE SCHEMA adapter; +CREATE JAVA ADAPTER SCRIPT jdbc_adapter AS + %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; + + %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.1.0.jar; + + %jar /buckets/bucketfs1/bucket1/hive_metastore.jar; + %jar /buckets/bucketfs1/bucket1/hive_service.jar; + %jar /buckets/bucketfs1/bucket1/ImpalaJDBC41.jar; + %jar /buckets/bucketfs1/bucket1/libfb303-0.9.0.jar; + %jar /buckets/bucketfs1/bucket1/libthrift-0.9.0.jar; + %jar /buckets/bucketfs1/bucket1/log4j-1.2.14.jar; + %jar /buckets/bucketfs1/bucket1/ql.jar; + %jar /buckets/bucketfs1/bucket1/slf4j-api-1.5.11.jar; + %jar /buckets/bucketfs1/bucket1/slf4j-log4j12-1.5.11.jar; + %jar /buckets/bucketfs1/bucket1/TCLIServiceClient.jar; + %jar /buckets/bucketfs1/bucket1/zookeeper-3.4.6.jar; +/ +``` + +## Creating a Virtual Schema + +You can now create a virtual schema as follows: + +```sql +CREATE CONNECTION impala_conn TO 'jdbc:impala://impala-host:21050' USER 'impala-usr' IDENTIFIED BY 'impala-pwd'; + +CREATE VIRTUAL SCHEMA impala_default USING adapter.jdbc_adapter WITH + SQL_DIALECT = 'IMPALA' + CONNECTION_NAME = 'IMPALA_CONN' + SCHEMA_NAME = 'default'; +``` + +Connecting to a Kerberos secured Impala works similar as for Hive and is described in the section [Connecting To a Kerberos Secured Hadoop](hive.md#connecting-to-a-kerberos-secured-hadoop). \ No newline at end of file diff --git a/jdbc-adapter/doc/sql_dialects/oracle.md b/jdbc-adapter/doc/sql_dialects/oracle.md new file mode 100644 index 000000000..0265f78e0 --- /dev/null +++ b/jdbc-adapter/doc/sql_dialects/oracle.md @@ -0,0 +1,111 @@ +# Oracle SQL Dialect + +## Supported capabilities + +The Oracle dialect does not support all capabilities. A complete list can be found in [OracleSqlDialect.getCapabilities()](../../virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/OracleSqlDialect.java). + +Oracle data types are mapped to their equivalents in Exasol. The following exceptions apply: + +- `NUMBER`, `NUMBER with precision > 36` and `LONG` are casted to `VARCHAR` to prevent a loss of precision. +- `DATE` is casted to `TIMESTAMP`. This data type is only supported for positive year values, i.e., years > 0001. +- `TIMESTAMP WITH [LOCAL] TIME ZONE` is casted to `VARCHAR`. Exasol does not support timestamps with time zone information. +- `INTERVAL` is casted to `VARCHAR`. +- `CLOB`, `NCLOB` and `BLOB` are casted to `VARCHAR`. +- `RAW` and `LONG RAW` are not supported. + +## JDBC Driver + +To setup a virtual schema that communicates with an Oracle database using JDBC, the JDBC driver, e.g., `ojdbc7-12.1.0.2.jar`, must first be installed in EXAoperation and deployed to BucketFS; see [this article](https://www.exasol.com/support/browse/SOL-179#WhichJDBCdriverforOracleshallIuse?) and [Deploying the Adapter Step By Step](deploying_the_virtual_schema_adapter.md) for instructions. + +## Adapter Script + +After uploading the adapter jar we are ready to create an Oracle adapter script. Adapt the following script as indicated. + +```sql +CREATE SCHEMA adapter; +CREATE JAVA ADAPTER SCRIPT adapter.jdbc_oracle AS + %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; + + // You need to replace `your-bucket-fs` and `your-bucket` to match the actual location + // of the adapter jar. 
%jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.1.0.jar; // Add the Oracle JDBC driver to the classpath %jar /buckets/bucketfs1/bucket1/ojdbc7-12.1.0.2.jar; / ``` ## JDBC Connection Next, create a JDBC connection to your Oracle database. Adjust the properties to match your environment. ```sql CREATE CONNECTION jdbc_oracle TO 'jdbc:oracle:thin:@//<host>:<port>/<service name>' USER '<username>' IDENTIFIED BY '<password>'; ``` A quick option to test the `JDBC_ORACLE` connection is to run an `IMPORT FROM JDBC` query. The connection works if `42` is returned. ```sql IMPORT FROM JDBC AT jdbc_oracle STATEMENT 'SELECT 42 FROM DUAL'; ``` ### Creating a Virtual Schema Having created both a JDBC adapter script and a JDBC Oracle connection, we are ready to create a virtual schema. Insert the name of the schema that you want to expose in Exasol. ```sql CREATE VIRTUAL SCHEMA virt_oracle USING adapter.jdbc_oracle WITH SQL_DIALECT = 'ORACLE' CONNECTION_NAME = 'JDBC_ORACLE' SCHEMA_NAME = '<schema name>'; ``` ## Using IMPORT FROM ORA Instead of IMPORT FROM JDBC Exasol provides the `IMPORT FROM ORA` command for loading data from Oracle. It is possible to create a virtual schema that uses `IMPORT FROM ORA` instead of JDBC to communicate with Oracle. Both options are intended to support the same features. `IMPORT FROM ORA` almost always offers better performance since it is implemented natively. This behavior is toggled by the Boolean `IMPORT_FROM_ORA` variable. Note that a JDBC connection to Oracle is still required to fetch metadata. In addition, a "direct" connection to the Oracle database is needed. ### Deploying the Oracle Instant Client To be able to communicate with Oracle, you first need to supply Exasol with the Oracle Instant Client, which can be obtained [directly from Oracle](http://www.oracle.com/technetwork/database/database-technologies/instant-client/overview/index.html). Open EXAOperation, visit Software -> "Upload Oracle Instant Client" and select the downloaded package. The latest version of Oracle Instant Client we tested is `instantclient-basic-linux.x64-12.1.0.2.0`. ### Creating an Oracle Connection Having deployed the Oracle Instant Client, a connection to your Oracle database can be set up. ```sql CREATE CONNECTION conn_oracle TO '(DESCRIPTION = (ADDRESS_LIST = (ADDRESS = (PROTOCOL = TCP) (HOST = <host>) (PORT = <port>))) (CONNECT_DATA = (SERVER = DEDICATED) (SERVICE_NAME = <service name>)))' USER '<username>' IDENTIFIED BY '<password>'; ``` This connection can be tested using, e.g., the following SQL expression. ```sql IMPORT FROM ORA at CONN_ORACLE STATEMENT 'SELECT 42 FROM DUAL'; ``` ### Creating a Virtual Schema Assuming you already set up the JDBC connection `JDBC_ORACLE` as shown in the previous section, you can continue with creating the virtual schema. ```sql CREATE VIRTUAL SCHEMA virt_import_oracle USING adapter.jdbc_oracle WITH SQL_DIALECT = 'ORACLE' CONNECTION_NAME = 'JDBC_ORACLE' SCHEMA_NAME = '<schema name>' IMPORT_FROM_ORA = 'true' EXA_CONNECTION_NAME = 'CONN_ORACLE'; ``` \ No newline at end of file diff --git a/jdbc-adapter/doc/sql_dialects/postgresql.md b/jdbc-adapter/doc/sql_dialects/postgresql.md new file mode 100644 index 000000000..e35cc2413 --- /dev/null +++ b/jdbc-adapter/doc/sql_dialects/postgresql.md @@ -0,0 +1,37 @@ +# PostgreSQL SQL Dialect ## JDBC Driver The PostgreSQL dialect was tested with JDBC driver version 42.0.0 and PostgreSQL 9.6.2.
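The virtual schema example below references a connection named `POSTGRES_DOCKER`. A sketch of how such a connection could be created; host, port, database name, and credentials are placeholders you have to adapt:

```sql
CREATE CONNECTION postgres_docker
  TO 'jdbc:postgresql://postgres-host:5432/postgres'
  USER 'postgres-usr'
  IDENTIFIED BY 'postgres-pwd';
```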
+
+## Adapter Script
+
+```sql
+CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter
+  AS
+
+  // This is the class implementing the callback method of the adapter script
+  %scriptclass com.exasol.adapter.jdbc.JdbcAdapter;
+
+  // This will add the adapter jar to the classpath so that it can be used inside the adapter script
+  // Replace the names of the bucketfs and the bucket with the ones you used.
+  %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.1.0.jar;
+
+  // You have to add all files of the data source jdbc driver here (e.g. MySQL or Hive)
+  %jar /buckets/bucketfs1/bucket1/postgresql-42.0.0.jar;
+
+/
+```
+
+## Creating a Virtual Schema
+
+```sql
+CREATE VIRTUAL SCHEMA postgres
+    USING adapter.jdbc_adapter
+    WITH
+    SQL_DIALECT = 'POSTGRESQL'
+    CATALOG_NAME = 'postgres'
+    SCHEMA_NAME = 'public'
+    CONNECTION_NAME = 'POSTGRES_DOCKER'
+    ;
+```
\ No newline at end of file
diff --git a/jdbc-adapter/doc/sql_dialects/redshift.md b/jdbc-adapter/doc/sql_dialects/redshift.md
new file mode 100644
index 000000000..21c153da9
--- /dev/null
+++ b/jdbc-adapter/doc/sql_dialects/redshift.md
@@ -0,0 +1,44 @@
+# Redshift SQL Dialect
+
+## JDBC Driver
+
+You have to specify the following settings when adding the JDBC driver via EXAOperation:
+* Name: `REDSHIFT`
+* Main: `com.amazon.redshift.jdbc.Driver`
+* Prefix: `jdbc:redshift:`
+* Files: `RedshiftJDBC42-1.2.1.1001.jar`
+
+Please also upload the driver jar into a bucket for the adapter script.
+
+## Adapter Script
+
+```sql
+CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter
+  AS
+
+  // This is the class implementing the callback method of the adapter script
+  %scriptclass com.exasol.adapter.jdbc.JdbcAdapter;
+
+  // This will add the adapter jar to the classpath so that it can be used inside the adapter script
+  // Replace the names of the bucketfs and the bucket with the ones you used.
+  %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.1.0.jar;
+
+  // You have to add all files of the data source jdbc driver here (e.g. MySQL or Hive)
+
+  %jar /buckets/bucketfs1/bucket1/RedshiftJDBC42-1.2.1.1001.jar;
+
+/
+```
+
+## Creating a Virtual Schema
+
+```sql
+CREATE VIRTUAL SCHEMA redshift_tickit
+    USING adapter.jdbc_adapter
+    WITH
+    SQL_DIALECT = 'REDSHIFT'
+    CONNECTION_NAME = 'REDSHIFT_CONNECTION'
+    CATALOG_NAME = 'database_name'
+    SCHEMA_NAME = 'public'
+    ;
+```
\ No newline at end of file
diff --git a/jdbc-adapter/doc/sql_dialects/sql_server.md b/jdbc-adapter/doc/sql_dialects/sql_server.md
new file mode 100644
index 000000000..41a2efeac
--- /dev/null
+++ b/jdbc-adapter/doc/sql_dialects/sql_server.md
@@ -0,0 +1,37 @@
+# SQL Server SQL Dialect
+
+## JDBC Driver
+
+The SQL Server dialect was tested with the jTDS 1.3.1 JDBC driver and SQL Server 2014.
+Since the jTDS driver is pre-installed for the `IMPORT` command itself, you only need
+to upload `jtds.jar` to a bucket for the adapter script.
+
+## Adapter Script
+
+```sql
+CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.sql_server_jdbc_adapter
+  AS
+
+  // This is the class implementing the callback method of the adapter script
+  %scriptclass com.exasol.adapter.jdbc.JdbcAdapter;
+
+  // This will add the adapter jar to the classpath so that it can be used inside the adapter script
+  // Replace the names of the bucketfs and the bucket with the ones you used.
+  %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.1.0.jar;
+
+  // You have to add all files of the data source jdbc driver here
+  %jar /buckets/bucketfs1/bucket1/jtds.jar;
+/
+```
+
+## Creating a Virtual Schema
+
+```sql
+CREATE VIRTUAL SCHEMA VS_SQLSERVER USING adapter.sql_server_jdbc_adapter
+WITH
+  SQL_DIALECT = 'SQLSERVER'
+  CONNECTION_NAME = 'SQLSERVER_CONNECTION'
+  CATALOG_NAME = 'MyDatabase'
+  SCHEMA_NAME = 'dbo'
+;
+```
\ No newline at end of file
diff --git a/jdbc-adapter/doc/sql_dialects/sybase.md b/jdbc-adapter/doc/sql_dialects/sybase.md
new file mode 100644
index 000000000..255311f82
--- /dev/null
+++ b/jdbc-adapter/doc/sql_dialects/sybase.md
@@ -0,0 +1,50 @@
+# Sybase SQL Dialect
+
+## JDBC Driver
+
+The Sybase dialect was tested with the [jTDS 1.3.1 JDBC driver](https://sourceforge.net/projects/jtds/files/jtds/1.3.1/) and Sybase 16.0.
+While the jTDS driver is pre-installed in EXAOperation, you still need to upload `jtds.jar` to BucketFS.
+
+You can check the Sybase version with the following SQL command:
+
+```sql
+SELECT @@version;
+```
+
+## Adapter Script
+
+```sql
+CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter
+  AS
+
+  %scriptclass com.exasol.adapter.jdbc.JdbcAdapter;
+  %jar /buckets/bucketfs1/virtualschema/virtualschema-jdbc-adapter-dist-1.1.0.jar;
+  %jar /buckets/bucketfs1/virtualschema/jtds-1.3.1.jar;
+/
+```
+
+## Installing the Test Data
+
+Create and populate the test database using the [sybase-testdata.sql](../integration-test-data/sybase-testdata.sql) SQL script.
+
+## Creating a Virtual Schema
+
+```sql
+CREATE OR REPLACE CONNECTION "conn_sybase"
+    TO 'jdbc:jtds:sybase://172.17.0.1:5000/testdb'
+    USER 'tester'
+    IDENTIFIED BY 'pass';
+
+CREATE VIRTUAL SCHEMA sybase USING adapter.jdbc_adapter WITH
+    SQL_DIALECT = 'SYBASE'
+    CONNECTION_NAME = 'CONN_SYBASE'
+    CATALOG_NAME = 'testdb'
+    SCHEMA_NAME = 'tester';
+```
+
+## Supported Data Types
+
+* `NUMERIC/DECIMAL(precision, scale)`: Sybase supports precision values up to 38, while Exasol only supports up to 36. `NUMERIC/DECIMAL` with precision <= 36 are mapped to Exasol's `DECIMAL` type; greater precision values are mapped to a `VARCHAR` column.
+* The Sybase data type `CHAR(n > 2000)` is mapped to Exasol's `VARCHAR(n)`. Exasol only supports `n <= 2000` for data type `CHAR`.
+* The Sybase data types `TEXT` and `UNITEXT` are mapped to `VARCHAR(2000000) UTF8`. If a query on the virtual schema matches a row in which the text column contains a value that exceeds Exasol's column size, an error is shown.
+* The Sybase data types `BINARY`, `VARBINARY`, and `IMAGE` are not supported.
\ No newline at end of file
diff --git a/jdbc-adapter/doc/sql_dialects/teradata.md b/jdbc-adapter/doc/sql_dialects/teradata.md
new file mode 100644
index 000000000..7afdb78de
--- /dev/null
+++ b/jdbc-adapter/doc/sql_dialects/teradata.md
@@ -0,0 +1,43 @@
+# Teradata SQL Dialect
+
+## JDBC Driver
+
+You have to specify the following settings when adding the JDBC driver via EXAOperation:
+
+* Name: `TERADATA`
+* Main: `com.teradata.jdbc.TeraDriver`
+* Prefix: `jdbc:teradata:`
+* Files: `terajdbc4.jar`, `tdgssconfig.jar`
+
+Please also upload the jar files to a bucket for the adapter script.
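+
+The virtual schema shown below references a connection named `TERADATA_CONNECTION`, which you need to create first. A minimal sketch follows; the host and credentials are placeholders you have to adapt, and the URL uses the `jdbc:teradata://` prefix registered above:
+
+```sql
+CREATE CONNECTION teradata_connection
+  TO 'jdbc:teradata://teradata-host'
+  USER 'teradata-usr'
+  IDENTIFIED BY 'teradata-pwd';
+```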
+ +## Adapter script + +```sql +CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter + AS + + // This is the class implementing the callback method of the adapter script + %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; + + // This will add the adapter jar to the classpath so that it can be used inside the adapter script + // Replace the names of the bucketfs and the bucket with the ones you used. + %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.1.0.jar; + + // You have to add all files of the data source jdbc driver here (e.g. MySQL or Hive) + %jar /buckets/bucketfs1/bucket1/terajdbc4.jar; + %jar /buckets/bucketfs1/bucket1/tdgssconfig.jar; + +/ +``` + +## Creating a Virtual Schema + +```sql +CREATE VIRTUAL SCHEMA TERADATA_financial USING adapter.jdbc_adapter +WITH + SQL_DIALECT = 'TERADATA' + CONNECTION_NAME = 'TERADATA_CONNECTION' + SCHEMA_NAME = 'financial' +; +``` \ No newline at end of file diff --git a/jdbc-adapter/doc/supported-dialects.md b/jdbc-adapter/doc/supported-dialects.md deleted file mode 100644 index 894050221..000000000 --- a/jdbc-adapter/doc/supported-dialects.md +++ /dev/null @@ -1,524 +0,0 @@ -# Supported Dialects - -The purpose of this page is to provide detailed instructions for each of the supported dialects on how to get started. Typical questions are -* Which **JDBC driver** is used, which files have to be uploaded and included when creating the adapter script. -* How does the **CREATE VIRTUAL SCHEMA** statement look like, i.e. which properties are required. -* **Data source specific notes**, like authentication with Kerberos, supported capabilities or things to consider regarding the data type mapping. - -As an entry point we recommend to follow the [step-by-step deployment guide](deploy-adapter.md) which will link to this page whenever needed. - -## Table of Contents - -1. [EXASOL](#exasol) -2. [Hive](#hive) - - [Connecting To a Kerberos Secured Hadoop](#connecting-to-a-kerberos-secured-hadoop) -3. [Impala](#impala) -4. [DB2](#db2) -5. [Oracle](#oracle) -6. [Teradata](#teradata) -7. [Redshift](#redshift) -8. [SQL Server](#sql-server) -8. [PostgresSQL](#postgresql) -10. [Generic](#generic) - -## EXASOL - -**Supported capabilities**: -The EXASOL SQL dialect supports all capabilities that are supported by the virtual schema framework. - -**JDBC driver**: -Connecting to an EXASOL database is the simplest way to start with virtual schemas. -You don't have to install any JDBC driver, because it is already installed in the EXASOL database and also included in the jar of the JDBC adapter. - -**Adapter script**: -After uploading the adapter jar, the adapter script can be created as follows: -```sql -CREATE SCHEMA adapter; -CREATE JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS - %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar; -/ -``` -**Create a virtual schema:** - -```sql -CREATE CONNECTION exasol_conn TO 'jdbc:exa:exasol-host:1234' USER 'user' IDENTIFIED BY 'pwd'; - -CREATE VIRTUAL SCHEMA virtual_exasol USING adapter.jdbc_adapter WITH - SQL_DIALECT = 'EXASOL' - CONNECTION_NAME = 'EXASOL_CONN' - SCHEMA_NAME = 'default'; -``` - -**Use IMPORT FROM EXA instead of IMPORT FROM JDBC** - -EXASOL provides the faster and parallel `IMPORT FROM EXA` command for loading data from EXASOL. You can tell the adapter to use this command instead of `IMPORT FROM JDBC` by setting the `IMPORT_FROM_EXA` property. 
In this case you have to provide the additional `EXA_CONNECTION_STRING` which is the connection string used for the internally used `IMPORT FROM EXA` command (it also supports ranges like `192.168.6.11..14:8563`). Please note, that the `CONNECTION` object must still have the jdbc connection string in `AT`, because the Adapter Script uses a JDBC connection to obtain the metadata when a schema is created or refreshed. For the internally used `IMPORT FROM EXA` statement, the address from `EXA_CONNECTION_STRING` and the username and password from the connection will be used. -```sql -CREATE CONNECTION exasol_conn TO 'jdbc:exa:exasol-host:1234' USER 'user' IDENTIFIED BY 'pwd'; - -CREATE VIRTUAL SCHEMA virtual_exasol USING adapter.jdbc_adapter WITH - SQL_DIALECT = 'EXASOL' - CONNECTION_NAME = 'EXASOL_CONN' - SCHEMA_NAME = 'default' - IMPORT_FROM_EXA = 'true' - EXA_CONNECTION_STRING = 'exasol-host:1234'; -``` - -## Hive - -**JDBC driver**: -The dialect was tested with the Cloudera Hive JDBC driver available on the [Cloudera downloads page](http://www.cloudera.com/downloads). The driver is also available directly from [Simba technologies](http://www.simba.com/), who developed the driver. - -When you unpack the JDBC driver archive you will see that there are two variants, JDBC 4.0 and 4.1. We tested with the JDBC 4.1 variant. - -You have to specify the following settings when adding the JDBC driver via EXAOperation: -* Name: `Hive` -* Main: `com.cloudera.hive.jdbc41.HS2Driver` -* Prefix: `jdbc:hive2:` - -Make sure you upload **all files** of the JDBC driver (over 10 at the time of writing) in EXAOperation **and** to the bucket. - -**Adapter script**: -You have to add all files of the JDBC driver to the classpath using `%jar` as follows (filenames may vary): -```sql -CREATE SCHEMA adapter; -CREATE JAVA ADAPTER SCRIPT jdbc_adapter AS - %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar; - - %jar /buckets/bucketfs1/bucket1/hive_metastore.jar; - %jar /buckets/bucketfs1/bucket1/hive_service.jar; - %jar /buckets/bucketfs1/bucket1/HiveJDBC41.jar; - %jar /buckets/bucketfs1/bucket1/libfb303-0.9.0.jar; - %jar /buckets/bucketfs1/bucket1/libthrift-0.9.0.jar; - %jar /buckets/bucketfs1/bucket1/log4j-1.2.14.jar; - %jar /buckets/bucketfs1/bucket1/ql.jar; - %jar /buckets/bucketfs1/bucket1/slf4j-api-1.5.11.jar; - %jar /buckets/bucketfs1/bucket1/slf4j-log4j12-1.5.11.jar; - %jar /buckets/bucketfs1/bucket1/TCLIServiceClient.jar; - %jar /buckets/bucketfs1/bucket1/zookeeper-3.4.6.jar; -/ -``` -**Create a virtual schema:** -```sql -CREATE CONNECTION hive_conn TO 'jdbc:hive2://hive-host:10000' USER 'hive-usr' IDENTIFIED BY 'hive-pwd'; - -CREATE VIRTUAL SCHEMA hive_default USING adapter.jdbc_adapter WITH - SQL_DIALECT = 'HIVE' - CONNECTION_NAME = 'HIVE_CONN' - SCHEMA_NAME = 'default'; -``` - -### Connecting To a Kerberos Secured Hadoop: - -Connecting to a Kerberos secured Impala or Hive service only differs in one aspect: You have to a `CONNECTION` object which contains all the relevant information for the Kerberos authentication. This section describes how Kerberos authentication works and how to create such a `CONNECTION`. - -#### 0. Understand how it works (optional) -Both the adapter script and the internally used `IMPORT FROM JDBC` statement support Kerberos authentication. They detect, that the connection is a Kerberos connection by a special prefix in the `IDENTIFIED BY` field. 
In such case, the authentication will happen using a Kerberos keytab and Kerberos config file (using the JAAS Java API). - -The `CONNECTION` object stores all relevant information and files in its fields: -* The `TO` field contains the JDBC connection string -* The `USER` field contains the Kerberos principal -* The `IDENTIFIED BY` field contains the Kerberos configuration file and keytab file (base64 encoded) along with an internal prefix `ExaAuthType=Kerberos;` to identify the `CONNECTION` as a Kerberos `CONNECTION`. - -#### 1. Generate the CREATE CONNECTION statement -In order to simplify the creation of Kerberos `CONNECTION` objects, the [`create_kerberos_conn.py`](https://github.com/EXASOL/hadoop-etl-udfs/blob/master/tools/create_kerberos_conn.py) Python script has been provided. The script requires 5 arguments: -* `CONNECTION` name (arbitrary name for the new `CONNECTION`) -* Kerberos principal for Hadoop (i.e., Hadoop user) -* Kerberos configuration file path (e.g., `krb5.conf`) -* Kerberos keytab file path, which contains keys for the Kerberos principal -* JDBC connection string - -Example command: -``` -python tools/create_kerberos_conn.py krb_conn krbuser@EXAMPLE.COM /etc/krb5.conf ./krbuser.keytab \ - 'jdbc:hive2://hive-host.example.com:10000;AuthMech=1;KrbRealm=EXAMPLE.COM;KrbHostFQDN=hive-host.example.com;KrbServiceName=hive' -``` -Output: -```sql -CREATE CONNECTION krb_conn TO 'jdbc:hive2://hive-host.example.com:10000;AuthMech=1;KrbRealm=EXAMPLE.COM;KrbHostFQDN=hive-host.example.com;KrbServiceName=hive' USER 'krbuser@EXAMPLE.COM' IDENTIFIED BY 'ExaAuthType=Kerberos;enp6Cg==;YWFhCg==' -``` - -#### 2. Create the CONNECTION -You have to execute the generated `CREATE CONNECTION` statement directly in EXASOL to actually create the Kerberos `CONNECTION` object. For more detailed information about the script, use the help option: -``` -python tools/create_kerberos_conn.py -h -``` - -#### 3. Use the connection when creating a virtual schema -You can now create a virtual schema using the Kerberos connection created before. -```sql -CREATE VIRTUAL SCHEMA hive_default USING adapter.jdbc_adapter WITH - SQL_DIALECT = 'HIVE' - CONNECTION_NAME = 'KRB_CONN' - SCHEMA_NAME = 'default'; -``` - -## Impala - -The Impala dialect is similar to the Hive dialect in most aspects. For this reason we only highlight the differences in this section. - -**JDBC driver:** - -You have to specify the following settings when adding the JDBC driver via EXAOperation: -* Name: `Hive` -* Main: `com.cloudera.impala.jdbc41.Driver` -* Prefix: `jdbc:impala:` - -Make sure you upload **all files** of the JDBC driver (over 10 at the time of writing) in EXAOperation and to the bucket. 
- -**Adapter script**: -The adapter can be created similar to Hive: -```sql - -CREATE SCHEMA adapter; -CREATE JAVA ADAPTER SCRIPT jdbc_adapter AS - %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar; - - %jar /buckets/bucketfs1/bucket1/hive_metastore.jar; - %jar /buckets/bucketfs1/bucket1/hive_service.jar; - %jar /buckets/bucketfs1/bucket1/ImpalaJDBC41.jar; - %jar /buckets/bucketfs1/bucket1/libfb303-0.9.0.jar; - %jar /buckets/bucketfs1/bucket1/libthrift-0.9.0.jar; - %jar /buckets/bucketfs1/bucket1/log4j-1.2.14.jar; - %jar /buckets/bucketfs1/bucket1/ql.jar; - %jar /buckets/bucketfs1/bucket1/slf4j-api-1.5.11.jar; - %jar /buckets/bucketfs1/bucket1/slf4j-log4j12-1.5.11.jar; - %jar /buckets/bucketfs1/bucket1/TCLIServiceClient.jar; - %jar /buckets/bucketfs1/bucket1/zookeeper-3.4.6.jar; -/ -``` - -**Create a virtual schema:** -You can now create a virtual schema as follows: -```sql -CREATE CONNECTION impala_conn TO 'jdbc:impala://impala-host:21050' USER 'impala-usr' IDENTIFIED BY 'impala-pwd'; - -CREATE VIRTUAL SCHEMA impala_default USING adapter.jdbc_adapter WITH - SQL_DIALECT = 'IMPALA' - CONNECTION_NAME = 'IMPALA_CONN' - SCHEMA_NAME = 'default'; -``` - -Connecting to a Kerberos secured Impala works similar as for Hive and is described in the section [Connecting To a Kerberos Secured Hadoop](#connecting-to-a-kerberos-secured-hadoop). - -## DB2 - -DB2 was tested with the IBM DB2 JCC Drivers that come with DB2 LUW V10.1 and V11. As these drivers didn't have any major changes in the past years any DB2 driver should work (back to V9.1). The driver comes with 2 different implementations `db2jcc.jar` and `db2jcc4.jar`. All tests were made with the `db2jcc4.jar`. - -Additionally there are 2 files for the DB2 Driver. -* `db2jcc_license_cu.jar` - License File for DB2 on Linux Unix and Windows -* `db2jcc_license_cisuz.jar` - License File for DB2 on zOS (Mainframe) - -Make sure that you upload the necessary license file for the target platform you want to connect to. - -**Supported capabilities**: -The db2 dialect handles some casts in regards of time data types and functions. - -Casting of Data Types -* `TIMESTAMP` and `TIMESTAMP(x)` will be cast to `VARCHAR` to not lose precision. -* `VARCHAR` and `CHAR` for bit data will be cast to a hex string with double the original size -* `TIME` will be cast to `VARCHAR(8)` -* `XML` will be cast to `VARCHAR(DB2_MAX_LENGTH)` -* `BLOB` is not supported - -Casting of Functions -* `LIMIT` will replaced by `FETCH FIRST x ROWS ONLY` -* `OFFSET` is currently not supported as only DB2 V11 support this nativly -* `ADD_DAYS`, `ADD_WEEKS` ... will be replaced by `COLUMN + DAYS`, `COLUMN + ....` - - -**JDBC driver:** -You have to specify the following settings when adding the JDBC driver via EXAOperation: -* Name: `DB2` -* Main: `com.ibm.db2.jcc.DB2Driver` -* Prefix: `jdbc:db2:` - -**Adapter script** -```sql -CREATE or replace JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS - - // This is the class implementing the callback method of the adapter script - %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - - // This will add the adapter jar to the classpath so that it can be used inside the adapter script - // Replace the names of the bucketfs and the bucket with the ones you used. 
- %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar; - - // DB2 Driver files - %jar /buckets/bucketfs1/bucket1/db2jcc4.jar; - %jar /buckets/bucketfs1/bucket1/db2jcc_license_cu.jar; - // uncomment for mainframe connection and upload db2jcc_license_cisuz.jar; - //%jar /buckets/bucketfs1/bucket1/db2jcc_license_cisuz.jar; -/ -``` - -**Create a virtual schema** -You can now create a virtual schema as follows: -```sql -create or replace connection DB2_CON to 'jdbc:db2://host:port/database' user 'db2-usr' identified by 'db2-pwd'; - -create virtual schema db2 using adapter.jdbc_adapter with - SQL_DIALECT = 'DB2' - CONNECTION_NAME = 'DB2_CON' - SCHEMA_NAME = '' -; -``` - -`` has to be replaced by the actual db2 schema you want to connect to. - -**Running the DB2 integration tests** -A how to has been included in the [setup sql file](../integration-test-data/db2-testdata.sql) - -## Oracle -**Supported capabilities**: -The Oracle dialect does not support all capabilities. A complete list can be found in [OracleSqlDialect.getCapabilities()](../virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/OracleSqlDialect.java). - -Oracle datatypes are mapped to their equivalents in Exasol. The following exceptions apply: -- `NUMBER`, `NUMBER with precision > 36` and `LONG` are casted to `VARCHAR` to prevent a loss of precision. -- `DATE` is casted to `TIMESTAMP`. This datatype is only supported for positive year values, i.e., years > 0001. -- `TIMESTAMP WITH [LOCAL] TIME ZONE` is casted to `VARCHAR`. Exasol does not support timestamps with time zone information. -- `INTERVAL` is casted to `VARCHAR`. -- `CLOB`, `NCLOB` and `BLOB` are casted to `VARCHAR`. -- `RAW` and `LONG RAW` are not supported. - - -### JDBC driver -To setup a virtual schema that communicates with an Oracle database using JDBC, the JDBC driver, e.g., `ojdbc7-12.1.0.2.jar`, must first be installed in EXAoperation and deployed to BucketFS; see [this article](https://www.exasol.com/support/browse/SOL-179#WhichJDBCdriverforOracleshallIuse?) and [Deploying the Adapter Step By Step](deploy-adapter.md) for instructions. - -**Adapter script**: -After uploading the adapter jar we are ready to create an Oracle adapter script. Adapt the following script as indicated. -```sql -CREATE SCHEMA adapter; -CREATE JAVA ADAPTER SCRIPT adapter.jdbc_oracle AS - %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - - // You need to replace `your-bucket-fs` and `your-bucket` to match the actual location - // of the adapter jar. - %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar; - - // Add the oracle jdbc driver to the classpath - %jar /buckets/bucketfs1/bucket1/ojdbc7-12.1.0.2.jar -/ -``` - -**JDBC Connection**: -Next, create a JDBC connection to your Oracle database. Adjust the properties to match your environment. -```sql -CREATE CONNECTION jdbc_oracle - TO 'jdbc:oracle:thin:@//:/' - USER '' - IDENTIFIED BY ''; -``` - -A quick option to test the `JDBC_ORACLE` connection is to run an `IMPORT FROM JDBC` query. The connection works, if `42` is returned. -```sql -IMPORT FROM JDBC AT jdbc_oracle - STATEMENT 'SELECT 42 FROM DUAL'; -``` - -**Virtual schema:** -Having created both a JDBC adapter script and a JDBC oracle connection, we are ready to create a virtual schema. Insert the name of the schema that you want to expose in Exasol. 
-```sql -CREATE VIRTUAL SCHEMA virt_oracle USING adapter.jdbc_oracle WITH - SQL_DIALECT = 'ORACLE' - CONNECTION_NAME = 'JDBC_ORACLE' - SCHEMA_NAME = ''; -``` - -### Use IMPORT FROM ORA instead of IMPORT FROM JDBC** -Exasol provides the `IMPORT FROM ORA` command for loading data from Oracle. It is possible to create a virtual schema that uses `IMPORT FROM ORA` instead of JDBC to communicate with Oracle. Both options are indented to support the same features. `IMPORT FROM ORA` almost always offers better performance since it is implemented natively. - -This behaviour is toggled by the Boolean `IMPORT_FROM_ORA` variable. Note that a JDBC connection to Oracle is still required to fetch metadata. In addition, a "direct" connection to the Oracle database is needed. - -**Deploy the Oracle Instant Client**: -To be able to communicate with Oracle, you first need to supply Exasol with the Oracle Instant Client, which can be obtained [directly from Oracle](http://www.oracle.com/technetwork/database/database-technologies/instant-client/overview/index.html). Open EXAoperation, visit Software -> "Upload Oracle Instant Client" and select the downloaded package. The latest version of Oracle Instant Client we tested is `instantclient-basic-linux.x64-12.1.0.2.0`. - -**Create an Oracle Connection**: -Having deployed the Oracle Instant Client, a connection to your Oracle database can be set up. -```sql -CREATE CONNECTION conn_oracle - TO '(DESCRIPTION = - (ADDRESS_LIST = (ADDRESS = (PROTOCOL = TCP) - (HOST = ) - (PORT = ))) - (CONNECT_DATA = (SERVER = DEDICATED) - (SERVICE_NAME = )))' - USER '' - IDENTIFIED BY ''; -``` - -This connection can be tested using, e.g., the following SQL expression. -```sql -IMPORT FROM ORA at CONN_ORACLE - STATEMENT 'SELECT 42 FROM DUAL'; -``` - -**Virtual schema**: -Assuming you already setup the JDBC connection `JDBC_ORACLE` as shown in the previous section, you can continue with creating the virtual schema. -```sql -CREATE VIRTUAL SCHEMA virt_import_oracle USING adapter.jdbc_oracle WITH - SQL_DIALECT = 'ORACLE' - CONNECTION_NAME = 'JDBC_ORACLE' - SCHEMA_NAME = '' - IMPORT_FROM_ORA = 'true' - EXA_CONNECTION_NAME = 'CONN_ORACLE'; -``` - -## Teradata - -**JDBC driver:** -You have to specify the following settings when adding the JDBC driver via EXAOperation: -* Name: `TERADATA` -* Main: `com.teradata.jdbc.TeraDriver` -* Prefix: `jdbc:teradata:` -* Files: `terajdbc4.jar`, `tdgssconfig.jar` - -Please also upload the jar files to a bucket for the adapter script. - -**Adapter script** -```sql -CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter - AS - - // This is the class implementing the callback method of the adapter script - %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - - // This will add the adapter jar to the classpath so that it can be used inside the adapter script - // Replace the names of the bucketfs and the bucket with the ones you used. - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar; - - // You have to add all files of the data source jdbc driver here (e.g. 
MySQL or Hive) - %jar /buckets/bucketfs1/bucket1/terajdbc4.jar; - %jar /buckets/bucketfs1/bucket1/tdgssconfig.jar; - -/ -``` - -**Create a virtual schema** -```sql -CREATE VIRTUAL SCHEMA TERADATA_financial USING adapter.jdbc_adapter -WITH - SQL_DIALECT = 'TERADATA' - CONNECTION_NAME = 'TERADATA_CONNECTION' - SCHEMA_NAME = 'financial' -; -``` - -## Redshift - -**JDBC driver:** - -You have to specify the following settings when adding the JDBC driver via EXAOperation: -* Name: `REDSHIFT` -* Main: `com.amazon.redshift.jdbc.Driver` -* Prefix: `jdbc:redshift:` -* Files: `RedshiftJDBC42-1.2.1.1001.jar` - -Please also upload the driver jar into a bucket for the adapter script. - -**Adapter script** -```sql -CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter - AS - - // This is the class implementing the callback method of the adapter script - %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - - // This will add the adapter jar to the classpath so that it can be used inside the adapter script - // Replace the names of the bucketfs and the bucket with the ones you used. - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar; - - // You have to add all files of the data source jdbc driver here (e.g. MySQL or Hive) - - %jar /buckets/bucketfs1/bucket1/RedshiftJDBC42-1.2.1.1001.jar; - -/ -``` - -**Create a virtual schema** -```sql -CREATE VIRTUAL SCHEMA redshift_tickit - USING adapter.jdbc_adapter - WITH - SQL_DIALECT = 'REDSHIFT' - CONNECTION_NAME = 'REDSHIFT_CONNECTION' - CATALOG_NAME = 'database_name' - SCHEMA_NAME = 'public' - ; -``` - -## Sql Server - -**JDBC driver:** -The Sql Server Dialect was tested with the jdts 1.3.1 JDBC driver and Sql Server 2014. -As the jdts driver is already preinstalled for the `IMPORT` command itself you only need -to upload the `jdts.jar` to a bucket for the adapter script. - -**Adapter script** -```sql -CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.sql_server_jdbc_adapter - AS - - // This is the class implementing the callback method of the adapter script - %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - - // This will add the adapter jar to the classpath so that it can be used inside the adapter script - // Replace the names of the bucketfs and the bucket with the ones you used. - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar; - - // You have to add all files of the data source jdbc driver here - %jar /buckets/bucketfs1/bucket1/jtds.jar; -/ -``` - -**Create a virtual schema** -```sql -CREATE VIRTUAL SCHEMA VS_SQLSERVER USING adapter.sql_server_jdbc_adapter -WITH - SQL_DIALECT = 'SQLSERVER' - CONNECTION_NAME = 'SQLSERVER_CONNECTION' - CATALOG_NAME = 'MyDatabase' - SCHEMA_NAME = 'dbo' -; -``` - -## PostgreSQL - -**JDBC driver:** -The PostgreSQL dialect was tested with JDBC driver version 42.0.0 and PostgreSQL 9.6.2 . - -**Adapter script** -```sql -CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter - AS - - // This is the class implementing the callback method of the adapter script - %scriptclass com.exasol.adapter.jdbc.JdbcAdapter; - - // This will add the adapter jar to the classpath so that it can be used inside the adapter script - // Replace the names of the bucketfs and the bucket with the ones you used. - %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar; - - // You have to add all files of the data source jdbc driver here (e.g. 
MySQL or Hive)
-    %jar /buckets/bucketfs1/bucket1/postgresql-42.0.0.jar;
-
-/
-```
-
-**Create a virtual schema**
-```sql
-CREATE VIRTUAL SCHEMA postgres
-    USING adapter.jdbc_adapter
-    WITH
-    SQL_DIALECT = 'POSTGRESQL'
-    CATALOG_NAME = 'postgres'
-    SCHEMA_NAME = 'public'
-    CONNECTION_NAME = 'POSTGRES_DOCKER'
-    ;
-```
-
-## Generic
diff --git a/jdbc-adapter/doc/supported_sql_dialects.md b/jdbc-adapter/doc/supported_sql_dialects.md
new file mode 100644
index 000000000..34c04dcb3
--- /dev/null
+++ b/jdbc-adapter/doc/supported_sql_dialects.md
@@ -0,0 +1,26 @@
+# Supported Dialects
+
+The purpose of this page is to provide detailed instructions for each of the supported dialects on how to get started. Typical questions are:
+* Which **JDBC driver** is used, and which files have to be uploaded and included when creating the adapter script?
+* What does the **CREATE VIRTUAL SCHEMA** statement look like, i.e., which properties are required?
+* **Data source specific notes**, like authentication with Kerberos, supported capabilities or things to consider regarding the data type mapping.
+
+As an entry point, we recommend you follow the [step-by-step deployment guide](deploying_the_virtual_schema_adapter.md), which will link to this page whenever needed.
+
+## Before You Start
+
+Please note that the syntax for creating adapter scripts is not recognized by all SQL clients, [DBeaver](https://dbeaver.io/) for example. If you encounter such a problem, try a different client.
+
+## List of Supported Dialects
+
+1. [EXASOL](sql_dialects/exasol.md)
+1. [Hive](sql_dialects/hive.md)
+1. [Impala](sql_dialects/impala.md)
+1. [DB2](sql_dialects/db2.md)
+1. [Oracle](sql_dialects/oracle.md)
+1. [Teradata](sql_dialects/teradata.md)
+1. [Redshift](sql_dialects/redshift.md)
+1. [SQL Server](sql_dialects/sql_server.md)
+1. [Sybase ASE](sql_dialects/sybase.md)
+1. [PostgreSQL](sql_dialects/postgresql.md)
+1. 
Generic \ No newline at end of file diff --git a/jdbc-adapter/integration-test-data/integration-test-db2.yaml b/jdbc-adapter/integration-test-data/integration-test-db2.yaml index 7ab4f7dd8..020c678a7 100644 --- a/jdbc-adapter/integration-test-data/integration-test-db2.yaml +++ b/jdbc-adapter/integration-test-data/integration-test-db2.yaml @@ -5,7 +5,7 @@ general: debugAddress: '192.168.0.12:3000' # Address which will be defined as DEBUG_ADDRESS in the virtual schemas bucketFsUrl: http://exasol-host:2580/bucket1 bucketFsPassword: bucket1 - jdbcAdapterPath: /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar + jdbcAdapterPath: /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.1.0.jar exasol: runIntegrationTests: true diff --git a/jdbc-adapter/integration-test-data/integration-test-sample.yaml b/jdbc-adapter/integration-test-data/integration-test-sample.yaml index adb74b1d6..95cda3fe2 100644 --- a/jdbc-adapter/integration-test-data/integration-test-sample.yaml +++ b/jdbc-adapter/integration-test-data/integration-test-sample.yaml @@ -5,7 +5,7 @@ general: debugAddress: '192.168.0.12:3000' # Address which will be defined as DEBUG_ADDRESS in the virtual schemas bucketFsUrl: http://exasol-host:2580/bucket1 bucketFsPassword: bucket1 - jdbcAdapterPath: /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar + jdbcAdapterPath: /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.1.0.jar exasol: runIntegrationTests: true @@ -87,3 +87,9 @@ hive: user: user password: pass +sybase: + runIntegrationTests: false + jdbcDriverPath: /buckets/mybucketfs/mybucket/jtds-1.3.1.jar + connectionString: jdbc:jtds:sybase://127.0.0.1:5000/db + user: sybase-user + password: sybase-password diff --git a/jdbc-adapter/integration-test-data/integration-test-travis.yaml b/jdbc-adapter/integration-test-data/integration-test-travis.yaml index 415e6ae83..fff001e94 100644 --- a/jdbc-adapter/integration-test-data/integration-test-travis.yaml +++ b/jdbc-adapter/integration-test-data/integration-test-travis.yaml @@ -5,7 +5,7 @@ general: debugAddress: '' bucketFsUrl: http://127.0.0.1:6594/default bucketFsPassword: write - jdbcAdapterPath: /buckets/bfsdefault/default/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar + jdbcAdapterPath: /buckets/bfsdefault/default/virtualschema-jdbc-adapter-dist-1.1.0.jar exasol: runIntegrationTests: true diff --git a/jdbc-adapter/integration-test-data/run_integration_tests.sh b/jdbc-adapter/integration-test-data/run_integration_tests.sh index 004cac16f..ca03b8935 100755 --- a/jdbc-adapter/integration-test-data/run_integration_tests.sh +++ b/jdbc-adapter/integration-test-data/run_integration_tests.sh @@ -1,51 +1,79 @@ #!/usr/bin/env bash - # This script executes integration tests as defined in # integration-test-travis.yaml (currently only Exasol integration tests). - +# # An Exasol instance is run using the exasol/docker-db image. Therefore, a # working installation of Docker and sudo privileges are required. set -eux - cd "$(dirname "$0")/.." 
-config="$(pwd)/integration-test-data/integration-test-travis.yaml" +readonly config="$(pwd)/integration-test-data/integration-test-travis.yaml" +readonly exasol_docker_image_version="6.0.10-d1" +readonly docker_image="exasol/docker-db:$exasol_docker_image_version" +readonly docker_name="exasoldb" +readonly tmp="$(mktemp -td exasol-vs-adapter-integration.XXXXXX)" || exit 1 function cleanup() { docker rm -f exasoldb || true - sudo rm -rf integration-test-data/exa || true + sudo rm -rf "$tmp" || true } trap cleanup EXIT -# Setup directory "exa" with pre-configured EXAConf to attach it to the exasoldb docker container -mkdir -p integration-test-data/exa/{etc,data/storage} -cp integration-test-data/EXAConf integration-test-data/exa/etc/EXAConf -dd if=/dev/zero of=integration-test-data/exa/data/storage/dev.1.data bs=1 count=1 seek=4G -touch integration-test-data/exa/data/storage/dev.1.meta - -docker pull exasol/docker-db:latest -docker run \ - --name exasoldb \ - -p 8899:8888 \ - -p 6594:6583 \ - --detach \ - --privileged \ - -v "$(pwd)/integration-test-data/exa:/exa" \ - exasol/docker-db:latest \ - init-sc --node-id 11 - -docker logs -f exasoldb & - -# Wait until database is ready -(docker logs -f --tail 0 exasoldb &) 2>&1 | grep -q -i 'stage4: All stages finished' -sleep 30 - -mvn -q clean package - -# Load virtualschema-jdbc-adapter jar into BucketFS and wait until it's available. -mvn -q pre-integration-test -DskipTests -Pit -Dintegrationtest.configfile="$config" -(docker exec exasoldb sh -c 'tail -f -n +0 /exa/logs/cored/*bucket*' &) | \ - grep -q -i 'File.*virtualschema-jdbc-adapter.*linked' - -mvn -q verify -Pit -Dintegrationtest.configfile="$config" -Dintegrationtest.skipTestSetup=true +main() { + prepare_configuration_dir "$tmp/etc" + prepare_data_dir "$tmp/data/storage" + init_docker + check_docker_ready + build + upload_jar_to_bucket + run_tests +} + + +prepare_configuration_dir() { + mkdir -p "$1" + cp integration-test-data/EXAConf "$1/EXAConf" +} + +prepare_data_dir() { + mkdir -p "$1" + dd if=/dev/zero of="$1/dev.1.data" bs=1 count=1 seek=4G + touch "$1/dev.1.meta" +} + +init_docker() { + docker pull "$docker_image" + docker run \ + --name "$docker_name" \ + -p 8899:8888 \ + -p 6594:6583 \ + --detach \ + --privileged \ + -v "$tmp:/exa" \ + "$docker_image" \ + init-sc --node-id 11 + docker logs -f "$docker_name" & +} + +check_docker_ready() { + # Wait until database is ready + (docker logs -f --tail 0 "$docker_name" &) 2>&1 | grep -q -i 'stage4: All stages finished' + sleep 30 +} + +build() { + mvn -q clean package +} + +upload_jar_to_bucket() { + mvn -q pre-integration-test -DskipTests -Pit -Dintegrationtest.configfile="$config" + (docker exec "$docker_name" sh -c 'tail -f -n +0 /exa/logs/cored/*bucket*' &) | \ + grep -q -i 'File.*virtualschema-jdbc-adapter.*linked' +} + +run_tests() { + mvn -q verify -Pit -Dintegrationtest.configfile="$config" -Dintegrationtest.skipTestSetup=true +} + +main "$@" \ No newline at end of file diff --git a/jdbc-adapter/integration-test-data/sybase.sql b/jdbc-adapter/integration-test-data/sybase.sql new file mode 100644 index 000000000..350e08aae --- /dev/null +++ b/jdbc-adapter/integration-test-data/sybase.sql @@ -0,0 +1,165 @@ +DROP TABLE testdb.tester.ittable go +CREATE TABLE testdb.tester.ittable ( + a varchar(100), + b decimal +) go + +INSERT INTO testdb.tester.ittable (a, b) VALUES('e', 2) +INSERT INTO testdb.tester.ittable (a, b) VALUES('b', 3) +INSERT INTO testdb.tester.ittable (a, b) VALUES(NULL, -1) +INSERT INTO testdb.tester.ittable (a, b) 
VALUES('a', NULL)
+INSERT INTO testdb.tester.ittable (a, b) VALUES('z', 0)
+INSERT INTO testdb.tester.ittable (a, b) VALUES('z', 0) go
+
+DROP TABLE testdb.tester.timetypes go
+CREATE TABLE testdb.tester.timetypes (
+    c_smalldatetime smalldatetime,
+    c_datetime datetime,
+    c_date date,
+    c_time time,
+    c_bigdatetime bigdatetime, -- error data truncation
+    c_bigtime bigtime
+) go
+
+INSERT INTO testdb.tester.timetypes
+    VALUES('1.1.1900 01:02',
+        '1.1.1753 01:02:03.100',
+        '12/3/2032',
+        '11:22:33.456',
+        '6.4.1553 11:11:11.111111',
+        '11:11:11.111111'
+    )
+go
+
+
+-- https://help.sap.com/viewer/b65d6a040c4a4709afd93068071b2a76/16.0.3.5/en-US/aa354eb4bc2b101495d29877b5bd3c5b.html
+DROP TABLE testdb.tester.integertypes go
+CREATE TABLE testdb.tester.integertypes (
+    c_bigint bigint,
+    c_int int,
+    c_smallint smallint,
+    c_ubigint unsigned bigint,
+    c_uint unsigned int,
+    c_usmallint unsigned smallint
+) go
+
+INSERT INTO testdb.tester.integertypes
+    VALUES(-9223372036854775808,
+        -2147483648,
+        -32768,
+        0,
+        0,
+        0
+    )
+INSERT INTO testdb.tester.integertypes
+    VALUES(9223372036854775807,
+        2147483647,
+        32767,
+        18446744073709551615,
+        4294967295,
+        65535
+    )
+go
+
+
+-- https://help.sap.com/viewer/b65d6a040c4a4709afd93068071b2a76/16.0.3.5/en-US/aa357b76bc2b1014ba159ac9d0074e1d.html
+DROP TABLE testdb.tester.decimaltypes go
+CREATE TABLE testdb.tester.decimaltypes (
+    c_numeric_36_0 numeric(36, 0),
+    c_numeric_38_0 numeric(38, 0),
+    c_decimal_20_10 decimal(20, 10),
+    c_decimal_37_10 decimal(37, 10)
+) go
+
+INSERT INTO testdb.tester.decimaltypes
+VALUES(12345678901234567890123456,
+    1234567890123456789012345678,
+    1234567890.0123456789,
+    12345678901234567.0123456789
+)
+INSERT INTO testdb.tester.decimaltypes
+    VALUES(-12345678901234567890123456,
+        -1234567890123456789012345678,
+        -1234567890.0123456789,
+        -12345678901234567.0123456789
+    )
+go
+
+
+-- https://help.sap.com/viewer/b65d6a040c4a4709afd93068071b2a76/16.0.3.5/en-US/aa357b76bc2b1014ba159ac9d0074e1d.html
+-- FLOAT(p) is an alias for either DOUBLE PRECISION or REAL. If p < 16, FLOAT is stored as REAL, if p >= 16, FLOAT is stored as DOUBLE PRECISION.
+DROP TABLE testdb.tester.approxtypes go
+CREATE TABLE testdb.tester.approxtypes (
+    c_double double precision,
+    c_real real,
+) go
+
+INSERT INTO testdb.tester.approxtypes VALUES(
+    2.2250738585072014e-308,
+    1.175494351e-38
+)
+INSERT INTO testdb.tester.approxtypes VALUES(
+    1.797693134862315708e+308,
+    3.402823466e+38
+)
+go
+
+
+DROP TABLE testdb.tester.moneytypes go
+CREATE TABLE testdb.tester.moneytypes (
+    c_smallmoney smallmoney,
+    c_money money,
+) go
+
+INSERT INTO testdb.tester.moneytypes VALUES(
+    214748.3647,
+    922337203685477.5807
+)
+INSERT INTO testdb.tester.moneytypes VALUES(
+    -214748.3648,
+    -922337203685477.5808
+)
+go
+
+
+-- https://help.sap.com/viewer/b65d6a040c4a4709afd93068071b2a76/16.0.3.5/en-US/aa362f6cbc2b1014b1ed808e2a54e693.html
+DROP TABLE testdb.tester.chartypes go
+CREATE TABLE testdb.tester.chartypes (
+    c_char_10 char(10),
+    c_char_toobig char(2001),
+    c_varchar varchar(10), -- maximum size in Sybase is 16384 -> smaller than Exasol's limit
+    c_unichar_10 unichar(10), -- NOT right-padded with spaces
+    c_unichar_toobig unichar(8192), -- NOT right-padded with spaces
+    c_univarchar univarchar(10), -- maximum size is 8192
+    c_nchar nchar(10), -- maximum size in Sybase is 16384. NOT right-padded with spaces.
+ c_nvarchar nvarchar(10), -- maximum size in Sybase is 16384 + c_text text, + c_unitext unitext +) go + +INSERT INTO testdb.tester.chartypes VALUES( + 'abcd', + 'Lorem ipsum dolor sit amet... rest is zero.', + 'Lorem.', + 'Ipsum.', + 'xyz', + 'Dolor.', + 'Sit.', + 'Amet.', + 'Text. A wall of text.', + 'Text. A wall of Unicode text.' +) go + + +DROP TABLE testdb.tester.misctypes go +CREATE TABLE testdb.tester.misctypes ( + c_binary binary(10), -- n <= 255 + c_varbinary varbinary(10), + c_image image, + c_bit bit NOT NULL +) go + +INSERT INTO testdb.tester.misctypes VALUES( + 0xdeadbeef, + 0xdeadbeef, + 0xdeadbeef, + 0 +) go diff --git a/jdbc-adapter/integration-test-data/sybase/sybase-create-tables.sql b/jdbc-adapter/integration-test-data/sybase/sybase-create-tables.sql new file mode 100644 index 000000000..e3fb3cbf4 --- /dev/null +++ b/jdbc-adapter/integration-test-data/sybase/sybase-create-tables.sql @@ -0,0 +1,69 @@ +USE testdb go +sp_adduser 'tester' go +SETUSER 'tester' go +CREATE SCHEMA AUTHORIZATION tester + CREATE TABLE ittable ( + a varchar(100) null, + b decimal null + ) + CREATE TABLE timetypes ( + c_smalldatetime smalldatetime, + c_datetime datetime, + c_date date, + c_time time, + c_bigdatetime bigdatetime, -- error data truncation + c_bigtime bigtime + ) + -- https://help.sap.com/viewer/b65d6a040c4a4709afd93068071b2a76/16.0.3.5/en-US/aa357b76bc2b1014ba159ac9d0074e1d.html + -- FLOAT(p) is alias for either DOUBLE PRECISION or REAL. + -- If p < 16, FLOAT is stored as REAL, if p >= 16, FLOAT is stored as DOUBLE PRECISION. + CREATE TABLE approxtypes ( + c_double double precision, + c_real real, + ) + -- https://help.sap.com/viewer/b65d6a040c4a4709afd93068071b2a76/16.0.3.5/en-US/aa357b76bc2b1014ba159ac9d0074e1d.html + CREATE TABLE decimaltypes ( + c_numeric_36_0 numeric(36, 0), + c_numeric_38_0 numeric(38, 0), + c_decimal_20_10 decimal(20, 10), + c_decimal_37_10 decimal(37, 10) + ) + CREATE TABLE integertypes ( + c_bigint bigint, + c_int int, + c_smallint smallint, + c_ubigint unsigned bigint, + c_uint unsigned int, + c_usmallint unsigned smallint + ) + CREATE TABLE moneytypes ( + c_smallmoney smallmoney, + c_money money, + ) + -- https://help.sap.com/viewer/b65d6a040c4a4709afd93068071b2a76/16.0.3.5/en-US/aa362f6cbc2b1014b1ed808e2a54e693.html + CREATE TABLE chartypes ( + c_char_10 char(10), + c_char_toobig char(2001), + c_varchar varchar(10), -- maximum size in Sybase is 16384 -> smaller than Exasol's limit + c_unichar_10 unichar(10), + c_univarchar univarchar(10), -- maximum size is 8192 + c_nchar nchar(10), -- maximum size in Sybase is 16384. NOT right-padded with spaces. + c_nvarchar nvarchar(10), -- maximum size in Sybase is 16384 + ) + -- NOT right-padded with spaces. + -- While the theoretical maximum is 8192 unichars, effectively only 8148 are possible because + -- Sybase otherwise complains that the maximum row width is exceeded. 
+ CREATE TABLE fatunichartypes ( + c_unichar_toobig unichar(8148) + ) + CREATE TABLE texttypes ( + c_text text, + c_unitext unitext + ) + CREATE TABLE misctypes ( + c_binary binary(10), -- n <= 255 + c_varbinary varbinary(10), + c_image image, + c_bit bit NOT NULL + ) +go \ No newline at end of file diff --git a/jdbc-adapter/integration-test-data/sybase/sybase-drop-tables.sql b/jdbc-adapter/integration-test-data/sybase/sybase-drop-tables.sql new file mode 100644 index 000000000..7d4c763a5 --- /dev/null +++ b/jdbc-adapter/integration-test-data/sybase/sybase-drop-tables.sql @@ -0,0 +1,13 @@ +USE testdb go + +DROP TABLE ittable go +DROP TABLE timetypes go +DROP TABLE integertypes go +DROP TABLE decimaltypes go +DROP TABLE approxtypes go +DROP TABLE moneytypes go +DROP TABLE chartypes go +DROP TABLE fatunichartypes go +DROP TABLE texttypes go +DROP TABLE misctypes go +SELECT * FROM sysobjects WHERE type = 'U' go \ No newline at end of file diff --git a/jdbc-adapter/integration-test-data/sybase/sybase-populate-tables.sql b/jdbc-adapter/integration-test-data/sybase/sybase-populate-tables.sql new file mode 100644 index 000000000..72da153f3 --- /dev/null +++ b/jdbc-adapter/integration-test-data/sybase/sybase-populate-tables.sql @@ -0,0 +1,94 @@ +USE testdb go + +TRUNCATE TABLE tester.ittable go +TRUNCATE TABLE tester.timetypes go +TRUNCATE TABLE tester.integertypes go +TRUNCATE TABLE tester.decimaltypes go +TRUNCATE TABLE tester.approxtypes go +TRUNCATE TABLE tester.moneytypes go +TRUNCATE TABLE tester.chartypes go +TRUNCATE TABLE tester.fatunichartypes go +TRUNCATE TABLE tester.texttypes go +TRUNCATE TABLE tester.misctypes go + +INSERT INTO tester.ittable (a, b) VALUES('e', 2) go +INSERT INTO tester.ittable (a, b) VALUES('b', 3) go +INSERT INTO tester.ittable (a, b) VALUES(NULL, -1) go +INSERT INTO tester.ittable (a, b) VALUES('a', NULL) go +INSERT INTO tester.ittable (a, b) VALUES('z', 0) go +INSERT INTO tester.ittable (a, b) VALUES('z', 0) go +INSERT INTO tester.timetypes VALUES( + '1.1.1900 01:02', + '1.1.1753 01:02:03.100', + '12/3/2032', + '11:22:33.456', + '6.4.1553 11:11:11.111111', + '11:11:11.111111' +) +INSERT INTO tester.approxtypes VALUES( + 2.2250738585072014e-308, + 1.175494351e-38 +) go +INSERT INTO tester.approxtypes VALUES( + 1.797693134862315708e+308, + 3.402823466e+38 +) go +INSERT INTO tester.decimaltypes VALUES( + 12345678901234567890123456, + 1234567890123456789012345678, + 1234567890.0123456789, + 12345678901234567.0123456789 +) go +INSERT INTO tester.decimaltypes VALUES( + -12345678901234567890123456, + -1234567890123456789012345678, + -1234567890.0123456789, + -12345678901234567.0123456789 +) go +INSERT INTO tester.integertypes VALUES( + -9223372036854775808, + -2147483648, + -32768, + 0, + 0, + 0 +) go +INSERT INTO tester.integertypes VALUES( + 9223372036854775807, + 2147483647, + 32767, + 18446744073709551615, + 4294967295, + 65535 +) go +INSERT INTO tester.moneytypes VALUES( + 214748.3647, + 922337203685477.5807 +) go +INSERT INTO tester.moneytypes VALUES( + -214748.3648, + -922337203685477.5808 +) go +INSERT INTO tester.chartypes VALUES( + 'c10', + 'c2001', + 'vc10', + 'uc10', + 'uvc10', + 'nc10', + 'nvc10' +) go +INSERT INTO tester.fatunichartypes VALUES( + 'xyz' +) go +INSERT INTO tester.texttypes VALUES( + 'Text. A wall of text.', + 'Text. A wall of Unicode text.' 
+) go
+INSERT INTO tester.misctypes VALUES(
+    0xdeadbeef,
+    0xdeadbeef,
+    0xdeadbeef,
+    0
+) go
+COMMIT go
\ No newline at end of file
diff --git a/jdbc-adapter/integration-test-data/sybase/sybase-prepare-database.sql b/jdbc-adapter/integration-test-data/sybase/sybase-prepare-database.sql
new file mode 100644
index 000000000..763af100d
--- /dev/null
+++ b/jdbc-adapter/integration-test-data/sybase/sybase-prepare-database.sql
@@ -0,0 +1,19 @@
+USE master go
+
+-- Initialize a data partition
+DISK INIT
+    name = 'data_dev1',
+    physname = 'data_dev1.dat',
+    size = '100M'
+go
+
+-- Initialize a database log partition
+DISK INIT
+    name = 'log_dev1',
+    physname = 'log_dev1.dat',
+    size = '25M'
+go
+
+--DROP DATABASE testdb go
+CREATE DATABASE testdb ON data_dev1='25M' LOG ON log_dev1='5M' go
+sp_addlogin 'tester', 'tester' go
\ No newline at end of file
diff --git a/jdbc-adapter/launch/Virtual-Schema_all_tests.launch b/jdbc-adapter/launch/Virtual-Schema_all_tests.launch
new file mode 100644
index 000000000..1704dc40c
--- /dev/null
+++ b/jdbc-adapter/launch/Virtual-Schema_all_tests.launch
@@ -0,0 +1,22 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/jdbc-adapter/local/integration-test-config.yaml b/jdbc-adapter/local/integration-test-config.yaml
new file mode 100644
index 000000000..f75ec8db0
--- /dev/null
+++ b/jdbc-adapter/local/integration-test-config.yaml
@@ -0,0 +1,89 @@
+# Configuration file for integration tests
+
+general:
+  debug: false
+  debugAddress: '10.44.1.228:3000' # Address which will be defined as DEBUG_ADDRESS in the virtual schemas
+  bucketFsUrl: http://localhost:2580/jars
+  bucketFsPassword: public
+  jdbcAdapterPath: /buckets/bfsdefault/jars/virtualschema-jdbc-adapter-dist-1.1.0.jar
+
+exasol:
+  runIntegrationTests: true
+  address: localhost:8563
+  user: sys
+  password: exasol
+
+
+
+# Generic sql dialect is tested via MySQL
+generic:
+  runIntegrationTests: false
+  jdbcDriverPath: /buckets/bfsdefault/jars/mysql-connector-java-8.0.12.jar
+  connectionString: jdbc:mysql://localhost/virtual-schema-integration-test
+  user: virtual-schema-integration-test
+  password: password
+
+oracle:
+  runIntegrationTests: false
+  jdbcDriverPath: /buckets/mybucketfs/mybucket/oracle/ojdbc7.jar
+  connectionString: jdbc:oracle:thin:@oracle-host:1521:orcl
+  user: myuser
+  password: mypass
+
+
+impala:
+  runIntegrationTests: false
+  connectionString: jdbc:impala://impala-host:21050;AuthMech=0
+  jdbcDriverPath: /buckets/mybucketfs/mybucket/Cloudera_Impala_JDBC_2_5_28.1047_Driver/
+  jdbcDriverJars:
+    - hive_metastore.jar
+    - hive_service.jar
+    - ImpalaJDBC41.jar
+    - libfb303-0.9.0.jar
+    - libthrift-0.9.0.jar
+    - log4j-1.2.14.jar
+    - ql.jar
+    - slf4j-api-1.5.11.jar
+    - slf4j-log4j12-1.5.11.jar
+    - TCLIServiceClient.jar
+    - zookeeper-3.4.6.jar
+
+
+kerberos:
+  runIntegrationTests: false
+  jdbcDriverPath: /buckets/mybucketfs/mybucket/cloudera-hive-jdbc-driver/
+  jdbcDriverJars:
+    - HiveJDBC41.jar
+    - hive_metastore.jar
+    - hive_service.jar
+    - libfb303-0.9.0.jar
+    - libthrift-0.9.0.jar
+    - log4j-1.2.14.jar
+    - ql.jar
+    - slf4j-api-1.5.11.jar
+    - slf4j-log4j12-1.5.11.jar
+    - TCLIServiceClient.jar
+    - zookeeper-3.4.6.jar
+  connectionString: jdbc:hive2://hadoop-host.yourcompany.com:10000/;AuthMech=1;KrbRealm=YOURCOMPANY.COM;KrbHostFQDN=hadoop-host.yourcompany.com;KrbServiceName=hive
+  user: testuser@YOURCOMPANY.COM
+  password: 
ExaAuthType=Kerberos;X3xpYmRlZmF1bHRzXQpkZWZhdWx0X3JlYWxtID0gT01HLkRFVi5FWEFTT0wuQ09NCmRuc19jYW5vbmljYWxpemVfaG9zdG5hbWUgPSBmYWxzZQpkbnNfbG9va3VwX2tkYyA9IGZhbHNlCmRuc19sb29rdXBfcmVhbG0gPSBmYWxzZQp0aWNrZXRfbGlmZXRpbWUgPSA4NjQwMApyZW5ld19saWZldGltZSA9IDYwNDgwMApmb3J3YXJkYWJsZSA9IHRydWUKZGVmYXVsdF90Z3NfZW5jdHlwZXMgPSBhcmNmb3VyLWhtYWMKZGVmYXVsdF90a3RfZW5jdHlwZXMgPSBhcmNmb3VyLWhtYWMKcGVybWl0dGVkX2VuY3R5cGVzID0gYXJjZm91ci1obWFjCnVkcF9wcmVmZXJlbmNlX2xpbWl0ID0gMQpbcmVhbG1zXQpPTUcuREVWLkVYQVNPTC5DT00gPSB7CmtkYyA9IGhhZG9vcDAxLm9tZy5kZXYuZXhhc29sLmNvbQphZG1pbl9zZXJ2ZXIgPSBoYWRvb3AwMS5vbWcuZGV2LmV4YXNvbC5jb20KfQo=;BQIAAABBAAEAEk9NRy5ERVYuRVhBU09MLkNPTQAMaGFkb29wdGVzdGVyAAAAAVYo0X0BABcAEGuPtGr6sYdhUEbTqhYQ3E0= + +hive: + runIntegrationTests: false + jdbcDriverPath: /buckets/mybucketfs/mybucket/cloudera-hive-jdbc-driver/ + jdbcDriverJars: + - HiveJDBC41.jar + - hive_metastore.jar + - hive_service.jar + - libfb303-0.9.0.jar + - libthrift-0.9.0.jar + - log4j-1.2.14.jar + - ql.jar + - slf4j-api-1.5.11.jar + - slf4j-log4j12-1.5.11.jar + - TCLIServiceClient.jar + - zookeeper-3.4.6.jar + connectionString: jdbc:hive2://hive-host:10000 + user: user + password: pass + diff --git a/jdbc-adapter/local/logging.properties b/jdbc-adapter/local/logging.properties new file mode 100644 index 000000000..b0d506f9f --- /dev/null +++ b/jdbc-adapter/local/logging.properties @@ -0,0 +1,14 @@ +handlers=java.util.logging.ConsoleHandler, java.util.logging.FileHandler +.level=INFO +java.util.logging.ConsoleHandler.level=ALL +java.util.logging.ConsoleHandler.formatter=java.util.logging.SimpleFormatter + +java.util.logging.FileHandler.level = ALL +java.util.logging.FileHandler.pattern=/home/seb/logs/virtual_schema.log +java.util.logging.FileHandler.limit=50000 +java.util.logging.FileHandler.count=1 +java.util.logging.FileHandler.formatter=java.util.logging.SimpleFormatter + +java.util.logging.SimpleFormatter.format=%1$tY-%1$tm-%1$td %1$tH:%1$tM:%1$tS.%1$tL %4$-7s [%3$s] %5$s %6$s%n + +com.exasol.level=FINE \ No newline at end of file diff --git a/jdbc-adapter/pom.xml b/jdbc-adapter/pom.xml index d4e1ca4e6..09ada3232 100644 --- a/jdbc-adapter/pom.xml +++ b/jdbc-adapter/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.exasol virtualschema-jdbc-adapter-main - 1.0.2-SNAPSHOT + ${product.version} pom @@ -14,7 +14,9 @@ + 1.1.0 UTF-8 + UTF-8 1.8 @@ -55,7 +57,7 @@ junit junit - 4.11 + 4.12 test @@ -64,6 +66,12 @@ 2.0.52-beta + + org.hamcrest + hamcrest-junit + 2.0.0.0 + test + diff --git a/jdbc-adapter/tools/increment_version.sh b/jdbc-adapter/tools/increment_version.sh deleted file mode 100755 index 3a128798b..000000000 --- a/jdbc-adapter/tools/increment_version.sh +++ /dev/null @@ -1,15 +0,0 @@ -if [ $# -ne 2 ]; then - echo "Usage example: $0 0.0.1-SNAPSHOT 0.0.1" - exit 1; -fi - -BASEDIR=$(dirname "$0") -PARENTDIR=$(dirname "$BASEDIR") -OLD_VERSION="$1" -OLD_VERSION="${OLD_VERSION//./\\.}" -NEW_VERSION="$2" - -echo "Substitute $OLD_VERSION with $NEW_VERSION in $PARENTDIR" - -find $PARENTDIR -type f | xargs sed -i -e "s/$OLD_VERSION/$NEW_VERSION/g" - diff --git a/jdbc-adapter/tools/version.sh b/jdbc-adapter/tools/version.sh new file mode 100755 index 000000000..e495d30ef --- /dev/null +++ b/jdbc-adapter/tools/version.sh @@ -0,0 +1,100 @@ +#!/bin/bash +readonly vs_jar_prefix='virtualschema-jdbc-adapter-dist' +readonly jar_suffix='jar' +readonly vs_jar_pattern="$vs_jar_prefix-.*\.$jar_suffix" +readonly root_dir='virtual-schemas' +readonly master_pom='jdbc-adapter/pom.xml' +readonly file_find_regex='.*\.(md|yaml)' +readonly script=$(basename $0) + +main() 
{ + case "$1" in + help) + usage + ;; + verify) + verify + ;; + unify) + unify + ;; + *) + log "Unknown command: \"$1\"" + log + usage + exit 1 + ;; + esac +} + +usage () { + log "Usage: $script help" + log " $script verify" + log " $script unify" + log + log "Run from the root directory \"$root_dir\"" + log + log "This script can serve as a checkpoint using 'verify' as command. The exit value" + log "is zero when all detected version numbers match the ones on the master POM file." + log "It is non-zero if there is a mismatch." + log + log "Used with the command 'unify' this script rewrites all occurrences of divergent" + log "version numbers with the one found in the master POM file." +} + +verify () { + prepare + verify_no_other_version_numbers "$version" +} + +prepare() { + verify_current_directory "$root_dir" + readonly version=$(extract_product_version "$master_pom") + log "Found version $version in master file \"$master_pom\"" +} + +verify_current_directory() { + if [[ "$(basename $PWD)" != "$root_dir" ]] + then + log "Must be in root directory '$root_dir' to execute this script." + exit 1 + fi +} + +extract_product_version() { + grep -oP "product\.version>[^<]*<" "$1" | sed -e's/^.*>\s*//' -e's/\s* com.exasol virtualschema-jdbc-adapter-main - 1.0.2-SNAPSHOT + ${product.version} virtualschema-common diff --git a/jdbc-adapter/virtualschema-jdbc-adapter-dist/pom.xml b/jdbc-adapter/virtualschema-jdbc-adapter-dist/pom.xml index 21d7b4ca0..9190e5df0 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter-dist/pom.xml +++ b/jdbc-adapter/virtualschema-jdbc-adapter-dist/pom.xml @@ -5,7 +5,7 @@ com.exasol virtualschema-jdbc-adapter-main - 1.0.2-SNAPSHOT + ${product.version} virtualschema-jdbc-adapter-dist @@ -21,12 +21,12 @@ com.exasol virtualschema-common - 1.0.2-SNAPSHOT + ${product.version} com.exasol virtualschema-jdbc-adapter - 1.0.2-SNAPSHOT + ${product.version} diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/pom.xml b/jdbc-adapter/virtualschema-jdbc-adapter/pom.xml index 7c22a1729..f1b02ec8c 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/pom.xml +++ b/jdbc-adapter/virtualschema-jdbc-adapter/pom.xml @@ -5,7 +5,7 @@ com.exasol virtualschema-jdbc-adapter-main - 1.0.2-SNAPSHOT + ${product.version} virtualschema-jdbc-adapter @@ -95,7 +95,7 @@ com.exasol virtualschema-common - 1.0.2-SNAPSHOT + ${product.version} diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/AbstractSqlDialect.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/AbstractSqlDialect.java index b5fc00efb..8c28c94cf 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/AbstractSqlDialect.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/AbstractSqlDialect.java @@ -1,5 +1,14 @@ package com.exasol.adapter.dialects; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Types; +import java.util.EnumMap; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + import com.exasol.adapter.jdbc.ColumnAdapterNotes; import com.exasol.adapter.jdbc.JdbcAdapterProperties; import com.exasol.adapter.metadata.ColumnMetadata; @@ -7,23 +16,21 @@ import com.exasol.adapter.sql.AggregateFunction; import com.exasol.adapter.sql.ScalarFunction; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Types; -import java.util.*; - /** - * Abstract implementation of a dialect. 
We recommend that every dialect should extend this abstract class. + * Abstract implementation of a dialect. We recommend that every dialect should + * extend this abstract class. * - * TODO Find solution to handle unsupported types (e.g. exceeding varchar size). E.g. skip column or always truncate or add const-null column or throw error or make configurable + * TODO Find solution to handle unsupported types (e.g. exceeding varchar size). + * E.g. skip column or always truncate or add const-null column or throw error + * or make configurable */ public abstract class AbstractSqlDialect implements SqlDialect { protected Set omitParenthesesMap = new HashSet<>(); - private SqlDialectContext context; + private final SqlDialectContext context; - public AbstractSqlDialect(SqlDialectContext context) { + public AbstractSqlDialect(final SqlDialectContext context) { this.context = context; } @@ -33,73 +40,70 @@ public String getTableCatalogAndSchemaSeparator() { } @Override - public MappedTable mapTable(ResultSet tables) throws SQLException { -// for (int i=1; i<=tables.getMetaData().getColumnCount(); ++i) { -// System.out.println(" - " + tables.getMetaData().getColumnName(i) + ": " + tables.getString(i)); -// } + public MappedTable mapTable(final ResultSet tables) throws SQLException { String commentString = tables.getString("REMARKS"); if (commentString == null) { commentString = ""; } - String tableName = changeIdentifierCaseIfNeeded(tables.getString("TABLE_NAME")); - return MappedTable.createMappedTable(tableName,tables.getString("TABLE_NAME"), commentString); + final String tableName = changeIdentifierCaseIfNeeded(tables.getString("TABLE_NAME")); + return MappedTable.createMappedTable(tableName, tables.getString("TABLE_NAME"), commentString); } @Override - public ColumnMetadata mapColumn(ResultSet columns) throws SQLException { - String colName = changeIdentifierCaseIfNeeded(columns.getString("COLUMN_NAME")); - int jdbcType = columns.getInt("DATA_TYPE"); - int decimalScale = columns.getInt("DECIMAL_DIGITS"); - int precisionOrSize = columns.getInt("COLUMN_SIZE"); - int charOctedLength = columns.getInt("CHAR_OCTET_LENGTH"); - String typeName = columns.getString("TYPE_NAME"); - JdbcTypeDescription jdbcTypeDescription = new JdbcTypeDescription(jdbcType, - decimalScale, precisionOrSize, charOctedLength, typeName); + public ColumnMetadata mapColumn(final ResultSet columns) throws SQLException { + final String colName = changeIdentifierCaseIfNeeded(columns.getString("COLUMN_NAME")); + final int jdbcType = columns.getInt("DATA_TYPE"); + final int decimalScale = columns.getInt("DECIMAL_DIGITS"); + final int precisionOrSize = columns.getInt("COLUMN_SIZE"); + final int charOctedLength = columns.getInt("CHAR_OCTET_LENGTH"); + final String typeName = columns.getString("TYPE_NAME"); + final JdbcTypeDescription jdbcTypeDescription = new JdbcTypeDescription(jdbcType, decimalScale, precisionOrSize, + charOctedLength, typeName); // Check if dialect want's to handle this row - DataType colType = mapJdbcType(jdbcTypeDescription); + final DataType colType = mapJdbcType(jdbcTypeDescription); // Nullable boolean isNullable = true; try { - String nullable = columns.getString("IS_NULLABLE"); + final String nullable = columns.getString("IS_NULLABLE"); if (nullable != null && nullable.toLowerCase().equals("no")) { isNullable = false; } - } catch (SQLException ex) { + } catch (final SQLException ex) { // ignore me } // Identity - + boolean isIdentity = false; try { - String identity = columns.getString("IS_AUTOINCREMENT"); 
+ final String identity = columns.getString("IS_AUTOINCREMENT"); if (identity != null && identity.toLowerCase().equals("yes")) { isIdentity = true; } - } catch (SQLException ex) { + } catch (final SQLException ex) { // ignore me --some older JDBC drivers (Java 1.5) don't support IS_AUTOINCREMENT } // Default String defaultValue = ""; try { - String defaultString = columns.getString("COLUMN_DEF"); + final String defaultString = columns.getString("COLUMN_DEF"); if (defaultString != null) { defaultValue = defaultString; } - } catch (SQLException ex) { + } catch (final SQLException ex) { // ignore me } // Comment String comment = ""; try { - String commentString = columns.getString("REMARKS"); + final String commentString = columns.getString("REMARKS"); if (commentString != null && !commentString.isEmpty()) { comment = commentString; } - } catch (SQLException ex) { + } catch (final SQLException ex) { // ignore me } @@ -108,123 +112,132 @@ public ColumnMetadata mapColumn(ResultSet columns) throws SQLException { if (columnTypeName == null) { columnTypeName = ""; } - String adapterNotes = ColumnAdapterNotes.serialize(new ColumnAdapterNotes(jdbcType, columnTypeName));; + final String adapterNotes = ColumnAdapterNotes.serialize(new ColumnAdapterNotes(jdbcType, columnTypeName)); + ; return new ColumnMetadata(colName, adapterNotes, colType, isNullable, isIdentity, defaultValue, comment); } - private static DataType getExaTypeFromJdbcType(JdbcTypeDescription jdbcTypeDescription) throws SQLException { + private static DataType getExaTypeFromJdbcType(final JdbcTypeDescription jdbcTypeDescription) throws SQLException { DataType colType; switch (jdbcTypeDescription.getJdbcType()) { - case Types.TINYINT: - case Types.SMALLINT: - if (jdbcTypeDescription.getPrecisionOrSize() <= DataType.maxExasolDecimalPrecision) { - int precision = jdbcTypeDescription.getPrecisionOrSize() == 0 ? 9 : jdbcTypeDescription.getPrecisionOrSize(); - colType = DataType.createDecimal(precision, 0); - } else { - colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); - } - break; - case Types.INTEGER: - if (jdbcTypeDescription.getPrecisionOrSize() <= DataType.maxExasolDecimalPrecision) { - int precision = jdbcTypeDescription.getPrecisionOrSize() == 0 ? 18 : jdbcTypeDescription.getPrecisionOrSize(); - colType = DataType.createDecimal(precision, 0); - } else { - colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); - } - break; - case Types.BIGINT: // Java type long - if (jdbcTypeDescription.getPrecisionOrSize() <= DataType.maxExasolDecimalPrecision) { - int precision = jdbcTypeDescription.getPrecisionOrSize() == 0 ? 36 : jdbcTypeDescription.getPrecisionOrSize(); - colType = DataType.createDecimal(precision, 0); - } else { - colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); - } - break; - case Types.DECIMAL: + case Types.TINYINT: + case Types.SMALLINT: + if (jdbcTypeDescription.getPrecisionOrSize() <= DataType.maxExasolDecimalPrecision) { + final int precision = jdbcTypeDescription.getPrecisionOrSize() == 0 ? 9 + : jdbcTypeDescription.getPrecisionOrSize(); + colType = DataType.createDecimal(precision, 0); + } else { + colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); + } + break; + case Types.INTEGER: + if (jdbcTypeDescription.getPrecisionOrSize() <= DataType.maxExasolDecimalPrecision) { + final int precision = jdbcTypeDescription.getPrecisionOrSize() == 0 ? 
18 + : jdbcTypeDescription.getPrecisionOrSize(); + colType = DataType.createDecimal(precision, 0); + } else { + colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); + } + break; + case Types.BIGINT: // Java type long + if (jdbcTypeDescription.getPrecisionOrSize() <= DataType.maxExasolDecimalPrecision) { + final int precision = jdbcTypeDescription.getPrecisionOrSize() == 0 ? 36 + : jdbcTypeDescription.getPrecisionOrSize(); + colType = DataType.createDecimal(precision, 0); + } else { + colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); + } + break; + case Types.DECIMAL: - if (jdbcTypeDescription.getPrecisionOrSize() <= DataType.maxExasolDecimalPrecision) { - colType = DataType.createDecimal(jdbcTypeDescription.getPrecisionOrSize(), jdbcTypeDescription.getDecimalScale()); - } else { - colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); - } - break; - case Types.NUMERIC: // Java BigInteger + if (jdbcTypeDescription.getPrecisionOrSize() <= DataType.maxExasolDecimalPrecision) { + colType = DataType.createDecimal(jdbcTypeDescription.getPrecisionOrSize(), + jdbcTypeDescription.getDecimalScale()); + } else { colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); - break; - case Types.REAL: - case Types.FLOAT: - case Types.DOUBLE: - colType = DataType.createDouble(); - break; - case Types.VARCHAR: - case Types.NVARCHAR: - case Types.LONGVARCHAR: - case Types.LONGNVARCHAR: { - DataType.ExaCharset charset = (jdbcTypeDescription.getCharOctedLength() == jdbcTypeDescription.getPrecisionOrSize()) ? DataType.ExaCharset.ASCII : DataType.ExaCharset.UTF8; + } + break; + case Types.NUMERIC: // Java BigInteger + colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); + break; + case Types.REAL: + case Types.FLOAT: + case Types.DOUBLE: + colType = DataType.createDouble(); + break; + case Types.VARCHAR: + case Types.NVARCHAR: + case Types.LONGVARCHAR: + case Types.LONGNVARCHAR: { + final DataType.ExaCharset charset = (jdbcTypeDescription.getCharOctedLength() == jdbcTypeDescription + .getPrecisionOrSize()) ? DataType.ExaCharset.ASCII : DataType.ExaCharset.UTF8; + if (jdbcTypeDescription.getPrecisionOrSize() <= DataType.maxExasolVarcharSize) { + final int precision = jdbcTypeDescription.getPrecisionOrSize() == 0 ? DataType.maxExasolVarcharSize + : jdbcTypeDescription.getPrecisionOrSize(); + colType = DataType.createVarChar(precision, charset); + } else { + colType = DataType.createVarChar(DataType.maxExasolVarcharSize, charset); + } + break; + } + case Types.CHAR: + case Types.NCHAR: { + final DataType.ExaCharset charset = (jdbcTypeDescription.getCharOctedLength() == jdbcTypeDescription + .getPrecisionOrSize()) ? DataType.ExaCharset.ASCII : DataType.ExaCharset.UTF8; + if (jdbcTypeDescription.getPrecisionOrSize() <= DataType.maxExasolCharSize) { + colType = DataType.createChar(jdbcTypeDescription.getPrecisionOrSize(), charset); + } else { if (jdbcTypeDescription.getPrecisionOrSize() <= DataType.maxExasolVarcharSize) { - int precision = jdbcTypeDescription.getPrecisionOrSize() == 0 - ? 
DataType.maxExasolVarcharSize : jdbcTypeDescription.getPrecisionOrSize(); - colType = DataType.createVarChar(precision, charset); + colType = DataType.createVarChar(jdbcTypeDescription.getPrecisionOrSize(), charset); } else { colType = DataType.createVarChar(DataType.maxExasolVarcharSize, charset); } - break; - } - case Types.CHAR: - case Types.NCHAR: { - DataType.ExaCharset charset = (jdbcTypeDescription.getCharOctedLength() == jdbcTypeDescription.getPrecisionOrSize()) ? DataType.ExaCharset.ASCII : DataType.ExaCharset.UTF8; - if (jdbcTypeDescription.getPrecisionOrSize() <= DataType.maxExasolCharSize) { - colType = DataType.createChar(jdbcTypeDescription.getPrecisionOrSize(), charset); - } else { - if (jdbcTypeDescription.getPrecisionOrSize() <= DataType.maxExasolVarcharSize) { - colType = DataType.createVarChar(jdbcTypeDescription.getPrecisionOrSize(), charset); - } else { - colType = DataType.createVarChar(DataType.maxExasolVarcharSize, charset); - } - } - break; } - case Types.DATE: - colType = DataType.createDate(); - break; - case Types.TIMESTAMP: - colType = DataType.createTimestamp(false); - break; - case Types.TIME: - colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); - break; - case Types.BIT: - case Types.BOOLEAN: - colType = DataType.createBool(); - break; - case Types.BINARY: - case Types.VARBINARY: - case Types.LONGVARBINARY: - case Types.BLOB: - case Types.CLOB: - case Types.NCLOB: - colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); - break; - case Types.OTHER: - case Types.JAVA_OBJECT: - case Types.DISTINCT: - case Types.STRUCT: - case Types.ARRAY: - case Types.REF: - case Types.DATALINK: - case Types.SQLXML: - case Types.NULL: - default: - throw new RuntimeException("Unsupported data type (" + jdbcTypeDescription.getJdbcType() + ") found in source system, should never happen"); + break; } - assert(colType != null); + case Types.DATE: + colType = DataType.createDate(); + break; + case Types.TIMESTAMP: + colType = DataType.createTimestamp(false); + break; + case Types.TIME: + colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); + break; + case Types.BIT: + case Types.BOOLEAN: + colType = DataType.createBool(); + break; + case Types.BINARY: + case Types.VARBINARY: + case Types.LONGVARBINARY: + case Types.BLOB: + case Types.CLOB: + case Types.NCLOB: + colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); + break; + case Types.OTHER: + case Types.JAVA_OBJECT: + case Types.DISTINCT: + case Types.STRUCT: + case Types.ARRAY: + case Types.REF: + case Types.DATALINK: + case Types.SQLXML: + case Types.NULL: + default: + throw new RuntimeException("Unsupported data type (" + jdbcTypeDescription.getJdbcType() + + ") found in source system, should never happen"); + } + assert (colType != null); return colType; } - public String changeIdentifierCaseIfNeeded(String identifier) { + public String changeIdentifierCaseIfNeeded(final String identifier) { if (getQuotedIdentifierHandling() == getUnquotedIdentifierHandling()) { if (getQuotedIdentifierHandling() != IdentifierCaseHandling.INTERPRET_CASE_SENSITIVE) { - // Completely case-insensitive. We can store everything uppercase to allow working with unquoted identifiers in EXASOL + // Completely case-insensitive. 
We can store everything uppercase to allow + // working with unquoted identifiers in EXASOL return identifier.toUpperCase(); } } @@ -232,12 +245,12 @@ public String changeIdentifierCaseIfNeeded(String identifier) { } @Override - public boolean omitParentheses(ScalarFunction function) { - return omitParenthesesMap.contains(function); + public boolean omitParentheses(final ScalarFunction function) { + return this.omitParenthesesMap.contains(function); } @Override - public SqlGenerationVisitor getSqlGenerationVisitor(SqlGenerationContext context) { + public SqlGenerationVisitor getSqlGenerationVisitor(final SqlGenerationContext context) { return new SqlGenerationVisitor(this, context); } @@ -245,7 +258,7 @@ public SqlGenerationVisitor getSqlGenerationVisitor(SqlGenerationContext context public abstract DataType dialectSpecificMapJdbcType(JdbcTypeDescription jdbcType) throws SQLException; @Override - public final DataType mapJdbcType(JdbcTypeDescription jdbcType) throws SQLException { + public final DataType mapJdbcType(final JdbcTypeDescription jdbcType) throws SQLException { DataType type = dialectSpecificMapJdbcType(jdbcType); if (type == null) { type = getExaTypeFromJdbcType(jdbcType); @@ -260,7 +273,7 @@ public Map getScalarFunctionAliases() { @Override public Map getAggregateFunctionAliases() { - Map aliases = new HashMap<>(); + final Map aliases = new HashMap<>(); aliases.put(AggregateFunction.GEO_INTERSECTION_AGGREGATE, "ST_INTERSECTION"); aliases.put(AggregateFunction.GEO_UNION_AGGREGATE, "ST_UNION"); return aliases; @@ -268,7 +281,7 @@ public Map getAggregateFunctionAliases() { @Override public Map getBinaryInfixFunctionAliases() { - Map aliases = new HashMap<>(); + final Map aliases = new HashMap<>(); aliases.put(ScalarFunction.ADD, "+"); aliases.put(ScalarFunction.SUB, "-"); aliases.put(ScalarFunction.MULT, "*"); @@ -278,17 +291,18 @@ public Map getBinaryInfixFunctionAliases() { @Override public Map getPrefixFunctionAliases() { - Map aliases = new HashMap<>(); + final Map aliases = new HashMap<>(); aliases.put(ScalarFunction.NEG, "-"); return aliases; } public SqlDialectContext getContext() { - return context; + return this.context; } - public void handleException(SQLException exception, - JdbcAdapterProperties.ExceptionHandlingMode exceptionMode) throws SQLException { + @Override + public void handleException(final SQLException exception, + final JdbcAdapterProperties.ExceptionHandlingMode exceptionMode) throws SQLException { throw exception; }; } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlDialect.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlDialect.java index 249a0cb5f..c6394ae1f 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlDialect.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlDialect.java @@ -1,5 +1,10 @@ package com.exasol.adapter.dialects; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Map; + import com.exasol.adapter.capabilities.Capabilities; import com.exasol.adapter.jdbc.JdbcAdapterProperties; import com.exasol.adapter.metadata.ColumnMetadata; @@ -7,13 +12,9 @@ import com.exasol.adapter.sql.AggregateFunction; import com.exasol.adapter.sql.ScalarFunction; -import java.sql.DatabaseMetaData; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.Map; - /** - * Interface for the 
implementation of a SQL dialect. All data source specific logic is specified here. + * Interface for the implementation of a SQL dialect. All data source specific + * logic is specified here. * *

* The responsibilities of a dialect can be divided into 3 areas: @@ -21,43 +22,57 @@ * *

* 1. Capabilities:
- * The dialect defines the set of supported capabilities. See {@link #getCapabilities()} for details. + * The dialect defines the set of supported capabilities. See + * {@link #getCapabilities()} for details. *
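+ *
+ * For illustration only (a sketch; the {@code Capabilities} builder methods and
+ * {@code MainCapability} constants shown here are assumptions, not taken from
+ * this patch), a dialect typically assembles its capabilities in one place:
+ * <pre>
+ * final Capabilities capabilities = new Capabilities();
+ * capabilities.supportMainCapability(MainCapability.SELECTLIST_PROJECTION);
+ * capabilities.supportMainCapability(MainCapability.FILTER_EXPRESSIONS);
+ * return capabilities;
+ * </pre>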

* *

* 2. Data Type Mapping:
- * The dialect defines, how the tables in the data source are mapped to EXASOL virtual tables. - * In particular the data types have to be mapped to EXASOL data types. See {@link #mapJdbcType(JdbcTypeDescription)} for details. + * The dialect defines how the tables in the data source are mapped to EXASOL + * virtual tables. In particular, the data types have to be mapped to EXASOL data + * types. See {@link #mapJdbcType(JdbcTypeDescription)} for details. *
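+ *
+ * As a minimal sketch (the source type and target size chosen here are
+ * assumptions for illustration), a dialect can redefine a single mapping and
+ * fall back to the default for everything else:
+ * <pre>
+ * &#64;Override
+ * public DataType dialectSpecificMapJdbcType(final JdbcTypeDescription jdbcType) throws SQLException {
+ *     if (jdbcType.getJdbcType() == java.sql.Types.TIME) {
+ *         return DataType.createVarChar(100, DataType.ExaCharset.UTF8);
+ *     }
+ *     return null; // null means: apply the default mapping
+ * }
+ * </pre>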

* *

* 3. SQL Generation:
* The dialect defines how to generate SQL statements in the data source syntax. - * The dialect provides several methods to customize quoting, case-sensitivity, function name aliases, - * and other aspects of the syntax. + * The dialect provides several methods to customize quoting, case-sensitivity, + * function name aliases, and other aspects of the syntax. * - * The actual SQL generation is done by the separate class {@link SqlGenerationVisitor} (it uses the visitor pattern). - * For things like quoting and case-sensitivity, the SQL generation visitor will ask the dialect how to handle them. + * The actual SQL generation is done by the separate class + * {@link SqlGenerationVisitor} (it uses the visitor pattern). For things like + * quoting and case-sensitivity, the SQL generation visitor will ask the dialect + * how to handle them. * - * If your dialect has a special SQL syntax which cannot be realized using the methods of {@link SqlDialect}, then you can - * implement your own SQL generation visitor which extends {@link SqlGenerationVisitor}. - * Your custom visitor must then be returned by {@link #getSqlGenerationVisitor(SqlGenerationContext)}. - * For an example look at {@link com.exasol.adapter.dialects.impl.OracleSqlGenerationVisitor}. + * If your dialect has a special SQL syntax which cannot be realized using the + * methods of {@link SqlDialect}, then you can implement your own SQL generation + * visitor which extends {@link SqlGenerationVisitor}. Your custom visitor must + * then be returned by {@link #getSqlGenerationVisitor(SqlGenerationContext)}. + * For an example, look at + * {@link com.exasol.adapter.dialects.impl.OracleSqlGenerationVisitor}. *
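+ *
+ * Hooking in such a custom visitor then boils down to the following override
+ * (a sketch; {@code MyDialectSqlGenerationVisitor} is a hypothetical class):
+ * <pre>
+ * &#64;Override
+ * public SqlGenerationVisitor getSqlGenerationVisitor(final SqlGenerationContext context) {
+ *     return new MyDialectSqlGenerationVisitor(this, context);
+ * }
+ * </pre>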

* * Notes for developing a dialect * - *

Create a class for your integration test, with the suffix IT.java.

+ *

+ * Create a class for your integration test, with the suffix IT.java. + *

* - *

We recommend to extend the abstract class {@link AbstractSqlDialect} instead of directly implementing {@link SqlDialect}.

+ *

+ * We recommend extending the abstract class {@link AbstractSqlDialect} instead + * of directly implementing {@link SqlDialect}. + *

*/ public interface SqlDialect { - + /** - * @return the name that can be used to choose this dialect (user can give this name). Case insensitive. + * @return the name that can be used to choose this dialect (user can give this + * name). Case insensitive. */ - String getPublicName(); - + public static String getPublicName() { + return "SqlDialect interface"; + }; + // // CAPABILITIES // @@ -65,146 +80,178 @@ public interface SqlDialect { /** * @return The set of capabilities supported by this SQL-Dialect */ - Capabilities getCapabilities(); + public Capabilities getCapabilities(); // // MAPPING OF METADATA: CATALOGS, SCHEMAS, TABLES AND DATA TYPES // - enum SchemaOrCatalogSupport { - SUPPORTED, - UNSUPPORTED, - UNKNOWN + public enum SchemaOrCatalogSupport { + SUPPORTED, UNSUPPORTED, UNKNOWN } /** - * @return True, if the database "truly" supports the concept of JDBC catalogs (not just a single dummy catalog). If true, the user must specify the catalog. - * False, if the database does not have a catalog concept, e.g. if it has no catalogs, or a single dummy catalog, or even if it throws an Exception for {@link DatabaseMetaData#getCatalogs()}. If false, the user must not specify the catalog. + * @return True, if the database "truly" supports the concept of JDBC catalogs + * (not just a single dummy catalog). If true, the user must specify the + * catalog. False, if the database does not have a catalog concept, e.g. + * if it has no catalogs, or a single dummy catalog, or even if it + * throws an Exception for {@link DatabaseMetaData#getCatalogs()}. If + * false, the user must not specify the catalog. */ - SchemaOrCatalogSupport supportsJdbcCatalogs(); + public SchemaOrCatalogSupport supportsJdbcCatalogs(); /** - * @return True, if the database "truly" supports the concept of JDBC schemas (not just a single dummy schema). If true, the user must specify the schema. - * False, if the database does not have a schema concept, e.g. if it has no schemas, or a single dummy schemas, or even if it throws an Exception for {@link DatabaseMetaData#getSchemas()}. If false, the user must not specify the schema. + * @return True, if the database "truly" supports the concept of JDBC schemas + * (not just a single dummy schema). If true, the user must specify the + * schema. False, if the database does not have a schema concept, e.g. + * if it has no schemas, or a single dummy schema, or even if it throws + * an Exception for {@link DatabaseMetaData#getSchemas()}. If false, the + * user must not specify the schema.
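+ *
+ * For example (a sketch only), a dialect for a data source without a schema
+ * concept would simply report:
+ * <pre>
+ * &#64;Override
+ * public SchemaOrCatalogSupport supportsJdbcSchemas() {
+ *     return SchemaOrCatalogSupport.UNSUPPORTED;
+ * }
+ * </pre>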
*/ - SchemaOrCatalogSupport supportsJdbcSchemas(); + public SchemaOrCatalogSupport supportsJdbcSchemas(); - class MappedTable { + public class MappedTable { private boolean isIgnored = false; private String tableName = ""; private String originalName = ""; private String tableComment = ""; - public static MappedTable createMappedTable(String tableName, String originalName, String tableComment) { - MappedTable t = new MappedTable(); + + public static MappedTable createMappedTable(final String tableName, final String originalName, + final String tableComment) { + final MappedTable t = new MappedTable(); t.isIgnored = false; t.tableName = tableName; t.originalName = originalName; t.tableComment = tableComment; return t; } + public static MappedTable createIgnoredTable() { - MappedTable t = new MappedTable(); + final MappedTable t = new MappedTable(); t.isIgnored = true; return t; } - public boolean isIgnored() { return isIgnored; } - public String getTableName() { return tableName; } - public String getOriginalTableName () { return originalName;} - public String getTableComment() { return tableComment; } + + public boolean isIgnored() { + return isIgnored; + } + + public String getTableName() { + return tableName; + } + + public String getOriginalTableName() { + return originalName; + } + + public String getTableComment() { + return tableComment; + } } /** - * @param tables A jdbc Resultset for the {@link DatabaseMetaData#getTables(String, String, String, String[])} call, pointing to the current table. + * @param tables A jdbc Resultset for the + * {@link DatabaseMetaData#getTables(String, String, String, String[])} + * call, pointing to the current table. * @return An instance of {@link MappedTable} describing the mapped table. */ - MappedTable mapTable(ResultSet tables) throws SQLException; + public MappedTable mapTable(ResultSet tables) throws SQLException; /** - * @param columns A jdbc Resultset for the {@link DatabaseMetaData#getColumns(String, String, String, String)} call, pointing to the current column. + * @param columns A jdbc Resultset for the + * {@link DatabaseMetaData#getColumns(String, String, String, String)} + * call, pointing to the current column. * @return The mapped column * @throws SQLException */ - ColumnMetadata mapColumn(ResultSet columns) throws SQLException; - + public ColumnMetadata mapColumn(ResultSet columns) throws SQLException; /** - * Maps the jdbc datatype information of a column to the EXASOL datatype of the column. - * The dialect can also return null, so that the default mapping occurs. - * This method will be called by {@link #mapJdbcType(JdbcTypeDescription)} in the default implementation. + * Maps the jdbc datatype information of a column to the EXASOL datatype of the + * column. The dialect can also return null, so that the default mapping occurs. + * This method will be called by {@link #mapJdbcType(JdbcTypeDescription)} in + * the default implementation. * * @param jdbcType A jdbc type description - * @return Either null, if the default datatype mapping shall be applied, - * or the datatype which the current column shall be mapped to. - * This datatype will be used as the datatype in the virtual table and in the pushdown sql. + * @return Either null, if the default datatype mapping shall be applied, or the + * datatype which the current column shall be mapped to. This datatype + * will be used as the datatype in the virtual table and in the pushdown + * sql. 
* */ - DataType dialectSpecificMapJdbcType(JdbcTypeDescription jdbcType) throws SQLException; + public DataType dialectSpecificMapJdbcType(JdbcTypeDescription jdbcType) throws SQLException; /** - * Maps the jdbc datatype information of a column to the EXASOL datatype of the column. - * This method will be called by {@link #mapColumn(ResultSet)} in the default implementation. + * Maps the jdbc datatype information of a column to the EXASOL datatype of the + * column. This method will be called by {@link #mapColumn(ResultSet)} in the + * default implementation. * * @param jdbcType A jdbc type description - * @return Either null, if the default datatype mapping shall be applied, - * or the datatype which the current column shall be mapped to. - * This datatype will be used as the datatype in the virtual table and in the pushdown sql. + * @return Either null, if the default datatype mapping shall be applied, or the + * datatype which the current column shall be mapped to. This datatype + * will be used as the datatype in the virtual table and in the pushdown + * sql. * */ - DataType mapJdbcType(JdbcTypeDescription jdbcType) throws SQLException; + public DataType mapJdbcType(JdbcTypeDescription jdbcType) throws SQLException; // // SQL GENERATION // - + /** * How unquoted or quoted identifiers in queries or DDLs are handled */ - enum IdentifierCaseHandling { - INTERPRET_AS_LOWER, - INTERPRET_AS_UPPER, - INTERPRET_CASE_SENSITIVE + public enum IdentifierCaseHandling { + INTERPRET_AS_LOWER, INTERPRET_AS_UPPER, INTERPRET_CASE_SENSITIVE } /** * @return How to handle case sensitivity of unquoted identifiers */ - IdentifierCaseHandling getUnquotedIdentifierHandling(); + public IdentifierCaseHandling getUnquotedIdentifierHandling(); /** - * @return How to handle case sensitivity of quoted identifiers + * @return How to handle case sensitivity of quoted identifiers */ - IdentifierCaseHandling getQuotedIdentifierHandling(); + public IdentifierCaseHandling getQuotedIdentifierHandling(); /** - * @param identifier The name of an identifier (table or column). If identifiers are case sensitive, the identifier must be passed case-sensitive of course. + * @param identifier The name of an identifier (table or column). If identifiers + * are case sensitive, the identifier must be passed + * case-sensitive of course. * @return the quoted identifier, also if quoting is not required */ - String applyQuote(String identifier); + public String applyQuote(String identifier); /** * @param identifier The name of an identifier (table or column). - * @return the quoted identifier, if this name requires quoting, or the unquoted identifier, if no quoting is required. + * @return the quoted identifier, if this name requires quoting, or the unquoted + * identifier, if no quoting is required. */ - String applyQuoteIfNeeded(String identifier); - + public String applyQuoteIfNeeded(String identifier); + /** - * @return True if table names must be catalog-qualified, e.g. SELECT * FROM MY_CATALOG.MY_TABLE, otherwise false. - * Can be combined with {@link #requiresSchemaQualifiedTableNames(SqlGenerationContext)} + * @return True if table names must be catalog-qualified, e.g. SELECT * FROM + * MY_CATALOG.MY_TABLE, otherwise false. 
Can be combined with + * {@link #requiresSchemaQualifiedTableNames(SqlGenerationContext)} */ - boolean requiresCatalogQualifiedTableNames(SqlGenerationContext context); + public boolean requiresCatalogQualifiedTableNames(SqlGenerationContext context); /** - * @return True if table names must be schema-qualified, e.g. SELECT * FROM MY_SCHEMA.MY_TABLE, otherwise false. - * Can be combined with {@link #requiresCatalogQualifiedTableNames(SqlGenerationContext)} + * @return True if table names must be schema-qualified, e.g. SELECT * FROM + * MY_SCHEMA.MY_TABLE, otherwise false. Can be combined with + * {@link #requiresCatalogQualifiedTableNames(SqlGenerationContext)} */ - boolean requiresSchemaQualifiedTableNames(SqlGenerationContext context); + public boolean requiresSchemaQualifiedTableNames(SqlGenerationContext context); /** - * @return String that is used to separate the catalog and/or the schema from the tablename. In many cases this is a dot. + * @return String that is used to separate the catalog and/or the schema from + * the tablename. In many cases this is a dot. */ - String getTableCatalogAndSchemaSeparator(); + public String getTableCatalogAndSchemaSeparator(); - enum NullSorting { + public enum NullSorting { // NULL values are sorted at the end regardless of sort order NULLS_SORTED_AT_END, @@ -219,61 +266,73 @@ enum NullSorting { } /** - * @return The behavior how nulls are sorted in an ORDER BY. If the null sorting behavior is - * not {@link NullSorting#NULLS_SORTED_AT_END} and your dialects has the order by - * capability but you cannot explicitly specify NULLS FIRST or NULLS LAST, then you must - * overwrite the SQL generation to somehow obtain the desired semantic. + * @return The behavior how nulls are sorted in an ORDER BY. If the null sorting + * behavior is not {@link NullSorting#NULLS_SORTED_AT_END} and your + * dialect has the order by capability but you cannot explicitly + * specify NULLS FIRST or NULLS LAST, then you must overwrite the SQL + * generation to somehow obtain the desired semantic. */ - NullSorting getDefaultNullSorting(); + public NullSorting getDefaultNullSorting(); /** * @param value a string literal value - * @return the string literal in valid SQL syntax, e.g. "value" becomes - * "'value'". This might include escaping + * @return the string literal in valid SQL syntax, e.g. "value" becomes + * "'value'". This might include escaping */ - String getStringLiteral(String value); + public String getStringLiteral(String value); /** - * @return aliases for scalar functions. To be defined for each function that has the same semantic but a different name in the data source. - * If an alias for the same function is defined in {@link #getBinaryInfixFunctionAliases()}, than the infix alias will be ignored. + * @return aliases for scalar functions. To be defined for each function that + * has the same semantic but a different name in the data source. If an + * alias for the same function is defined in + * {@link #getBinaryInfixFunctionAliases()}, then the infix alias will + * be ignored. */ - Map getScalarFunctionAliases(); + public Map getScalarFunctionAliases(); /** - * @return Defines which binary scalar functions should be treated infix and how. E.g. a map entry ("ADD", "+") causes a function call "ADD(1,2)" to be written as "1 + 2". + * @return Defines which binary scalar functions should be treated infix and + * how. E.g. a map entry ("ADD", "+") causes a function call "ADD(1,2)" + * to be written as "1 + 2".
*/ - Map getBinaryInfixFunctionAliases(); + public Map getBinaryInfixFunctionAliases(); /** - * @return Defines which unary scalar functions should be treated prefix and how. E.g. a map entry ("NEG", "-") causes a function call "NEG(2)" to be written as "-2". + * @return Defines which unary scalar functions should be treated prefix and + * how. E.g. a map entry ("NEG", "-") causes a function call "NEG(2)" to + * be written as "-2". */ - Map getPrefixFunctionAliases(); + public Map getPrefixFunctionAliases(); /** - * @return aliases for aggregate functions. To be defined for each function that has the same semantic but a different name in the data source. + * @return aliases for aggregate functions. To be defined for each function that + * has the same semantic but a different name in the data source. */ - Map getAggregateFunctionAliases(); + public Map getAggregateFunctionAliases(); /** - * @return Returns true for functions with zero arguments if they do not require parentheses (e.g. SYSDATE). + * @return Returns true for functions with zero arguments if they do not require + * parentheses (e.g. SYSDATE). */ - boolean omitParentheses(ScalarFunction function); + public boolean omitParentheses(ScalarFunction function); /** - * Returns the Visitor to be used for SQL generation. - * Use this only if you need to, i.e. if you have requirements which cannot - * be realized via the other methods provided by {@link SqlDialect}. + * Returns the Visitor to be used for SQL generation. Use this only if you need + * to, i.e. if you have requirements which cannot be realized via the other + * methods provided by {@link SqlDialect}. * * @param context context information for the sql generation visitor * @return the SqlGenerationVisitor to be used for this dialect */ - SqlGenerationVisitor getSqlGenerationVisitor(SqlGenerationContext context); + public SqlGenerationVisitor getSqlGenerationVisitor(SqlGenerationContext context); /** * Allows dialect specific handling of different exceptions. - * @param exception the catched exception + * + * @param exception the caught exception + * @param exceptionMode exception mode of the adapter * @throws SQLException */ - void handleException(SQLException exception, - JdbcAdapterProperties.ExceptionHandlingMode exceptionMode) throws SQLException; + public void handleException(SQLException exception, JdbcAdapterProperties.ExceptionHandlingMode exceptionMode) + throws SQLException; } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlDialects.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlDialects.java index f8603e467..c444bf9b6 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlDialects.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlDialects.java @@ -1,74 +1,159 @@ package com.exasol.adapter.dialects; -import com.exasol.adapter.dialects.impl.*; - -import java.util.List; +import java.io.IOException; +import java.io.InputStream; +import java.lang.reflect.InvocationTargetException; +import java.util.HashSet; +import java.util.Optional; +import java.util.Properties; +import java.util.Set; +import java.util.logging.Logger; +import java.util.stream.Collectors; /** - * Manages a set of supported SqlDialects. + * This class implements a registry for supported SQL dialects.
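+ *
+ * The registry reads a comma-separated list of dialect implementation classes,
+ * either from the system property {@code com.exasol.adapter.dialects.supported}
+ * or from the bundled {@code sql_dialects.properties} file (see the code
+ * below). A property entry could look like this (class list shown for
+ * illustration):
+ * <pre>
+ * com.exasol.adapter.dialects.supported=\
+ *     com.exasol.adapter.dialects.impl.ExasolSqlDialect,\
+ *     com.exasol.adapter.dialects.impl.HiveSqlDialect
+ * </pre>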
*/ -public class SqlDialects { +public final class SqlDialects { + public static final String SQL_DIALECTS_PROPERTY = "com.exasol.adapter.dialects.supported"; + private static final String GET_PUBLIC_NAME_METHOD = "getPublicName"; + private static final String DIALECTS_PROPERTIES_FILE = "sql_dialects.properties"; + private static SqlDialects instance = null; + private final Set> supportedDialects = new HashSet<>(); + private static final Logger LOGGER = Logger.getLogger(SqlDialects.class.getName()); - private List supportedDialects; + /** + * Get an instance of the {@link SqlDialects} class + * + * @return the instance + */ + public static synchronized SqlDialects getInstance() { + if (instance == null) { + instance = new SqlDialects(); + instance.registerDialectsFromProperty(); + } + return instance; + } - private List> dialects; + private SqlDialects() { + // prevent instantiation outside of singleton. + } - public SqlDialects(List supportedDialects) { - this.supportedDialects = supportedDialects; + private void registerDialectsFromProperty() { + final String sqlDialects = (System.getProperty(SQL_DIALECTS_PROPERTY) == null) + ? readDialectListFromPropertyFile() + : System.getProperty(SQL_DIALECTS_PROPERTY); + registerDialects(sqlDialects); } - public boolean isSupported(String dialectName) { - for (String curName : supportedDialects) { - if (curName.equalsIgnoreCase(dialectName)) { - return true; - } + private String readDialectListFromPropertyFile() { + final Properties properties = new Properties(); + final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); + try (final InputStream stream = contextClassLoader.getResourceAsStream(DIALECTS_PROPERTIES_FILE)) { + properties.load(stream); + return properties.getProperty(SQL_DIALECTS_PROPERTY); + } catch (final IOException e) { + throw new SqlDialectsRegistryException( + "Unable to load list of SQL dialect from " + DIALECTS_PROPERTIES_FILE); } - return false; } - public SqlDialect getDialectByName(String name, SqlDialectContext context) { - if (name.equalsIgnoreCase(GenericSqlDialect.NAME)) { - return new GenericSqlDialect(context); - } else if (name.equalsIgnoreCase(ExasolSqlDialect.NAME)) { - return new ExasolSqlDialect(context); - } else if (name.equalsIgnoreCase(HiveSqlDialect.NAME)) { - return new HiveSqlDialect(context); - } else if (name.equalsIgnoreCase(ImpalaSqlDialect.NAME)) { - return new ImpalaSqlDialect(context); - } else if (name.equalsIgnoreCase(MysqlSqlDialect.NAME)) { - return new MysqlSqlDialect(context); - } else if (name.equalsIgnoreCase(OracleSqlDialect.NAME)) { - return new OracleSqlDialect(context); - } else if (name.equalsIgnoreCase(TeradataSqlDialect.NAME)) { - return new TeradataSqlDialect(context); - } else if (name.equalsIgnoreCase(RedshiftSqlDialect.NAME)) { - return new RedshiftSqlDialect(context); - } else if (name.equalsIgnoreCase(DB2SqlDialect.NAME)) { - return new DB2SqlDialect(context); - } else if (name.equalsIgnoreCase(SqlServerSqlDialect.NAME)) { - return new SqlServerSqlDialect(context); - } else if (name.equalsIgnoreCase(PostgreSQLSqlDialect.NAME)) { - return new PostgreSQLSqlDialect(context); + private void registerDialects(final String sqlDialects) { + for (final String className : sqlDialects.split("\\s*,\\s*")) { + registerDialect(className); } - else { - return null; + } + + private void registerDialect(final String className) { + try { + @SuppressWarnings("unchecked") + final Class dialect = (Class) Class.forName(className); + this.supportedDialects.add(dialect); + 
LOGGER.fine(() -> "Registered SQL dialect implementation class \"" + className + "\""); + } catch (final ClassNotFoundException e) { + throw new SqlDialectsRegistryException("Unable to find SQL dialect implementation class " + className); } } - public List> getDialects() { - return dialects; + /** + * Check whether a dialect is supported + * + * @param wantedDialectName the name of the dialect + * @return true if the dialect is supported + */ + public boolean isSupported(final String wantedDialectName) { + return this.supportedDialects.stream().anyMatch(dialect -> { + return getNameForDialectClass(dialect).equalsIgnoreCase(wantedDialectName); + }); } - public String getDialectsString() { - StringBuilder builder = new StringBuilder(); - boolean first = true; - for (String curName : supportedDialects) { - if (!first) { - builder.append(", "); - } - builder.append(curName); - first = false; + private String getNameForDialectClass(final Class dialect) { + String dialectName; + try { + dialectName = (String) dialect.getMethod(GET_PUBLIC_NAME_METHOD).invoke(null); + } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException + | SecurityException e) { + throw new SqlDialectsRegistryException( + "Unable to invoke " + GET_PUBLIC_NAME_METHOD + " trying to determine SQL dialect name"); + } + return dialectName; + } + + /** + * Finds an SQL dialect by its name and hands back an instance of the corresponding + * dialect implementation. + * + * @param name name of the dialect to be instantiated + * @param context the context to be handed to the instance. + * @return a new instance of the dialect + * + * @throws SqlDialectsRegistryException if the dialect is not found or cannot be + * instantiated. + */ + public SqlDialect getDialectInstanceForNameWithContext(final String name, final SqlDialectContext context) + throws SqlDialectsRegistryException { + final Optional> foundDialect = findDialectByName(name); + return instantiateDialect(name, foundDialect, context); + } + + private Optional> findDialectByName(final String name) { + final Optional> foundDialect = this.supportedDialects.stream() + .filter(dialect -> getNameForDialectClass(dialect).equalsIgnoreCase(name)) // + .findFirst(); + if (!foundDialect.isPresent()) { + throw new SqlDialectsRegistryException("SQL dialect \"" + name + "\" not found in the dialects registry."); + } + return foundDialect; + } + + private SqlDialect instantiateDialect(final String name, final Optional> foundDialect, + final SqlDialectContext context) throws SqlDialectsRegistryException { + SqlDialect instance; + try { + final Class dialectClass = foundDialect.get(); + instance = dialectClass.getConstructor(SqlDialectContext.class).newInstance(context); + } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException + | NoSuchMethodException | SecurityException e) { + throw new SqlDialectsRegistryException("Unable to instantiate SQL dialect \"" + name + "\".", e); } - return builder.toString(); + return instance; + } + + /** + * Get a comma separated, alphabetically sorted list of supported dialects. + * + * @return comma separated list of dialects.
+ */ + public String getDialectsString() { + return this.supportedDialects.stream() // + .map(dialect -> getNameForDialectClass(dialect)) // + .sorted() // + .collect(Collectors.joining(", ")); + } + + /** + * Delete the singleton instance (necessary for tests) + */ + public static void deleteInstance() { + instance = null; } -} +} \ No newline at end of file diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlDialectsRegistryException.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlDialectsRegistryException.java new file mode 100644 index 000000000..1b1792662 --- /dev/null +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlDialectsRegistryException.java @@ -0,0 +1,27 @@ +package com.exasol.adapter.dialects; + +/** + * This class provides runtime exceptions for the SQL dialects registry. + */ +public class SqlDialectsRegistryException extends RuntimeException { + private static final long serialVersionUID = -5603866366083182805L; + + /** + * Create a new instance of the {@link SqlDialectsRegistryException} + * + * @param message message to be displayed + */ + public SqlDialectsRegistryException(final String message) { + super(message); + } + + /** + * Create a new instance of the {@link SqlDialectsRegistryException} + * + * @param message message to be displayed + * @param cause root cause + */ + public SqlDialectsRegistryException(final String message, final Throwable cause) { + super(message, cause); + } +} \ No newline at end of file diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlGenerationVisitor.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlGenerationVisitor.java index f65eb3549..cf9e8ce81 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlGenerationVisitor.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlGenerationVisitor.java @@ -1,39 +1,80 @@ package com.exasol.adapter.dialects; +import java.util.ArrayList; +import java.util.List; + import com.exasol.adapter.AdapterException; import com.exasol.adapter.metadata.DataType; -import com.exasol.adapter.sql.*; +import com.exasol.adapter.sql.AggregateFunction; +import com.exasol.adapter.sql.ScalarFunction; +import com.exasol.adapter.sql.SqlColumn; +import com.exasol.adapter.sql.SqlFunctionAggregate; +import com.exasol.adapter.sql.SqlFunctionAggregateGroupConcat; +import com.exasol.adapter.sql.SqlFunctionScalar; +import com.exasol.adapter.sql.SqlFunctionScalarCase; +import com.exasol.adapter.sql.SqlFunctionScalarCast; +import com.exasol.adapter.sql.SqlFunctionScalarExtract; +import com.exasol.adapter.sql.SqlGroupBy; +import com.exasol.adapter.sql.SqlLimit; +import com.exasol.adapter.sql.SqlLiteralBool; +import com.exasol.adapter.sql.SqlLiteralDate; +import com.exasol.adapter.sql.SqlLiteralDouble; +import com.exasol.adapter.sql.SqlLiteralExactnumeric; +import com.exasol.adapter.sql.SqlLiteralInterval; +import com.exasol.adapter.sql.SqlLiteralNull; +import com.exasol.adapter.sql.SqlLiteralString; +import com.exasol.adapter.sql.SqlLiteralTimestamp; +import com.exasol.adapter.sql.SqlLiteralTimestampUtc; +import com.exasol.adapter.sql.SqlNode; +import com.exasol.adapter.sql.SqlNodeVisitor; +import com.exasol.adapter.sql.SqlOrderBy; +import com.exasol.adapter.sql.SqlPredicateAnd; +import com.exasol.adapter.sql.SqlPredicateBetween; 
+import com.exasol.adapter.sql.SqlPredicateEqual; +import com.exasol.adapter.sql.SqlPredicateInConstList; +import com.exasol.adapter.sql.SqlPredicateIsNotNull; +import com.exasol.adapter.sql.SqlPredicateIsNull; +import com.exasol.adapter.sql.SqlPredicateLess; +import com.exasol.adapter.sql.SqlPredicateLessEqual; +import com.exasol.adapter.sql.SqlPredicateLike; +import com.exasol.adapter.sql.SqlPredicateLikeRegexp; +import com.exasol.adapter.sql.SqlPredicateNot; +import com.exasol.adapter.sql.SqlPredicateNotEqual; +import com.exasol.adapter.sql.SqlPredicateOr; +import com.exasol.adapter.sql.SqlSelectList; +import com.exasol.adapter.sql.SqlStatementSelect; +import com.exasol.adapter.sql.SqlTable; import com.google.common.base.Joiner; -import java.util.ArrayList; -import java.util.List; - /** - * This class has the logic to generate SQL queries based on a graph of {@link SqlNode} elements. - * It uses the visitor pattern. - * This class interacts with the dialects in some situations, e.g. to find out how to handle quoting, + * This class has the logic to generate SQL queries based on a graph of + * {@link SqlNode} elements. It uses the visitor pattern. This class interacts + * with the dialects in some situations, e.g. to find out how to handle quoting, * case-sensitivity. * *

- * If this class is not sufficiently customizable for your use case, you can extend - * this class and override the required methods. You also have to return your custom - * visitor class then in the method {@link SqlDialect#getSqlGenerationVisitor(SqlGenerationContext)}. - * See {@link com.exasol.adapter.dialects.impl.OracleSqlGenerationVisitor} for an example. + * If this class is not sufficiently customizable for your use case, you can + * extend this class and override the required methods. You also have to return + * your custom visitor class then in the method + * {@link SqlDialect#getSqlGenerationVisitor(SqlGenerationContext)}. See + * {@link com.exasol.adapter.dialects.impl.OracleSqlGenerationVisitor} for an + * example. *
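+ *
+ * A sketch of such an extension (the class name and the boolean rendering are
+ * made up for illustration):
+ * <pre>
+ * public class MyDialectSqlGenerationVisitor extends SqlGenerationVisitor {
+ *     public MyDialectSqlGenerationVisitor(final SqlDialect dialect, final SqlGenerationContext context) {
+ *         super(dialect, context);
+ *     }
+ *
+ *     &#64;Override
+ *     public String visit(final SqlLiteralBool literal) {
+ *         return literal.getValue() ? "1" : "0"; // source system without true/false literals
+ *     }
+ * }
+ * </pre>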

* * Note on operator associativity and parenthesis generation: Currently we use - * parenthesis almost always. Without parenthesis, two SqlNode graphs with different - * semantic lead to "select 1 = 1 - 1 + 1". Also "SELECT NOT NOT TRUE" needs to be written - * as "SELECT NOT (NOT TRUE)" to work at all, whereas SELECT NOT TRUE works fine - * without parentheses. Currently we make inflationary use of parenthesis to to enforce - * the right semantic, but hopefully there is a better way. + * parentheses almost always. Without parentheses, two SqlNode graphs with + * different semantics lead to "select 1 = 1 - 1 + 1". Also "SELECT NOT NOT TRUE" + * needs to be written as "SELECT NOT (NOT TRUE)" to work at all, whereas SELECT + * NOT TRUE works fine without parentheses. Currently we make inflationary use + * of parentheses to enforce the right semantics, but hopefully there is a + * better way. */ public class SqlGenerationVisitor implements SqlNodeVisitor { - private SqlDialect dialect; - private SqlGenerationContext context; + private final SqlDialect dialect; + private final SqlGenerationContext context; - public SqlGenerationVisitor(SqlDialect dialect, SqlGenerationContext context) { + public SqlGenerationVisitor(final SqlDialect dialect, final SqlGenerationContext context) { this.dialect = dialect; this.context = context; @@ -42,21 +83,25 @@ public SqlGenerationVisitor(SqlDialect dialect, SqlGenerationContext context) { protected void checkDialectAliases() { // Check if dialect provided invalid aliases, which would never be applied. - for (ScalarFunction function : dialect.getScalarFunctionAliases().keySet()) { + for (final ScalarFunction function : this.dialect.getScalarFunctionAliases().keySet()) { if (!function.isSimple()) { - throw new RuntimeException("The dialect " + dialect.getPublicName() + " provided an alias for the non-simple scalar function " + function.name() + ". This alias will never be considered."); + throw new RuntimeException("The dialect " + SqlDialect.getPublicName() + + " provided an alias for the non-simple scalar function " + function.name() + + ". This alias will never be considered."); } } - for (AggregateFunction function : dialect.getAggregateFunctionAliases().keySet()) { + for (final AggregateFunction function : this.dialect.getAggregateFunctionAliases().keySet()) { if (!function.isSimple()) { - throw new RuntimeException("The dialect " + dialect.getPublicName() + " provided an alias for the non-simple aggregate function " + function.name() + ". This alias will never be considered."); + throw new RuntimeException("The dialect " + SqlDialect.getPublicName() + + " provided an alias for the non-simple aggregate function " + function.name() + + ".
This alias will never be considered."); } } }
@Override
- public String visit(SqlStatementSelect select) throws AdapterException {
- StringBuilder sql = new StringBuilder();
+ public String visit(final SqlStatementSelect select) throws AdapterException {
+ final StringBuilder sql = new StringBuilder();
sql.append("SELECT ");
sql.append(select.getSelectList().accept(this));
sql.append(" FROM ");
@@ -85,15 +130,15 @@ public String visit(SqlStatementSelect select) throws AdapterException {
}
@Override
- public String visit(SqlSelectList selectList) throws AdapterException {
- List<String> selectElement = new ArrayList<>();
+ public String visit(final SqlSelectList selectList) throws AdapterException {
+ final List<String> selectElement = new ArrayList<>();
if (selectList.isRequestAnyColumn()) {
// The system requested any column
selectElement.add("true");
} else if (selectList.isSelectStar()) {
selectElement.add("*");
} else {
- for (SqlNode node : selectList.getExpressions()) {
+ for (final SqlNode node : selectList.getExpressions()) {
selectElement.add(node.accept(this));
}
}
@@ -101,41 +146,42 @@ public String visit(SqlSelectList selectList) throws AdapterException {
}
@Override
- public String visit(SqlColumn column) throws AdapterException {
- return dialect.applyQuoteIfNeeded(column.getName());
+ public String visit(final SqlColumn column) throws AdapterException {
+ return this.dialect.applyQuoteIfNeeded(column.getName());
}
@Override
- public String visit(SqlTable table) {
+ public String visit(final SqlTable table) {
String schemaPrefix = "";
- if (dialect.requiresCatalogQualifiedTableNames(context) && context.getCatalogName() != null && !context.getCatalogName().isEmpty()) {
- schemaPrefix = dialect.applyQuoteIfNeeded(context.getCatalogName())
- + dialect.getTableCatalogAndSchemaSeparator();
+ if (this.dialect.requiresCatalogQualifiedTableNames(this.context) && this.context.getCatalogName() != null
+ && !this.context.getCatalogName().isEmpty()) {
+ schemaPrefix = this.dialect.applyQuoteIfNeeded(this.context.getCatalogName())
+ + this.dialect.getTableCatalogAndSchemaSeparator();
}
- if (dialect.requiresSchemaQualifiedTableNames(context) && context.getSchemaName() != null && !context.getSchemaName().isEmpty()) {
- schemaPrefix += dialect.applyQuoteIfNeeded(context.getSchemaName())
- + dialect.getTableCatalogAndSchemaSeparator();
+ if (this.dialect.requiresSchemaQualifiedTableNames(this.context) && this.context.getSchemaName() != null
+ && !this.context.getSchemaName().isEmpty()) {
+ schemaPrefix += this.dialect.applyQuoteIfNeeded(this.context.getSchemaName())
+ + this.dialect.getTableCatalogAndSchemaSeparator();
}
- return schemaPrefix + dialect.applyQuoteIfNeeded(table.getName());
+ return schemaPrefix + this.dialect.applyQuoteIfNeeded(table.getName());
}
@Override
- public String visit(SqlGroupBy groupBy) throws AdapterException {
+ public String visit(final SqlGroupBy groupBy) throws AdapterException {
if (groupBy.getExpressions() == null || groupBy.getExpressions().isEmpty()) {
- throw new RuntimeException(
- "Unexpected internal state (empty group by)");
+ throw new RuntimeException("Unexpected internal state (empty group by)");
}
- List<String> selectElement = new ArrayList<>();
- for (SqlNode node : groupBy.getExpressions()) {
+ final List<String> selectElement = new ArrayList<>();
+ for (final SqlNode node : groupBy.getExpressions()) {
selectElement.add(node.accept(this));
}
return Joiner.on(", ").join(selectElement);
}
@Override
- public String visit(SqlFunctionAggregate function) throws AdapterException {
- List<String> argumentsSql = new ArrayList<>();
- for (SqlNode node : function.getArguments()) {
+ public String visit(final SqlFunctionAggregate function) throws AdapterException {
+ final List<String> argumentsSql = new ArrayList<>();
+ for (final SqlNode node : function.getArguments()) {
argumentsSql.add(node.accept(this));
}
if (function.getFunctionName().equalsIgnoreCase("count") && argumentsSql.size() == 0) {
@@ -146,26 +192,25 @@ public String visit(SqlFunctionAggregate function) throws AdapterException {
distinctSql = "DISTINCT ";
}
String functionNameInSourceSystem = function.getFunctionName();
- if (dialect.getAggregateFunctionAliases().containsKey(function.getFunction())) {
- functionNameInSourceSystem = dialect.getAggregateFunctionAliases().get(function.getFunction());
+ if (this.dialect.getAggregateFunctionAliases().containsKey(function.getFunction())) {
+ functionNameInSourceSystem = this.dialect.getAggregateFunctionAliases().get(function.getFunction());
}
- return functionNameInSourceSystem + "(" + distinctSql
- + Joiner.on(", ").join(argumentsSql) + ")";
+ return functionNameInSourceSystem + "(" + distinctSql + Joiner.on(", ").join(argumentsSql) + ")";
}
@Override
- public String visit(SqlFunctionAggregateGroupConcat function) throws AdapterException {
- StringBuilder builder = new StringBuilder();
+ public String visit(final SqlFunctionAggregateGroupConcat function) throws AdapterException {
+ final StringBuilder builder = new StringBuilder();
builder.append(function.getFunctionName());
builder.append("(");
if (function.hasDistinct()) {
builder.append("DISTINCT ");
}
- assert(function.getArguments().size() == 1 && function.getArguments().get(0) != null);
+ assert (function.getArguments().size() == 1 && function.getArguments().get(0) != null);
builder.append(function.getArguments().get(0).accept(this));
if (function.hasOrderBy()) {
builder.append(" ");
- String orderByString = function.getOrderBy().accept(this);
+ final String orderByString = function.getOrderBy().accept(this);
builder.append(orderByString);
}
if (function.getSeparator() != null) {
@@ -179,35 +224,33 @@ public String visit(SqlFunctionAggregateGroupConcat function) throws AdapterExce
}
@Override
- public String visit(SqlFunctionScalar function) throws AdapterException {
- List<String> argumentsSql = new ArrayList<>();
- for (SqlNode node : function.getArguments()) {
+ public String visit(final SqlFunctionScalar function) throws AdapterException {
+ final List<String> argumentsSql = new ArrayList<>();
+ for (final SqlNode node : function.getArguments()) {
argumentsSql.add(node.accept(this));
}
String functionNameInSourceSystem = function.getFunctionName();
- if (dialect.getScalarFunctionAliases().containsKey(function.getFunction())) {
+ if (this.dialect.getScalarFunctionAliases().containsKey(function.getFunction())) {
// Take alias if one is defined - will overwrite the infix
- functionNameInSourceSystem = dialect.getScalarFunctionAliases().get(function.getFunction());
+ functionNameInSourceSystem = this.dialect.getScalarFunctionAliases().get(function.getFunction());
} else {
- if (dialect.getBinaryInfixFunctionAliases().containsKey(function.getFunction())) {
+ if (this.dialect.getBinaryInfixFunctionAliases().containsKey(function.getFunction())) {
assert (argumentsSql.size() == 2);
String realFunctionName = function.getFunctionName();
- if (dialect.getBinaryInfixFunctionAliases().containsKey(function.getFunction())) {
- realFunctionName = dialect.getBinaryInfixFunctionAliases().get(function.getFunction());
+ if (this.dialect.getBinaryInfixFunctionAliases().containsKey(function.getFunction())) {
+ realFunctionName = this.dialect.getBinaryInfixFunctionAliases().get(function.getFunction());
}
- return "(" + argumentsSql.get(0) + " " + realFunctionName + " "
- + argumentsSql.get(1) + ")";
- } else if (dialect.getPrefixFunctionAliases().containsKey(function.getFunction())) {
+ return "(" + argumentsSql.get(0) + " " + realFunctionName + " " + argumentsSql.get(1) + ")";
+ } else if (this.dialect.getPrefixFunctionAliases().containsKey(function.getFunction())) {
assert (argumentsSql.size() == 1);
String realFunctionName = function.getFunctionName();
- if (dialect.getPrefixFunctionAliases().containsKey(function.getFunction())) {
- realFunctionName = dialect.getPrefixFunctionAliases().get(function.getFunction());
+ if (this.dialect.getPrefixFunctionAliases().containsKey(function.getFunction())) {
+ realFunctionName = this.dialect.getPrefixFunctionAliases().get(function.getFunction());
}
- return "(" + realFunctionName
- + argumentsSql.get(0) + ")";
+ return "(" + realFunctionName + argumentsSql.get(0) + ")";
}
}
- if (argumentsSql.size() == 0 && dialect.omitParentheses(function.getFunction())) {
+ if (argumentsSql.size() == 0 && this.dialect.omitParentheses(function.getFunction())) {
return functionNameInSourceSystem;
} else {
return functionNameInSourceSystem + "(" + Joiner.on(", ").join(argumentsSql) + ")";
@@ -215,16 +258,16 @@ public String visit(SqlFunctionScalar function) throws AdapterException {
}
@Override
- public String visit(SqlFunctionScalarCase function) throws AdapterException {
- StringBuilder builder = new StringBuilder();
+ public String visit(final SqlFunctionScalarCase function) throws AdapterException {
+ final StringBuilder builder = new StringBuilder();
builder.append("CASE");
if (function.getBasis() != null) {
builder.append(" ");
builder.append(function.getBasis().accept(this));
}
for (int i = 0; i < function.getArguments().size(); i++) {
- SqlNode node = function.getArguments().get(i);
- SqlNode result = function.getResults().get(i);
+ final SqlNode node = function.getArguments().get(i);
+ final SqlNode result = function.getResults().get(i);
builder.append(" WHEN ");
builder.append(node.accept(this));
builder.append(" THEN ");
@@ -239,12 +282,12 @@ public String visit(SqlFunctionScalarCase function) throws AdapterException {
}
@Override
- public String visit(SqlFunctionScalarCast function) throws AdapterException {
+ public String visit(final SqlFunctionScalarCast function) throws AdapterException {
- StringBuilder builder = new StringBuilder();
+ final StringBuilder builder = new StringBuilder();
builder.append("CAST");
builder.append("(");
- assert(function.getArguments().size() == 1 && function.getArguments().get(0) != null);
+ assert (function.getArguments().size() == 1 && function.getArguments().get(0) != null);
builder.append(function.getArguments().get(0).accept(this));
builder.append(" AS ");
builder.append(function.getDataType());
@@ -253,14 +296,14 @@ public String visit(SqlFunctionScalarCast function) throws AdapterException {
}
@Override
- public String visit(SqlFunctionScalarExtract function) throws AdapterException {
- assert(function.getArguments().size() == 1 && function.getArguments().get(0) != null);
- String expression = function.getArguments().get(0).accept(this);
- return function.getFunctionName() + "(" + function.getToExtract() + " FROM "+ expression + ")";
+ public String visit(final SqlFunctionScalarExtract function) throws AdapterException {
+ assert (function.getArguments().size() == 1 && function.getArguments().get(0) != null);
+ final String expression = function.getArguments().get(0).accept(this);
+ return function.getFunctionName() + "(" + function.getToExtract() + " FROM " + expression + ")";
}
@Override
- public String visit(SqlLimit limit) {
+ public String visit(final SqlLimit limit) {
String offsetSql = "";
if (limit.getOffset() != 0) {
offsetSql = " OFFSET " + limit.getOffset();
@@ -269,7 +312,7 @@ public String visit(SqlLimit limit) {
}
@Override
- public String visit(SqlLiteralBool literal) {
+ public String visit(final SqlLiteralBool literal) {
if (literal.getValue()) {
return "true";
} else {
@@ -278,50 +321,50 @@ public String visit(SqlLiteralBool literal) {
}
@Override
- public String visit(SqlLiteralDate literal) {
+ public String visit(final SqlLiteralDate literal) {
return "DATE '" + literal.getValue() + "'"; // This gets always executed
// as
// TO_DATE('2015-02-01','YYYY-MM-DD')
}
@Override
- public String visit(SqlLiteralDouble literal) {
+ public String visit(final SqlLiteralDouble literal) {
return Double.toString(literal.getValue());
}
@Override
- public String visit(SqlLiteralExactnumeric literal) {
+ public String visit(final SqlLiteralExactnumeric literal) {
return literal.getValue().toString();
}
@Override
- public String visit(SqlLiteralNull literal) {
+ public String visit(final SqlLiteralNull literal) {
return "NULL";
}
@Override
- public String visit(SqlLiteralString literal) {
- return dialect.getStringLiteral(literal.getValue());
+ public String visit(final SqlLiteralString literal) {
+ return this.dialect.getStringLiteral(literal.getValue());
}
@Override
- public String visit(SqlLiteralTimestamp literal) {
+ public String visit(final SqlLiteralTimestamp literal) {
// TODO Allow dialect to modify behavior
return "TIMESTAMP '" + literal.getValue().toString() + "'";
}
@Override
- public String visit(SqlLiteralTimestampUtc literal) {
+ public String visit(final SqlLiteralTimestampUtc literal) {
// TODO Allow dialect to modify behavior
return "TIMESTAMP '" + literal.getValue().toString() + "'";
}
@Override
- public String visit(SqlLiteralInterval literal) {
+ public String visit(final SqlLiteralInterval literal) {
// TODO Allow dialect to modify behavior
if (literal.getDataType().getIntervalType() == DataType.IntervalType.YEAR_TO_MONTH) {
- return "INTERVAL '" + literal.getValue().toString()
- + "' YEAR (" + literal.getDataType().getPrecision() + ") TO MONTH";
+ return "INTERVAL '" + literal.getValue().toString() + "' YEAR (" + literal.getDataType().getPrecision()
+ + ") TO MONTH";
} else {
return "INTERVAL '" + literal.getValue().toString() + "' DAY (" + literal.getDataType().getPrecision()
+ ") TO SECOND (" + literal.getDataType().getIntervalFraction() + ")";
@@ -329,18 +372,18 @@ public String visit(SqlLiteralInterval literal) {
}
@Override
- public String visit(SqlOrderBy orderBy) throws AdapterException {
+ public String visit(final SqlOrderBy orderBy) throws AdapterException {
// ORDER BY [ASC/DESC] [NULLS FIRST/LAST]
// ASC and NULLS LAST are default in EXASOL
- List<String> sqlOrderElement = new ArrayList<>();
+ final List<String> sqlOrderElement = new ArrayList<>();
for (int i = 0; i < orderBy.getExpressions().size(); ++i) {
String elementSql = orderBy.getExpressions().get(i).accept(this);
- boolean shallNullsBeAtTheEnd = orderBy.nullsLast().get(i);
- boolean isAscending = orderBy.isAscending().get(i);
+ final boolean shallNullsBeAtTheEnd = orderBy.nullsLast().get(i);
+ final boolean isAscending = orderBy.isAscending().get(i);
if (isAscending == false) {
elementSql += " DESC";
}
- if (shallNullsBeAtTheEnd != nullsAreAtEndByDefault(isAscending, dialect.getDefaultNullSorting())) {
+ if (shallNullsBeAtTheEnd != nullsAreAtEndByDefault(isAscending, this.dialect.getDefaultNullSorting())) {
// we have to specify null positioning explicitly, otherwise it would be wrong
elementSql += (shallNullsBeAtTheEnd) ? " NULLS LAST" : " NULLS FIRST";
}
@@ -350,11 +393,13 @@ public String visit(SqlOrderBy orderBy) throws AdapterException {
}
/**
- * @param isAscending true if the desired sort order is ascending, false if descending
- * @param defaultNullSorting default null sorting of dialect
- * @return true, if the data source would position nulls at end of the resultset if NULLS FIRST/LAST is not specified explicitly.
+ * @param isAscending true if the desired sort order is ascending, false
+ * if descending
+ * @param defaultNullSorting default null sorting of dialect
+ * @return true, if the data source would position nulls at end of the resultset
+ * if NULLS FIRST/LAST is not specified explicitly.
*/
- private boolean nullsAreAtEndByDefault(boolean isAscending, SqlDialect.NullSorting defaultNullSorting) {
+ private boolean nullsAreAtEndByDefault(final boolean isAscending, final SqlDialect.NullSorting defaultNullSorting) {
if (defaultNullSorting == SqlDialect.NullSorting.NULLS_SORTED_AT_END) {
return true;
} else if (defaultNullSorting == SqlDialect.NullSorting.NULLS_SORTED_AT_START) {
@@ -369,53 +414,47 @@ private boolean nullsAreAtEndByDefault(boolean isAscending, SqlDialect.NullSorti
}
@Override
- public String visit(SqlPredicateAnd predicate) throws AdapterException {
- List<String> operandsSql = new ArrayList<>();
- for (SqlNode node : predicate.getAndedPredicates()) {
+ public String visit(final SqlPredicateAnd predicate) throws AdapterException {
+ final List<String> operandsSql = new ArrayList<>();
+ for (final SqlNode node : predicate.getAndedPredicates()) {
operandsSql.add(node.accept(this));
}
return "(" + Joiner.on(" AND ").join(operandsSql) + ")";
}
@Override
- public String visit(SqlPredicateBetween predicate) throws AdapterException {
- return predicate.getExpression().accept(this) + " BETWEEN "
- + predicate.getBetweenLeft().accept(this) + " AND "
+ public String visit(final SqlPredicateBetween predicate) throws AdapterException {
+ return predicate.getExpression().accept(this) + " BETWEEN " + predicate.getBetweenLeft().accept(this) + " AND "
+ predicate.getBetweenRight().accept(this);
}
@Override
- public String visit(SqlPredicateEqual predicate) throws AdapterException {
- return predicate.getLeft().accept(this) + " = "
- + predicate.getRight().accept(this);
+ public String visit(final SqlPredicateEqual predicate) throws AdapterException {
+ return predicate.getLeft().accept(this) + " = " + predicate.getRight().accept(this);
}
@Override
- public String visit(SqlPredicateInConstList predicate) throws AdapterException {
- List<String> argumentsSql = new ArrayList<>();
- for (SqlNode node : predicate.getInArguments()) {
+ public String visit(final SqlPredicateInConstList predicate) throws AdapterException {
+ final List<String> argumentsSql = new ArrayList<>();
+ for (final SqlNode node : predicate.getInArguments()) {
argumentsSql.add(node.accept(this));
}
- return predicate.getExpression().accept(this) + " IN ("
- + Joiner.on(", ").join(argumentsSql) + ")";
+ return predicate.getExpression().accept(this) + " IN (" + Joiner.on(", ").join(argumentsSql) + ")";
}
@Override
- public String visit(SqlPredicateLess
predicate) throws AdapterException { - return predicate.getLeft().accept(this) + " < " - + predicate.getRight().accept(this); + public String visit(final SqlPredicateLess predicate) throws AdapterException { + return predicate.getLeft().accept(this) + " < " + predicate.getRight().accept(this); } @Override - public String visit(SqlPredicateLessEqual predicate) throws AdapterException { - return predicate.getLeft().accept(this) + " <= " - + predicate.getRight().accept(this); + public String visit(final SqlPredicateLessEqual predicate) throws AdapterException { + return predicate.getLeft().accept(this) + " <= " + predicate.getRight().accept(this); } @Override - public String visit(SqlPredicateLike predicate) throws AdapterException { - String sql = predicate.getLeft().accept(this) + " LIKE " - + predicate.getPattern().accept(this); + public String visit(final SqlPredicateLike predicate) throws AdapterException { + String sql = predicate.getLeft().accept(this) + " LIKE " + predicate.getPattern().accept(this); if (predicate.getEscapeChar() != null) { sql += " ESCAPE " + predicate.getEscapeChar().accept(this); } @@ -423,39 +462,37 @@ public String visit(SqlPredicateLike predicate) throws AdapterException { } @Override - public String visit(SqlPredicateLikeRegexp predicate) throws AdapterException { - return predicate.getLeft().accept(this) + " REGEXP_LIKE " - + predicate.getPattern().accept(this); + public String visit(final SqlPredicateLikeRegexp predicate) throws AdapterException { + return predicate.getLeft().accept(this) + " REGEXP_LIKE " + predicate.getPattern().accept(this); } @Override - public String visit(SqlPredicateNot predicate) throws AdapterException { + public String visit(final SqlPredicateNot predicate) throws AdapterException { // "SELECT NOT NOT TRUE" is invalid syntax, "SELECT NOT (NOT TRUE)" works. 
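// Illustrative example: a nested predicate NOT(NOT(TRUE)) is therefore
// rendered as "NOT (NOT (true))", which parses, while the unparenthesized
// "NOT NOT true" would be rejected.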
return "NOT (" + predicate.getExpression().accept(this) + ")"; } @Override - public String visit(SqlPredicateNotEqual predicate) throws AdapterException { - return predicate.getLeft().accept(this) + " <> " - + predicate.getRight().accept(this); + public String visit(final SqlPredicateNotEqual predicate) throws AdapterException { + return predicate.getLeft().accept(this) + " <> " + predicate.getRight().accept(this); } @Override - public String visit(SqlPredicateOr predicate) throws AdapterException { - List operandsSql = new ArrayList<>(); - for (SqlNode node : predicate.getOrPredicates()) { + public String visit(final SqlPredicateOr predicate) throws AdapterException { + final List operandsSql = new ArrayList<>(); + for (final SqlNode node : predicate.getOrPredicates()) { operandsSql.add(node.accept(this)); } return "(" + Joiner.on(" OR ").join(operandsSql) + ")"; } @Override - public String visit(SqlPredicateIsNull predicate) throws AdapterException { + public String visit(final SqlPredicateIsNull predicate) throws AdapterException { return predicate.getExpression().accept(this) + " IS NULL"; } @Override - public String visit(SqlPredicateIsNotNull predicate) throws AdapterException { + public String visit(final SqlPredicateIsNotNull predicate) throws AdapterException { return predicate.getExpression().accept(this) + " IS NOT NULL"; } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/DB2SqlDialect.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/DB2SqlDialect.java index 384192fe6..745526f7a 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/DB2SqlDialect.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/DB2SqlDialect.java @@ -1,8 +1,5 @@ package com.exasol.adapter.dialects.impl; -import com.exasol.adapter.dialects.*; - -import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Types; @@ -12,33 +9,33 @@ import com.exasol.adapter.capabilities.MainCapability; import com.exasol.adapter.capabilities.PredicateCapability; import com.exasol.adapter.capabilities.ScalarFunctionCapability; +import com.exasol.adapter.dialects.AbstractSqlDialect; +import com.exasol.adapter.dialects.JdbcTypeDescription; +import com.exasol.adapter.dialects.SqlDialectContext; +import com.exasol.adapter.dialects.SqlGenerationContext; +import com.exasol.adapter.dialects.SqlGenerationVisitor; import com.exasol.adapter.metadata.DataType; /** - * Dialect for DB2 using the DB2 Connector jdbc driver. + * Dialect for DB2 using the DB2 Connector JDBC driver. 
* * @author Karl Griesser (fullref@gmail.com) */ public class DB2SqlDialect extends AbstractSqlDialect { + private static final String NAME = "DB2"; - public DB2SqlDialect(SqlDialectContext context) - { + public DB2SqlDialect(final SqlDialectContext context) { super(context); } - public static final String NAME = "DB2"; - - @Override - public String getPublicName() - { + public static String getPublicName() { return NAME; } @Override - public Capabilities getCapabilities() - { - Capabilities cap = new Capabilities(); + public Capabilities getCapabilities() { + final Capabilities cap = new Capabilities(); // Capabilities cap.supportMainCapability(MainCapability.SELECTLIST_PROJECTION); cap.supportMainCapability(MainCapability.SELECTLIST_EXPRESSIONS); @@ -101,7 +98,8 @@ public Capabilities getCapabilities() cap.supportAggregateFunction(AggregateFunctionCapability.FIRST_VALUE); cap.supportAggregateFunction(AggregateFunctionCapability.LAST_VALUE); cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV); - // not supported cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_DISTINCT); + // not supported + // cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_DISTINCT); cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_POP); // STDDEV_POP_DISTINCT cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_SAMP); @@ -153,7 +151,8 @@ public Capabilities getCapabilities() // CONCAT is not supported. Number of arguments can be different. // DUMP is not supported. Output is different. - // EDIT_DISTANCE is not supported. Output is different. UTL_MATCH.EDIT_DISTANCE returns -1 with NULL argument. + // EDIT_DISTANCE is not supported. Output is different. UTL_MATCH.EDIT_DISTANCE + // returns -1 with NULL argument. // INSERT is not supported. cap.supportScalarFunction(ScalarFunctionCapability.INSTR); cap.supportScalarFunction(ScalarFunctionCapability.LENGTH); @@ -162,9 +161,12 @@ public Capabilities getCapabilities() cap.supportScalarFunction(ScalarFunctionCapability.LPAD); cap.supportScalarFunction(ScalarFunctionCapability.LTRIM); // OCTET_LENGTH is not supported. Can be different for Unicode characters. - // not supported cap.supportScalarFunction(ScalarFunctionCapability.REGEXP_INSTR); - // not supported cap.supportScalarFunction(ScalarFunctionCapability.REGEXP_REPLACE); - // not supported cap.supportScalarFunction(ScalarFunctionCapability.REGEXP_SUBSTR); + // not supported + // cap.supportScalarFunction(ScalarFunctionCapability.REGEXP_INSTR); + // not supported + // cap.supportScalarFunction(ScalarFunctionCapability.REGEXP_REPLACE); + // not supported + // cap.supportScalarFunction(ScalarFunctionCapability.REGEXP_SUBSTR); cap.supportScalarFunction(ScalarFunctionCapability.REPEAT); cap.supportScalarFunction(ScalarFunctionCapability.REPLACE); // REVERSE is not supported @@ -200,8 +202,8 @@ public Capabilities getCapabilities() // MINUTES_BETWEEN is not supported. EXTRACT does not work on strings. // MONTH is not supported. EXTRACT does not work on strings. // MONTHS_BETWEEN is not supported. EXTRACT does not work on strings. - //cap.supportScalarFunction(ScalarFunctionCapability.NUMTODSINTERVAL); - //cap.supportScalarFunction(ScalarFunctionCapability.NUMTOYMINTERVAL); + // cap.supportScalarFunction(ScalarFunctionCapability.NUMTODSINTERVAL); + // cap.supportScalarFunction(ScalarFunctionCapability.NUMTOYMINTERVAL); // POSIX_TIME is not supported. Does not work on strings. // SECOND is not supported. EXTRACT does not work on strings. 
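// (For SECOND and its relatives the pushdown would presumably go through
// EXTRACT - see SqlGenerationVisitor.visit(SqlFunctionScalarExtract) - and
// DB2's EXTRACT insists on a genuine DATETIME argument.)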
// SECONDS_BETWEEN is not supported. EXTRACT does not work on strings.
@@ -278,42 +280,38 @@ public Capabilities getCapabilities()
}
@Override
- public SchemaOrCatalogSupport supportsJdbcCatalogs()
- {
+ public SchemaOrCatalogSupport supportsJdbcCatalogs() {
return SchemaOrCatalogSupport.UNSUPPORTED;
}
@Override
- public SchemaOrCatalogSupport supportsJdbcSchemas()
- {
+ public SchemaOrCatalogSupport supportsJdbcSchemas() {
return SchemaOrCatalogSupport.SUPPORTED;
}
@Override
- public IdentifierCaseHandling getUnquotedIdentifierHandling()
- {
+ public IdentifierCaseHandling getUnquotedIdentifierHandling() {
return IdentifierCaseHandling.INTERPRET_AS_UPPER;
}
@Override
- public IdentifierCaseHandling getQuotedIdentifierHandling()
- {
+ public IdentifierCaseHandling getQuotedIdentifierHandling() {
return IdentifierCaseHandling.INTERPRET_CASE_SENSITIVE;
}
@Override
- public String applyQuote(String identifier)
- {
- // If identifier contains double quotation marks ", it needs to be espaced by another double quotation mark. E.g. "a""b" is the identifier a"b in the db.
+ public String applyQuote(final String identifier) {
+ // If identifier contains double quotation marks ", it needs to be escaped by
+ // another double quotation mark. E.g. "a""b" is the identifier a"b in the db.
return "\"" + identifier.replace("\"", "\"\"") + "\"";
}
@Override
- public String applyQuoteIfNeeded(String identifier)
- {
+ public String applyQuoteIfNeeded(final String identifier) {
// Quoted identifiers can contain any unicode char except dot (.).
- // This is a simplified rule, which might cause that some identifiers are quoted although not needed
- boolean isSimpleIdentifier = identifier.matches("^[A-Z][0-9A-Z_]*");
+ // This is a simplified rule, which might cause that some identifiers are quoted
+ // although not needed
+ final boolean isSimpleIdentifier = identifier.matches("^[A-Z][0-9A-Z_]*");
if (isSimpleIdentifier) {
return identifier;
} else {
@@ -322,44 +320,38 @@ public String applyQuoteIfNeeded(String identifier)
}
@Override
- public boolean requiresCatalogQualifiedTableNames(
- SqlGenerationContext context)
- {
- //DB2 does not know catalogs
+ public boolean requiresCatalogQualifiedTableNames(final SqlGenerationContext context) {
+ // DB2 does not know catalogs
return false;
}
@Override
- public boolean requiresSchemaQualifiedTableNames(
- SqlGenerationContext context)
- {
+ public boolean requiresSchemaQualifiedTableNames(final SqlGenerationContext context) {
return true;
}
-
@Override
- public SqlGenerationVisitor getSqlGenerationVisitor(SqlGenerationContext context) {
+ public SqlGenerationVisitor getSqlGenerationVisitor(final SqlGenerationContext context) {
return new DB2SqlGenerationVisitor(this, context);
}
@Override
- public NullSorting getDefaultNullSorting()
- {
- //default db2 behaviour is to set nulls to the end of the result
+ public NullSorting getDefaultNullSorting() {
+ // default db2 behaviour is to set nulls to the end of the result
return NullSorting.NULLS_SORTED_AT_END;
}
@Override
- public String getStringLiteral(String value)
- {
+ public String getStringLiteral(final String value) {
// Don't forget to escape single quote
return "'" + value.replace("'", "''") + "'";
}
-
@Override
- public DataType dialectSpecificMapJdbcType(JdbcTypeDescription jdbcTypeDescription) throws SQLException {
+ public DataType dialectSpecificMapJdbcType(final JdbcTypeDescription jdbcTypeDescription) throws SQLException {
DataType colType = null;
- int jdbcType = jdbcTypeDescription.getJdbcType();
-
+ final int jdbcType = jdbcTypeDescription.getJdbcType();
+
switch (jdbcType) {
case Types.CLOB:
colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8);
@@ -372,16 +364,17 @@ public DataType dialectSpecificMapJdbcType(JdbcTypeDescripti
case Types.TIMESTAMP:
colType = DataType.createVarChar(32, DataType.ExaCharset.UTF8);
break;
-
- // db2 driver always delivers UTF8 Characters no matter what encoding is specified for var + char data
+
+ // db2 driver always delivers UTF8 characters no matter what encoding is
+ // specified for var + char data
case Types.VARCHAR:
case Types.NVARCHAR:
case Types.LONGVARCHAR:
case Types.CHAR:
case Types.NCHAR:
case Types.LONGNVARCHAR: {
- int size = jdbcTypeDescription.getPrecisionOrSize();
- DataType.ExaCharset charset = DataType.ExaCharset.UTF8;
+ final int size = jdbcTypeDescription.getPrecisionOrSize();
+ final DataType.ExaCharset charset = DataType.ExaCharset.UTF8;
if (size <= DataType.maxExasolVarcharSize) {
colType = DataType.createVarChar(size, charset);
} else {
@@ -389,12 +382,13 @@ public DataType dialectSpecificMapJdbcType(JdbcTypeDescripti
}
break;
}
-
- // VARCHAR and CHAR for bit data -> will be converted to hex string so we have to double the size
+
+ // VARCHAR and CHAR for bit data -> will be converted to hex string so we have
+ // to double the size
case -2:
- colType = DataType.createChar(jdbcTypeDescription.getPrecisionOrSize()*2, DataType.ExaCharset.ASCII);
+ colType = DataType.createChar(jdbcTypeDescription.getPrecisionOrSize() * 2, DataType.ExaCharset.ASCII);
+ break;
case -3:
- colType = DataType.createVarChar(jdbcTypeDescription.getPrecisionOrSize()*2, DataType.ExaCharset.ASCII);
+ colType = DataType.createVarChar(jdbcTypeDescription.getPrecisionOrSize() * 2, DataType.ExaCharset.ASCII);
break;
}
return colType;
diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/ExasolSqlDialect.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/ExasolSqlDialect.java
index adec7f1fb..5a620d5e2 100644
--- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/ExasolSqlDialect.java
+++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/ExasolSqlDialect.java
@@ -1,5 +1,7 @@
package com.exasol.adapter.dialects.impl;
+import java.sql.SQLException;
+
import com.exasol.adapter.capabilities.Capabilities;
import com.exasol.adapter.dialects.AbstractSqlDialect;
import com.exasol.adapter.dialects.JdbcTypeDescription;
@@ -8,30 +10,33 @@
import com.exasol.adapter.metadata.DataType;
import com.exasol.adapter.sql.ScalarFunction;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
/**
* This class is work-in-progress
*
- * TODO The precision of interval type columns is hardcoded, because it cannot be retrieved via JDBC. Should be retrieved from system table.
- * TODO The srid of geometry type columns is hardcoded, because it cannot be retrieved via JDBC. Should be retrieved from system table.
+ * TODO The precision of interval type columns is hardcoded, because it cannot + * be retrieved via JDBC. Should be retrieved from system table.
+ * TODO The srid of geometry type columns is hardcoded, because it cannot be + * retrieved via JDBC. Should be retrieved from system table.
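+ * In practice this means that every INTERVAL DAY TO SECOND column is reported
+ * with precision 2 and fraction 3, and every GEOMETRY column with SRID 3857,
+ * as hardcoded in dialectSpecificMapJdbcType below.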
*/
public class ExasolSqlDialect extends AbstractSqlDialect {
+ private static final String NAME = "EXASOL";
- public ExasolSqlDialect(SqlDialectContext context) {
+ public ExasolSqlDialect(final SqlDialectContext context) {
super(context);
- omitParenthesesMap.add(ScalarFunction.SYSDATE);
- omitParenthesesMap.add(ScalarFunction.SYSTIMESTAMP);
- omitParenthesesMap.add(ScalarFunction.CURRENT_SCHEMA);
- omitParenthesesMap.add(ScalarFunction.CURRENT_SESSION);
- omitParenthesesMap.add(ScalarFunction.CURRENT_STATEMENT);
- omitParenthesesMap.add(ScalarFunction.CURRENT_USER);
+ this.omitParenthesesMap.add(ScalarFunction.SYSDATE);
+ this.omitParenthesesMap.add(ScalarFunction.SYSTIMESTAMP);
+ this.omitParenthesesMap.add(ScalarFunction.CURRENT_SCHEMA);
+ this.omitParenthesesMap.add(ScalarFunction.CURRENT_SESSION);
+ this.omitParenthesesMap.add(ScalarFunction.CURRENT_STATEMENT);
+ this.omitParenthesesMap.add(ScalarFunction.CURRENT_USER);
}
- public static final String NAME = "EXASOL";
-
- public String getPublicName() {
+ /**
+ * Get the name under which the dialect is listed.
+ *
+ * @return name of the dialect
+ */
+ public static String getPublicName() {
return NAME;
}
@@ -46,26 +51,29 @@ public SchemaOrCatalogSupport supportsJdbcSchemas() {
}
@Override
- public DataType dialectSpecificMapJdbcType(JdbcTypeDescription jdbcTypeDescription) throws SQLException {
+ public DataType dialectSpecificMapJdbcType(final JdbcTypeDescription jdbcTypeDescription) throws SQLException {
DataType colType = null;
- int jdbcType = jdbcTypeDescription.getJdbcType();
+ final int jdbcType = jdbcTypeDescription.getJdbcType();
switch (jdbcType) {
- case -104:
- // Currently precision is hardcoded, because we cannot retrieve it via EXASOL jdbc driver.
- colType = DataType.createIntervalDaySecond(2,3);
- break;
- case -103:
- // Currently precision is hardcoded, because we cannot retrieve it via EXASOL jdbc driver.
- colType = DataType.createIntervalYearMonth(2);
- break;
- case 123:
- // Currently srid is hardcoded, because we cannot retrieve it via EXASOL jdbc driver.
- colType = DataType.createGeometry(3857);
- break;
- case 124:
- colType = DataType.createTimestamp(true);
- break;
+ case -104:
+ // Currently precision is hardcoded, because we cannot retrieve it via EXASOL
+ // jdbc driver.
+ colType = DataType.createIntervalDaySecond(2, 3);
+ break;
+ case -103:
+ // Currently precision is hardcoded, because we cannot retrieve it via EXASOL
+ // jdbc driver.
+ colType = DataType.createIntervalYearMonth(2);
+ break;
+ case 123:
+ // Currently srid is hardcoded, because we cannot retrieve it via EXASOL jdbc
+ // driver.
+ colType = DataType.createGeometry(3857);
+ break;
+ case 124:
+ colType = DataType.createTimestamp(true);
+ break;
}
return colType;
}
@@ -73,7 +81,7 @@ public DataType dialectSpecificMapJdbcType(JdbcTypeDescripti
@Override
public Capabilities getCapabilities() {
// Supports all capabilities
- Capabilities cap = new Capabilities();
+ final Capabilities cap = new Capabilities();
cap.supportAllCapabilities();
return cap;
}
@@ -89,16 +97,18 @@ public IdentifierCaseHandling getQuotedIdentifierHandling() {
}
@Override
- public String applyQuote(String identifier) {
- // If identifier contains double quotation marks ", it needs to be espaced by another double quotation mark. E.g. "a""b" is the identifier a"b in the db.
+ public String applyQuote(final String identifier) {
+ // If identifier contains double quotation marks ", it needs to be escaped by
+ // another double quotation mark.
E.g. "a""b" is the identifier a"b in the db. return "\"" + identifier.replace("\"", "\"\"") + "\""; } @Override - public String applyQuoteIfNeeded(String identifier) { + public String applyQuoteIfNeeded(final String identifier) { // Quoted identifiers can contain any unicode char except dot (.). - // This is a simplified rule, which might cause that some identifiers are quoted although not needed - boolean isSimpleIdentifier = identifier.matches("^[A-Z][0-9A-Z_]*"); + // This is a simplified rule, which might cause that some identifiers are quoted + // although not needed + final boolean isSimpleIdentifier = identifier.matches("^[A-Z][0-9A-Z_]*"); if (isSimpleIdentifier) { return identifier; } else { @@ -107,26 +117,28 @@ public String applyQuoteIfNeeded(String identifier) { } @Override - public boolean requiresCatalogQualifiedTableNames(SqlGenerationContext context) { + public boolean requiresCatalogQualifiedTableNames(final SqlGenerationContext context) { return false; } @Override - public boolean requiresSchemaQualifiedTableNames(SqlGenerationContext context) { - // We need schema qualifiers a) if we are in IS_LOCAL mode, i.e. we run statements directly in a subselect without IMPORT FROM JDBC - // and b) if we don't have the schema in the jdbc connection string (like "jdbc:exa:localhost:5555;schema=native") + public boolean requiresSchemaQualifiedTableNames(final SqlGenerationContext context) { + // We need schema qualifiers a) if we are in IS_LOCAL mode, i.e. we run + // statements directly in a subselect without IMPORT FROM JDBC + // and b) if we don't have the schema in the jdbc connection string (like + // "jdbc:exa:localhost:5555;schema=native") return true; // return context.isLocal(); } @Override public NullSorting getDefaultNullSorting() { - assert(getContext().getSchemaAdapterNotes().isNullsAreSortedHigh()); + assert (getContext().getSchemaAdapterNotes().isNullsAreSortedHigh()); return NullSorting.NULLS_SORTED_HIGH; } @Override - public String getStringLiteral(String value) { + public String getStringLiteral(final String value) { // Don't forget to escape single quote return "'" + value.replace("'", "''") + "'"; } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/GenericSqlDialect.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/GenericSqlDialect.java index d2e4df945..bdc9109d7 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/GenericSqlDialect.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/GenericSqlDialect.java @@ -1,5 +1,7 @@ package com.exasol.adapter.dialects.impl; +import java.sql.SQLException; + import com.exasol.adapter.capabilities.Capabilities; import com.exasol.adapter.dialects.AbstractSqlDialect; import com.exasol.adapter.dialects.JdbcTypeDescription; @@ -8,27 +10,25 @@ import com.exasol.adapter.jdbc.SchemaAdapterNotes; import com.exasol.adapter.metadata.DataType; -import java.sql.SQLException; - /** - * This dialect can be used for data sources where a custom dialect implementation does not yet exists. - * It will obtain all information from the JDBC Metadata. + * This dialect can be used for data sources where a custom dialect + * implementation does not yet exists. It will obtain all information from the + * JDBC Metadata. 
*/ public class GenericSqlDialect extends AbstractSqlDialect { - - public GenericSqlDialect(SqlDialectContext context) { + public GenericSqlDialect(final SqlDialectContext context) { super(context); } - public static final String NAME = "GENERIC"; + private static final String NAME = "GENERIC"; - public String getPublicName() { + public static String getPublicName() { return NAME; } @Override public Capabilities getCapabilities() { - Capabilities cap = new Capabilities(); + final Capabilities cap = new Capabilities(); return cap; } @@ -44,7 +44,7 @@ public SchemaOrCatalogSupport supportsJdbcSchemas() { @Override public IdentifierCaseHandling getUnquotedIdentifierHandling() { - SchemaAdapterNotes adapterNotes = getContext().getSchemaAdapterNotes(); + final SchemaAdapterNotes adapterNotes = getContext().getSchemaAdapterNotes(); if (adapterNotes.isSupportsMixedCaseIdentifiers()) { // Unquoted identifiers are treated case-sensitive and stored mixed case return IdentifierCaseHandling.INTERPRET_CASE_SENSITIVE; @@ -57,14 +57,15 @@ public IdentifierCaseHandling getUnquotedIdentifierHandling() { // This case is a bit strange - case insensitive, but still stores it mixed case return IdentifierCaseHandling.INTERPRET_CASE_SENSITIVE; } else { - throw new RuntimeException("Unexpected quote behavior. Adapternotes: " + SchemaAdapterNotes.serialize(adapterNotes)); + throw new RuntimeException( + "Unexpected quote behavior. Adapternotes: " + SchemaAdapterNotes.serialize(adapterNotes)); } } } @Override public IdentifierCaseHandling getQuotedIdentifierHandling() { - SchemaAdapterNotes adapterNotes = getContext().getSchemaAdapterNotes(); + final SchemaAdapterNotes adapterNotes = getContext().getSchemaAdapterNotes(); if (adapterNotes.isSupportsMixedCaseQuotedIdentifiers()) { // Quoted identifiers are treated case-sensitive and stored mixed case return IdentifierCaseHandling.INTERPRET_CASE_SENSITIVE; @@ -77,38 +78,41 @@ public IdentifierCaseHandling getQuotedIdentifierHandling() { // This case is a bit strange - case insensitive, but still stores it mixed case return IdentifierCaseHandling.INTERPRET_CASE_SENSITIVE; } else { - throw new RuntimeException("Unexpected quote behavior. Adapternotes: " + SchemaAdapterNotes.serialize(adapterNotes)); + throw new RuntimeException( + "Unexpected quote behavior. Adapternotes: " + SchemaAdapterNotes.serialize(adapterNotes)); } } } @Override - public String applyQuote(String identifier) { - String quoteString = getContext().getSchemaAdapterNotes().getIdentifierQuoteString(); + public String applyQuote(final String identifier) { + final String quoteString = getContext().getSchemaAdapterNotes().getIdentifierQuoteString(); return quoteString + identifier + quoteString; } @Override - public String applyQuoteIfNeeded(String identifier) { + public String applyQuoteIfNeeded(final String identifier) { // We could consider getExtraNameCharacters() here as well to do less quoting return applyQuote(identifier); } @Override - public boolean requiresCatalogQualifiedTableNames(SqlGenerationContext context) { + public boolean requiresCatalogQualifiedTableNames(final SqlGenerationContext context) { return true; } @Override - public boolean requiresSchemaQualifiedTableNames(SqlGenerationContext context) { - // See getCatalogSeparator(): String that this database uses as the separator between a catalog and table name. 
- // See isCatalogAtStart(): whether a catalog appears at the start of a fully qualified table name + public boolean requiresSchemaQualifiedTableNames(final SqlGenerationContext context) { + // See getCatalogSeparator(): String that this database uses as the separator + // between a catalog and table name. + // See isCatalogAtStart(): whether a catalog appears at the start of a fully + // qualified table name return true; } @Override public NullSorting getDefaultNullSorting() { - SchemaAdapterNotes notes = getContext().getSchemaAdapterNotes(); + final SchemaAdapterNotes notes = getContext().getSchemaAdapterNotes(); if (notes.isNullsAreSortedAtEnd()) { return NullSorting.NULLS_SORTED_AT_END; } else if (notes.isNullsAreSortedAtStart()) { @@ -122,12 +126,12 @@ public NullSorting getDefaultNullSorting() { } @Override - public String getStringLiteral(String value) { + public String getStringLiteral(final String value) { return "'" + value.replace("'", "''") + "'"; } @Override - public DataType dialectSpecificMapJdbcType(JdbcTypeDescription jdbcType) throws SQLException { + public DataType dialectSpecificMapJdbcType(final JdbcTypeDescription jdbcType) throws SQLException { return null; } } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/HiveSqlDialect.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/HiveSqlDialect.java index 3a50cee54..c4bfc33c7 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/HiveSqlDialect.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/HiveSqlDialect.java @@ -1,34 +1,41 @@ package com.exasol.adapter.dialects.impl; -import com.exasol.adapter.capabilities.*; -import com.exasol.adapter.dialects.*; -import com.exasol.adapter.metadata.DataType; -import com.exasol.adapter.sql.ScalarFunction; - import java.sql.SQLException; import java.util.EnumMap; import java.util.Map; +import com.exasol.adapter.capabilities.AggregateFunctionCapability; +import com.exasol.adapter.capabilities.Capabilities; +import com.exasol.adapter.capabilities.LiteralCapability; +import com.exasol.adapter.capabilities.MainCapability; +import com.exasol.adapter.capabilities.PredicateCapability; +import com.exasol.adapter.capabilities.ScalarFunctionCapability; +import com.exasol.adapter.dialects.AbstractSqlDialect; +import com.exasol.adapter.dialects.JdbcTypeDescription; +import com.exasol.adapter.dialects.SqlDialectContext; +import com.exasol.adapter.dialects.SqlGenerationContext; +import com.exasol.adapter.dialects.SqlGenerationVisitor; +import com.exasol.adapter.metadata.DataType; +import com.exasol.adapter.sql.ScalarFunction; + /** - * Dialect for Hive, using the Cloudera Hive JDBC Driver/Connector (developed by Simba). - * Only supports Hive 2.1.0 and later because of the order by (nulls first/last option) - * TODO Finish implementation of this dialect and add as a supported dialect + * Dialect for Hive, using the Cloudera Hive JDBC Driver/Connector (developed by + * Simba). 
Only supports Hive 2.1.0 and later because of the order by (nulls + * first/last option) TODO Finish implementation of this dialect and add as a + * supported dialect */ public class HiveSqlDialect extends AbstractSqlDialect { - - public HiveSqlDialect(SqlDialectContext context) { + public HiveSqlDialect(final SqlDialectContext context) { super(context); } - public static final String NAME = "HIVE"; - - public String getPublicName() { - return NAME; + public static String getPublicName() { + return "HIVE"; } @Override public Capabilities getCapabilities() { - Capabilities cap = new Capabilities(); + final Capabilities cap = new Capabilities(); cap.supportMainCapability(MainCapability.SELECTLIST_PROJECTION); cap.supportMainCapability(MainCapability.SELECTLIST_EXPRESSIONS); cap.supportMainCapability(MainCapability.FILTER_EXPRESSIONS); @@ -131,7 +138,7 @@ public Capabilities getCapabilities() { cap.supportScalarFunction(ScalarFunctionCapability.SECOND); cap.supportScalarFunction(ScalarFunctionCapability.WEEK); - /*hive doesn't support geospatial functions*/ + /* hive doesn't support geospatial functions */ cap.supportScalarFunction(ScalarFunctionCapability.CAST); @@ -145,11 +152,12 @@ public Capabilities getCapabilities() { } /** - * Quote from user manual The Cloudera JDBC Driver for Apache Hive supports both catalogs and schemas to make it easy for - * the driver to work with various JDBC applications. Since Hive only organizes tables into - * schemas/databases, the driver provides a synthetic catalog called “HIVE” under which all of the - * schemas/databases are organized. The driver also maps the JDBC schema to the Hive - * schema/database. + * Quote from user manual The Cloudera JDBC Driver for Apache Hive supports both + * catalogs and schemas to make it easy for the driver to work with various JDBC + * applications. Since Hive only organizes tables into schemas/databases, the + * driver provides a synthetic catalog called “HIVE” under which all of the + * schemas/databases are organized. The driver also maps the JDBC schema to the + * Hive schema/database. */ @Override public SchemaOrCatalogSupport supportsJdbcCatalogs() { @@ -172,26 +180,30 @@ public IdentifierCaseHandling getQuotedIdentifierHandling() { } @Override - public String applyQuote(String identifier) { - // If identifier contains double quotation marks ", it needs to be escaped by another double quotation mark. E.g. "a""b" is the identifier a"b in the db. + public String applyQuote(final String identifier) { + // If identifier contains double quotation marks ", it needs to be escaped by + // another double quotation mark. E.g. "a""b" is the identifier a"b in the db. return "`" + identifier + "`"; } @Override - public String applyQuoteIfNeeded(String identifier) { - // We need to apply quotes only in case of reserved keywords. Since we don't know these (could look up in JDBC Metadata...) we always quote. + public String applyQuoteIfNeeded(final String identifier) { + // We need to apply quotes only in case of reserved keywords. Since we don't + // know these (could look up in JDBC Metadata...) we always quote. return applyQuote(identifier); } @Override - public boolean requiresCatalogQualifiedTableNames(SqlGenerationContext context) { + public boolean requiresCatalogQualifiedTableNames(final SqlGenerationContext context) { return false; } @Override - public boolean requiresSchemaQualifiedTableNames(SqlGenerationContext context) { - // We need schema qualifiers a) if we are in IS_LOCAL mode, i.e. 
we run statements directly in a subselect without IMPORT FROM JDBC
- // and b) if we don't have the schema in the jdbc connection string (like "jdbc:exa:localhost:5555;schema=native")
+ public boolean requiresSchemaQualifiedTableNames(final SqlGenerationContext context) {
+ // We need schema qualifiers a) if we are in IS_LOCAL mode, i.e. we run
+ // statements directly in a subselect without IMPORT FROM JDBC
+ // and b) if we don't have the schema in the jdbc connection string (like
+ // "jdbc:exa:localhost:5555;schema=native")
return true;
// return context.isLocal();
}
@@ -207,20 +219,20 @@ public NullSorting getDefaultNullSorting() {
}
@Override
- public String getStringLiteral(String value) {
+ public String getStringLiteral(final String value) {
// Don't forget to escape single quote
return "'" + value.replace("'", "''") + "'";
}
@Override
- public SqlGenerationVisitor getSqlGenerationVisitor(SqlGenerationContext context) {
+ public SqlGenerationVisitor getSqlGenerationVisitor(final SqlGenerationContext context) {
return new HiveSqlGenerationVisitor(this, context);
}
@Override
public Map<ScalarFunction, String> getScalarFunctionAliases() {
- Map<ScalarFunction, String> scalarAliases = new EnumMap<>(ScalarFunction.class);
+ final Map<ScalarFunction, String> scalarAliases = new EnumMap<>(ScalarFunction.class);
scalarAliases.put(ScalarFunction.ADD_DAYS, "DATE_ADD");
scalarAliases.put(ScalarFunction.DAYS_BETWEEN, "DATEDIFF");
@@ -232,7 +244,7 @@ public Map<ScalarFunction, String> getScalarFunctionAliases() {
}
@Override
- public DataType dialectSpecificMapJdbcType(JdbcTypeDescription jdbcType) throws SQLException {
+ public DataType dialectSpecificMapJdbcType(final JdbcTypeDescription jdbcType) throws SQLException {
return null;
}
diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/ImpalaSqlDialect.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/ImpalaSqlDialect.java
index 426c131b0..202bf0382 100644
--- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/ImpalaSqlDialect.java
+++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/ImpalaSqlDialect.java
@@ -1,32 +1,41 @@
package com.exasol.adapter.dialects.impl;
-import com.exasol.adapter.capabilities.*;
-import com.exasol.adapter.dialects.*;
-import com.exasol.adapter.metadata.DataType;
-
import java.sql.SQLException;
+import com.exasol.adapter.capabilities.AggregateFunctionCapability;
+import com.exasol.adapter.capabilities.Capabilities;
+import com.exasol.adapter.capabilities.LiteralCapability;
+import com.exasol.adapter.capabilities.MainCapability;
+import com.exasol.adapter.capabilities.PredicateCapability;
+import com.exasol.adapter.dialects.AbstractSqlDialect;
+import com.exasol.adapter.dialects.JdbcTypeDescription;
+import com.exasol.adapter.dialects.SqlDialectContext;
+import com.exasol.adapter.dialects.SqlGenerationContext;
+import com.exasol.adapter.dialects.SqlGenerationVisitor;
+import com.exasol.adapter.metadata.DataType;
+
/**
- * Dialect for Impala, using the Cloudera Impala JDBC Driver/Connector (developed by Simba).
- *
- * See http://www.cloudera.com/documentation/enterprise/latest/topics/impala_langref.html
+ * Dialect for Impala, using the Cloudera Impala JDBC Driver/Connector
+ * (developed by Simba).
+ *
+ * See
+ * http://www.cloudera.com/documentation/enterprise/latest/topics/impala_langref.html
*/
public class ImpalaSqlDialect extends AbstractSqlDialect {
- public ImpalaSqlDialect(SqlDialectContext context) {
+ public ImpalaSqlDialect(final SqlDialectContext context) {
super(context);
}
- public static final String NAME = "IMPALA";
+ private static final String NAME = "IMPALA";
- public String getPublicName() {
+ public static String getPublicName() {
return NAME;
}
@Override
public Capabilities getCapabilities() {
// Main capabilities
- Capabilities cap = new Capabilities();
+ final Capabilities cap = new Capabilities();
cap.supportMainCapability(MainCapability.SELECTLIST_PROJECTION);
cap.supportMainCapability(MainCapability.SELECTLIST_EXPRESSIONS);
cap.supportMainCapability(MainCapability.FILTER_EXPRESSIONS);
@@ -80,7 +89,7 @@ public Capabilities getCapabilities() {
cap.supportAggregateFunction(AggregateFunctionCapability.SUM_DISTINCT);
// TODO Scalar Functions
-
+
return cap;
}
@@ -96,10 +105,10 @@ public SchemaOrCatalogSupport supportsJdbcSchemas() {
}
/**
* Note from Impala documentation: Impala identifiers are always
- * case-insensitive. That is, tables named t1 and T1 always refer to the
- * same table, regardless of quote characters. Internally, Impala always
- * folds all specified table and column names to lowercase. This is why the
- * column headers in query output are always displayed in lowercase.
+ * case-insensitive. That is, tables named t1 and T1 always refer to the same
+ * table, regardless of quote characters. Internally, Impala always folds all
+ * specified table and column names to lowercase. This is why the column headers
+ * in query output are always displayed in lowercase.
*/
@Override
public IdentifierCaseHandling getUnquotedIdentifierHandling() {
@@ -112,55 +121,64 @@ public IdentifierCaseHandling getQuotedIdentifierHandling() {
}
@Override
- public String applyQuote(String identifier) {
- // If identifier contains double quotation marks ", it needs to be espaced by another double quotation mark. E.g. "a""b" is the identifier a"b in the db.
+ public String applyQuote(final String identifier) {
+ // If identifier contains double quotation marks ", it needs to be escaped by
+ // another double quotation mark. E.g. "a""b" is the identifier a"b in the db.
return "`" + identifier + "`";
}
@Override
- public String applyQuoteIfNeeded(String identifier) {
- // We need to apply quotes only in case of reserved keywords. Since we don't know these (could look up in JDBC Metadata...) we always quote.
+ public String applyQuoteIfNeeded(final String identifier) {
+ // We need to apply quotes only in case of reserved keywords. Since we don't
+ // know these (could look up in JDBC Metadata...) we always quote.
return applyQuote(identifier);
}
@Override
- public boolean requiresCatalogQualifiedTableNames(SqlGenerationContext context) {
+ public boolean requiresCatalogQualifiedTableNames(final SqlGenerationContext context) {
return false;
}
@Override
- public boolean requiresSchemaQualifiedTableNames(SqlGenerationContext context) {
- // We need schema qualifiers a) if we are in IS_LOCAL mode, i.e. we run statements directly in a subselect without IMPORT FROM JDBC
- // and b) if we don't have the schema in the jdbc connection string (like "jdbc:exa:localhost:5555;schema=native")
+ public boolean requiresSchemaQualifiedTableNames(final SqlGenerationContext context) {
+ // We need schema qualifiers a) if we are in IS_LOCAL mode, i.e.
we run + // statements directly in a subselect without IMPORT FROM JDBC + // and b) if we don't have the schema in the jdbc connection string (like + // "jdbc:exa:localhost:5555;schema=native") return true; // return context.isLocal(); } @Override - public SqlGenerationVisitor getSqlGenerationVisitor(SqlGenerationContext context) { + public SqlGenerationVisitor getSqlGenerationVisitor(final SqlGenerationContext context) { return new ImpalaSqlGenerationVisitor(this, context); } @Override public NullSorting getDefaultNullSorting() { - // In Impala 1.2.1 and higher, all NULL values come at the end of the result set for ORDER BY ... ASC queries, + // In Impala 1.2.1 and higher, all NULL values come at the end of the result set + // for ORDER BY ... ASC queries, // and at the beginning of the result set for ORDER BY ... DESC queries. - // In effect, NULL is considered greater than all other values for sorting purposes. - // The original Impala behavior always put NULL values at the end, even for ORDER BY ... DESC queries. - // The new behavior in Impala 1.2.1 makes Impala more compatible with other popular database systems. - // In Impala 1.2.1 and higher, you can override or specify the sorting behavior for NULL by adding the clause + // In effect, NULL is considered greater than all other values for sorting + // purposes. + // The original Impala behavior always put NULL values at the end, even for + // ORDER BY ... DESC queries. + // The new behavior in Impala 1.2.1 makes Impala more compatible with other + // popular database systems. + // In Impala 1.2.1 and higher, you can override or specify the sorting behavior + // for NULL by adding the clause // NULLS FIRST or NULLS LAST at the end of the ORDER BY clause. return NullSorting.NULLS_SORTED_HIGH; } @Override - public String getStringLiteral(String value) { + public String getStringLiteral(final String value) { // Don't forget to escape single quote return "'" + value.replace("'", "''") + "'"; } @Override - public DataType dialectSpecificMapJdbcType(JdbcTypeDescription jdbcType) throws SQLException { + public DataType dialectSpecificMapJdbcType(final JdbcTypeDescription jdbcType) throws SQLException { return null; } } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/MysqlSqlDialect.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/MysqlSqlDialect.java index a3b38ac4c..e939963fd 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/MysqlSqlDialect.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/MysqlSqlDialect.java @@ -1,5 +1,7 @@ package com.exasol.adapter.dialects.impl; +import java.sql.SQLException; + import com.exasol.adapter.capabilities.Capabilities; import com.exasol.adapter.dialects.AbstractSqlDialect; import com.exasol.adapter.dialects.JdbcTypeDescription; @@ -7,28 +9,25 @@ import com.exasol.adapter.dialects.SqlGenerationContext; import com.exasol.adapter.metadata.DataType; -import java.sql.SQLException; - /** * Dialect for MySQL using the MySQL Connector jdbc driver. 
* * TODO Finish implementation of this dialect and add as a supported dialect */ public class MysqlSqlDialect extends AbstractSqlDialect { - - public MysqlSqlDialect(SqlDialectContext context) { + public MysqlSqlDialect(final SqlDialectContext context) { super(context); } - public static final String NAME = "MYSQL"; + private static final String NAME = "MYSQL"; - public String getPublicName() { + public static String getPublicName() { return NAME; } @Override public Capabilities getCapabilities() { - Capabilities cap = new Capabilities(); + final Capabilities cap = new Capabilities(); return cap; } @@ -53,42 +52,45 @@ public IdentifierCaseHandling getQuotedIdentifierHandling() { } @Override - public String applyQuote(String identifier) { - // TODO ANSI_QUOTES option. Must be obtained from JDBC DatabaseMetadata. http://dev.mysql.com/doc/refman/5.7/en/sql-mode.html#sqlmode_ansi_quotes - CharSequence quoteChar = "`"; + public String applyQuote(final String identifier) { + // TODO ANSI_QUOTES option. Must be obtained from JDBC DatabaseMetadata. + // http://dev.mysql.com/doc/refman/5.7/en/sql-mode.html#sqlmode_ansi_quotes + final CharSequence quoteChar = "`"; return quoteChar + identifier.replace(quoteChar, quoteChar + "" + quoteChar) + quoteChar; } @Override - public String applyQuoteIfNeeded(String identifier) { + public String applyQuoteIfNeeded(final String identifier) { return applyQuote(identifier); } @Override - public boolean requiresCatalogQualifiedTableNames(SqlGenerationContext context) { + public boolean requiresCatalogQualifiedTableNames(final SqlGenerationContext context) { return true; } @Override - public boolean requiresSchemaQualifiedTableNames(SqlGenerationContext context) { + public boolean requiresSchemaQualifiedTableNames(final SqlGenerationContext context) { return false; } @Override public NullSorting getDefaultNullSorting() { - // See http://stackoverflow.com/questions/2051602/mysql-orderby-a-number-nulls-last - // and also http://stackoverflow.com/questions/9307613/mysql-order-by-null-first-and-desc-after - assert(getContext().getSchemaAdapterNotes().isNullsAreSortedLow()); + // See + // http://stackoverflow.com/questions/2051602/mysql-orderby-a-number-nulls-last + // and also + // http://stackoverflow.com/questions/9307613/mysql-order-by-null-first-and-desc-after + assert (getContext().getSchemaAdapterNotes().isNullsAreSortedLow()); return NullSorting.NULLS_SORTED_LOW; } @Override - public String getStringLiteral(String value) { + public String getStringLiteral(final String value) { return "'" + value.replace("'", "''") + "'"; } @Override - public DataType dialectSpecificMapJdbcType(JdbcTypeDescription jdbcType) throws SQLException { + public DataType dialectSpecificMapJdbcType(final JdbcTypeDescription jdbcType) throws SQLException { return null; } } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/OracleSqlDialect.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/OracleSqlDialect.java index c891c1ba2..7f4bc71b8 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/OracleSqlDialect.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/OracleSqlDialect.java @@ -1,48 +1,56 @@ package com.exasol.adapter.dialects.impl; -import com.exasol.adapter.capabilities.*; -import com.exasol.adapter.dialects.*; -import com.exasol.adapter.metadata.DataType; -import 
com.exasol.adapter.sql.AggregateFunction; -import com.exasol.adapter.sql.ScalarFunction; - import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Types; import java.util.EnumMap; import java.util.Map; +import com.exasol.adapter.capabilities.AggregateFunctionCapability; +import com.exasol.adapter.capabilities.Capabilities; +import com.exasol.adapter.capabilities.LiteralCapability; +import com.exasol.adapter.capabilities.MainCapability; +import com.exasol.adapter.capabilities.PredicateCapability; +import com.exasol.adapter.capabilities.ScalarFunctionCapability; +import com.exasol.adapter.dialects.AbstractSqlDialect; +import com.exasol.adapter.dialects.JdbcTypeDescription; +import com.exasol.adapter.dialects.SqlDialectContext; +import com.exasol.adapter.dialects.SqlGenerationContext; +import com.exasol.adapter.dialects.SqlGenerationVisitor; +import com.exasol.adapter.metadata.DataType; +import com.exasol.adapter.sql.AggregateFunction; +import com.exasol.adapter.sql.ScalarFunction; + /** * Work in Progress */ public class OracleSqlDialect extends AbstractSqlDialect { + private final boolean castAggFuncToFloat = true; + private final boolean castScalarFuncToFloat = true; - private boolean castAggFuncToFloat = true; - private boolean castScalarFuncToFloat = true; - - public OracleSqlDialect(SqlDialectContext context) { + public OracleSqlDialect(final SqlDialectContext context) { super(context); - omitParenthesesMap.add(ScalarFunction.SYSDATE); - omitParenthesesMap.add(ScalarFunction.SYSTIMESTAMP); + this.omitParenthesesMap.add(ScalarFunction.SYSDATE); + this.omitParenthesesMap.add(ScalarFunction.SYSTIMESTAMP); } - public static final String NAME = "ORACLE"; + private static final String NAME = "ORACLE"; - public String getPublicName() { + public static String getPublicName() { return NAME; } public boolean getCastAggFuncToFloat() { - return castAggFuncToFloat; + return this.castAggFuncToFloat; } public boolean getCastScalarFuncToFloat() { - return castScalarFuncToFloat; + return this.castScalarFuncToFloat; } @Override public Capabilities getCapabilities() { - Capabilities cap = new Capabilities(); + final Capabilities cap = new Capabilities(); // Capabilities cap.supportMainCapability(MainCapability.SELECTLIST_PROJECTION); @@ -96,7 +104,7 @@ public Capabilities getCapabilities() { // GEO_INTERSECTION_AGGREGATE is not supported // GEO_UNION_AGGREGATE is not supported // APPROXIMATE_COUNT_DISTINCT supported with version >= 12.1.0.2 - if (castAggFuncToFloat) { + if (this.castAggFuncToFloat) { // Cast result to FLOAT because result set precision = 0, scale = 0 cap.supportAggregateFunction(AggregateFunctionCapability.SUM); cap.supportAggregateFunction(AggregateFunctionCapability.SUM_DISTINCT); @@ -125,10 +133,12 @@ public Capabilities getCapabilities() { cap.supportScalarFunction(ScalarFunctionCapability.CEIL); cap.supportScalarFunction(ScalarFunctionCapability.DIV); cap.supportScalarFunction(ScalarFunctionCapability.FLOOR); - // ROUND is not supported. DATETIME could be pushed down, NUMBER would have to be rounded. + // ROUND is not supported. DATETIME could be pushed down, NUMBER would have to + // be rounded. cap.supportScalarFunction(ScalarFunctionCapability.SIGN); - // TRUNC is not supported. DATETIME could be pushed down, NUMBER would have to be rounded. - if (castScalarFuncToFloat) { + // TRUNC is not supported. DATETIME could be pushed down, NUMBER would have to + // be rounded. 
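+ // Example: without this cast, SUM over a NUMBER column comes back with
+ // precision 0 and scale 0, which cannot be mapped to an Exasol DECIMAL.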
+ if (this.castScalarFuncToFloat) { // Cast result to FLOAT because result set precision = 0, scale = 0 cap.supportScalarFunction(ScalarFunctionCapability.ADD); cap.supportScalarFunction(ScalarFunctionCapability.SUB); @@ -165,7 +175,8 @@ public Capabilities getCapabilities() { // COLOGNE_PHONETIC is not supported. // CONCAT is not supported. Number of arguments can be different. // DUMP is not supported. Output is different. - // EDIT_DISTANCE is not supported. Output is different. UTL_MATCH.EDIT_DISTANCE returns -1 with NULL argument. + // EDIT_DISTANCE is not supported. Output is different. UTL_MATCH.EDIT_DISTANCE + // returns -1 with NULL argument. // INSERT is not supported. cap.supportScalarFunction(ScalarFunctionCapability.INSTR); cap.supportScalarFunction(ScalarFunctionCapability.LENGTH); @@ -180,7 +191,8 @@ public Capabilities getCapabilities() { cap.supportScalarFunction(ScalarFunctionCapability.REPEAT); cap.supportScalarFunction(ScalarFunctionCapability.REPLACE); cap.supportScalarFunction(ScalarFunctionCapability.REVERSE); - // RIGHT is not supported. Possible solution with SUBSTRING (must handle corner cases correctly). + // RIGHT is not supported. Possible solution with SUBSTRING (must handle corner + // cases correctly). cap.supportScalarFunction(ScalarFunctionCapability.RPAD); cap.supportScalarFunction(ScalarFunctionCapability.RTRIM); cap.supportScalarFunction(ScalarFunctionCapability.SOUNDEX); @@ -301,9 +313,10 @@ public Capabilities getCapabilities() { @Override public Map getAggregateFunctionAliases() { - Map aggregationAliases = new EnumMap<>(AggregateFunction.class); + final Map aggregationAliases = new EnumMap<>(AggregateFunction.class); // APPROXIMATE_COUNT_DISTINCT supported with version >= 12.1.0.2 - // aggregationAliases.put(AggregateFunction.APPROXIMATE_COUNT_DISTINCT, "APPROX_COUNT_DISTINCT"); + // aggregationAliases.put(AggregateFunction.APPROXIMATE_COUNT_DISTINCT, + // "APPROX_COUNT_DISTINCT"); return aggregationAliases; } @@ -318,10 +331,13 @@ public SchemaOrCatalogSupport supportsJdbcSchemas() { } @Override - public MappedTable mapTable(ResultSet tables) throws SQLException { - String tableName = tables.getString("TABLE_NAME"); + public MappedTable mapTable(final ResultSet tables) throws SQLException { + final String tableName = tables.getString("TABLE_NAME"); if (tableName.startsWith("BIN$")) { - // In case of Oracle we may see deleted tables with strange names (BIN$OeQco6jg/drgUDAKzmRzgA==$0). Should be filtered out. Squirrel also doesn't see them for unknown reasons. See http://stackoverflow.com/questions/2446053/what-are-the-bin-tables-in-oracles-all-tab-columns-table + // In case of Oracle we may see deleted tables with strange names + // (BIN$OeQco6jg/drgUDAKzmRzgA==$0). Should be filtered out. Squirrel also + // doesn't see them for unknown reasons. 
See + // http://stackoverflow.com/questions/2446053/what-are-the-bin-tables-in-oracles-all-tab-columns-table System.out.println("Skip table: " + tableName); return MappedTable.createIgnoredTable(); } else { @@ -330,52 +346,53 @@ public MappedTable mapTable(ResultSet tables) throws SQLException { } @Override - public DataType dialectSpecificMapJdbcType(JdbcTypeDescription jdbcTypeDescription) throws SQLException { + public DataType dialectSpecificMapJdbcType(final JdbcTypeDescription jdbcTypeDescription) throws SQLException { DataType colType = null; - int jdbcType = jdbcTypeDescription.getJdbcType(); + final int jdbcType = jdbcTypeDescription.getJdbcType(); switch (jdbcType) { - case Types.DECIMAL: - int decimalPrec = jdbcTypeDescription.getPrecisionOrSize(); - int decimalScale = jdbcTypeDescription.getDecimalScale(); - if (decimalScale == -127) { - // Oracle JDBC driver returns scale -127 if NUMBER data type was specified without scale and precision. Convert to VARCHAR. - // See http://docs.oracle.com/cd/B28359_01/server.111/b28318/datatype.htm#i16209 - // and https://docs.oracle.com/cd/E19501-01/819-3659/gcmaz/ - colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); - break; - } - if (decimalPrec <= DataType.maxExasolDecimalPrecision) { - colType = DataType.createDecimal(decimalPrec, decimalScale); - } else { - colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); - } - break; - case Types.OTHER: - // Oracle JDBC uses OTHER as CLOB - colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); - break; - case -103: - // INTERVAL YEAR TO MONTH - case -104: - // INTERVAL DAY TO SECOND + case Types.DECIMAL: + final int decimalPrec = jdbcTypeDescription.getPrecisionOrSize(); + final int decimalScale = jdbcTypeDescription.getDecimalScale(); + if (decimalScale == -127) { + // Oracle JDBC driver returns scale -127 if NUMBER data type was specified + // without scale and precision. Convert to VARCHAR. + // See http://docs.oracle.com/cd/B28359_01/server.111/b28318/datatype.htm#i16209 + // and https://docs.oracle.com/cd/E19501-01/819-3659/gcmaz/ colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); break; - case -102: - case -101: - // -101 and -102 is TIMESTAMP WITH (LOCAL) TIMEZONE in Oracle. + } + if (decimalPrec <= DataType.maxExasolDecimalPrecision) { + colType = DataType.createDecimal(decimalPrec, decimalScale); + } else { colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); - break; - case 100: - case 101: - // 100 and 101 are BINARY_FLOAT and BINARY_DOUBLE in Oracle. - colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); - break; + } + break; + case Types.OTHER: + // Oracle JDBC uses OTHER as CLOB + colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); + break; + case -103: + // INTERVAL YEAR TO MONTH + case -104: + // INTERVAL DAY TO SECOND + colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); + break; + case -102: + case -101: + // -101 and -102 is TIMESTAMP WITH (LOCAL) TIMEZONE in Oracle. + colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); + break; + case 100: + case 101: + // 100 and 101 are BINARY_FLOAT and BINARY_DOUBLE in Oracle. 
+ colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8); + break; } return colType; } @Override - public SqlGenerationVisitor getSqlGenerationVisitor(SqlGenerationContext context) { + public SqlGenerationVisitor getSqlGenerationVisitor(final SqlGenerationContext context) { return new OracleSqlGenerationVisitor(this, context); } @@ -390,14 +407,15 @@ public IdentifierCaseHandling getQuotedIdentifierHandling() { } @Override - public String applyQuote(String identifier) { - // If identifier contains double quotation marks ", it needs to be escaped by another double quotation mark. E.g. "a""b" is the identifier a"b in the db. + public String applyQuote(final String identifier) { + // If identifier contains double quotation marks ", it needs to be escaped by + // another double quotation mark. E.g. "a""b" is the identifier a"b in the db. return "\"" + identifier.replace("\"", "\"\"") + "\""; } @Override - public String applyQuoteIfNeeded(String identifier) { - boolean isSimpleIdentifier = identifier.matches("^[A-Z][0-9A-Z_]*"); + public String applyQuoteIfNeeded(final String identifier) { + final boolean isSimpleIdentifier = identifier.matches("^[A-Z][0-9A-Z_]*"); if (isSimpleIdentifier) { return identifier; } else { @@ -406,12 +424,12 @@ public String applyQuoteIfNeeded(String identifier) { } @Override - public boolean requiresCatalogQualifiedTableNames(SqlGenerationContext context) { + public boolean requiresCatalogQualifiedTableNames(final SqlGenerationContext context) { return false; } @Override - public boolean requiresSchemaQualifiedTableNames(SqlGenerationContext context) { + public boolean requiresSchemaQualifiedTableNames(final SqlGenerationContext context) { return true; } @@ -421,7 +439,7 @@ public NullSorting getDefaultNullSorting() { } @Override - public String getStringLiteral(String value) { + public String getStringLiteral(final String value) { return "'" + value.replace("'", "''") + "'"; } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/PostgreSQLSqlDialect.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/PostgreSQLSqlDialect.java index de877b5d1..fdacac552 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/PostgreSQLSqlDialect.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/PostgreSQLSqlDialect.java @@ -1,6 +1,5 @@ package com.exasol.adapter.dialects.impl; -import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Types; import java.util.EnumMap; @@ -12,31 +11,32 @@ import com.exasol.adapter.capabilities.MainCapability; import com.exasol.adapter.capabilities.PredicateCapability; import com.exasol.adapter.capabilities.ScalarFunctionCapability; -import com.exasol.adapter.dialects.*; +import com.exasol.adapter.dialects.AbstractSqlDialect; +import com.exasol.adapter.dialects.JdbcTypeDescription; +import com.exasol.adapter.dialects.SqlDialectContext; +import com.exasol.adapter.dialects.SqlGenerationContext; +import com.exasol.adapter.dialects.SqlGenerationVisitor; import com.exasol.adapter.metadata.DataType; -import com.exasol.adapter.sql.AggregateFunction; import com.exasol.adapter.sql.ScalarFunction; -public class PostgreSQLSqlDialect extends AbstractSqlDialect{ +public class PostgreSQLSqlDialect extends AbstractSqlDialect { + public PostgreSQLSqlDialect(final SqlDialectContext context) { + super(context); + } + + private static final 
String NAME = "POSTGRESQL"; + public static int maxPostgresSQLVarcharSize = 2000000; // Postgres limit actually is 1 GB, so we use as max the + // EXASOL limit - public PostgreSQLSqlDialect(SqlDialectContext context) { - super(context); - } + public static String getPublicName() { + return NAME; + } - public static final String NAME = "POSTGRESQL"; - - public static int maxPostgresSQLVarcharSize = 2000000; // Postgres limit actually is 1 GB, so we use as max the EXASOL limit - - @Override - public String getPublicName() { - return NAME; - } + @Override + public Capabilities getCapabilities() { - @Override - public Capabilities getCapabilities() { - - Capabilities cap = new Capabilities(); + final Capabilities cap = new Capabilities(); cap.supportMainCapability(MainCapability.SELECTLIST_PROJECTION); cap.supportMainCapability(MainCapability.SELECTLIST_EXPRESSIONS); @@ -50,7 +50,7 @@ public Capabilities getCapabilities() { cap.supportMainCapability(MainCapability.ORDER_BY_EXPRESSION); cap.supportMainCapability(MainCapability.LIMIT); cap.supportMainCapability(MainCapability.LIMIT_WITH_OFFSET); - + // Predicates cap.supportPredicate(PredicateCapability.AND); cap.supportPredicate(PredicateCapability.OR); @@ -66,7 +66,7 @@ public Capabilities getCapabilities() { cap.supportPredicate(PredicateCapability.IN_CONSTLIST); cap.supportPredicate(PredicateCapability.IS_NULL); cap.supportPredicate(PredicateCapability.IS_NOT_NULL); - + // Literals // BOOL is not supported cap.supportLiteral(LiteralCapability.BOOL); @@ -77,43 +77,41 @@ public Capabilities getCapabilities() { cap.supportLiteral(LiteralCapability.DOUBLE); cap.supportLiteral(LiteralCapability.EXACTNUMERIC); cap.supportLiteral(LiteralCapability.STRING); - //cap.supportLiteral(LiteralCapability.INTERVAL); - - + // cap.supportLiteral(LiteralCapability.INTERVAL); + // Aggregate functions cap.supportAggregateFunction(AggregateFunctionCapability.COUNT); cap.supportAggregateFunction(AggregateFunctionCapability.COUNT_STAR); cap.supportAggregateFunction(AggregateFunctionCapability.COUNT_DISTINCT); - + cap.supportAggregateFunction(AggregateFunctionCapability.SUM); cap.supportAggregateFunction(AggregateFunctionCapability.SUM_DISTINCT); cap.supportAggregateFunction(AggregateFunctionCapability.MIN); cap.supportAggregateFunction(AggregateFunctionCapability.MAX); cap.supportAggregateFunction(AggregateFunctionCapability.AVG); cap.supportAggregateFunction(AggregateFunctionCapability.AVG_DISTINCT); - + cap.supportAggregateFunction(AggregateFunctionCapability.MEDIAN); cap.supportAggregateFunction(AggregateFunctionCapability.FIRST_VALUE); cap.supportAggregateFunction(AggregateFunctionCapability.LAST_VALUE); - + cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV); cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_DISTINCT); cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_POP); cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_POP_DISTINCT); cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_SAMP); cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_SAMP_DISTINCT); - + cap.supportAggregateFunction(AggregateFunctionCapability.VARIANCE); cap.supportAggregateFunction(AggregateFunctionCapability.VARIANCE_DISTINCT); cap.supportAggregateFunction(AggregateFunctionCapability.VAR_POP); cap.supportAggregateFunction(AggregateFunctionCapability.VAR_POP_DISTINCT); cap.supportAggregateFunction(AggregateFunctionCapability.VAR_SAMP); - 
cap.supportAggregateFunction(AggregateFunctionCapability.VAR_SAMP_DISTINCT) ;
-
+        cap.supportAggregateFunction(AggregateFunctionCapability.VAR_SAMP_DISTINCT);
+
         cap.supportAggregateFunction(AggregateFunctionCapability.GROUP_CONCAT); // translated to string_agg
-
-        //math functions
+        // math functions
         // Standard Arithmetic Operators
         cap.supportScalarFunction(ScalarFunctionCapability.ADD);
         cap.supportScalarFunction(ScalarFunctionCapability.SUB);
@@ -153,45 +151,44 @@ public Capabilities getCapabilities() {
         cap.supportScalarFunction(ScalarFunctionCapability.TAN);
         cap.supportScalarFunction(ScalarFunctionCapability.TANH);
         cap.supportScalarFunction(ScalarFunctionCapability.TRUNC);
-
-
+
         // String Functions
         cap.supportScalarFunction(ScalarFunctionCapability.ASCII);
         cap.supportScalarFunction(ScalarFunctionCapability.BIT_LENGTH);
         cap.supportScalarFunction(ScalarFunctionCapability.CHR);
-        //cap.supportScalarFunction(ScalarFunctionCapability.COLOGNE_PHONETIC);
+        // cap.supportScalarFunction(ScalarFunctionCapability.COLOGNE_PHONETIC);
         cap.supportScalarFunction(ScalarFunctionCapability.CONCAT);
-        //cap.supportScalarFunction(ScalarFunctionCapability.DUMP);
-        //cap.supportScalarFunction(ScalarFunctionCapability.EDIT_DISTANCE);
-        //cap.supportScalarFunction(ScalarFunctionCapability.INSERT);
+        // cap.supportScalarFunction(ScalarFunctionCapability.DUMP);
+        // cap.supportScalarFunction(ScalarFunctionCapability.EDIT_DISTANCE);
+        // cap.supportScalarFunction(ScalarFunctionCapability.INSERT);
         cap.supportScalarFunction(ScalarFunctionCapability.INSTR);
         cap.supportScalarFunction(ScalarFunctionCapability.LENGTH);
-        //cap.supportScalarFunction(ScalarFunctionCapability.LOCATE);
+        // cap.supportScalarFunction(ScalarFunctionCapability.LOCATE);
         cap.supportScalarFunction(ScalarFunctionCapability.LOWER);
         cap.supportScalarFunction(ScalarFunctionCapability.LPAD);
         cap.supportScalarFunction(ScalarFunctionCapability.LTRIM);
         cap.supportScalarFunction(ScalarFunctionCapability.OCTET_LENGTH);
-        //cap.supportScalarFunction(ScalarFunctionCapability.REGEXP_INSTR);
+        // cap.supportScalarFunction(ScalarFunctionCapability.REGEXP_INSTR);
         cap.supportScalarFunction(ScalarFunctionCapability.REGEXP_REPLACE);
-        //cap.supportScalarFunction(ScalarFunctionCapability.REGEXP_SUBSTR);
+        // cap.supportScalarFunction(ScalarFunctionCapability.REGEXP_SUBSTR);
         cap.supportScalarFunction(ScalarFunctionCapability.REPEAT);
         cap.supportScalarFunction(ScalarFunctionCapability.REPLACE);
         cap.supportScalarFunction(ScalarFunctionCapability.REVERSE);
         cap.supportScalarFunction(ScalarFunctionCapability.RIGHT);
         cap.supportScalarFunction(ScalarFunctionCapability.RPAD);
         cap.supportScalarFunction(ScalarFunctionCapability.RTRIM);
-        //cap.supportScalarFunction(ScalarFunctionCapability.SOUNDEX);
-        //cap.supportScalarFunction(ScalarFunctionCapability.SPACE);
+        // cap.supportScalarFunction(ScalarFunctionCapability.SOUNDEX);
+        // cap.supportScalarFunction(ScalarFunctionCapability.SPACE);
         cap.supportScalarFunction(ScalarFunctionCapability.SUBSTR);
         cap.supportScalarFunction(ScalarFunctionCapability.TRANSLATE);
         cap.supportScalarFunction(ScalarFunctionCapability.TRIM);
         cap.supportScalarFunction(ScalarFunctionCapability.UNICODE);
         cap.supportScalarFunction(ScalarFunctionCapability.UNICODECHR);
         cap.supportScalarFunction(ScalarFunctionCapability.UPPER);
-
+
         // Date/Time Functions
-
-        //The following functions will be rewrited to + operator in the Visitor
+
+        // The following functions will be rewritten to the + operator in the Visitor
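To make the comment above concrete: ADD_DAYS and friends have no direct PostgreSQL counterpart, so the visitor rewrites them as interval arithmetic. A hedged sketch of one plausible shape of that rewrite (hypothetical helper, not the PostgresSQLSqlGenerationVisitor itself):

```java
// Hypothetical sketch: ADD_DAYS(order_date, 3) becomes
// order_date + INTERVAL '3 day' in the generated PostgreSQL query.
public final class PostgresAddDaysSketch {
    static String rewriteAddDays(final String dateExpression, final long days) {
        return dateExpression + " + INTERVAL '" + days + " day'";
    }

    public static void main(final String[] args) {
        System.out.println(rewriteAddDays("order_date", 3)); // order_date + INTERVAL '3 day'
    }
}
```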
cap.supportScalarFunction(ScalarFunctionCapability.ADD_DAYS); cap.supportScalarFunction(ScalarFunctionCapability.ADD_HOURS); cap.supportScalarFunction(ScalarFunctionCapability.ADD_MINUTES); @@ -199,19 +196,19 @@ public Capabilities getCapabilities() { cap.supportScalarFunction(ScalarFunctionCapability.ADD_SECONDS); cap.supportScalarFunction(ScalarFunctionCapability.ADD_WEEKS); cap.supportScalarFunction(ScalarFunctionCapability.ADD_YEARS); - - //cap.supportScalarFunction(ScalarFunctionCapability.CONVERT_TZ); - - - //handled via Visitor and transformed to e.g. date_part('day',age('2012-03-05','2010-04-01' )) + + // cap.supportScalarFunction(ScalarFunctionCapability.CONVERT_TZ); + + // handled via Visitor and transformed to e.g. + // date_part('day',age('2012-03-05','2010-04-01' )) cap.supportScalarFunction(ScalarFunctionCapability.SECONDS_BETWEEN); cap.supportScalarFunction(ScalarFunctionCapability.MINUTES_BETWEEN); cap.supportScalarFunction(ScalarFunctionCapability.HOURS_BETWEEN); cap.supportScalarFunction(ScalarFunctionCapability.DAYS_BETWEEN); cap.supportScalarFunction(ScalarFunctionCapability.MONTHS_BETWEEN); cap.supportScalarFunction(ScalarFunctionCapability.YEARS_BETWEEN); - - //handled via Visitor and transformed to e.g. date_part + + // handled via Visitor and transformed to e.g. date_part cap.supportScalarFunction(ScalarFunctionCapability.MINUTE); cap.supportScalarFunction(ScalarFunctionCapability.SECOND); cap.supportScalarFunction(ScalarFunctionCapability.DAY); @@ -219,20 +216,19 @@ public Capabilities getCapabilities() { cap.supportScalarFunction(ScalarFunctionCapability.MONTH); cap.supportScalarFunction(ScalarFunctionCapability.YEAR); - cap.supportScalarFunction(ScalarFunctionCapability.CURRENT_DATE); cap.supportScalarFunction(ScalarFunctionCapability.CURRENT_TIMESTAMP); cap.supportScalarFunction(ScalarFunctionCapability.DATE_TRUNC); - - //cap.supportScalarFunction(ScalarFunctionCapability.DBTIMEZONE); + + // cap.supportScalarFunction(ScalarFunctionCapability.DBTIMEZONE); cap.supportScalarFunction(ScalarFunctionCapability.EXTRACT); cap.supportScalarFunction(ScalarFunctionCapability.LOCALTIMESTAMP); - //cap.supportScalarFunction(ScalarFunctionCapability.NUMTODSINTERVAL); - //cap.supportScalarFunction(ScalarFunctionCapability.NUMTOYMINTERVAL); - cap.supportScalarFunction(ScalarFunctionCapability.POSIX_TIME); //converted to extract(epoche - //cap.supportScalarFunction(ScalarFunctionCapability.SESSIONTIMEZONE); - //cap.supportScalarFunction(ScalarFunctionCapability.SYSDATE); - //cap.supportScalarFunction(ScalarFunctionCapability.SYSTIMESTAMP); + // cap.supportScalarFunction(ScalarFunctionCapability.NUMTODSINTERVAL); + // cap.supportScalarFunction(ScalarFunctionCapability.NUMTOYMINTERVAL); + cap.supportScalarFunction(ScalarFunctionCapability.POSIX_TIME); // converted to extract(epoche + // cap.supportScalarFunction(ScalarFunctionCapability.SESSIONTIMEZONE); + // cap.supportScalarFunction(ScalarFunctionCapability.SYSDATE); + // cap.supportScalarFunction(ScalarFunctionCapability.SYSTIMESTAMP); // Conversion functions // cap.supportScalarFunction(ScalarFunctionCapability.IS_NUMBER); @@ -247,7 +243,7 @@ public Capabilities getCapabilities() { // cap.supportScalarFunction(ScalarFunctionCapability.TO_YMINTERVAL); // cap.supportScalarFunction(ScalarFunctionCapability.TO_NUMBER); // cap.supportScalarFunction(ScalarFunctionCapability.TO_TIMESTAMP); - + // Bitwise functions // cap.supportScalarFunction(ScalarFunctionCapability.BIT_AND); // 
cap.supportScalarFunction(ScalarFunctionCapability.BIT_CHECK); @@ -256,9 +252,8 @@ public Capabilities getCapabilities() { // cap.supportScalarFunction(ScalarFunctionCapability.BIT_SET); // cap.supportScalarFunction(ScalarFunctionCapability.BIT_TO_NUM); // cap.supportScalarFunction(ScalarFunctionCapability.BIT_XOR); - - - // Other functions + + // Other functions cap.supportScalarFunction(ScalarFunctionCapability.CASE); // cap.supportScalarFunction(ScalarFunctionCapability.CURRENT_SCHEMA); // cap.supportScalarFunction(ScalarFunctionCapability.CURRENT_SESSION); @@ -271,124 +266,120 @@ public Capabilities getCapabilities() { // cap.supportScalarFunction(ScalarFunctionCapability.NULLIFZERO); // cap.supportScalarFunction(ScalarFunctionCapability.SYS_GUID); // cap.supportScalarFunction(ScalarFunctionCapability.ZEROIFNULL); - + return cap; - } - - @Override - public DataType dialectSpecificMapJdbcType(JdbcTypeDescription jdbcTypeDescription) throws SQLException { + } + + @Override + public DataType dialectSpecificMapJdbcType(final JdbcTypeDescription jdbcTypeDescription) throws SQLException { DataType colType = null; - int jdbcType = jdbcTypeDescription.getJdbcType(); + final int jdbcType = jdbcTypeDescription.getJdbcType(); switch (jdbcType) { - case Types.OTHER: - String columnTypeName = jdbcTypeDescription.getTypeName(); - - if(columnTypeName.equals("varbit")){ - int n = jdbcTypeDescription.getPrecisionOrSize(); - colType = DataType.createVarChar(n, DataType.ExaCharset.UTF8); - } - else - colType = DataType.createVarChar(PostgreSQLSqlDialect.maxPostgresSQLVarcharSize, DataType.ExaCharset.UTF8); - break; - case Types.SQLXML: - colType = DataType.createVarChar(PostgreSQLSqlDialect.maxPostgresSQLVarcharSize, DataType.ExaCharset.UTF8); - break; - case Types.DISTINCT: - colType=DataType.createVarChar(PostgreSQLSqlDialect.maxPostgresSQLVarcharSize, DataType.ExaCharset.UTF8); - break; + case Types.OTHER: + final String columnTypeName = jdbcTypeDescription.getTypeName(); + + if (columnTypeName.equals("varbit")) { + final int n = jdbcTypeDescription.getPrecisionOrSize(); + colType = DataType.createVarChar(n, DataType.ExaCharset.UTF8); + } else { + colType = DataType.createVarChar(PostgreSQLSqlDialect.maxPostgresSQLVarcharSize, + DataType.ExaCharset.UTF8); + } + break; + case Types.SQLXML: + colType = DataType.createVarChar(PostgreSQLSqlDialect.maxPostgresSQLVarcharSize, DataType.ExaCharset.UTF8); + break; + case Types.DISTINCT: + colType = DataType.createVarChar(PostgreSQLSqlDialect.maxPostgresSQLVarcharSize, DataType.ExaCharset.UTF8); + break; } - + return colType; } - - @Override + + @Override public Map getScalarFunctionAliases() { - - Map scalarAliases = new EnumMap<>(ScalarFunction.class); - - scalarAliases.put(ScalarFunction.SUBSTR,"SUBSTRING"); - scalarAliases.put(ScalarFunction.HASH_MD5, "MD5"); - - return scalarAliases; - - } - - - @Override - public SchemaOrCatalogSupport supportsJdbcCatalogs() { + + final Map scalarAliases = new EnumMap<>(ScalarFunction.class); + + scalarAliases.put(ScalarFunction.SUBSTR, "SUBSTRING"); + scalarAliases.put(ScalarFunction.HASH_MD5, "MD5"); + + return scalarAliases; + + } + + @Override + public SchemaOrCatalogSupport supportsJdbcCatalogs() { return SchemaOrCatalogSupport.SUPPORTED; - } + } - @Override - public SchemaOrCatalogSupport supportsJdbcSchemas() { + @Override + public SchemaOrCatalogSupport supportsJdbcSchemas() { return SchemaOrCatalogSupport.SUPPORTED; - } - - @Override - public String changeIdentifierCaseIfNeeded(String identifier) { - - boolean 
isSimplePostgresIdentifier = identifier.matches("^[a-z][0-9a-z_]*"); - - if(isSimplePostgresIdentifier) - return identifier.toUpperCase(); - else - return identifier; - } - - @Override - public IdentifierCaseHandling getUnquotedIdentifierHandling() { - return IdentifierCaseHandling.INTERPRET_AS_LOWER; - } - - @Override - public IdentifierCaseHandling getQuotedIdentifierHandling() { + + @Override + public String changeIdentifierCaseIfNeeded(final String identifier) { + + final boolean isSimplePostgresIdentifier = identifier.matches("^[a-z][0-9a-z_]*"); + + if (isSimplePostgresIdentifier) { + return identifier.toUpperCase(); + } else { + return identifier; + } + + } + + @Override + public IdentifierCaseHandling getUnquotedIdentifierHandling() { + return IdentifierCaseHandling.INTERPRET_AS_LOWER; + } + + @Override + public IdentifierCaseHandling getQuotedIdentifierHandling() { return IdentifierCaseHandling.INTERPRET_CASE_SENSITIVE; - } - - - @Override - public String applyQuote(String identifier) { - return "\"" + identifier.replace("\"", "\"\"") + "\""; - } - - @Override - public String applyQuoteIfNeeded(String identifier) { - boolean isSimpleIdentifier = identifier.matches("^[A-Z][0-9A-Z_]*"); - if (isSimpleIdentifier) { - return identifier; - } else { - return applyQuote(identifier); - } - } - - @Override - public boolean requiresCatalogQualifiedTableNames( - SqlGenerationContext context) { - return false; - } - - - - @Override - public boolean requiresSchemaQualifiedTableNames( - SqlGenerationContext context) { - return true; - } - - @Override - public NullSorting getDefaultNullSorting() { - return NullSorting.NULLS_SORTED_AT_END; - } - - @Override - public String getStringLiteral(String value) { - return "'" + value.replace("'", "''") + "'"; - } - - @Override - public SqlGenerationVisitor getSqlGenerationVisitor(SqlGenerationContext context) { + } + + @Override + public String applyQuote(final String identifier) { + return "\"" + identifier.replace("\"", "\"\"") + "\""; + } + + @Override + public String applyQuoteIfNeeded(final String identifier) { + final boolean isSimpleIdentifier = identifier.matches("^[A-Z][0-9A-Z_]*"); + if (isSimpleIdentifier) { + return identifier; + } else { + return applyQuote(identifier); + } + } + + @Override + public boolean requiresCatalogQualifiedTableNames(final SqlGenerationContext context) { + return false; + } + + @Override + public boolean requiresSchemaQualifiedTableNames(final SqlGenerationContext context) { + return true; + } + + @Override + public NullSorting getDefaultNullSorting() { + return NullSorting.NULLS_SORTED_AT_END; + } + + @Override + public String getStringLiteral(final String value) { + return "'" + value.replace("'", "''") + "'"; + } + + @Override + public SqlGenerationVisitor getSqlGenerationVisitor(final SqlGenerationContext context) { return new PostgresSQLSqlGenerationVisitor(this, context); } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/RedshiftSqlDialect.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/RedshiftSqlDialect.java index 6d531997a..21e176ae6 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/RedshiftSqlDialect.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/RedshiftSqlDialect.java @@ -1,6 +1,5 @@ package com.exasol.adapter.dialects.impl; -import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Types; 
import java.util.EnumMap; @@ -12,30 +11,31 @@ import com.exasol.adapter.capabilities.MainCapability; import com.exasol.adapter.capabilities.PredicateCapability; import com.exasol.adapter.capabilities.ScalarFunctionCapability; -import com.exasol.adapter.dialects.*; +import com.exasol.adapter.dialects.AbstractSqlDialect; +import com.exasol.adapter.dialects.JdbcTypeDescription; +import com.exasol.adapter.dialects.SqlDialectContext; +import com.exasol.adapter.dialects.SqlGenerationContext; +import com.exasol.adapter.dialects.SqlGenerationVisitor; import com.exasol.adapter.metadata.DataType; import com.exasol.adapter.sql.AggregateFunction; import com.exasol.adapter.sql.ScalarFunction; +public class RedshiftSqlDialect extends AbstractSqlDialect { -public class RedshiftSqlDialect extends AbstractSqlDialect{ + public RedshiftSqlDialect(final SqlDialectContext context) { + super(context); + } + private static final String NAME = "REDSHIFT"; - public RedshiftSqlDialect(SqlDialectContext context) { - super(context); - } + public static String getPublicName() { + return NAME; + } - public static final String NAME = "REDSHIFT"; - - @Override - public String getPublicName() { - return NAME; - } + @Override + public Capabilities getCapabilities() { - @Override - public Capabilities getCapabilities() { - - Capabilities cap = new Capabilities(); + final Capabilities cap = new Capabilities(); cap.supportMainCapability(MainCapability.SELECTLIST_PROJECTION); cap.supportMainCapability(MainCapability.SELECTLIST_EXPRESSIONS); @@ -49,7 +49,7 @@ public Capabilities getCapabilities() { cap.supportMainCapability(MainCapability.ORDER_BY_EXPRESSION); cap.supportMainCapability(MainCapability.LIMIT); cap.supportMainCapability(MainCapability.LIMIT_WITH_OFFSET); - + // Predicates cap.supportPredicate(PredicateCapability.AND); cap.supportPredicate(PredicateCapability.OR); @@ -65,7 +65,7 @@ public Capabilities getCapabilities() { cap.supportPredicate(PredicateCapability.IN_CONSTLIST); cap.supportPredicate(PredicateCapability.IS_NULL); cap.supportPredicate(PredicateCapability.IS_NOT_NULL); - + // Literals // BOOL is not supported cap.supportLiteral(LiteralCapability.BOOL); @@ -77,14 +77,13 @@ public Capabilities getCapabilities() { cap.supportLiteral(LiteralCapability.EXACTNUMERIC); cap.supportLiteral(LiteralCapability.STRING); cap.supportLiteral(LiteralCapability.INTERVAL); - - + // Aggregate functions cap.supportAggregateFunction(AggregateFunctionCapability.COUNT); cap.supportAggregateFunction(AggregateFunctionCapability.COUNT_STAR); cap.supportAggregateFunction(AggregateFunctionCapability.COUNT_DISTINCT); cap.supportAggregateFunction(AggregateFunctionCapability.GROUP_CONCAT); - + cap.supportAggregateFunction(AggregateFunctionCapability.SUM); cap.supportAggregateFunction(AggregateFunctionCapability.SUM_DISTINCT); cap.supportAggregateFunction(AggregateFunctionCapability.MIN); @@ -105,10 +104,9 @@ public Capabilities getCapabilities() { cap.supportAggregateFunction(AggregateFunctionCapability.VAR_POP); cap.supportAggregateFunction(AggregateFunctionCapability.VAR_POP_DISTINCT); cap.supportAggregateFunction(AggregateFunctionCapability.VAR_SAMP); - cap.supportAggregateFunction(AggregateFunctionCapability.VAR_SAMP_DISTINCT) ; - - - //math functions + cap.supportAggregateFunction(AggregateFunctionCapability.VAR_SAMP_DISTINCT); + + // math functions cap.supportScalarFunction(ScalarFunctionCapability.CEIL); cap.supportScalarFunction(ScalarFunctionCapability.DIV); cap.supportScalarFunction(ScalarFunctionCapability.FLOOR); @@ -141,13 
+139,12 @@ public Capabilities getCapabilities() {
         cap.supportScalarFunction(ScalarFunctionCapability.SQRT);
         cap.supportScalarFunction(ScalarFunctionCapability.TAN);
         cap.supportScalarFunction(ScalarFunctionCapability.TANH);
-        cap.supportScalarFunction(ScalarFunctionCapability.ASCII);
+        cap.supportScalarFunction(ScalarFunctionCapability.ASCII);
         cap.supportScalarFunction(ScalarFunctionCapability.CHR);
         cap.supportScalarFunction(ScalarFunctionCapability.INSTR);
         cap.supportScalarFunction(ScalarFunctionCapability.LENGTH);
         cap.supportScalarFunction(ScalarFunctionCapability.SIGN);
-
-
+
         cap.supportScalarFunction(ScalarFunctionCapability.CONCAT);
         cap.supportScalarFunction(ScalarFunctionCapability.LOCATE);
         cap.supportScalarFunction(ScalarFunctionCapability.LOWER);
@@ -167,148 +164,138 @@ public Capabilities getCapabilities() {
         cap.supportScalarFunction(ScalarFunctionCapability.TRIM);
         cap.supportScalarFunction(ScalarFunctionCapability.UPPER);
-
-        //Bit functions
+        // Bit functions
         cap.supportScalarFunction(ScalarFunctionCapability.BIT_AND);
         cap.supportScalarFunction(ScalarFunctionCapability.BIT_OR);
-        //Date and Time Functions
+        // Date and Time Functions
         cap.supportScalarFunction(ScalarFunctionCapability.ADD_MONTHS);
         cap.supportScalarFunction(ScalarFunctionCapability.MONTHS_BETWEEN);
         cap.supportScalarFunction(ScalarFunctionCapability.CURRENT_DATE);
         cap.supportScalarFunction(ScalarFunctionCapability.CURRENT_TIMESTAMP);
         cap.supportScalarFunction(ScalarFunctionCapability.CONVERT_TZ);
         cap.supportScalarFunction(ScalarFunctionCapability.SYSDATE);
-
-        cap.supportScalarFunction(ScalarFunctionCapability.YEAR);
+
+        cap.supportScalarFunction(ScalarFunctionCapability.YEAR);
         cap.supportScalarFunction(ScalarFunctionCapability.EXTRACT);
-
-
-        //Convertion functions
+
+        // Conversion functions
         cap.supportScalarFunction(ScalarFunctionCapability.CAST);
         cap.supportScalarFunction(ScalarFunctionCapability.TO_NUMBER);
         cap.supportScalarFunction(ScalarFunctionCapability.TO_TIMESTAMP);
         cap.supportScalarFunction(ScalarFunctionCapability.TO_DATE);
-
-
-        //hash functions
+
+        // hash functions
         cap.supportScalarFunction(ScalarFunctionCapability.HASH_MD5);
         cap.supportScalarFunction(ScalarFunctionCapability.HASH_SHA1);
-
-
-        //system information functions
+
+        // system information functions
         cap.supportScalarFunction(ScalarFunctionCapability.CURRENT_SCHEMA);
         cap.supportScalarFunction(ScalarFunctionCapability.CURRENT_USER);
-
+
         return cap;
-    }
-
-    @Override
-    public DataType dialectSpecificMapJdbcType(JdbcTypeDescription jdbcTypeDescription) throws SQLException {
+    }
+
+    @Override
+    public DataType dialectSpecificMapJdbcType(final JdbcTypeDescription jdbcTypeDescription) throws SQLException {
         DataType colType = null;
-        int jdbcType = jdbcTypeDescription.getJdbcType();
+        final int jdbcType = jdbcTypeDescription.getJdbcType();
         switch (jdbcType) {
-        case Types.NUMERIC:
-            int decimalPrec = jdbcTypeDescription.getPrecisionOrSize();
-            int decimalScale = jdbcTypeDescription.getDecimalScale();
-
-            if (decimalPrec <= DataType.maxExasolDecimalPrecision) {
-                colType = DataType.createDecimal(decimalPrec, decimalScale);
-            } else {
-                colType = DataType.createDouble();
-            }
-            break;
-
+        case Types.NUMERIC:
+            final int decimalPrec = jdbcTypeDescription.getPrecisionOrSize();
+            final int decimalScale = jdbcTypeDescription.getDecimalScale();
+
+            if (decimalPrec <= DataType.maxExasolDecimalPrecision) {
+                colType = DataType.createDecimal(decimalPrec, decimalScale);
+            } else {
+                colType = DataType.createDouble();
+            }
+            break;
+
         }
         return colType;
     }
-
-
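For context, the Types.NUMERIC branch above keeps DECIMAL as long as the precision fits Exasol, and otherwise degrades to DOUBLE (keeping range, losing exactness). A minimal sketch of that decision; the limit of 36 is hard-coded here purely for illustration, while the adapter reads it from DataType.maxExasolDecimalPrecision:

```java
// Illustration of the NUMERIC mapping rule: keep DECIMAL while it fits,
// otherwise fall back to DOUBLE.
public final class RedshiftNumericMappingSketch {
    private static final int MAX_EXASOL_DECIMAL_PRECISION = 36; // assumed limit, for illustration only

    static String mapNumeric(final int precision, final int scale) {
        return (precision <= MAX_EXASOL_DECIMAL_PRECISION)
                ? "DECIMAL(" + precision + "," + scale + ")"
                : "DOUBLE";
    }

    public static void main(final String[] args) {
        System.out.println(mapNumeric(18, 2));  // DECIMAL(18,2)
        System.out.println(mapNumeric(38, 10)); // DOUBLE
    }
}
```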
@Override + + @Override public Map getScalarFunctionAliases() { - - Map scalarAliases = new EnumMap<>(ScalarFunction.class); - - scalarAliases.put(ScalarFunction.YEAR, "DATE_PART_YEAR"); - scalarAliases.put(ScalarFunction.CONVERT_TZ, "CONVERT_TIMEZONE"); - scalarAliases.put(ScalarFunction.HASH_MD5, "MD5"); - scalarAliases.put(ScalarFunction.HASH_SHA1, "FUNC_SHA1"); - - scalarAliases.put(ScalarFunction.SUBSTR,"SUBSTRING"); - - return scalarAliases; - - } - - - @Override + + final Map scalarAliases = new EnumMap<>(ScalarFunction.class); + + scalarAliases.put(ScalarFunction.YEAR, "DATE_PART_YEAR"); + scalarAliases.put(ScalarFunction.CONVERT_TZ, "CONVERT_TIMEZONE"); + scalarAliases.put(ScalarFunction.HASH_MD5, "MD5"); + scalarAliases.put(ScalarFunction.HASH_SHA1, "FUNC_SHA1"); + + scalarAliases.put(ScalarFunction.SUBSTR, "SUBSTRING"); + + return scalarAliases; + + } + + @Override public Map getAggregateFunctionAliases() { - Map aggregationAliases = new EnumMap<>(AggregateFunction.class); - + final Map aggregationAliases = new EnumMap<>(AggregateFunction.class); + return aggregationAliases; } - - @Override - public SchemaOrCatalogSupport supportsJdbcCatalogs() { + @Override + public SchemaOrCatalogSupport supportsJdbcCatalogs() { return SchemaOrCatalogSupport.SUPPORTED; - } + } - @Override - public SchemaOrCatalogSupport supportsJdbcSchemas() { + @Override + public SchemaOrCatalogSupport supportsJdbcSchemas() { return SchemaOrCatalogSupport.SUPPORTED; - } + } - @Override - public IdentifierCaseHandling getUnquotedIdentifierHandling() { - return IdentifierCaseHandling.INTERPRET_AS_UPPER; - } + @Override + public IdentifierCaseHandling getUnquotedIdentifierHandling() { + return IdentifierCaseHandling.INTERPRET_AS_UPPER; + } - @Override - public IdentifierCaseHandling getQuotedIdentifierHandling() { + @Override + public IdentifierCaseHandling getQuotedIdentifierHandling() { return IdentifierCaseHandling.INTERPRET_AS_UPPER; - } - - @Override - public String applyQuote(String identifier) { - return "\"" + identifier.replace("\"", "\"\"") + "\""; - } - - @Override - public String applyQuoteIfNeeded(String identifier) { - boolean isSimpleIdentifier = identifier.matches("^[A-Z][0-9A-Z_]*"); - if (isSimpleIdentifier) { - return identifier; - } else { - return applyQuote(identifier); - } - } - - @Override - public boolean requiresCatalogQualifiedTableNames( - SqlGenerationContext context) { - return false; - } - - - - @Override - public boolean requiresSchemaQualifiedTableNames( - SqlGenerationContext context) { - return true; - } - - @Override - public NullSorting getDefaultNullSorting() { - return NullSorting.NULLS_SORTED_AT_END; - } - - @Override - public String getStringLiteral(String value) { - return "'" + value.replace("'", "''") + "'"; - } - - @Override - public SqlGenerationVisitor getSqlGenerationVisitor(SqlGenerationContext context) { + } + + @Override + public String applyQuote(final String identifier) { + return "\"" + identifier.replace("\"", "\"\"") + "\""; + } + + @Override + public String applyQuoteIfNeeded(final String identifier) { + final boolean isSimpleIdentifier = identifier.matches("^[A-Z][0-9A-Z_]*"); + if (isSimpleIdentifier) { + return identifier; + } else { + return applyQuote(identifier); + } + } + + @Override + public boolean requiresCatalogQualifiedTableNames(final SqlGenerationContext context) { + return false; + } + + @Override + public boolean requiresSchemaQualifiedTableNames(final SqlGenerationContext context) { + return true; + } + + @Override + public NullSorting 
getDefaultNullSorting() { + return NullSorting.NULLS_SORTED_AT_END; + } + + @Override + public String getStringLiteral(final String value) { + return "'" + value.replace("'", "''") + "'"; + } + + @Override + public SqlGenerationVisitor getSqlGenerationVisitor(final SqlGenerationContext context) { return new RedshiftSqlGenerationVisitor(this, context); } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/SqlServerSqlDialect.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/SqlServerSqlDialect.java index 76651cf37..bade1267e 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/SqlServerSqlDialect.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/SqlServerSqlDialect.java @@ -1,6 +1,5 @@ package com.exasol.adapter.dialects.impl; -import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Types; import java.util.EnumMap; @@ -12,38 +11,34 @@ import com.exasol.adapter.capabilities.MainCapability; import com.exasol.adapter.capabilities.PredicateCapability; import com.exasol.adapter.capabilities.ScalarFunctionCapability; -import com.exasol.adapter.dialects.*; +import com.exasol.adapter.dialects.AbstractSqlDialect; +import com.exasol.adapter.dialects.JdbcTypeDescription; +import com.exasol.adapter.dialects.SqlDialectContext; +import com.exasol.adapter.dialects.SqlGenerationContext; +import com.exasol.adapter.dialects.SqlGenerationVisitor; import com.exasol.adapter.metadata.DataType; import com.exasol.adapter.sql.AggregateFunction; import com.exasol.adapter.sql.ScalarFunction; +public class SqlServerSqlDialect extends AbstractSqlDialect { + // Tested SQL Server versions: SQL Server 2014 + // Tested JDBC drivers: jtds-1.3.1 (https://sourceforge.net/projects/jtds/) + public final static int maxSqlServerVarcharSize = 8000; + public final static int maxSqlServerNVarcharSize = 4000; + private static final String NAME = "SQLSERVER"; -public class SqlServerSqlDialect extends AbstractSqlDialect{ - - - // Tested SQL Server versions: SQL Server 2014 - // Tested JDBC drivers: jtds-1.3.1 (https://sourceforge.net/projects/jtds/) - - public final static int maxSqlServerVarcharSize = 8000; - - public final static int maxSqlServerNVarcharSize = 4000; + public SqlServerSqlDialect(final SqlDialectContext context) { + super(context); + } - - public SqlServerSqlDialect(SqlDialectContext context) { - super(context); - } + public static String getPublicName() { + return NAME; + } - public static final String NAME = "SQLSERVER"; - - @Override - public String getPublicName() { - return NAME; - } + @Override + public Capabilities getCapabilities() { - @Override - public Capabilities getCapabilities() { - - Capabilities cap = new Capabilities(); + final Capabilities cap = new Capabilities(); cap.supportMainCapability(MainCapability.SELECTLIST_PROJECTION); cap.supportMainCapability(MainCapability.SELECTLIST_EXPRESSIONS); @@ -55,8 +50,8 @@ public Capabilities getCapabilities() { cap.supportMainCapability(MainCapability.AGGREGATE_HAVING); cap.supportMainCapability(MainCapability.ORDER_BY_COLUMN); cap.supportMainCapability(MainCapability.ORDER_BY_EXPRESSION); - cap.supportMainCapability(MainCapability.LIMIT); // LIMIT will be translated to TOP in SqlServerSqlGenerationVisitor.java - + cap.supportMainCapability(MainCapability.LIMIT); // LIMIT will be translated to TOP in + // SqlServerSqlGenerationVisitor.java // Predicates 
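Since SQL Server has no LIMIT clause, the LIMIT capability declared above is honoured by emitting TOP instead, as the comment notes; the predicate list introduced by the '// Predicates' header continues right after this sketch. A hypothetical illustration of the translation (not the adapter's visitor code):

```java
// Hypothetical sketch: SELECT ... LIMIT n becomes SELECT TOP n ... on SQL Server.
public final class SqlServerTopSketch {
    static String applyLimit(final String selectList, final String fromClause, final int limit) {
        return "SELECT TOP " + limit + " " + selectList + " FROM " + fromClause;
    }

    public static void main(final String[] args) {
        System.out.println(applyLimit("*", "[orders]", 10)); // SELECT TOP 10 * FROM [orders]
    }
}
```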
cap.supportPredicate(PredicateCapability.AND); @@ -73,7 +68,7 @@ public Capabilities getCapabilities() { cap.supportPredicate(PredicateCapability.IN_CONSTLIST); cap.supportPredicate(PredicateCapability.IS_NULL); cap.supportPredicate(PredicateCapability.IS_NOT_NULL); - + // Literals cap.supportLiteral(LiteralCapability.BOOL); cap.supportLiteral(LiteralCapability.NULL); @@ -84,7 +79,7 @@ public Capabilities getCapabilities() { cap.supportLiteral(LiteralCapability.EXACTNUMERIC); cap.supportLiteral(LiteralCapability.STRING); cap.supportLiteral(LiteralCapability.INTERVAL); - + // Aggregate functions cap.supportAggregateFunction(AggregateFunctionCapability.COUNT); cap.supportAggregateFunction(AggregateFunctionCapability.COUNT_STAR); @@ -99,7 +94,7 @@ public Capabilities getCapabilities() { cap.supportAggregateFunction(AggregateFunctionCapability.MEDIAN); cap.supportAggregateFunction(AggregateFunctionCapability.FIRST_VALUE); cap.supportAggregateFunction(AggregateFunctionCapability.LAST_VALUE); - + cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV); cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_DISTINCT); cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_POP); @@ -107,25 +102,22 @@ public Capabilities getCapabilities() { // STDDEV_SAMP // STDDEV_SAMP_DISTINCT - + cap.supportAggregateFunction(AggregateFunctionCapability.VARIANCE); cap.supportAggregateFunction(AggregateFunctionCapability.VARIANCE_DISTINCT); - + cap.supportAggregateFunction(AggregateFunctionCapability.VAR_POP); cap.supportAggregateFunction(AggregateFunctionCapability.VAR_POP_DISTINCT); - - // GROUP_CONCAT, - // GROUP_CONCAT_DISTINCT (AggregateFunction.GROUP_CONCAT), - // GROUP_CONCAT_SEPARATOR (AggregateFunction.GROUP_CONCAT), - // GROUP_CONCAT_ORDER_BY (AggregateFunction.GROUP_CONCAT), - // - // GEO_INTERSECTION_AGGREGATE, - // GEO_UNION_AGGREGATE, - // - // APPROXIMATE_COUNT_DISTINCT; - - + // GROUP_CONCAT, + // GROUP_CONCAT_DISTINCT (AggregateFunction.GROUP_CONCAT), + // GROUP_CONCAT_SEPARATOR (AggregateFunction.GROUP_CONCAT), + // GROUP_CONCAT_ORDER_BY (AggregateFunction.GROUP_CONCAT), + // + // GEO_INTERSECTION_AGGREGATE, + // GEO_UNION_AGGREGATE, + // + // APPROXIMATE_COUNT_DISTINCT; // Standard Arithmetic Operators cap.supportScalarFunction(ScalarFunctionCapability.ADD); @@ -136,23 +128,24 @@ public Capabilities getCapabilities() { // Unary prefix operators cap.supportScalarFunction(ScalarFunctionCapability.NEG); - // Numeric functions https://msdn.microsoft.com/en-us/library/ms177516(v=sql.110).aspx + // Numeric functions + // https://msdn.microsoft.com/en-us/library/ms177516(v=sql.110).aspx cap.supportScalarFunction(ScalarFunctionCapability.ABS); cap.supportScalarFunction(ScalarFunctionCapability.ACOS); cap.supportScalarFunction(ScalarFunctionCapability.ASIN); cap.supportScalarFunction(ScalarFunctionCapability.ATAN); cap.supportScalarFunction(ScalarFunctionCapability.ATAN2); // added alias ATN2 - cap.supportScalarFunction(ScalarFunctionCapability.CEIL); //alias CEILING + cap.supportScalarFunction(ScalarFunctionCapability.CEIL); // alias CEILING cap.supportScalarFunction(ScalarFunctionCapability.COS); - //COSH + // COSH cap.supportScalarFunction(ScalarFunctionCapability.COT); cap.supportScalarFunction(ScalarFunctionCapability.DEGREES); - //DIV, + // DIV, cap.supportScalarFunction(ScalarFunctionCapability.EXP); cap.supportScalarFunction(ScalarFunctionCapability.FLOOR); - //GREATEST, - //LEAST, - //LN, + // GREATEST, + // LEAST, + // LN, 
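The '// added alias ATN2' and '// alias CEILING' remarks above are realised through getScalarFunctionAliases() further down in this file: the visitor simply renames the function before emitting it. A hedged sketch of that renaming idea (hypothetical helper, not the adapter's API); the remaining numeric functions continue below:

```java
import java.util.HashMap;
import java.util.Map;

// Hypothetical sketch of alias-based renaming: Exasol's function name is
// swapped for the SQL Server spelling before the SQL text is generated.
public final class AliasRenameSketch {
    private static final Map<String, String> ALIASES = new HashMap<>();
    static {
        ALIASES.put("ATAN2", "ATN2");
        ALIASES.put("CEIL", "CEILING");
    }

    static String render(final String function, final String argumentList) {
        return ALIASES.getOrDefault(function, function) + "(" + argumentList + ")";
    }

    public static void main(final String[] args) {
        System.out.println(render("CEIL", "1.5")); // CEILING(1.5)
    }
}
```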
cap.supportScalarFunction(ScalarFunctionCapability.LOG); cap.supportScalarFunction(ScalarFunctionCapability.MOD); cap.supportScalarFunction(ScalarFunctionCapability.POWER); @@ -161,76 +154,75 @@ public Capabilities getCapabilities() { cap.supportScalarFunction(ScalarFunctionCapability.ROUND); cap.supportScalarFunction(ScalarFunctionCapability.SIGN); cap.supportScalarFunction(ScalarFunctionCapability.SIN); - //SINH, + // SINH, cap.supportScalarFunction(ScalarFunctionCapability.SQRT); cap.supportScalarFunction(ScalarFunctionCapability.TAN); - //TANH, + // TANH, cap.supportScalarFunction(ScalarFunctionCapability.TRUNC); - - // String Functions + + // String Functions cap.supportScalarFunction(ScalarFunctionCapability.ASCII); - //BIT_LENGTH, - cap.supportScalarFunction(ScalarFunctionCapability.CHR); //CHAR - //COLOGNE_PHONETIC, + // BIT_LENGTH, + cap.supportScalarFunction(ScalarFunctionCapability.CHR); // CHAR + // COLOGNE_PHONETIC, cap.supportScalarFunction(ScalarFunctionCapability.CONCAT); - //DUMP, - //EDIT_DISTANCE, - //INSERT, - cap.supportScalarFunction(ScalarFunctionCapability.INSTR); // translated to CHARINDEX in Visitor with Argument switch - cap.supportScalarFunction(ScalarFunctionCapability.LENGTH); //alias LEN - cap.supportScalarFunction(ScalarFunctionCapability.LOCATE); // CHARINDEX alias + // DUMP, + // EDIT_DISTANCE, + // INSERT, + cap.supportScalarFunction(ScalarFunctionCapability.INSTR); // translated to CHARINDEX in Visitor with Argument + // switch + cap.supportScalarFunction(ScalarFunctionCapability.LENGTH); // alias LEN + cap.supportScalarFunction(ScalarFunctionCapability.LOCATE); // CHARINDEX alias cap.supportScalarFunction(ScalarFunctionCapability.LOWER); - cap.supportScalarFunction(ScalarFunctionCapability.LPAD); //transformed in Visitor + cap.supportScalarFunction(ScalarFunctionCapability.LPAD); // transformed in Visitor cap.supportScalarFunction(ScalarFunctionCapability.LTRIM); - //OCTET_LENGTH, - //REGEXP_INSTR, - //REGEXP_REPLACE, - //REGEXP_SUBSTR, - cap.supportScalarFunction(ScalarFunctionCapability.REPEAT); //REPLICATE + // OCTET_LENGTH, + // REGEXP_INSTR, + // REGEXP_REPLACE, + // REGEXP_SUBSTR, + cap.supportScalarFunction(ScalarFunctionCapability.REPEAT); // REPLICATE cap.supportScalarFunction(ScalarFunctionCapability.REPLACE); cap.supportScalarFunction(ScalarFunctionCapability.REVERSE); cap.supportScalarFunction(ScalarFunctionCapability.RIGHT); - cap.supportScalarFunction(ScalarFunctionCapability.RPAD); + cap.supportScalarFunction(ScalarFunctionCapability.RPAD); cap.supportScalarFunction(ScalarFunctionCapability.RTRIM); - cap.supportScalarFunction(ScalarFunctionCapability.SOUNDEX); + cap.supportScalarFunction(ScalarFunctionCapability.SOUNDEX); cap.supportScalarFunction(ScalarFunctionCapability.SPACE); - cap.supportScalarFunction(ScalarFunctionCapability.SUBSTR); //SUBSTRING - //TRANSLATE, + cap.supportScalarFunction(ScalarFunctionCapability.SUBSTR); // SUBSTRING + // TRANSLATE, cap.supportScalarFunction(ScalarFunctionCapability.TRIM); cap.supportScalarFunction(ScalarFunctionCapability.UNICODE); - //UNICODECHR, + // UNICODECHR, cap.supportScalarFunction(ScalarFunctionCapability.UPPER); - - + // Date/Time Functions - - - // the following functions are translated to DATEADD(datepart,number,date) in Visitor - cap.supportScalarFunction(ScalarFunctionCapability.ADD_DAYS); - cap.supportScalarFunction(ScalarFunctionCapability.ADD_HOURS); + + // the following functions are translated to DATEADD(datepart,number,date) in + // Visitor + 
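To make the comment above concrete: each ADD_* function listed next is emitted as a DATEADD call with the matching datepart. A hypothetical sketch of the translation (not the adapter's visitor code):

```java
// Hypothetical sketch: ADD_DAYS(col, 3) -> DATEADD(day, 3, col).
public final class SqlServerDateAddSketch {
    static String rewrite(final String datepart, final long number, final String dateExpression) {
        return "DATEADD(" + datepart + ", " + number + ", " + dateExpression + ")";
    }

    public static void main(final String[] args) {
        System.out.println(rewrite("day", 3, "[order_date]")); // DATEADD(day, 3, [order_date])
    }
}
```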
cap.supportScalarFunction(ScalarFunctionCapability.ADD_DAYS); + cap.supportScalarFunction(ScalarFunctionCapability.ADD_HOURS); cap.supportScalarFunction(ScalarFunctionCapability.ADD_MINUTES); cap.supportScalarFunction(ScalarFunctionCapability.ADD_MONTHS); cap.supportScalarFunction(ScalarFunctionCapability.ADD_SECONDS); cap.supportScalarFunction(ScalarFunctionCapability.ADD_WEEKS); - cap.supportScalarFunction(ScalarFunctionCapability.ADD_YEARS); - - //CONVERT_TZ, - - cap.supportScalarFunction(ScalarFunctionCapability.CURRENT_DATE); + cap.supportScalarFunction(ScalarFunctionCapability.ADD_YEARS); + + // CONVERT_TZ, + + cap.supportScalarFunction(ScalarFunctionCapability.CURRENT_DATE); cap.supportScalarFunction(ScalarFunctionCapability.CURRENT_TIMESTAMP); - - //DATE_TRUNC, + + // DATE_TRUNC, cap.supportScalarFunction(ScalarFunctionCapability.DAY); - - //the following functions are translated to DATEDIFF in Visitor + + // the following functions are translated to DATEDIFF in Visitor cap.supportScalarFunction(ScalarFunctionCapability.SECONDS_BETWEEN); cap.supportScalarFunction(ScalarFunctionCapability.MINUTES_BETWEEN); cap.supportScalarFunction(ScalarFunctionCapability.HOURS_BETWEEN); cap.supportScalarFunction(ScalarFunctionCapability.DAYS_BETWEEN); cap.supportScalarFunction(ScalarFunctionCapability.MONTHS_BETWEEN); cap.supportScalarFunction(ScalarFunctionCapability.YEARS_BETWEEN); - - + // DBTIMEZONE, // EXTRACT, // LOCALTIMESTAMP, @@ -246,11 +238,10 @@ public Capabilities getCapabilities() { // SESSIONTIMEZONE, cap.supportScalarFunction(ScalarFunctionCapability.SYSDATE); cap.supportScalarFunction(ScalarFunctionCapability.SYSTIMESTAMP); - + // WEEK, - + cap.supportScalarFunction(ScalarFunctionCapability.YEAR); - // Geospatial // - Point Functions @@ -285,20 +276,20 @@ public Capabilities getCapabilities() { cap.supportScalarFunction(ScalarFunctionCapability.ST_DISTANCE); cap.supportScalarFunction(ScalarFunctionCapability.ST_ENVELOPE); cap.supportScalarFunction(ScalarFunctionCapability.ST_EQUALS); - //cap.supportScalarFunction(ScalarFunctionCapability.ST_FORCE2D); + // cap.supportScalarFunction(ScalarFunctionCapability.ST_FORCE2D); cap.supportScalarFunction(ScalarFunctionCapability.ST_GEOMETRYTYPE); cap.supportScalarFunction(ScalarFunctionCapability.ST_INTERSECTION); cap.supportScalarFunction(ScalarFunctionCapability.ST_INTERSECTS); cap.supportScalarFunction(ScalarFunctionCapability.ST_ISEMPTY); cap.supportScalarFunction(ScalarFunctionCapability.ST_ISSIMPLE); cap.supportScalarFunction(ScalarFunctionCapability.ST_OVERLAPS); - //cap.supportScalarFunction(ScalarFunctionCapability.ST_SETSRID); + // cap.supportScalarFunction(ScalarFunctionCapability.ST_SETSRID); cap.supportScalarFunction(ScalarFunctionCapability.ST_SYMDIFFERENCE); cap.supportScalarFunction(ScalarFunctionCapability.ST_TOUCHES); - //cap.supportScalarFunction(ScalarFunctionCapability.ST_TRANSFORM); + // cap.supportScalarFunction(ScalarFunctionCapability.ST_TRANSFORM); cap.supportScalarFunction(ScalarFunctionCapability.ST_UNION); cap.supportScalarFunction(ScalarFunctionCapability.ST_WITHIN); - + // Conversion functions // CAST, // Has alias CONVERT // IS_NUMBER @@ -313,7 +304,7 @@ public Capabilities getCapabilities() { // TO_YMINTERVAL, // TO_NUMBER, // TO_TIMESTAMP, - + // Bitwise functions cap.supportScalarFunction(ScalarFunctionCapability.BIT_AND); // BIT_CHECK, @@ -329,186 +320,179 @@ public Capabilities getCapabilities() { // CURRENT_SESSION, // CURRENT_STATEMENT, // CURRENT_USER, - 
cap.supportScalarFunction(ScalarFunctionCapability.HASH_MD5); //translated to HASHBYTES - cap.supportScalarFunction(ScalarFunctionCapability.HASH_SHA); //translated to HASHBYTES - cap.supportScalarFunction(ScalarFunctionCapability.HASH_SHA1); //translated to HASHBYTES -// HASH_TIGER, - cap.supportScalarFunction(ScalarFunctionCapability.NULLIFZERO); //alias NULLIF + cap.supportScalarFunction(ScalarFunctionCapability.HASH_MD5); // translated to HASHBYTES + cap.supportScalarFunction(ScalarFunctionCapability.HASH_SHA); // translated to HASHBYTES + cap.supportScalarFunction(ScalarFunctionCapability.HASH_SHA1); // translated to HASHBYTES +// HASH_TIGER, + cap.supportScalarFunction(ScalarFunctionCapability.NULLIFZERO); // alias NULLIF // SYS_GUID, - cap.supportScalarFunction(ScalarFunctionCapability.ZEROIFNULL); //translated to ISNULL(exp1, exp2) in Visitor + cap.supportScalarFunction(ScalarFunctionCapability.ZEROIFNULL); // translated to ISNULL(exp1, exp2) in Visitor return cap; - } + } - @Override - public DataType dialectSpecificMapJdbcType(JdbcTypeDescription jdbcTypeDescription) throws SQLException { + public DataType dialectSpecificMapJdbcType(final JdbcTypeDescription jdbcTypeDescription) throws SQLException { DataType colType = null; - int jdbcType = jdbcTypeDescription.getJdbcType(); - String columnTypeName = jdbcTypeDescription.getTypeName(); - + final int jdbcType = jdbcTypeDescription.getJdbcType(); + final String columnTypeName = jdbcTypeDescription.getTypeName(); + switch (jdbcType) { - - case Types.VARCHAR: //the JTDS JDBC Type for date, time, datetime2, datetimeoffset is 12 - if(columnTypeName.equalsIgnoreCase("date")) { - colType = DataType.createDate(); - } - else if(columnTypeName.equalsIgnoreCase("datetime2")) { - colType = DataType.createTimestamp(false); - } - - //note: time and datetimeoffset are converted to varchar by default mapping - - break; - case Types.TIME: - colType = DataType.createVarChar(21, DataType.ExaCharset.UTF8); - break; - case 2013: //Types.TIME_WITH_TIMEZONE is Java 1.8 specific - colType = DataType.createVarChar(21, DataType.ExaCharset.UTF8); - break; - case Types.NUMERIC: - int decimalPrec = jdbcTypeDescription.getPrecisionOrSize(); - int decimalScale = jdbcTypeDescription.getDecimalScale(); - - if (decimalPrec <= DataType.maxExasolDecimalPrecision) { - colType = DataType.createDecimal(decimalPrec, decimalScale); - } else { - colType = DataType.createDouble(); - } - break; - case Types.OTHER: - - //TODO - colType = DataType.createVarChar(SqlServerSqlDialect.maxSqlServerVarcharSize, DataType.ExaCharset.UTF8); - break; - - case Types.SQLXML: - - colType = DataType.createVarChar(SqlServerSqlDialect.maxSqlServerVarcharSize, DataType.ExaCharset.UTF8); - break; - - case Types.CLOB: //xml type in SQL Server - - colType = DataType.createVarChar(SqlServerSqlDialect.maxSqlServerNVarcharSize, DataType.ExaCharset.UTF8); - break; - - case Types.BLOB: - if(columnTypeName.equalsIgnoreCase("hierarchyid")) { - colType = DataType.createVarChar(4000, DataType.ExaCharset.UTF8); - } - if(columnTypeName.equalsIgnoreCase("geometry")) { - colType = DataType.createVarChar(SqlServerSqlDialect.maxSqlServerVarcharSize, DataType.ExaCharset.UTF8); - } - else{ - colType = DataType.createVarChar(100, DataType.ExaCharset.UTF8); - } - break; - case Types.VARBINARY: - case Types.BINARY: - colType = DataType.createVarChar(100, DataType.ExaCharset.UTF8); - break; - case Types.DISTINCT: - colType = DataType.createVarChar(100, DataType.ExaCharset.UTF8); - break; + + case Types.VARCHAR: // 
the JTDS JDBC Type for date, time, datetime2, datetimeoffset is 12
+            if (columnTypeName.equalsIgnoreCase("date")) {
+                colType = DataType.createDate();
+            } else if (columnTypeName.equalsIgnoreCase("datetime2")) {
+                colType = DataType.createTimestamp(false);
+            }
+
+            // note: time and datetimeoffset are converted to varchar by default mapping
+
+            break;
+        case Types.TIME:
+            colType = DataType.createVarChar(21, DataType.ExaCharset.UTF8);
+            break;
+        case 2013: // Types.TIME_WITH_TIMEZONE is Java 1.8 specific
+            colType = DataType.createVarChar(21, DataType.ExaCharset.UTF8);
+            break;
+        case Types.NUMERIC:
+            final int decimalPrec = jdbcTypeDescription.getPrecisionOrSize();
+            final int decimalScale = jdbcTypeDescription.getDecimalScale();
+
+            if (decimalPrec <= DataType.maxExasolDecimalPrecision) {
+                colType = DataType.createDecimal(decimalPrec, decimalScale);
+            } else {
+                colType = DataType.createDouble();
+            }
+            break;
+        case Types.OTHER:
+
+            // TODO
+            colType = DataType.createVarChar(SqlServerSqlDialect.maxSqlServerVarcharSize, DataType.ExaCharset.UTF8);
+            break;
+
+        case Types.SQLXML:
+
+            colType = DataType.createVarChar(SqlServerSqlDialect.maxSqlServerVarcharSize, DataType.ExaCharset.UTF8);
+            break;
+
+        case Types.CLOB: // xml type in SQL Server
+
+            colType = DataType.createVarChar(SqlServerSqlDialect.maxSqlServerNVarcharSize, DataType.ExaCharset.UTF8);
+            break;
+
+        case Types.BLOB:
+            if (columnTypeName.equalsIgnoreCase("hierarchyid")) {
+                colType = DataType.createVarChar(4000, DataType.ExaCharset.UTF8);
+            } else if (columnTypeName.equalsIgnoreCase("geometry")) {
+                // "else if" so the hierarchyid mapping is not overwritten by the fallback branch below
+                colType = DataType.createVarChar(SqlServerSqlDialect.maxSqlServerVarcharSize, DataType.ExaCharset.UTF8);
+            } else {
+                colType = DataType.createVarChar(100, DataType.ExaCharset.UTF8);
+            }
+            break;
+        case Types.VARBINARY:
+        case Types.BINARY:
+            colType = DataType.createVarChar(100, DataType.ExaCharset.UTF8);
+            break;
+        case Types.DISTINCT:
+            colType = DataType.createVarChar(100, DataType.ExaCharset.UTF8);
+            break;
        }
        return colType;
    }
-
-
-
-    @Override
+
+    @Override
     public Map<ScalarFunction, String> getScalarFunctionAliases() {
-
-        Map<ScalarFunction, String> scalarAliases = new EnumMap<>(ScalarFunction.class);
-
-        scalarAliases.put(ScalarFunction.ATAN2, "ATN2");
-        scalarAliases.put(ScalarFunction.CEIL, "CEILING");
-        scalarAliases.put(ScalarFunction.CHR, "CHAR");
-        scalarAliases.put(ScalarFunction.LENGTH, "LEN");
-        scalarAliases.put(ScalarFunction.LOCATE, "CHARINDEX");
-        scalarAliases.put(ScalarFunction.REPEAT, "REPLICATE");
-        scalarAliases.put(ScalarFunction.SUBSTR, "SUBSTRING");
-        scalarAliases.put(ScalarFunction.NULLIFZERO, "NULLIF");
-
-        return scalarAliases;
-
-    }
-
-    @Override
+
+        final Map<ScalarFunction, String> scalarAliases = new EnumMap<>(ScalarFunction.class);
+
+        scalarAliases.put(ScalarFunction.ATAN2, "ATN2");
+        scalarAliases.put(ScalarFunction.CEIL, "CEILING");
+        scalarAliases.put(ScalarFunction.CHR, "CHAR");
+        scalarAliases.put(ScalarFunction.LENGTH, "LEN");
+        scalarAliases.put(ScalarFunction.LOCATE, "CHARINDEX");
+        scalarAliases.put(ScalarFunction.REPEAT, "REPLICATE");
+        scalarAliases.put(ScalarFunction.SUBSTR, "SUBSTRING");
+        scalarAliases.put(ScalarFunction.NULLIFZERO, "NULLIF");
+
+        return scalarAliases;
+
+    }
+
+    @Override
     public Map<AggregateFunction, String> getAggregateFunctionAliases() {
-        Map<AggregateFunction, String> aggregationAliases = new EnumMap<>(AggregateFunction.class);
-
+        final Map<AggregateFunction, String> aggregationAliases = new EnumMap<>(AggregateFunction.class);
+
         aggregationAliases.put(AggregateFunction.STDDEV, "STDEV");
         aggregationAliases.put(AggregateFunction.STDDEV_POP, "STDEVP");
-
+
         aggregationAliases.put(AggregateFunction.VARIANCE, "VAR");
- + aggregationAliases.put(AggregateFunction.VAR_POP, "VARP"); - + return aggregationAliases; } - - @Override - public SchemaOrCatalogSupport supportsJdbcCatalogs() { + + @Override + public SchemaOrCatalogSupport supportsJdbcCatalogs() { return SchemaOrCatalogSupport.SUPPORTED; - } + } - @Override - public SchemaOrCatalogSupport supportsJdbcSchemas() { + @Override + public SchemaOrCatalogSupport supportsJdbcSchemas() { return SchemaOrCatalogSupport.SUPPORTED; - } + } - @Override - public SqlGenerationVisitor getSqlGenerationVisitor(SqlGenerationContext context) { + @Override + public SqlGenerationVisitor getSqlGenerationVisitor(final SqlGenerationContext context) { return new SqlServerSqlGenerationVisitor(this, context); } - - @Override - public IdentifierCaseHandling getUnquotedIdentifierHandling() { - return IdentifierCaseHandling.INTERPRET_AS_UPPER; - } - - @Override - public IdentifierCaseHandling getQuotedIdentifierHandling() { + + @Override + public IdentifierCaseHandling getUnquotedIdentifierHandling() { + return IdentifierCaseHandling.INTERPRET_AS_UPPER; + } + + @Override + public IdentifierCaseHandling getQuotedIdentifierHandling() { return IdentifierCaseHandling.INTERPRET_CASE_SENSITIVE; - } - - @Override - public String applyQuote(String identifier) { - return "[" + identifier + "]"; - } - - @Override - public String applyQuoteIfNeeded(String identifier) { - boolean isSimpleIdentifier = identifier.matches("^[A-Z][0-9A-Z_]*"); - if (isSimpleIdentifier) { - return identifier; - } else { - return applyQuote(identifier); - } - } - - @Override - public boolean requiresCatalogQualifiedTableNames( - SqlGenerationContext context) { - return true; - } - - @Override - public boolean requiresSchemaQualifiedTableNames( - SqlGenerationContext context) { - return true; - } - - @Override - public NullSorting getDefaultNullSorting() { - return NullSorting.NULLS_SORTED_AT_START; - } - - @Override - public String getStringLiteral(String value) { - return "'" + value.replace("'", "''") + "'"; - } + } + + @Override + public String applyQuote(final String identifier) { + return "[" + identifier + "]"; + } + + @Override + public String applyQuoteIfNeeded(final String identifier) { + final boolean isSimpleIdentifier = identifier.matches("^[A-Z][0-9A-Z_]*"); + if (isSimpleIdentifier) { + return identifier; + } else { + return applyQuote(identifier); + } + } + + @Override + public boolean requiresCatalogQualifiedTableNames(final SqlGenerationContext context) { + return true; + } + + @Override + public boolean requiresSchemaQualifiedTableNames(final SqlGenerationContext context) { + return true; + } + + @Override + public NullSorting getDefaultNullSorting() { + return NullSorting.NULLS_SORTED_AT_START; + } + + @Override + public String getStringLiteral(final String value) { + return "'" + value.replace("'", "''") + "'"; + } } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/SybaseSqlDialect.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/SybaseSqlDialect.java new file mode 100644 index 000000000..2cafc56a4 --- /dev/null +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/SybaseSqlDialect.java @@ -0,0 +1,506 @@ +package com.exasol.adapter.dialects.impl; + +import java.sql.SQLException; +import java.sql.Types; +import java.util.EnumMap; +import java.util.Map; + +import com.exasol.adapter.capabilities.AggregateFunctionCapability; +import 
com.exasol.adapter.capabilities.Capabilities; +import com.exasol.adapter.capabilities.LiteralCapability; +import com.exasol.adapter.capabilities.MainCapability; +import com.exasol.adapter.capabilities.PredicateCapability; +import com.exasol.adapter.capabilities.ScalarFunctionCapability; +import com.exasol.adapter.dialects.AbstractSqlDialect; +import com.exasol.adapter.dialects.JdbcTypeDescription; +import com.exasol.adapter.dialects.SqlDialectContext; +import com.exasol.adapter.dialects.SqlGenerationContext; +import com.exasol.adapter.dialects.SqlGenerationVisitor; +import com.exasol.adapter.metadata.DataType; +import com.exasol.adapter.sql.AggregateFunction; +import com.exasol.adapter.sql.ScalarFunction; + +public class SybaseSqlDialect extends AbstractSqlDialect { + // The Sybase dialect started as a copy of the SQL Server dialect. + // Tested Sybase version: ASE 16.0 + // Tested JDBC drivers: jtds-1.3.1 (https://sourceforge.net/projects/jtds/) + // Documentation: + // http://infocenter.sybase.com/help/index.jsp?topic=/com.sybase.infocenter.help.ase.16.0/doc/html/title.html + // https://help.sap.com/viewer/p/SAP_ASE + public final static int maxSybaseVarcharSize = 8000; + public final static int maxSybaseNVarcharSize = 4000; + private static final String NAME = "SYBASE"; + + public SybaseSqlDialect(final SqlDialectContext context) { + super(context); + } + + public static String getPublicName() { + return NAME; + } + + @Override + public Capabilities getCapabilities() { + + final Capabilities cap = new Capabilities(); + + cap.supportMainCapability(MainCapability.SELECTLIST_PROJECTION); + cap.supportMainCapability(MainCapability.SELECTLIST_EXPRESSIONS); + cap.supportMainCapability(MainCapability.FILTER_EXPRESSIONS); + cap.supportMainCapability(MainCapability.AGGREGATE_SINGLE_GROUP); + cap.supportMainCapability(MainCapability.AGGREGATE_GROUP_BY_COLUMN); + cap.supportMainCapability(MainCapability.AGGREGATE_GROUP_BY_EXPRESSION); + cap.supportMainCapability(MainCapability.AGGREGATE_GROUP_BY_TUPLE); + cap.supportMainCapability(MainCapability.AGGREGATE_HAVING); + cap.supportMainCapability(MainCapability.ORDER_BY_COLUMN); + cap.supportMainCapability(MainCapability.ORDER_BY_EXPRESSION); + cap.supportMainCapability(MainCapability.LIMIT); // LIMIT will be translated to TOP in + // SybaseSqlGenerationVisitor.java + + // Predicates + cap.supportPredicate(PredicateCapability.AND); + cap.supportPredicate(PredicateCapability.OR); + cap.supportPredicate(PredicateCapability.NOT); + cap.supportPredicate(PredicateCapability.EQUAL); + cap.supportPredicate(PredicateCapability.NOTEQUAL); + cap.supportPredicate(PredicateCapability.LESS); + cap.supportPredicate(PredicateCapability.LESSEQUAL); + cap.supportPredicate(PredicateCapability.LIKE); + cap.supportPredicate(PredicateCapability.LIKE_ESCAPE); + cap.supportPredicate(PredicateCapability.REGEXP_LIKE); + cap.supportPredicate(PredicateCapability.BETWEEN); + cap.supportPredicate(PredicateCapability.IN_CONSTLIST); + cap.supportPredicate(PredicateCapability.IS_NULL); + cap.supportPredicate(PredicateCapability.IS_NOT_NULL); + + // Literals + cap.supportLiteral(LiteralCapability.BOOL); + cap.supportLiteral(LiteralCapability.NULL); + cap.supportLiteral(LiteralCapability.DATE); + cap.supportLiteral(LiteralCapability.TIMESTAMP); + cap.supportLiteral(LiteralCapability.TIMESTAMP_UTC); + cap.supportLiteral(LiteralCapability.DOUBLE); + cap.supportLiteral(LiteralCapability.EXACTNUMERIC); + cap.supportLiteral(LiteralCapability.STRING); + 
cap.supportLiteral(LiteralCapability.INTERVAL); + + // Aggregate functions + cap.supportAggregateFunction(AggregateFunctionCapability.COUNT); + cap.supportAggregateFunction(AggregateFunctionCapability.COUNT_STAR); + cap.supportAggregateFunction(AggregateFunctionCapability.COUNT_DISTINCT); + + cap.supportAggregateFunction(AggregateFunctionCapability.SUM); // works + cap.supportAggregateFunction(AggregateFunctionCapability.SUM_DISTINCT); + cap.supportAggregateFunction(AggregateFunctionCapability.MIN); + cap.supportAggregateFunction(AggregateFunctionCapability.MAX); + cap.supportAggregateFunction(AggregateFunctionCapability.AVG); + cap.supportAggregateFunction(AggregateFunctionCapability.AVG_DISTINCT); + cap.supportAggregateFunction(AggregateFunctionCapability.MEDIAN); + cap.supportAggregateFunction(AggregateFunctionCapability.FIRST_VALUE); + cap.supportAggregateFunction(AggregateFunctionCapability.LAST_VALUE); + + cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV); + cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_DISTINCT); + cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_POP); + cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_POP_DISTINCT); + + // STDDEV_SAMP + // STDDEV_SAMP_DISTINCT + + cap.supportAggregateFunction(AggregateFunctionCapability.VARIANCE); + cap.supportAggregateFunction(AggregateFunctionCapability.VARIANCE_DISTINCT); + + cap.supportAggregateFunction(AggregateFunctionCapability.VAR_POP); + cap.supportAggregateFunction(AggregateFunctionCapability.VAR_POP_DISTINCT); + + // GROUP_CONCAT, + // GROUP_CONCAT_DISTINCT (AggregateFunction.GROUP_CONCAT), + // GROUP_CONCAT_SEPARATOR (AggregateFunction.GROUP_CONCAT), + // GROUP_CONCAT_ORDER_BY (AggregateFunction.GROUP_CONCAT), + // + // GEO_INTERSECTION_AGGREGATE, + // GEO_UNION_AGGREGATE, + // + // APPROXIMATE_COUNT_DISTINCT; + + // Standard Arithmetic Operators + cap.supportScalarFunction(ScalarFunctionCapability.ADD); // works + cap.supportScalarFunction(ScalarFunctionCapability.SUB); + cap.supportScalarFunction(ScalarFunctionCapability.MULT); + cap.supportScalarFunction(ScalarFunctionCapability.FLOAT_DIV); + + // Unary prefix operators + cap.supportScalarFunction(ScalarFunctionCapability.NEG); + + // Numeric functions + // https://msdn.microsoft.com/en-us/library/ms177516(v=sql.110).aspx + cap.supportScalarFunction(ScalarFunctionCapability.ABS); + cap.supportScalarFunction(ScalarFunctionCapability.ACOS); + cap.supportScalarFunction(ScalarFunctionCapability.ASIN); + cap.supportScalarFunction(ScalarFunctionCapability.ATAN); + cap.supportScalarFunction(ScalarFunctionCapability.ATAN2); // added alias ATN2 + cap.supportScalarFunction(ScalarFunctionCapability.CEIL); // alias CEILING + cap.supportScalarFunction(ScalarFunctionCapability.COS); + // COSH + cap.supportScalarFunction(ScalarFunctionCapability.COT); + cap.supportScalarFunction(ScalarFunctionCapability.DEGREES); + // DIV, + cap.supportScalarFunction(ScalarFunctionCapability.EXP); + cap.supportScalarFunction(ScalarFunctionCapability.FLOOR); + // GREATEST, + // LEAST, + // LN, + cap.supportScalarFunction(ScalarFunctionCapability.LOG); + cap.supportScalarFunction(ScalarFunctionCapability.MOD); + cap.supportScalarFunction(ScalarFunctionCapability.POWER); + cap.supportScalarFunction(ScalarFunctionCapability.RADIANS); + cap.supportScalarFunction(ScalarFunctionCapability.RAND); + cap.supportScalarFunction(ScalarFunctionCapability.ROUND); + cap.supportScalarFunction(ScalarFunctionCapability.SIGN); + 
cap.supportScalarFunction(ScalarFunctionCapability.SIN); + // SINH, + cap.supportScalarFunction(ScalarFunctionCapability.SQRT); + cap.supportScalarFunction(ScalarFunctionCapability.TAN); + // TANH, + cap.supportScalarFunction(ScalarFunctionCapability.TRUNC); + + // String Functions + cap.supportScalarFunction(ScalarFunctionCapability.ASCII); + // BIT_LENGTH, + cap.supportScalarFunction(ScalarFunctionCapability.CHR); // CHAR + // COLOGNE_PHONETIC, + cap.supportScalarFunction(ScalarFunctionCapability.CONCAT); + // DUMP, + // EDIT_DISTANCE, + // INSERT, + cap.supportScalarFunction(ScalarFunctionCapability.INSTR); // translated to CHARINDEX in Visitor with Argument + // switch + cap.supportScalarFunction(ScalarFunctionCapability.LENGTH); // alias LEN + cap.supportScalarFunction(ScalarFunctionCapability.LOCATE); // CHARINDEX alias + cap.supportScalarFunction(ScalarFunctionCapability.LOWER); + cap.supportScalarFunction(ScalarFunctionCapability.LPAD); // transformed in Visitor + cap.supportScalarFunction(ScalarFunctionCapability.LTRIM); + // OCTET_LENGTH, + // REGEXP_INSTR, + // REGEXP_REPLACE, + // REGEXP_SUBSTR, + cap.supportScalarFunction(ScalarFunctionCapability.REPEAT); // REPLICATE + cap.supportScalarFunction(ScalarFunctionCapability.REPLACE); + cap.supportScalarFunction(ScalarFunctionCapability.REVERSE); + cap.supportScalarFunction(ScalarFunctionCapability.RIGHT); + cap.supportScalarFunction(ScalarFunctionCapability.RPAD); + cap.supportScalarFunction(ScalarFunctionCapability.RTRIM); + cap.supportScalarFunction(ScalarFunctionCapability.SOUNDEX); + cap.supportScalarFunction(ScalarFunctionCapability.SPACE); + cap.supportScalarFunction(ScalarFunctionCapability.SUBSTR); // SUBSTRING + // TRANSLATE, + cap.supportScalarFunction(ScalarFunctionCapability.TRIM); + cap.supportScalarFunction(ScalarFunctionCapability.UNICODE); + // UNICODECHR, + cap.supportScalarFunction(ScalarFunctionCapability.UPPER); + + // Date/Time Functions + + // the following functions are translated to DATEADD(datepart,number,date) in + // Visitor + cap.supportScalarFunction(ScalarFunctionCapability.ADD_DAYS); + cap.supportScalarFunction(ScalarFunctionCapability.ADD_HOURS); + cap.supportScalarFunction(ScalarFunctionCapability.ADD_MINUTES); + cap.supportScalarFunction(ScalarFunctionCapability.ADD_MONTHS); + cap.supportScalarFunction(ScalarFunctionCapability.ADD_SECONDS); + cap.supportScalarFunction(ScalarFunctionCapability.ADD_WEEKS); + cap.supportScalarFunction(ScalarFunctionCapability.ADD_YEARS); + + // CONVERT_TZ, + + cap.supportScalarFunction(ScalarFunctionCapability.CURRENT_DATE); + cap.supportScalarFunction(ScalarFunctionCapability.CURRENT_TIMESTAMP); + + // DATE_TRUNC, + cap.supportScalarFunction(ScalarFunctionCapability.DAY); + + // the following functions are translated to DATEDIFF in Visitor + cap.supportScalarFunction(ScalarFunctionCapability.SECONDS_BETWEEN); + cap.supportScalarFunction(ScalarFunctionCapability.MINUTES_BETWEEN); + cap.supportScalarFunction(ScalarFunctionCapability.HOURS_BETWEEN); + cap.supportScalarFunction(ScalarFunctionCapability.DAYS_BETWEEN); + cap.supportScalarFunction(ScalarFunctionCapability.MONTHS_BETWEEN); + cap.supportScalarFunction(ScalarFunctionCapability.YEARS_BETWEEN); + +// DBTIMEZONE, +// EXTRACT, +// LOCALTIMESTAMP, +// MINUTE, + + cap.supportScalarFunction(ScalarFunctionCapability.MONTH); + +// NUMTODSINTERVAL, +// NUMTOYMINTERVAL, +// POSIX_TIME, +// SECOND, + +// SESSIONTIMEZONE, + cap.supportScalarFunction(ScalarFunctionCapability.SYSDATE); + 
cap.supportScalarFunction(ScalarFunctionCapability.SYSTIMESTAMP); + +// WEEK, + + cap.supportScalarFunction(ScalarFunctionCapability.YEAR); + + // Geospatial + // - Point Functions + cap.supportScalarFunction(ScalarFunctionCapability.ST_X); + cap.supportScalarFunction(ScalarFunctionCapability.ST_Y); +// // - (Multi-)LineString Functions + cap.supportScalarFunction(ScalarFunctionCapability.ST_ENDPOINT); + cap.supportScalarFunction(ScalarFunctionCapability.ST_ISCLOSED); + cap.supportScalarFunction(ScalarFunctionCapability.ST_ISRING); + cap.supportScalarFunction(ScalarFunctionCapability.ST_LENGTH); + cap.supportScalarFunction(ScalarFunctionCapability.ST_NUMPOINTS); + cap.supportScalarFunction(ScalarFunctionCapability.ST_POINTN); + cap.supportScalarFunction(ScalarFunctionCapability.ST_STARTPOINT); +// // - (Multi-)Polygon Functions + cap.supportScalarFunction(ScalarFunctionCapability.ST_AREA); + cap.supportScalarFunction(ScalarFunctionCapability.ST_EXTERIORRING); + cap.supportScalarFunction(ScalarFunctionCapability.ST_INTERIORRINGN); + cap.supportScalarFunction(ScalarFunctionCapability.ST_NUMINTERIORRINGS); +// // - GeometryCollection Functions + cap.supportScalarFunction(ScalarFunctionCapability.ST_GEOMETRYN); + cap.supportScalarFunction(ScalarFunctionCapability.ST_NUMGEOMETRIES); +// // - General Functions + cap.supportScalarFunction(ScalarFunctionCapability.ST_BOUNDARY); + cap.supportScalarFunction(ScalarFunctionCapability.ST_BUFFER); + cap.supportScalarFunction(ScalarFunctionCapability.ST_CENTROID); + cap.supportScalarFunction(ScalarFunctionCapability.ST_CONTAINS); + cap.supportScalarFunction(ScalarFunctionCapability.ST_CONVEXHULL); + cap.supportScalarFunction(ScalarFunctionCapability.ST_CROSSES); + cap.supportScalarFunction(ScalarFunctionCapability.ST_DIFFERENCE); + cap.supportScalarFunction(ScalarFunctionCapability.ST_DIMENSION); + cap.supportScalarFunction(ScalarFunctionCapability.ST_DISJOINT); + cap.supportScalarFunction(ScalarFunctionCapability.ST_DISTANCE); + cap.supportScalarFunction(ScalarFunctionCapability.ST_ENVELOPE); + cap.supportScalarFunction(ScalarFunctionCapability.ST_EQUALS); + // cap.supportScalarFunction(ScalarFunctionCapability.ST_FORCE2D); + cap.supportScalarFunction(ScalarFunctionCapability.ST_GEOMETRYTYPE); + cap.supportScalarFunction(ScalarFunctionCapability.ST_INTERSECTION); + cap.supportScalarFunction(ScalarFunctionCapability.ST_INTERSECTS); + cap.supportScalarFunction(ScalarFunctionCapability.ST_ISEMPTY); + cap.supportScalarFunction(ScalarFunctionCapability.ST_ISSIMPLE); + cap.supportScalarFunction(ScalarFunctionCapability.ST_OVERLAPS); + // cap.supportScalarFunction(ScalarFunctionCapability.ST_SETSRID); + cap.supportScalarFunction(ScalarFunctionCapability.ST_SYMDIFFERENCE); + cap.supportScalarFunction(ScalarFunctionCapability.ST_TOUCHES); + // cap.supportScalarFunction(ScalarFunctionCapability.ST_TRANSFORM); + cap.supportScalarFunction(ScalarFunctionCapability.ST_UNION); + cap.supportScalarFunction(ScalarFunctionCapability.ST_WITHIN); + + // Conversion functions +// CAST, // Has alias CONVERT +// IS_NUMBER +// IS_BOOLEAN, +// IS_DATE, +// IS_DSINTERVAL, +// IS_YMINTERVAL, +// IS_TIMESTAMP, +// TO_CHAR, +// TO_DATE, +// TO_DSINTERVAL, +// TO_YMINTERVAL, +// TO_NUMBER, +// TO_TIMESTAMP, + + // Bitwise functions + cap.supportScalarFunction(ScalarFunctionCapability.BIT_AND); +// BIT_CHECK, + cap.supportScalarFunction(ScalarFunctionCapability.BIT_NOT); + cap.supportScalarFunction(ScalarFunctionCapability.BIT_OR); +// BIT_SET, +// BIT_TO_NUM, + 
cap.supportScalarFunction(ScalarFunctionCapability.BIT_XOR);
+
+        // Other functions
+        cap.supportScalarFunction(ScalarFunctionCapability.CASE);
+//        CURRENT_SCHEMA,
+//        CURRENT_SESSION,
+//        CURRENT_STATEMENT,
+//        CURRENT_USER,
+        cap.supportScalarFunction(ScalarFunctionCapability.HASH_MD5); // translated to HASHBYTES
+        cap.supportScalarFunction(ScalarFunctionCapability.HASH_SHA); // translated to HASHBYTES
+        cap.supportScalarFunction(ScalarFunctionCapability.HASH_SHA1); // translated to HASHBYTES
+//        HASH_TIGER,
+        cap.supportScalarFunction(ScalarFunctionCapability.NULLIFZERO); // alias NULLIF
+//        SYS_GUID,
+        cap.supportScalarFunction(ScalarFunctionCapability.ZEROIFNULL); // translated to ISNULL(exp1, exp2) in Visitor
+
+        return cap;
+    }
+
+    @Override
+    public DataType dialectSpecificMapJdbcType(final JdbcTypeDescription jdbcTypeDescription) throws SQLException {
+        DataType colType = null;
+        final int jdbcType = jdbcTypeDescription.getJdbcType();
+        final String columnTypeName = jdbcTypeDescription.getTypeName();
+
+        switch (jdbcType) {
+
+        case Types.VARCHAR: // the JTDS JDBC Type for date, time, datetime2, datetimeoffset is 12
+            if (columnTypeName.equalsIgnoreCase("date")) {
+                colType = DataType.createDate();
+            } else if (columnTypeName.equalsIgnoreCase("datetime2")) {
+                colType = DataType.createTimestamp(false);
+            }
+
+            // note: time and datetimeoffset are converted to varchar by the default mapping
+
+            break;
+        case Types.TIME:
+            colType = DataType.createVarChar(21, DataType.ExaCharset.UTF8);
+            break;
+        case 2013: // Types.TIME_WITH_TIMEZONE is Java 1.8 specific
+            colType = DataType.createVarChar(21, DataType.ExaCharset.UTF8);
+            break;
+        case Types.DATE:
+            colType = DataType.createDate();
+            break;
+        case Types.NUMERIC:
+        case Types.DECIMAL:
+            final int decimalPrec = jdbcTypeDescription.getPrecisionOrSize();
+            final int decimalScale = jdbcTypeDescription.getDecimalScale();
+
+            if (decimalPrec <= DataType.maxExasolDecimalPrecision) {
+                colType = DataType.createDecimal(decimalPrec, decimalScale);
+            } else {
+                int size = decimalPrec + 1;
+                if (decimalScale > 0) {
+                    size++;
+                }
+                colType = DataType.createVarChar(size, DataType.ExaCharset.UTF8);
+            }
+            break;
+        case Types.OTHER:
+            // TODO: no precise mapping known yet, fall back to VARCHAR
+            colType = DataType.createVarChar(SybaseSqlDialect.maxSybaseVarcharSize, DataType.ExaCharset.UTF8);
+            break;
+
+        case Types.SQLXML:
+            colType = DataType.createVarChar(SybaseSqlDialect.maxSybaseVarcharSize, DataType.ExaCharset.UTF8);
+            break;
+
+        case Types.CLOB: // TEXT and UNITEXT types in Sybase
+            colType = DataType.createVarChar(DataType.maxExasolVarcharSize, DataType.ExaCharset.UTF8);
+            break;
+
+        case Types.BLOB:
+            if (columnTypeName.equalsIgnoreCase("hierarchyid")) {
+                colType = DataType.createVarChar(4000, DataType.ExaCharset.UTF8);
+            } else if (columnTypeName.equalsIgnoreCase("geometry")) {
+                // "else if" keeps the hierarchyid mapping from being overwritten by the fallback
+                colType = DataType.createVarChar(SybaseSqlDialect.maxSybaseVarcharSize, DataType.ExaCharset.UTF8);
+            } else {
+                colType = DataType.createVarChar(100, DataType.ExaCharset.UTF8);
+            }
+            break;
+        case Types.DISTINCT:
+            colType = DataType.createVarChar(100, DataType.ExaCharset.UTF8);
+            break;
+        }
+        return colType;
+    }
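+
+    // Worked example for the DECIMAL fallback above (illustrative figures): a
+    // Sybase column declared NUMERIC(38,10) exceeds Exasol's maximum DECIMAL
+    // precision of 36, so it is mapped to VARCHAR(40) -- 38 digits plus one
+    // character for the sign and one for the decimal point.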
+
+    @Override
+    public Map<ScalarFunction, String> getScalarFunctionAliases() {
+
+        final Map<ScalarFunction, String> scalarAliases = new EnumMap<>(ScalarFunction.class);
+
+        scalarAliases.put(ScalarFunction.ATAN2, "ATN2");
+        scalarAliases.put(ScalarFunction.CEIL, "CEILING");
+        scalarAliases.put(ScalarFunction.CHR, "CHAR");
+        scalarAliases.put(ScalarFunction.LENGTH, "LEN");
+        scalarAliases.put(ScalarFunction.LOCATE, "CHARINDEX");
+        scalarAliases.put(ScalarFunction.REPEAT, "REPLICATE");
+        scalarAliases.put(ScalarFunction.SUBSTR, "SUBSTRING");
+        scalarAliases.put(ScalarFunction.NULLIFZERO, "NULLIF");
+
+        return scalarAliases;
+
+    }
+
+    @Override
+    public Map<AggregateFunction, String> getAggregateFunctionAliases() {
+        final Map<AggregateFunction, String> aggregationAliases = new EnumMap<>(AggregateFunction.class);
+
+        aggregationAliases.put(AggregateFunction.STDDEV, "STDEV");
+        aggregationAliases.put(AggregateFunction.STDDEV_POP, "STDEVP");
+        aggregationAliases.put(AggregateFunction.VARIANCE, "VAR");
+        aggregationAliases.put(AggregateFunction.VAR_POP, "VARP");
+
+        return aggregationAliases;
+    }
+
+    @Override
+    public SchemaOrCatalogSupport supportsJdbcCatalogs() {
+        return SchemaOrCatalogSupport.SUPPORTED;
+    }
+
+    @Override
+    public SchemaOrCatalogSupport supportsJdbcSchemas() {
+        return SchemaOrCatalogSupport.SUPPORTED;
+    }
+
+    @Override
+    public SqlGenerationVisitor getSqlGenerationVisitor(final SqlGenerationContext context) {
+        return new SybaseSqlGenerationVisitor(this, context);
+    }
+
+    @Override
+    public IdentifierCaseHandling getUnquotedIdentifierHandling() {
+        return IdentifierCaseHandling.INTERPRET_AS_UPPER;
+    }
+
+    @Override
+    public IdentifierCaseHandling getQuotedIdentifierHandling() {
+        return IdentifierCaseHandling.INTERPRET_CASE_SENSITIVE;
+    }
+
+    @Override
+    public String applyQuote(final String identifier) {
+        return "[" + identifier + "]";
+    }
+
+    @Override
+    public String applyQuoteIfNeeded(final String identifier) {
+        final boolean isSimpleIdentifier = identifier.matches("^[A-Z][0-9A-Z_]*");
+        if (isSimpleIdentifier) {
+            return identifier;
+        } else {
+            return applyQuote(identifier);
+        }
+    }
+
+    @Override
+    public boolean requiresCatalogQualifiedTableNames(final SqlGenerationContext context) {
+        return true;
+    }
+
+    @Override
+    public boolean requiresSchemaQualifiedTableNames(final SqlGenerationContext context) {
+        return true;
+    }
+
+    @Override
+    public NullSorting getDefaultNullSorting() {
+        return NullSorting.NULLS_SORTED_LOW;
+    }
+
+    @Override
+    public String getStringLiteral(final String value) {
+        return "'" + value.replace("'", "''") + "'";
+    }
+
+}
diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/SybaseSqlGenerationVisitor.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/SybaseSqlGenerationVisitor.java
new file mode 100644
index 000000000..c58b2d512
--- /dev/null
+++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/SybaseSqlGenerationVisitor.java
@@ -0,0 +1,634 @@
+package com.exasol.adapter.dialects.impl;
+
+import com.exasol.adapter.AdapterException;
+import com.exasol.adapter.dialects.SqlDialect;
+import com.exasol.adapter.dialects.SqlGenerationContext;
+import com.exasol.adapter.dialects.SqlGenerationVisitor;
+import com.exasol.adapter.jdbc.ColumnAdapterNotes;
+import com.exasol.adapter.metadata.ColumnMetadata;
+import com.exasol.adapter.sql.*;
+import com.google.common.base.Joiner;
+import com.google.common.collect.ImmutableList;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class SybaseSqlGenerationVisitor extends SqlGenerationVisitor {
+
+    public SybaseSqlGenerationVisitor(SqlDialect dialect, SqlGenerationContext context) {
+        super(dialect, context);
+    }
+
+    @Override
+    public String visit(SqlSelectList selectList) throws AdapterException {
+        if (selectList.isRequestAnyColumn()) {
+            // The system requested any column
+            return "true";
+        }
+        List<String> selectListElements = new ArrayList<>();
+        if (selectList.isSelectStar()) {
+            if (selectListRequiresCasts(selectList)) {
+                // Do as if the user has all columns in select list
+                SqlStatementSelect select = (SqlStatementSelect) selectList.getParent();
+
+                int columnId = 0;
+                for (ColumnMetadata columnMeta : select.getFromClause().getMetadata().getColumns()) {
+                    SqlColumn sqlColumn = new SqlColumn(columnId, columnMeta);
+                    selectListElements.add(getColumnProjectionStringNoCheck(sqlColumn, super.visit(sqlColumn)));
+                    ++columnId;
+                }
+            } else {
+                selectListElements.add("*");
+            }
+        } else {
+            for (SqlNode node : selectList.getExpressions()) {
+                selectListElements.add(node.accept(this));
+            }
+        }
+
+        return Joiner.on(", ").join(selectListElements);
+    }
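+
+    // Illustration of the select-star expansion above (hypothetical table
+    // T(id INT, doc TEXT)): SELECT * FROM T cannot be pushed down verbatim,
+    // because the TEXT column needs a cast, so the select list is expanded to
+    // the equivalent of SELECT [id], CAST([doc] as NVARCHAR(4000)) FROM T.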
+
+    @Override
+    public String visit(SqlStatementSelect select) throws AdapterException {
+        if (!select.hasLimit()) {
+            return super.visit(select);
+        } else {
+            SqlLimit limit = select.getLimit();
+
+            StringBuilder sql = new StringBuilder();
+            sql.append("SELECT TOP " + limit.getLimit() + " ");
+
+            sql.append(select.getSelectList().accept(this));
+            sql.append(" FROM ");
+            sql.append(select.getFromClause().accept(this));
+            if (select.hasFilter()) {
+                sql.append(" WHERE ");
+                sql.append(select.getWhereClause().accept(this));
+            }
+            if (select.hasGroupBy()) {
+                sql.append(" GROUP BY ");
+                sql.append(select.getGroupBy().accept(this));
+            }
+            if (select.hasHaving()) {
+                sql.append(" HAVING ");
+                sql.append(select.getHaving().accept(this));
+            }
+            if (select.hasOrderBy()) {
+                sql.append(" ");
+                sql.append(select.getOrderBy().accept(this));
+            }
+
+            return sql.toString();
+        }
+    }
+
+    @Override
+    public String visit(SqlColumn column) throws AdapterException {
+        return getColumnProjectionString(column, super.visit(column));
+    }
+
+    private String getColumnProjectionString(SqlColumn column, String projString) throws AdapterException {
+        boolean isDirectlyInSelectList = (column.hasParent() && column.getParent().getType() == SqlNodeType.SELECT_LIST);
+        if (!isDirectlyInSelectList) {
+            return projString;
+        }
+        String typeName = ColumnAdapterNotes.deserialize(column.getMetadata().getAdapterNotes(),
+                column.getMetadata().getName()).getTypeName();
+        return getColumnProjectionStringNoCheckImpl(typeName, column, projString);
+    }
+
+    private String getColumnProjectionStringNoCheck(SqlColumn column, String projString) throws AdapterException {
+        String typeName = ColumnAdapterNotes.deserialize(column.getMetadata().getAdapterNotes(),
+                column.getMetadata().getName()).getTypeName();
+        return getColumnProjectionStringNoCheckImpl(typeName, column, projString);
+    }
+
+    private String getColumnProjectionStringNoCheckImpl(String typeName, SqlColumn column, String projString) {
+        if (typeName.startsWith("text")) {
+            projString = "CAST(" + projString + " as NVARCHAR(" + SybaseSqlDialect.maxSybaseNVarcharSize + ") )";
+        } else if (typeName.equals("time")) {
+            projString = "CONVERT(VARCHAR(12), " + projString + ", 137)";
+        } else if (typeName.equals("bigtime")) {
+            projString = "CONVERT(VARCHAR(16), " + projString + ", 137)";
+        } else if (typeName.startsWith("xml")) {
+            projString = "CAST(" + projString + " as NVARCHAR(" + SybaseSqlDialect.maxSybaseNVarcharSize + ") )";
+        } else if (TYPE_NAME_NOT_SUPPORTED.contains(typeName)) {
+            projString = "'" + typeName + " NOT SUPPORTED'"; // returning a string constant for unsupported data types
+        }
+
+        return projString;
+    }
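+
+    // Note: for the type names in TYPE_NAME_NOT_SUPPORTED below (varbinary,
+    // binary, image) the projection collapses to the string constant
+    // '<typename> NOT SUPPORTED', so such columns remain selectable but do not
+    // deliver any real data.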
+
+    private static final List<String> TYPE_NAMES_REQUIRING_CAST =
+            ImmutableList.of("text", "time", "bigtime", "xml");
+
+    private static final List<String> TYPE_NAME_NOT_SUPPORTED = ImmutableList.of("varbinary", "binary", "image");
+
+    private boolean nodeRequiresCast(SqlNode node) throws AdapterException {
+        if (node.getType() == SqlNodeType.COLUMN) {
+            SqlColumn column = (SqlColumn) node;
+            String typeName = ColumnAdapterNotes.deserialize(column.getMetadata().getAdapterNotes(),
+                    column.getMetadata().getName()).getTypeName();
+            return TYPE_NAMES_REQUIRING_CAST.contains(typeName) || TYPE_NAME_NOT_SUPPORTED.contains(typeName);
+        }
+        return false;
+    }
+
+    private boolean selectListRequiresCasts(SqlSelectList selectList) throws AdapterException {
+        boolean requiresCasts = false;
+
+        // Do as if the user has all columns in select list
+        SqlStatementSelect select = (SqlStatementSelect) selectList.getParent();
+        int columnId = 0;
+        for (ColumnMetadata columnMeta : select.getFromClause().getMetadata().getColumns()) {
+            if (nodeRequiresCast(new SqlColumn(columnId, columnMeta))) {
+                requiresCasts = true;
+            }
+            ++columnId; // without the increment every column would be checked as column 0
+        }
+
+        return requiresCasts;
+    }
+
+    @Override
+    public String visit(SqlFunctionScalar function) throws AdapterException {
+
+        String sql = super.visit(function);
+        List<String> argumentsSql = new ArrayList<>();
+        for (SqlNode node : function.getArguments()) {
+            argumentsSql.add(node.accept(this));
+        }
+        StringBuilder builder = new StringBuilder();
+
+        switch (function.getFunction()) {
+        case INSTR: {
+            // INSTR(string, search[, position]) maps to CHARINDEX(search, string[, position])
+            builder.append("CHARINDEX(");
+            builder.append(argumentsSql.get(1));
+            builder.append(", ");
+            builder.append(argumentsSql.get(0));
+            if (argumentsSql.size() > 2) {
+                builder.append(", ");
+                builder.append(argumentsSql.get(2));
+            }
+            builder.append(")");
+            sql = builder.toString();
+            break;
+        }
+
+        case LPAD: { // RIGHT(REPLICATE(pad_char, length) + LEFT(string, length), length)
+            String padChar = "' '";
+            if (argumentsSql.size() > 2) {
+                padChar = argumentsSql.get(2);
+            }
+
+            String string = argumentsSql.get(0);
+            String length = argumentsSql.get(1);
+
+            builder.append("RIGHT ( REPLICATE(");
+            builder.append(padChar);
+            builder.append(",");
+            builder.append(length);
+            builder.append(") + LEFT(");
+            builder.append(string);
+            builder.append(",");
+            builder.append(length);
+            builder.append("),");
+            builder.append(length);
+            builder.append(")");
+            sql = builder.toString();
+            break;
+        }
+
+        case RPAD: { // LEFT(RIGHT(string, length) + REPLICATE(pad_char, length), length)
+            String padChar = "' '";
+            if (argumentsSql.size() > 2) {
+                padChar = argumentsSql.get(2);
+            }
+
+            String string = argumentsSql.get(0);
+            String length = argumentsSql.get(1);
+
+            builder.append("LEFT(RIGHT(");
+            builder.append(string);
+            builder.append(",");
+            builder.append(length);
+            builder.append(") + REPLICATE(");
+            builder.append(padChar);
+            builder.append(",");
+            builder.append(length);
+            builder.append("),");
+            builder.append(length);
+            builder.append(")");
+            sql = builder.toString();
+            break;
+        }
+        case ADD_DAYS:
+        case ADD_HOURS:
+        case ADD_MINUTES:
+        case ADD_MONTHS:
+        case ADD_SECONDS:
+        case ADD_WEEKS:
+        case ADD_YEARS: { // DATEADD(datepart,number,date)
+
+            builder.append("DATEADD(");
+
+            switch (function.getFunction()) {
+            case ADD_DAYS:
+                builder.append("DAY");
+                break;
+            case ADD_HOURS:
+                builder.append("HOUR");
+                break;
+            case ADD_MINUTES:
+                builder.append("MINUTE");
+                break;
+            case ADD_MONTHS:
+                // ADD_MONTHS is declared as a supported capability, so it needs a
+                // datepart here as well; without this case it would be pushed down
+                // untranslated
+                builder.append("MONTH");
+                break;
+            case ADD_SECONDS:
+                builder.append("SECOND");
+                break;
+            case ADD_WEEKS:
+                builder.append("WEEK");
+                break;
+            case ADD_YEARS:
+                builder.append("YEAR");
+                break;
+            default:
+                break;
+            }
+
+            builder.append(",");
+            builder.append(argumentsSql.get(1));
+            builder.append(",");
+            builder.append(
argumentsSql.get(0) ); + builder.append(")"); + sql = builder.toString(); + break; + } + case SECONDS_BETWEEN: + case MINUTES_BETWEEN: + case HOURS_BETWEEN: + case DAYS_BETWEEN: + case MONTHS_BETWEEN: + case YEARS_BETWEEN: { + + builder.append("DATEDIFF("); + + switch (function.getFunction()) { + case SECONDS_BETWEEN: + builder.append("SECOND"); + break; + case MINUTES_BETWEEN: + builder.append("MINUTE"); + break; + case HOURS_BETWEEN: + builder.append("HOUR"); + break; + case DAYS_BETWEEN: + builder.append("DAY"); + break; + case MONTHS_BETWEEN: + builder.append("MONTH"); + break; + case YEARS_BETWEEN: + builder.append("YEAR"); + break; + default: + break; + } + + builder.append(","); + builder.append( argumentsSql.get(1) ); + builder.append(","); + builder.append( argumentsSql.get(0) ); + builder.append(")"); + sql = builder.toString(); + break; + } + case CURRENT_DATE: + sql = "CAST( GETDATE() AS DATE)"; + break; + + case CURRENT_TIMESTAMP: + sql = "GETDATE()"; + break; + + case SYSDATE: + sql = "CAST( SYSDATETIME() AS DATE)"; + break; + + case SYSTIMESTAMP: + sql = "SYSDATETIME()"; + break; + + + case ST_X: + builder.append(argumentsSql.get(0)+".STX") ; + sql = builder.toString(); + break; + + case ST_Y: + builder.append(argumentsSql.get(0)+".STY") ; + sql = builder.toString(); + break; + + case ST_ENDPOINT: + builder.append("CAST("); + builder.append(argumentsSql.get(0)+".STEndPoint()") ; + builder.append("as VARCHAR("+SybaseSqlDialect.maxSybaseVarcharSize+") )"); + sql = builder.toString(); + break; + + case ST_ISCLOSED: + builder.append(argumentsSql.get(0)+".STIsClosed()") ; + sql = builder.toString(); + break; + + case ST_ISRING: + builder.append(argumentsSql.get(0)+".STIsRing()") ; + sql = builder.toString(); + break; + + case ST_LENGTH: + builder.append(argumentsSql.get(0)+".STLength()") ; + sql = builder.toString(); + break; + + case ST_NUMPOINTS: + builder.append(argumentsSql.get(0)+".STNumPoints()") ; + sql = builder.toString(); + break; + + case ST_POINTN: + builder.append("CAST("); + builder.append(argumentsSql.get(0)+".STPointN("+argumentsSql.get(1)+")") ; + builder.append("as VARCHAR("+SybaseSqlDialect.maxSybaseVarcharSize+") )"); + sql = builder.toString(); + break; + + case ST_STARTPOINT: + builder.append("CAST("); + builder.append(argumentsSql.get(0)+".STStartPoint()") ; + builder.append("as VARCHAR("+SybaseSqlDialect.maxSybaseVarcharSize+") )"); + sql = builder.toString(); + break; + + case ST_AREA: + builder.append(argumentsSql.get(0)+".STArea()") ; + sql = builder.toString(); + break; + + case ST_EXTERIORRING: + builder.append("CAST("); + builder.append(argumentsSql.get(0)+".STExteriorRing()") ; + builder.append("as VARCHAR("+SybaseSqlDialect.maxSybaseVarcharSize+") )"); + sql = builder.toString(); + break; + + case ST_INTERIORRINGN: + builder.append("CAST("); + builder.append(argumentsSql.get(0)+".STInteriorRingN ("+argumentsSql.get(1)+")") ; + builder.append("as VARCHAR("+SybaseSqlDialect.maxSybaseVarcharSize+") )"); + sql = builder.toString(); + break; + + case ST_NUMINTERIORRINGS: + builder.append(argumentsSql.get(0)+".STNumInteriorRing()") ; + sql = builder.toString(); + break; + + case ST_GEOMETRYN: + builder.append("CAST("); + builder.append(argumentsSql.get(0)+".STGeometryN("+argumentsSql.get(1)+")") ; + builder.append("as VARCHAR("+SybaseSqlDialect.maxSybaseVarcharSize+") )"); + sql = builder.toString(); + break; + + case ST_NUMGEOMETRIES: + builder.append(argumentsSql.get(0)+".STNumGeometries()") ; + sql = builder.toString(); + break; + + case ST_BOUNDARY: 
+ builder.append("CAST("); + builder.append(argumentsSql.get(0)+".STBoundary()") ; + builder.append("as VARCHAR("+SybaseSqlDialect.maxSybaseVarcharSize+") )"); + sql = builder.toString(); + break; + + case ST_BUFFER: + builder.append("CAST("); + builder.append(argumentsSql.get(0)+".STBuffer("+argumentsSql.get(1)+")") ; + builder.append("as VARCHAR("+SybaseSqlDialect.maxSybaseVarcharSize+") )"); + sql = builder.toString(); + break; + + case ST_CENTROID: + builder.append("CAST("); + builder.append(argumentsSql.get(0)+".STCentroid()") ; + builder.append("as VARCHAR("+SybaseSqlDialect.maxSybaseVarcharSize+") )"); + sql = builder.toString(); + break; + + case ST_CONTAINS: + builder.append(argumentsSql.get(0)+".STContains("+argumentsSql.get(1)+")") ; + sql = builder.toString(); + break; + + case ST_CONVEXHULL: + builder.append("CAST("); + builder.append(argumentsSql.get(0)+".STConvexHull()") ; + builder.append("as VARCHAR("+SybaseSqlDialect.maxSybaseVarcharSize+") )"); + sql = builder.toString(); + break; + + case ST_CROSSES: + builder.append(argumentsSql.get(0)+".STCrosses("+argumentsSql.get(1)+")") ; + sql = builder.toString(); + break; + + case ST_DIFFERENCE: + builder.append("CAST("); + builder.append(argumentsSql.get(0)+".STDifference("+argumentsSql.get(1)+")") ; + builder.append("as VARCHAR("+SybaseSqlDialect.maxSybaseVarcharSize+") )"); + sql = builder.toString(); + break; + + case ST_DIMENSION: + builder.append(argumentsSql.get(0)+".STDimension()") ; + sql = builder.toString(); + break; + + case ST_DISJOINT: + builder.append("CAST("); + builder.append(argumentsSql.get(0)+".STDisjoint("+argumentsSql.get(1)+")") ; + builder.append("as VARCHAR("+SybaseSqlDialect.maxSybaseVarcharSize+") )"); + sql = builder.toString(); + break; + + case ST_DISTANCE: + builder.append(argumentsSql.get(0)+".STDistance("+argumentsSql.get(1)+")") ; + sql = builder.toString(); + break; + + case ST_ENVELOPE: + builder.append("CAST("); + builder.append(argumentsSql.get(0)+".STEnvelope()") ; + builder.append("as VARCHAR("+SybaseSqlDialect.maxSybaseVarcharSize+") )"); + sql = builder.toString(); + break; + + case ST_EQUALS: + builder.append(argumentsSql.get(0)+".STEquals("+argumentsSql.get(1)+")") ; + sql = builder.toString(); + break; + + + case ST_GEOMETRYTYPE: + builder.append(argumentsSql.get(0)+".STGeometryType()") ; + sql = builder.toString(); + break; + + case ST_INTERSECTION: + builder.append("CAST("); + builder.append(argumentsSql.get(0)+".STIntersection("+argumentsSql.get(1)+")") ; + builder.append("as VARCHAR("+SybaseSqlDialect.maxSybaseVarcharSize+") )"); + sql = builder.toString(); + break; + + case ST_INTERSECTS: + builder.append(argumentsSql.get(0)+".STIntersects("+argumentsSql.get(1)+")") ; + sql = builder.toString(); + break; + + case ST_ISEMPTY: + builder.append(argumentsSql.get(0)+".STIsEmpty()") ; + sql = builder.toString(); + break; + + case ST_ISSIMPLE: + builder.append(argumentsSql.get(0)+".STIsSimple()") ; + sql = builder.toString(); + break; + case ST_OVERLAPS: + builder.append(argumentsSql.get(0)+".STOverlaps("+argumentsSql.get(1)+")") ; + sql = builder.toString(); + break; + + case ST_SYMDIFFERENCE: + builder.append("CAST("); + builder.append(argumentsSql.get(0)+".STSymDifference ("+argumentsSql.get(1)+")") ; + builder.append("as VARCHAR("+SybaseSqlDialect.maxSybaseVarcharSize+") )"); + sql = builder.toString(); + break; + + case ST_TOUCHES: + builder.append(argumentsSql.get(0)+".STTouches("+argumentsSql.get(1)+")") ; + sql = builder.toString(); + break; + + case ST_UNION: + 
builder.append("CAST(");
+            builder.append(argumentsSql.get(0) + ".STUnion(" + argumentsSql.get(1) + ")");
+            builder.append("as VARCHAR(" + SybaseSqlDialect.maxSybaseVarcharSize + ") )");
+            sql = builder.toString();
+            break;
+
+        case ST_WITHIN:
+            builder.append(argumentsSql.get(0) + ".STWithin(" + argumentsSql.get(1) + ")");
+            sql = builder.toString();
+            break;
+
+        case BIT_AND:
+            builder.append(argumentsSql.get(0) + " & " + argumentsSql.get(1));
+            sql = builder.toString();
+            break;
+
+        case BIT_OR:
+            builder.append(argumentsSql.get(0) + " | " + argumentsSql.get(1));
+            sql = builder.toString();
+            break;
+
+        case BIT_XOR:
+            builder.append(argumentsSql.get(0) + " ^ " + argumentsSql.get(1));
+            sql = builder.toString();
+            break;
+
+        case BIT_NOT:
+            builder.append("~ " + argumentsSql.get(0));
+            sql = builder.toString();
+            break;
+
+        case HASH_MD5:
+            builder.append("CONVERT(Char, HASHBYTES('MD5'," + argumentsSql.get(0) + "), 2)");
+            sql = builder.toString();
+            break;
+        case HASH_SHA1:
+            builder.append("CONVERT(Char, HASHBYTES('SHA1'," + argumentsSql.get(0) + "), 2)");
+            sql = builder.toString();
+            break;
+
+        case HASH_SHA:
+            builder.append("CONVERT(Char, HASHBYTES('SHA'," + argumentsSql.get(0) + "), 2)");
+            sql = builder.toString();
+            break;
+
+        case ZEROIFNULL:
+            builder.append("ISNULL(" + argumentsSql.get(0) + ",0)");
+            sql = builder.toString();
+            break;
+
+        default:
+            break;
+        }
+
+        return sql;
+    }
+
+    @Override
+    public String visit(SqlOrderBy orderBy) throws AdapterException {
+        // ORDER BY [ASC/DESC] [NULLS FIRST/LAST]
+        // ASC and NULLS LAST are default in EXASOL
+        List<String> sqlOrderElement = new ArrayList<>();
+        for (int i = 0; i < orderBy.getExpressions().size(); ++i) {
+            String elementSql = orderBy.getExpressions().get(i).accept(this);
+            boolean isNullsLast = orderBy.nullsLast().get(i);
+            boolean isAscending = orderBy.isAscending().get(i);
+
+            // Sybase has no NULLS FIRST/LAST clause, so a leading CASE expression
+            // emulates the non-default null orderings
+            if (!isAscending && !isNullsLast) {
+                elementSql = "(CASE WHEN " + elementSql + " IS NULL THEN 0 ELSE 1 END), " + elementSql;
+            }
+
+            if (isAscending && isNullsLast) {
+                elementSql = "(CASE WHEN " + elementSql + " IS NULL THEN 1 ELSE 0 END), " + elementSql;
+            }
+
+            if (!isAscending) {
+                elementSql += " DESC";
+            }
+
+            sqlOrderElement.add(elementSql);
+        }
+        return "ORDER BY " + Joiner.on(", ").join(sqlOrderElement);
+    }
+
+}
diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/TeradataSqlDialect.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/TeradataSqlDialect.java
index 8bb50dd80..25d2cb180 100644
--- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/TeradataSqlDialect.java
+++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/TeradataSqlDialect.java
@@ -9,30 +9,30 @@
 import com.exasol.adapter.capabilities.MainCapability;
 import com.exasol.adapter.capabilities.PredicateCapability;
 import com.exasol.adapter.capabilities.ScalarFunctionCapability;
-import com.exasol.adapter.dialects.*;
+import com.exasol.adapter.dialects.AbstractSqlDialect;
+import com.exasol.adapter.dialects.JdbcTypeDescription;
+import com.exasol.adapter.dialects.SqlDialectContext;
+import com.exasol.adapter.dialects.SqlGenerationContext;
+import com.exasol.adapter.dialects.SqlGenerationVisitor;
 import com.exasol.adapter.jdbc.JdbcAdapterProperties;
 import com.exasol.adapter.metadata.DataType;
 
+public class TeradataSqlDialect extends AbstractSqlDialect {
+    public final static int maxTeradataVarcharSize = 32000;
+    private static final String NAME = "TERADATA";
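+    // The 32000 above presumably mirrors Teradata's maximum length for a
+    // UNICODE VARCHAR (64000 bytes / 32000 characters); it caps the VARCHAR
+    // columns that oversized source types are mapped to further down.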
-public class TeradataSqlDialect extends AbstractSqlDialect{ + public TeradataSqlDialect(final SqlDialectContext context) { + super(context); + } - public final static int maxTeradataVarcharSize = 32000; - - public TeradataSqlDialect(SqlDialectContext context) { - super(context); - } + public static String getPublicName() { + return NAME; + } - public static final String NAME = "TERADATA"; - - @Override - public String getPublicName() { - return NAME; - } + @Override + public Capabilities getCapabilities() { - @Override - public Capabilities getCapabilities() { - - Capabilities cap = new Capabilities(); + final Capabilities cap = new Capabilities(); cap.supportMainCapability(MainCapability.SELECTLIST_PROJECTION); cap.supportMainCapability(MainCapability.SELECTLIST_EXPRESSIONS); @@ -45,7 +45,6 @@ public Capabilities getCapabilities() { cap.supportMainCapability(MainCapability.ORDER_BY_COLUMN); cap.supportMainCapability(MainCapability.ORDER_BY_EXPRESSION); cap.supportMainCapability(MainCapability.LIMIT); - // Predicates cap.supportPredicate(PredicateCapability.AND); @@ -62,7 +61,7 @@ public Capabilities getCapabilities() { cap.supportPredicate(PredicateCapability.IN_CONSTLIST); cap.supportPredicate(PredicateCapability.IS_NULL); cap.supportPredicate(PredicateCapability.IS_NOT_NULL); - + // Literals // BOOL is not supported cap.supportLiteral(LiteralCapability.NULL); @@ -73,8 +72,7 @@ public Capabilities getCapabilities() { cap.supportLiteral(LiteralCapability.EXACTNUMERIC); cap.supportLiteral(LiteralCapability.STRING); cap.supportLiteral(LiteralCapability.INTERVAL); - - + // Aggregate functions cap.supportAggregateFunction(AggregateFunctionCapability.COUNT); cap.supportAggregateFunction(AggregateFunctionCapability.COUNT_STAR); @@ -83,7 +81,7 @@ public Capabilities getCapabilities() { // GEO_INTERSECTION_AGGREGATE is not supported // GEO_UNION_AGGREGATE is not supported // APPROXIMATE_COUNT_DISTINCT not supported - + cap.supportAggregateFunction(AggregateFunctionCapability.SUM); cap.supportAggregateFunction(AggregateFunctionCapability.SUM_DISTINCT); cap.supportAggregateFunction(AggregateFunctionCapability.MIN); @@ -93,26 +91,26 @@ public Capabilities getCapabilities() { cap.supportAggregateFunction(AggregateFunctionCapability.MEDIAN); cap.supportAggregateFunction(AggregateFunctionCapability.FIRST_VALUE); cap.supportAggregateFunction(AggregateFunctionCapability.LAST_VALUE); - //cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV); - //cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_DISTINCT); + // cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV); + // cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_DISTINCT); cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_POP); // STDDEV_POP_DISTINCT cap.supportAggregateFunction(AggregateFunctionCapability.STDDEV_SAMP); // STDDEV_SAMP_DISTINCT - //cap.supportAggregateFunction(AggregateFunctionCapability.VARIANCE); - //cap.supportAggregateFunction(AggregateFunctionCapability.VARIANCE_DISTINCT); + // cap.supportAggregateFunction(AggregateFunctionCapability.VARIANCE); + // cap.supportAggregateFunction(AggregateFunctionCapability.VARIANCE_DISTINCT); cap.supportAggregateFunction(AggregateFunctionCapability.VAR_POP); // VAR_POP_DISTINCT cap.supportAggregateFunction(AggregateFunctionCapability.VAR_SAMP); // VAR_SAMP_DISTINCT - + cap.supportScalarFunction(ScalarFunctionCapability.CEIL); cap.supportScalarFunction(ScalarFunctionCapability.DIV); 
cap.supportScalarFunction(ScalarFunctionCapability.FLOOR); cap.supportScalarFunction(ScalarFunctionCapability.ROUND); cap.supportScalarFunction(ScalarFunctionCapability.SIGN); cap.supportScalarFunction(ScalarFunctionCapability.TRUNC); - + cap.supportScalarFunction(ScalarFunctionCapability.ADD); cap.supportScalarFunction(ScalarFunctionCapability.SUB); cap.supportScalarFunction(ScalarFunctionCapability.MULT); @@ -141,15 +139,15 @@ public Capabilities getCapabilities() { cap.supportScalarFunction(ScalarFunctionCapability.SQRT); cap.supportScalarFunction(ScalarFunctionCapability.TAN); cap.supportScalarFunction(ScalarFunctionCapability.TANH); - - - cap.supportScalarFunction(ScalarFunctionCapability.ASCII); + + cap.supportScalarFunction(ScalarFunctionCapability.ASCII); // BIT_LENGTH is not supported. Can be different for Unicode characters. cap.supportScalarFunction(ScalarFunctionCapability.CHR); // COLOGNE_PHONETIC is not supported. // CONCAT is not supported. Number of arguments can be different. // DUMP is not supported. Output is different. - // EDIT_DISTANCE is not supported. Output is different. UTL_MATCH.EDIT_DISTANCE returns -1 with NULL argument. + // EDIT_DISTANCE is not supported. Output is different. UTL_MATCH.EDIT_DISTANCE + // returns -1 with NULL argument. // INSERT is not supported. cap.supportScalarFunction(ScalarFunctionCapability.INSTR); cap.supportScalarFunction(ScalarFunctionCapability.LENGTH); @@ -164,7 +162,8 @@ public Capabilities getCapabilities() { cap.supportScalarFunction(ScalarFunctionCapability.REPEAT); cap.supportScalarFunction(ScalarFunctionCapability.REPLACE); cap.supportScalarFunction(ScalarFunctionCapability.REVERSE); - // RIGHT is not supported. Possible solution with SUBSTRING (must handle corner cases correctly). + // RIGHT is not supported. Possible solution with SUBSTRING (must handle corner + // cases correctly). 
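+        // One possible sketch of that workaround (not implemented here):
+        // RIGHT(s, n) could be rewritten as SUBSTR(s, CHARACTER_LENGTH(s) - n + 1),
+        // guarded for NULL input and for n exceeding the string length.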
cap.supportScalarFunction(ScalarFunctionCapability.RPAD); cap.supportScalarFunction(ScalarFunctionCapability.RTRIM); cap.supportScalarFunction(ScalarFunctionCapability.SOUNDEX); @@ -182,142 +181,141 @@ public Capabilities getCapabilities() { cap.supportScalarFunction(ScalarFunctionCapability.ADD_SECONDS); cap.supportScalarFunction(ScalarFunctionCapability.ADD_WEEKS); cap.supportScalarFunction(ScalarFunctionCapability.ADD_YEARS); - + cap.supportScalarFunction(ScalarFunctionCapability.CURRENT_DATE); cap.supportScalarFunction(ScalarFunctionCapability.CURRENT_TIMESTAMP); - + cap.supportScalarFunction(ScalarFunctionCapability.NULLIFZERO); cap.supportScalarFunction(ScalarFunctionCapability.ZEROIFNULL); - + return cap; - } + } - @Override - public DataType dialectSpecificMapJdbcType(JdbcTypeDescription jdbcTypeDescription) throws SQLException { + public DataType dialectSpecificMapJdbcType(final JdbcTypeDescription jdbcTypeDescription) throws SQLException { DataType colType = null; - int jdbcType = jdbcTypeDescription.getJdbcType(); + final int jdbcType = jdbcTypeDescription.getJdbcType(); switch (jdbcType) { - case Types.TIME: - colType = DataType.createVarChar(21, DataType.ExaCharset.UTF8); - break; - case 2013: //Types.TIME_WITH_TIMEZONE is Java 1.8 specific - colType = DataType.createVarChar(21, DataType.ExaCharset.UTF8); - break; - case Types.NUMERIC: - int decimalPrec = jdbcTypeDescription.getPrecisionOrSize(); - int decimalScale = jdbcTypeDescription.getDecimalScale(); - - if (decimalPrec <= DataType.maxExasolDecimalPrecision) { - colType = DataType.createDecimal(decimalPrec, decimalScale); - } else { - colType = DataType.createDouble(); - } - break; - case Types.OTHER: // Teradata JDBC uses OTHER for several data types GEOMETRY, INTERVAL etc... - String columnTypeName = jdbcTypeDescription.getTypeName(); - - if ( columnTypeName.equals("GEOMETRY") ) - colType = DataType.createVarChar(jdbcTypeDescription.getPrecisionOrSize(), DataType.ExaCharset.UTF8); - else if (columnTypeName.startsWith("INTERVAL") ) - colType = DataType.createVarChar(30, DataType.ExaCharset.UTF8); //TODO verify that varchar 30 is sufficient in all cases - else if (columnTypeName.startsWith("PERIOD") ) - colType = DataType.createVarChar(100, DataType.ExaCharset.UTF8); - else - colType = DataType.createVarChar(TeradataSqlDialect.maxTeradataVarcharSize, DataType.ExaCharset.UTF8); - break; - - case Types.SQLXML: - colType = DataType.createVarChar(TeradataSqlDialect.maxTeradataVarcharSize, DataType.ExaCharset.UTF8); - break; - - case Types.CLOB: - colType = DataType.createVarChar(TeradataSqlDialect.maxTeradataVarcharSize, DataType.ExaCharset.UTF8); - break; - - case Types.BLOB: - case Types.VARBINARY: - case Types.BINARY: - colType = DataType.createVarChar(100, DataType.ExaCharset.UTF8); - break; - case Types.DISTINCT: - colType = DataType.createVarChar(100, DataType.ExaCharset.UTF8); - break; + case Types.TIME: + colType = DataType.createVarChar(21, DataType.ExaCharset.UTF8); + break; + case 2013: // Types.TIME_WITH_TIMEZONE is Java 1.8 specific + colType = DataType.createVarChar(21, DataType.ExaCharset.UTF8); + break; + case Types.NUMERIC: + final int decimalPrec = jdbcTypeDescription.getPrecisionOrSize(); + final int decimalScale = jdbcTypeDescription.getDecimalScale(); + + if (decimalPrec <= DataType.maxExasolDecimalPrecision) { + colType = DataType.createDecimal(decimalPrec, decimalScale); + } else { + colType = DataType.createDouble(); + } + break; + case Types.OTHER: // Teradata JDBC uses OTHER for several data types 
GEOMETRY, INTERVAL etc... + final String columnTypeName = jdbcTypeDescription.getTypeName(); + + if (columnTypeName.equals("GEOMETRY")) { + colType = DataType.createVarChar(jdbcTypeDescription.getPrecisionOrSize(), DataType.ExaCharset.UTF8); + } else if (columnTypeName.startsWith("INTERVAL")) { + colType = DataType.createVarChar(30, DataType.ExaCharset.UTF8); // TODO verify that varchar 30 is + // sufficient in all cases + } else if (columnTypeName.startsWith("PERIOD")) { + colType = DataType.createVarChar(100, DataType.ExaCharset.UTF8); + } else { + colType = DataType.createVarChar(TeradataSqlDialect.maxTeradataVarcharSize, DataType.ExaCharset.UTF8); + } + break; + + case Types.SQLXML: + colType = DataType.createVarChar(TeradataSqlDialect.maxTeradataVarcharSize, DataType.ExaCharset.UTF8); + break; + + case Types.CLOB: + colType = DataType.createVarChar(TeradataSqlDialect.maxTeradataVarcharSize, DataType.ExaCharset.UTF8); + break; + + case Types.BLOB: + case Types.VARBINARY: + case Types.BINARY: + colType = DataType.createVarChar(100, DataType.ExaCharset.UTF8); + break; + case Types.DISTINCT: + colType = DataType.createVarChar(100, DataType.ExaCharset.UTF8); + break; } return colType; } - - - @Override - public SchemaOrCatalogSupport supportsJdbcCatalogs() { + + @Override + public SchemaOrCatalogSupport supportsJdbcCatalogs() { return SchemaOrCatalogSupport.UNSUPPORTED; - } + } - @Override - public SchemaOrCatalogSupport supportsJdbcSchemas() { + @Override + public SchemaOrCatalogSupport supportsJdbcSchemas() { return SchemaOrCatalogSupport.SUPPORTED; - } + } - @Override - public SqlGenerationVisitor getSqlGenerationVisitor(SqlGenerationContext context) { + @Override + public SqlGenerationVisitor getSqlGenerationVisitor(final SqlGenerationContext context) { return new TeradataSqlGenerationVisitor(this, context); } - - @Override - public IdentifierCaseHandling getUnquotedIdentifierHandling() { - return IdentifierCaseHandling.INTERPRET_AS_UPPER; - } - - @Override - public IdentifierCaseHandling getQuotedIdentifierHandling() { + + @Override + public IdentifierCaseHandling getUnquotedIdentifierHandling() { + return IdentifierCaseHandling.INTERPRET_AS_UPPER; + } + + @Override + public IdentifierCaseHandling getQuotedIdentifierHandling() { return IdentifierCaseHandling.INTERPRET_CASE_SENSITIVE; - } - - @Override - public String applyQuote(String identifier) { - return "\"" + identifier.replace("\"", "\"\"") + "\""; - } - - @Override - public String applyQuoteIfNeeded(String identifier) { - boolean isSimpleIdentifier = identifier.matches("^[A-Z][0-9A-Z_]*"); - if (isSimpleIdentifier) { - return identifier; - } else { - return applyQuote(identifier); - } - } - - @Override - public boolean requiresCatalogQualifiedTableNames( - SqlGenerationContext context) { - return false; - } - - @Override - public boolean requiresSchemaQualifiedTableNames( - SqlGenerationContext context) { - return true; - } - - @Override - public NullSorting getDefaultNullSorting() { - return NullSorting.NULLS_SORTED_HIGH; - } - - @Override - public String getStringLiteral(String value) { - return "'" + value.replace("'", "''") + "'"; - } - - @Override - public void handleException(SQLException exception, JdbcAdapterProperties.ExceptionHandlingMode exceptionMode) throws SQLException { - if (exceptionMode == JdbcAdapterProperties.ExceptionHandlingMode.IGNORE_INVALID_VIEWS) { - if (exception.getMessage().contains("Teradata Database") && exception.getMessage().contains("Error 3807")) { - return; + } + + @Override + public String 
applyQuote(final String identifier) { + return "\"" + identifier.replace("\"", "\"\"") + "\""; + } + + @Override + public String applyQuoteIfNeeded(final String identifier) { + final boolean isSimpleIdentifier = identifier.matches("^[A-Z][0-9A-Z_]*"); + if (isSimpleIdentifier) { + return identifier; + } else { + return applyQuote(identifier); + } + } + + @Override + public boolean requiresCatalogQualifiedTableNames(final SqlGenerationContext context) { + return false; + } + + @Override + public boolean requiresSchemaQualifiedTableNames(final SqlGenerationContext context) { + return true; + } + + @Override + public NullSorting getDefaultNullSorting() { + return NullSorting.NULLS_SORTED_HIGH; + } + + @Override + public String getStringLiteral(final String value) { + return "'" + value.replace("'", "''") + "'"; + } + + @Override + public void handleException(final SQLException exception, + final JdbcAdapterProperties.ExceptionHandlingMode exceptionMode) throws SQLException { + if (exceptionMode == JdbcAdapterProperties.ExceptionHandlingMode.IGNORE_INVALID_VIEWS) { + if (exception.getMessage().contains("Teradata Database") && exception.getMessage().contains("Error 3807")) { + return; } } - throw exception; - }; + throw exception; + }; } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/JdbcAdapter.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/JdbcAdapter.java index 9010ec5bb..8f70e52cf 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/JdbcAdapter.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/JdbcAdapter.java @@ -1,129 +1,148 @@ package com.exasol.adapter.jdbc; +import java.io.OutputStream; +import java.sql.*; +import java.util.List; +import java.util.Map; +import java.util.logging.*; + import com.exasol.ExaConnectionInformation; import com.exasol.ExaMetadata; import com.exasol.adapter.AdapterException; import com.exasol.adapter.capabilities.*; import com.exasol.adapter.dialects.*; -import com.exasol.adapter.dialects.impl.*; import com.exasol.adapter.json.RequestJsonParser; import com.exasol.adapter.json.ResponseJsonSerializer; -import com.exasol.adapter.metadata.DataType; -import com.exasol.adapter.metadata.SchemaMetadata; -import com.exasol.adapter.metadata.SchemaMetadataInfo; +import com.exasol.adapter.metadata.*; import com.exasol.adapter.request.*; +import com.exasol.logging.CompactFormatter; import com.exasol.utils.JsonHelper; import com.exasol.utils.UdfUtils; -import com.google.common.collect.ImmutableList; - -import java.sql.*; -import java.util.List; -import java.util.Map; public class JdbcAdapter { - public static final int MAX_STRING_CHAR_LENGTH = 2000000; - - final static SqlDialects supportedDialects; - static { - supportedDialects = new SqlDialects( - ImmutableList.of( - GenericSqlDialect.NAME, - ExasolSqlDialect.NAME, - ImpalaSqlDialect.NAME, - OracleSqlDialect.NAME, - TeradataSqlDialect.NAME, - RedshiftSqlDialect.NAME, - HiveSqlDialect.NAME, - DB2SqlDialect.NAME, - SqlServerSqlDialect.NAME, - PostgreSQLSqlDialect.NAME)); - } + private static Logger logger = null; /** - * This method gets called by the database during interactions with the - * virtual schema. + * This method gets called by the database during interactions with the virtual + * schema. 
* - * @param meta - * Metadata object - * @param input - * json request, as defined in the Adapter Script API - * @return json response, as defined in the Adapter Script API + * @param meta Metadata object + * @param input JSON request, as defined in the Adapter Script API + * @return JSON response, as defined in the Adapter Script API */ - public static String adapterCall(ExaMetadata meta, String input) throws Exception { + public static String adapterCall(final ExaMetadata meta, final String input) throws Exception { String result = ""; try { - AdapterRequest request = new RequestJsonParser().parseRequest(input); - tryAttachToOutputService(request.getSchemaMetadataInfo()); - System.out.println("----------\nAdapter Request:\n----------\n" + input); - + final AdapterRequest request = new RequestJsonParser().parseRequest(input); + final SchemaMetadataInfo schemaMetadata = request.getSchemaMetadataInfo(); + configureLogOutput(schemaMetadata); + logger.fine(() -> "Adapter request:\n" + input); + switch (request.getType()) { case CREATE_VIRTUAL_SCHEMA: - result = handleCreateVirtualSchema((CreateVirtualSchemaRequest)request, meta); + result = handleCreateVirtualSchema((CreateVirtualSchemaRequest) request, meta); break; case DROP_VIRTUAL_SCHEMA: - result = handleDropVirtualSchema((DropVirtualSchemaRequest)request); + result = handleDropVirtualSchema((DropVirtualSchemaRequest) request); break; case REFRESH: - result = handleRefresh((RefreshRequest)request, meta); + result = handleRefresh((RefreshRequest) request, meta); break; case SET_PROPERTIES: - result = handleSetProperty((SetPropertiesRequest)request, meta); + result = handleSetProperty((SetPropertiesRequest) request, meta); break; case GET_CAPABILITIES: - result = handleGetCapabilities((GetCapabilitiesRequest)request); + result = handleGetCapabilities((GetCapabilitiesRequest) request); break; case PUSHDOWN: - result = handlePushdownRequest((PushdownRequest)request, meta); + result = handlePushdownRequest((PushdownRequest) request, meta); break; default: throw new RuntimeException("Request Type not supported: " + request.getType()); } - assert(result.isEmpty()); - System.out.println("----------\nResponse:\n----------\n" + JsonHelper.prettyJson(JsonHelper.getJsonObject(result))); + assert (result.isEmpty()); + logger.fine("Response:\n" + JsonHelper.prettyJson(JsonHelper.getJsonObject(result))); return result; - } catch (AdapterException ex) { - throw ex; + } catch (final AdapterException e) { + throw e; + } catch (final Exception e) { + throw new Exception("Unexpected error in adapter for following request: " + input + "\nResponse: " + result, + e); } - catch (Exception ex) { - String stacktrace = UdfUtils.traceToString(ex); - throw new Exception("Unexpected error in adapter: " + ex.getMessage() + "\nStacktrace: " + stacktrace + "\nFor following request: " + input + "\nResponse: " + result); + } + + private static void configureLogOutput(final SchemaMetadataInfo schemaMetadata) + throws AdapterException, InvalidPropertyException { + final OutputStream out = tryAttachToOutputService(schemaMetadata); + if (out == null) { + // Fall back to regular STDOUT in case the socket output stream is not + // available. In most cases (except unit test scenarios) this will mean that + // logs will not be available. 
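+            // (The output service is a plain TCP listener, e.g. the udf_debug.py
+            // script referenced in UdfUtils, which simply prints everything the
+            // adapter writes to this socket.)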
+ configureLogger(System.out, schemaMetadata.getProperties()); + } else { + configureLogger(out, schemaMetadata.getProperties()); + } + + } + + private static synchronized void configureLogger(final OutputStream out, final Map properties) + throws InvalidPropertyException { + if (logger == null) { + final Level logLevel = determineLogLevel(properties); + final Formatter formatter = new CompactFormatter(); + final StreamHandler handler = new StreamHandler(out, formatter); + handler.setFormatter(formatter); + handler.setLevel(logLevel); + final Logger baseLogger = Logger.getLogger("com.exasol"); + baseLogger.setLevel(logLevel); + baseLogger.addHandler(handler); + logger = Logger.getLogger(JdbcAdapter.class.getName()); + logger.info(() -> "Attached to output service with log level " + logLevel + "."); } } - private static String handleCreateVirtualSchema(CreateVirtualSchemaRequest request, ExaMetadata meta) throws SQLException, AdapterException { - JdbcAdapterProperties.checkPropertyConsistency(request.getSchemaMetadataInfo().getProperties(), supportedDialects); - SchemaMetadata remoteMeta = readMetadata(request.getSchemaMetadataInfo(), meta); + private static Level determineLogLevel(final Map properties) throws InvalidPropertyException { + return (JdbcAdapterProperties.getLogLevel(properties) == null) // + ? Level.INFO // + : JdbcAdapterProperties.getLogLevel(properties); + } + + private static String handleCreateVirtualSchema(final CreateVirtualSchemaRequest request, final ExaMetadata meta) + throws SQLException, AdapterException { + final SchemaMetadataInfo schemaMetadata = request.getSchemaMetadataInfo(); + final Map properties = schemaMetadata.getProperties(); + configureLogOutput(schemaMetadata); + JdbcAdapterProperties.checkPropertyConsistency(properties); + final SchemaMetadata remoteMeta = readMetadata(schemaMetadata, meta); return ResponseJsonSerializer.makeCreateVirtualSchemaResponse(remoteMeta); } - - private static SchemaMetadata readMetadata(SchemaMetadataInfo schemaMeta, ExaMetadata meta) throws SQLException, AdapterException { - List tables = JdbcAdapterProperties.getTableFilter(schemaMeta.getProperties()); + + private static SchemaMetadata readMetadata(final SchemaMetadataInfo schemaMeta, final ExaMetadata meta) + throws SQLException, AdapterException { + final List tables = JdbcAdapterProperties.getTableFilter(schemaMeta.getProperties()); return readMetadata(schemaMeta, tables, meta); } - private static SchemaMetadata readMetadata(SchemaMetadataInfo meta, List tables, ExaMetadata exaMeta) throws SQLException, AdapterException { + private static SchemaMetadata readMetadata(final SchemaMetadataInfo meta, final List tables, + final ExaMetadata exaMeta) throws SQLException, AdapterException { // Connect via JDBC and read metadata - ExaConnectionInformation connection = JdbcAdapterProperties.getConnectionInformation(meta.getProperties(), exaMeta); - String catalog = JdbcAdapterProperties.getCatalog(meta.getProperties()); - String schema = JdbcAdapterProperties.getSchema(meta.getProperties()); - return JdbcMetadataReader.readRemoteMetadata( - connection.getAddress(), - connection.getUser(), - connection.getPassword(), - catalog, - schema, - tables, - supportedDialects, - JdbcAdapterProperties.getSqlDialectName(meta.getProperties(), supportedDialects), + final ExaConnectionInformation connection = JdbcAdapterProperties.getConnectionInformation(meta.getProperties(), + exaMeta); + final String catalog = JdbcAdapterProperties.getCatalog(meta.getProperties()); + final String schema = 
JdbcAdapterProperties.getSchema(meta.getProperties()); + return JdbcMetadataReader.readRemoteMetadata(connection.getAddress(), connection.getUser(), + connection.getPassword(), catalog, schema, tables, + JdbcAdapterProperties.getSqlDialectName(meta.getProperties()), JdbcAdapterProperties.getExceptionHandlingMode(meta.getProperties())); } - - private static String handleRefresh(RefreshRequest request, ExaMetadata meta) throws SQLException, AdapterException { + + private static String handleRefresh(final RefreshRequest request, final ExaMetadata meta) + throws SQLException, AdapterException { SchemaMetadata remoteMeta; - JdbcAdapterProperties.checkPropertyConsistency(request.getSchemaMetadataInfo().getProperties(), supportedDialects); + JdbcAdapterProperties.checkPropertyConsistency(request.getSchemaMetadataInfo().getProperties()); if (request.isRefreshForTables()) { - List tables = request.getTables(); + final List tables = request.getTables(); remoteMeta = readMetadata(request.getSchemaMetadataInfo(), tables, meta); } else { remoteMeta = readMetadata(request.getSchemaMetadataInfo(), meta); @@ -131,59 +150,61 @@ private static String handleRefresh(RefreshRequest request, ExaMetadata meta) th return ResponseJsonSerializer.makeRefreshResponse(remoteMeta); } - private static String handleSetProperty(SetPropertiesRequest request, ExaMetadata exaMeta) throws SQLException, AdapterException { - Map changedProperties = request.getProperties(); - Map newSchemaMeta = JdbcAdapterProperties.getNewProperties( - request.getSchemaMetadataInfo().getProperties(), changedProperties); - JdbcAdapterProperties.checkPropertyConsistency(newSchemaMeta, supportedDialects); + private static String handleSetProperty(final SetPropertiesRequest request, final ExaMetadata exaMeta) + throws SQLException, AdapterException { + final Map changedProperties = request.getProperties(); + final Map newSchemaMeta = JdbcAdapterProperties + .getNewProperties(request.getSchemaMetadataInfo().getProperties(), changedProperties); + JdbcAdapterProperties.checkPropertyConsistency(newSchemaMeta); if (JdbcAdapterProperties.isRefreshNeeded(changedProperties)) { - ExaConnectionInformation connection = JdbcAdapterProperties.getConnectionInformation(newSchemaMeta, exaMeta); - List tableFilter = JdbcAdapterProperties.getTableFilter(newSchemaMeta); - SchemaMetadata remoteMeta = JdbcMetadataReader.readRemoteMetadata( - connection.getAddress(), - connection.getUser(), - connection.getPassword(), - JdbcAdapterProperties.getCatalog(newSchemaMeta), - JdbcAdapterProperties.getSchema(newSchemaMeta), - tableFilter, - supportedDialects, - JdbcAdapterProperties.getSqlDialectName(newSchemaMeta, supportedDialects), + final ExaConnectionInformation connection = JdbcAdapterProperties.getConnectionInformation(newSchemaMeta, + exaMeta); + final List tableFilter = JdbcAdapterProperties.getTableFilter(newSchemaMeta); + final SchemaMetadata remoteMeta = JdbcMetadataReader.readRemoteMetadata(connection.getAddress(), + connection.getUser(), connection.getPassword(), JdbcAdapterProperties.getCatalog(newSchemaMeta), + JdbcAdapterProperties.getSchema(newSchemaMeta), tableFilter, + JdbcAdapterProperties.getSqlDialectName(newSchemaMeta), JdbcAdapterProperties.getExceptionHandlingMode(newSchemaMeta)); return ResponseJsonSerializer.makeSetPropertiesResponse(remoteMeta); } return ResponseJsonSerializer.makeSetPropertiesResponse(null); } - private static String handleDropVirtualSchema(DropVirtualSchemaRequest request) { + private static String handleDropVirtualSchema(final 
DropVirtualSchemaRequest request) { return ResponseJsonSerializer.makeDropVirtualSchemaResponse(); } - - public static String handleGetCapabilities(GetCapabilitiesRequest request) throws AdapterException { - SqlDialectContext dialectContext = new SqlDialectContext(SchemaAdapterNotes.deserialize(request.getSchemaMetadataInfo().getAdapterNotes(), request.getSchemaMetadataInfo().getSchemaName())); - SqlDialect dialect = JdbcAdapterProperties.getSqlDialect(request.getSchemaMetadataInfo().getProperties(), supportedDialects, dialectContext); - Capabilities capabilities = dialect.getCapabilities(); - Capabilities excludedCapabilities = parseExcludedCapabilities( + + public static String handleGetCapabilities(final GetCapabilitiesRequest request) throws AdapterException { + final SqlDialectContext dialectContext = new SqlDialectContext(SchemaAdapterNotes.deserialize( + request.getSchemaMetadataInfo().getAdapterNotes(), request.getSchemaMetadataInfo().getSchemaName())); + final SqlDialect dialect = JdbcAdapterProperties.getSqlDialect(request.getSchemaMetadataInfo().getProperties(), + dialectContext); + final Capabilities capabilities = dialect.getCapabilities(); + final Capabilities excludedCapabilities = parseExcludedCapabilities( JdbcAdapterProperties.getExcludedCapabilities(request.getSchemaMetadataInfo().getProperties())); capabilities.subtractCapabilities(excludedCapabilities); return ResponseJsonSerializer.makeGetCapabilitiesResponse(capabilities); } - - private static Capabilities parseExcludedCapabilities(String excludedCapabilitiesStr) { - System.out.println("Excluded Capabilities: " + excludedCapabilitiesStr); - Capabilities excludedCapabilities = new Capabilities(); - for (String cap : excludedCapabilitiesStr.split(",")) { + + private static Capabilities parseExcludedCapabilities(final String excludedCapabilitiesStr) { + logger.info(() -> "Excluded Capabilities: " + + (excludedCapabilitiesStr.isEmpty() ? 
"none" : excludedCapabilitiesStr)); + final Capabilities excludedCapabilities = new Capabilities(); + for (final String cap : excludedCapabilitiesStr.split(",")) { if (cap.trim().isEmpty()) { continue; } if (cap.startsWith(ResponseJsonSerializer.LITERAL_PREFIX)) { - String literalCap = cap.replaceFirst(ResponseJsonSerializer.LITERAL_PREFIX, ""); + final String literalCap = cap.replaceFirst(ResponseJsonSerializer.LITERAL_PREFIX, ""); excludedCapabilities.supportLiteral(LiteralCapability.valueOf(literalCap)); } else if (cap.startsWith(ResponseJsonSerializer.AGGREGATE_FUNCTION_PREFIX)) { // Aggregate functions must be checked before scalar functions - String aggregateFunctionCap = cap.replaceFirst(ResponseJsonSerializer.AGGREGATE_FUNCTION_PREFIX, ""); - excludedCapabilities.supportAggregateFunction(AggregateFunctionCapability.valueOf(aggregateFunctionCap)); + final String aggregateFunctionCap = cap.replaceFirst(ResponseJsonSerializer.AGGREGATE_FUNCTION_PREFIX, + ""); + excludedCapabilities + .supportAggregateFunction(AggregateFunctionCapability.valueOf(aggregateFunctionCap)); } else if (cap.startsWith(ResponseJsonSerializer.SCALAR_FUNCTION_PREFIX)) { - String scalarFunctionCap = cap.replaceFirst(ResponseJsonSerializer.SCALAR_FUNCTION_PREFIX, ""); + final String scalarFunctionCap = cap.replaceFirst(ResponseJsonSerializer.SCALAR_FUNCTION_PREFIX, ""); excludedCapabilities.supportScalarFunction(ScalarFunctionCapability.valueOf(scalarFunctionCap)); } else { // High Level Capability @@ -193,16 +214,23 @@ private static Capabilities parseExcludedCapabilities(String excludedCapabilitie return excludedCapabilities; } - private static String handlePushdownRequest(PushdownRequest request, ExaMetadata exaMeta) throws AdapterException { + private static String handlePushdownRequest(final PushdownRequest request, final ExaMetadata exaMeta) + throws AdapterException { // Generate SQL pushdown query - SchemaMetadataInfo meta = request.getSchemaMetadataInfo(); - SqlDialectContext dialectContext = new SqlDialectContext(SchemaAdapterNotes.deserialize(request.getSchemaMetadataInfo().getAdapterNotes(), request.getSchemaMetadataInfo().getSchemaName())); - SqlDialect dialect = JdbcAdapterProperties.getSqlDialect(request.getSchemaMetadataInfo().getProperties(), supportedDialects, dialectContext); - SqlGenerationContext context = new SqlGenerationContext(JdbcAdapterProperties.getCatalog(meta.getProperties()), JdbcAdapterProperties.getSchema(meta.getProperties()), JdbcAdapterProperties.isLocal(meta.getProperties())); - SqlGenerationVisitor sqlGeneratorVisitor = dialect.getSqlGenerationVisitor(context); - String pushdownQuery = request.getSelect().accept(sqlGeneratorVisitor); + final SchemaMetadataInfo meta = request.getSchemaMetadataInfo(); + final SqlDialectContext dialectContext = new SqlDialectContext(SchemaAdapterNotes.deserialize( + request.getSchemaMetadataInfo().getAdapterNotes(), request.getSchemaMetadataInfo().getSchemaName())); + final SqlDialect dialect = JdbcAdapterProperties.getSqlDialect(request.getSchemaMetadataInfo().getProperties(), + dialectContext); + final SqlGenerationContext context = new SqlGenerationContext( + JdbcAdapterProperties.getCatalog(meta.getProperties()), + JdbcAdapterProperties.getSchema(meta.getProperties()), + JdbcAdapterProperties.isLocal(meta.getProperties())); + final SqlGenerationVisitor sqlGeneratorVisitor = dialect.getSqlGenerationVisitor(context); + final String pushdownQuery = request.getSelect().accept(sqlGeneratorVisitor); - ExaConnectionInformation connection = 
JdbcAdapterProperties.getConnectionInformation(meta.getProperties(), exaMeta); + final ExaConnectionInformation connection = JdbcAdapterProperties.getConnectionInformation(meta.getProperties(), + exaMeta); String credentials = ""; if (connection.getUser() != null || connection.getPassword() != null) { credentials = "USER '" + connection.getUser() + "' IDENTIFIED BY '" + connection.getPassword() + "'"; @@ -213,13 +241,11 @@ private static String handlePushdownRequest(PushdownRequest request, ExaMetadata sql = pushdownQuery; } else if (JdbcAdapterProperties.isImportFromExa(meta.getProperties())) { sql = String.format("IMPORT FROM EXA AT '%s' %s STATEMENT '%s'", - JdbcAdapterProperties.getExaConnectionString(meta.getProperties()), - credentials, + JdbcAdapterProperties.getExaConnectionString(meta.getProperties()), credentials, pushdownQuery.replace("'", "''")); } else if (JdbcAdapterProperties.isImportFromOra(meta.getProperties())) { sql = String.format("IMPORT FROM ORA AT %s %s STATEMENT '%s'", - JdbcAdapterProperties.getOraConnectionName(meta.getProperties()), - credentials, + JdbcAdapterProperties.getOraConnectionName(meta.getProperties()), credentials, pushdownQuery.replace("'", "''")); } else { if (JdbcAdapterProperties.userSpecifiedConnection(meta.getProperties())) { @@ -228,53 +254,48 @@ private static String handlePushdownRequest(PushdownRequest request, ExaMetadata credentials = "'" + connection.getAddress() + "' " + credentials; } - String columnDescription = createColumnDescription(exaMeta, meta, pushdownQuery, dialect); + final String columnDescription = createColumnDescription(exaMeta, meta, pushdownQuery, dialect); if (columnDescription == null) { - sql = String.format("IMPORT FROM JDBC AT %s STATEMENT '%s'", - credentials, + sql = String.format("IMPORT FROM JDBC AT %s STATEMENT '%s'", credentials, pushdownQuery.replace("'", "''")); } else { - sql = String.format("IMPORT INTO %s FROM JDBC AT %s STATEMENT '%s'", - columnDescription, - credentials, + sql = String.format("IMPORT INTO %s FROM JDBC AT %s STATEMENT '%s'", columnDescription, credentials, pushdownQuery.replace("'", "''")); } } - + return ResponseJsonSerializer.makePushdownResponse(sql); } - private static String createColumnDescription(ExaMetadata exaMeta, - SchemaMetadataInfo meta, - String pushdownQuery, - SqlDialect dialect) throws AdapterException { + private static String createColumnDescription(final ExaMetadata exaMeta, final SchemaMetadataInfo meta, + final String pushdownQuery, final SqlDialect dialect) throws AdapterException { PreparedStatement ps = null; - ExaConnectionInformation connectionInformation = JdbcAdapterProperties.getConnectionInformation(meta.getProperties(), exaMeta); + final ExaConnectionInformation connectionInformation = JdbcAdapterProperties + .getConnectionInformation(meta.getProperties(), exaMeta); Connection connection = null; - int val = -1; try { connection = establishConnection(connectionInformation); + logger.fine(() -> "createColumnDescription: " + pushdownQuery); ps = connection.prepareStatement(pushdownQuery); - ResultSetMetaData metadata=ps.getMetaData(); - if (metadata==null){ + ResultSetMetaData metadata = ps.getMetaData(); + if (metadata == null) { ps.execute(); - metadata=ps.getMetaData(); - if (metadata==null) { + metadata = ps.getMetaData(); + if (metadata == null) { throw new SQLException("getMetaData() failed"); } } - DataType[] internalTypes = new DataType[metadata.getColumnCount()]; - for(int col=1; col <= metadata.getColumnCount(); ++col) { - int jdbcType = 
metadata.getColumnType(col); - int jdbcPrecisions = metadata.getPrecision(col); - int jdbcScales = metadata.getScale(col); - JdbcTypeDescription description = new JdbcTypeDescription(jdbcType, - jdbcScales, jdbcPrecisions, 0, + final DataType[] internalTypes = new DataType[metadata.getColumnCount()]; + for (int col = 1; col <= metadata.getColumnCount(); ++col) { + final int jdbcType = metadata.getColumnType(col); + final int jdbcPrecisions = metadata.getPrecision(col); + final int jdbcScales = metadata.getScale(col); + final JdbcTypeDescription description = new JdbcTypeDescription(jdbcType, jdbcScales, jdbcPrecisions, 0, metadata.getColumnTypeName(col)); internalTypes[col - 1] = dialect.mapJdbcType(description); } - StringBuffer buffer = new StringBuffer(); + final StringBuffer buffer = new StringBuffer(); buffer.append('('); for (int i = 0; i < internalTypes.length; i++) { buffer.append("c"); @@ -288,19 +309,18 @@ private static String createColumnDescription(ExaMetadata exaMeta, buffer.append(')'); return buffer.toString(); - } catch (SQLException e) { - throw new RuntimeException("Cannot resolve column types." + e.getMessage()); - + } catch (final SQLException e) { + throw new RuntimeException("Cannot resolve column types.", e); } } - private static Connection establishConnection(ExaConnectionInformation connection) throws SQLException { + private static Connection establishConnection(final ExaConnectionInformation connection) throws SQLException { final String connectionString = connection.getAddress(); final String user = connection.getUser(); final String password = connection.getPassword(); - System.out.println("conn: " + connectionString); + logger.fine(() -> "Connection parameters: " + connectionString); - java.util.Properties info = new java.util.Properties(); + final java.util.Properties info = new java.util.Properties(); if (user != null) { info.put("user", user); } @@ -310,8 +330,7 @@ private static Connection establishConnection(ExaConnectionInformation connectio if (KerberosUtils.isKerberosAuth(password)) { try { KerberosUtils.configKerberos(user, password); - } - catch (Exception e) { + } catch (final Exception e) { e.printStackTrace(); throw new RuntimeException("Error configuring Kerberos: " + e.getMessage(), e); } @@ -320,17 +339,18 @@ private static Connection establishConnection(ExaConnectionInformation connectio } // Forward stdout to an external output service - private static void tryAttachToOutputService(SchemaMetadataInfo meta) throws AdapterException { - String debugAddress = JdbcAdapterProperties.getDebugAddress(meta.getProperties()); + private static OutputStream tryAttachToOutputService(final SchemaMetadataInfo meta) throws AdapterException { + final String debugAddress = JdbcAdapterProperties.getDebugAddress(meta.getProperties()); if (!debugAddress.isEmpty()) { try { - String debugHost = debugAddress.split(":")[0]; - int debugPort = Integer.parseInt(debugAddress.split(":")[1]); - UdfUtils.tryAttachToOutputService(debugHost, debugPort); - } catch (Exception ex) { - throw new AdapterException("You have to specify a valid hostname and port for the udf debug service, e.g. 'hostname:3000'"); + final String debugHost = debugAddress.split(":")[0]; + final int debugPort = Integer.parseInt(debugAddress.split(":")[1]); + return UdfUtils.tryAttachToOutputService(debugHost, debugPort); + } catch (final Exception ex) { + throw new AdapterException( + "You have to specify a valid hostname and port for the udf debug service, e.g. 
'hostname:3000'"); } } + return null; } - -} +} \ No newline at end of file diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/JdbcAdapterProperties.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/JdbcAdapterProperties.java index 33e4d9067..f73cd97c1 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/JdbcAdapterProperties.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/JdbcAdapterProperties.java @@ -1,5 +1,13 @@ package com.exasol.adapter.jdbc; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; + import com.exasol.ExaConnectionAccessException; import com.exasol.ExaConnectionInformation; import com.exasol.ExaMetadata; @@ -8,12 +16,12 @@ import com.exasol.adapter.dialects.SqlDialectContext; import com.exasol.adapter.dialects.SqlDialects; -import java.util.*; - /** - * Class to expose a nice interface to properties. Casts to the correct data types, checks for valid property values and consistency. + * Class to expose a nice interface to properties. Casts to the correct data + * types, checks for valid property values and consistency. */ -public class JdbcAdapterProperties { +public final class JdbcAdapterProperties { + static final Logger LOGGER = Logger.getLogger(JdbcAdapterProperties.class.getName()); // One of the following needs to be set static final String PROP_CATALOG_NAME = "CATALOG_NAME"; @@ -34,86 +42,96 @@ public class JdbcAdapterProperties { static final String PROP_ORA_CONNECTION_NAME = "ORA_CONNECTION_NAME"; static final String PROP_EXCLUDED_CAPABILITIES = "EXCLUDED_CAPABILITIES"; static final String PROP_EXCEPTION_HANDLING = "EXCEPTION_HANDLING"; + static final String PROP_LOG_LEVEL = "LOG_LEVEL"; + + private static final String DEFAULT_LOG_LEVEL = "INFO"; + + private JdbcAdapterProperties() { + // prevent instantiation of static helper class + } // Specifies different exception handling strategies public enum ExceptionHandlingMode { - IGNORE_INVALID_VIEWS, - NONE + IGNORE_INVALID_VIEWS, NONE } - private static String getProperty(Map properties, String name, String defaultValue) { + private static String getProperty(final Map properties, final String name, + final String defaultValue) { if (properties.containsKey(name)) { return properties.get(name); } else { return defaultValue; } } - - public static String getCatalog(Map properties) { + + public static String getCatalog(final Map properties) { return getProperty(properties, PROP_CATALOG_NAME, ""); } - - public static String getSchema(Map properties) { + + public static String getSchema(final Map properties) { return getProperty(properties, PROP_SCHEMA_NAME, ""); } - public static boolean userSpecifiedConnection(Map properties) { - String connName = getProperty(properties, PROP_CONNECTION_NAME, ""); + public static boolean userSpecifiedConnection(final Map properties) { + final String connName = getProperty(properties, PROP_CONNECTION_NAME, ""); return (connName != null && !connName.isEmpty()); } - public static String getConnectionName(Map properties) { - String connName = getProperty(properties, PROP_CONNECTION_NAME, ""); - assert(connName != null && !connName.isEmpty()); + public static String getConnectionName(final Map properties) { + final String connName = getProperty(properties, PROP_CONNECTION_NAME, ""); + assert 
(connName != null && !connName.isEmpty());
         return connName;
     }
 
     /**
-     * Returns the credentials for the remote system. These are either directly specified
-     * in the properties or obtained from a connection (requires privilege to access the connection
-     * .
+     * Returns the credentials for the remote system. These are either directly
+     * specified in the properties or obtained from a connection (requires privilege
+     * to access the connection).
      */
-    public static ExaConnectionInformation getConnectionInformation(Map<String, String> properties, ExaMetadata exaMeta) {
-        String connName = getProperty(properties, PROP_CONNECTION_NAME, "");
+    public static ExaConnectionInformation getConnectionInformation(final Map<String, String> properties,
+            final ExaMetadata exaMeta) {
+        final String connName = getProperty(properties, PROP_CONNECTION_NAME, "");
         if (connName != null && !connName.isEmpty()) {
             try {
-                ExaConnectionInformation connInfo = exaMeta.getConnection(connName);
+                final ExaConnectionInformation connInfo = exaMeta.getConnection(connName);
                 return connInfo;
-            } catch (ExaConnectionAccessException e) {
-                throw new RuntimeException("Could not access the connection information of connection " + connName + ". Error: " + e.toString());
+            } catch (final ExaConnectionAccessException e) {
+                throw new RuntimeException("Could not access the connection information of connection " + connName
+                        + ". Error: " + e.toString());
             }
         } else {
-            String connectionString = properties.get(PROP_CONNECTION_STRING);
-            String user = properties.get(PROP_USERNAME);
-            String password = properties.get(PROP_PASSWORD);
+            final String connectionString = properties.get(PROP_CONNECTION_STRING);
+            final String user = properties.get(PROP_USERNAME);
+            final String password = properties.get(PROP_PASSWORD);
             return new ExaConnectionInformationJdbc(connectionString, user, password);
         }
     }
-
-    public static void checkPropertyConsistency(Map<String, String> properties, SqlDialects supportedDialects) throws AdapterException {
-        validatePropertyValues(properties);
-
-        checkMandatoryProperties(properties, supportedDialects);
+    public static void checkPropertyConsistency(final Map<String, String> properties) throws AdapterException {
+        validatePropertyValues(properties);
+        checkMandatoryProperties(properties);
         checkImportPropertyConsistency(properties, PROP_IMPORT_FROM_EXA, PROP_EXA_CONNECTION_STRING);
         checkImportPropertyConsistency(properties, PROP_IMPORT_FROM_ORA, PROP_ORA_CONNECTION_NAME);
     }
 
-    private static void checkImportPropertyConsistency(Map<String, String> properties, String propImportFromX, String propConnection) throws InvalidPropertyException {
-        boolean isImport = getProperty(properties, propImportFromX, "").toUpperCase().equals("TRUE");
-        boolean connectionIsEmpty = getProperty(properties, propConnection, "").isEmpty();
+    private static void checkImportPropertyConsistency(final Map<String, String> properties,
+            final String propImportFromX, final String propConnection) throws InvalidPropertyException {
+        final boolean isImport = getProperty(properties, propImportFromX, "").toUpperCase().equals("TRUE");
+        final boolean connectionIsEmpty = getProperty(properties, propConnection, "").isEmpty();
         if (isImport) {
             if (connectionIsEmpty) {
-                throw new InvalidPropertyException("You defined the property " + propImportFromX + ", please also define " + propConnection);
+                throw new InvalidPropertyException(
+                        "You defined the property " + propImportFromX + ", please also define " + propConnection);
             }
         } else {
             if (!connectionIsEmpty) {
-                throw new InvalidPropertyException("You defined the property " + propConnection + " without setting 
" + propImportFromX + " to 'TRUE'. This is not allowed"); + throw new InvalidPropertyException("You defined the property " + propConnection + " without setting " + + propImportFromX + " to 'TRUE'. This is not allowed"); } } } - private static void validatePropertyValues(Map properties) throws AdapterException { + private static void validatePropertyValues(final Map properties) throws AdapterException { validateBooleanProperty(properties, PROP_IS_LOCAL); validateBooleanProperty(properties, PROP_IMPORT_FROM_EXA); validateBooleanProperty(properties, PROP_IMPORT_FROM_ORA); @@ -124,81 +142,90 @@ private static void validatePropertyValues(Map properties) throw validateExceptionHandling(properties.get(PROP_EXCEPTION_HANDLING)); } } - - private static void validateBooleanProperty(Map properties, String property) throws AdapterException { + + private static void validateBooleanProperty(final Map properties, final String property) + throws AdapterException { if (properties.containsKey(property)) { if (!properties.get(property).toUpperCase().matches("^TRUE$|^FALSE$")) { - throw new InvalidPropertyException("The value '" + properties.get(property) + "' for the property " + property + " is invalid. It has to be either 'true' or 'false' (case insensitive)."); + throw new InvalidPropertyException("The value '" + properties.get(property) + "' for the property " + + property + " is invalid. It has to be either 'true' or 'false' (case insensitive)."); } } } - private static void validateDebugOutputAddress(String debugAddress) throws AdapterException { + private static void validateDebugOutputAddress(final String debugAddress) throws AdapterException { if (!debugAddress.isEmpty()) { - String error = "You specified an invalid hostname and port for the udf debug service (" + PROP_DEBUG_ADDRESS + "). Please provide a valid value, e.g. 'hostname:3000'"; - try { - String debugHost = debugAddress.split(":")[0]; - int debugPort = Integer.parseInt(debugAddress.split(":")[1]); - } catch (Exception ex) { + final String error = "You specified an invalid hostname and port for the udf debug service (" + + PROP_DEBUG_ADDRESS + "). Please provide a valid value, e.g. 
'hostname:3000'"; + if (debugAddress.split(":").length != 2) { throw new AdapterException(error); } - if (debugAddress.split(":").length != 2) { + try { + Integer.parseInt(debugAddress.split(":")[1]); + } catch (final Exception ex) { throw new AdapterException(error); } } } - private static void validateExceptionHandling(String exceptionHandling) throws AdapterException { + private static void validateExceptionHandling(final String exceptionHandling) throws AdapterException { if (!(exceptionHandling == null || exceptionHandling.isEmpty())) { - for (ExceptionHandlingMode mode : ExceptionHandlingMode.values()) { + for (final ExceptionHandlingMode mode : ExceptionHandlingMode.values()) { if (mode.name().equals(exceptionHandling)) { return; } } - String error = "You specified an invalid exception mode (" + exceptionHandling + ")."; + final String error = "You specified an invalid exception mode (" + exceptionHandling + ")."; throw new AdapterException(error); } } - private static void checkMandatoryProperties(Map properties, SqlDialects supportedDialects) throws AdapterException { + private static void checkMandatoryProperties(final Map properties) throws AdapterException { + final String availableDialects = "Available dialects: " + SqlDialects.getInstance().getDialectsString(); if (!properties.containsKey(PROP_SQL_DIALECT)) { - throw new InvalidPropertyException("You have to specify the SQL dialect (" + PROP_SQL_DIALECT + "). Available dialects: " + supportedDialects.getDialectsString()); + throw new InvalidPropertyException( + "You have to specify the SQL dialect (" + PROP_SQL_DIALECT + "). " + availableDialects); } - if (!supportedDialects.isSupported(properties.get(PROP_SQL_DIALECT))) { - throw new InvalidPropertyException("SQL Dialect not supported: " + properties.get(PROP_SQL_DIALECT) + ". Available dialects: " + supportedDialects.getDialectsString()); + if (!SqlDialects.getInstance().isSupported(properties.get(PROP_SQL_DIALECT))) { + throw new InvalidPropertyException( + "SQL Dialect \"" + properties.get(PROP_SQL_DIALECT) + "\" is not supported. 
" + availableDialects); } if (properties.containsKey(PROP_CONNECTION_NAME)) { - if (properties.containsKey(PROP_CONNECTION_STRING) || properties.containsKey(PROP_USERNAME) || properties.containsKey(PROP_PASSWORD) ) { - throw new InvalidPropertyException("You specified a connection (" + PROP_CONNECTION_NAME + ") and therefore may not specify the properties " + PROP_CONNECTION_STRING + ", " + PROP_USERNAME + " and " + PROP_PASSWORD); + if (properties.containsKey(PROP_CONNECTION_STRING) || properties.containsKey(PROP_USERNAME) + || properties.containsKey(PROP_PASSWORD)) { + throw new InvalidPropertyException("You specified a connection (" + PROP_CONNECTION_NAME + + ") and therefore may not specify the properties " + PROP_CONNECTION_STRING + ", " + + PROP_USERNAME + " and " + PROP_PASSWORD); } } else { if (!properties.containsKey(PROP_CONNECTION_STRING)) { - throw new InvalidPropertyException("You did not specify a connection (" + PROP_CONNECTION_NAME + ") and therefore have to specify the property " + PROP_CONNECTION_STRING); + throw new InvalidPropertyException("You did not specify a connection (" + PROP_CONNECTION_NAME + + ") and therefore have to specify the property " + PROP_CONNECTION_STRING); } } } - - public static boolean isImportFromExa(Map properties) { + + public static boolean isImportFromExa(final Map properties) { return getProperty(properties, PROP_IMPORT_FROM_EXA, "").toUpperCase().equals("TRUE"); } - public static boolean isImportFromOra(Map properties) { + public static boolean isImportFromOra(final Map properties) { return getProperty(properties, PROP_IMPORT_FROM_ORA, "").toUpperCase().equals("TRUE"); } - public static String getExaConnectionString(Map properties) { + public static String getExaConnectionString(final Map properties) { return getProperty(properties, PROP_EXA_CONNECTION_STRING, ""); } - public static String getOraConnectionName(Map properties) { + public static String getOraConnectionName(final Map properties) { return getProperty(properties, PROP_ORA_CONNECTION_NAME, ""); } - public static List getTableFilter(Map properties) { - String tableNames = getProperty(properties, PROP_TABLES, ""); + public static List getTableFilter(final Map properties) { + final String tableNames = getProperty(properties, PROP_TABLES, ""); if (!tableNames.isEmpty()) { - List tables = Arrays.asList(tableNames.split(",")); - for (int i=0; i tables = Arrays.asList(tableNames.split(",")); + for (int i = 0; i < tables.size(); ++i) { tables.set(i, tables.get(i).trim()); } return tables; @@ -207,37 +234,40 @@ public static List getTableFilter(Map properties) { } } - public static String getExcludedCapabilities(Map properties) { + public static String getExcludedCapabilities(final Map properties) { return getProperty(properties, PROP_EXCLUDED_CAPABILITIES, ""); } - public static String getDebugAddress(Map properties) { + public static String getDebugAddress(final Map properties) { return getProperty(properties, PROP_DEBUG_ADDRESS, ""); } - public static boolean isLocal(Map properties) { + public static boolean isLocal(final Map properties) { return getProperty(properties, PROP_IS_LOCAL, "").toUpperCase().equals("TRUE"); } - public static String getSqlDialectName(Map properties, SqlDialects supportedDialects) { + public static String getSqlDialectName(final Map properties) { return getProperty(properties, PROP_SQL_DIALECT, ""); } - public static SqlDialect getSqlDialect(Map properties, SqlDialects supportedDialects, SqlDialectContext dialectContext) throws AdapterException { - String 
dialectName = getProperty(properties, PROP_SQL_DIALECT, ""); - SqlDialect dialect = supportedDialects.getDialectByName(dialectName, dialectContext); + public static SqlDialect getSqlDialect(final Map properties, final SqlDialectContext dialectContext) + throws InvalidPropertyException { + final String dialectName = getProperty(properties, PROP_SQL_DIALECT, ""); + final SqlDialect dialect = SqlDialects.getInstance().getDialectInstanceForNameWithContext(dialectName, + dialectContext); if (dialect == null) { - throw new InvalidPropertyException("SQL Dialect not supported: " + dialectName + " - all dialects: " + supportedDialects.getDialectsString()); + throw new InvalidPropertyException("SQL Dialect not supported: " + dialectName + " - all dialects: " + + SqlDialects.getInstance().getDialectsString()); } return dialect; } - public static ExceptionHandlingMode getExceptionHandlingMode(Map properties) { - String propertyValue = getProperty(properties, PROP_EXCEPTION_HANDLING, ""); + public static ExceptionHandlingMode getExceptionHandlingMode(final Map properties) { + final String propertyValue = getProperty(properties, PROP_EXCEPTION_HANDLING, ""); if (propertyValue == null || propertyValue.isEmpty()) { return ExceptionHandlingMode.NONE; } - for (ExceptionHandlingMode mode : ExceptionHandlingMode.values()) { + for (final ExceptionHandlingMode mode : ExceptionHandlingMode.values()) { if (mode.name().equals(propertyValue)) { return mode; } @@ -245,23 +275,29 @@ public static ExceptionHandlingMode getExceptionHandlingMode(Map return ExceptionHandlingMode.NONE; } - public static boolean isRefreshNeeded(Map newProperties) { - return newProperties.containsKey(PROP_CONNECTION_STRING) - || newProperties.containsKey(PROP_CONNECTION_NAME) - || newProperties.containsKey(PROP_USERNAME) - || newProperties.containsKey(PROP_PASSWORD) - || newProperties.containsKey(PROP_SCHEMA_NAME) - || newProperties.containsKey(PROP_CATALOG_NAME) + public static Level getLogLevel(final Map properties) throws InvalidPropertyException { + final String levelAsText = getProperty(properties, PROP_LOG_LEVEL, DEFAULT_LOG_LEVEL); + try { + return Level.parse(levelAsText); + } catch (IllegalArgumentException | NullPointerException e) { + throw new InvalidPropertyException("Unable to set log level \"" + levelAsText + "\""); + } + } + + public static boolean isRefreshNeeded(final Map newProperties) { + return newProperties.containsKey(PROP_CONNECTION_STRING) || newProperties.containsKey(PROP_CONNECTION_NAME) + || newProperties.containsKey(PROP_USERNAME) || newProperties.containsKey(PROP_PASSWORD) + || newProperties.containsKey(PROP_SCHEMA_NAME) || newProperties.containsKey(PROP_CATALOG_NAME) || newProperties.containsKey(PROP_TABLES); } - + public static class ExaConnectionInformationJdbc implements ExaConnectionInformation { - - private String address; - private String user; // can be null - private String password; // can be null - - public ExaConnectionInformationJdbc(String address, String user, String password) { + + private final String address; + private final String user; // can be null + private final String password; // can be null + + public ExaConnectionInformationJdbc(final String address, final String user, final String password) { this.address = address; this.user = user; this.password = password; @@ -289,14 +325,16 @@ public String getPassword() { } /** - * Returns the properties as they would be after successfully applying the changes to the existing (old) set of properties. 
+ * Returns the properties as they would be after successfully applying the + * changes to the existing (old) set of properties. */ - public static Map getNewProperties ( - Map oldProperties, Map changedProperties) { - Map newCompleteProperties = new HashMap<>(oldProperties); - for (Map.Entry changedProperty : changedProperties.entrySet()) { + public static Map getNewProperties(final Map oldProperties, + final Map changedProperties) { + final Map newCompleteProperties = new HashMap<>(oldProperties); + for (final Map.Entry changedProperty : changedProperties.entrySet()) { if (changedProperty.getValue() == null) { - // Null values represent properties which are deleted by the user (might also have never existed actually) + // Null values represent properties which are deleted by the user (might also + // have never existed actually) newCompleteProperties.remove(changedProperty.getKey()); } else { newCompleteProperties.put(changedProperty.getKey(), changedProperty.getValue()); diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/JdbcMetadataReader.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/JdbcMetadataReader.java index 6aff45af0..a8acf9167 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/JdbcMetadataReader.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/JdbcMetadataReader.java @@ -1,75 +1,64 @@ package com.exasol.adapter.jdbc; -import com.exasol.adapter.AdapterException; -import com.exasol.adapter.dialects.SqlDialect; -import com.exasol.adapter.dialects.SqlDialectContext; -import com.exasol.adapter.dialects.SqlDialects; -import com.exasol.adapter.metadata.ColumnMetadata; -import com.exasol.adapter.metadata.SchemaMetadata; -import com.exasol.adapter.metadata.TableMetadata; -import com.google.common.base.Joiner; - import java.sql.*; import java.util.ArrayList; import java.util.List; +import java.util.logging.Logger; + +import com.exasol.adapter.AdapterException; +import com.exasol.adapter.dialects.*; +import com.exasol.adapter.metadata.*; +import com.google.common.base.Joiner; /** - * TODO Find good solutions to handle tables with unsupported data types, or tables that generate exceptions. Ideas: Skip such tables by adding a boolean property like IGNORE_INVALID_TABLES. + * TODO Find good solutions to handle tables with unsupported data types, or + * tables that generate exceptions. Ideas: Skip such tables by adding a boolean + * property like IGNORE_INVALID_TABLES. 
*/ public class JdbcMetadataReader { + private static final Logger LOGGER = Logger.getLogger(JdbcMetadataReader.class.getName()); - public static SchemaMetadata readRemoteMetadata(String connectionString, - String user, - String password, - String catalog, - String schema, - List tableFilter, - SqlDialects dialects, - String dialectName, - JdbcAdapterProperties.ExceptionHandlingMode exceptionMode) throws SQLException, AdapterException { + public static SchemaMetadata readRemoteMetadata(final String connectionString, final String user, + final String password, String catalog, String schema, final List tableFilter, + final String dialectName, final JdbcAdapterProperties.ExceptionHandlingMode exceptionMode) + throws SQLException, AdapterException { assert (catalog != null); assert (schema != null); try { - Connection conn = establishConnection(connectionString, user, password); - DatabaseMetaData dbMeta = conn.getMetaData(); + final Connection conn = establishConnection(connectionString, user, password); + final DatabaseMetaData dbMeta = conn.getMetaData(); - // Retrieve relevant parts of DatabaseMetadata. Will be cached in adapternotes of the schema. - SchemaAdapterNotes schemaAdapterNotes = new SchemaAdapterNotes( - dbMeta.getCatalogSeparator(), - dbMeta.getIdentifierQuoteString(), - dbMeta.storesLowerCaseIdentifiers(), - dbMeta.storesUpperCaseIdentifiers(), - dbMeta.storesMixedCaseIdentifiers(), - dbMeta.supportsMixedCaseIdentifiers(), - dbMeta.storesLowerCaseQuotedIdentifiers(), - dbMeta.storesUpperCaseQuotedIdentifiers(), - dbMeta.storesMixedCaseQuotedIdentifiers(), - dbMeta.supportsMixedCaseQuotedIdentifiers(), - dbMeta.nullsAreSortedAtEnd(), - dbMeta.nullsAreSortedAtStart(), - dbMeta.nullsAreSortedHigh(), - dbMeta.nullsAreSortedLow()); + // Retrieve relevant parts of DatabaseMetadata. Will be cached in adapternotes + // of the schema. 
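+            // (These flags, e.g. identifier case handling and NULL sort order,
+            // are what SqlDialectContext hands to the dialect implementation
+            // via getDialectInstanceForNameWithContext below.)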
+            final SchemaAdapterNotes schemaAdapterNotes = new SchemaAdapterNotes(dbMeta.getCatalogSeparator(),
+                    dbMeta.getIdentifierQuoteString(), dbMeta.storesLowerCaseIdentifiers(),
+                    dbMeta.storesUpperCaseIdentifiers(), dbMeta.storesMixedCaseIdentifiers(),
+                    dbMeta.supportsMixedCaseIdentifiers(), dbMeta.storesLowerCaseQuotedIdentifiers(),
+                    dbMeta.storesUpperCaseQuotedIdentifiers(), dbMeta.storesMixedCaseQuotedIdentifiers(),
+                    dbMeta.supportsMixedCaseQuotedIdentifiers(), dbMeta.nullsAreSortedAtEnd(),
+                    dbMeta.nullsAreSortedAtStart(), dbMeta.nullsAreSortedHigh(), dbMeta.nullsAreSortedLow());
 
-            SqlDialect dialect = dialects.getDialectByName(dialectName, new SqlDialectContext(schemaAdapterNotes));
+            final SqlDialect dialect = SqlDialects.getInstance().getDialectInstanceForNameWithContext(dialectName,
+                    new SqlDialectContext(schemaAdapterNotes));
 
             catalog = findCatalog(catalog, dbMeta, dialect);
 
             schema = findSchema(schema, dbMeta, dialect);
 
-            List<TableMetadata> tables = findTables(catalog, schema, tableFilter, dbMeta, dialect, exceptionMode);
+            final List<TableMetadata> tables = findTables(catalog, schema, tableFilter, dbMeta, dialect, exceptionMode);
             conn.close();
             return new SchemaMetadata(SchemaAdapterNotes.serialize(schemaAdapterNotes), tables);
-        } catch (SQLException e) {
+        } catch (final SQLException e) {
             e.printStackTrace();
             throw e;
         }
     }
 
-    private static Connection establishConnection(String connectionString, String user, String password) throws SQLException {
-        System.out.println("conn: " + connectionString);
-
-        java.util.Properties info = new java.util.Properties();
+    private static Connection establishConnection(final String connectionString, final String user,
+            final String password) throws SQLException {
+        LOGGER.fine(() -> "Establishing connection with parameters: " + connectionString);
+        final java.util.Properties info = new java.util.Properties();
         if (user != null) {
             info.put("user", user);
         }
@@ -79,8 +68,7 @@ private static Connection establishConnection(String connectionString, String us
         if (KerberosUtils.isKerberosAuth(password)) {
             try {
                 KerberosUtils.configKerberos(user, password);
-            }
-            catch (Exception e) {
+            } catch (final Exception e) {
                 e.printStackTrace();
                 throw new RuntimeException("Error configuring Kerberos: " + e.getMessage(), e);
             }
@@ -88,23 +76,24 @@ private static Connection establishConnection(String connectionString, String us
         return DriverManager.getConnection(connectionString, info);
     }
 
-    private static String findCatalog(String catalog, DatabaseMetaData dbMeta, SqlDialect dialect) throws SQLException, AdapterException {
+    private static String findCatalog(final String catalog, final DatabaseMetaData dbMeta, final SqlDialect dialect)
+            throws SQLException, AdapterException {
         boolean foundCatalog = false;
         String curCatalog = "";
         int numCatalogs = 0;
-        List<String> allCatalogs = new ArrayList<>();
+        final List<String> allCatalogs = new ArrayList<>();
         ResultSet res = null;
         try {
             res = dbMeta.getCatalogs();
             while (res.next()) {
-                curCatalog = res.getString("TABLE_CAT");    // EXA_DB in case of EXASOL
+                curCatalog = res.getString("TABLE_CAT"); // EXA_DB in case of EXASOL
                 allCatalogs.add(curCatalog);
                 if (curCatalog.equals(catalog)) {
                     foundCatalog = true;
                 }
-                ++ numCatalogs;
+                ++numCatalogs;
             }
-        } catch (Exception ex) {
+        } catch (final Exception ex) {
             if (dialect.supportsJdbcCatalogs() == SqlDialect.SchemaOrCatalogSupport.SUPPORTED) {
                 throw new RuntimeException("Unexpected exception when accessing the catalogs: " + ex.getMessage(), ex);
             } else if (dialect.supportsJdbcCatalogs() == SqlDialect.SchemaOrCatalogSupport.UNSUPPORTED) {
@@ -112,17 +101,20 @@ private static String findCatalog(String catalog, DatabaseMetaData dbMeta, SqlDi
                 ex.printStackTrace();
                 return null;
             } else {
-                // We don't know if system supports catalogs. If user specified an catalog, we have a problem, otherwise we ignore the error
+                // We don't know if the system supports catalogs. If the user specified a catalog, we
+                // have a problem, otherwise we ignore the error
                 if (!catalog.isEmpty()) {
-                    throw new RuntimeException("Unexpected exception when accessing the catalogs: " + ex.getMessage(), ex);
+                    throw new RuntimeException("Unexpected exception when accessing the catalogs: " + ex.getMessage(),
+                            ex);
                 } else {
                     ex.printStackTrace();
                     return null;
                 }
             }
         } finally {
-            if(res != null)
-                res.close();
+            if (res != null) {
+                res.close();
+            }
         }
         if (dialect.supportsJdbcCatalogs() == SqlDialect.SchemaOrCatalogSupport.SUPPORTED
                 || dialect.supportsJdbcCatalogs() == SqlDialect.SchemaOrCatalogSupport.UNKNOWN) {
@@ -131,43 +123,50 @@ private static String findCatalog(String catalog, DatabaseMetaData dbMeta, SqlDi
         } else {
             if (catalog.isEmpty()) {
                 if (dialect.supportsJdbcCatalogs() == SqlDialect.SchemaOrCatalogSupport.SUPPORTED) {
-                    throw new AdapterException("You have to specify a catalog. Available catalogs: " + Joiner.on(", ").join(allCatalogs));
+                    throw new AdapterException("You have to specify a catalog. Available catalogs: "
+                            + Joiner.on(", ").join(allCatalogs));
                 } else {
                     if (numCatalogs == 0) {
                         return null;
                     } else {
-                        throw new AdapterException("You have to specify a catalog. Available catalogs: " + Joiner.on(", ").join(allCatalogs));
+                        throw new AdapterException("You have to specify a catalog. Available catalogs: "
+                                + Joiner.on(", ").join(allCatalogs));
                     }
                 }
             } else {
-                throw new AdapterException("Catalog " + catalog + " does not exist. Available catalogs: " + Joiner.on(", ").join(allCatalogs));
+                throw new AdapterException("Catalog " + catalog + " does not exist. Available catalogs: "
+                        + Joiner.on(", ").join(allCatalogs));
             }
         }
     } else {
-        assert(dialect.supportsJdbcCatalogs() == SqlDialect.SchemaOrCatalogSupport.UNSUPPORTED);
+        assert (dialect.supportsJdbcCatalogs() == SqlDialect.SchemaOrCatalogSupport.UNSUPPORTED);
         if (catalog.isEmpty()) {
             if (numCatalogs == 0) {
                 return null;
-            } else if (numCatalogs == 1) {
-                // Take the one and only catalog (in case of EXASOL this is always EXA_DB). Returning null would probably also work fine.
+            } else if (numCatalogs == 1) {
+                // Take the one and only catalog (in case of EXASOL this is always EXA_DB).
+                // Returning null would probably also work fine.
return curCatalog; } else { - throw new AdapterException("The data source is not expected to support catalogs, but has " + numCatalogs + " catalogs: " + Joiner.on(", ").join(allCatalogs)); + throw new AdapterException("The data source is not expected to support catalogs, but has " + + numCatalogs + " catalogs: " + Joiner.on(", ").join(allCatalogs)); } } else { - throw new AdapterException("You specified a catalog, however the data source does not support the concept of catalogs."); + throw new AdapterException( + "You specified a catalog, however the data source does not support the concept of catalogs."); } } } - private static String findSchema(String schema, DatabaseMetaData dbMeta, SqlDialect dialect) throws SQLException, AdapterException { + private static String findSchema(final String schema, final DatabaseMetaData dbMeta, final SqlDialect dialect) + throws SQLException, AdapterException { // Check if schema exists boolean foundSchema = false; - List allSchemas = new ArrayList<>(); + final List allSchemas = new ArrayList<>(); int numSchemas = 0; String curSchema = ""; ResultSet schemas = null; - + try { schemas = dbMeta.getSchemas(); while (schemas.next()) { @@ -178,7 +177,7 @@ private static String findSchema(String schema, DatabaseMetaData dbMeta, SqlDial } ++numSchemas; } - } catch (Exception ex) { + } catch (final Exception ex) { if (dialect.supportsJdbcSchemas() == SqlDialect.SchemaOrCatalogSupport.SUPPORTED) { throw new RuntimeException("Unexpected exception when accessing the schema: " + ex.getMessage(), ex); } else if (dialect.supportsJdbcSchemas() == SqlDialect.SchemaOrCatalogSupport.UNSUPPORTED) { @@ -188,17 +187,19 @@ private static String findSchema(String schema, DatabaseMetaData dbMeta, SqlDial } else { // We don't know if system supports schemas. if (!schema.isEmpty()) { - throw new RuntimeException("Unexpected exception when accessing the schemas: " + ex.getMessage(), ex); + throw new RuntimeException("Unexpected exception when accessing the schemas: " + ex.getMessage(), + ex); } else { ex.printStackTrace(); return null; } } } finally { - if (schemas != null) - schemas.close(); + if (schemas != null) { + schemas.close(); + } } - + if (dialect.supportsJdbcSchemas() == SqlDialect.SchemaOrCatalogSupport.SUPPORTED || dialect.supportsJdbcSchemas() == SqlDialect.SchemaOrCatalogSupport.UNKNOWN) { if (foundSchema) { @@ -206,65 +207,69 @@ private static String findSchema(String schema, DatabaseMetaData dbMeta, SqlDial } else { if (schema.isEmpty()) { if (dialect.supportsJdbcSchemas() == SqlDialect.SchemaOrCatalogSupport.SUPPORTED) { - throw new AdapterException("You have to specify a schema. Available schemas: " + Joiner.on(", ").join(allSchemas)); + throw new AdapterException( + "You have to specify a schema. Available schemas: " + Joiner.on(", ").join(allSchemas)); } else { if (numSchemas == 0) { return null; } else { - throw new AdapterException("You have to specify a schema. Available schemas: " + Joiner.on(", ").join(allSchemas)); + throw new AdapterException("You have to specify a schema. Available schemas: " + + Joiner.on(", ").join(allSchemas)); } } } else { - throw new AdapterException("Schema " + schema + " does not exist. Available schemas: " + Joiner.on(", ").join(allSchemas)); + throw new AdapterException("Schema " + schema + " does not exist. 
Available schemas: " + + Joiner.on(", ").join(allSchemas)); } } } else { - assert(dialect.supportsJdbcSchemas() == SqlDialect.SchemaOrCatalogSupport.UNSUPPORTED); + assert (dialect.supportsJdbcSchemas() == SqlDialect.SchemaOrCatalogSupport.UNSUPPORTED); if (schema.isEmpty()) { if (numSchemas == 0) { return null; - } else if (numSchemas == 1) { + } else if (numSchemas == 1) { // Take the one and only schema. Returning null would probably also work fine. return curSchema; } else { - throw new AdapterException("The data source is not expected to support schemas, but has " + numSchemas + " schemas: " + Joiner.on(", ").join(allSchemas)); + throw new AdapterException("The data source is not expected to support schemas, but has " + + numSchemas + " schemas: " + Joiner.on(", ").join(allSchemas)); } } else { - throw new AdapterException("You specified a schema, however the data source does not support the concept of schemas."); + throw new AdapterException( + "You specified a schema, however the data source does not support the concept of schemas."); } } } - private static List findTables(String catalog, String schema, List tableFilter, - DatabaseMetaData dbMeta, SqlDialect dialect, - JdbcAdapterProperties.ExceptionHandlingMode exceptionMode) - throws SQLException { - List tables = new ArrayList<>(); - - String[] supportedTableTypes = {"TABLE", "VIEW", "SYSTEM TABLE"}; - - ResultSet resTables = dbMeta.getTables(catalog, schema, null, supportedTableTypes); - List< SqlDialect.MappedTable> tablesMapped = new ArrayList<>(); - //List tableComments = new ArrayList<>(); + private static List findTables(final String catalog, final String schema, + final List tableFilter, final DatabaseMetaData dbMeta, final SqlDialect dialect, + final JdbcAdapterProperties.ExceptionHandlingMode exceptionMode) throws SQLException { + final List tables = new ArrayList<>(); + + final String[] supportedTableTypes = { "TABLE", "VIEW", "SYSTEM TABLE" }; + + final ResultSet resTables = dbMeta.getTables(catalog, schema, null, supportedTableTypes); + final List tablesMapped = new ArrayList<>(); + // List tableComments = new ArrayList<>(); while (resTables.next()) { - SqlDialect.MappedTable mappedTable = dialect.mapTable(resTables); + final SqlDialect.MappedTable mappedTable = dialect.mapTable(resTables); if (!mappedTable.isIgnored()) { - tablesMapped.add(mappedTable); - //tableComments.add(mappedTable.getTableComment()); + tablesMapped.add(mappedTable); + // tableComments.add(mappedTable.getTableComment()); } } - + resTables.close(); // Columns - for (int i=0; i "Processing columns for table \"" + table + "\""); try { if (!tableFilter.isEmpty()) { boolean isInFilter = false; if (identifiersAreCaseInsensitive(dialect)) { - for (String curTable : tableFilter) { + for (final String curTable : tableFilter) { if (curTable.equalsIgnoreCase(table.getTableName())) { isInFilter = true; } @@ -273,44 +278,44 @@ private static List findTables(String catalog, String schema, Lis isInFilter = tableFilter.contains(table.getTableName()); } if (!isInFilter) { - System.out.println("Skip table: " + table); + LOGGER.finest(() -> "Skipping table \"" + table + "\""); continue; } } - List columns = readColumns(dbMeta, catalog, schema, table.getOriginalTableName(), + final List columns = readColumns(dbMeta, catalog, schema, table.getOriginalTableName(), dialect, exceptionMode); if (columns != null) { tables.add(new TableMetadata(table.getTableName(), "", columns, table.getTableComment())); } - } catch (Exception ex) { + } catch (final Exception ex) { throw 
new RuntimeException("Exception for table " + table.getOriginalTableName(), ex); } } return tables; } - private static boolean identifiersAreCaseInsensitive(SqlDialect dialect) { + private static boolean identifiersAreCaseInsensitive(final SqlDialect dialect) { return (dialect.getQuotedIdentifierHandling() == dialect.getUnquotedIdentifierHandling()) && dialect.getQuotedIdentifierHandling() != SqlDialect.IdentifierCaseHandling.INTERPRET_CASE_SENSITIVE; } - private static List readColumns(DatabaseMetaData dbMeta, String catalog, String schema, - String table, SqlDialect dialect, - JdbcAdapterProperties.ExceptionHandlingMode exceptionMode) throws SQLException { - List columns = new ArrayList<>(); + private static List readColumns(final DatabaseMetaData dbMeta, final String catalog, + final String schema, final String table, final SqlDialect dialect, + final JdbcAdapterProperties.ExceptionHandlingMode exceptionMode) throws SQLException { + final List columns = new ArrayList<>(); try { - ResultSet cols = dbMeta.getColumns(catalog, schema, table, null); + final ResultSet cols = dbMeta.getColumns(catalog, schema, table, null); while (cols.next()) { columns.add(dialect.mapColumn(cols)); } if (columns.isEmpty()) { - System.out.println("Warning: Found a table without columns: " + table); + LOGGER.warning(() -> "Found a table \"" + table + "\" that has no columns."); } cols.close(); - } catch (SQLException exception) { + } catch (final SQLException exception) { dialect.handleException(exception, exceptionMode); return null; } return columns; } -} +} \ No newline at end of file diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/logging/CompactFormatter.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/logging/CompactFormatter.java new file mode 100644 index 000000000..8990a46c1 --- /dev/null +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/logging/CompactFormatter.java @@ -0,0 +1,65 @@ +package com.exasol.logging; + +import java.time.Instant; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.util.logging.Formatter; +import java.util.logging.LogRecord; + +/** + * Formatter for compact log messages. + */ +public class CompactFormatter extends Formatter { + private static final String LOG_LEVEL_FORMAT = "%-8s"; + private final DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS"); + + /** + * Formats a log record according in a compact manner. + * + * The parts of the package name between the dots are abbreviated with their + * first letter. Timestamps are displayed as 24h UTC+0. + * + * yyyy-MM-dd HH:mm:ss.SSS LEVEL [c.e.ClassName] The message. 
+ */ + @Override + public String format(final LogRecord record) { + final StringBuilder builder = new StringBuilder(); + builder.append(formatTimestamp(record.getMillis())); + builder.append(" "); + builder.append(String.format(LOG_LEVEL_FORMAT, record.getLevel())); + appendClassName(record.getSourceClassName(), builder); + builder.append(record.getMessage()); + builder.append(System.lineSeparator()); + return builder.toString(); + } + + private void appendClassName(final String className, final StringBuilder builder) { + if (className != null && !className.isEmpty()) { + builder.append("["); + appendNonEmptyClassName(className, builder); + builder.append("] "); + } + } + + private void appendNonEmptyClassName(final String className, final StringBuilder builder) { + int lastPosition = -1; + int position = className.indexOf("."); + while (position > 0) { + final String characterAfterDot = className.substring(lastPosition + 1, lastPosition + 2); + if (!characterAfterDot.equals(".")) { + builder.append(characterAfterDot); + } + builder.append("."); + lastPosition = position; + position = className.indexOf(".", position + 1); + } + if (lastPosition < className.length()) { + builder.append(className.substring(lastPosition + 1)); + } + } + + private String formatTimestamp(final long millis) { + final Instant instant = Instant.ofEpochMilli(millis); + return this.dateTimeFormatter.format(instant.atZone(ZoneId.of("Z"))); + } +} \ No newline at end of file diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/utils/UdfUtils.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/utils/UdfUtils.java index 480f263cb..5d37ccc15 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/utils/UdfUtils.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/utils/UdfUtils.java @@ -1,25 +1,24 @@ package com.exasol.utils; -import java.io.PrintStream; +import java.io.OutputStream; import java.io.PrintWriter; import java.io.StringWriter; import java.net.Socket; public class UdfUtils { - - public static void tryAttachToOutputService(String ip, int port) { + public static OutputStream tryAttachToOutputService(final String ip, final int port) { // Start before: udf_debug.py try { @SuppressWarnings("resource") - Socket socket = new Socket(ip, port); - PrintStream out = new PrintStream(socket.getOutputStream(), true); - System.setOut(out); - System.out.println("\n\n\nAttached to outputservice"); - } catch (Exception ex) {} // could not start output server} + final Socket socket = new Socket(ip, port); + return socket.getOutputStream(); + } catch (final Exception ex) { + return null; + } // could not start output server} } - public static String traceToString(Exception ex) { - StringWriter errors = new StringWriter(); + public static String traceToString(final Exception ex) { + final StringWriter errors = new StringWriter(); ex.printStackTrace(new PrintWriter(errors)); return errors.toString(); } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/resources/sql_dialects.properties b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/resources/sql_dialects.properties new file mode 100644 index 000000000..0b10e0766 --- /dev/null +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/resources/sql_dialects.properties @@ -0,0 +1,13 @@ +com.exasol.adapter.dialects.supported=\ +com.exasol.adapter.dialects.impl.DB2SqlDialect,\ +com.exasol.adapter.dialects.impl.ExasolSqlDialect,\ +com.exasol.adapter.dialects.impl.GenericSqlDialect,\ 
+com.exasol.adapter.dialects.impl.HiveSqlDialect,\ +com.exasol.adapter.dialects.impl.ImpalaSqlDialect,\ +com.exasol.adapter.dialects.impl.MysqlSqlDialect,\ +com.exasol.adapter.dialects.impl.OracleSqlDialect,\ +com.exasol.adapter.dialects.impl.PostgreSQLSqlDialect,\ +com.exasol.adapter.dialects.impl.RedshiftSqlDialect,\ +com.exasol.adapter.dialects.impl.SqlServerSqlDialect,\ +com.exasol.adapter.dialects.impl.SybaseSqlDialect,\ +com.exasol.adapter.dialects.impl.TeradataSqlDialect \ No newline at end of file diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/AbstractIntegrationTest.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/AbstractIntegrationTest.java index 14123e12c..5e0561ddc 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/AbstractIntegrationTest.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/AbstractIntegrationTest.java @@ -1,7 +1,18 @@ package com.exasol.adapter.dialects; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + import java.io.FileNotFoundException; -import java.sql.*; +import java.sql.Connection; +import java.sql.Date; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.GregorianCalendar; @@ -9,8 +20,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import static org.junit.Assert.*; - public class AbstractIntegrationTest { private static Connection connection; @@ -25,9 +34,10 @@ public static IntegrationTestConfig getConfig() throws FileNotFoundException { } /** - * You have to call this method with a connection to your EXASOL database during the @BeforeClass method of your integration test + * You have to call this method with a connection to your EXASOL database during + * the @BeforeClass method of your integration test */ - public static void setConnection(Connection connection) { + public static void setConnection(final Connection connection) { AbstractIntegrationTest.connection = connection; } @@ -40,49 +50,54 @@ private static void checkConnection() { } public static Connection connectToExa() throws ClassNotFoundException, SQLException, FileNotFoundException { - String user = config.getExasolUser(); - String password = config.getExasolPassword(); + final String user = config.getExasolUser(); + final String password = config.getExasolPassword(); return connectToExa(user, password); } - public static Connection connectToExa(String user, String password) throws ClassNotFoundException, SQLException, FileNotFoundException { - String exaAddress = config.getExasolAddress(); + public static Connection connectToExa(final String user, final String password) + throws ClassNotFoundException, SQLException, FileNotFoundException { + final String exaAddress = config.getExasolAddress(); Class.forName("com.exasol.jdbc.EXADriver"); return DriverManager.getConnection("jdbc:exa:" + exaAddress, user, password); } - public ResultSet executeQuery(Connection conn, String query) throws SQLException { + public ResultSet executeQuery(final Connection conn, final String query) throws SQLException { return conn.createStatement().executeQuery(query); } - public ResultSet executeQuery(String query) throws 
SQLException { + public ResultSet executeQuery(final String query) throws SQLException { checkConnection(); return executeQuery(connection, query); } - public int executeUpdate(String query) throws SQLException { + public int executeUpdate(final String query) throws SQLException { checkConnection(); return connection.createStatement().executeUpdate(query); } - public static void createJDBCAdapter(Connection conn, List jarIncludes) throws SQLException { - Statement stmt = conn.createStatement(); + + public static void createJDBCAdapter(final Connection conn, final List jarIncludes) throws SQLException { + final Statement stmt = conn.createStatement(); stmt.execute("CREATE SCHEMA IF NOT EXISTS ADAPTER"); String sql = "CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS\n"; sql += " %scriptclass com.exasol.adapter.jdbc.JdbcAdapter;\n"; - for (String includePath : jarIncludes) { + for (final String includePath : jarIncludes) { sql += " %jar " + includePath + ";\n"; } - //sql += " %jvmoption -Xms64m -Xmx64m;"; + // sql += " %jvmoption -Xms64m -Xmx64m;"; sql += "/"; stmt.execute(sql); } - public static void createJDBCAdapter(List jarIncludes) throws SQLException { + public static void createJDBCAdapter(final List jarIncludes) throws SQLException { checkConnection(); createJDBCAdapter(connection, jarIncludes); } - public static void createVirtualSchema(Connection conn, String virtualSchemaName, String dialect, String remoteCatalog, String remoteSchema, String connectionName, String user, String password, String adapter, String remoteConnectionString, boolean isLocal, String debugAddress, String tableFilter, String suffix) throws SQLException { + public static void createVirtualSchema(final Connection conn, final String virtualSchemaName, final String dialect, + final String remoteCatalog, final String remoteSchema, final String connectionName, final String user, + final String password, final String adapter, final String remoteConnectionString, final boolean isLocal, + final String debugAddress, final String tableFilter, final String suffix) throws SQLException { removeVirtualSchema(conn, virtualSchemaName); String sql = "CREATE VIRTUAL SCHEMA " + virtualSchemaName; sql += " USING " + adapter; @@ -113,6 +128,7 @@ public static void createVirtualSchema(Connection conn, String virtualSchemaName if (!debugAddress.isEmpty()) { sql += " DEBUG_ADDRESS='" + debugAddress + "'"; } + sql += " LOG_LEVEL='ALL'"; if (!tableFilter.isEmpty()) { sql += " TABLE_FILTER='" + tableFilter + "'"; } @@ -122,27 +138,34 @@ public static void createVirtualSchema(Connection conn, String virtualSchemaName conn.createStatement().execute(sql); } - public static void createVirtualSchema(String virtualSchemaName, String dialect, String remoteCatalog, String remoteSchema, String connectionName, String user, String password, String adapter, String remoteConnectionString, boolean isLocal, String debugAddress, String tableFilter, String suffix) throws SQLException { + public static void createVirtualSchema(final String virtualSchemaName, final String dialect, + final String remoteCatalog, final String remoteSchema, final String connectionName, final String user, + final String password, final String adapter, final String remoteConnectionString, final boolean isLocal, + final String debugAddress, final String tableFilter, final String suffix) throws SQLException { checkConnection(); - createVirtualSchema(connection, virtualSchemaName, dialect, remoteCatalog, remoteSchema, connectionName, user, password, adapter, 
remoteConnectionString, isLocal, debugAddress, tableFilter, suffix); + createVirtualSchema(connection, virtualSchemaName, dialect, remoteCatalog, remoteSchema, connectionName, user, + password, adapter, remoteConnectionString, isLocal, debugAddress, tableFilter, suffix); } - public static void createConnection(Connection conn, String connectionName, String connectionString, String user, String password) throws SQLException { + public static void createConnection(final Connection conn, final String connectionName, + final String connectionString, final String user, final String password) throws SQLException { removeConnection(conn, connectionName); String sql = "CREATE CONNECTION " + connectionName; sql += " TO '" + connectionString + "'"; sql += " USER '" + user + "'"; - sql += " IDENTIFIED BY '" + password +"'"; + sql += " IDENTIFIED BY '" + password + "'"; conn.createStatement().execute(sql); } - public static void createConnection(String connectionName, String connectionString, String user, String password) throws SQLException { + public static void createConnection(final String connectionName, final String connectionString, final String user, + final String password) throws SQLException { checkConnection(); createConnection(connection, connectionName, connectionString, user, password); } - public static String getPortOfConnectedDatabase(Connection conn) throws SQLException { - ResultSet result = conn.createStatement().executeQuery("SELECT PARAM_VALUE FROM EXA_COMMANDLINE where PARAM_NAME = 'port'"); + public static String getPortOfConnectedDatabase(final Connection conn) throws SQLException { + final ResultSet result = conn.createStatement() + .executeQuery("SELECT PARAM_VALUE FROM EXA_COMMANDLINE where PARAM_NAME = 'port'"); result.next(); return result.getString("PARAM_VALUE"); } @@ -152,37 +175,42 @@ public static String getPortOfConnectedDatabase() throws SQLException { return getPortOfConnectedDatabase(connection); } - public static void matchNextRow(ResultSet result, Object... expectedElements) throws SQLException { + public static void matchNextRow(final ResultSet result, final Object... expectedElements) throws SQLException { result.next(); - assertEquals(getDiffWithTypes(Arrays.asList(expectedElements), rowToObject(result)), Arrays.asList(expectedElements), rowToObject(result)); + assertEquals(getDiffWithTypes(Arrays.asList(expectedElements), rowToObject(result)), + Arrays.asList(expectedElements), rowToObject(result)); } - public static void matchLastRow(ResultSet result, Object... expectedElements) throws SQLException { + public static void matchLastRow(final ResultSet result, final Object... 
expectedElements) throws SQLException { matchNextRow(result, expectedElements); assertFalse(result.next()); } - private static void removeConnection(Connection conn, String connectionName) throws SQLException { - Statement stmt = conn.createStatement(); - String sql = "DROP CONNECTION IF EXISTS " + connectionName; + private static void removeConnection(final Connection conn, final String connectionName) throws SQLException { + final Statement stmt = conn.createStatement(); + final String sql = "DROP CONNECTION IF EXISTS " + connectionName; stmt.execute(sql); } - private static void removeVirtualSchema(Connection conn, String schemaName) throws SQLException { - Statement stmt = conn.createStatement(); - String sql = "DROP VIRTUAL SCHEMA IF EXISTS " + schemaName + " CASCADE"; + private static void removeVirtualSchema(final Connection conn, final String schemaName) throws SQLException { + final Statement stmt = conn.createStatement(); + final String sql = "DROP VIRTUAL SCHEMA IF EXISTS " + schemaName + " CASCADE"; stmt.execute(sql); } /** - * This method shows the diff with the types. Normally, only the String representation is shown in the diff, so you cannot distinguish between (int)1 and (long)1. + * This method shows the diff with the types. Normally, only the String + * representation is shown in the diff, so you cannot distinguish between (int)1 + * and (long)1. */ - private static String getDiffWithTypes(List expected, List actual) { - StringBuilder builder = new StringBuilder(); + private static String getDiffWithTypes(final List expected, final List actual) { + final StringBuilder builder = new StringBuilder(); builder.append("expected elements <["); boolean first = true; - for (Object element : expected) { - if (!first) { builder.append(", "); } + for (final Object element : expected) { + if (!first) { + builder.append(", "); + } if (element == null) { builder.append("null"); } else { @@ -192,8 +220,10 @@ private static String getDiffWithTypes(List expected, List actua } builder.append("]> but was <["); first = true; - for (Object element : actual) { - if (!first) { builder.append(", "); } + for (final Object element : actual) { + if (!first) { + builder.append(", "); + } if (element == null) { builder.append("null"); } else { @@ -205,8 +235,9 @@ private static String getDiffWithTypes(List expected, List actua return builder.toString(); } - public static void matchSingleRowExplain(Connection conn, String query, String expectedExplain, boolean isLocal) throws SQLException { - ResultSet result = conn.createStatement().executeQuery("EXPLAIN VIRTUAL " + query); + public static void matchSingleRowExplain(final Connection conn, final String query, final String expectedExplain, + final boolean isLocal) throws SQLException { + final ResultSet result = conn.createStatement().executeQuery("EXPLAIN VIRTUAL " + query); result.next(); if (isLocal) { assertEquals(expectedExplain, result.getString("PUSHDOWN_SQL")); @@ -216,50 +247,53 @@ public static void matchSingleRowExplain(Connection conn, String query, String e assertEquals(false, result.next()); } - public static void matchSingleRowExplain(Connection conn, String query, String expectedExplain) throws SQLException { + public static void matchSingleRowExplain(final Connection conn, final String query, final String expectedExplain) + throws SQLException { checkConnection(); matchSingleRowExplain(connection, query, expectedExplain, false); } - public static void matchSingleRowExplain(String query, String expectedExplain, boolean isLocal) throws 
SQLException { + public static void matchSingleRowExplain(final String query, final String expectedExplain, final boolean isLocal) + throws SQLException { checkConnection(); matchSingleRowExplain(connection, query, expectedExplain, isLocal); } - public static void matchSingleRowExplain(String query, String expectedExplain) throws SQLException { + public static void matchSingleRowExplain(final String query, final String expectedExplain) throws SQLException { checkConnection(); matchSingleRowExplain(connection, query, expectedExplain); - } - - private static List rowToObject(ResultSet resultSet) throws SQLException { - int colCount = resultSet.getMetaData().getColumnCount(); - List res = new ArrayList<>(colCount); - for (int i=1; i<=colCount; ++i) { - String type = (resultSet.getObject(i) == null) ? "null" : resultSet.getObject(i).getClass().getName(); - // System.out.println("- col " + i + " type: " + type); + } + + private static List rowToObject(final ResultSet resultSet) throws SQLException { + final int colCount = resultSet.getMetaData().getColumnCount(); + final List res = new ArrayList<>(colCount); + for (int i = 1; i <= colCount; ++i) { res.add(resultSet.getObject(i)); } return res; } - private static String extractStatementFromImport(String importStatement) { - String regexp = " STATEMENT '(.*)'"; - Pattern pattern = Pattern.compile(regexp); - Matcher matcher = pattern.matcher(importStatement); + private static String extractStatementFromImport(final String importStatement) { + final String regexp = " STATEMENT '(.*)'"; + final Pattern pattern = Pattern.compile(regexp); + final Matcher matcher = pattern.matcher(importStatement); assertTrue(matcher.find()); - String statement = matcher.group(1); - // Replace double single quotes, e.g. in "IMPORT ... STATEMENT 'SELECT A=''x'' FROM T'"; + final String statement = matcher.group(1); + // Replace double single quotes, e.g. in "IMPORT ... STATEMENT 'SELECT A=''x'' + // FROM T'"; return statement.replace("''", "'"); } - public Date getSqlDate(int year, int month, int day) { + public Date getSqlDate(final int year, final int month, final int day) { // Attention: month start with 0! - return new java.sql.Date(new GregorianCalendar(year, month-1, day).getTime().getTime()); + return new java.sql.Date(new GregorianCalendar(year, month - 1, day).getTime().getTime()); } - public Timestamp getSqlTimestamp(int year, int month, int day, int hour, int minute, int second, int millisecond) { + public Timestamp getSqlTimestamp(final int year, final int month, final int day, final int hour, final int minute, + final int second, final int millisecond) { // Attention: month start with 0! 
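        // Illustrative note, not from the original change: the caller passes a
        // one-based month while GregorianCalendar expects zero-based months, hence
        // the "month - 1" below. For instance, getSqlTimestamp(2018, 2, 7, 23, 59, 30, 123)
        // builds the timestamp 2018-02-07 23:59:30.123.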
- return new java.sql.Timestamp(new GregorianCalendar(year, month-1, day, hour, minute, second).getTime().getTime() + millisecond); + return new java.sql.Timestamp( + new GregorianCalendar(year, month - 1, day, hour, minute, second).getTime().getTime() + millisecond); } } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/IntegrationTestConfig.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/IntegrationTestConfig.java index eaaf88e53..5ee82fff9 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/IntegrationTestConfig.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/IntegrationTestConfig.java @@ -218,8 +218,7 @@ public List getDB2JdbcJars() { public boolean DB2TestsRequested() { return getProperty("db2", "runIntegrationTests", false); } - - + public boolean genericTestsRequested() { return getProperty("generic", "runIntegrationTests", false); } @@ -239,11 +238,31 @@ public String getGenericUser() { public String getGenericPassword() { return getProperty("generic", "password"); } - + + public boolean sybaseTestsRequested() { + return getProperty("sybase", "runIntegrationTests", false); + } + + public String getSybaseJdbcDriverPath() { + return getProperty("sybase", "jdbcDriverPath"); + } + + public String getSybaseJdbcConnectionString() { + return getProperty("sybase", "connectionString"); + } + + public String getSybaseUser() { + return getProperty("sybase", "user"); + } + + public String getSybasePassword() { + return getProperty("sybase", "password"); + } + public String getBucketFSPassword(){ return getProperty("general", "bucketFsPassword"); } - + public String getBucketFSURL(){ return getProperty("general", "bucketFsUrl"); } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/IntegrationTestSetup.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/IntegrationTestSetup.java index b945110bc..0d6bdbfb7 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/IntegrationTestSetup.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/IntegrationTestSetup.java @@ -1,9 +1,11 @@ package com.exasol.adapter.dialects; import java.io.File; +import java.io.FileNotFoundException; import java.io.IOException; import java.net.URISyntaxException; import java.nio.charset.Charset; +import java.util.logging.Logger; import org.apache.commons.codec.binary.Base64; import org.apache.http.HttpHeaders; @@ -15,72 +17,142 @@ import org.apache.http.entity.FileEntity; import org.apache.http.impl.client.HttpClientBuilder; - /** - * The main method of this class will be called in the pre-integration-test maven phase before the actual integration-test phase starst. - * Here we have to setup the integration test environment + * The main method of this class will be called in the + * pre-integration-test Maven phase before the actual + * integration-test phase starts. + * + *

+ * We upload the JAR file containing the virtual schema adapter to BucketFS so
+ * that we don't accidentally forget to do this before the integration test.
+ * There is a caveat though: if you run this on a cluster, the cluster nodes
+ * will not replicate the JAR instantly. This takes a while. So in clustered
+ * environments it is safer to upload the file and wait a while.
+ *
 *
- * Attention: This does not deploy the latest jar, because the dist maven module is cleaned and build after this module. Right now you need to do something like "mvn clean package && mvn verify -Pit -D..." to upload and test the latest jar.
+ * Attention:
+ *
+ * This does not deploy the latest JAR, because the distribution Maven module is
+ * cleaned and built after this module. Right now you need to do something like
+ * "mvn clean package && mvn verify -Pit -D..." to upload
+ * and test the latest JAR.
+
*/ public class IntegrationTestSetup { + private static final String ARTIFACT_DISTRIBUTION_NAME = "virtualschema-jdbc-adapter-dist"; + private static final Logger LOGGER = Logger.getLogger(IntegrationTestConfig.class.getName()); + private IntegrationTestConfig config = null; + private final String version; + private final String configFile; + + /** + * Entry point of the {@link IntegrationTestSetup} + * + * @param args version of the adapter, path to configuration file and skipping + * (optional) + * @throws ClientProtocolException + * @throws IOException + * @throws URISyntaxException + */ + public static void main(final String[] args) { + if (isSkippingIntegrationTestConfigured(args)) { + LOGGER.info("Skipping setup of the integration test environment"); + } else { + LOGGER.info("Setting up the integration test environment"); + final String projectVersion = args[0]; + final String configFile = args[1]; + new IntegrationTestSetup(configFile, projectVersion).run(); + } + } + + private IntegrationTestSetup(final String configFile, final String version) { + this.configFile = configFile; + this.version = version; + } + + private static boolean isSkippingIntegrationTestConfigured(final String[] args) { + return args.length > 2 && Boolean.valueOf(args[2]); + } + + private void run() { + readConfiguration(); + uploadFileToBucketFS(getJarUrlForBucketFS(this.config.getBucketFSURL()), // + getLocalJarPath(), // + this.config.getBucketFSPassword()); + } + + private void readConfiguration() { + try { + this.config = new IntegrationTestConfig(this.configFile); + } catch (final FileNotFoundException e) { + throw new IntegrationTestSetupException( + "Unable to read integration test configuration file \"" + this.configFile + "\"", e); + } + } - public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException { - if (args.length > 2) { - Boolean skipTestSetup = Boolean.valueOf(args[2]); - if (skipTestSetup) { - System.out.println("Skip setup of the integration test environment"); - return; - } + private String getJarUrlForBucketFS(final String bucketFSurl) { + return bucketFSurl + "/" + getJarName(this.version); + } + + private String getLocalJarPath() { + return ".." 
+ File.separator + ARTIFACT_DISTRIBUTION_NAME + File.separator + "target" + File.separator + + getJarName(this.version); + } + + private String getJarName(final String projectVersion) { + return ARTIFACT_DISTRIBUTION_NAME + "-" + projectVersion + ".jar"; + } + + private void uploadFileToBucketFS(final String url, final String filePath, final String password) { + LOGGER.info(() -> "Uploading \"" + filePath + "\"" + " to \"" + url + "\""); + final HttpPut request = buildPutRequest(url, filePath, password); + final HttpResponse response = executePutRequest(request); + handleResponse(response); + LOGGER.fine(() -> "HTTP PUT response:" + System.lineSeparator() + response); + } + + private void handleResponse(final HttpResponse response) { + if (response.getStatusLine().getStatusCode() != 200) { + throw new IntegrationTestSetupException("HTTP PUT request to BucketFS failed: " + response.toString()); } + } - System.out.println("Start setup of the integration test environment"); - String projectVersion = args[0]; - String configFile = args[1]; - - IntegrationTestConfig config = new IntegrationTestConfig(configFile); - - String bucketFSurl = config.getBucketFSURL(); - String bucketFSpassword = config.getBucketFSPassword(); - - - // The local path look like "virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar" - String artifactDistName = "virtualschema-jdbc-adapter-dist"; - - String jarName = artifactDistName + "-" + projectVersion + ".jar"; - - String jarLocalPath = "../" + artifactDistName + "/target/" + jarName; - - - uploadFileToBucketFS(bucketFSurl+"/"+jarName, jarLocalPath, bucketFSpassword); - - //uploadFileToBucketFS("http://192.168.106.131:2580/bucket1/original-virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar", "C:\\Users\\tb\\Desktop\\github-repos\\virtual-schemas\\jdbc-adapter\\virtualschema-jdbc-adapter-dist\\target\\original-virtualschema-jdbc-adapter-dist-1.0.2-SNAPSHOT.jar","bucket1"); - + private HttpResponse executePutRequest(final HttpPut request) { + try { + final HttpClient httpClient = HttpClientBuilder.create().build(); + HttpResponse response; + response = httpClient.execute(request); + return response; + } catch (final IOException e) { + throw new IntegrationTestSetupException("Unable to execute HTTP PUT to BucketFS", e); + } } - - private static void uploadFileToBucketFS(String url, String filePath, String password) throws ClientProtocolException, IOException, URISyntaxException { - - HttpClient httpClient = HttpClientBuilder.create().build(); - URIBuilder uriBuilder = new URIBuilder(url); - HttpPut request = new HttpPut(uriBuilder.build()); - - String auth = "w:"+password; - byte[] encodedAuth = Base64.encodeBase64(auth.getBytes(Charset.forName("UTF-8"))); - String authHeader = "Basic " + new String(encodedAuth); + private HttpPut buildPutRequest(final String url, final String filePath, final String password) { + try { + final URIBuilder uriBuilder = new URIBuilder(url); + final HttpPut request = new HttpPut(uriBuilder.build()); + setAuthenticationHeaderInRequestForPassword(request, password); + setFileToBeTransferred(request, filePath); + return request; + } catch (final URISyntaxException e) { + throw new IntegrationTestSetupException( + "Unable to build HTTP PUT request from \"" + filePath + "\" to \"" + url + "\"", e); + } + } + + private void setAuthenticationHeaderInRequestForPassword(final HttpPut request, final String password) { + final String auth = "w:" + password; + final byte[] encodedAuth = 
Base64.encodeBase64(auth.getBytes(Charset.forName("UTF-8"))); + final String authHeader = "Basic " + new String(encodedAuth); request.setHeader(HttpHeaders.AUTHORIZATION, authHeader); - - FileEntity fileEntity = new FileEntity(new File(filePath)); - + } + + private void setFileToBeTransferred(final HttpPut request, final String filePath) { + final FileEntity fileEntity = new FileEntity(new File(filePath)); request.setEntity(fileEntity); - - HttpResponse response = httpClient.execute(request); - - if ( response.getStatusLine().getStatusCode() != 200 ) - throw new IOException( response.toString() ); - - System.out.println (response); - } - - -} +} \ No newline at end of file diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/IntegrationTestSetupException.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/IntegrationTestSetupException.java new file mode 100644 index 000000000..61bee3380 --- /dev/null +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/IntegrationTestSetupException.java @@ -0,0 +1,27 @@ +package com.exasol.adapter.dialects; + +/** + * Runtime exceptions for integration test setup + */ +public class IntegrationTestSetupException extends RuntimeException { + private static final long serialVersionUID = -6106780352553599816L; + + /** + * Create a new instance of an {@link IntegrationTestSetupException} + * + * @param message error message + */ + public IntegrationTestSetupException(final String message) { + super(message); + } + + /** + * Create a new instance of an {@link IntegrationTestSetupException} + * + * @param message error message + * @param cause root cause + */ + public IntegrationTestSetupException(final String message, final Throwable cause) { + super(message, cause); + } +} \ No newline at end of file diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/SqlDialectTest.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/SqlDialectTest.java index a21f598ef..c36a66717 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/SqlDialectTest.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/SqlDialectTest.java @@ -1,5 +1,16 @@ package com.exasol.adapter.dialects; +import static org.junit.Assert.assertEquals; + +import java.math.BigDecimal; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.EnumMap; +import java.util.List; +import java.util.Map; + +import org.junit.Test; + import com.exasol.adapter.AdapterException; import com.exasol.adapter.capabilities.Capabilities; import com.exasol.adapter.jdbc.SchemaAdapterNotes; @@ -7,117 +18,124 @@ import com.exasol.adapter.metadata.DataType; import com.exasol.adapter.metadata.MetadataException; import com.exasol.adapter.metadata.TableMetadata; -import com.exasol.adapter.sql.*; +import com.exasol.adapter.sql.AggregateFunction; +import com.exasol.adapter.sql.ScalarFunction; +import com.exasol.adapter.sql.SqlColumn; +import com.exasol.adapter.sql.SqlFunctionAggregate; +import com.exasol.adapter.sql.SqlFunctionScalar; +import com.exasol.adapter.sql.SqlLiteralExactnumeric; +import com.exasol.adapter.sql.SqlNode; +import com.exasol.adapter.sql.SqlSelectList; +import com.exasol.adapter.sql.SqlStatementSelect; +import com.exasol.adapter.sql.SqlTable; import com.exasol.utils.SqlTestUtil; import com.google.common.collect.ImmutableList; import 
com.google.common.collect.ImmutableMap; -import org.junit.Test; - -import java.math.BigDecimal; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.EnumMap; -import java.util.List; -import java.util.Map; - -import static org.junit.Assert.assertEquals; public class SqlDialectTest { @Test public void testAggregateFunctionAliases() throws AdapterException, MetadataException { - TableMetadata clicksMeta = getTestTableMetadata(); - SqlTable fromClause = new SqlTable("TEST", clicksMeta); - SqlColumn col1 = new SqlColumn(1, clicksMeta.getColumns().get(0)); - SqlSelectList selectList = SqlSelectList.createRegularSelectList( ImmutableList.of( - new SqlFunctionAggregate(AggregateFunction.APPROXIMATE_COUNT_DISTINCT, ImmutableList.of(col1), false), + final TableMetadata clicksMeta = getTestTableMetadata(); + final SqlTable fromClause = new SqlTable("TEST", clicksMeta); + final SqlColumn col1 = new SqlColumn(1, clicksMeta.getColumns().get(0)); + final SqlSelectList selectList = SqlSelectList.createRegularSelectList(ImmutableList.of( + new SqlFunctionAggregate(AggregateFunction.APPROXIMATE_COUNT_DISTINCT, ImmutableList.of(col1), + false), new SqlFunctionAggregate(AggregateFunction.AVG, ImmutableList.of(col1), false), new SqlFunctionAggregate(AggregateFunction.COUNT, new ArrayList(), true), - new SqlFunctionAggregate(AggregateFunction.MAX, ImmutableList.of(col1), false) - )); - SqlNode node = new SqlStatementSelect(fromClause, selectList, null, null, null, null, null); + new SqlFunctionAggregate(AggregateFunction.MAX, ImmutableList.of(col1), false))); + final SqlNode node = new SqlStatementSelect(fromClause, selectList, null, null, null, null, null); - String schemaName = "SCHEMA"; - String expectedSql = "SELECT NDV(C1), AVERAGE(C1), COUNT2(DISTINCT *), MAX(C1) FROM " + schemaName + ".TEST"; + final String schemaName = "SCHEMA"; + final String expectedSql = "SELECT NDV(C1), AVERAGE(C1), COUNT2(DISTINCT *), MAX(C1) FROM " + schemaName + + ".TEST"; - Map aggAliases = new EnumMap<>(AggregateFunction.class); - Map scalarAliases = ImmutableMap.of(); - Map infixAliases = ImmutableMap.of(); + final Map aggAliases = new EnumMap<>(AggregateFunction.class); + final Map scalarAliases = ImmutableMap.of(); + final Map infixAliases = ImmutableMap.of(); aggAliases.put(AggregateFunction.APPROXIMATE_COUNT_DISTINCT, "NDV"); aggAliases.put(AggregateFunction.AVG, "AVERAGE"); aggAliases.put(AggregateFunction.COUNT, "COUNT2"); - Map prefixAliases = ImmutableMap.of(); + final Map prefixAliases = ImmutableMap.of(); - SqlDialect dialect = new AliasesSqlDialect(aggAliases, scalarAliases, infixAliases, prefixAliases); + final SqlDialect dialect = new AliasesSqlDialect(aggAliases, scalarAliases, infixAliases, prefixAliases); - SqlGenerationContext context = new SqlGenerationContext("", schemaName, false); - SqlGenerationVisitor generator = new SqlGenerationVisitor(dialect, context); - String actualSql = node.accept(generator); + final SqlGenerationContext context = new SqlGenerationContext("", schemaName, false); + final SqlGenerationVisitor generator = new SqlGenerationVisitor(dialect, context); + final String actualSql = node.accept(generator); assertEquals(SqlTestUtil.normalizeSql(expectedSql), SqlTestUtil.normalizeSql(actualSql)); } @Test public void testScalarFunctionAliases() throws AdapterException, MetadataException { - TableMetadata clicksMeta = getTestTableMetadata(); - SqlTable fromClause = new SqlTable("TEST", clicksMeta); - SqlColumn col1 = new SqlColumn(1, clicksMeta.getColumns().get(0)); - 
SqlSelectList selectList = SqlSelectList.createRegularSelectList( ImmutableList.of( + final TableMetadata clicksMeta = getTestTableMetadata(); + final SqlTable fromClause = new SqlTable("TEST", clicksMeta); + final SqlColumn col1 = new SqlColumn(1, clicksMeta.getColumns().get(0)); + final SqlSelectList selectList = SqlSelectList.createRegularSelectList(ImmutableList.of( new SqlFunctionScalar(ScalarFunction.ABS, ImmutableList.of(col1), false, false), - new SqlFunctionScalar(ScalarFunction.ADD, ImmutableList.of(col1, new SqlLiteralExactnumeric(new BigDecimal(100))), true, false), - new SqlFunctionScalar(ScalarFunction.SUB, ImmutableList.of(col1, new SqlLiteralExactnumeric(new BigDecimal(100))), true, false), + new SqlFunctionScalar(ScalarFunction.ADD, + ImmutableList.of(col1, new SqlLiteralExactnumeric(new BigDecimal(100))), true, false), + new SqlFunctionScalar(ScalarFunction.SUB, + ImmutableList.of(col1, new SqlLiteralExactnumeric(new BigDecimal(100))), true, false), new SqlFunctionScalar(ScalarFunction.TO_CHAR, ImmutableList.of(col1), true, false), - new SqlFunctionScalar(ScalarFunction.NEG, ImmutableList.of(col1), false, false) - )); - SqlNode node = new SqlStatementSelect(fromClause, selectList, null, null, null, null, null); + new SqlFunctionScalar(ScalarFunction.NEG, ImmutableList.of(col1), false, false))); + final SqlNode node = new SqlStatementSelect(fromClause, selectList, null, null, null, null, null); - String schemaName = "SCHEMA"; + final String schemaName = "SCHEMA"; // ADD is infix by default, but must be non-infix after applying the alias. - String expectedSql = "SELECT ABSOLUTE(C1), PLUS(C1, 100), (C1 - 100), TO_CHAR(C1), NEGATIVE(C1) FROM " + schemaName + ".TEST"; + final String expectedSql = "SELECT ABSOLUTE(C1), PLUS(C1, 100), (C1 - 100), TO_CHAR(C1), NEGATIVE(C1) FROM " + + schemaName + ".TEST"; - Map scalarAliases = new EnumMap<>(ScalarFunction.class); + final Map scalarAliases = new EnumMap<>(ScalarFunction.class); scalarAliases.put(ScalarFunction.ABS, "ABSOLUTE"); scalarAliases.put(ScalarFunction.ADD, "PLUS"); scalarAliases.put(ScalarFunction.NEG, "NEGATIVE"); - SqlDialect dialect = new AliasesSqlDialect(ImmutableMap.of(), scalarAliases, ImmutableMap.of(), ImmutableMap.of()); + final SqlDialect dialect = new AliasesSqlDialect(ImmutableMap.of(), scalarAliases, + ImmutableMap.of(), ImmutableMap.of()); - SqlGenerationContext context = new SqlGenerationContext("", schemaName, false); - SqlGenerationVisitor generator = new SqlGenerationVisitor(dialect, context); - String actualSql = node.accept(generator); + final SqlGenerationContext context = new SqlGenerationContext("", schemaName, false); + final SqlGenerationVisitor generator = new SqlGenerationVisitor(dialect, context); + final String actualSql = node.accept(generator); assertEquals(SqlTestUtil.normalizeSql(expectedSql), SqlTestUtil.normalizeSql(actualSql)); } @Test public void testInvalidAliases() throws Exception { - TableMetadata clicksMeta = getTestTableMetadata(); - SqlTable fromClause = new SqlTable("TEST", clicksMeta); - SqlSelectList selectList = SqlSelectList.createSelectStarSelectList(); - SqlNode node = new SqlStatementSelect(fromClause, selectList, null, null, null, null, null); + final TableMetadata clicksMeta = getTestTableMetadata(); + final SqlTable fromClause = new SqlTable("TEST", clicksMeta); + final SqlSelectList selectList = SqlSelectList.createSelectStarSelectList(); + final SqlNode node = new SqlStatementSelect(fromClause, selectList, null, null, null, null, null); - SqlGenerationContext 
context = new SqlGenerationContext("", "schema", false); + final SqlGenerationContext context = new SqlGenerationContext("", "schema", false); // Test non-simple scalar functions - for (ScalarFunction function : ScalarFunction.values()) { + for (final ScalarFunction function : ScalarFunction.values()) { if (!function.isSimple()) { - Map scalarAliases = ImmutableMap.of(function, "ALIAS"); - SqlDialect dialect = new AliasesSqlDialect(ImmutableMap.of(), scalarAliases, ImmutableMap.of(), ImmutableMap.of()); + final Map scalarAliases = ImmutableMap.of(function, "ALIAS"); + final SqlDialect dialect = new AliasesSqlDialect(ImmutableMap.of(), + scalarAliases, ImmutableMap.of(), + ImmutableMap.of()); try { - SqlGenerationVisitor generator = new SqlGenerationVisitor(dialect, context); + final SqlGenerationVisitor generator = new SqlGenerationVisitor(dialect, context); throw new Exception("Should never arrive here"); - } catch(RuntimeException ex) { + } catch (final RuntimeException ex) { // This error is expected } } } // Test non-simple aggregate functions - for (AggregateFunction function : AggregateFunction.values()) { + for (final AggregateFunction function : AggregateFunction.values()) { if (!function.isSimple()) { - Map aggregateAliases = ImmutableMap.of(function, "ALIAS"); - SqlDialect dialect = new AliasesSqlDialect(aggregateAliases, ImmutableMap.of(), ImmutableMap.of(), ImmutableMap.of()); + final Map aggregateAliases = ImmutableMap.of(function, "ALIAS"); + final SqlDialect dialect = new AliasesSqlDialect(aggregateAliases, + ImmutableMap.of(), ImmutableMap.of(), + ImmutableMap.of()); try { - SqlGenerationVisitor generator = new SqlGenerationVisitor(dialect, context); + final SqlGenerationVisitor generator = new SqlGenerationVisitor(dialect, context); throw new Exception("Should never arrive here"); - } catch(RuntimeException ex) { + } catch (final RuntimeException ex) { // This error is expected } } @@ -125,22 +143,23 @@ public void testInvalidAliases() throws Exception { } private TableMetadata getTestTableMetadata() throws MetadataException { - List columns = new ArrayList<>(); - columns.add(new ColumnMetadata("C1", "", DataType.createBool(), true, - false, "", "")); + final List columns = new ArrayList<>(); + columns.add(new ColumnMetadata("C1", "", DataType.createBool(), true, false, "", "")); return new TableMetadata("TEST", "", columns, ""); } static class AliasesSqlDialect extends AbstractSqlDialect { - private Map aggregationAliases; - private Map scalarAliases; - private Map infixAliases; - private Map prefixAliases; + private final Map aggregationAliases; + private final Map scalarAliases; + private final Map infixAliases; + private final Map prefixAliases; - public AliasesSqlDialect(Map aggregationAliases, Map scalarAliases - , Map infixAliases, Map prefixAliases) { - super(new SqlDialectContext(new SchemaAdapterNotes(".", "\"", false, false, false, false, false, false, false, false, false, false, true, false))); + public AliasesSqlDialect(final Map aggregationAliases, + final Map scalarAliases, final Map infixAliases, + final Map prefixAliases) { + super(new SqlDialectContext(new SchemaAdapterNotes(".", "\"", false, false, false, false, false, false, + false, false, false, false, true, false))); this.aggregationAliases = aggregationAliases; this.scalarAliases = scalarAliases; this.infixAliases = infixAliases; @@ -149,7 +168,7 @@ public AliasesSqlDialect(Map aggregationAliases, Map< @Override public Capabilities getCapabilities() { - Capabilities caps = new Capabilities(); + 
final Capabilities caps = new Capabilities(); caps.supportAllCapabilities(); return caps; } @@ -166,34 +185,33 @@ public SchemaOrCatalogSupport supportsJdbcSchemas() { @Override public Map getAggregateFunctionAliases() { - return aggregationAliases; + return this.aggregationAliases; } @Override public Map getScalarFunctionAliases() { - return scalarAliases; + return this.scalarAliases; } @Override public Map getBinaryInfixFunctionAliases() { - if (infixAliases.isEmpty()) { + if (this.infixAliases.isEmpty()) { return super.getBinaryInfixFunctionAliases(); } else { - return infixAliases; + return this.infixAliases; } } @Override public Map getPrefixFunctionAliases() { - if (prefixAliases.isEmpty()) { + if (this.prefixAliases.isEmpty()) { return super.getPrefixFunctionAliases(); } else { - return prefixAliases; + return this.prefixAliases; } } - @Override - public String getPublicName() { + public static String getPublicName() { return "TEST"; } @@ -208,22 +226,22 @@ public IdentifierCaseHandling getQuotedIdentifierHandling() { } @Override - public String applyQuote(String identifier) { + public String applyQuote(final String identifier) { return "\"" + identifier + "\""; } @Override - public String applyQuoteIfNeeded(String identifier) { - return identifier; // Intentionally kept simple + public String applyQuoteIfNeeded(final String identifier) { + return identifier; // Intentionally kept simple } @Override - public boolean requiresCatalogQualifiedTableNames(SqlGenerationContext context) { + public boolean requiresCatalogQualifiedTableNames(final SqlGenerationContext context) { return false; } @Override - public boolean requiresSchemaQualifiedTableNames(SqlGenerationContext context) { + public boolean requiresSchemaQualifiedTableNames(final SqlGenerationContext context) { return true; } @@ -233,12 +251,12 @@ public NullSorting getDefaultNullSorting() { } @Override - public String getStringLiteral(String value) { + public String getStringLiteral(final String value) { return "'" + value + "'"; } @Override - public DataType dialectSpecificMapJdbcType(JdbcTypeDescription jdbcType) throws SQLException { + public DataType dialectSpecificMapJdbcType(final JdbcTypeDescription jdbcType) throws SQLException { return null; } } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/SqlDialectsTest.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/SqlDialectsTest.java index 08b2edcd0..3e65dc043 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/SqlDialectsTest.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/SqlDialectsTest.java @@ -1,28 +1,82 @@ package com.exasol.adapter.dialects; -import com.exasol.adapter.dialects.impl.ExasolSqlDialect; -import com.exasol.adapter.dialects.impl.ImpalaSqlDialect; -import com.exasol.adapter.jdbc.SchemaAdapterNotes; -import com.google.common.collect.ImmutableList; +import static org.hamcrest.CoreMatchers.sameInstance; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.matchesPattern; +import static org.junit.Assert.assertThat; + +import org.junit.Before; import org.junit.Test; -import static org.junit.Assert.assertTrue; +import com.exasol.adapter.dialects.impl.DB2SqlDialect; +import com.exasol.adapter.dialects.impl.ExasolSqlDialect; public class SqlDialectsTest { + @Before + public void 
before() { + SqlDialects.deleteInstance(); + System.clearProperty(SqlDialects.SQL_DIALECTS_PROPERTY); + } + + @Test + public void testGetInstance() { + final SqlDialects dialects = SqlDialects.getInstance(); + assertThat(dialects, instanceOf(SqlDialects.class)); + } + + @Test + public void testGetInstanceTwiceYieldsSameInstance() { + assertThat(SqlDialects.getInstance(), sameInstance(SqlDialects.getInstance())); + } + + @Test + public void testIsSupported() { + assertThat(SqlDialects.getInstance().isSupported(ExasolSqlDialect.getPublicName()), is(true)); + } + + @Test + public void testIsNotSupported() { + assertThat(SqlDialects.getInstance().isSupported("Unknown Dialect"), is(false)); + } + + @Test + public void testGetDialectNames() { + assertThat(SqlDialects.getInstance().getDialectsString(), matchesPattern( + ".*" + DB2SqlDialect.getPublicName() + ".*,.* " + ExasolSqlDialect.getPublicName() + ".*")); + } @Test public void testGetDialectByName() { - SqlDialects dialects = new SqlDialects(ImmutableList.of(ExasolSqlDialect.NAME, ImpalaSqlDialect.NAME)); - SqlDialectContext context = new SqlDialectContext(new SchemaAdapterNotes(".", "\"", false, false, false, false, false, false, false, false, false, false, true, false)); - assertTrue(dialects.getDialectByName("IMPALA", context).getClass().equals(ImpalaSqlDialect.class)); - - assertTrue(dialects.getDialectByName("iMpAlA", context).getClass().equals(ImpalaSqlDialect.class)); - - assertTrue(dialects.getDialectByName("impala", context).getClass().equals(ImpalaSqlDialect.class)); - - assertTrue(dialects.getDialectByName("EXASOL", context).getClass().equals(ExasolSqlDialect.class)); - - assertTrue(dialects.getDialectByName("unknown-dialect", context) == null); - } - -} + assertThat( + SqlDialects.getInstance().getDialectInstanceForNameWithContext(ExasolSqlDialect.getPublicName(), null), + instanceOf(ExasolSqlDialect.class)); + + } + + @Test + public void testReadDialectsFromSystemProperty() { + System.setProperty(SqlDialects.SQL_DIALECTS_PROPERTY, "com.exasol.adapter.dialects.impl.ExasolSqlDialect"); + assertThat(SqlDialects.getInstance().getDialectsString(), equalTo("EXASOL")); + } + + @Test(expected = SqlDialectsRegistryException.class) + public void testUsingDialectWithoutNameMethodThrowsException() { + System.setProperty(SqlDialects.SQL_DIALECTS_PROPERTY, + "com.exasol.adapter.dialects.impl.DummyDialectWithoutNameMethod"); + SqlDialects.getInstance().getDialectsString(); + } + + @Test(expected = SqlDialectsRegistryException.class) + public void testRegisteringNonExistentDialectThrowsException() { + System.setProperty(SqlDialects.SQL_DIALECTS_PROPERTY, "this.dialect.does.not.exist.DummySqlDialect"); + SqlDialects.getInstance(); + } + + @Test(expected = SqlDialectsRegistryException.class) + public void testRequestingInstanceOfNonExistentDialectThrowsException() { + SqlDialects.getInstance(); + SqlDialects.getInstance().getDialectInstanceForNameWithContext("NonExistentDialect", null); + } +} \ No newline at end of file diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/DB2SqlDialectIT.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/DB2SqlDialectIT.java index cad0261bd..a4efdaac4 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/DB2SqlDialectIT.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/DB2SqlDialectIT.java @@ -1,11 +1,5 @@ package 
com.exasol.adapter.dialects.impl; -import com.exasol.adapter.dialects.AbstractIntegrationTest; - -import org.junit.Assume; -import org.junit.BeforeClass; -import org.junit.Test; - import java.io.FileNotFoundException; import java.math.BigDecimal; import java.sql.ResultSet; @@ -13,121 +7,104 @@ import java.util.ArrayList; import java.util.List; +import org.junit.Assume; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.exasol.adapter.dialects.AbstractIntegrationTest; + /** * Integration test for theDB2 SQL dialect * */ public class DB2SqlDialectIT extends AbstractIntegrationTest { - private static final String VIRTUAL_SCHEMA = "DB2"; private static final String DB2_SCHEMA = "DB2TEST"; private static final boolean IS_LOCAL = false; - - + @BeforeClass public static void setUpClass() throws FileNotFoundException, SQLException, ClassNotFoundException { Assume.assumeTrue(getConfig().DB2TestsRequested()); setConnection(connectToExa()); createDB2JDBCAdapter(); - createVirtualSchema( - VIRTUAL_SCHEMA, - DB2SqlDialect.NAME, - "", DB2_SCHEMA, - "", - getConfig().getDB2User(), - getConfig().getDB2Password(), - "ADAPTER.JDBC_ADAPTER", - getConfig().getDB2JdbcConnectionString(), - IS_LOCAL, - getConfig().debugAddress(), - "",null); + createVirtualSchema(VIRTUAL_SCHEMA, DB2SqlDialect.getPublicName(), "", DB2_SCHEMA, "", getConfig().getDB2User(), + getConfig().getDB2Password(), "ADAPTER.JDBC_ADAPTER", getConfig().getDB2JdbcConnectionString(), + IS_LOCAL, getConfig().debugAddress(), "", null); } - - + @Test public void testSelectNumericDataTypes() throws SQLException, ClassNotFoundException, FileNotFoundException { - String query = "SELECT PRICE,PROMOPRICE FROM " + VIRTUAL_SCHEMA + ".PRODUCT WHERE pid = '100-100-01'"; - ResultSet result = executeQuery(query); - matchNextRow( - result, - new BigDecimal("9.99"), - new BigDecimal("7.25") - ); - matchSingleRowExplain(query, "SELECT PRICE, PROMOPRICE FROM " + DB2_SCHEMA + ".PRODUCT WHERE PID = '100-100-01'"); + final String query = "SELECT PRICE,PROMOPRICE FROM " + VIRTUAL_SCHEMA + ".PRODUCT WHERE pid = '100-100-01'"; + final ResultSet result = executeQuery(query); + matchNextRow(result, new BigDecimal("9.99"), new BigDecimal("7.25")); + matchSingleRowExplain(query, + "SELECT PRICE, PROMOPRICE FROM " + DB2_SCHEMA + ".PRODUCT WHERE PID = '100-100-01'"); } - - @Test - public void testLimit() throws SQLException, ClassNotFoundException,FileNotFoundException { - String query = "SELECT * FROM (SELECT price,PROMOPRICE FROM DB2.PRODUCT) AS A LIMIT 1 "; - ResultSet result = executeQuery(query); - matchNextRow( - result, - new BigDecimal("9.99"), - new BigDecimal("7.25") - ); - matchSingleRowExplain(query, "SELECT PRICE, PROMOPRICE FROM " + DB2_SCHEMA + ".PRODUCT FETCH FIRST 1 ROWS ONLY"); + + @Test + public void testLimit() throws SQLException, ClassNotFoundException, FileNotFoundException { + final String query = "SELECT * FROM (SELECT price,PROMOPRICE FROM DB2.PRODUCT) AS A LIMIT 1 "; + final ResultSet result = executeQuery(query); + matchNextRow(result, new BigDecimal("9.99"), new BigDecimal("7.25")); + matchSingleRowExplain(query, + "SELECT PRICE, PROMOPRICE FROM " + DB2_SCHEMA + ".PRODUCT FETCH FIRST 1 ROWS ONLY"); } - + @Test - public void testTimeDataTypeConversions() throws SQLException, ClassNotFoundException,FileNotFoundException { - String query = "SELECT DETAIL_TIMESTAMP,UHRZEIT FROM " + VIRTUAL_SCHEMA + ".\"Additional_Datatypes\" WHERE DETAIL_TIMESTAMP = '2020-01-01-00.00.00.123456789123'"; - ResultSet result = executeQuery(query); - 
matchNextRow( - result, - "2020-01-01-00.00.00.123456789123", - "12.05.11" - ); - matchSingleRowExplain(query, "SELECT VARCHAR(DETAIL_TIMESTAMP), VARCHAR(UHRZEIT) FROM " + DB2_SCHEMA + ".\"Additional_Datatypes\" WHERE DETAIL_TIMESTAMP = '2020-01-01-00.00.00.123456789123'"); - + public void testTimeDataTypeConversions() throws SQLException, ClassNotFoundException, FileNotFoundException { + final String query = "SELECT DETAIL_TIMESTAMP,UHRZEIT FROM " + VIRTUAL_SCHEMA + + ".\"Additional_Datatypes\" WHERE DETAIL_TIMESTAMP = '2020-01-01-00.00.00.123456789123'"; + final ResultSet result = executeQuery(query); + matchNextRow(result, "2020-01-01-00.00.00.123456789123", "12.05.11"); + matchSingleRowExplain(query, "SELECT VARCHAR(DETAIL_TIMESTAMP), VARCHAR(UHRZEIT) FROM " + DB2_SCHEMA + + ".\"Additional_Datatypes\" WHERE DETAIL_TIMESTAMP = '2020-01-01-00.00.00.123456789123'"); + } - + @Test - public void testBitDataConversion() throws SQLException, ClassNotFoundException,FileNotFoundException { - String query = "SELECT BIDATAVARCHAR,BIDATACHAR FROM " + VIRTUAL_SCHEMA + ".\"Additional_Datatypes\" WHERE DETAIL_TIMESTAMP = '2020-01-01-00.00.00.123456789123'"; - ResultSet result = executeQuery(query); - matchNextRow( - result, - "30303031", - "41414242202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020" - ); - matchSingleRowExplain(query, "SELECT HEX(BIDATAVARCHAR), HEX(BIDATACHAR) FROM "+ DB2_SCHEMA + ".\"Additional_Datatypes\" WHERE DETAIL_TIMESTAMP = '2020-01-01-00.00.00.123456789123'"); - + public void testBitDataConversion() throws SQLException, ClassNotFoundException, FileNotFoundException { + final String query = "SELECT BIDATAVARCHAR,BIDATACHAR FROM " + VIRTUAL_SCHEMA + + ".\"Additional_Datatypes\" WHERE DETAIL_TIMESTAMP = '2020-01-01-00.00.00.123456789123'"; + final ResultSet result = executeQuery(query); + matchNextRow(result, "30303031", + "41414242202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020"); + matchSingleRowExplain(query, "SELECT HEX(BIDATAVARCHAR), HEX(BIDATACHAR) FROM " + DB2_SCHEMA + + ".\"Additional_Datatypes\" WHERE DETAIL_TIMESTAMP = '2020-01-01-00.00.00.123456789123'"); + } - + @Test - public void testUnicode() throws SQLException, ClassNotFoundException,FileNotFoundException { - String query = "SELECT UNICODECOL FROM " + VIRTUAL_SCHEMA + ".\"Additional_Datatypes\" WHERE DETAIL_TIMESTAMP = '2020-01-01-00.00.00.123456789123'"; - ResultSet result = executeQuery(query); - matchNextRow( - result, - "CHAR 茶" - ); - matchSingleRowExplain(query, "SELECT UNICODECOL FROM " + DB2_SCHEMA + ".\"Additional_Datatypes\" WHERE DETAIL_TIMESTAMP = '2020-01-01-00.00.00.123456789123'"); - + public void testUnicode() throws SQLException, ClassNotFoundException, FileNotFoundException { + final String query = "SELECT UNICODECOL FROM " + VIRTUAL_SCHEMA + + ".\"Additional_Datatypes\" WHERE DETAIL_TIMESTAMP = '2020-01-01-00.00.00.123456789123'"; + final ResultSet result = executeQuery(query); + matchNextRow(result, "CHAR 茶"); + matchSingleRowExplain(query, "SELECT UNICODECOL FROM " + DB2_SCHEMA + + ".\"Additional_Datatypes\" WHERE DETAIL_TIMESTAMP = '2020-01-01-00.00.00.123456789123'"); + } - - + @Test - public void testScalarFunctions() throws SQLException, ClassNotFoundException,FileNotFoundException { - String query = 
"SELECT ADD_DAYS(DETAIL_TIMESTAMP,2),ADD_YEARS(DETAIL_TIMESTAMP,-2),SUBSTR(UNICODECOL,1,4) FROM " + VIRTUAL_SCHEMA + ".\"Additional_Datatypes\" WHERE DETAIL_TIMESTAMP = '2020-01-01-00.00.00.123456789123'"; - ResultSet result = executeQuery(query); - matchNextRow( - result, - "2020-01-03-00.00.00.123456789123", - "2018-01-01-00.00.00.123456789123", - "CHAR" - ); - matchSingleRowExplain(query, "SELECT VARCHAR(DETAIL_TIMESTAMP + 2 DAYS), VARCHAR(DETAIL_TIMESTAMP + -2 YEARS), SUBSTR(UNICODECOL, 1, 4) FROM " + DB2_SCHEMA + ".\"Additional_Datatypes\" WHERE DETAIL_TIMESTAMP = '2020-01-01-00.00.00.123456789123'"); + public void testScalarFunctions() throws SQLException, ClassNotFoundException, FileNotFoundException { + final String query = "SELECT ADD_DAYS(DETAIL_TIMESTAMP,2),ADD_YEARS(DETAIL_TIMESTAMP,-2),SUBSTR(UNICODECOL,1,4) FROM " + + VIRTUAL_SCHEMA + + ".\"Additional_Datatypes\" WHERE DETAIL_TIMESTAMP = '2020-01-01-00.00.00.123456789123'"; + final ResultSet result = executeQuery(query); + matchNextRow(result, "2020-01-03-00.00.00.123456789123", "2018-01-01-00.00.00.123456789123", "CHAR"); + matchSingleRowExplain(query, + "SELECT VARCHAR(DETAIL_TIMESTAMP + 2 DAYS), VARCHAR(DETAIL_TIMESTAMP + -2 YEARS), SUBSTR(UNICODECOL, 1, 4) FROM " + + DB2_SCHEMA + + ".\"Additional_Datatypes\" WHERE DETAIL_TIMESTAMP = '2020-01-01-00.00.00.123456789123'"); } - + private static void createDB2JDBCAdapter() throws SQLException, FileNotFoundException { - ListDB2Includes = new ArrayList<>(); - DB2Includes.add(getConfig().getJdbcAdapterPath()); - String jdbcPrefixPath = getConfig().getDB2JdbcPrefixPath(); - for (String jar : getConfig().getDB2JdbcJars()) { - DB2Includes.add(jdbcPrefixPath + jar); + final List DB2Includes = new ArrayList<>(); + DB2Includes.add(getConfig().getJdbcAdapterPath()); + final String jdbcPrefixPath = getConfig().getDB2JdbcPrefixPath(); + for (final String jar : getConfig().getDB2JdbcJars()) { + DB2Includes.add(jdbcPrefixPath + jar); } createJDBCAdapter(DB2Includes); } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/DummyDialectWithHiddenConstructor.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/DummyDialectWithHiddenConstructor.java new file mode 100644 index 000000000..a5d61fcd3 --- /dev/null +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/DummyDialectWithHiddenConstructor.java @@ -0,0 +1,86 @@ +package com.exasol.adapter.dialects.impl; + +import java.sql.SQLException; + +import com.exasol.adapter.capabilities.Capabilities; +import com.exasol.adapter.dialects.AbstractSqlDialect; +import com.exasol.adapter.dialects.JdbcTypeDescription; +import com.exasol.adapter.dialects.SqlDialect; +import com.exasol.adapter.dialects.SqlDialectContext; +import com.exasol.adapter.dialects.SqlGenerationContext; +import com.exasol.adapter.metadata.DataType; + +/** + * This class implements a dummy of an {@link SqlDialect} that intentionally + * hides the constructor to prevent instantiation. + */ +public class DummyDialectWithHiddenConstructor extends AbstractSqlDialect { + public static String getPublicName() { + return "Dummy Dialect With hidden constructor"; + } + + // The constructor is hidden intentionally for testing purposes. 
+ private DummyDialectWithHiddenConstructor(final SqlDialectContext context) { + super(context); + } + + @Override + public Capabilities getCapabilities() { + return null; + } + + @Override + public SchemaOrCatalogSupport supportsJdbcCatalogs() { + return null; + } + + @Override + public SchemaOrCatalogSupport supportsJdbcSchemas() { + return null; + } + + @Override + public IdentifierCaseHandling getUnquotedIdentifierHandling() { + return null; + } + + @Override + public IdentifierCaseHandling getQuotedIdentifierHandling() { + return null; + } + + @Override + public String applyQuote(final String identifier) { + return null; + } + + @Override + public String applyQuoteIfNeeded(final String identifier) { + return null; + } + + @Override + public boolean requiresCatalogQualifiedTableNames(final SqlGenerationContext context) { + return false; + } + + @Override + public boolean requiresSchemaQualifiedTableNames(final SqlGenerationContext context) { + return false; + } + + @Override + public NullSorting getDefaultNullSorting() { + return null; + } + + @Override + public String getStringLiteral(final String value) { + return null; + } + + @Override + public DataType dialectSpecificMapJdbcType(final JdbcTypeDescription jdbcType) throws SQLException { + return null; + } +} \ No newline at end of file diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/DummyDialectWithoutNameMethod.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/DummyDialectWithoutNameMethod.java new file mode 100644 index 000000000..382c735d3 --- /dev/null +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/DummyDialectWithoutNameMethod.java @@ -0,0 +1,81 @@ +package com.exasol.adapter.dialects.impl; + +import java.sql.SQLException; + +import com.exasol.adapter.capabilities.Capabilities; +import com.exasol.adapter.dialects.AbstractSqlDialect; +import com.exasol.adapter.dialects.JdbcTypeDescription; +import com.exasol.adapter.dialects.SqlDialect; +import com.exasol.adapter.dialects.SqlDialectContext; +import com.exasol.adapter.dialects.SqlGenerationContext; +import com.exasol.adapter.metadata.DataType; + +/** + * This class implements a dummy of an {@link SqlDialect} without a name method. + * It is used for tests only. 
+ */ +public class DummyDialectWithoutNameMethod extends AbstractSqlDialect { + public DummyDialectWithoutNameMethod(final SqlDialectContext context) { + super(context); + } + + @Override + public Capabilities getCapabilities() { + return null; + } + + @Override + public SchemaOrCatalogSupport supportsJdbcCatalogs() { + return null; + } + + @Override + public SchemaOrCatalogSupport supportsJdbcSchemas() { + return null; + } + + @Override + public IdentifierCaseHandling getUnquotedIdentifierHandling() { + return null; + } + + @Override + public IdentifierCaseHandling getQuotedIdentifierHandling() { + return null; + } + + @Override + public String applyQuote(final String identifier) { + return null; + } + + @Override + public String applyQuoteIfNeeded(final String identifier) { + return null; + } + + @Override + public boolean requiresCatalogQualifiedTableNames(final SqlGenerationContext context) { + return false; + } + + @Override + public boolean requiresSchemaQualifiedTableNames(final SqlGenerationContext context) { + return false; + } + + @Override + public NullSorting getDefaultNullSorting() { + return null; + } + + @Override + public String getStringLiteral(final String value) { + return null; + } + + @Override + public DataType dialectSpecificMapJdbcType(final JdbcTypeDescription jdbcType) throws SQLException { + return null; + } +} \ No newline at end of file diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/ExasolSqlDialectIT.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/ExasolSqlDialectIT.java index 2e459daac..a557d9e2b 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/ExasolSqlDialectIT.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/ExasolSqlDialectIT.java @@ -1,14 +1,6 @@ package com.exasol.adapter.dialects.impl; -import com.exasol.adapter.AdapterException; -import com.exasol.adapter.dialects.AbstractIntegrationTest; -import com.exasol.adapter.dialects.SqlDialects; -import com.exasol.adapter.jdbc.JdbcMetadataReader; -import com.exasol.adapter.json.SchemaMetadataSerializer; -import com.exasol.adapter.metadata.SchemaMetadata; -import com.google.common.collect.ImmutableList; -import org.junit.*; -import org.junit.rules.ExpectedException; +import static org.junit.Assert.assertNotNull; import java.io.FileNotFoundException; import java.math.BigDecimal; @@ -19,10 +11,22 @@ import java.util.Arrays; import java.util.List; -import static org.junit.Assert.assertNotNull; +import org.junit.Assume; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import com.exasol.adapter.AdapterException; +import com.exasol.adapter.dialects.AbstractIntegrationTest; +import com.exasol.adapter.jdbc.JdbcMetadataReader; +import com.exasol.adapter.json.SchemaMetadataSerializer; +import com.exasol.adapter.metadata.SchemaMetadata; +import com.google.common.collect.ImmutableList; /** - * Integration tests for the EXASOL SQL dialect. + * Integration tests for the Exasol SQL dialect. 
*/ public class ExasolSqlDialectIT extends AbstractIntegrationTest { @@ -39,94 +43,54 @@ public class ExasolSqlDialectIT extends AbstractIntegrationTest { public static void setUpClass() throws FileNotFoundException, SQLException, ClassNotFoundException { Assume.assumeTrue(getConfig().exasolTestsRequested()); setConnection(connectToExa()); - String connectionString = "jdbc:exa:localhost:" + getPortOfConnectedDatabase(); // connect via Virtual Schema to local database - // The EXASOL jdbc driver is included in the Maven dependencies, so no need to add - List<String> includes = ImmutableList.of(getConfig().getJdbcAdapterPath()); + final String connectionString = "jdbc:exa:localhost:" + getPortOfConnectedDatabase(); // connect via Virtual + // Schema to local + // database + // The Exasol JDBC driver is included in the Maven dependencies, so no need to + // add it. + final List<String> includes = ImmutableList.of(getConfig().getJdbcAdapterPath()); createJDBCAdapter(includes); createTestSchema(); - createVirtualSchema( - VIRTUAL_SCHEMA, - ExasolSqlDialect.NAME, - "", TEST_SCHEMA, - "", - getConfig().getExasolUser(), - getConfig().getExasolPassword(), - "ADAPTER.JDBC_ADAPTER", - connectionString, IS_LOCAL, - getConfig().debugAddress(), - "", null); - createVirtualSchema( - VIRTUAL_SCHEMA_MIXED_CASE, - ExasolSqlDialect.NAME, - "", TEST_SCHEMA_MIXED_CASE, - "", - getConfig().getExasolUser(), - getConfig().getExasolPassword(), - "ADAPTER.JDBC_ADAPTER", - connectionString, IS_LOCAL, - getConfig().debugAddress(), - "",null); + createVirtualSchema(VIRTUAL_SCHEMA, ExasolSqlDialect.getPublicName(), "", TEST_SCHEMA, "", + getConfig().getExasolUser(), getConfig().getExasolPassword(), "ADAPTER.JDBC_ADAPTER", connectionString, + IS_LOCAL, getConfig().debugAddress(), "", null); + createVirtualSchema(VIRTUAL_SCHEMA_MIXED_CASE, ExasolSqlDialect.getPublicName(), "", TEST_SCHEMA_MIXED_CASE, "", + getConfig().getExasolUser(), getConfig().getExasolPassword(), "ADAPTER.JDBC_ADAPTER", connectionString, + IS_LOCAL, getConfig().debugAddress(), "", null); } private static void createTestSchema() throws SQLException { - // EXASOL integration test is special, because we can directly create our test data. - // For other dialects you have to prepare the source data base separately, because - // otherwise we would need to make the jdbc driver visible to the integration test framework as well (adds complexity) - Statement stmt = getConnection().createStatement(); + // Exasol integration test is special, because we can directly create our test + // data. 
+ // For other dialects you have to prepare the source database separately, + // because + // otherwise we would need to make the JDBC driver visible to the integration + // test framework as well (adds complexity) + final Statement stmt = getConnection().createStatement(); stmt.execute("DROP SCHEMA IF EXISTS " + TEST_SCHEMA + " CASCADE"); stmt.execute("CREATE SCHEMA " + TEST_SCHEMA); - stmt.execute("CREATE TABLE ALL_EXA_TYPES (" + - " c1 varchar(100) default 'bar'," + - " c2 varchar(100) CHARACTER SET ASCII default 'bar'," + - " c3 char(10) default 'foo'," + - " c4 char(10) CHARACTER SET ASCII default 'bar'," + - " c5 decimal(5,0) default 1," + - " c6 decimal(6,3) default 1.2," + - " c7 double default 1E2," + - " c8 boolean default TRUE," + - " c9 date default '2016-06-01'," + - " c10 timestamp default '2016-06-01 00:00:01.000'," + - " c11 timestamp with local time zone default '2016-06-01 00:00:02.000'," + - " c12 interval year to month default '3-5'," + - " c13 interval day to second default '2 12:50:10.123'," + - " c14 geometry(3857) default 'POINT(2 5)'" + - ")"); + stmt.execute("CREATE TABLE ALL_EXA_TYPES (" + " c1 varchar(100) default 'bar'," + + " c2 varchar(100) CHARACTER SET ASCII default 'bar'," + " c3 char(10) default 'foo'," + + " c4 char(10) CHARACTER SET ASCII default 'bar'," + " c5 decimal(5,0) default 1," + + " c6 decimal(6,3) default 1.2," + " c7 double default 1E2," + " c8 boolean default TRUE," + + " c9 date default '2016-06-01'," + " c10 timestamp default '2016-06-01 00:00:01.000'," + + " c11 timestamp with local time zone default '2016-06-01 00:00:02.000'," + + " c12 interval year to month default '3-5'," + " c13 interval day to second default '2 12:50:10.123'," + + " c14 geometry(3857) default 'POINT(2 5)'" + ")"); - stmt.execute("INSERT INTO " + TEST_SCHEMA + ".ALL_EXA_TYPES VALUES(" + - "'a茶'," + - "'b'," + - "'c茶'," + - "'d'," + - "123," + - "123.456," + - "2.2," + - "FALSE," + - "'2016-08-01'," + - "'2016-08-01 00:00:01.000'," + - "'2016-08-01 00:00:02.000'," + - "'4-6'," + - "'3 12:50:10.123'," + - "'POINT(2 5)'" + - ");"); + stmt.execute("INSERT INTO " + TEST_SCHEMA + ".ALL_EXA_TYPES VALUES(" + "'a茶'," + "'b'," + "'c茶'," + "'d'," + + "123," + "123.456," + "2.2," + "FALSE," + "'2016-08-01'," + "'2016-08-01 00:00:01.000'," + + "'2016-08-01 00:00:02.000'," + "'4-6'," + "'3 12:50:10.123'," + "'POINT(2 5)'" + ");"); stmt.execute("CREATE TABLE WITH_NULLS (c1 int, c2 varchar(100))"); - stmt.execute("INSERT INTO WITH_NULLS VALUES " + - " (1, 'a')," + - " (2, null)," + - " (3, 'b')," + - " (1, null)," + - " (null, 'c')"); + stmt.execute("INSERT INTO WITH_NULLS VALUES " + " (1, 'a')," + " (2, null)," + " (3, 'b')," + " (1, null)," + + " (null, 'c')"); stmt.execute("CREATE TABLE SIMPLE_VALUES (a int, b varchar(100), c double)"); - stmt.execute("INSERT INTO SIMPLE_VALUES VALUES " + - " (1, 'a', 1.1)," + - " (2, 'b', 2.2)," + - " (3, 'c', 3.3)," + - " (1, 'd', 4.4)," + - " (2, 'e', 5.5)," + - " (3, 'f', 6.6)," + - " (null, null, null)"); + stmt.execute("INSERT INTO SIMPLE_VALUES VALUES " + " (1, 'a', 1.1)," + " (2, 'b', 2.2)," + " (3, 'c', 3.3)," + + " (1, 'd', 4.4)," + " (2, 'e', 5.5)," + " (3, 'f', 6.6)," + " (null, null, null)"); - // Create schema, table and column with mixed case identifiers (to test correct mapping, and correct sql generation of adapter) + // Create schema, table and column with mixed case identifiers (to test correct + // mapping, and correct SQL generation of adapter) stmt.execute("DROP SCHEMA IF EXISTS \"" + TEST_SCHEMA_MIXED_CASE + "\" CASCADE"); 
stmt.execute("CREATE SCHEMA \"" + TEST_SCHEMA_MIXED_CASE + "\""); stmt.execute("CREATE TABLE \"Table_Mixed_Case\" (\"Column1\" int, \"column2\" int, COLUMN3 int)"); @@ -135,41 +99,35 @@ private static void createTestSchema() throws SQLException { @Test public void testDataTypeMapping() throws SQLException { - ResultSet result = executeQuery("SELECT COLUMN_NAME, COLUMN_TYPE, COLUMN_MAXSIZE, COLUMN_NUM_PREC, COLUMN_NUM_SCALE, COLUMN_DEFAULT FROM EXA_DBA_COLUMNS WHERE COLUMN_SCHEMA = '" + VIRTUAL_SCHEMA + "' AND COLUMN_TABLE='ALL_EXA_TYPES' ORDER BY COLUMN_ORDINAL_POSITION"); - matchNextRow(result, "C1", "VARCHAR(100) UTF8", (long)100, null, null, "'bar'"); - matchNextRow(result, "C2", "VARCHAR(100) ASCII", (long)100, null, null, "'bar'"); - matchNextRow(result, "C3", "CHAR(10) UTF8", (long)10, null, null, "'foo'"); - matchNextRow(result, "C4", "CHAR(10) ASCII", (long)10, null, null, "'bar'"); - matchNextRow(result, "C5", "DECIMAL(5,0)", (long)5, (long)5, (long)0, "1"); - matchNextRow(result, "C6", "DECIMAL(6,3)", (long)6, (long)6, (long)3, "1.2"); - matchNextRow(result, "C7", "DOUBLE", (long)64, null, null, "100"); - matchNextRow(result, "C8", "BOOLEAN", (long)1, null, null, "TRUE"); - matchNextRow(result, "C9", "DATE", (long)10, null, null, "'2016-06-01'"); - matchNextRow(result, "C10", "TIMESTAMP", (long)29, null, null, "'2016-06-01 00:00:01.000'"); - matchNextRow(result, "C11", "TIMESTAMP WITH LOCAL TIME ZONE", (long)29, null, null, "'2016-06-01 00:00:02.000'"); - matchNextRow(result, "C12", "INTERVAL YEAR(2) TO MONTH", (long)13, null, null, "'3-5'"); - matchNextRow(result, "C13", "INTERVAL DAY(2) TO SECOND(3)", (long)29, null, null, "'2 12:50:10.123'"); - matchLastRow(result, "C14", "GEOMETRY(3857)", (long)8000000, null, null, "'POINT(2 5)'"); // srid not yet supported, so will always default to 3857 + final ResultSet result = executeQuery( + "SELECT COLUMN_NAME, COLUMN_TYPE, COLUMN_MAXSIZE, COLUMN_NUM_PREC, COLUMN_NUM_SCALE, COLUMN_DEFAULT FROM EXA_DBA_COLUMNS WHERE COLUMN_SCHEMA = '" + + VIRTUAL_SCHEMA + "' AND COLUMN_TABLE='ALL_EXA_TYPES' ORDER BY COLUMN_ORDINAL_POSITION"); + matchNextRow(result, "C1", "VARCHAR(100) UTF8", (long) 100, null, null, "'bar'"); + matchNextRow(result, "C2", "VARCHAR(100) ASCII", (long) 100, null, null, "'bar'"); + matchNextRow(result, "C3", "CHAR(10) UTF8", (long) 10, null, null, "'foo'"); + matchNextRow(result, "C4", "CHAR(10) ASCII", (long) 10, null, null, "'bar'"); + matchNextRow(result, "C5", "DECIMAL(5,0)", (long) 5, (long) 5, (long) 0, "1"); + matchNextRow(result, "C6", "DECIMAL(6,3)", (long) 6, (long) 6, (long) 3, "1.2"); + matchNextRow(result, "C7", "DOUBLE", (long) 64, null, null, "100"); + matchNextRow(result, "C8", "BOOLEAN", (long) 1, null, null, "TRUE"); + matchNextRow(result, "C9", "DATE", (long) 10, null, null, "'2016-06-01'"); + matchNextRow(result, "C10", "TIMESTAMP", (long) 29, null, null, "'2016-06-01 00:00:01.000'"); + matchNextRow(result, "C11", "TIMESTAMP WITH LOCAL TIME ZONE", (long) 29, null, null, + "'2016-06-01 00:00:02.000'"); + matchNextRow(result, "C12", "INTERVAL YEAR(2) TO MONTH", (long) 13, null, null, "'3-5'"); + matchNextRow(result, "C13", "INTERVAL DAY(2) TO SECOND(3)", (long) 29, null, null, "'2 12:50:10.123'"); + matchLastRow(result, "C14", "GEOMETRY(3857)", (long) 8000000, null, null, "'POINT(2 5)'"); // srid not yet + // supported, so will + // always default to + // 3857 } @Test public void testDataTypeSelect() throws SQLException { - ResultSet result = executeQuery("SELECT * FROM " + VIRTUAL_SCHEMA + 
".ALL_EXA_TYPES"); - matchNextRow(result, - "a茶", - "b", - "c茶 ", - "d ", - 123, - new BigDecimal("123.456"), - 2.2, - false, - getSqlDate(2016,8,1), - getSqlTimestamp(2016,8,1,0,0,1,0), - getSqlTimestamp(2016,8,1,0,0,2,0), - "+04-06", - "+03 12:50:10.123", - "POINT (2 5)"); + final ResultSet result = executeQuery("SELECT * FROM " + VIRTUAL_SCHEMA + ".ALL_EXA_TYPES"); + matchNextRow(result, "a茶", "b", "c茶 ", "d ", 123, new BigDecimal("123.456"), 2.2, false, + getSqlDate(2016, 8, 1), getSqlTimestamp(2016, 8, 1, 0, 0, 1, 0), + getSqlTimestamp(2016, 8, 1, 0, 0, 2, 0), "+04-06", "+03 12:50:10.123", "POINT (2 5)"); } @Test @@ -184,15 +142,15 @@ public void testIdentifierCaseSensitivity() throws SQLException, FileNotFoundExc @Test public void testIdentifierCaseSensitivityException1() throws SQLException, FileNotFoundException { - thrown.expect(SQLException.class); - thrown.expectMessage("object TABLE_MIXED_CASE not found"); + this.thrown.expect(SQLException.class); + this.thrown.expectMessage("object TABLE_MIXED_CASE not found"); executeQuery("SELECT \"Column1\", \"column2\", COLUMN3 FROM Table_Mixed_Case"); } @Test public void testIdentifierCaseSensitivityException2() throws SQLException, FileNotFoundException { - thrown.expect(SQLException.class); - thrown.expectMessage("object COLUMN1 not found"); + this.thrown.expect(SQLException.class); + this.thrown.expectMessage("object COLUMN1 not found"); executeQuery("SELECT Column1, column2, COLUMN3 FROM \"Table_Mixed_Case\""); } @@ -205,34 +163,39 @@ public void testGroupConcat() throws SQLException, FileNotFoundException { query = "SELECT GROUP_CONCAT(DISTINCT A) FROM " + VIRTUAL_SCHEMA + ".SIMPLE_VALUES"; result = executeQuery(query); matchLastRow(result, "1,2,3"); - matchSingleRowExplain(query, "SELECT GROUP_CONCAT(DISTINCT A) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", IS_LOCAL); + matchSingleRowExplain(query, "SELECT GROUP_CONCAT(DISTINCT A) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", + IS_LOCAL); query = "SELECT GROUP_CONCAT(A ORDER BY C) FROM " + VIRTUAL_SCHEMA + ".SIMPLE_VALUES"; result = executeQuery(query); matchLastRow(result, "1,2,3,1,2,3"); - matchSingleRowExplain(query, "SELECT GROUP_CONCAT(A ORDER BY C) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", IS_LOCAL); + matchSingleRowExplain(query, "SELECT GROUP_CONCAT(A ORDER BY C) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", + IS_LOCAL); query = "SELECT GROUP_CONCAT(A ORDER BY C DESC) FROM " + VIRTUAL_SCHEMA + ".SIMPLE_VALUES"; result = executeQuery(query); matchLastRow(result, "3,2,1,3,2,1"); - matchSingleRowExplain(query, "SELECT GROUP_CONCAT(A ORDER BY C DESC) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", IS_LOCAL); + matchSingleRowExplain(query, "SELECT GROUP_CONCAT(A ORDER BY C DESC) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", + IS_LOCAL); query = "SELECT GROUP_CONCAT(A ORDER BY C DESC NULLS LAST) FROM " + VIRTUAL_SCHEMA + ".SIMPLE_VALUES"; result = executeQuery(query); matchLastRow(result, "3,2,1,3,2,1"); - matchSingleRowExplain(query, "SELECT GROUP_CONCAT(A ORDER BY C DESC NULLS LAST) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", IS_LOCAL); + matchSingleRowExplain(query, + "SELECT GROUP_CONCAT(A ORDER BY C DESC NULLS LAST) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", IS_LOCAL); query = "SELECT GROUP_CONCAT(A SEPARATOR ';'||' ') FROM " + VIRTUAL_SCHEMA + ".SIMPLE_VALUES"; result = executeQuery(query); matchLastRow(result, "1; 1; 2; 2; 3; 3"); - matchSingleRowExplain(query, "SELECT GROUP_CONCAT(A SEPARATOR '; ') FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", IS_LOCAL); + matchSingleRowExplain(query, "SELECT GROUP_CONCAT(A 
SEPARATOR '; ') FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", + IS_LOCAL); } @Test public void testExtract() throws SQLException, FileNotFoundException { String query = "SELECT EXTRACT(MONTH FROM C9) FROM " + VIRTUAL_SCHEMA + ".ALL_EXA_TYPES"; ResultSet result = executeQuery(query); - matchLastRow(result, (short)8); + matchLastRow(result, (short) 8); matchSingleRowExplain(query, "SELECT EXTRACT(MONTH FROM C9) FROM " + TEST_SCHEMA + ".ALL_EXA_TYPES", IS_LOCAL); query = "SELECT EXTRACT(MONTH FROM C12) FROM " + VIRTUAL_SCHEMA + ".ALL_EXA_TYPES"; result = executeQuery(query); - matchLastRow(result, (short)6); + matchLastRow(result, (short) 6); matchSingleRowExplain(query, "SELECT EXTRACT(MONTH FROM C12) FROM " + TEST_SCHEMA + ".ALL_EXA_TYPES", IS_LOCAL); } @@ -241,102 +204,136 @@ public void testCast() throws SQLException, FileNotFoundException { String query = "SELECT CAST(A AS CHAR(15)) FROM " + VIRTUAL_SCHEMA + ".SIMPLE_VALUES"; ResultSet result = executeQuery(query); matchNextRow(result, "1 "); - matchSingleRowExplain(query, "SELECT CAST(A AS CHAR(15) UTF8) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", IS_LOCAL); + matchSingleRowExplain(query, "SELECT CAST(A AS CHAR(15) UTF8) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", + IS_LOCAL); query = "SELECT CAST(CAST(A > 0 AS VARCHAR(15)) AS BOOLEAN) FROM " + VIRTUAL_SCHEMA + ".SIMPLE_VALUES"; result = executeQuery(query); matchNextRow(result, true); - matchSingleRowExplain(query, "SELECT CAST(CAST(0 < A AS VARCHAR(15) UTF8) AS BOOLEAN) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", IS_LOCAL); + matchSingleRowExplain(query, + "SELECT CAST(CAST(0 < A AS VARCHAR(15) UTF8) AS BOOLEAN) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", + IS_LOCAL); query = "SELECT CAST(CAST(C9 AS VARCHAR(30)) AS DATE) FROM " + VIRTUAL_SCHEMA + ".ALL_EXA_TYPES"; result = executeQuery(query); matchNextRow(result, getSqlDate(2016, 8, 1)); - matchSingleRowExplain(query, "SELECT CAST(CAST(C9 AS VARCHAR(30) UTF8) AS DATE) FROM " + TEST_SCHEMA + ".ALL_EXA_TYPES", IS_LOCAL); + matchSingleRowExplain(query, + "SELECT CAST(CAST(C9 AS VARCHAR(30) UTF8) AS DATE) FROM " + TEST_SCHEMA + ".ALL_EXA_TYPES", IS_LOCAL); query = "SELECT CAST(CAST(A AS VARCHAR(15)) AS DECIMAL(8, 1)) FROM " + VIRTUAL_SCHEMA + ".SIMPLE_VALUES"; result = executeQuery(query); matchNextRow(result, new BigDecimal("1.0")); - matchSingleRowExplain(query, "SELECT CAST(CAST(A AS VARCHAR(15) UTF8) AS DECIMAL(8, 1)) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", IS_LOCAL); + matchSingleRowExplain(query, + "SELECT CAST(CAST(A AS VARCHAR(15) UTF8) AS DECIMAL(8, 1)) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", + IS_LOCAL); query = "SELECT CAST(CAST(C AS VARCHAR(15)) AS DOUBLE) FROM " + VIRTUAL_SCHEMA + ".SIMPLE_VALUES"; result = executeQuery(query); matchNextRow(result, 1.1d); - matchSingleRowExplain(query, "SELECT CAST(CAST(C AS VARCHAR(15) UTF8) AS DOUBLE) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", IS_LOCAL); + matchSingleRowExplain(query, + "SELECT CAST(CAST(C AS VARCHAR(15) UTF8) AS DOUBLE) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", IS_LOCAL); query = "SELECT CAST(CAST(C14 AS VARCHAR(100)) AS GEOMETRY(5)) FROM " + VIRTUAL_SCHEMA + ".ALL_EXA_TYPES"; result = executeQuery(query); matchNextRow(result, "POINT (2 5)"); - matchSingleRowExplain(query, "SELECT CAST(CAST(C14 AS VARCHAR(100) UTF8) AS GEOMETRY(5)) FROM " + TEST_SCHEMA + ".ALL_EXA_TYPES", IS_LOCAL); - query = "SELECT CAST(CAST(C13 AS VARCHAR(100)) AS INTERVAL DAY (5) TO SECOND (2)) FROM " + VIRTUAL_SCHEMA + ".ALL_EXA_TYPES"; + matchSingleRowExplain(query, + "SELECT CAST(CAST(C14 AS VARCHAR(100) UTF8) AS 
GEOMETRY(5)) FROM " + TEST_SCHEMA + ".ALL_EXA_TYPES", + IS_LOCAL); + query = "SELECT CAST(CAST(C13 AS VARCHAR(100)) AS INTERVAL DAY (5) TO SECOND (2)) FROM " + VIRTUAL_SCHEMA + + ".ALL_EXA_TYPES"; result = executeQuery(query); matchNextRow(result, "+00003 12:50:10.12"); - matchSingleRowExplain(query, "SELECT CAST(CAST(C13 AS VARCHAR(100) UTF8) AS INTERVAL DAY (5) TO SECOND (2)) FROM " + TEST_SCHEMA + ".ALL_EXA_TYPES", IS_LOCAL); - query = "SELECT CAST(CAST(C12 AS VARCHAR(100)) AS INTERVAL YEAR (5) TO MONTH) FROM " + VIRTUAL_SCHEMA + ".ALL_EXA_TYPES"; + matchSingleRowExplain(query, + "SELECT CAST(CAST(C13 AS VARCHAR(100) UTF8) AS INTERVAL DAY (5) TO SECOND (2)) FROM " + TEST_SCHEMA + + ".ALL_EXA_TYPES", + IS_LOCAL); + query = "SELECT CAST(CAST(C12 AS VARCHAR(100)) AS INTERVAL YEAR (5) TO MONTH) FROM " + VIRTUAL_SCHEMA + + ".ALL_EXA_TYPES"; result = executeQuery(query); matchNextRow(result, "+00004-06"); - matchSingleRowExplain(query, "SELECT CAST(CAST(C12 AS VARCHAR(100) UTF8) AS INTERVAL YEAR (5) TO MONTH) FROM " + TEST_SCHEMA + ".ALL_EXA_TYPES", IS_LOCAL); + matchSingleRowExplain(query, "SELECT CAST(CAST(C12 AS VARCHAR(100) UTF8) AS INTERVAL YEAR (5) TO MONTH) FROM " + + TEST_SCHEMA + ".ALL_EXA_TYPES", IS_LOCAL); query = "SELECT CAST(CAST(C10 AS VARCHAR(100)) AS TIMESTAMP) FROM " + VIRTUAL_SCHEMA + ".ALL_EXA_TYPES"; result = executeQuery(query); - matchNextRow(result, getSqlTimestamp(2016,8,1,0,0,1,0)); - matchSingleRowExplain(query, "SELECT CAST(CAST(C10 AS VARCHAR(100) UTF8) AS TIMESTAMP) FROM " + TEST_SCHEMA + ".ALL_EXA_TYPES", IS_LOCAL); - query = "SELECT CAST(CAST(C11 AS VARCHAR(100)) AS TIMESTAMP WITH LOCAL TIME ZONE) FROM " + VIRTUAL_SCHEMA + ".ALL_EXA_TYPES"; + matchNextRow(result, getSqlTimestamp(2016, 8, 1, 0, 0, 1, 0)); + matchSingleRowExplain(query, + "SELECT CAST(CAST(C10 AS VARCHAR(100) UTF8) AS TIMESTAMP) FROM " + TEST_SCHEMA + ".ALL_EXA_TYPES", + IS_LOCAL); + query = "SELECT CAST(CAST(C11 AS VARCHAR(100)) AS TIMESTAMP WITH LOCAL TIME ZONE) FROM " + VIRTUAL_SCHEMA + + ".ALL_EXA_TYPES"; result = executeQuery(query); - matchNextRow(result,getSqlTimestamp(2016,8,1,0,0,2,0)); - matchSingleRowExplain(query, "SELECT CAST(CAST(C11 AS VARCHAR(100) UTF8) AS TIMESTAMP WITH LOCAL TIME ZONE) FROM " + TEST_SCHEMA + ".ALL_EXA_TYPES", IS_LOCAL); + matchNextRow(result, getSqlTimestamp(2016, 8, 1, 0, 0, 2, 0)); + matchSingleRowExplain(query, + "SELECT CAST(CAST(C11 AS VARCHAR(100) UTF8) AS TIMESTAMP WITH LOCAL TIME ZONE) FROM " + TEST_SCHEMA + + ".ALL_EXA_TYPES", + IS_LOCAL); query = "SELECT CAST(A AS VARCHAR(15)) FROM " + VIRTUAL_SCHEMA + ".SIMPLE_VALUES"; result = executeQuery(query); matchNextRow(result, "1"); - matchSingleRowExplain(query, "SELECT CAST(A AS VARCHAR(15) UTF8) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", IS_LOCAL); + matchSingleRowExplain(query, "SELECT CAST(A AS VARCHAR(15) UTF8) FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", + IS_LOCAL); } @Test public void testCase() throws SQLException, FileNotFoundException { - String query = "SELECT CASE A WHEN 1 THEN 'YES' WHEN 2 THEN 'PERHAPS' ELSE 'NO' END FROM " + VIRTUAL_SCHEMA + ".SIMPLE_VALUES"; + String query = "SELECT CASE A WHEN 1 THEN 'YES' WHEN 2 THEN 'PERHAPS' ELSE 'NO' END FROM " + VIRTUAL_SCHEMA + + ".SIMPLE_VALUES"; ResultSet result = executeQuery(query); matchNextRow(result, "YES"); - matchSingleRowExplain(query, "SELECT CASE A WHEN 1 THEN 'YES' WHEN 2 THEN 'PERHAPS' ELSE 'NO' END FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", IS_LOCAL); + matchSingleRowExplain(query, "SELECT CASE A WHEN 1 THEN 'YES' WHEN 2 THEN 'PERHAPS' ELSE 'NO' 
END FROM " + + TEST_SCHEMA + ".SIMPLE_VALUES", IS_LOCAL); query = "SELECT CASE WHEN A > 1 THEN 'YES' ELSE 'NO' END FROM " + VIRTUAL_SCHEMA + ".SIMPLE_VALUES"; result = executeQuery(query); matchNextRow(result, "NO"); - matchSingleRowExplain(query, "SELECT CASE WHEN 1 < A THEN 'YES' ELSE 'NO' END FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", IS_LOCAL); + matchSingleRowExplain(query, + "SELECT CASE WHEN 1 < A THEN 'YES' ELSE 'NO' END FROM " + TEST_SCHEMA + ".SIMPLE_VALUES", IS_LOCAL); } /** - * This was replaced by integration test {@link #testDataTypeMapping()}. It can be enabled temporarily for debugging. + * This was replaced by integration test {@link #testDataTypeMapping()}. It can + * be enabled temporarily for debugging. */ @Ignore @Test - public void testDifferentDataTypes() throws SQLException, ClassNotFoundException, FileNotFoundException, AdapterException { - Statement stmt = getConnection().createStatement(); - String jdbc_adapter_test_schema = "JDBC_ADAPTER_TEST_SCHEMA"; + public void testDifferentDataTypes() + throws SQLException, ClassNotFoundException, FileNotFoundException, AdapterException { + final Statement stmt = getConnection().createStatement(); + createOrReplaceSchema(stmt); + createTables(stmt); + final String[] tableNames = new String[] { "T8", "T9", "TA", "TB", "TC", "TD" }; + final List<String> tables = new ArrayList<>(Arrays.asList(tableNames)); + final SchemaMetadata meta = JdbcMetadataReader.readRemoteMetadata("jdbc:exa:" + getConfig().getExasolAddress(), + getConfig().getExasolUser(), getConfig().getExasolPassword(), "EXA_DB", "JDBC_ADAPTER_TEST_SCHEMA", + tables, ExasolSqlDialect.getPublicName(), getConfig().getExceptionHandlingMode()); + if (getConfig().isDebugOn()) { + System.out.println("Meta: " + SchemaMetadataSerializer.serialize(meta).build().toString()); + } + assertNotNull(meta); + } + + private void createOrReplaceSchema(final Statement stmt) throws SQLException { + final String jdbc_adapter_test_schema = "JDBC_ADAPTER_TEST_SCHEMA"; String sql = "DROP SCHEMA IF EXISTS " + jdbc_adapter_test_schema + " CASCADE"; stmt.execute(sql); sql = "CREATE SCHEMA " + jdbc_adapter_test_schema; stmt.execute(sql); - sql = "CREATE TABLE T8(c1 boolean default TRUE, c2 char(10) default 'foo'" + - ", c3 date default '2016-06-01', c4 decimal(5,0) default 0)"; + } + + private void createTables(final Statement stmt) throws SQLException { + String sql; + sql = "CREATE TABLE T8(c1 boolean default TRUE, c2 char(10) default 'foo'" + + ", c3 date default '2016-06-01', c4 decimal(5,0) default 0)"; stmt.execute(sql); - sql = "CREATE TABLE T9(c1 double default 1E2, c2 geometry default 'POINT(2 5)'" + - ", c3 interval year to month default '3-5', c4 interval day to second default '2 12:50:10.123')"; + sql = "CREATE TABLE T9(c1 double default 1E2, c2 geometry default 'POINT(2 5)'" + + ", c3 interval year to month default '3-5', c4 interval day to second default '2 12:50:10.123')"; stmt.execute(sql); - sql = "CREATE TABLE TA(c1 timestamp default '2016-06-01 00:00:01.000'" + - ", c2 timestamp with local time zone default '2016-06-01 00:00:02.000', c3 varchar(100) default 'bar')"; + sql = "CREATE TABLE TA(c1 timestamp default '2016-06-01 00:00:01.000'" + + ", c2 timestamp with local time zone default '2016-06-01 00:00:02.000', c3 varchar(100) default 'bar')"; stmt.execute(sql); - sql = "CREATE TABLE TB(c1 boolean default NULL, c2 char(10) default NULL" + - ", c3 date default NULL, c4 decimal(5,0) default NULL)"; + sql = "CREATE TABLE TB(c1 boolean default NULL, c2 char(10) default NULL" + + ", c3 
date default NULL, c4 decimal(5,0) default NULL)"; stmt.execute(sql); - sql = "CREATE TABLE TC(c1 double default NULL, c2 geometry default NULL" + - ", c3 interval year to month default NULL, c4 interval day to second default NULL)"; + sql = "CREATE TABLE TC(c1 double default NULL, c2 geometry default NULL" + + ", c3 interval year to month default NULL, c4 interval day to second default NULL)"; stmt.execute(sql); - sql = "CREATE TABLE TD(c1 timestamp default NULL, c2 timestamp with local time zone default NULL" + - ", c3 varchar(100) default NULL)"; + sql = "CREATE TABLE TD(c1 timestamp default NULL, c2 timestamp with local time zone default NULL" + + ", c3 varchar(100) default NULL)"; stmt.execute(sql); - String[] tableNames = new String[]{"T8", "T9", "TA", "TB", "TC", "TD"}; - List<String> tables = new ArrayList<>(Arrays.asList(tableNames)); - SqlDialects dialects = new SqlDialects(ImmutableList.of(ExasolSqlDialect.NAME)); - SchemaMetadata meta = JdbcMetadataReader.readRemoteMetadata("jdbc:exa:" + getConfig().getExasolAddress(), - getConfig().getExasolUser(), getConfig().getExasolPassword(), - "EXA_DB", "JDBC_ADAPTER_TEST_SCHEMA", tables, dialects, ExasolSqlDialect.NAME, - getConfig().getExceptionHandlingMode()); - if (getConfig().isDebugOn()) { - System.out.println("Meta: " + SchemaMetadataSerializer.serialize(meta).build().toString()); - } - assertNotNull(meta); } } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/GenericSqlDialectIT.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/GenericSqlDialectIT.java index 750998129..b11917072 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/GenericSqlDialectIT.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/GenericSqlDialectIT.java @@ -1,9 +1,6 @@ package com.exasol.adapter.dialects.impl; -import com.exasol.adapter.dialects.AbstractIntegrationTest; -import org.junit.Assume; -import org.junit.BeforeClass; -import org.junit.Test; +import static org.junit.Assert.assertEquals; import java.io.FileNotFoundException; import java.sql.ResultSet; @@ -11,47 +8,41 @@ import java.util.ArrayList; import java.util.List; -import static org.junit.Assert.assertEquals; +import org.junit.Assume; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.exasol.adapter.dialects.AbstractIntegrationTest; public class GenericSqlDialectIT extends AbstractIntegrationTest { private static final boolean IS_LOCAL = false; - @BeforeClass public static void setUpClass() throws FileNotFoundException, SQLException, ClassNotFoundException { Assume.assumeTrue(getConfig().genericTestsRequested()); - String connectionString = getConfig().getGenericJdbcConnectionString(); + final String connectionString = getConfig().getGenericJdbcConnectionString(); setConnection(connectToExa()); createGenericJDBCAdapter(); - String catalogName = "jm3450"; // This only works for the database in our test environment - String schemaName = ""; - createVirtualSchema("VS_GENERIC_MYSQL", - GenericSqlDialect.NAME, - catalogName, - schemaName, - "", - getConfig().getGenericUser(), - getConfig().getGenericPassword(), - "ADAPTER.JDBC_ADAPTER", - connectionString, - IS_LOCAL, - getConfig().debugAddress(), - "", null); + final String catalogName = "jm3450"; // This only works for the database in our test environment + final String schemaName = ""; + createVirtualSchema("VS_GENERIC_MYSQL", 
GenericSqlDialect.getPublicName(), catalogName, schemaName, "", + getConfig().getGenericUser(), getConfig().getGenericPassword(), "ADAPTER.JDBC_ADAPTER", + connectionString, IS_LOCAL, getConfig().debugAddress(), "", null); } @Test public void testVirtualSchema() throws SQLException, ClassNotFoundException, FileNotFoundException { - ResultSet result = executeQuery("SELECT * FROM \"customers\" ORDER BY id"); + final ResultSet result = executeQuery("SELECT * FROM \"customers\" ORDER BY id"); result.next(); assertEquals("1", result.getString(1)); } private static void createGenericJDBCAdapter() throws SQLException, FileNotFoundException { - String jdbcAdapterPath = getConfig().getJdbcAdapterPath(); - String jdbcDriverDriver = getConfig().getGenericJdbcDriverPath(); - List<String> includes = new ArrayList<>(); + final String jdbcAdapterPath = getConfig().getJdbcAdapterPath(); + final String jdbcDriverDriver = getConfig().getGenericJdbcDriverPath(); + final List<String> includes = new ArrayList<>(); includes.add(jdbcAdapterPath); includes.add(jdbcDriverDriver); createJDBCAdapter(includes); diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/HiveSqlDialectIT.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/HiveSqlDialectIT.java index e1b88b559..af46d9cdf 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/HiveSqlDialectIT.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/HiveSqlDialectIT.java @@ -1,11 +1,5 @@ package com.exasol.adapter.dialects.impl; - -import com.exasol.adapter.dialects.AbstractIntegrationTest; -import org.junit.Assume; -import org.junit.BeforeClass; -import org.junit.Test; - import java.io.FileNotFoundException; import java.math.BigDecimal; import java.sql.ResultSet; @@ -13,6 +7,12 @@ import java.util.ArrayList; import java.util.List; +import org.junit.Assume; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.exasol.adapter.dialects.AbstractIntegrationTest; + /** * Integration test for the Hive SQL dialect * @@ -30,228 +30,219 @@ public static void setUpClass() throws FileNotFoundException, SQLException, Clas setConnection(connectToExa()); createHiveJDBCAdapter(); - createVirtualSchema( - VIRTUAL_SCHEMA, - HiveSqlDialect.NAME, - "", HIVE_SCHEMA, - "", - "hdfs", - "hdfs", - "ADAPTER.JDBC_ADAPTER", - getConfig().getHiveJdbcConnectionString(), - IS_LOCAL, - getConfig().debugAddress(), - "ALL_HIVE_DATA_TYPES",null); + createVirtualSchema(VIRTUAL_SCHEMA, HiveSqlDialect.getPublicName(), "", HIVE_SCHEMA, "", "hdfs", "hdfs", + "ADAPTER.JDBC_ADAPTER", getConfig().getHiveJdbcConnectionString(), IS_LOCAL, getConfig().debugAddress(), + "ALL_HIVE_DATA_TYPES", null); } @Test public void testTypeMapping() throws SQLException, ClassNotFoundException, FileNotFoundException { - ResultSet result = executeQuery("SELECT COLUMN_NAME, COLUMN_TYPE, COLUMN_MAXSIZE, COLUMN_NUM_PREC, COLUMN_NUM_SCALE, COLUMN_DEFAULT FROM EXA_DBA_COLUMNS WHERE COLUMN_SCHEMA = '" + VIRTUAL_SCHEMA + "' AND COLUMN_TABLE='ALL_HIVE_DATA_TYPES' ORDER BY COLUMN_ORDINAL_POSITION"); - matchNextRow(result, "ARRAYCOL", "VARCHAR(255) ASCII", (long)255, null, null, null); - matchNextRow(result, "BIGINTEGER", "DECIMAL(19,0)", (long)19, (long)19, (long)0, null); - matchNextRow(result, "BOOLCOLUMN", "BOOLEAN", (long)1, null, null, null); - matchNextRow(result, "CHARCOLUMN", "CHAR(1) UTF8", (long)1, null, null, null); - matchNextRow(result, 
"DECIMALCOL", "DECIMAL(10,0)", (long)10, (long)10, (long)0, null); - matchNextRow(result, "DOUBLECOL", "DOUBLE", (long)64, null, null, null); - matchNextRow(result, "FLOATCOL", "DOUBLE", (long)64, null, null, null); - matchNextRow(result, "INTCOL", "DECIMAL(10,0)", (long)10, (long)10, (long)0, null); - matchNextRow(result, "MAPCOL", "VARCHAR(255) ASCII", (long)255, null, null, null); - matchNextRow(result, "SMALLINTEGER", "DECIMAL(5,0)", (long)5, (long)5, (long)0, null); - matchNextRow(result, "STRINGCOL", "VARCHAR(255) ASCII", (long)255, null, null, null); - matchNextRow(result, "STRUCTCOL", "VARCHAR(255) ASCII", (long)255, null, null, null); - matchNextRow(result, "TIMESTAMPCOL", "TIMESTAMP", (long)29, null, null, null); - matchNextRow(result, "TINYINTEGER", "DECIMAL(3,0)", (long)3, (long)3, (long)0, null); - matchNextRow(result, "VARCHARCOL", "VARCHAR(10) UTF8", (long)10, null, null, null); - matchNextRow(result, "BINARYCOL", "VARCHAR(2000000) UTF8", (long)2000000, null, null, null); - matchLastRow(result, "DATECOL", "DATE", (long)10, null, null, null); + final ResultSet result = executeQuery( + "SELECT COLUMN_NAME, COLUMN_TYPE, COLUMN_MAXSIZE, COLUMN_NUM_PREC, COLUMN_NUM_SCALE, COLUMN_DEFAULT FROM EXA_DBA_COLUMNS WHERE COLUMN_SCHEMA = '" + + VIRTUAL_SCHEMA + "' AND COLUMN_TABLE='ALL_HIVE_DATA_TYPES' ORDER BY COLUMN_ORDINAL_POSITION"); + matchNextRow(result, "ARRAYCOL", "VARCHAR(255) ASCII", (long) 255, null, null, null); + matchNextRow(result, "BIGINTEGER", "DECIMAL(19,0)", (long) 19, (long) 19, (long) 0, null); + matchNextRow(result, "BOOLCOLUMN", "BOOLEAN", (long) 1, null, null, null); + matchNextRow(result, "CHARCOLUMN", "CHAR(1) UTF8", (long) 1, null, null, null); + matchNextRow(result, "DECIMALCOL", "DECIMAL(10,0)", (long) 10, (long) 10, (long) 0, null); + matchNextRow(result, "DOUBLECOL", "DOUBLE", (long) 64, null, null, null); + matchNextRow(result, "FLOATCOL", "DOUBLE", (long) 64, null, null, null); + matchNextRow(result, "INTCOL", "DECIMAL(10,0)", (long) 10, (long) 10, (long) 0, null); + matchNextRow(result, "MAPCOL", "VARCHAR(255) ASCII", (long) 255, null, null, null); + matchNextRow(result, "SMALLINTEGER", "DECIMAL(5,0)", (long) 5, (long) 5, (long) 0, null); + matchNextRow(result, "STRINGCOL", "VARCHAR(255) ASCII", (long) 255, null, null, null); + matchNextRow(result, "STRUCTCOL", "VARCHAR(255) ASCII", (long) 255, null, null, null); + matchNextRow(result, "TIMESTAMPCOL", "TIMESTAMP", (long) 29, null, null, null); + matchNextRow(result, "TINYINTEGER", "DECIMAL(3,0)", (long) 3, (long) 3, (long) 0, null); + matchNextRow(result, "VARCHARCOL", "VARCHAR(10) UTF8", (long) 10, null, null, null); + matchNextRow(result, "BINARYCOL", "VARCHAR(2000000) UTF8", (long) 2000000, null, null, null); + matchLastRow(result, "DATECOL", "DATE", (long) 10, null, null, null); } @Test public void testSelectWithAllTypes() throws SQLException { - ResultSet result = executeQuery("SELECT * from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"); - matchNextRow(result, - "[\"etet\",\"ettee\"]", - new BigDecimal("56"), - true, - "2", - (long)53, - 56.3, - 5.199999809265137, - (long)85, - "{\"jkljj\":5}", - 2, - "tshg", - "{\"a\":\"value\",\"b\":{\"c\":8}}", - getSqlTimestamp(2017,1,2, 13,32,50,744), - (short)1, - "tytu", - "MTAxMA==", - getSqlDate(1970,1,1)); + final ResultSet result = executeQuery("SELECT * from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"); + matchNextRow(result, "[\"etet\",\"ettee\"]", new BigDecimal("56"), true, "2", (long) 53, 56.3, + 5.199999809265137, (long) 85, "{\"jkljj\":5}", 2, 
"tshg", "{\"a\":\"value\",\"b\":{\"c\":8}}", + getSqlTimestamp(2017, 1, 2, 13, 32, 50, 744), (short) 1, "tytu", "MTAxMA==", getSqlDate(1970, 1, 1)); } - - @Test public void testProjection() throws SQLException { - String query = "SELECT BIGINTEGER FROM " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; - ResultSet result = executeQuery(query); + final String query = "SELECT BIGINTEGER FROM " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; + final ResultSet result = executeQuery(query); matchNextRow(result, new BigDecimal("56")); matchSingleRowExplain(query, "SELECT `BIGINTEGER` FROM `xperience`.`ALL_HIVE_DATA_TYPES`"); } - @Test public void testRewrittenProjection() throws SQLException { - String query = "SELECT BINARYCOL FROM " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; - ResultSet result = executeQuery(query); + final String query = "SELECT BINARYCOL FROM " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; + final ResultSet result = executeQuery(query); matchNextRow(result, "MTAxMA=="); matchSingleRowExplain(query, "SELECT base64(`BINARYCOL`) FROM `xperience`.`ALL_HIVE_DATA_TYPES`"); } - @Test public void testAggregateGroupByColumn() throws SQLException, ClassNotFoundException, FileNotFoundException { - String query = "SELECT boolcolumn, min(biginteger) FROM " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES GROUP BY boolcolumn"; - ResultSet result = executeQuery(query); + final String query = "SELECT boolcolumn, min(biginteger) FROM " + VIRTUAL_SCHEMA + + ".ALL_HIVE_DATA_TYPES GROUP BY boolcolumn"; + final ResultSet result = executeQuery(query); matchNextRow(result, false, new BigDecimal("56")); matchNextRow(result, true, new BigDecimal("51")); - matchSingleRowExplain(query, "SELECT `BOOLCOLUMN`, MIN(`BIGINTEGER`) FROM `xperience`.`ALL_HIVE_DATA_TYPES` GROUP BY `BOOLCOLUMN`"); + matchSingleRowExplain(query, + "SELECT `BOOLCOLUMN`, MIN(`BIGINTEGER`) FROM `xperience`.`ALL_HIVE_DATA_TYPES` GROUP BY `BOOLCOLUMN`"); } @Test public void testAggregateHaving() throws SQLException, ClassNotFoundException, FileNotFoundException { - String query = "SELECT boolcolumn, min(biginteger) FROM " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES GROUP BY boolcolumn having min(biginteger)<56"; - ResultSet result = executeQuery(query); + final String query = "SELECT boolcolumn, min(biginteger) FROM " + VIRTUAL_SCHEMA + + ".ALL_HIVE_DATA_TYPES GROUP BY boolcolumn having min(biginteger)<56"; + final ResultSet result = executeQuery(query); matchNextRow(result, true, new BigDecimal("51")); - matchSingleRowExplain(query, "SELECT `BOOLCOLUMN`, MIN(`BIGINTEGER`) FROM `xperience`.`ALL_HIVE_DATA_TYPES` GROUP BY `BOOLCOLUMN` HAVING MIN(`BIGINTEGER`) < 56"); + matchSingleRowExplain(query, + "SELECT `BOOLCOLUMN`, MIN(`BIGINTEGER`) FROM `xperience`.`ALL_HIVE_DATA_TYPES` GROUP BY `BOOLCOLUMN` HAVING MIN(`BIGINTEGER`) < 56"); } @Test public void testComparisonPredicates() throws SQLException { // =, !=, <, <=, >, >= - String query = "select biginteger, biginteger=60, biginteger!=60, biginteger<60, biginteger<=60, biginteger>60, biginteger>=60 from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES where intcol = 85"; - ResultSet result = executeQuery(query); + final String query = "select biginteger, biginteger=60, biginteger!=60, biginteger<60, biginteger<=60, biginteger>60, biginteger>=60 from " + + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES where intcol = 85"; + final ResultSet result = executeQuery(query); matchNextRow(result, new BigDecimal("56"), false, true, true, true, false, false); - matchSingleRowExplain(query, "SELECT `BIGINTEGER`, `BIGINTEGER` = 60, 
`BIGINTEGER` != 60, `BIGINTEGER` < 60, `BIGINTEGER` <= 60, 60 < `BIGINTEGER`," + - " 60 <= `BIGINTEGER` FROM `xperience`.`ALL_HIVE_DATA_TYPES` WHERE `INTCOL` = 85"); + matchSingleRowExplain(query, + "SELECT `BIGINTEGER`, `BIGINTEGER` = 60, `BIGINTEGER` != 60, `BIGINTEGER` < 60, `BIGINTEGER` <= 60, 60 < `BIGINTEGER`," + + " 60 <= `BIGINTEGER` FROM `xperience`.`ALL_HIVE_DATA_TYPES` WHERE `INTCOL` = 85"); } - - @Test public void testLogicalPredicates() throws SQLException { // NOT, AND, OR - String query = "select biginteger from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES where (biginteger < 56 or biginteger > 56) and not (biginteger is null)"; - ResultSet result = executeQuery(query); + final String query = "select biginteger from " + VIRTUAL_SCHEMA + + ".ALL_HIVE_DATA_TYPES where (biginteger < 56 or biginteger > 56) and not (biginteger is null)"; + final ResultSet result = executeQuery(query); matchNextRow(result, new BigDecimal("51")); matchNextRow(result, new BigDecimal("60")); - matchSingleRowExplain(query, "SELECT `BIGINTEGER` FROM `xperience`.`ALL_HIVE_DATA_TYPES` " + - "WHERE ((`BIGINTEGER` < 56 OR 56 < `BIGINTEGER`) AND NOT (`BIGINTEGER` IS NULL))"); + matchSingleRowExplain(query, "SELECT `BIGINTEGER` FROM `xperience`.`ALL_HIVE_DATA_TYPES` " + + "WHERE ((`BIGINTEGER` < 56 OR 56 < `BIGINTEGER`) AND NOT (`BIGINTEGER` IS NULL))"); } @Test public void testLikePredicates() throws SQLException { // LIKE, LIKE ESCAPE (not pushed down) - String query = "select varcharcol, varcharcol like 't%' escape 't' from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES where (varcharcol like 't%')"; - ResultSet result = executeQuery(query); + final String query = "select varcharcol, varcharcol like 't%' escape 't' from " + VIRTUAL_SCHEMA + + ".ALL_HIVE_DATA_TYPES where (varcharcol like 't%')"; + final ResultSet result = executeQuery(query); matchNextRow(result, "tytu", false); - matchSingleRowExplain(query, "SELECT `VARCHARCOL` FROM `xperience`.`ALL_HIVE_DATA_TYPES` WHERE `VARCHARCOL` LIKE 't%'"); + matchSingleRowExplain(query, + "SELECT `VARCHARCOL` FROM `xperience`.`ALL_HIVE_DATA_TYPES` WHERE `VARCHARCOL` LIKE 't%'"); } @Test public void testLikePredicatesRewritten() throws SQLException { // REGEXP_LIKE rewritten to REGEXP - String query = "select varcharcol from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES where varcharcol REGEXP_LIKE 'a+'"; - ResultSet result = executeQuery(query); + final String query = "select varcharcol from " + VIRTUAL_SCHEMA + + ".ALL_HIVE_DATA_TYPES where varcharcol REGEXP_LIKE 'a+'"; + final ResultSet result = executeQuery(query); matchLastRow(result, "anotherStr"); - matchSingleRowExplain(query, "SELECT `VARCHARCOL` FROM `xperience`.`ALL_HIVE_DATA_TYPES` WHERE `VARCHARCOL`REGEXP'a+'"); + matchSingleRowExplain(query, + "SELECT `VARCHARCOL` FROM `xperience`.`ALL_HIVE_DATA_TYPES` WHERE `VARCHARCOL`REGEXP'a+'"); } - @Test public void testMiscPredicates() throws SQLException { // BETWEEN, IN, IS NULL, !=NULL(rewritten to "IS NOT NULL") - String query = "select biginteger, biginteger in (56, 61), biginteger is null, biginteger != null from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES WHERE biginteger between 51 and 60"; - ResultSet result = executeQuery(query); - matchNextRow(result, new BigDecimal("56") , true, false, true); + final String query = "select biginteger, biginteger in (56, 61), biginteger is null, biginteger != null from " + + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES WHERE biginteger between 51 and 60"; + final ResultSet result = executeQuery(query); + matchNextRow(result, new 
BigDecimal("56"), true, false, true); matchNextRow(result, new BigDecimal("51"), false, false, true); - matchSingleRowExplain(query, "SELECT `BIGINTEGER`, `BIGINTEGER` IN (56, 61), `BIGINTEGER` IS NULL, " + - "`BIGINTEGER` IS NOT NULL FROM `xperience`.`ALL_HIVE_DATA_TYPES` WHERE `BIGINTEGER` BETWEEN 51 AND 60"); + matchSingleRowExplain(query, "SELECT `BIGINTEGER`, `BIGINTEGER` IN (56, 61), `BIGINTEGER` IS NULL, " + + "`BIGINTEGER` IS NOT NULL FROM `xperience`.`ALL_HIVE_DATA_TYPES` WHERE `BIGINTEGER` BETWEEN 51 AND 60"); } @Test public void testCountSumAggregateFunction() throws SQLException { - String query = "SELECT COUNT(biginteger), COUNT(*), COUNT(DISTINCT biginteger), SUM(biginteger), SUM(DISTINCT biginteger) from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; - ResultSet result = executeQuery(query); + final String query = "SELECT COUNT(biginteger), COUNT(*), COUNT(DISTINCT biginteger), SUM(biginteger), SUM(DISTINCT biginteger) from " + + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; + final ResultSet result = executeQuery(query); matchNextRow(result, new BigDecimal("7"), new BigDecimal("8"), new BigDecimal("3"), 403.0, 167.0); - matchSingleRowExplain(query, "SELECT COUNT(`BIGINTEGER`), COUNT(*), COUNT(DISTINCT `BIGINTEGER`), SUM(`BIGINTEGER`), SUM(DISTINCT `BIGINTEGER`) FROM `xperience`.`ALL_HIVE_DATA_TYPES`"); + matchSingleRowExplain(query, + "SELECT COUNT(`BIGINTEGER`), COUNT(*), COUNT(DISTINCT `BIGINTEGER`), SUM(`BIGINTEGER`), SUM(DISTINCT `BIGINTEGER`) FROM `xperience`.`ALL_HIVE_DATA_TYPES`"); } - @Test public void testAvgMinMaxAggregateFunction() throws SQLException { - String query = "SELECT AVG(biginteger), MIN(biginteger), MAX(biginteger) from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; - ResultSet result = executeQuery(query); - matchNextRow(result, 57.57142857142857,new BigDecimal("51"),new BigDecimal("60")); - matchSingleRowExplain(query, "SELECT AVG(`BIGINTEGER`), MIN(`BIGINTEGER`), MAX(`BIGINTEGER`) FROM `xperience`.`ALL_HIVE_DATA_TYPES`"); + final String query = "SELECT AVG(biginteger), MIN(biginteger), MAX(biginteger) from " + VIRTUAL_SCHEMA + + ".ALL_HIVE_DATA_TYPES"; + final ResultSet result = executeQuery(query); + matchNextRow(result, 57.57142857142857, new BigDecimal("51"), new BigDecimal("60")); + matchSingleRowExplain(query, + "SELECT AVG(`BIGINTEGER`), MIN(`BIGINTEGER`), MAX(`BIGINTEGER`) FROM `xperience`.`ALL_HIVE_DATA_TYPES`"); } - @Test public void testCastedStringFunctions() throws SQLException { - String query = "select concat(upper(varcharcol),lower(repeat(varcharcol,2))) from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; - ResultSet result = executeQuery(query); + final String query = "select concat(upper(varcharcol),lower(repeat(varcharcol,2))) from " + VIRTUAL_SCHEMA + + ".ALL_HIVE_DATA_TYPES"; + final ResultSet result = executeQuery(query); matchNextRow(result, "TYTUtytutytu"); - matchSingleRowExplain(query, "SELECT CAST(CONCAT(CAST(UPPER(`VARCHARCOL`) as string),CAST(LOWER(CAST(REPEAT(`VARCHARCOL`,2) " + - "as string)) as string)) as string) FROM `xperience`.`ALL_HIVE_DATA_TYPES`"); + matchSingleRowExplain(query, + "SELECT CAST(CONCAT(CAST(UPPER(`VARCHARCOL`) as string),CAST(LOWER(CAST(REPEAT(`VARCHARCOL`,2) " + + "as string)) as string)) as string) FROM `xperience`.`ALL_HIVE_DATA_TYPES`"); } - @Test public void testRewrittenDivAndModFunctions() throws SQLException { - String query = "select DIV(biginteger,biginteger), mod(biginteger,biginteger) from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; - ResultSet result = executeQuery(query); - 
matchNextRow(result, new BigDecimal("1"),new BigDecimal("0")); - matchSingleRowExplain(query, "SELECT `BIGINTEGER` DIV `BIGINTEGER`, `BIGINTEGER` % `BIGINTEGER` FROM `xperience`.`ALL_HIVE_DATA_TYPES`"); + final String query = "select DIV(biginteger,biginteger), mod(biginteger,biginteger) from " + VIRTUAL_SCHEMA + + ".ALL_HIVE_DATA_TYPES"; + final ResultSet result = executeQuery(query); + matchNextRow(result, new BigDecimal("1"), new BigDecimal("0")); + matchSingleRowExplain(query, + "SELECT `BIGINTEGER` DIV `BIGINTEGER`, `BIGINTEGER` % `BIGINTEGER` FROM `xperience`.`ALL_HIVE_DATA_TYPES`"); } - @Test public void testRewrittenSubStringFunction() throws SQLException { - String query = "select substring(stringcol FROM 1 FOR 2) from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; - ResultSet result = executeQuery(query); + final String query = "select substring(stringcol FROM 1 FOR 2) from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES"; + final ResultSet result = executeQuery(query); matchNextRow(result, "ts"); matchSingleRowExplain(query, "SELECT SUBSTR(`STRINGCOL`, 1, 2) FROM `xperience`.`ALL_HIVE_DATA_TYPES`"); } - - //can not test because it is supported only in newer Hive version + // can not test because it is supported only in newer Hive version public void testOrderBy() throws SQLException { - String query = "SELECT boolcolumn, biginteger from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES ORDER BY biginteger"; - ResultSet result = executeQuery(query); - matchSingleRowExplain(query, "SELECT `BIGINTEGER`, `BOOLCOLUMN` FROM `xperience`.`ALL_HIVE_DATA_TYPES` ORDER BY `BIGINTEGER` NULLS LAST"); + final String query = "SELECT boolcolumn, biginteger from " + VIRTUAL_SCHEMA + + ".ALL_HIVE_DATA_TYPES ORDER BY biginteger"; + final ResultSet result = executeQuery(query); + matchSingleRowExplain(query, + "SELECT `BIGINTEGER`, `BOOLCOLUMN` FROM `xperience`.`ALL_HIVE_DATA_TYPES` ORDER BY `BIGINTEGER` NULLS LAST"); } - //can not test because it is supported only in newer Hive version + // can not test because it is supported only in newer Hive version public void testOrderByLimit() throws SQLException { - String query = "SELECT boolcolumn, biginteger from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES ORDER BY biginteger LIMIT 3"; - ResultSet result = executeQuery(query); - matchSingleRowExplain(query, "SELECT `BIGINTEGER`, `BOOLCOLUMN` FROM `xperience`.`ALL_HIVE_DATA_TYPES` ORDER BY `BIGINTEGER` NULLS LAST LIMIT 3"); + final String query = "SELECT boolcolumn, biginteger from " + VIRTUAL_SCHEMA + + ".ALL_HIVE_DATA_TYPES ORDER BY biginteger LIMIT 3"; + final ResultSet result = executeQuery(query); + matchSingleRowExplain(query, + "SELECT `BIGINTEGER`, `BOOLCOLUMN` FROM `xperience`.`ALL_HIVE_DATA_TYPES` ORDER BY `BIGINTEGER` NULLS LAST LIMIT 3"); } - //can not test because it is supported only in newer Hive version + // can not test because it is supported only in newer Hive version public void testOrderByLimitOffset() throws SQLException { - String query = "SELECT boolcolumn, biginteger from " + VIRTUAL_SCHEMA + ".ALL_HIVE_DATA_TYPES ORDER BY biginteger LIMIT 2 offset 1"; - ResultSet result = executeQuery(query); - matchSingleRowExplain(query, "SELECT `BIGINTEGER`, `BOOLCOLUMN` FROM `xperience`.`ALL_HIVE_DATA_TYPES` ORDER BY `BIGINTEGER` NULLS LAST"); + final String query = "SELECT boolcolumn, biginteger from " + VIRTUAL_SCHEMA + + ".ALL_HIVE_DATA_TYPES ORDER BY biginteger LIMIT 2 offset 1"; + final ResultSet result = executeQuery(query); + matchSingleRowExplain(query, + "SELECT `BIGINTEGER`, `BOOLCOLUMN` FROM 
- private static void createHiveJDBCAdapter() throws SQLException, FileNotFoundException { - List<String> hiveIncludes = new ArrayList<>(); + final List<String> hiveIncludes = new ArrayList<>(); hiveIncludes.add(getConfig().getJdbcAdapterPath()); - String jdbcPrefixPath = getConfig().getHiveJdbcPrefixPath(); - for (String jar : getConfig().getHiveJdbcJars()) { + final String jdbcPrefixPath = getConfig().getHiveJdbcPrefixPath(); + for (final String jar : getConfig().getHiveJdbcJars()) { hiveIncludes.add(jdbcPrefixPath + jar); } createJDBCAdapter(hiveIncludes); diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/ImpalaSqlDialectIT.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/ImpalaSqlDialectIT.java index fcd804d87..ffb46ebb6 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/ImpalaSqlDialectIT.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/ImpalaSqlDialectIT.java @@ -1,11 +1,5 @@ package com.exasol.adapter.dialects.impl; - -import com.exasol.adapter.dialects.AbstractIntegrationTest; -import org.junit.Assume; -import org.junit.BeforeClass; -import org.junit.Test; - import java.io.FileNotFoundException; import java.math.BigDecimal; import java.sql.ResultSet; @@ -13,89 +7,83 @@ import java.util.ArrayList; import java.util.List; +import org.junit.Assume; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.exasol.adapter.dialects.AbstractIntegrationTest; + /** * Integration test for the Impala SQL dialect * - * Testdata: sample_07: code string, description string, total_emp int, salary int + * Testdata: sample_07: code string, description string, total_emp int, salary + * int */ public class ImpalaSqlDialectIT extends AbstractIntegrationTest { private static final String VIRTUAL_SCHEMA = "VS_IMPALA"; private static final String IMPALA_SCHEMA = "default"; private static final boolean IS_LOCAL = false; - + @BeforeClass public static void setUpClass() throws FileNotFoundException, SQLException, ClassNotFoundException { Assume.assumeTrue(getConfig().impalaTestsRequested()); setConnection(connectToExa()); createImpalaJDBCAdapter(); - createVirtualSchema( - VIRTUAL_SCHEMA, - ImpalaSqlDialect.NAME, - "", IMPALA_SCHEMA, - "", - "no-user", - "no-password", - "ADAPTER.JDBC_ADAPTER", - getConfig().getImpalaJdbcConnectionString(), - IS_LOCAL, - getConfig().debugAddress(), - "SAMPLE_07,ALL_HIVE_IMPALA_TYPES,SIMPLE,SIMPLE_WITH_NULLS",null); + createVirtualSchema(VIRTUAL_SCHEMA, ImpalaSqlDialect.getPublicName(), "", IMPALA_SCHEMA, "", "no-user", + "no-password", "ADAPTER.JDBC_ADAPTER", getConfig().getImpalaJdbcConnectionString(), IS_LOCAL, + getConfig().debugAddress(), "SAMPLE_07,ALL_HIVE_IMPALA_TYPES,SIMPLE,SIMPLE_WITH_NULLS", null); } @Test public void testTypeMapping() throws SQLException, ClassNotFoundException, FileNotFoundException { // TODO Test type mapping for tables with invalid Impala Types - ResultSet result = executeQuery("SELECT COLUMN_NAME, COLUMN_TYPE, COLUMN_MAXSIZE, COLUMN_NUM_PREC, COLUMN_NUM_SCALE, COLUMN_DEFAULT FROM EXA_DBA_COLUMNS WHERE COLUMN_SCHEMA = '" + VIRTUAL_SCHEMA + "' AND COLUMN_TABLE='ALL_HIVE_IMPALA_TYPES' ORDER BY COLUMN_ORDINAL_POSITION"); + final ResultSet result = executeQuery( + "SELECT COLUMN_NAME, COLUMN_TYPE, COLUMN_MAXSIZE, COLUMN_NUM_PREC, COLUMN_NUM_SCALE, COLUMN_DEFAULT FROM EXA_DBA_COLUMNS WHERE 
COLUMN_SCHEMA = '" + + VIRTUAL_SCHEMA + + "' AND COLUMN_TABLE='ALL_HIVE_IMPALA_TYPES' ORDER BY COLUMN_ORDINAL_POSITION"); matchNextRow(result, "C1", "DECIMAL(3,0)", 3L, 3L, 0L, null); matchNextRow(result, "C2", "DECIMAL(5,0)", 5L, 5L, 0L, null); - matchNextRow(result, "C3", "DECIMAL(10,0)", (long)10, (long)10, (long)0, null); - matchNextRow(result, "C4", "DECIMAL(19,0)", (long)19, (long)19, (long)0, null); - matchNextRow(result, "C5", "DOUBLE", (long)64, null, null, null); - matchNextRow(result, "C6", "DOUBLE", (long)64, null, null, null); - matchNextRow(result, "C7", "DECIMAL(9,0)", (long)9, (long)9, (long)0, null); - matchNextRow(result, "C8", "DECIMAL(12,2)", (long)12, (long)12, (long)2, null); - matchNextRow(result, "C9", "VARCHAR(2000000) UTF8", (long)2000000, null, null, null); - matchNextRow(result, "C10", "TIMESTAMP", (long)29, null, null, null); - // Impala has problems with STRING data type, and probably automatically restricts it to ASCII (otherwise several operations don't work with the column) - // See http://www.cloudera.com/documentation/enterprise/5-5-x/topics/impala_string.html - matchNextRow(result, "C11", "VARCHAR(32767) ASCII", (long)32767, null, null, null); - matchNextRow(result, "C12", "VARCHAR(1000) UTF8", (long)1000, null, null, null); - matchNextRow(result, "C13", "CHAR(10) UTF8", (long)10, null, null, null); - matchLastRow(result, "C14", "BOOLEAN", (long)1, null, null, null); + matchNextRow(result, "C3", "DECIMAL(10,0)", (long) 10, (long) 10, (long) 0, null); + matchNextRow(result, "C4", "DECIMAL(19,0)", (long) 19, (long) 19, (long) 0, null); + matchNextRow(result, "C5", "DOUBLE", (long) 64, null, null, null); + matchNextRow(result, "C6", "DOUBLE", (long) 64, null, null, null); + matchNextRow(result, "C7", "DECIMAL(9,0)", (long) 9, (long) 9, (long) 0, null); + matchNextRow(result, "C8", "DECIMAL(12,2)", (long) 12, (long) 12, (long) 2, null); + matchNextRow(result, "C9", "VARCHAR(2000000) UTF8", (long) 2000000, null, null, null); + matchNextRow(result, "C10", "TIMESTAMP", (long) 29, null, null, null); + // Impala has problems with STRING data type, and probably automatically + // restricts it to ASCII (otherwise several operations don't work with the + // column) + // See + // http://www.cloudera.com/documentation/enterprise/5-5-x/topics/impala_string.html + matchNextRow(result, "C11", "VARCHAR(32767) ASCII", (long) 32767, null, null, null); + matchNextRow(result, "C12", "VARCHAR(1000) UTF8", (long) 1000, null, null, null); + matchNextRow(result, "C13", "CHAR(10) UTF8", (long) 10, null, null, null); + matchLastRow(result, "C14", "BOOLEAN", (long) 1, null, null, null); } @Test public void testSelectWithAllTypes() throws SQLException { - ResultSet result = executeQuery("SELECT * from " + VIRTUAL_SCHEMA + ".ALL_HIVE_IMPALA_TYPES"); - matchLastRow(result, - (short)123, - 12345, - 1234567890L, - new BigDecimal(1234567890123456789L), - 12.199999809265137, - 12.2, - 12345, - new BigDecimal("12345.12"), - "12345.12",getSqlTimestamp(1985, 9, 25, 17, 45, 30, 5), - "abc", - "varchar 茶", - "char 茶 ", // 茶 requires 3 bytes, and char(10) means 10 bytes for Impala. + final ResultSet result = executeQuery("SELECT * from " + VIRTUAL_SCHEMA + ".ALL_HIVE_IMPALA_TYPES"); + matchLastRow(result, (short) 123, 12345, 1234567890L, new BigDecimal(1234567890123456789L), 12.199999809265137, + 12.2, 12345, new BigDecimal("12345.12"), "12345.12", getSqlTimestamp(1985, 9, 25, 17, 45, 30, 5), "abc", + "varchar 茶", "char 茶 ", // 茶 requires 3 bytes, and char(10) means 10 bytes for Impala. 
true); } @Test public void testSimpleQuery() throws SQLException, ClassNotFoundException, FileNotFoundException { - String query = "SELECT * FROM sample_07"; - ResultSet result = executeQuery(query); - matchNextRow(result, "00-0000", "All Occupations", (long)134354250, (long)40690); + final String query = "SELECT * FROM sample_07"; + final ResultSet result = executeQuery(query); + matchNextRow(result, "00-0000", "All Occupations", (long) 134354250, (long) 40690); } @Test public void testProjection() throws SQLException { - String query = "SELECT c2 FROM " + VIRTUAL_SCHEMA + ".ALL_HIVE_IMPALA_TYPES"; - ResultSet result = executeQuery(query); + final String query = "SELECT c2 FROM " + VIRTUAL_SCHEMA + ".ALL_HIVE_IMPALA_TYPES"; + final ResultSet result = executeQuery(query); matchLastRow(result, 12345); matchSingleRowExplain(query, "SELECT `C2` FROM `default`.`ALL_HIVE_IMPALA_TYPES`"); } @@ -103,143 +91,110 @@ public void testProjection() throws SQLException { @Test public void testComparisonPredicates() throws SQLException { // =, !=, <, <=, >, >= - String query = "select salary, salary=33880, salary!=33880, salary<33880, salary<=33880, salary>33880, salary>=33880 from " + VIRTUAL_SCHEMA + ".sample_07 where code = '11-1031'"; - ResultSet result = executeQuery(query); + final String query = "select salary, salary=33880, salary!=33880, salary<33880, salary<=33880, salary>33880, salary>=33880 from " + + VIRTUAL_SCHEMA + ".sample_07 where code = '11-1031'"; + final ResultSet result = executeQuery(query); matchLastRow(result, 33880L, true, false, false, true, false, true); - matchSingleRowExplain(query, "SELECT `SALARY`, `SALARY` = 33880, `SALARY` != 33880, `SALARY` < 33880, `SALARY` <= 33880, 33880 < `SALARY`, 33880 <= `SALARY` FROM `default`.`SAMPLE_07` WHERE `CODE` = '11-1031'"); + matchSingleRowExplain(query, + "SELECT `SALARY`, `SALARY` = 33880, `SALARY` != 33880, `SALARY` < 33880, `SALARY` <= 33880, 33880 < `SALARY`, 33880 <= `SALARY` FROM `default`.`SAMPLE_07` WHERE `CODE` = '11-1031'"); } @Test public void testLogicalPredicates() throws SQLException { // NOT, AND, OR - String query = "select * from vs_impala.simple_with_nulls where (c1 < 2 or c1 > 2) and not (c2 is null)"; - ResultSet result = executeQuery(query); + final String query = "select * from vs_impala.simple_with_nulls where (c1 < 2 or c1 > 2) and not (c2 is null)"; + final ResultSet result = executeQuery(query); matchNextRow(result, 1L, "a"); matchLastRow(result, 3L, "b"); - matchSingleRowExplain(query, "SELECT * FROM `default`.`SIMPLE_WITH_NULLS` WHERE ((`C1` < 2 OR 2 < `C1`) AND NOT (`C2` IS NULL))"); + matchSingleRowExplain(query, + "SELECT * FROM `default`.`SIMPLE_WITH_NULLS` WHERE ((`C1` < 2 OR 2 < `C1`) AND NOT (`C2` IS NULL))"); } @Test public void testLikePredicates() throws SQLException { // LIKE, LIKE ESCAPE (not pushed down), REGEXP_LIKE - String query = "select code, code like 'x%1' escape 'x' from " + VIRTUAL_SCHEMA + ".sample_07 where (code like '15%' and not code regexp_like '.*1$')"; - ResultSet result = executeQuery(query); + final String query = "select code, code like 'x%1' escape 'x' from " + VIRTUAL_SCHEMA + + ".sample_07 where (code like '15%' and not code regexp_like '.*1$')"; + final ResultSet result = executeQuery(query); matchNextRow(result, "15-0000", false); matchNextRow(result, "15-1032", false); matchNextRow(result, "15-1099", false); matchLastRow(result, "15-2099", false); - matchSingleRowExplain(query, "SELECT `CODE` FROM `default`.`SAMPLE_07` WHERE (`CODE` LIKE '15%' AND NOT (`CODE` REGEXP 
'.*1$'))"); + matchSingleRowExplain(query, + "SELECT `CODE` FROM `default`.`SAMPLE_07` WHERE (`CODE` LIKE '15%' AND NOT (`CODE` REGEXP '.*1$'))"); } @Test public void testMiscPredicates() throws SQLException { // BETWEEN, IN, IS NULL, IS NOT NULL - String query = "select c1, c2, c1 in (2, 3), c2 is null, c2 is not null from vs_impala.simple_with_nulls WHERE c1 between 1 and 2"; - ResultSet result = executeQuery(query); + final String query = "select c1, c2, c1 in (2, 3), c2 is null, c2 is not null from vs_impala.simple_with_nulls WHERE c1 between 1 and 2"; + final ResultSet result = executeQuery(query); matchNextRow(result, 1L, "a", false, false, true); matchNextRow(result, 2L, null, true, true, false); matchLastRow(result, 1L, null, false, true, false); - matchSingleRowExplain(query, "SELECT `C1`, `C2`, `C1` IN (2, 3), `C2` IS NULL, `C2` IS NOT NULL FROM `default`.`SIMPLE_WITH_NULLS` WHERE `C1` BETWEEN 1 AND 2"); + matchSingleRowExplain(query, + "SELECT `C1`, `C2`, `C1` IN (2, 3), `C2` IS NULL, `C2` IS NOT NULL FROM `default`.`SIMPLE_WITH_NULLS` WHERE `C1` BETWEEN 1 AND 2"); } @Test public void testCountSumAggregateFunction() throws SQLException { - String query = "SELECT COUNT(A), COUNT(*), COUNT(DISTINCT A), SUM(A), SUM(DISTINCT A) from vs_impala.simple"; - ResultSet result = executeQuery(query); + final String query = "SELECT COUNT(A), COUNT(*), COUNT(DISTINCT A), SUM(A), SUM(DISTINCT A) from vs_impala.simple"; + final ResultSet result = executeQuery(query); matchLastRow(result, new BigDecimal(6), new BigDecimal(6), new BigDecimal(3), 12D, 6D); - matchSingleRowExplain(query, "SELECT COUNT(`A`), COUNT(*), COUNT(DISTINCT `A`), CAST(SUM(`A`) AS DOUBLE), CAST(SUM(DISTINCT `A`) AS DOUBLE) FROM `default`.`SIMPLE`"); + matchSingleRowExplain(query, + "SELECT COUNT(`A`), COUNT(*), COUNT(DISTINCT `A`), CAST(SUM(`A`) AS DOUBLE), CAST(SUM(DISTINCT `A`) AS DOUBLE) FROM `default`.`SIMPLE`"); } public void testAvgMinMaxAggregateFunction() throws SQLException { - String query = "SELECT AVG(C), MIN(A), MIN(DISTINCT A), MAX(A), MAX(DISTINCT A) from VS_IMPALA.SIMPLE"; - ResultSet result = executeQuery(query); - matchLastRow(result, new BigDecimal(3.85), new BigDecimal(1), new BigDecimal(1), new BigDecimal(3), new BigDecimal(3)); + final String query = "SELECT AVG(C), MIN(A), MIN(DISTINCT A), MAX(A), MAX(DISTINCT A) from VS_IMPALA.SIMPLE"; + final ResultSet result = executeQuery(query); + matchLastRow(result, new BigDecimal(3.85), new BigDecimal(1), new BigDecimal(1), new BigDecimal(3), + new BigDecimal(3)); matchSingleRowExplain(query, "SELECT AVG(`C`), MIN(`A`), MIN(`A`), MAX(`A`), MAX(`A`) FROM `default`.`SIMPLE`"); } @Test public void testLiteralsPredicates() throws SQLException { // String/varchar, bool, null, double, decimal - String query = "select count(*) from vs_impala.ALL_HIVE_IMPALA_TYPES where c11 = 'abc' and c12 = 'varchar 茶' and c6 = 1.22E1 and c8 = 12345.12"; - ResultSet result = executeQuery(query); + final String query = "select count(*) from vs_impala.ALL_HIVE_IMPALA_TYPES where c11 = 'abc' and c12 = 'varchar 茶' and c6 = 1.22E1 and c8 = 12345.12"; + final ResultSet result = executeQuery(query); matchLastRow(result, new BigDecimal(1)); - matchSingleRowExplain(query, "SELECT COUNT(*) FROM `default`.`ALL_HIVE_IMPALA_TYPES` WHERE (`C11` = 'abc' AND `C12` = 'varchar 茶' AND `C6` = 12.2 AND `C8` = 12345.12)"); + matchSingleRowExplain(query, + "SELECT COUNT(*) FROM `default`.`ALL_HIVE_IMPALA_TYPES` WHERE (`C11` = 'abc' AND `C12` = 'varchar 茶' AND `C6` = 12.2 AND `C8` = 12345.12)"); } @Test 
public void testAggregationFunctions() throws SQLException { /** - * COUNT(A) - COUNT(*) - COUNT(DISTINCT A) - COUNT(ALL (A, C)) - COVAR_POP(A, C) - COVAR_SAMP(A, C) - FIRST_VALUE(A) - GROUP_CONCAT(A) - GROUP_CONCAT(DISTINCT A) - GROUP_CONCAT(A ORDER BY C) - GROUP_CONCAT(A ORDER BY C DESC) - GROUP_CONCAT(A SEPARATOR - GROUPING(A) - GROUPING(A, C) - GROUPING_ID(A) - GROUPING_ID(A, C) - LAST_VALUE(A) - MAX(A) - MAX(ALL A) - MAX(DISTINCT A) - MEDIAN(A) - MIN(A) - MIN(ALL A) - MIN(DISTINCT A) - PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY A) - PERCENTILE_DISC(0.5) WITHIN GROUP (ORDER BY A) - REGR_AVGX(A, C) - REGR_AVGY(A, C) - REGR_COUNT(A, C) - REGR_INTERCEPT(A, C) - REGR_R2(A, C) - REGR_SLOPE(A, C) - REGR_SXX(A, C) - REGR_SXY(A, C) - REGR_SYY(A, C) - STDDEV(A) - STDDEV(ALL A) - STDDEV(DISTINCT A) - STDDEV_POP(A) - STDDEV_POP(ALL A) - STDDEV_POP(DISTINCT A) - STDDEV_SAMP(A) - STDDEV_SAMP(ALL A) - STDDEV_SAMP(DISTINCT A) - SUM(A) - SUM(ALL A) - SUM(DISTINCT A) - VAR_POP(A) - VAR_POP(ALL A) - VAR_POP(DISTINCT A) - VAR_SAMP(A) - VAR_SAMP(ALL A) - VAR_SAMP(DISTINCT A) - VARIANCE(A) - VARIANCE(ALL A) - VARIANCE(DISTINCT A) + * COUNT(A) COUNT(*) COUNT(DISTINCT A) COUNT(ALL (A, C)) COVAR_POP(A, C) + * COVAR_SAMP(A, C) FIRST_VALUE(A) GROUP_CONCAT(A) GROUP_CONCAT(DISTINCT A) + * GROUP_CONCAT(A ORDER BY C) GROUP_CONCAT(A ORDER BY C DESC) GROUP_CONCAT(A + * SEPARATOR GROUPING(A) GROUPING(A, C) GROUPING_ID(A) GROUPING_ID(A, C) + * LAST_VALUE(A) MAX(A) MAX(ALL A) MAX(DISTINCT A) MEDIAN(A) MIN(A) MIN(ALL A) + * MIN(DISTINCT A) PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY A) + * PERCENTILE_DISC(0.5) WITHIN GROUP (ORDER BY A) REGR_AVGX(A, C) REGR_AVGY(A, + * C) REGR_COUNT(A, C) REGR_INTERCEPT(A, C) REGR_R2(A, C) REGR_SLOPE(A, C) + * REGR_SXX(A, C) REGR_SXY(A, C) REGR_SYY(A, C) STDDEV(A) STDDEV(ALL A) + * STDDEV(DISTINCT A) STDDEV_POP(A) STDDEV_POP(ALL A) STDDEV_POP(DISTINCT A) + * STDDEV_SAMP(A) STDDEV_SAMP(ALL A) STDDEV_SAMP(DISTINCT A) SUM(A) SUM(ALL A) + * SUM(DISTINCT A) VAR_POP(A) VAR_POP(ALL A) VAR_POP(DISTINCT A) VAR_SAMP(A) + * VAR_SAMP(ALL A) VAR_SAMP(DISTINCT A) VARIANCE(A) VARIANCE(ALL A) + * VARIANCE(DISTINCT A) */ } @Test public void testOrderBy() throws SQLException { - String query = "SELECT CODE, SALARY from sample_07 ORDER BY SALARY"; - ResultSet result = executeQuery(query); + final String query = "SELECT CODE, SALARY from sample_07 ORDER BY SALARY"; + final ResultSet result = executeQuery(query); matchNextRow(result, "35-3021", 16700L); matchSingleRowExplain(query, "SELECT `CODE`, `SALARY` FROM `default`.`SAMPLE_07` ORDER BY `SALARY`"); } @Test public void testOrderByLimit() throws SQLException { - String query = "SELECT CODE, SALARY from sample_07 ORDER BY SALARY LIMIT 3"; - ResultSet result = executeQuery(query); + final String query = "SELECT CODE, SALARY from sample_07 ORDER BY SALARY LIMIT 3"; + final ResultSet result = executeQuery(query); matchNextRow(result, "35-3021", 16700L); matchNextRow(result, "35-2011", 16860L); matchLastRow(result, "35-9021", 17060L); @@ -248,26 +203,28 @@ public void testOrderByLimit() throws SQLException { @Test public void testOrderByLimitOffset() throws SQLException { - String query = "SELECT CODE, SALARY from sample_07 ORDER BY SALARY LIMIT 2 OFFSET 1"; - ResultSet result = executeQuery(query); + final String query = "SELECT CODE, SALARY from sample_07 ORDER BY SALARY LIMIT 2 OFFSET 1"; + final ResultSet result = executeQuery(query); matchNextRow(result, "35-2011", 16860L); matchLastRow(result, "35-9021", 17060L); - matchSingleRowExplain(query, "SELECT 
`CODE`, `SALARY` FROM `default`.`SAMPLE_07` ORDER BY `SALARY` LIMIT 2 OFFSET 1"); + matchSingleRowExplain(query, + "SELECT `CODE`, `SALARY` FROM `default`.`SAMPLE_07` ORDER BY `SALARY` LIMIT 2 OFFSET 1"); } @Test public void testAggregateFunctions() throws SQLException, ClassNotFoundException, FileNotFoundException { - String query = "SELECT count(*), count(salary), count(distinct salary) FROM sample_07"; - ResultSet result = executeQuery(query); + final String query = "SELECT count(*), count(salary), count(distinct salary) FROM sample_07"; + final ResultSet result = executeQuery(query); matchLastRow(result, new BigDecimal(823), new BigDecimal(819), new BigDecimal(759)); - matchSingleRowExplain(query, "SELECT COUNT(*), COUNT(`SALARY`), COUNT(DISTINCT `SALARY`) FROM `default`.`SAMPLE_07`"); + matchSingleRowExplain(query, + "SELECT COUNT(*), COUNT(`SALARY`), COUNT(DISTINCT `SALARY`) FROM `default`.`SAMPLE_07`"); } private static void createImpalaJDBCAdapter() throws SQLException, FileNotFoundException { - List<String> impalaIncludes = new ArrayList<>(); + final List<String> impalaIncludes = new ArrayList<>(); impalaIncludes.add(getConfig().getJdbcAdapterPath()); - String jdbcPrefixPath = getConfig().getImpalaJdbcPrefixPath(); - for (String jar : getConfig().getImpalaJdbcJars()) { + final String jdbcPrefixPath = getConfig().getImpalaJdbcPrefixPath(); + for (final String jar : getConfig().getImpalaJdbcJars()) { impalaIncludes.add(jdbcPrefixPath + jar); } createJDBCAdapter(impalaIncludes); diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/KerberosIT.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/KerberosIT.java index 948ac2dfd..26b1de6bf 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/KerberosIT.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/KerberosIT.java @@ -1,9 +1,6 @@ package com.exasol.adapter.dialects.impl; -import com.exasol.adapter.dialects.AbstractIntegrationTest; -import org.junit.Assume; -import org.junit.BeforeClass; -import org.junit.Test; +import static org.junit.Assert.assertEquals; import java.io.FileNotFoundException; import java.sql.Connection; @@ -13,10 +10,16 @@ import java.util.ArrayList; import java.util.List; -import static org.junit.Assert.assertEquals; +import org.junit.Assume; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.exasol.adapter.dialects.AbstractIntegrationTest; /** - * Integration test for JDBC drivers requiring Kerberos authentication. This is currently only tested for the Cloudera Hive JDBC driver developed by Simba (probably also works for the Cloudera Impala Driver developed by Simba) + * Integration test for JDBC drivers requiring Kerberos authentication. 
This is + * currently only tested for the Cloudera Hive JDBC driver developed by Simba + * (probably also works for the Cloudera Impala Driver developed by Simba) */ public class KerberosIT extends AbstractIntegrationTest { @@ -38,31 +41,23 @@ public static void setUpClass() throws FileNotFoundException, SQLException, Clas @Test public void testKerberosVirtualSchema() throws SQLException, ClassNotFoundException, FileNotFoundException { - createVirtualSchema( - VIRTUAL_SCHEMA, - ExasolSqlDialect.NAME, - "", - "default", - CONNECTION_NAME, - "", - "", - "ADAPTER.JDBC_ADAPTER", - "", IS_LOCAL, - getConfig().debugAddress(), - "", null); - Statement stmt = getConnection().createStatement(); - ResultSet result = stmt.executeQuery("SELECT * FROM \"sample_07\""); + createVirtualSchema(VIRTUAL_SCHEMA, ExasolSqlDialect.getPublicName(), "", "default", CONNECTION_NAME, "", "", + "ADAPTER.JDBC_ADAPTER", "", IS_LOCAL, getConfig().debugAddress(), "", null); + final Statement stmt = getConnection().createStatement(); + final ResultSet result = stmt.executeQuery("SELECT * FROM \"sample_07\""); result.next(); assertEquals("00-0000", result.getString(1)); } @Test - public void testKerberosVirtualSchemaGrantConnection() throws SQLException, ClassNotFoundException, FileNotFoundException { - // Create Kerberos Virtual Schema using a different user, which has the appropriate privileges for the connection + public void testKerberosVirtualSchemaGrantConnection() + throws SQLException, ClassNotFoundException, FileNotFoundException { + // Create Kerberos Virtual Schema using a different user, which has the + // appropriate privileges for the connection final String userName = "user1"; - Statement stmt = getConnection().createStatement(); - stmt.execute("DROP USER IF EXISTS " + userName +" CASCADE"); - stmt.execute("CREATE USER " + userName +" identified by \"" + userName + "\""); + final Statement stmt = getConnection().createStatement(); + stmt.execute("DROP USER IF EXISTS " + userName + " CASCADE"); + stmt.execute("CREATE USER " + userName + " identified by \"" + userName + "\""); stmt.execute("GRANT CREATE SESSION TO " + userName); stmt.execute("GRANT CREATE VIRTUAL SCHEMA TO " + userName); stmt.execute("GRANT DROP ANY VIRTUAL SCHEMA TO " + userName); @@ -71,33 +66,22 @@ public void testKerberosVirtualSchemaGrantConnection() throws SQLException, Clas stmt.execute("GRANT ACCESS ON CONNECTION " + CONNECTION_NAME + " TO " + userName); stmt.execute("GRANT CONNECTION " + CONNECTION_NAME + " TO " + userName); stmt.execute("COMMIT"); - Connection conn2 = connectToExa(userName, userName); - Statement stmt2 = conn2.createStatement(); - createVirtualSchema( - conn2, - VIRTUAL_SCHEMA, - ExasolSqlDialect.NAME, - "", - "default", - CONNECTION_NAME, - "", - "", - adapterName, - "", false, - getConfig().debugAddress(), - "", null); - ResultSet result = stmt2.executeQuery("SELECT * FROM \"sample_07\""); + final Connection conn2 = connectToExa(userName, userName); + final Statement stmt2 = conn2.createStatement(); + createVirtualSchema(conn2, VIRTUAL_SCHEMA, ExasolSqlDialect.getPublicName(), "", "default", CONNECTION_NAME, "", + "", adapterName, "", false, getConfig().debugAddress(), "", null); + final ResultSet result = stmt2.executeQuery("SELECT * FROM \"sample_07\""); result.next(); assertEquals("00-0000", result.getString(1)); - stmt.execute("DROP USER IF EXISTS " + userName +" CASCADE"); + stmt.execute("DROP USER IF EXISTS " + userName + " CASCADE"); } private static void createKerberosJDBCAdapter() throws SQLException, 
FileNotFoundException { - List<String> kerberosIncludes = new ArrayList<>(); + final List<String> kerberosIncludes = new ArrayList<>(); kerberosIncludes.add(getConfig().getJdbcAdapterPath()); - String jdbcPrefixPath = getConfig().getKerberosJdbcPrefixPath(); - for (String jar : getConfig().getKerberosJdbcJars()) { + final String jdbcPrefixPath = getConfig().getKerberosJdbcPrefixPath(); + for (final String jar : getConfig().getKerberosJdbcJars()) { kerberosIncludes.add(jdbcPrefixPath + jar); } createJDBCAdapter(kerberosIncludes); diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/OracleSqlDialectIT.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/OracleSqlDialectIT.java index 82145b880..8060c5f7c 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/OracleSqlDialectIT.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/OracleSqlDialectIT.java @@ -1,34 +1,30 @@ package com.exasol.adapter.dialects.impl; -import com.exasol.adapter.dialects.AbstractIntegrationTest; -import org.junit.Assume; -import org.junit.BeforeClass; -import org.junit.Test; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; -import javax.print.DocFlavor; import java.io.FileNotFoundException; import java.math.BigDecimal; import java.net.URI; import java.sql.Date; import java.sql.ResultSet; import java.sql.SQLException; -import java.sql.Timestamp; -import java.text.DecimalFormat; -import java.text.NumberFormat; -import java.time.*; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import org.junit.Assume; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.exasol.adapter.dialects.AbstractIntegrationTest; /** * Tested with Oracle 12 * - * TODO Add tests for data types - * TODO Test Expanding of SELECT * if elements of select list require casting + * TODO Add tests for data types + * TODO Test Expanding of SELECT * if elements of the select list require casting */ public class OracleSqlDialectIT extends AbstractIntegrationTest { @@ -55,58 +51,39 @@ public static void beforeMethod() throws FileNotFoundException, SQLException, Cl createOracleConnection(); // create JDBC virtual schema - createVirtualSchema( - VIRTUAL_SCHEMA_JDBC, - OracleSqlDialect.NAME, - "", - ORACLE_SCHEMA, - "", - getConfig().getOracleUser(), - getConfig().getOraclePassword(), - //"ADAPTER.JDBC_ORACLE_DEBUG", - "ADAPTER.JDBC_ORACLE", - getConfig().getOracleJdbcConnectionString(), - IS_LOCAL, - getConfig().debugAddress(), - TEST_TABLE,null); + createVirtualSchema(VIRTUAL_SCHEMA_JDBC, OracleSqlDialect.getPublicName(), "", ORACLE_SCHEMA, "", + getConfig().getOracleUser(), getConfig().getOraclePassword(), + // "ADAPTER.JDBC_ORACLE_DEBUG", + "ADAPTER.JDBC_ORACLE", getConfig().getOracleJdbcConnectionString(), IS_LOCAL, + getConfig().debugAddress(), TEST_TABLE, null); // create IMPORT FROM ORA virtual schema - createVirtualSchema( - VIRTUAL_SCHEMA_IMPORT, - OracleSqlDialect.NAME, - "", - ORACLE_SCHEMA, - "", - getConfig().getOracleUser(), - getConfig().getOraclePassword(), - //"ADAPTER.JDBC_ORACLE_DEBUG", - "ADAPTER.JDBC_ORACLE", - getConfig().getOracleJdbcConnectionString(), - IS_LOCAL, - getConfig().debugAddress(), - TEST_TABLE, - "IMPORT_FROM_ORA='true' ORA_CONNECTION_NAME='CONN_ORACLE'"); + createVirtualSchema(VIRTUAL_SCHEMA_IMPORT, OracleSqlDialect.getPublicName(), "", ORACLE_SCHEMA, "", + getConfig().getOracleUser(), getConfig().getOraclePassword(), + // "ADAPTER.JDBC_ORACLE_DEBUG", + "ADAPTER.JDBC_ORACLE", getConfig().getOracleJdbcConnectionString(), IS_LOCAL, + getConfig().debugAddress(), TEST_TABLE, "IMPORT_FROM_ORA='true' ORA_CONNECTION_NAME='CONN_ORACLE'"); }
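+ // Two virtual schemas are created above so that each test exercises both
+ // access paths: EXA_TABLE_JDBC is read via IMPORT FROM JDBC, while
+ // EXA_TABLE_IMPORT uses IMPORT FROM ORA through the CONN_ORACLE connection.
+ // The runQuery() helper below executes every query against both schemas.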
private static void createOracleJDBCAdapter() throws SQLException, FileNotFoundException { - String jdbcAdapterPath = getConfig().getJdbcAdapterPath(); - String oracleJdbcDriverdbcDriver = getConfig().getOracleJdbcDriverPath(); - List<String> includes = new ArrayList<>(); + final String jdbcAdapterPath = getConfig().getJdbcAdapterPath(); + final String oracleJdbcDriverdbcDriver = getConfig().getOracleJdbcDriverPath(); + final List<String> includes = new ArrayList<>(); includes.add(jdbcAdapterPath); includes.add(oracleJdbcDriverdbcDriver); createJDBCAdapter(includes); } - private String getColumnType(String column) throws SQLException { - if (columnTypes == null) { - columnTypes = getColumnTypesOfTable(EXA_TABLE_JDBC); + private String getColumnType(final String column) throws SQLException { + if (this.columnTypes == null) { + this.columnTypes = getColumnTypesOfTable(EXA_TABLE_JDBC); } - return columnTypes.get(column.toUpperCase()); + return this.columnTypes.get(column.toUpperCase()); } - private Map<String, String> getColumnTypesOfTable(String table) throws SQLException { - Map<String, String> map = new HashMap<>(); - ResultSet result = executeQuery("DESCRIBE " + table); + private Map<String, String> getColumnTypesOfTable(final String table) throws SQLException { + final Map<String, String> map = new HashMap<>(); + final ResultSet result = executeQuery("DESCRIBE " + table); while (result.next()) { map.put(result.getString("COLUMN_NAME").toUpperCase(), result.getString("SQL_TYPE").toUpperCase()); } @@ -114,25 +91,26 @@ private Map<String, String> getColumnTypesOfTable(String table) throws SQLExcept } private static void createOracleConnection() throws SQLException, FileNotFoundException { - URI conn = getConfig().getOracleConnectionInformation(); - String connectionString = String.format("(DESCRIPTION=(ADDRESS_LIST=(ADDRESS=(PROTOCOL=TCP)(HOST = %s)(PORT = %d)))(CONNECT_DATA = (SERVICE_NAME = %s)))", + final URI conn = getConfig().getOracleConnectionInformation(); + final String connectionString = String.format( + "(DESCRIPTION=(ADDRESS_LIST=(ADDRESS=(PROTOCOL=TCP)(HOST = %s)(PORT = %d)))(CONNECT_DATA = (SERVICE_NAME = %s)))", conn.getHost(), conn.getPort(), conn.getPath().substring(1)); createConnection("CONN_ORACLE", connectionString, getConfig().getOracleUser(), getConfig().getOraclePassword()); } - private List<ResultSet> runQuery(String query) throws SQLException { - ArrayList<ResultSet> result = new ArrayList<>(); + private List<ResultSet> runQuery(final String query) throws SQLException { + final ArrayList<ResultSet> result = new ArrayList<>(); result.add(executeQuery(String.format(query, EXA_TABLE_JDBC))); result.add(executeQuery(String.format(query, EXA_TABLE_IMPORT))); return result; } - private void runMatchSingleRowExplain(String query, String expectedExplain) throws SQLException { + private void runMatchSingleRowExplain(final String query, final String expectedExplain) throws SQLException { matchSingleRowExplain(String.format(query, EXA_TABLE_JDBC), expectedExplain); matchSingleRowExplain(String.format(query, EXA_TABLE_IMPORT), expectedExplain); }
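+ // Advances the result by one row and compares each column as a BigDecimal
+ // with trailing zeros stripped (null columns are skipped), so the same
+ // expected strings match both the JDBC and the IMPORT FROM ORA result sets.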
- private void matchNextRowDecimal(ResultSet result, String... expectedStrings) throws SQLException { + private void matchNextRowDecimal(final ResultSet result, final String... expectedStrings) throws SQLException { result.next(); if (result.getMetaData().getColumnCount() != expectedStrings.length) { throw new IllegalArgumentException(String.format("Row has %d columns but only %d arguments were given", @@ -140,27 +118,29 @@ private void matchNextRowDecimal(ResultSet result, String... expectedStrings) th } for (int i = 1; i <= result.getMetaData().getColumnCount(); i++) { - if (result.getObject(i) == null) continue; - BigDecimal expected = new BigDecimal(expectedStrings[i-1]); - BigDecimal actual = result.getBigDecimal(i).stripTrailingZeros(); + if (result.getObject(i) == null) { + continue; + } + final BigDecimal expected = new BigDecimal(expectedStrings[i - 1]); + final BigDecimal actual = result.getBigDecimal(i).stripTrailingZeros(); assertEquals(expected, actual); } } @Test public void testColumnTypeEquivalence() throws SQLException { - Map<String, String> jdbcColumnTypes = getColumnTypesOfTable(EXA_TABLE_JDBC); - Map<String, String> importColumnTypes = getColumnTypesOfTable(EXA_TABLE_IMPORT); + final Map<String, String> jdbcColumnTypes = getColumnTypesOfTable(EXA_TABLE_JDBC); + final Map<String, String> importColumnTypes = getColumnTypesOfTable(EXA_TABLE_IMPORT); - for (Map.Entry<String, String> entry : jdbcColumnTypes.entrySet()) { + for (final Map.Entry<String, String> entry : jdbcColumnTypes.entrySet()) { assertEquals(entry.getValue(), importColumnTypes.get(entry.getKey())); } } @Test public void testSelectExpression() throws SQLException { - String query = "SELECT C7 + 1 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C7 + 1 FROM %s"; + for (final ResultSet result : runQuery(query)) { matchNextRowDecimal(result, "12346.12345"); matchNextRowDecimal(result, "12356.12345"); } @@ -169,8 +149,8 @@ public void testSelectExpression() throws SQLException { @Test public void testFilterExpression() throws SQLException { - String query = "SELECT C7 FROM %s WHERE C7 > 12346"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C7 FROM %s WHERE C7 > 12346"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, "12355.12345"); } matchSingleRowExplain(query, "SELECT C7 FROM " + ORA_TABLE + " WHERE 12346 < C7"); @@ -178,57 +158,61 @@ public void testFilterExpression() throws SQLException { @Test public void testAggregateSingleGroup() throws SQLException { - String query = "SELECT min(C7) FROM %s"; - for(ResultSet result : runQuery(query)) { - matchNextRowDecimal(result,"12345.12345"); + final String query = "SELECT min(C7) FROM %s"; + for (final ResultSet result : runQuery(query)) { + matchNextRowDecimal(result, "12345.12345"); } runMatchSingleRowExplain(query, "SELECT CAST(MIN(C7) AS FLOAT) FROM " + ORA_TABLE); } @Test public void testAggregateGroupByColumn() throws SQLException { - String query = "SELECT C5, min(C7) FROM %s GROUP BY C5"; - for(ResultSet result : runQuery(query)) { + final String query = "SELECT C5, min(C7) FROM %s GROUP BY C5"; + for (final ResultSet result : runQuery(query)) { matchNextRowDecimal(result, "123456789012345678901234567890123456", "12345.12345"); matchNextRowDecimal(result, "1234567890.123456789", "12355.12345"); } - runMatchSingleRowExplain(query, "SELECT TO_CHAR(C5), CAST(MIN(C7) AS FLOAT) FROM " + ORA_TABLE + " GROUP BY C5"); + runMatchSingleRowExplain(query, + "SELECT TO_CHAR(C5), CAST(MIN(C7) AS FLOAT) FROM " + ORA_TABLE + " GROUP BY C5"); } - @Test public void testAggregateGroupByExpression() throws SQLException { - String query = "SELECT C5 + 1, min(C7) FROM %s GROUP BY C5 + 1"; - for (ResultSet result : runQuery(query)) { + final String query = 
"SELECT C5 + 1, min(C7) FROM %s GROUP BY C5 + 1"; + for (final ResultSet result : runQuery(query)) { matchNextRowDecimal(result, "123456789012345678901234567890123457", "12345.12345"); matchNextRowDecimal(result, "1234567891.123456789", "12355.12345"); } - runMatchSingleRowExplain(query, "SELECT CAST((C5 + 1) AS FLOAT), CAST(MIN(C7) AS FLOAT) FROM " + ORA_TABLE + " GROUP BY (C5 + 1)"); + runMatchSingleRowExplain(query, + "SELECT CAST((C5 + 1) AS FLOAT), CAST(MIN(C7) AS FLOAT) FROM " + ORA_TABLE + " GROUP BY (C5 + 1)"); } @Test public void testAggregateGroupByTuple() throws SQLException { - String query = "SELECT C_NUMBER36, C5, min(C7) FROM %s GROUP BY C_NUMBER36, C5 ORDER BY C5 DESC"; - for (ResultSet result : runQuery(query)) { - matchNextRowDecimal(result, "123456789012345678901234567890123456", "123456789012345678901234567890123456", "12345.12345"); + final String query = "SELECT C_NUMBER36, C5, min(C7) FROM %s GROUP BY C_NUMBER36, C5 ORDER BY C5 DESC"; + for (final ResultSet result : runQuery(query)) { + matchNextRowDecimal(result, "123456789012345678901234567890123456", "123456789012345678901234567890123456", + "12345.12345"); matchNextRowDecimal(result, "123456789012345678901234567890123456", "1234567890.123456789", "12355.12345"); } - runMatchSingleRowExplain(query, "SELECT C_NUMBER36, TO_CHAR(C5), CAST(MIN(C7) AS FLOAT) FROM " + ORA_TABLE + " GROUP BY C5, C_NUMBER36 ORDER BY C5 DESC"); + runMatchSingleRowExplain(query, "SELECT C_NUMBER36, TO_CHAR(C5), CAST(MIN(C7) AS FLOAT) FROM " + ORA_TABLE + + " GROUP BY C5, C_NUMBER36 ORDER BY C5 DESC"); } @Test public void testAggregateHaving() throws SQLException, ClassNotFoundException, FileNotFoundException { - String query = "SELECT C5, min(C7) FROM %s GROUP BY C5 HAVING MIN(C7) > 12350"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C5, min(C7) FROM %s GROUP BY C5 HAVING MIN(C7) > 12350"; + for (final ResultSet result : runQuery(query)) { matchNextRowDecimal(result, "1234567890.123456789", "12355.12345"); } - runMatchSingleRowExplain(query, "SELECT TO_CHAR(C5), CAST(MIN(C7) AS FLOAT) FROM " + ORA_TABLE + " GROUP BY C5 HAVING 12350 < MIN(C7)"); + runMatchSingleRowExplain(query, + "SELECT TO_CHAR(C5), CAST(MIN(C7) AS FLOAT) FROM " + ORA_TABLE + " GROUP BY C5 HAVING 12350 < MIN(C7)"); } @Test public void testOrderByColumn() throws SQLException { - String query = "SELECT C1 FROM %s ORDER BY C1 DESC NULLS LAST"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C1 FROM %s ORDER BY C1 DESC NULLS LAST"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, "aaaaaaaaaaaaaaaaaaaa "); matchNextRow(result, (Object) null); } @@ -237,8 +221,8 @@ public void testOrderByColumn() throws SQLException { @Test public void testOrderByExpression() throws SQLException { - String query = "SELECT C7 FROM %s ORDER BY ABS(C7) DESC NULLS FIRST"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C7 FROM %s ORDER BY ABS(C7) DESC NULLS FIRST"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, "12355.12345"); matchNextRow(result, "12345.12345"); } @@ -247,27 +231,30 @@ public void testOrderByExpression() throws SQLException { @Test public void testLimit() throws SQLException { - String query = "SELECT C7 FROM %s ORDER BY C7 LIMIT 2"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C7 FROM %s ORDER BY C7 LIMIT 2"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, "12345.12345"); matchNextRow(result, 
"12355.12345"); } - matchSingleRowExplain(query, "SELECT LIMIT_SUBSELECT.* FROM ( SELECT C7 FROM " + ORA_TABLE + " ORDER BY C7 ) LIMIT_SUBSELECT WHERE ROWNUM <= 2"); + matchSingleRowExplain(query, "SELECT LIMIT_SUBSELECT.* FROM ( SELECT C7 FROM " + ORA_TABLE + + " ORDER BY C7 ) LIMIT_SUBSELECT WHERE ROWNUM <= 2"); } @Test public void testLimitOffset() throws SQLException { - String query = "SELECT C7 FROM %s ORDER BY C7 LIMIT 1 OFFSET 1"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C7 FROM %s ORDER BY C7 LIMIT 1 OFFSET 1"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, "12355.12345"); } - matchSingleRowExplain(query, "SELECT c0 FROM ( SELECT LIMIT_SUBSELECT.*, ROWNUM ROWNUM_SUB FROM ( SELECT C7 AS c0 FROM " + ORA_TABLE + " ORDER BY C7 ) LIMIT_SUBSELECT WHERE ROWNUM <= 2 ) WHERE ROWNUM_SUB > 1"); + matchSingleRowExplain(query, + "SELECT c0 FROM ( SELECT LIMIT_SUBSELECT.*, ROWNUM ROWNUM_SUB FROM ( SELECT C7 AS c0 FROM " + ORA_TABLE + + " ORDER BY C7 ) LIMIT_SUBSELECT WHERE ROWNUM <= 2 ) WHERE ROWNUM_SUB > 1"); } @Test public void testChar() throws SQLException { - String query = "SELECT C1 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C1 FROM %s"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, "aaaaaaaaaaaaaaaaaaaa "); } assertEquals("CHAR(50) ASCII", getColumnType("C1")); @@ -275,8 +262,8 @@ public void testChar() throws SQLException { @Test public void testNChar() throws SQLException { - String query = "SELECT C2 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C2 FROM %s"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, "bbbbbbbbbbbbbbbbbbbb "); } assertEquals("CHAR(50) UTF8", getColumnType("C2")); @@ -284,8 +271,8 @@ public void testNChar() throws SQLException { @Test public void testVarchar() throws SQLException { - String query = "SELECT C3 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C3 FROM %s"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, "cccccccccccccccccccc"); } assertEquals("VARCHAR(50) ASCII", getColumnType("C3")); @@ -293,8 +280,8 @@ public void testVarchar() throws SQLException { @Test public void testNVarchar() throws SQLException { - String query = "SELECT C4 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C4 FROM %s"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, "dddddddddddddddddddd"); } assertEquals("VARCHAR(50) UTF8", getColumnType("C4")); @@ -302,11 +289,11 @@ public void testNVarchar() throws SQLException { @Test public void testNumber() throws SQLException { - String query = "SELECT C5 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C5 FROM %s"; + for (final ResultSet result : runQuery(query)) { result.next(); - BigDecimal expected = new BigDecimal("123456789012345678901234567890123456"); - BigDecimal actual = result.getBigDecimal("C5"); + final BigDecimal expected = new BigDecimal("123456789012345678901234567890123456"); + final BigDecimal actual = result.getBigDecimal("C5"); assertEquals(expected, actual); } assertEquals("VARCHAR(2000000) UTF8", getColumnType("C5")); @@ -314,11 +301,11 @@ public void testNumber() throws SQLException { @Test public void testNumber36() throws SQLException { - String query = "SELECT c_number36 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT 
c_number36 FROM %s"; + for (final ResultSet result : runQuery(query)) { result.next(); - BigDecimal expected = new BigDecimal("123456789012345678901234567890123456"); - BigDecimal actual = result.getBigDecimal("c_number36"); + final BigDecimal expected = new BigDecimal("123456789012345678901234567890123456"); + final BigDecimal actual = result.getBigDecimal("c_number36"); assertEquals(expected, actual); } assertEquals("DECIMAL(36,0)", getColumnType("C_NUMBER36")); @@ -326,11 +313,11 @@ public void testNumber36() throws SQLException { @Test public void testNumber38() throws SQLException { - String query = "SELECT C6 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C6 FROM %s"; + for (final ResultSet result : runQuery(query)) { result.next(); - BigDecimal expected = new BigDecimal("12345678901234567890123456789012345678"); - BigDecimal actual = result.getBigDecimal("C6"); + final BigDecimal expected = new BigDecimal("12345678901234567890123456789012345678"); + final BigDecimal actual = result.getBigDecimal("C6"); assertEquals(expected, actual); } assertEquals("VARCHAR(2000000) UTF8", getColumnType("C6")); @@ -338,11 +325,11 @@ public void testNumber38() throws SQLException { @Test public void testNumber10S5() throws SQLException { - String query = "SELECT C7 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C7 FROM %s"; + for (final ResultSet result : runQuery(query)) { result.next(); - BigDecimal expected = new BigDecimal("12345.12345"); - BigDecimal actual = result.getBigDecimal("C7").stripTrailingZeros(); + final BigDecimal expected = new BigDecimal("12345.12345"); + final BigDecimal actual = result.getBigDecimal("C7").stripTrailingZeros(); assertEquals(expected, actual); } assertEquals("DECIMAL(10,5)", getColumnType("C7")); @@ -350,11 +337,11 @@ public void testNumber10S5() throws SQLException { @Test public void testBinaryFloat() throws SQLException { - String query = "SELECT C_BINFLOAT FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C_BINFLOAT FROM %s"; + for (final ResultSet result : runQuery(query)) { result.next(); - Float expected = Float.parseFloat("1234.1241723"); - Float actual = result.getFloat(1); + final Float expected = Float.parseFloat("1234.1241723"); + final Float actual = result.getFloat(1); if (Math.abs(expected - actual) > 0.00001) { fail(); } @@ -364,11 +351,11 @@ public void testBinaryFloat() throws SQLException { @Test public void testBinaryDouble() throws SQLException { - String query = "SELECT C_BINDOUBLE FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C_BINDOUBLE FROM %s"; + for (final ResultSet result : runQuery(query)) { result.next(); - Double expected = Double.parseDouble("1234987.120871234"); - Double actual = result.getDouble(1); + final Double expected = Double.parseDouble("1234987.120871234"); + final Double actual = result.getDouble(1); if (Math.abs(expected - actual) > 0.0000001) { fail(); } @@ -378,11 +365,11 @@ public void testBinaryDouble() throws SQLException { @Test public void testFloat() throws SQLException { - String query = "SELECT C_FLOAT FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C_FLOAT FROM %s"; + for (final ResultSet result : runQuery(query)) { result.next(); - Float expected = Float.parseFloat("12345.01982348239"); - Float actual = result.getFloat(1); + final Float expected = Float.parseFloat("12345.01982348239"); + final Float actual = 
result.getFloat(1); if (Math.abs(expected - actual) > 0.000000001) { fail(); } @@ -392,11 +379,11 @@ public void testFloat() throws SQLException { @Test public void testFloat126() throws SQLException { - String query = "SELECT C_FLOAT126 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C_FLOAT126 FROM %s"; + for (final ResultSet result : runQuery(query)) { result.next(); - Float expected = Float.parseFloat("12345678.01234567901234567890123456789"); - Float actual = result.getFloat(1); + final Float expected = Float.parseFloat("12345678.01234567901234567890123456789"); + final Float actual = result.getFloat(1); if (Math.abs(expected - actual) > 0.00000000000000000001) { fail(); } @@ -406,16 +393,16 @@ public void testFloat126() throws SQLException { @Test public void testLong() throws SQLException { - String query = "SELECT C_LONG FROM " + EXA_TABLE_JDBC; - ResultSet result = executeQuery(query); + final String query = "SELECT C_LONG FROM " + EXA_TABLE_JDBC; + final ResultSet result = executeQuery(query); matchNextRow(result, "test long 123"); assertEquals("VARCHAR(2000000) ASCII", getColumnType("C_LONG")); } @Test public void testDate() throws SQLException { - String query = "SELECT C10 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C10 FROM %s"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, Date.valueOf("2016-08-19")); } runMatchSingleRowExplain(query, "SELECT C10 FROM " + ORA_TABLE); @@ -424,8 +411,8 @@ public void testDate() throws SQLException { @Test public void testTimestamp3() throws SQLException { - String query = "SELECT C11 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C11 FROM %s"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, "11-MAR-13 05.30.15.123 PM"); } runMatchSingleRowExplain(query, "SELECT TO_CHAR(C11) FROM " + ORA_TABLE); @@ -434,8 +421,8 @@ public void testTimestamp3() throws SQLException { @Test public void testTimestamp6() throws SQLException { - String query = "SELECT C12 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C12 FROM %s"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, "11-MAR-13 05.30.15.123456 PM"); } runMatchSingleRowExplain(query, "SELECT TO_CHAR(C12) FROM " + ORA_TABLE); @@ -444,8 +431,8 @@ public void testTimestamp6() throws SQLException { @Test public void testTimestamp9() throws SQLException { - String query = "SELECT C13 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C13 FROM %s"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, "11-MAR-13 05.30.15.123456789 PM"); } runMatchSingleRowExplain(query, "SELECT TO_CHAR(C13) FROM " + ORA_TABLE); @@ -454,8 +441,8 @@ public void testTimestamp9() throws SQLException { @Test public void testTimestampTZ() throws SQLException { - String query = "SELECT C14 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C14 FROM %s"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, "19-AUG-16 11.28.05.000000 AM -08:00"); } runMatchSingleRowExplain(query, "SELECT TO_CHAR(C14) FROM " + ORA_TABLE); @@ -465,8 +452,8 @@ public void testTimestampTZ() throws SQLException { @Test public void testTimestampLocalTZ() throws SQLException { executeUpdate("ALTER SESSION SET TIME_ZONE = 'UTC'"); - String query = "SELECT C15 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String 
query = "SELECT C15 FROM %s"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, "30-APR-18 06.00.05.000000 PM"); } assertEquals("VARCHAR(2000000) UTF8", getColumnType("C15")); @@ -474,8 +461,8 @@ public void testTimestampLocalTZ() throws SQLException { @Test public void testIntervalYear() throws SQLException { - String query = "SELECT C16 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C16 FROM %s"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, "+54-02"); } runMatchSingleRowExplain(query, "SELECT TO_CHAR(C16) FROM " + ORA_TABLE); @@ -484,8 +471,8 @@ public void testIntervalYear() throws SQLException { @Test public void testIntervalDay() throws SQLException { - String query = "SELECT C17 FROM %s"; - for (ResultSet result : runQuery(query)) { + final String query = "SELECT C17 FROM %s"; + for (final ResultSet result : runQuery(query)) { matchNextRow(result, "+01 11:12:10.123000"); matchNextRow(result, "+02 02:03:04.123456"); } diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/SybaseSqlDialectIT.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/SybaseSqlDialectIT.java new file mode 100644 index 000000000..dd37676ca --- /dev/null +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/SybaseSqlDialectIT.java @@ -0,0 +1,398 @@ +package com.exasol.adapter.dialects.impl; + +import static org.junit.Assert.assertEquals; + +import java.io.FileNotFoundException; +import java.math.BigDecimal; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.*; + +import org.junit.*; + +import com.exasol.adapter.dialects.AbstractIntegrationTest; + +public class SybaseSqlDialectIT extends AbstractIntegrationTest { + private static final boolean IS_LOCAL = false; + private static final String VS_NAME = "VS_SYBASE"; + + @BeforeClass + public static void setUpClass() throws FileNotFoundException, SQLException, ClassNotFoundException { + Assume.assumeTrue(getConfig().sybaseTestsRequested()); + + setConnection(connectToExa()); + createSybaseJDBCAdapter(); + final String catalogName = "testdb"; // This only works for the database in our test environment + final String schemaName = "tester"; + createVirtualSchema(VS_NAME, SybaseSqlDialect.getPublicName(), catalogName, schemaName, "", + getConfig().getSybaseUser(), getConfig().getSybasePassword(), "ADAPTER.JDBC_ADAPTER", + getConfig().getSybaseJdbcConnectionString(), IS_LOCAL, getConfig().debugAddress(), "", null); + } + + private static void createSybaseJDBCAdapter() throws SQLException, FileNotFoundException { + final String jdbcAdapterPath = getConfig().getJdbcAdapterPath(); + final String jdbcDriverDriver = getConfig().getSybaseJdbcDriverPath(); + final List includes = new ArrayList<>(); + includes.add(jdbcAdapterPath); + includes.add(jdbcDriverDriver); + createJDBCAdapter(includes); + } + + // Use getColumnTypes() to access this map + private final Map> columnTypes = new HashMap<>(); + + private String getColumnType(final String table, final String column) throws SQLException { + Map map = this.columnTypes.get(table); + if (map == null) { + map = getColumnTypesOfTable(table); + this.columnTypes.put(table, map); + } + return map.get(column.toUpperCase()); + } + + private Map getColumnTypesOfTable(final String table) throws SQLException { + final Map map = new HashMap<>(); + final ResultSet result = executeQuery("DESCRIBE " + VS_NAME 
+ ".\"" + table + "\""); + while (result.next()) { + map.put(result.getString("COLUMN_NAME").toUpperCase(), result.getString("SQL_TYPE").toUpperCase()); + } + return map; + } + + private void assertColumnTypeEquals(final String expected, final String table, final String column) + throws SQLException { + assertEquals(expected.toUpperCase(), getColumnType(table, column).toUpperCase()); + } + + @Test + public void testSelect() throws SQLException { + final ResultSet result = executeQuery("SELECT * FROM vs_sybase.\"ittable\""); + matchNextRow(result, "e", 2L); + } + + @Test + public void testProjection() throws SQLException { + final ResultSet result = executeQuery("SELECT \"a\" FROM vs_sybase.\"ittable\""); + matchNextRow(result, "e"); + } + + @Test + public void testOrderByAsc() throws SQLException { + final ResultSet result = executeQuery("SELECT \"a\" FROM vs_sybase.\"ittable\" ORDER BY \"a\""); + matchNextRow(result, "a"); + result.last(); + assertEquals(null, result.getObject(1)); + } + + @Test + public void testOrderByAscNullsFirst() throws SQLException { + final ResultSet result = executeQuery("SELECT \"a\" FROM vs_sybase.\"ittable\" ORDER BY \"a\" NULLS FIRST"); + result.next(); + assertEquals(null, result.getObject(1)); + result.last(); + matchLastRow(result, "z"); + } + + @Test + public void testOrderByDesc() throws SQLException { + final ResultSet result = executeQuery("SELECT \"a\" FROM vs_sybase.\"ittable\" ORDER BY \"a\" DESC"); + result.next(); + assertEquals(null, result.getObject(1)); + result.last(); + matchLastRow(result, "a"); + } + + @Test + public void testOrderByDescNullsLast() throws SQLException { + final ResultSet result = executeQuery("SELECT \"a\" FROM vs_sybase.\"ittable\" ORDER BY \"a\" DESC NULLS LAST"); + matchNextRow(result, "z"); + result.last(); + assertEquals(null, result.getObject(1)); + } + + @Test + public void testWhereGreater() throws SQLException { + final ResultSet result = executeQuery("SELECT \"b\" FROM vs_sybase.\"ittable\" WHERE \"b\" > 0"); + result.last(); + assertEquals(2, result.getRow()); + } + + // TODO: add datatype tests + @Test + public void testTypeSmalldatetime() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_smalldatetime\" FROM vs_sybase.\"timetypes\""); + matchNextRow(result, getSqlTimestamp(1900, 1, 1, 1, 2, 0, 0)); + } + + @Test + public void testTypeDatetime() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_datetime\" FROM vs_sybase.\"timetypes\""); + matchNextRow(result, getSqlTimestamp(1753, 1, 1, 1, 2, 3, 100)); + } + + @Test + public void testTypeDate() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_date\" FROM vs_sybase.\"timetypes\""); + matchNextRow(result, getSqlDate(2032, 12, 3)); + } + + @Test + public void testTypeTime() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_time\" FROM vs_sybase.\"timetypes\""); + matchNextRow(result, "11:22:33.456"); + } + + @Test + @Ignore + public void testTypeBigdatetime() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_bigdatetime\" FROM vs_sybase.\"timetypes\""); + matchNextRow(result, getSqlTimestamp(1753, 1, 1, 1, 2, 3, 100)); + // SQL Error [22001]: Data truncation + // Arithmetic overflow during implicit conversion of BIGDATETIME value to a + // DATETIME field . 
+ } + + @Test + public void testTypeBigtime() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_bigtime\" FROM vs_sybase.\"timetypes\""); + matchNextRow(result, "11:11:11.111111"); + } + + @Test + public void testTypeBigint() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_bigint\" FROM vs_sybase.\"integertypes\""); + matchNextRow(result, new BigDecimal("-9223372036854775808")); + matchNextRow(result, new BigDecimal("9223372036854775807")); + assertColumnTypeEquals("DECIMAL(19,0)", "integertypes", "c_bigint"); + } + + @Test + public void testTypeInt() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_int\" FROM vs_sybase.\"integertypes\""); + matchNextRow(result, -2147483648L); + matchNextRow(result, 2147483647L); + assertColumnTypeEquals("DECIMAL(10,0)", "integertypes", "c_int"); + } + + @Test + public void testTypeSmallint() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_smallint\" FROM vs_sybase.\"integertypes\""); + matchNextRow(result, -32768); + matchNextRow(result, 32767); + assertColumnTypeEquals("DECIMAL(5,0)", "integertypes", "c_smallint"); + } + + @Test + public void testTypeUbigint() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_ubigint\" FROM vs_sybase.\"integertypes\""); + matchNextRow(result, new BigDecimal("0")); + matchNextRow(result, new BigDecimal("18446744073709551615")); + assertColumnTypeEquals("DECIMAL(20,0)", "integertypes", "c_ubigint"); + } + + @Test + public void testTypeUint() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_uint\" FROM vs_sybase.\"integertypes\""); + matchNextRow(result, 0L); + matchNextRow(result, 4294967295L); + assertColumnTypeEquals("DECIMAL(10,0)", "integertypes", "c_uint"); + } + + @Test + public void testTypeUsmallint() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_usmallint\" FROM vs_sybase.\"integertypes\""); + matchNextRow(result, 0); + matchNextRow(result, 65535); + assertColumnTypeEquals("DECIMAL(5,0)", "integertypes", "c_usmallint"); + } + + @Test + public void testTypeNumeric36() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_numeric_36_0\" FROM vs_sybase.\"decimaltypes\""); + matchNextRow(result, new BigDecimal("12345678901234567890123456")); + matchNextRow(result, new BigDecimal("-12345678901234567890123456")); + assertColumnTypeEquals("DECIMAL(36,0)", "decimaltypes", "c_numeric_36_0"); + } + + @Test + public void testTypeNumeric38() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_numeric_38_0\" FROM vs_sybase.\"decimaltypes\""); + matchNextRow(result, "1234567890123456789012345678"); + matchNextRow(result, "-1234567890123456789012345678"); + assertColumnTypeEquals("VARCHAR(39) UTF8", "decimaltypes", "c_numeric_38_0"); + } + + @Test + public void testTypeDecimal2010() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_decimal_20_10\" FROM vs_sybase.\"decimaltypes\""); + matchNextRow(result, new BigDecimal("1234567890.0123456789")); + matchNextRow(result, new BigDecimal("-1234567890.0123456789")); + assertColumnTypeEquals("DECIMAL(20,10)", "decimaltypes", "c_decimal_20_10"); + } + + @Test + public void testTypeDecimal3710() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_decimal_37_10\" FROM vs_sybase.\"decimaltypes\""); + matchNextRow(result, "12345678901234567.0123456789"); + matchNextRow(result, "-12345678901234567.0123456789"); + 
assertColumnTypeEquals("VARCHAR(39) UTF8", "decimaltypes", "c_decimal_37_10"); + } + + @Test + public void testTypeDouble() throws SQLException { + // ResultSet result = executeQuery("SELECT \"c_double\" FROM + // vs_sybase.\"approxtypes\""); + // matchNextRow(result, "2.2250738585072014e-308"); + // matchNextRow(result, "1.797693134862315708e+308"); + assertColumnTypeEquals("DOUBLE", "approxtypes", "c_double"); + } + + @Test + public void testTypeReal() throws SQLException { + // ResultSet result = executeQuery("SELECT \"c_real\" FROM + // vs_sybase.\"approxtypes\""); + // matchNextRow(result, new Double("1.175494351e-38")); + // matchNextRow(result, new Double("3.402823466e+38")); + assertColumnTypeEquals("DOUBLE", "approxtypes", "c_real"); + } + + @Test + public void testTypeSmallmoney() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_smallmoney\" FROM vs_sybase.\"moneytypes\""); + matchNextRow(result, new BigDecimal("214748.3647")); + matchNextRow(result, new BigDecimal("-214748.3648")); + assertColumnTypeEquals("DECIMAL(10,4)", "moneytypes", "c_smallmoney"); + } + + @Test + public void testTypeMoney() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_money\" FROM vs_sybase.\"moneytypes\""); + matchNextRow(result, new BigDecimal("922337203685477.5807")); + matchNextRow(result, new BigDecimal("-922337203685477.5808")); + assertColumnTypeEquals("DECIMAL(19,4)", "moneytypes", "c_money"); + } + + public String padRight(final String s, final int n) { + return String.format("%-" + n + "s", s); + } + + @Test + public void testTypeChar10() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_char_10\" FROM vs_sybase.\"chartypes\""); + matchNextRow(result, padRight("c10", 10)); + assertColumnTypeEquals("CHAR(10) ASCII", "chartypes", "c_char_10"); + } + + @Test + public void testTypeCharTooBig() throws SQLException { + final ResultSet result = executeQuery("SELECT \"c_char_toobig\" FROM vs_sybase.\"chartypes\""); + matchNextRow(result, padRight("c2001", 2001)); + assertColumnTypeEquals("VARCHAR(2001) ASCII", "chartypes", "c_char_toobig"); + } + + @Test + public void testTypeVarchar() throws SQLException { + final String column = "c_varchar"; + final String table = "chartypes"; + final ResultSet result = executeQuery("SELECT \"" + column + "\" FROM vs_sybase.\"" + table + "\""); + matchNextRow(result, "vc10"); + assertColumnTypeEquals("VARCHAR(10) ASCII", table, column); + } + + @Test + public void testTypeUnichar10() throws SQLException { + final String column = "c_unichar_10"; + final String table = "chartypes"; + final ResultSet result = executeQuery("SELECT \"" + column + "\" FROM vs_sybase.\"" + table + "\""); + matchNextRow(result, padRight("uc10", 10)); + assertColumnTypeEquals("CHAR(10) UTF8", table, column); + } + + @Test + public void testTypeUnicharToobig() throws SQLException { + final int fieldSize = 8148; + final String column = "c_unichar_toobig"; + final String table = "fatunichartypes"; + final ResultSet result = executeQuery("SELECT \"" + column + "\" FROM vs_sybase.\"" + table + "\""); + matchNextRow(result, padRight("xyz", fieldSize)); + assertColumnTypeEquals("VARCHAR(" + fieldSize + ") UTF8", table, column); + } + + @Test + public void testTypeUnivarchar() throws SQLException { + final String column = "c_univarchar"; + final String table = "chartypes"; + final ResultSet result = executeQuery("SELECT \"" + column + "\" FROM vs_sybase.\"" + table + "\""); + matchNextRow(result, "uvc10"); + 
assertColumnTypeEquals("VARCHAR(10) UTF8", table, column); + } + + @Test + public void testTypeNchar() throws SQLException { + final String column = "c_nchar"; + final String table = "chartypes"; + final ResultSet result = executeQuery("SELECT \"" + column + "\" FROM vs_sybase.\"" + table + "\""); + matchNextRow(result, padRight("nc10", 10)); + assertColumnTypeEquals("CHAR(10) ASCII", table, column); + } + + @Test + public void testTypeNvarchar() throws SQLException { + final String column = "c_nvarchar"; + final String table = "chartypes"; + final ResultSet result = executeQuery("SELECT \"" + column + "\" FROM vs_sybase.\"" + table + "\""); + matchNextRow(result, "nvc10"); + assertColumnTypeEquals("VARCHAR(10) ASCII", table, column); + } + + @Test + public void testTypeText() throws SQLException { + final String column = "c_text"; + final String table = "texttypes"; + final ResultSet result = executeQuery("SELECT \"" + column + "\" FROM vs_sybase.\"" + table + "\""); + matchNextRow(result, "Text. A wall of text."); + assertColumnTypeEquals("VARCHAR(2000000) UTF8", table, column); + } + + @Test + public void testTypeUnitext() throws SQLException { + final String column = "c_unitext"; + final String table = "texttypes"; + final ResultSet result = executeQuery("SELECT \"" + column + "\" FROM vs_sybase.\"" + table + "\""); + matchNextRow(result, "Text. A wall of Unicode text."); + assertColumnTypeEquals("VARCHAR(2000000) UTF8", table, column); + } + + @Test + public void testTypeBinary() throws SQLException { + final String column = "c_binary"; + final String table = "misctypes"; + final ResultSet result = executeQuery("SELECT \"" + column + "\" FROM vs_sybase.\"" + table + "\""); + matchNextRow(result, "binary NOT SUPPORTED"); + } + + @Test + public void testTypeVarbinary() throws SQLException { + final String column = "c_varbinary"; + final String table = "misctypes"; + final ResultSet result = executeQuery("SELECT \"" + column + "\" FROM vs_sybase.\"" + table + "\""); + matchNextRow(result, "varbinary NOT SUPPORTED"); + } + + @Test + public void testTypeImage() throws SQLException { + final String column = "c_image"; + final String table = "misctypes"; + final ResultSet result = executeQuery("SELECT \"" + column + "\" FROM vs_sybase.\"" + table + "\""); + matchNextRow(result, "image NOT SUPPORTED"); + } + + @Test + public void testTypeBit() throws SQLException { + final String column = "c_bit"; + final String table = "misctypes"; + final ResultSet result = executeQuery("SELECT \"" + column + "\" FROM vs_sybase.\"" + table + "\""); + matchNextRow(result, false); + assertColumnTypeEquals("BOOLEAN", table, column); + } +} \ No newline at end of file diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/TeradataSqlDialectIT.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/TeradataSqlDialectIT.java index be6d5e3c1..30d0a0b64 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/TeradataSqlDialectIT.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/TeradataSqlDialectIT.java @@ -1,11 +1,5 @@ package com.exasol.adapter.dialects.impl; -import com.exasol.adapter.dialects.AbstractIntegrationTest; - -import org.junit.Assume; -import org.junit.BeforeClass; -import org.junit.Test; - import java.io.FileNotFoundException; import java.math.BigDecimal; import java.sql.ResultSet; @@ -13,6 +7,12 @@ import java.util.ArrayList; 
import java.util.List; +import org.junit.Assume; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.exasol.adapter.dialects.AbstractIntegrationTest; + /** * Integration test for the Teradata SQL dialect * @@ -21,24 +21,16 @@ public class TeradataSqlDialectIT extends AbstractIntegrationTest { private static final String VIRTUAL_SCHEMA = "VS_TERADATA"; private static final String TERADATA_SCHEMA = "retail"; - + @BeforeClass public static void setUpClass() throws FileNotFoundException, SQLException, ClassNotFoundException { Assume.assumeTrue(getConfig().teradataTestsRequested()); setConnection(connectToExa()); createTeradataJDBCAdapter(); - createVirtualSchema( - VIRTUAL_SCHEMA, - TeradataSqlDialect.NAME, - "", TERADATA_SCHEMA, - "", - getConfig().getTeradataUser(), - getConfig().getTeradataPassword(), - "ADAPTER.JDBC_ADAPTER", - getConfig().getTeradataJdbcConnectionString(), - false, - getConfig().debugAddress(), + createVirtualSchema(VIRTUAL_SCHEMA, TeradataSqlDialect.getPublicName(), "", TERADATA_SCHEMA, "", + getConfig().getTeradataUser(), getConfig().getTeradataPassword(), "ADAPTER.JDBC_ADAPTER", + getConfig().getTeradataJdbcConnectionString(), false, getConfig().debugAddress(), "numeric_data_types, REGION, DateTime_and_Interval_Data_Types, Period_Data_Types", null); } @@ -83,73 +75,49 @@ public static void setUpClass() throws FileNotFoundException, SQLException, Clas // true); // } - @Test public void testSelectNumericDataTypes() throws SQLException, ClassNotFoundException, FileNotFoundException { - String query = "SELECT * FROM "+ VIRTUAL_SCHEMA + ".\"numeric_data_types\""; - ResultSet result = executeQuery(query); - matchNextRow( - result, - (short)1, - (java.lang.Integer)2, - (long)3, - new BigDecimal(4), - new BigDecimal("7.22"), - (java.lang.Double)1214325.1234, - (java.lang.Double)1.3451345135541E9, - (java.lang.Double)1.234513245783E9, - (java.lang.Double)113.321, - (java.lang.Double)3143.0, - (java.lang.Double)2.3452345E7, - new BigDecimal("1234.1"), - (short)132 - ); - matchSingleRowExplain(query, "SELECT \"mybyteint\", \"mysmallint\", \"myinteger\", \"myBIGINT\", \"myDecimal\", \"myFloat\", \"myReal\", \"myDouble\", CAST(\"n1\" as DOUBLE PRECISION), CAST(\"n2\" as DOUBLE PRECISION), CAST(\"n3\" as DOUBLE PRECISION), \"n4\", \"n5\" FROM \"retail\".\"numeric_data_types\""); + final String query = "SELECT * FROM " + VIRTUAL_SCHEMA + ".\"numeric_data_types\""; + final ResultSet result = executeQuery(query); + matchNextRow(result, (short) 1, 2, (long) 3, new BigDecimal(4), new BigDecimal("7.22"), 1214325.1234, + 1.3451345135541E9, 1.234513245783E9, 113.321, 3143.0, 2.3452345E7, new BigDecimal("1234.1"), + (short) 132); + matchSingleRowExplain(query, + "SELECT \"mybyteint\", \"mysmallint\", \"myinteger\", \"myBIGINT\", \"myDecimal\", \"myFloat\", \"myReal\", \"myDouble\", CAST(\"n1\" as DOUBLE PRECISION), CAST(\"n2\" as DOUBLE PRECISION), CAST(\"n3\" as DOUBLE PRECISION), \"n4\", \"n5\" FROM \"retail\".\"numeric_data_types\""); } - + @Test - public void testSelectDateTime_and_Interval_Data_Types() throws SQLException, ClassNotFoundException, FileNotFoundException { - String query = "SELECT * FROM "+ VIRTUAL_SCHEMA + ".\"DateTime_and_Interval_Data_Types\""; - ResultSet result = executeQuery(query); - matchNextRow( - result, - getSqlDate(2017, 1 , 11), - "13:09:52.000000", - getSqlTimestamp(2017, 01, 11, 13, 9, 52, 430), - "13:09:52.000000+00:00", - getSqlTimestamp(2017, 01, 11, 13, 9, 52, 430), - (java.lang.String) " -2 ", - (java.lang.String) " 10-10 ", - 
(java.lang.String) " 30 12:30:30.5000 ", - (java.lang.String) " 6:15.24 " - ); - matchSingleRowExplain(query, "SELECT \"myDate\", CAST(\"myTime\" as VARCHAR(21) ), \"myTimestamp\", CAST(\"myTimeWithTimezone\" as VARCHAR(21) ), \"myTimestampWithTimezone\", CAST(\"myIntervalYear\" as VARCHAR(30) ), CAST(\"myIntervalYearToMonth\" as VARCHAR(30) ), CAST(\"myIntervalDayToSecond\" as VARCHAR(30) ), CAST(\"myIntervalMinuteToSecond\" as VARCHAR(30) ) FROM \"retail\".\"DateTime_and_Interval_Data_Types\""); + public void testSelectDateTime_and_Interval_Data_Types() + throws SQLException, ClassNotFoundException, FileNotFoundException { + final String query = "SELECT * FROM " + VIRTUAL_SCHEMA + ".\"DateTime_and_Interval_Data_Types\""; + final ResultSet result = executeQuery(query); + matchNextRow(result, getSqlDate(2017, 1, 11), "13:09:52.000000", getSqlTimestamp(2017, 01, 11, 13, 9, 52, 430), + "13:09:52.000000+00:00", getSqlTimestamp(2017, 01, 11, 13, 9, 52, 430), + " -2 ", " 10-10 ", " 30 12:30:30.5000 ", + " 6:15.24 "); + matchSingleRowExplain(query, + "SELECT \"myDate\", CAST(\"myTime\" as VARCHAR(21) ), \"myTimestamp\", CAST(\"myTimeWithTimezone\" as VARCHAR(21) ), \"myTimestampWithTimezone\", CAST(\"myIntervalYear\" as VARCHAR(30) ), CAST(\"myIntervalYearToMonth\" as VARCHAR(30) ), CAST(\"myIntervalDayToSecond\" as VARCHAR(30) ), CAST(\"myIntervalMinuteToSecond\" as VARCHAR(30) ) FROM \"retail\".\"DateTime_and_Interval_Data_Types\""); } - + @Test public void testSelectPeriod_Data_Types() throws SQLException, ClassNotFoundException, FileNotFoundException { - String query = "SELECT * FROM "+ VIRTUAL_SCHEMA + ".\"Period_Data_Types\""; - ResultSet result = executeQuery(query); - matchNextRow( - result, - (long)1, - "hans ", - "('05/02/03', '06/02/04')", - "('10:00:00.123456', '11:00:00.123456')", - "('10:37:58.123456+08:00', '11:37:58.123456+08:00')", - "('2005-02-03 10:00:00.123', '2005-02-03 11:00:00.123')", - "('2005-02-03 10:37:58.123+08:00', '2005-02-03 11:37:58.123+08:00')" - - ); - matchSingleRowExplain(query, "SELECT \"employee_id\", \"employee_name\", CAST(\"myPeriodDate\" as VARCHAR(100) ), CAST(\"myPeriodTime\" as VARCHAR(100) ), CAST(\"myPeriodTimeWithTimeZone\" as VARCHAR(100) ), CAST(\"myPeriodTimestamp\" as VARCHAR(100) ), CAST(\"myPeriodTimestampTimezone\" as VARCHAR(100) ) FROM \"retail\".\"Period_Data_Types\""); - } - - + final String query = "SELECT * FROM " + VIRTUAL_SCHEMA + ".\"Period_Data_Types\""; + final ResultSet result = executeQuery(query); + matchNextRow(result, (long) 1, "hans ", "('05/02/03', '06/02/04')", + "('10:00:00.123456', '11:00:00.123456')", "('10:37:58.123456+08:00', '11:37:58.123456+08:00')", + "('2005-02-03 10:00:00.123', '2005-02-03 11:00:00.123')", + "('2005-02-03 10:37:58.123+08:00', '2005-02-03 11:37:58.123+08:00')" + + ); + matchSingleRowExplain(query, + "SELECT \"employee_id\", \"employee_name\", CAST(\"myPeriodDate\" as VARCHAR(100) ), CAST(\"myPeriodTime\" as VARCHAR(100) ), CAST(\"myPeriodTimeWithTimeZone\" as VARCHAR(100) ), CAST(\"myPeriodTimestamp\" as VARCHAR(100) ), CAST(\"myPeriodTimestampTimezone\" as VARCHAR(100) ) FROM \"retail\".\"Period_Data_Types\""); + } + @Test public void testProjection() throws SQLException, ClassNotFoundException, FileNotFoundException { - String query = "SELECT R_REGIONKEY FROM "+ VIRTUAL_SCHEMA + ".REGION order by R_REGIONKEY"; - ResultSet result = executeQuery(query); - matchNextRow(result,(long)0); + final String query = "SELECT R_REGIONKEY FROM " + VIRTUAL_SCHEMA + ".REGION order by R_REGIONKEY"; + final 
ResultSet result = executeQuery(query); + matchNextRow(result, (long) 0); matchSingleRowExplain(query, "SELECT R_REGIONKEY FROM \"retail\".REGION ORDER BY R_REGIONKEY"); } @@ -325,10 +293,10 @@ public void testProjection() throws SQLException, ClassNotFoundException, FileNo // } private static void createTeradataJDBCAdapter() throws SQLException, FileNotFoundException { - List<String> teradataIncludes = new ArrayList<>(); + final List<String> teradataIncludes = new ArrayList<>(); teradataIncludes.add(getConfig().getJdbcAdapterPath()); - String jdbcPrefixPath = getConfig().getTeradataJdbcPrefixPath(); - for (String jar : getConfig().getTeradataJdbcJars()) { + final String jdbcPrefixPath = getConfig().getTeradataJdbcPrefixPath(); + for (final String jar : getConfig().getTeradataJdbcJars()) { teradataIncludes.add(jdbcPrefixPath + jar); } createJDBCAdapter(teradataIncludes);
diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/jdbc/JdbcAdapterPropertiesTest.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/jdbc/JdbcAdapterPropertiesTest.java index bbd71ec54..e4ca68d5f 100644 --- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/jdbc/JdbcAdapterPropertiesTest.java +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/jdbc/JdbcAdapterPropertiesTest.java @@ -1,242 +1,249 @@ package com.exasol.adapter.jdbc; -import com.exasol.adapter.AdapterException; -import com.google.common.collect.ImmutableList; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import static org.junit.Assert.assertEquals; import java.util.HashMap; import java.util.List; import java.util.Map; -import static org.junit.Assert.assertEquals; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import com.exasol.adapter.AdapterException; +import com.google.common.collect.ImmutableList; public class JdbcAdapterPropertiesTest { - + @Rule public ExpectedException thrown = ExpectedException.none(); - + private static Map<String, String> getMinimumMandatory() { - Map<String, String> properties = new HashMap<>(); + final Map<String, String> properties = new HashMap<>(); properties.put(JdbcAdapterProperties.PROP_SQL_DIALECT, "GENERIC"); properties.put(JdbcAdapterProperties.PROP_CONNECTION_NAME, "MY_CONN"); return properties; } - + @Test public void testNoCredentials() throws AdapterException { - Map<String, String> properties = new HashMap<>(); + final Map<String, String> properties = new HashMap<>(); properties.put(JdbcAdapterProperties.PROP_SQL_DIALECT, "GENERIC"); properties.put(JdbcAdapterProperties.PROP_SCHEMA_NAME, "MY_SCHEMA"); - thrown.expect(AdapterException.class); - thrown.expectMessage("You did not specify a connection (CONNECTION_NAME) and therefore have to specify"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + this.thrown.expect(AdapterException.class); + this.thrown.expectMessage("You did not specify a connection (CONNECTION_NAME) and therefore have to specify"); + JdbcAdapterProperties.checkPropertyConsistency(properties); } @Test public void testUserNamePasswordOptional() throws AdapterException { - Map<String, String> properties = new HashMap<>(); + final Map<String, String> properties = new HashMap<>(); properties.put(JdbcAdapterProperties.PROP_SQL_DIALECT, "GENERIC"); properties.put(JdbcAdapterProperties.PROP_CONNECTION_STRING, "MY_CONN"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + JdbcAdapterProperties.checkPropertyConsistency(properties); } - + @Test public void testRedundantCredentials() throws AdapterException { Map<String, String> properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_CONNECTION_STRING, "MY_CONN"); - thrown.expect(AdapterException.class); - thrown.expectMessage("You specified a connection (CONNECTION_NAME) and therefore may not specify "); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); - + this.thrown.expect(AdapterException.class); + this.thrown.expectMessage("You specified a connection (CONNECTION_NAME) and therefore may not specify "); + JdbcAdapterProperties.checkPropertyConsistency(properties); + properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_USERNAME, "MY_USER"); - thrown.expect(AdapterException.class); - thrown.expectMessage("You specified a connection (CONNECTION_NAME) and therefore may not specify "); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); - + this.thrown.expect(AdapterException.class); + this.thrown.expectMessage("You specified a connection (CONNECTION_NAME) and therefore may not specify "); + JdbcAdapterProperties.checkPropertyConsistency(properties); + properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_PASSWORD, "MY_PASSWORD"); - thrown.expect(AdapterException.class); - thrown.expectMessage("You specified a connection (CONNECTION_NAME) and therefore may not specify "); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + this.thrown.expect(AdapterException.class); + this.thrown.expectMessage("You specified a connection (CONNECTION_NAME) and therefore may not specify "); + JdbcAdapterProperties.checkPropertyConsistency(properties); } @Test public void testNoDialect() throws AdapterException { - Map<String, String> properties = new HashMap<>(); + final Map<String, String> properties = new HashMap<>(); properties.put(JdbcAdapterProperties.PROP_CONNECTION_NAME, "MY_CONN"); properties.put(JdbcAdapterProperties.PROP_SCHEMA_NAME, "MY_SCHEMA"); - thrown.expect(AdapterException.class); - thrown.expectMessage("You have to specify the SQL dialect"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + this.thrown.expect(AdapterException.class); + this.thrown.expectMessage("You have to specify the SQL dialect"); + JdbcAdapterProperties.checkPropertyConsistency(properties); } @Test public void testInvalidDialect() throws AdapterException { - Map<String, String> properties = new HashMap<>(); + final Map<String, String> properties = new HashMap<>(); properties.put(JdbcAdapterProperties.PROP_CONNECTION_NAME, "MY_CONN"); properties.put(JdbcAdapterProperties.PROP_SCHEMA_NAME, "MY_SCHEMA"); properties.put(JdbcAdapterProperties.PROP_SQL_DIALECT, "INVALID_DIALECT"); - thrown.expect(AdapterException.class); - thrown.expectMessage("SQL Dialect not supported"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + this.thrown.expect(AdapterException.class); + this.thrown.expectMessage("SQL Dialect \"INVALID_DIALECT\" is not supported."); + JdbcAdapterProperties.checkPropertyConsistency(properties); } @Test public void testInvalidDebugAddress1() throws AdapterException { - Map<String, String> properties = getMinimumMandatory(); + final Map<String, String> properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_DEBUG_ADDRESS, "bla"); - thrown.expect(AdapterException.class); - thrown.expectMessage("You specified an invalid hostname and port"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + this.thrown.expect(AdapterException.class); + this.thrown.expectMessage("You specified an invalid hostname and port"); + JdbcAdapterProperties.checkPropertyConsistency(properties); } @Test public void testInvalidDebugAddress2() throws AdapterException { - Map<String, String> properties = getMinimumMandatory(); + final Map<String, String> properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_DEBUG_ADDRESS, "bla:no-number"); - thrown.expect(AdapterException.class); - thrown.expectMessage("You specified an invalid hostname and port"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + this.thrown.expect(AdapterException.class); + this.thrown.expectMessage("You specified an invalid hostname and port"); + JdbcAdapterProperties.checkPropertyConsistency(properties); } @Test public void testInvalidDebugAddress3() throws AdapterException { - Map<String, String> properties = getMinimumMandatory(); + final Map<String, String> properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_DEBUG_ADDRESS, "bla:123:456"); - thrown.expect(AdapterException.class); - thrown.expectMessage("You specified an invalid hostname and port"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + this.thrown.expect(AdapterException.class); + this.thrown.expectMessage("You specified an invalid hostname and port"); + JdbcAdapterProperties.checkPropertyConsistency(properties); } @Test public void testValidDebugAddress() throws AdapterException { - Map<String, String> properties = getMinimumMandatory(); + final Map<String, String> properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_DEBUG_ADDRESS, "bla:123"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + JdbcAdapterProperties.checkPropertyConsistency(properties); } - + @Test public void testSchemaAndCatalogOptional() throws AdapterException { - Map<String, String> properties = new HashMap<>(); + final Map<String, String> properties = new HashMap<>(); properties.put(JdbcAdapterProperties.PROP_SQL_DIALECT, "GENERIC"); properties.put(JdbcAdapterProperties.PROP_CONNECTION_NAME, "MY_CONN"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + JdbcAdapterProperties.checkPropertyConsistency(properties); } - + @Test public void checkValidBoolOptions() throws AdapterException { Map<String, String> properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_IS_LOCAL, "TrUe"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); - + JdbcAdapterProperties.checkPropertyConsistency(properties); + properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_IS_LOCAL, "FalSe"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); - + JdbcAdapterProperties.checkPropertyConsistency(properties); + properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_IMPORT_FROM_EXA, "TrUe"); properties.put(JdbcAdapterProperties.PROP_EXA_CONNECTION_STRING, "localhost:5555"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); - + JdbcAdapterProperties.checkPropertyConsistency(properties); + properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_IMPORT_FROM_EXA, "FalSe"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + JdbcAdapterProperties.checkPropertyConsistency(properties); } - + @Test public void checkInvalidBoolOption() throws AdapterException { Map<String, String> properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_IS_LOCAL, "asdasd"); - thrown.expect(AdapterException.class); - thrown.expectMessage("The value 'asdasd' for the property IS_LOCAL is invalid. It has to be either 'true' or 'false' (case insensitive)"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); - + this.thrown.expect(AdapterException.class); + this.thrown.expectMessage( + "The value 'asdasd' for the property IS_LOCAL is invalid. It has to be either 'true' or 'false' (case insensitive)"); + JdbcAdapterProperties.checkPropertyConsistency(properties); + properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_IMPORT_FROM_EXA, "asdasd"); properties.put(JdbcAdapterProperties.PROP_EXA_CONNECTION_STRING, "localhost:5555"); - thrown.expect(AdapterException.class); - thrown.expectMessage("The value 'asdasd' for the property IMPORT_FROM_EXA is invalid. It has to be either 'true' or 'false' (case insensitive)"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + this.thrown.expect(AdapterException.class); + this.thrown.expectMessage( + "The value 'asdasd' for the property IMPORT_FROM_EXA is invalid. It has to be either 'true' or 'false' (case insensitive)"); + JdbcAdapterProperties.checkPropertyConsistency(properties); } @Test public void testInconsistentExaProperties() throws AdapterException { - Map<String, String> properties = getMinimumMandatory(); + final Map<String, String> properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_EXA_CONNECTION_STRING, "localhost:5555"); - thrown.expect(AdapterException.class); - thrown.expectMessage("You defined the property EXA_CONNECTION_STRING without setting IMPORT_FROM_EXA"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + this.thrown.expect(AdapterException.class); + this.thrown.expectMessage("You defined the property EXA_CONNECTION_STRING without setting IMPORT_FROM_EXA"); + JdbcAdapterProperties.checkPropertyConsistency(properties); } @Test public void testInvalidExaProperties2() throws AdapterException { - Map<String, String> properties = getMinimumMandatory(); + final Map<String, String> properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_IMPORT_FROM_EXA, "True"); - thrown.expect(AdapterException.class); - thrown.expectMessage("You defined the property IMPORT_FROM_EXA, please also define EXA_CONNECTION_STRING"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + this.thrown.expect(AdapterException.class); + this.thrown.expectMessage("You defined the property IMPORT_FROM_EXA, please also define EXA_CONNECTION_STRING"); + JdbcAdapterProperties.checkPropertyConsistency(properties); } - + @Test public void testGetTableFilters() { - Map<String, String> properties = getMinimumMandatory(); + final Map<String, String> properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_TABLES, "T1, T2,T3,t4"); - List<String> expectedTables = ImmutableList.of("T1", "T2", "T3", "t4"); + final List<String> expectedTables = ImmutableList.of("T1", "T2", "T3", "t4"); assertEquals(expectedTables, JdbcAdapterProperties.getTableFilter(properties)); } - + @Test public void testGetNewSchemaMetadata() { - Map<String, String> oldSchemaProperties = new HashMap<String, String>(); + final Map<String, String> oldSchemaProperties = new HashMap<String, String>(); oldSchemaProperties.put("EXISTING_PROP_1", "Old Value 1"); oldSchemaProperties.put("EXISTING_PROP_2", "Old Value 2"); - - Map<String, String> changedProperties = new HashMap<String, String>(); + + final Map<String, String> changedProperties = new HashMap<String, String>(); changedProperties.put("EXISTING_PROP_1", "New Value"); changedProperties.put("EXISTING_PROP_2", null); changedProperties.put("NEW_PROP", "VAL2"); changedProperties.put("DELETED_PROP_NON_EXISTING", null); - - Map<String, String> expectedChangedProperties = new HashMap<String, String>(); + + final Map<String, String> expectedChangedProperties = new HashMap<String, String>(); expectedChangedProperties.put("EXISTING_PROP_1", "New Value"); expectedChangedProperties.put("NEW_PROP", "VAL2"); - - assertEquals(expectedChangedProperties, JdbcAdapterProperties.getNewProperties(oldSchemaProperties, changedProperties)); + + assertEquals(expectedChangedProperties, + JdbcAdapterProperties.getNewProperties(oldSchemaProperties, changedProperties)); } @Test public void testNullInExceptionHandling() throws AdapterException { - Map<String, String> properties = getMinimumMandatory(); + final Map<String, String> properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_EXCEPTION_HANDLING, null); - assertEquals(JdbcAdapterProperties.ExceptionHandlingMode.NONE, JdbcAdapterProperties.getExceptionHandlingMode(properties)); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + assertEquals(JdbcAdapterProperties.ExceptionHandlingMode.NONE, + JdbcAdapterProperties.getExceptionHandlingMode(properties)); + JdbcAdapterProperties.checkPropertyConsistency(properties); } @Test public void testEmptyExceptionHandling() throws AdapterException { - Map<String, String> properties = getMinimumMandatory(); + final Map<String, String> properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_EXCEPTION_HANDLING, ""); - assertEquals(JdbcAdapterProperties.ExceptionHandlingMode.NONE, JdbcAdapterProperties.getExceptionHandlingMode(properties)); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + assertEquals(JdbcAdapterProperties.ExceptionHandlingMode.NONE, + JdbcAdapterProperties.getExceptionHandlingMode(properties)); + JdbcAdapterProperties.checkPropertyConsistency(properties); } @Test public void testInvalidExceptionHandling() throws AdapterException { - Map<String, String> properties = getMinimumMandatory(); + final Map<String, String> properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_EXCEPTION_HANDLING, "IGNORE_ALL"); - thrown.expect(AdapterException.class); - thrown.expectMessage("You specified an invalid exception mode (IGNORE_ALL)"); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + this.thrown.expect(AdapterException.class); + this.thrown.expectMessage("You specified an invalid exception mode (IGNORE_ALL)"); + JdbcAdapterProperties.checkPropertyConsistency(properties); } @Test public void testNoneAsExceptionValue() throws AdapterException { - Map<String, String> properties = getMinimumMandatory(); + final Map<String, String> properties = getMinimumMandatory(); properties.put(JdbcAdapterProperties.PROP_EXCEPTION_HANDLING, "NONE"); - assertEquals(JdbcAdapterProperties.ExceptionHandlingMode.NONE, JdbcAdapterProperties.getExceptionHandlingMode(properties)); - JdbcAdapterProperties.checkPropertyConsistency(properties, JdbcAdapter.supportedDialects); + assertEquals(JdbcAdapterProperties.ExceptionHandlingMode.NONE, + JdbcAdapterProperties.getExceptionHandlingMode(properties)); + JdbcAdapterProperties.checkPropertyConsistency(properties); } }
diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/logging/CompactFormatterTest.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/logging/CompactFormatterTest.java new file mode 100644 index
000000000..667db2357 --- /dev/null +++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/logging/CompactFormatterTest.java @@ -0,0 +1,57 @@ +package com.exasol.logging; + +import static org.hamcrest.Matchers.matchesPattern; +import static org.junit.Assert.assertThat; + +import java.util.logging.Level; +import java.util.logging.LogRecord; + +import org.junit.Test; + +public class CompactFormatterTest { + + @Test + public void testFormat() { + final LogRecord record = new LogRecord(Level.FINEST, "Regular."); + record.setSourceClassName(this.getClass().getName()); + assertFormattedRecordMatchesPattern(record, + "\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3} FINEST \\[c.e.l.CompactFormatterTest\\] Regular.[\\n\\r]+"); + } + + private void assertFormattedRecordMatchesPattern(final LogRecord record, final String expectedPattern) { + final String formattedMessage = new CompactFormatter().format(record); + assertThat(formattedMessage, matchesPattern(expectedPattern)); + } + + @Test + public void testFormatForClassWithoutPackageName() { + final LogRecord record = new LogRecord(Level.INFO, "No package."); + record.setSourceClassName("TheClass"); + assertFormattedRecordMatchesPattern(record, + "\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3} INFO \\[TheClass\\] No package.[\\n\\r]+"); + } + + @Test + public void testFormatForClassWithAnEmptyPackagePart() { + final LogRecord record = new LogRecord(Level.INFO, "Empty package part."); + record.setSourceClassName("alpha.beta..TheClass"); + assertFormattedRecordMatchesPattern(record, + "\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3} INFO \\[a\\.b\\.\\.TheClass\\] Empty package part.[\\n\\r]+"); + } + + @Test + public void testFormatForEmptyClass() { + final LogRecord record = new LogRecord(Level.INFO, "Empty class."); + record.setSourceClassName(""); + assertFormattedRecordMatchesPattern(record, + "\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3} INFO Empty class.[\\n\\r]+"); + } + + @Test + public void testFormatForNullClass() { + final LogRecord record = new LogRecord(Level.INFO, "Null class."); + record.setSourceClassName(null); + assertFormattedRecordMatchesPattern(record, + "\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3} INFO Null class.[\\n\\r]+"); + } +}