diff --git a/jdbc-adapter/doc/deploying_the_virtual_schema_adapter.md b/jdbc-adapter/doc/deploying_the_virtual_schema_adapter.md
index 3cf72072c..20d6e33b4 100644
--- a/jdbc-adapter/doc/deploying_the_virtual_schema_adapter.md
+++ b/jdbc-adapter/doc/deploying_the_virtual_schema_adapter.md
@@ -23,7 +23,7 @@ cd virtual-schemas/jdbc-adapter/
mvn clean -DskipTests package
```
-The resulting fat JAR is stored in `virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-1.7.2.jar`.
+The resulting fat JAR is stored in `virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-1.8.1.jar`.
## Uploading the Adapter JAR Archive
@@ -42,8 +42,8 @@ Following steps are required to upload a file to a bucket:
1. Now upload the file into this bucket, e.g. using curl (adapt the hostname, BucketFS port, bucket name and bucket write password).
```bash
-curl -X PUT -T virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-1.7.2.jar \
- http://w:write-password@your.exasol.host.com:2580/bucket1/virtualschema-jdbc-adapter-dist-1.7.2.jar
+curl -X PUT -T virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-1.8.1.jar \
+ http://w:write-password@your.exasol.host.com:2580/bucket1/virtualschema-jdbc-adapter-dist-1.8.1.jar
```
See chapter 3.6.4. "The synchronous cluster file system BucketFS" in the EXASolution User Manual for more details about BucketFS.
@@ -75,7 +75,7 @@ CREATE JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS
// This will add the adapter jar to the classpath so that it can be used inside the adapter script
// Replace the names of the bucketfs and the bucket with the ones you used.
- %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.7.2.jar;
+ %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.8.1.jar;
// You have to add all files of the data source jdbc driver here (e.g. Hive JDBC driver files)
%jar /buckets/your-bucket-fs/your-bucket/name-of-data-source-jdbc-driver.jar;
diff --git a/jdbc-adapter/doc/developing_an_sql_dialect.md b/jdbc-adapter/doc/developing_an_sql_dialect.md
index 80013c3e4..dd6ea3976 100644
--- a/jdbc-adapter/doc/developing_an_sql_dialect.md
+++ b/jdbc-adapter/doc/developing_an_sql_dialect.md
@@ -292,7 +292,7 @@ CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter
// This will add the adapter jar to the classpath so that it can be used inside the adapter script
// Replace the names of the bucketfs and the bucket with the ones you used.
- %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.7.2.jar;
+ %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.8.1.jar;
// You have to add all files of the data source jdbc driver here (e.g. MySQL or Hive)
diff --git a/jdbc-adapter/doc/sql_dialects/db2.md b/jdbc-adapter/doc/sql_dialects/db2.md
index d87ac9ed0..01a170c34 100644
--- a/jdbc-adapter/doc/sql_dialects/db2.md
+++ b/jdbc-adapter/doc/sql_dialects/db2.md
@@ -46,7 +46,7 @@ CREATE or replace JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS
// This will add the adapter jar to the classpath so that it can be used inside the adapter script
// Replace the names of the bucketfs and the bucket with the ones you used.
- %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.7.2.jar;
+ %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.8.1.jar;
// DB2 Driver files
%jar /buckets/bucketfs1/bucket1/db2jcc4.jar;
diff --git a/jdbc-adapter/doc/sql_dialects/exasol.md b/jdbc-adapter/doc/sql_dialects/exasol.md
index e989bfb2f..2df5178fc 100644
--- a/jdbc-adapter/doc/sql_dialects/exasol.md
+++ b/jdbc-adapter/doc/sql_dialects/exasol.md
@@ -17,7 +17,7 @@ After uploading the adapter jar, the adapter script can be created as follows:
CREATE SCHEMA adapter;
CREATE JAVA ADAPTER SCRIPT adapter.jdbc_adapter AS
%scriptclass com.exasol.adapter.jdbc.JdbcAdapter;
- %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.7.2.jar;
+ %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.8.1.jar;
/
```
diff --git a/jdbc-adapter/doc/sql_dialects/hive.md b/jdbc-adapter/doc/sql_dialects/hive.md
index f7c1608cf..ad73ea34f 100644
--- a/jdbc-adapter/doc/sql_dialects/hive.md
+++ b/jdbc-adapter/doc/sql_dialects/hive.md
@@ -23,7 +23,7 @@ CREATE SCHEMA adapter;
CREATE JAVA ADAPTER SCRIPT jdbc_adapter AS
%scriptclass com.exasol.adapter.jdbc.JdbcAdapter;
- %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.7.2.jar;
+ %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.8.1.jar;
%jar /buckets/bucketfs1/bucket1/hive_metastore.jar;
%jar /buckets/bucketfs1/bucket1/hive_service.jar;
diff --git a/jdbc-adapter/doc/sql_dialects/impala.md b/jdbc-adapter/doc/sql_dialects/impala.md
index 3691ad3ce..d99ec1e2f 100644
--- a/jdbc-adapter/doc/sql_dialects/impala.md
+++ b/jdbc-adapter/doc/sql_dialects/impala.md
@@ -22,7 +22,7 @@ CREATE SCHEMA adapter;
CREATE JAVA ADAPTER SCRIPT jdbc_adapter AS
%scriptclass com.exasol.adapter.jdbc.JdbcAdapter;
- %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.7.2.jar;
+ %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.8.1.jar;
%jar /buckets/bucketfs1/bucket1/hive_metastore.jar;
%jar /buckets/bucketfs1/bucket1/hive_service.jar;
diff --git a/jdbc-adapter/doc/sql_dialects/oracle.md b/jdbc-adapter/doc/sql_dialects/oracle.md
index 528bcf94b..a635bbaf4 100644
--- a/jdbc-adapter/doc/sql_dialects/oracle.md
+++ b/jdbc-adapter/doc/sql_dialects/oracle.md
@@ -28,7 +28,7 @@ CREATE JAVA ADAPTER SCRIPT adapter.jdbc_oracle AS
// You need to replace `your-bucket-fs` and `your-bucket` to match the actual location
// of the adapter jar.
- %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.7.2.jar;
+ %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-1.8.1.jar;
// Add the oracle jdbc driver to the classpath
%jar /buckets/bucketfs1/bucket1/ojdbc7-12.1.0.2.jar
diff --git a/jdbc-adapter/doc/sql_dialects/postgresql.md b/jdbc-adapter/doc/sql_dialects/postgresql.md
index 3f45bb462..7f4a8e426 100644
--- a/jdbc-adapter/doc/sql_dialects/postgresql.md
+++ b/jdbc-adapter/doc/sql_dialects/postgresql.md
@@ -15,7 +15,7 @@ CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter
// This will add the adapter jar to the classpath so that it can be used inside the adapter script
// Replace the names of the bucketfs and the bucket with the ones you used.
- %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.7.2.jar;
+ %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.8.1.jar;
// You have to add all files of the data source jdbc driver here (e.g. MySQL or Hive)
%jar /buckets/bucketfs1/bucket1/postgresql-42.0.0.jar;
diff --git a/jdbc-adapter/doc/sql_dialects/redshift.md b/jdbc-adapter/doc/sql_dialects/redshift.md
index a88b0d864..7347609fb 100644
--- a/jdbc-adapter/doc/sql_dialects/redshift.md
+++ b/jdbc-adapter/doc/sql_dialects/redshift.md
@@ -21,7 +21,7 @@ CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter
// This will add the adapter jar to the classpath so that it can be used inside the adapter script
// Replace the names of the bucketfs and the bucket with the ones you used.
- %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.7.2.jar;
+ %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.8.1.jar;
// You have to add all files of the data source jdbc driver here (e.g. MySQL or Hive)
diff --git a/jdbc-adapter/doc/sql_dialects/sql_server.md b/jdbc-adapter/doc/sql_dialects/sql_server.md
index c836151e8..6c6bea6a6 100644
--- a/jdbc-adapter/doc/sql_dialects/sql_server.md
+++ b/jdbc-adapter/doc/sql_dialects/sql_server.md
@@ -17,7 +17,7 @@ CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.sql_server_jdbc_adapter
// This will add the adapter jar to the classpath so that it can be used inside the adapter script
// Replace the names of the bucketfs and the bucket with the ones you used.
- %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.7.2.jar;
+ %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.8.1.jar;
// You have to add all files of the data source jdbc driver here
%jar /buckets/bucketfs1/bucket1/jtds.jar;
diff --git a/jdbc-adapter/doc/sql_dialects/sybase.md b/jdbc-adapter/doc/sql_dialects/sybase.md
index fcba72d3a..007da99fd 100644
--- a/jdbc-adapter/doc/sql_dialects/sybase.md
+++ b/jdbc-adapter/doc/sql_dialects/sybase.md
@@ -18,7 +18,7 @@ CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter
AS
%scriptclass com.exasol.adapter.jdbc.JdbcAdapter;
- %jar /buckets/bucketfs1/virtualschema/virtualschema-jdbc-adapter-dist-1.7.2.jar;
+ %jar /buckets/bucketfs1/virtualschema/virtualschema-jdbc-adapter-dist-1.8.1.jar;
%jar /buckets/bucketfs1/virtualschema/jtds-1.3.1.jar;
/
```
diff --git a/jdbc-adapter/doc/sql_dialects/teradata.md b/jdbc-adapter/doc/sql_dialects/teradata.md
index 589b44c9a..04a4171e1 100644
--- a/jdbc-adapter/doc/sql_dialects/teradata.md
+++ b/jdbc-adapter/doc/sql_dialects/teradata.md
@@ -22,7 +22,7 @@ CREATE OR REPLACE JAVA ADAPTER SCRIPT adapter.jdbc_adapter
// This will add the adapter jar to the classpath so that it can be used inside the adapter script
// Replace the names of the bucketfs and the bucket with the ones you used.
- %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.7.2.jar;
+ %jar /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.8.1.jar;
// You have to add all files of the data source jdbc driver here (e.g. MySQL or Hive)
%jar /buckets/bucketfs1/bucket1/terajdbc4.jar;
diff --git a/jdbc-adapter/integration-test-data/integration-test-db2.yaml b/jdbc-adapter/integration-test-data/integration-test-db2.yaml
index 44b6acc8d..8e00a5e3b 100644
--- a/jdbc-adapter/integration-test-data/integration-test-db2.yaml
+++ b/jdbc-adapter/integration-test-data/integration-test-db2.yaml
@@ -5,7 +5,7 @@ general:
debugAddress: '192.168.0.12:3000' # Address which will be defined as DEBUG_ADDRESS in the virtual schemas
bucketFsUrl: http://exasol-host:2580/bucket1
bucketFsPassword: bucket1
- jdbcAdapterPath: /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.7.2.jar
+ jdbcAdapterPath: /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.8.1.jar
exasol:
runIntegrationTests: true
diff --git a/jdbc-adapter/integration-test-data/integration-test-sample.yaml b/jdbc-adapter/integration-test-data/integration-test-sample.yaml
index 27856742d..335df885a 100644
--- a/jdbc-adapter/integration-test-data/integration-test-sample.yaml
+++ b/jdbc-adapter/integration-test-data/integration-test-sample.yaml
@@ -5,7 +5,7 @@ general:
debugAddress: '192.168.0.12:3000' # Address which will be defined as DEBUG_ADDRESS in the virtual schemas
bucketFsUrl: http://exasol-host:2580/bucket1
bucketFsPassword: bucket1
- jdbcAdapterPath: /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.7.2.jar
+ jdbcAdapterPath: /buckets/bucketfs1/bucket1/virtualschema-jdbc-adapter-dist-1.8.1.jar
exasol:
runIntegrationTests: true
diff --git a/jdbc-adapter/integration-test-data/integration-test-travis.yaml b/jdbc-adapter/integration-test-data/integration-test-travis.yaml
index f390d0264..8df768d82 100644
--- a/jdbc-adapter/integration-test-data/integration-test-travis.yaml
+++ b/jdbc-adapter/integration-test-data/integration-test-travis.yaml
@@ -1,11 +1,12 @@
# Configuration file for integration tests run by `run_integration_tests.sh`
general:
- debug: false
- debugAddress: ''
- bucketFsUrl: http://127.0.0.1:6594/default
- bucketFsPassword: write
- jdbcAdapterPath: /buckets/bfsdefault/default/virtualschema-jdbc-adapter-dist-1.7.2.jar
+ debug: false
+ debugAddress: ''
+ bucketFsUrl: http://127.0.0.1:6594/default
+ bucketFsPassword: write
+ jdbcAdapterPath: /buckets/bfsdefault/default/virtualschema-jdbc-adapter-dist-1.8.1.jar
+ additionalJDBCDriverDir: /var/tmp/vstest/drivers/
exasol:
runIntegrationTests: true
@@ -24,3 +25,13 @@ postgresql:
dockerPortMapping: 45432:5432
dockerName: testpg
dockerConnectionString: jdbc:postgresql://DBHOST:5432/postgres
+
+oracle:
+ runIntegrationTests: false
+ jdbcDriverPath: /buckets/bfsdefault/default/drivers/jdbc/ORACLE/ojdbc7.jar;
+ connectionString: jdbc:oracle:thin:@localhost:1521/XE
+ user: system
+ password: myorapwd
+ dockerName: myora
+ dockerConnectionString: jdbc:oracle:thin:@DBHOST:1521/XE
+ instantclientDir: /var/tmp/vstest/instantclient/
diff --git a/jdbc-adapter/integration-test-data/oracle-testdata.sql b/jdbc-adapter/integration-test-data/oracle-testdata.sql
index 7024a4f62..1c8455018 100644
--- a/jdbc-adapter/integration-test-data/oracle-testdata.sql
+++ b/jdbc-adapter/integration-test-data/oracle-testdata.sql
@@ -91,3 +91,7 @@ INSERT INTO LOADER.TYPE_TEST (c3, c5, c7, c_binfloat, c17) VALUES (
-- c_float126
-- c_long
);
+
+create table ts_t(a timestamp, b timestamp with local time zone, c timestamp with time zone);
+insert into ts_t values (timestamp '2018-01-01 11:00:00', timestamp '2018-01-01 11:00:00 +01:00', timestamp '2018-01-01 11:00:00 +01:00');
+
diff --git a/jdbc-adapter/integration-test-data/run_integration_tests.sh b/jdbc-adapter/integration-test-data/run_integration_tests.sh
index 0d93a4f12..77d50f6fa 100755
--- a/jdbc-adapter/integration-test-data/run_integration_tests.sh
+++ b/jdbc-adapter/integration-test-data/run_integration_tests.sh
@@ -41,12 +41,31 @@ deploy_jdbc_drivers() {
bucket_fs_url=$(awk '/bucketFsUrl/{print $NF}' $config)
bfs_url_no_http=$(echo $bucket_fs_url | awk -F/ '{for(i=3;i<=NF;++i)printf "%s/",$i}')
bucket_fs_pwd=$(awk '/bucketFsPassword/{print $NF}' $config)
- bucket_fs_upload_url=http://w:$bucket_fs_pwd@$bfs_url_no_http/drivers/jdbc/
+ bucket_fs_upload_url=http://w:$bucket_fs_pwd@$bfs_url_no_http/drivers/
+ #upload drivers that are part of the repository
for d in $jdbc_driver_dir/*
do
db_driver=$(basename $d)
- find $jdbc_driver_dir/$db_driver -type f -exec curl -X PUT -T {} $bucket_fs_upload_url/$db_driver/ \;
+ find $jdbc_driver_dir/$db_driver -type f -exec curl -X PUT -T {} $bucket_fs_upload_url/jdbc/$db_driver/ \;
done
+ #upload additional (local) drivers
+ additional_jdbc_driver_dir=$(awk '/additionalJDBCDriverDir/{print $NF}' $config)
+ if [ -d "$additional_jdbc_driver_dir" ]; then
+ for d in $additional_jdbc_driver_dir/*
+ do
+ db_driver=$(basename $d)
+ find $additional_jdbc_driver_dir/$db_driver -type f -exec curl -X PUT -T {} $bucket_fs_upload_url/jdbc/$db_driver/ \;
+ done
+ fi
+ #deploy oracle instantclient
+ instantclient_dir=$(awk '/instantclientDir/{print $NF}' $config)
+ instantclient_path=$instantclient_dir/instantclient-basic-linux.x64-12.1.0.2.0.zip
+ if [ -f $instantclient_path ]; then
+ curl -X PUT -T $instantclient_path $bucket_fs_upload_url/oracle/
+ fi
+ #workaround for https://github.com/exasol/docker-db/issues/26
+ docker exec -d exasoldb mkdir -p /exa/data/bucketfs/default/drivers
+ docker exec -d exasoldb ln -s /exa/data/bucketfs/bfsdefault/.dest/default/drivers/jdbc /exa/data/bucketfs/default/drivers/jdbc
}
replace_hosts_with_ips_in_config() {
@@ -55,6 +74,7 @@ replace_hosts_with_ips_in_config() {
start_remote_dbs() {
$docker_helper --run $config
+ sleep 10
}
cleanup_remote_dbs() {
diff --git a/jdbc-adapter/integration-test-data/socker.py b/jdbc-adapter/integration-test-data/socker.py
index f669a72bb..a8c86c7ba 100755
--- a/jdbc-adapter/integration-test-data/socker.py
+++ b/jdbc-adapter/integration-test-data/socker.py
@@ -7,21 +7,31 @@
def docker_run(config):
for db, properties in config.items():
- if 'dockerImage' in properties:
- cmd = "docker run -d -p {port_map} --name {name} {image}:{version}".format(
- port_map = properties['dockerPortMapping'],
- name = properties['dockerName'],
- image = properties['dockerImage'],
- version = properties['dockerImageVersion'])
- print(cmd)
- run(cmd)
+ if properties.get('runIntegrationTests', False):
+ if 'dockerImage' in properties:
+ cmd = "docker run -d -p {port_map} --name {name} {image}:{version}".format(
+ port_map = properties['dockerPortMapping'],
+ name = properties['dockerName'],
+ image = properties['dockerImage'],
+ version = properties['dockerImageVersion'])
+ print(cmd)
+ run(cmd)
+ elif 'dockerName' in properties:
+ cmd = "docker start {name}".format(name = properties['dockerName'])
+ print(cmd)
+ run(cmd)
def docker_rm(config):
for db, properties in config.items():
- if 'dockerImage' in properties:
- cmd = "docker rm -f {name}".format(name = properties['dockerName'])
- print(cmd)
- run(cmd)
+ if properties.get('runIntegrationTests', False):
+ if 'dockerImage' in properties:
+ cmd = "docker rm -f {name}".format(name = properties['dockerName'])
+ print(cmd)
+ run(cmd)
+ elif 'dockerName' in properties:
+ cmd = "docker stop {name}".format(name = properties['dockerName'])
+ print(cmd)
+ run(cmd)
def run(cmd):
try:
@@ -45,11 +55,12 @@ def run(cmd):
def replace_hosts_in(config):
for db, properties in config.items():
- if 'dockerImage' in properties:
- container_ip = get_ip_for(properties['dockerName'])
- conn_string_with_ip = properties['dockerConnectionString'].replace(
- 'DBHOST',container_ip)
- properties['dockerConnectionString'] = conn_string_with_ip
+ if properties.get('runIntegrationTests', False):
+ if 'dockerName' in properties:
+ container_ip = get_ip_for(properties['dockerName'])
+ conn_string_with_ip = properties['dockerConnectionString'].replace(
+ 'DBHOST',container_ip)
+ properties['dockerConnectionString'] = conn_string_with_ip
return yaml.dump(config, default_flow_style=False)
def get_ip_for(docker_name):
diff --git a/jdbc-adapter/local/integration-test-config.yaml b/jdbc-adapter/local/integration-test-config.yaml
index bd54da0eb..a9b04ac05 100644
--- a/jdbc-adapter/local/integration-test-config.yaml
+++ b/jdbc-adapter/local/integration-test-config.yaml
@@ -5,7 +5,7 @@ general:
debugAddress: '10.44.1.228:3000' # Address which will be defined as DEBUG_ADDRESS in the virtual schemas
bucketFsUrl: http://localhost:2580/jars
bucketFsPassword: public
- jdbcAdapterPath: /buckets/bfsdefault/jars/virtualschema-jdbc-adapter-dist-1.7.2.jar
+ jdbcAdapterPath: /buckets/bfsdefault/jars/virtualschema-jdbc-adapter-dist-1.8.1.jar
exasol:
runIntegrationTests: true
diff --git a/jdbc-adapter/pom.xml b/jdbc-adapter/pom.xml
index 30f39cc93..2df4e8677 100644
--- a/jdbc-adapter/pom.xml
+++ b/jdbc-adapter/pom.xml
@@ -10,7 +10,7 @@
virtualschema-jdbc-adapter-dist
- 1.7.2
+ 1.8.1
UTF-8
UTF-8
1.8
diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlGenerationVisitor.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlGenerationVisitor.java
index 0250fe156..653a35c21 100644
--- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlGenerationVisitor.java
+++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/SqlGenerationVisitor.java
@@ -82,6 +82,10 @@ public SqlGenerationVisitor(final SqlDialect dialect, final SqlGenerationContext
checkDialectAliases();
}
+ protected SqlDialect getDialect() {
+ return dialect;
+ }
+
protected void checkDialectAliases() {
// Check if dialect provided invalid aliases, which would never be applied.
for (final ScalarFunction function : this.dialect.getScalarFunctionAliases().keySet()) {
diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/OracleSqlDialect.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/OracleSqlDialect.java
index 168b4b3e6..718d9cddf 100644
--- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/OracleSqlDialect.java
+++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/OracleSqlDialect.java
@@ -2,6 +2,7 @@
import com.exasol.adapter.capabilities.Capabilities;
import com.exasol.adapter.dialects.*;
+import com.exasol.adapter.jdbc.ConnectionInformation;
import com.exasol.adapter.metadata.DataType;
import com.exasol.adapter.sql.AggregateFunction;
import com.exasol.adapter.sql.ScalarFunction;
@@ -203,4 +204,21 @@ public String getStringLiteral(final String value) {
return "'" + value.replace("'", "''") + "'";
}
+ @Override
+ public String generatePushdownSql(final ConnectionInformation connectionInformation, final String columnDescription, final String pushdownSql) {
+ final ImportType importType = getContext().getImportType();
+ if (importType == ImportType.JDBC) {
+ return super.generatePushdownSql(connectionInformation, columnDescription, pushdownSql);
+ } else {
+ if (importType != ImportType.ORA) {
+ throw new AssertionError("OracleSqlDialect has wrong ImportType");
+ }
+ final StringBuilder oracleImportQuery = new StringBuilder();
+ oracleImportQuery.append("IMPORT FROM ORA AT ").append(connectionInformation.getOraConnectionName()).append(" ");
+ oracleImportQuery.append(connectionInformation.getCredentials());
+ oracleImportQuery.append(" STATEMENT '").append(pushdownSql.replace("'", "''")).append("'");
+ return oracleImportQuery.toString();
+ }
+ }
+
}
diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/OracleSqlGenerationVisitor.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/OracleSqlGenerationVisitor.java
index 779ddc3eb..565baae2b 100644
--- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/OracleSqlGenerationVisitor.java
+++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/impl/OracleSqlGenerationVisitor.java
@@ -1,10 +1,7 @@
package com.exasol.adapter.dialects.impl;
import com.exasol.adapter.AdapterException;
-import com.exasol.adapter.dialects.SqlDialect;
-import com.exasol.adapter.dialects.SqlGenerationContext;
-import com.exasol.adapter.dialects.SqlGenerationHelper;
-import com.exasol.adapter.dialects.SqlGenerationVisitor;
+import com.exasol.adapter.dialects.*;
import com.exasol.adapter.jdbc.ColumnAdapterNotes;
import com.exasol.adapter.metadata.ColumnMetadata;
import com.exasol.adapter.metadata.DataType;
@@ -511,8 +508,9 @@ private String getColumnProjectionString(SqlColumn column, String projString) th
if (!isDirectlyInSelectList) {
return projString;
}
- String typeName = ColumnAdapterNotes.deserialize(column.getMetadata().getAdapterNotes(), column.getMetadata().getName()).getTypeName();
- if (typeName.startsWith("TIMESTAMP") ||
+ final AbstractSqlDialect dialect = (AbstractSqlDialect) getDialect();
+ final String typeName = ColumnAdapterNotes.deserialize(column.getMetadata().getAdapterNotes(), column.getMetadata().getName()).getTypeName();
+ if ((typeName.startsWith("TIMESTAMP") && dialect.getContext().getImportType() == ImportType.JDBC) ||
typeName.startsWith("INTERVAL") ||
typeName.equals("BINARY_FLOAT") ||
typeName.equals("BINARY_DOUBLE") ||
@@ -540,7 +538,11 @@ private boolean nodeRequiresCast(SqlNode node) throws AdapterException {
if (typeName.equals("NUMBER") && column.getMetadata().getType().getExaDataType() == DataType.ExaDataType.VARCHAR) {
return true;
} else {
- return TYPE_NAMES_REQUIRING_CAST.contains(typeName);
+ for (final String typeRequiringCast : TYPE_NAMES_REQUIRING_CAST) {
+ if (typeName.startsWith(typeRequiringCast)) {
+ return true;
+ }
+ }
}
}
return false;
diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/JdbcAdapter.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/JdbcAdapter.java
index b8b776ffc..5620fab41 100644
--- a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/JdbcAdapter.java
+++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/JdbcAdapter.java
@@ -325,9 +325,9 @@ private static String createColumnDescription(final ExaMetadata exaMeta, final S
try {
final Connection connection = establishConnection(connectionInformation);
logger.fine(() -> "createColumnDescription: " + pushdownQuery);
- ResultSetMetaData metadata = null;
+ DataType[] internalTypes = null;
try (final PreparedStatement ps = connection.prepareStatement(pushdownQuery)) {
- metadata = ps.getMetaData();
+ ResultSetMetaData metadata = ps.getMetaData();
if (metadata == null) {
ps.execute();
metadata = ps.getMetaData();
@@ -336,33 +336,41 @@ private static String createColumnDescription(final ExaMetadata exaMeta, final S
"Unable to read source metadata trying to create description for " + "source columns.");
}
}
- }
- final DataType[] internalTypes = new DataType[metadata.getColumnCount()];
- for (int col = 1; col <= metadata.getColumnCount(); ++col) {
- final int jdbcType = metadata.getColumnType(col);
- final int jdbcPrecisions = metadata.getPrecision(col);
- final int jdbcScales = metadata.getScale(col);
- final JdbcTypeDescription description = new JdbcTypeDescription(jdbcType, jdbcScales, jdbcPrecisions, 0,
- metadata.getColumnTypeName(col));
- internalTypes[col - 1] = dialect.mapJdbcType(description);
- }
- final StringBuffer buffer = new StringBuffer();
- buffer.append('(');
- for (int i = 0; i < internalTypes.length; i++) {
- buffer.append("c");
- buffer.append(i);
- buffer.append(" ");
- buffer.append(internalTypes[i].toString());
- if (i < (internalTypes.length - 1)) {
- buffer.append(",");
+
+ internalTypes = new DataType[metadata.getColumnCount()];
+ for (int col = 1; col <= metadata.getColumnCount(); ++col) {
+ final JdbcTypeDescription description = getJdbcTypeDescription(metadata, col);
+ internalTypes[col - 1] = dialect.mapJdbcType(description);
}
}
-
- buffer.append(')');
- return buffer.toString();
+ return buildColumnDescriptionFrom(internalTypes);
} catch (final SQLException e) {
- throw new RuntimeException("Cannot resolve column types.", e);
+ throw new RetrieveMetadataException("Cannot resolve column types. " + e.getMessage(), e);
+ }
+ }
+
+ protected static JdbcTypeDescription getJdbcTypeDescription(ResultSetMetaData metadata, int col) throws SQLException {
+ final int jdbcType = metadata.getColumnType(col);
+ final int jdbcPrecisions = metadata.getPrecision(col);
+ final int jdbcScales = metadata.getScale(col);
+ return new JdbcTypeDescription(jdbcType, jdbcScales, jdbcPrecisions, 0,
+ metadata.getColumnTypeName(col));
+ }
+
+ protected static String buildColumnDescriptionFrom(DataType[] internalTypes) {
+ final StringBuilder columnDescription = new StringBuilder();
+ columnDescription.append('(');
+ for (int i = 0; i < internalTypes.length; i++) {
+ columnDescription.append("c");
+ columnDescription.append(i);
+ columnDescription.append(" ");
+ columnDescription.append(internalTypes[i].toString());
+ if (i < (internalTypes.length - 1)) {
+ columnDescription.append(",");
+ }
}
+ columnDescription.append(')');
+ return columnDescription.toString();
}
private static Connection establishConnection(final ExaConnectionInformation connection) throws SQLException {
diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/RetrieveMetadataException.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/RetrieveMetadataException.java
new file mode 100644
index 000000000..aeb991a3b
--- /dev/null
+++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/jdbc/RetrieveMetadataException.java
@@ -0,0 +1,18 @@
+package com.exasol.adapter.jdbc;
+
+/**
+ * This class provides runtime exceptions for metadata retrieval.
+ */
+public class RetrieveMetadataException extends RuntimeException {
+ private static final long serialVersionUID = -2971883064471515520L;
+
+ /**
+ * Create a new instance of the {@link RetrieveMetadataException}
+ *
+ * @param message message to be displayed
+ * @param cause root cause
+ */
+ public RetrieveMetadataException(final String message, final Throwable cause) {
+ super(message, cause);
+ }
+}
diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/IntegrationTestConfig.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/IntegrationTestConfig.java
index d9a8ca7bd..9974eb165 100644
--- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/IntegrationTestConfig.java
+++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/IntegrationTestConfig.java
@@ -134,14 +134,26 @@ public String getOracleJdbcDriverPath() {
return getProperty("oracle", "jdbcDriverPath");
}
+ public String getOracleDockerJdbcConnectionString() {
+ return getProperty("oracle", "dockerConnectionString");
+ }
+
public String getOracleJdbcConnectionString() {
return getProperty("oracle", "connectionString");
}
public URI getOracleConnectionInformation() {
- Matcher matcher = jdbcConnectionStringRegEx.matcher(getOracleJdbcConnectionString());
+ return getURIFor(getOracleJdbcConnectionString());
+ }
+
+ public URI getOracleDockerConnectionInformation() {
+ return getURIFor(getOracleDockerJdbcConnectionString());
+ }
+
+ public URI getURIFor(final String connectionString) {
+ Matcher matcher = jdbcConnectionStringRegEx.matcher(connectionString);
if (!matcher.find()) {
- throw new RuntimeException("oracle.connectionString '" + getOracleJdbcConnectionString() + "' could not be parsed");
+ throw new RuntimeException("oracle.connectionString '" + connectionString + "' could not be parsed");
}
String host = matcher.group(1);
@@ -158,7 +170,7 @@ public URI getOracleConnectionInformation() {
try {
return new URI(null, null, host, port, path, null, null);
} catch (URISyntaxException e) {
- throw new RuntimeException("oracle.connectionString '" + getOracleJdbcConnectionString() + "' could not be parsed: " + e.getMessage());
+ throw new RuntimeException("oracle.connectionString '" + connectionString + "' could not be parsed: " + e.getMessage());
}
}
diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/OracleSqlDialectIT.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/OracleSqlDialectIT.java
index 1cd7efae3..8b13f7d75 100644
--- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/OracleSqlDialectIT.java
+++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/dialects/impl/OracleSqlDialectIT.java
@@ -9,6 +9,7 @@
import java.sql.Date;
import java.sql.ResultSet;
import java.sql.SQLException;
+import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -29,18 +30,19 @@
public class OracleSqlDialectIT extends AbstractIntegrationTest {
private static final String VIRTUAL_SCHEMA_JDBC = "VS_ORACLE_JDBC";
- private static final String VIRTUAL_SCHEMA_IMPORT = "VS_ORACLE_IMPORT";
- private static final String ORACLE_SCHEMA = "LOADER";
+ private static final String VIRTUAL_SCHEMA_ORA = "VS_ORACLE_IMPORT";
+ private static final String ORACLE_SCHEMA = "SYSTEM";
private static final String TEST_TABLE = "TYPE_TEST";
private static final String EXA_TABLE_JDBC = VIRTUAL_SCHEMA_JDBC + "." + TEST_TABLE;
- private static final String EXA_TABLE_IMPORT = VIRTUAL_SCHEMA_IMPORT + "." + TEST_TABLE;
- private static final String ORA_TABLE = ORACLE_SCHEMA + "." + TEST_TABLE;
+ private static final String EXA_TABLE_ORA = VIRTUAL_SCHEMA_ORA + "." + TEST_TABLE;
+ private static final String ORA_TABLE = ORACLE_SCHEMA + "\".\"" + TEST_TABLE;
private static final boolean IS_LOCAL = false;
// Use getColumnTypes() to access this map
- private Map<String, String> columnTypes;
+ private Map<String, String> columnTypesJDBC;
+ private Map<String, String> columnTypesORA;
@BeforeClass
public static void beforeMethod() throws FileNotFoundException, SQLException, ClassNotFoundException {
@@ -54,15 +56,15 @@ public static void beforeMethod() throws FileNotFoundException, SQLException, Cl
createVirtualSchema(VIRTUAL_SCHEMA_JDBC, OracleSqlDialect.getPublicName(), "", ORACLE_SCHEMA, "",
getConfig().getOracleUser(), getConfig().getOraclePassword(),
// "ADAPTER.JDBC_ORACLE_DEBUG",
- "ADAPTER.JDBC_ORACLE", getConfig().getOracleJdbcConnectionString(), IS_LOCAL,
- getConfig().debugAddress(), TEST_TABLE, null,"");
+ "ADAPTER.JDBC_ADAPTER", getConfig().getOracleDockerJdbcConnectionString(), IS_LOCAL,
+ getConfig().debugAddress(), "", null,"");
// create IMPORT FROM ORA virtual schema
- createVirtualSchema(VIRTUAL_SCHEMA_IMPORT, OracleSqlDialect.getPublicName(), "", ORACLE_SCHEMA, "",
+ createVirtualSchema(VIRTUAL_SCHEMA_ORA, OracleSqlDialect.getPublicName(), "", ORACLE_SCHEMA, "",
getConfig().getOracleUser(), getConfig().getOraclePassword(),
// "ADAPTER.JDBC_ORACLE_DEBUG",
- "ADAPTER.JDBC_ORACLE", getConfig().getOracleJdbcConnectionString(), IS_LOCAL,
- getConfig().debugAddress(), TEST_TABLE, "IMPORT_FROM_ORA='true' ORA_CONNECTION_NAME='CONN_ORACLE'","");
+ "ADAPTER.JDBC_ADAPTER", getConfig().getOracleDockerJdbcConnectionString(), IS_LOCAL,
+ getConfig().debugAddress(), "", "IMPORT_FROM_ORA='true' ORA_CONNECTION_NAME='CONN_ORACLE'","");
}
private static void createOracleJDBCAdapter() throws SQLException, FileNotFoundException {
@@ -75,12 +77,24 @@ private static void createOracleJDBCAdapter() throws SQLException, FileNotFoundE
}
private String getColumnType(final String column) throws SQLException {
- if (this.columnTypes == null) {
- this.columnTypes = getColumnTypesOfTable(EXA_TABLE_JDBC);
+ return getColumnTypeJDBC(column);
+ }
+
+ private String getColumnTypeJDBC(final String column) throws SQLException {
+ if (this.columnTypesJDBC == null) {
+ this.columnTypesJDBC = getColumnTypesOfTable(EXA_TABLE_JDBC);
+ }
+ return this.columnTypesJDBC.get(column.toUpperCase());
+ }
+
+ private String getColumnTypeORA(final String column) throws SQLException {
+ if (this.columnTypesORA == null) {
+ this.columnTypesORA = getColumnTypesOfTable(EXA_TABLE_ORA);
}
- return this.columnTypes.get(column.toUpperCase());
+ return this.columnTypesORA.get(column.toUpperCase());
}
+
private Map<String, String> getColumnTypesOfTable(final String table) throws SQLException {
final Map<String, String> map = new HashMap<>();
final ResultSet result = executeQuery("DESCRIBE " + table);
@@ -91,7 +105,7 @@ private Map getColumnTypesOfTable(final String table) throws SQL
}
private static void createOracleConnection() throws SQLException, FileNotFoundException {
- final URI conn = getConfig().getOracleConnectionInformation();
+ final URI conn = getConfig().getOracleDockerConnectionInformation();
final String connectionString = String.format(
"(DESCRIPTION=(ADDRESS_LIST=(ADDRESS=(PROTOCOL=TCP)(HOST = %s)(PORT = %d)))(CONNECT_DATA = (SERVICE_NAME = %s)))",
conn.getHost(), conn.getPort(), conn.getPath().substring(1));
@@ -100,14 +114,30 @@ private static void createOracleConnection() throws SQLException, FileNotFoundEx
private List<ResultSet> runQuery(final String query) throws SQLException {
final ArrayList<ResultSet> result = new ArrayList<>();
- result.add(executeQuery(String.format(query, EXA_TABLE_JDBC)));
- result.add(executeQuery(String.format(query, EXA_TABLE_IMPORT)));
+ result.add(runQueryJDBC(query));
+ result.add(runQueryORA(query));
return result;
}
+ private ResultSet runQueryJDBC(final String query) throws SQLException {
+ return executeQuery(String.format(query, EXA_TABLE_JDBC));
+ }
+
+ private ResultSet runQueryORA(final String query) throws SQLException {
+ return executeQuery(String.format(query, EXA_TABLE_ORA));
+ }
+
private void runMatchSingleRowExplain(final String query, final String expectedExplain) throws SQLException {
+ runMatchSingleRowExplainJDBC(query, expectedExplain);
+ runMatchSingleRowExplainORA(query, expectedExplain);
+ }
+
+ private void runMatchSingleRowExplainJDBC(final String query, final String expectedExplain) throws SQLException {
matchSingleRowExplain(String.format(query, EXA_TABLE_JDBC), expectedExplain);
- matchSingleRowExplain(String.format(query, EXA_TABLE_IMPORT), expectedExplain);
+ }
+
+ private void runMatchSingleRowExplainORA(final String query, final String expectedExplain) throws SQLException {
+ matchSingleRowExplain(String.format(query, EXA_TABLE_ORA), expectedExplain);
}
private void matchNextRowDecimal(final ResultSet result, final String... expectedStrings) throws SQLException {
@@ -130,7 +160,7 @@ private void matchNextRowDecimal(final ResultSet result, final String... expecte
@Test
public void testColumnTypeEquivalence() throws SQLException {
final Map<String, String> jdbcColumnTypes = getColumnTypesOfTable(EXA_TABLE_JDBC);
- final Map<String, String> importColumnTypes = getColumnTypesOfTable(EXA_TABLE_IMPORT);
+ final Map<String, String> importColumnTypes = getColumnTypesOfTable(EXA_TABLE_ORA);
for (final Map.Entry<String, String> entry : jdbcColumnTypes.entrySet()) {
assertEquals(entry.getValue(), importColumnTypes.get(entry.getKey()));
@@ -139,21 +169,23 @@ public void testColumnTypeEquivalence() throws SQLException {
@Test
public void testSelectExpression() throws SQLException {
- final String query = "SELECT C7 + 1 FROM %s";
+ final String query = "SELECT C7 + 1 FROM %s ORDER BY 1";
for (final ResultSet result : runQuery(query)) {
matchNextRowDecimal(result, "12346.12345");
matchNextRowDecimal(result, "12356.12345");
}
- runMatchSingleRowExplain(query, "SELECT CAST((\"C7\" + 1) AS FLOAT) FROM \"" + ORA_TABLE + "\"");
+ runMatchSingleRowExplain(query, "SELECT CAST((\"C7\" + 1) AS FLOAT) FROM \"" + ORA_TABLE + "\" ORDER BY (\"C7\" + 1)");
}
@Test
public void testFilterExpression() throws SQLException {
final String query = "SELECT C7 FROM %s WHERE C7 > 12346";
- for (final ResultSet result : runQuery(query)) {
- matchNextRow(result, "12355.12345");
- }
- matchSingleRowExplain(query, "SELECT \"C7\" FROM \"" + ORA_TABLE + "\" WHERE 12346 < \"C7\"");
+ ResultSet resultJDBC = runQueryJDBC(query);
+ matchNextRow(resultJDBC, "12355.12345");
+ ResultSet resultORA = runQueryORA(query);
+ matchNextRow(resultORA, "01.2355123450E4");
+
+ runMatchSingleRowExplain(query, "SELECT \"C7\" FROM \"" + ORA_TABLE + "\" WHERE 12346 < \"C7\"");
}
@Test
@@ -167,24 +199,24 @@ public void testAggregateSingleGroup() throws SQLException {
@Test
public void testAggregateGroupByColumn() throws SQLException {
- final String query = "SELECT C5, min(C7) FROM %s GROUP BY C5";
+ final String query = "SELECT C5, min(C7) FROM %s GROUP BY C5 ORDER BY 1 DESC";
for (final ResultSet result : runQuery(query)) {
matchNextRowDecimal(result, "123456789012345678901234567890123456", "12345.12345");
matchNextRowDecimal(result, "1234567890.123456789", "12355.12345");
}
runMatchSingleRowExplain(query,
- "SELECT TO_CHAR(\"C5\"), CAST(MIN(\"C7\") AS FLOAT) FROM \"" + ORA_TABLE + "\" GROUP BY \"C5\"");
+ "SELECT TO_CHAR(\"C5\"), CAST(MIN(\"C7\") AS FLOAT) FROM \"" + ORA_TABLE + "\" GROUP BY \"C5\" ORDER BY \"C5\" DESC");
}
@Test
public void testAggregateGroupByExpression() throws SQLException {
- final String query = "SELECT C5 + 1, min(C7) FROM %s GROUP BY C5 + 1";
+ final String query = "SELECT C5 + 1, min(C7) FROM %s GROUP BY C5 + 1 ORDER BY 1 DESC";
for (final ResultSet result : runQuery(query)) {
matchNextRowDecimal(result, "123456789012345678901234567890123457", "12345.12345");
matchNextRowDecimal(result, "1234567891.123456789", "12355.12345");
}
runMatchSingleRowExplain(query,
- "SELECT CAST((\"C5\" + 1) AS FLOAT), CAST(MIN(\"C7\") AS FLOAT) FROM \"" + ORA_TABLE + "\" GROUP BY (\"C5\" + 1)");
+ "SELECT CAST((\"C5\" + 1) AS FLOAT), CAST(MIN(\"C7\") AS FLOAT) FROM \"" + ORA_TABLE + "\" GROUP BY (\"C5\" + 1) ORDER BY (\"C5\" + 1) DESC");
}
@Test
@@ -195,7 +227,7 @@ public void testAggregateGroupByTuple() throws SQLException {
"12345.12345");
matchNextRowDecimal(result, "123456789012345678901234567890123456", "1234567890.123456789", "12355.12345");
}
- runMatchSingleRowExplain(query, "SELECT \"C\"_NUMBER36, TO_CHAR(\"C5\"), CAST(MIN(\"C7\") AS FLOAT) FROM \"" + ORA_TABLE
+ runMatchSingleRowExplain(query, "SELECT \"C_NUMBER36\", TO_CHAR(\"C5\"), CAST(MIN(\"C7\") AS FLOAT) FROM \"" + ORA_TABLE
+ "\" GROUP BY \"C5\", \"C_NUMBER36\" ORDER BY \"C5\" DESC");
}
@@ -222,32 +254,40 @@ public void testOrderByColumn() throws SQLException {
@Test
public void testOrderByExpression() throws SQLException {
final String query = "SELECT C7 FROM %s ORDER BY ABS(C7) DESC NULLS FIRST";
- for (final ResultSet result : runQuery(query)) {
- matchNextRow(result, "12355.12345");
- matchNextRow(result, "12345.12345");
- }
- matchSingleRowExplain(query, "SELECT \"C7\" FROM \"" + ORA_TABLE + "\" ORDER BY ABS(\"C7\") DESC");
+ ResultSet resultJDBC = runQueryJDBC(query);
+ matchNextRow(resultJDBC, "12355.12345");
+ matchNextRow(resultJDBC, "12345.12345");
+ ResultSet resultORA = runQueryORA(query);
+ matchNextRow(resultORA, "01.2355123450E4");
+ matchNextRow(resultORA, "01.2345123450E4");
+
+ runMatchSingleRowExplain(query, "SELECT \"C7\" FROM \"" + ORA_TABLE + "\" ORDER BY ABS(\"C7\") DESC");
}
@Test
public void testLimit() throws SQLException {
final String query = "SELECT C7 FROM %s ORDER BY C7 LIMIT 2";
- for (final ResultSet result : runQuery(query)) {
- matchNextRow(result, "12345.12345");
- matchNextRow(result, "12355.12345");
- }
- matchSingleRowExplain(query, "SELECT LIMIT_SUBSELECT.* FROM ( SELECT \"C7\" FROM \"" + ORA_TABLE
+ ResultSet resultJDBC = runQueryJDBC(query);
+ matchNextRow(resultJDBC, "12345.12345");
+ matchNextRow(resultJDBC, "12355.12345");
+ ResultSet resultORA = runQueryORA(query);
+ matchNextRow(resultORA, "01.2345123450E4");
+ matchNextRow(resultORA, "01.2355123450E4");
+
+ runMatchSingleRowExplain(query, "SELECT LIMIT_SUBSELECT.* FROM ( SELECT \"C7\" FROM \"" + ORA_TABLE
+ "\" ORDER BY \"C7\" ) LIMIT_SUBSELECT WHERE ROWNUM <= 2");
}
@Test
public void testLimitOffset() throws SQLException {
final String query = "SELECT C7 FROM %s ORDER BY C7 LIMIT 1 OFFSET 1";
- for (final ResultSet result : runQuery(query)) {
- matchNextRow(result, "12355.12345");
- }
- matchSingleRowExplain(query,
- "SELECT \"c0\" FROM ( SELECT LIMIT_SUBSELECT.*, ROWNUM ROWNUM_SUB FROM ( SELECT \"C7\" AS c0 FROM \"" + ORA_TABLE
+ ResultSet resultJDBC = runQueryJDBC(query);
+ matchNextRow(resultJDBC, "12355.12345");
+ ResultSet resultORA = runQueryORA(query);
+ matchNextRow(resultORA, "01.2355123450E4");
+
+ runMatchSingleRowExplain(query,
+ "SELECT c0 FROM ( SELECT LIMIT_SUBSELECT.*, ROWNUM ROWNUM_SUB FROM ( SELECT \"C7\" AS c0 FROM \"" + ORA_TABLE
+ "\" ORDER BY \"C7\" ) LIMIT_SUBSELECT WHERE ROWNUM <= 2 ) WHERE ROWNUM_SUB > 1");
}
@@ -266,7 +306,7 @@ public void testNChar() throws SQLException {
for (final ResultSet result : runQuery(query)) {
matchNextRow(result, "bbbbbbbbbbbbbbbbbbbb ");
}
- assertEquals("CHAR(50) UTF8", getColumnType("C2"));
+ assertEquals("VARCHAR(2000000) UTF8", getColumnType("C2"));
}
@Test
@@ -284,7 +324,7 @@ public void testNVarchar() throws SQLException {
for (final ResultSet result : runQuery(query)) {
matchNextRow(result, "dddddddddddddddddddd");
}
- assertEquals("VARCHAR(50) UTF8", getColumnType("C4"));
+ assertEquals("VARCHAR(2000000) UTF8", getColumnType("C4"));
}
@Test
@@ -405,58 +445,79 @@ public void testDate() throws SQLException {
for (final ResultSet result : runQuery(query)) {
matchNextRow(result, Date.valueOf("2016-08-19"));
}
- runMatchSingleRowExplain(query, "SELECT \"C10\" FROM " + ORA_TABLE + "\"");
+ runMatchSingleRowExplain(query, "SELECT \"C10\" FROM \"" + ORA_TABLE + "\"");
assertEquals("TIMESTAMP", getColumnType("C10"));
}
@Test
public void testTimestamp3() throws SQLException {
final String query = "SELECT C11 FROM %s";
- for (final ResultSet result : runQuery(query)) {
- matchNextRow(result, "11-MAR-13 05.30.15.123 PM");
- }
- runMatchSingleRowExplain(query, "SELECT TO_CHAR(\"C11\") FROM \"" + ORA_TABLE + "\"");
- assertEquals("TIMESTAMP", getColumnType("C11"));
+ ResultSet resultJDBC = runQueryJDBC(query);
+ matchNextRow(resultJDBC, "11-MAR-13 05.30.15.123 PM");
+ ResultSet resultORA = runQueryORA(query);
+ matchNextRow(resultORA,Timestamp.valueOf("2013-03-11 17:30:15.123"));
+
+ runMatchSingleRowExplainJDBC(query, "SELECT TO_CHAR(\"C11\") FROM \"" + ORA_TABLE + "\"");
+ runMatchSingleRowExplainORA(query, "SELECT \"C11\" FROM \"" + ORA_TABLE + "\"");
+ assertEquals("VARCHAR(2000000) UTF8", getColumnTypeJDBC("C11"));
+ assertEquals("VARCHAR(2000000) UTF8", getColumnTypeORA("C11"));
}
@Test
public void testTimestamp6() throws SQLException {
final String query = "SELECT C12 FROM %s";
- for (final ResultSet result : runQuery(query)) {
- matchNextRow(result, "11-MAR-13 05.30.15.123456 PM");
- }
- runMatchSingleRowExplain(query, "SELECT TO_CHAR(\"C12\") FROM \"" + ORA_TABLE + "\"");
- assertEquals("TIMESTAMP", getColumnType("C12"));
+ ResultSet resultJDBC = runQueryJDBC(query);
+ matchNextRow(resultJDBC, "11-MAR-13 05.30.15.123456 PM");
+ ResultSet resultORA = runQueryORA(query);
+ matchNextRow(resultORA,Timestamp.valueOf("2013-03-11 17:30:15.123"));
+
+ runMatchSingleRowExplainJDBC(query, "SELECT TO_CHAR(\"C12\") FROM \"" + ORA_TABLE + "\"");
+ runMatchSingleRowExplainORA(query, "SELECT \"C12\" FROM \"" + ORA_TABLE + "\"");
+ assertEquals("TIMESTAMP", getColumnTypeJDBC("C12"));
+ assertEquals("TIMESTAMP", getColumnTypeORA("C12"));
}
@Test
public void testTimestamp9() throws SQLException {
final String query = "SELECT C13 FROM %s";
- for (final ResultSet result : runQuery(query)) {
- matchNextRow(result, "11-MAR-13 05.30.15.123456789 PM");
- }
- runMatchSingleRowExplain(query, "SELECT TO_CHAR(\"C13\") FROM \"" + ORA_TABLE + "\"");
- assertEquals("TIMESTAMP", getColumnType("C13"));
+ ResultSet resultJDBC = runQueryJDBC(query);
+ matchNextRow(resultJDBC, "11-MAR-13 05.30.15.123456789 PM");
+ ResultSet resultORA = runQueryORA(query);
+ matchNextRow(resultORA,Timestamp.valueOf("2013-03-11 17:30:15.123"));
+
+ runMatchSingleRowExplainJDBC(query, "SELECT TO_CHAR(\"C13\") FROM \"" + ORA_TABLE + "\"");
+ runMatchSingleRowExplainORA(query, "SELECT \"C13\" FROM \"" + ORA_TABLE + "\"");
+ assertEquals("VARCHAR(2000000) UTF8", getColumnTypeJDBC("C13"));
+ assertEquals("VARCHAR(2000000) UTF8", getColumnTypeORA("C13"));
}
@Test
public void testTimestampTZ() throws SQLException {
final String query = "SELECT C14 FROM %s";
- for (final ResultSet result : runQuery(query)) {
- matchNextRow(result, "19-AUG-16 11.28.05.000000 AM -08:00");
- }
- runMatchSingleRowExplain(query, "SELECT TO_CHAR(\"C14\") FROM \"" + ORA_TABLE + "\"");
- assertEquals("VARCHAR(2000000) UTF8", getColumnType("C14"));
+ ResultSet resultJDBC = runQueryJDBC(query);
+ matchNextRow(resultJDBC, "19-AUG-16 11.28.05.000000 AM -08:00");
+ ResultSet resultORA = runQueryORA(query);
+ matchNextRow(resultORA, Timestamp.valueOf("2016-08-19 19:28:05.000"));
+
+ runMatchSingleRowExplainJDBC(query, "SELECT TO_CHAR(\"C14\") FROM \"" + ORA_TABLE + "\"");
+ runMatchSingleRowExplainORA(query, "SELECT \"C14\" FROM \"" + ORA_TABLE + "\"");
+ assertEquals("VARCHAR(2000000) UTF8", getColumnTypeJDBC("C14"));
+ assertEquals("VARCHAR(2000000) UTF8", getColumnTypeORA("C14"));
}
@Test
public void testTimestampLocalTZ() throws SQLException {
executeUpdate("ALTER SESSION SET TIME_ZONE = 'UTC'");
final String query = "SELECT C15 FROM %s";
- for (final ResultSet result : runQuery(query)) {
- matchNextRow(result, "30-APR-18 06.00.05.000000 PM");
- }
- assertEquals("VARCHAR(2000000) UTF8", getColumnType("C15"));
+ ResultSet resultJDBC = runQueryJDBC(query);
+ matchNextRow(resultJDBC, "30-APR-18 06.00.05.000000 PM");
+ ResultSet resultORA = runQueryORA(query);
+ matchNextRow(resultORA, Timestamp.valueOf("2018-04-30 18:00:05.000"));
+
+ runMatchSingleRowExplainJDBC(query, "SELECT TO_CHAR(\"C15\") FROM \"" + ORA_TABLE + "\"");
+ runMatchSingleRowExplainORA(query, "SELECT \"C15\" FROM \"" + ORA_TABLE + "\"");
+ assertEquals("VARCHAR(2000000) UTF8", getColumnTypeJDBC("C15"));
+ assertEquals("VARCHAR(2000000) UTF8", getColumnTypeORA("C15"));
}
@Test
@@ -471,12 +532,31 @@ public void testIntervalYear() throws SQLException {
@Test
public void testIntervalDay() throws SQLException {
- final String query = "SELECT C17 FROM %s";
+ final String query = "SELECT C17 FROM %s ORDER BY 1";
for (final ResultSet result : runQuery(query)) {
matchNextRow(result, "+01 11:12:10.123000");
matchNextRow(result, "+02 02:03:04.123456");
}
- runMatchSingleRowExplain(query, "SELECT TO_CHAR(\"C17\") FROM \"" + ORA_TABLE + "\"");
+ runMatchSingleRowExplain(query, "SELECT TO_CHAR(\"C17\") FROM \"" + ORA_TABLE + "\" ORDER BY \"C17\"");
assertEquals("VARCHAR(2000000) UTF8", getColumnType("C17"));
}
+
+ @Test
+ public void testSelectAllTimestampColumns() throws SQLException {
+ executeUpdate("ALTER SESSION SET TIME_ZONE = 'UTC'");
+ final String query = "SELECT * FROM %s.%s";
+ ResultSet resultJDBC = runQueryJDBC(String.format(query, VIRTUAL_SCHEMA_JDBC, "TS_T"));
+ matchNextRow(resultJDBC, "01-JAN-18 11.00.00.000000 AM", "01-JAN-18 10.00.00.000000 AM", "01-JAN-18 11.00.00.000000 AM +01:00");
+ ResultSet resultORA = runQueryORA(String.format(query, VIRTUAL_SCHEMA_ORA, "TS_T"));
+ matchNextRow(resultORA,Timestamp.valueOf("2018-01-01 11:00:00.000"), Timestamp.valueOf("2018-01-01 10:00:00.000"), Timestamp.valueOf("2018-01-01 10:00:00.000"));
+ Map<String, String> columnTypesJDBC = getColumnTypesOfTable(VIRTUAL_SCHEMA_JDBC + ".TS_T");
+ Map<String, String> columnTypesORA = getColumnTypesOfTable(VIRTUAL_SCHEMA_ORA + ".TS_T");
+
+ assertEquals("TIMESTAMP", columnTypesJDBC.get("A"));
+ assertEquals("TIMESTAMP", columnTypesORA.get("A"));
+ assertEquals("VARCHAR(2000000) UTF8", columnTypesJDBC.get("B"));
+ assertEquals("VARCHAR(2000000) UTF8", columnTypesORA.get("B"));
+ assertEquals("VARCHAR(2000000) UTF8", columnTypesJDBC.get("C"));
+ assertEquals("VARCHAR(2000000) UTF8", columnTypesORA.get("C"));
+ }
}
diff --git a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/jdbc/JdbcAdapterTest.java b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/jdbc/JdbcAdapterTest.java
index 3f42dc153..75576bc7b 100644
--- a/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/jdbc/JdbcAdapterTest.java
+++ b/jdbc-adapter/virtualschema-jdbc-adapter/src/test/java/com/exasol/adapter/jdbc/JdbcAdapterTest.java
@@ -3,10 +3,16 @@
import com.exasol.ExaConnectionAccessException;
import com.exasol.ExaConnectionInformation;
import com.exasol.ExaMetadata;
+import com.exasol.adapter.dialects.JdbcTypeDescription;
+import com.exasol.adapter.metadata.DataType;
import com.exasol.adapter.metadata.SchemaMetadataInfo;
import org.junit.Before;
import org.junit.Test;
+import org.mockito.Mockito;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Types;
import java.util.HashMap;
import java.util.Map;
@@ -105,4 +111,29 @@ public void getCredentialsForEXAImportWithConnectionStringUsernamePasswordGiven(
.getCredentialsForPushdownQuery(this.exaMetadata, exaSchemaMetadataInfoConnectionStringUserPassword);
assertEquals("USER 'testUsername' IDENTIFIED BY 'testPassword'", credentials);
}
+
+ @Test
+ public void buildColumnDescription() {
+ DataType[] types = new DataType[3];
+ types[0] = DataType.createBool();
+ types[1] = DataType.createDecimal(10,2);
+ types[2] = DataType.createTimestamp(false);
+ assertEquals("(c0 BOOLEAN,c1 DECIMAL(10, 2),c2 TIMESTAMP)", JdbcAdapter.buildColumnDescriptionFrom(types));
+ }
+
+ @Test
+ public void buildJdbcTypeDescriptionFromResultSetMetadata() throws SQLException {
+ ResultSetMetaData metadata = Mockito.mock(ResultSetMetaData.class);
+ when(metadata.getColumnType(0)).thenReturn(Types.SMALLINT);
+ when(metadata.getPrecision(0)).thenReturn(9);
+ when(metadata.getScale(0)).thenReturn(0);
+ when(metadata.getColumnTypeName(0)).thenReturn("SMALLINT");
+
+ JdbcTypeDescription type = JdbcAdapter.getJdbcTypeDescription(metadata, 0);
+ assertEquals(Types.SMALLINT, type.getJdbcType());
+ assertEquals(9, type.getPrecisionOrSize());
+ assertEquals(0, type.getDecimalScale());
+ assertEquals("SMALLINT", type.getTypeName());
+ assertEquals(0, type.getCharOctedLength());
+ }
}
\ No newline at end of file