diff --git a/.github/workflows/dependencies_update.yml b/.github/workflows/dependencies_update.yml index 1bf502f0..0fa71800 100644 --- a/.github/workflows/dependencies_update.yml +++ b/.github/workflows/dependencies_update.yml @@ -75,7 +75,7 @@ jobs: echo >> "$GITHUB_OUTPUT" echo '# ⚠️ Notes ⚠️' >> "$GITHUB_OUTPUT" echo '## Run PK fix manually' >> "$GITHUB_OUTPUT" - echo 'Due to restrictions workflow `dependencies_update.yml` can't update other workflows, see https://github.com/exasol/project-keeper/issues/578 for details.' >> "$GITHUB_OUTPUT" + echo 'Due to restrictions workflow `dependencies_update.yml` cannot update other workflows, see https://github.com/exasol/project-keeper/issues/578 for details.' >> "$GITHUB_OUTPUT" echo 'Please checkout this PR locally and run `mvn com.exasol:project-keeper-maven-plugin:fix --projects .`' >> "$GITHUB_OUTPUT" echo '## This PR does not trigger CI workflows' >> "$GITHUB_OUTPUT" echo 'Please click the **Close pull request** button and then **Reopen pull request** to trigger running checks.' >> "$GITHUB_OUTPUT" diff --git a/doc/changes/changelog.md b/doc/changes/changelog.md index 2e49baf9..a1ef754f 100644 --- a/doc/changes/changelog.md +++ b/doc/changes/changelog.md @@ -1,5 +1,6 @@ # Changes +* [2.9.0](changes_2.9.0.md) * [2.8.1](changes_2.8.1.md) * [2.8.0](changes_2.8.0.md) * [2.7.12](changes_2.7.12.md) diff --git a/doc/changes/changes_2.9.0.md b/doc/changes/changes_2.9.0.md new file mode 100644 index 00000000..79d6e549 --- /dev/null +++ b/doc/changes/changes_2.9.0.md @@ -0,0 +1,17 @@ +# Cloud Storage Extension 2.9.0, released 2024-??-?? 
+ +Code name: + +## Summary + +## Features + +* ISSUE_NUMBER: description + +## Dependency Updates + +### Cloud Storage Extension + +#### Plugin Dependency Updates + +* Updated `com.exasol:project-keeper-maven-plugin:4.3.2` to `4.3.3` diff --git a/doc/user_guide/user_guide.md b/doc/user_guide/user_guide.md index 6415decf..285f2ce3 100644 --- a/doc/user_guide/user_guide.md +++ b/doc/user_guide/user_guide.md @@ -150,7 +150,7 @@ downloaded jar file is the same as the checksum provided in the releases. To check the SHA256 result of the local jar, run the command: ```sh -sha256sum exasol-cloud-storage-extension-2.8.1.jar +sha256sum exasol-cloud-storage-extension-2.9.0.jar ``` ### Building From Source @@ -180,7 +180,7 @@ mvn clean package -DskipTests=true ``` The assembled jar file should be located at -`target/exasol-cloud-storage-extension-2.8.1.jar`. +`target/exasol-cloud-storage-extension-2.9.0.jar`. ### Create an Exasol Bucket @@ -202,7 +202,7 @@ for the HTTP protocol. Upload the jar file using curl command: ```sh -curl -X PUT -T exasol-cloud-storage-extension-2.8.1.jar \ +curl -X PUT -T exasol-cloud-storage-extension-2.9.0.jar \ http://w:@exasol.datanode.domain.com:2580// ``` @@ -234,7 +234,7 @@ OPEN SCHEMA CLOUD_STORAGE_EXTENSION; CREATE OR REPLACE JAVA SET SCRIPT IMPORT_PATH(...) EMITS (...) AS %scriptclass com.exasol.cloudetl.scriptclasses.FilesImportQueryGenerator; - %jar /buckets/bfsdefault//exasol-cloud-storage-extension-2.8.1.jar; + %jar /buckets/bfsdefault//exasol-cloud-storage-extension-2.9.0.jar; / CREATE OR REPLACE JAVA SCALAR SCRIPT IMPORT_METADATA(...) EMITS ( @@ -244,12 +244,12 @@ CREATE OR REPLACE JAVA SCALAR SCRIPT IMPORT_METADATA(...) EMITS ( end_index DECIMAL(36, 0) ) AS %scriptclass com.exasol.cloudetl.scriptclasses.FilesMetadataReader; - %jar /buckets/bfsdefault//exasol-cloud-storage-extension-2.8.1.jar; + %jar /buckets/bfsdefault//exasol-cloud-storage-extension-2.9.0.jar; / CREATE OR REPLACE JAVA SET SCRIPT IMPORT_FILES(...) EMITS (...) 
AS %scriptclass com.exasol.cloudetl.scriptclasses.FilesDataImporter; - %jar /buckets/bfsdefault//exasol-cloud-storage-extension-2.8.1.jar; + %jar /buckets/bfsdefault//exasol-cloud-storage-extension-2.9.0.jar; / ``` @@ -268,12 +268,12 @@ OPEN SCHEMA CLOUD_STORAGE_EXTENSION; CREATE OR REPLACE JAVA SET SCRIPT EXPORT_PATH(...) EMITS (...) AS %scriptclass com.exasol.cloudetl.scriptclasses.TableExportQueryGenerator; - %jar /buckets/bfsdefault//exasol-cloud-storage-extension-2.8.1.jar; + %jar /buckets/bfsdefault//exasol-cloud-storage-extension-2.9.0.jar; / CREATE OR REPLACE JAVA SET SCRIPT EXPORT_TABLE(...) EMITS (ROWS_AFFECTED INT) AS %scriptclass com.exasol.cloudetl.scriptclasses.TableDataExporter; - %jar /buckets/bfsdefault//exasol-cloud-storage-extension-2.8.1.jar; + %jar /buckets/bfsdefault//exasol-cloud-storage-extension-2.9.0.jar; / ``` @@ -407,13 +407,13 @@ CREATE OR REPLACE JAVA SCALAR SCRIPT IMPORT_METADATA(...) EMITS ( ) AS %jvmoption -DHTTPS_PROXY=http://username:password@10.10.1.10:1180 %scriptclass com.exasol.cloudetl.scriptclasses.FilesMetadataReader; - %jar /buckets/bfsdefault//exasol-cloud-storage-extension-2.8.1.jar; + %jar /buckets/bfsdefault//exasol-cloud-storage-extension-2.9.0.jar; / CREATE OR REPLACE JAVA SET SCRIPT IMPORT_FILES(...) EMITS (...) 
AS %jvmoption -DHTTPS_PROXY=http://username:password@10.10.1.10:1180 %scriptclass com.exasol.cloudetl.scriptclasses.FilesDataImporter; - %jar /buckets/bfsdefault//exasol-cloud-storage-extension-2.8.1.jar; + %jar /buckets/bfsdefault//exasol-cloud-storage-extension-2.9.0.jar; / ``` diff --git a/pk_generated_parent.pom b/pk_generated_parent.pom index 21b50a55..47624c85 100644 --- a/pk_generated_parent.pom +++ b/pk_generated_parent.pom @@ -3,7 +3,7 @@ 4.0.0 com.exasol cloud-storage-extension-generated-parent - 2.8.1 + 2.9.0 pom UTF-8 diff --git a/pom.xml b/pom.xml index 814f10d2..f53b55a3 100644 --- a/pom.xml +++ b/pom.xml @@ -3,14 +3,14 @@ 4.0.0 com.exasol cloud-storage-extension - 2.8.1 + 2.9.0 Cloud Storage Extension Exasol Cloud Storage Import And Export Extension https://github.com/exasol/cloud-storage-extension/ cloud-storage-extension-generated-parent com.exasol - 2.8.1 + 2.9.0 pk_generated_parent.pom @@ -883,7 +883,7 @@ com.exasol project-keeper-maven-plugin - 4.3.2 + 4.3.3 diff --git a/src/main/scala/com/exasol/cloudetl/bucket/AzureAbfsBucket.scala b/src/main/scala/com/exasol/cloudetl/bucket/AzureAbfsBucket.scala index f2329e6f..83286f8a 100644 --- a/src/main/scala/com/exasol/cloudetl/bucket/AzureAbfsBucket.scala +++ b/src/main/scala/com/exasol/cloudetl/bucket/AzureAbfsBucket.scala @@ -57,20 +57,28 @@ final case class AzureAbfsBucket(path: String, params: StorageProperties) extend .get(AZURE_ACCOUNT_NAME) .getOrElse(accountAndContainer.accountName) val secretKey = mergedProperties.getString(AZURE_SECRET_KEY) - - conf.set(s"fs.azure.account.key.$accountName.dfs.core.windows.net", secretKey) + if (path.contains(".dfs.fabric.microsoft.com")) { + conf.set(s"fs.azure.account.key.$accountName.dfs.fabric.microsoft.com", secretKey) + } else { + conf.set(s"fs.azure.account.key.$accountName.dfs.core.windows.net", secretKey) + } conf } // Intentionally copy-paste, duplicate count: 2. Please, refactor when // it reaches 3+.
+ // Fabric / OneLake: .dfs.fabric.microsoft.com private[this] final val AZURE_ABFS_PATH_REGEX: Regex = """abfss?://(.*)@([^.]+).dfs.core.windows.net/(.*)$""".r + private[this] final val AZURE_ABFS_ONELAKE_PATH_REGEX: Regex = + """abfss?://(.*)@([^.]+)\.dfs\.fabric\.microsoft\.com/(.*)$""".r private[this] def regexParsePath(path: String): AccountAndContainer = path match { case AZURE_ABFS_PATH_REGEX(containerName, accountName, _) => AccountAndContainer(accountName, containerName) + case AZURE_ABFS_ONELAKE_PATH_REGEX(containerName, accountName, _) => + AccountAndContainer(accountName, containerName) case _ => throw new BucketValidationException( ExaError diff --git a/src/main/scala/com/exasol/cloudetl/bucket/SecureBucket.scala b/src/main/scala/com/exasol/cloudetl/bucket/SecureBucket.scala index c830a225..5c94588a 100644 --- a/src/main/scala/com/exasol/cloudetl/bucket/SecureBucket.scala +++ b/src/main/scala/com/exasol/cloudetl/bucket/SecureBucket.scala @@ -27,7 +27,7 @@ trait SecureBucket extends LazyLogging { self: Bucket => throw new BucketValidationException( ExaError .messageBuilder("E-CSE-5") - .message("Using credentials as parameters is forbidded.") + .message("Using credentials as parameters is forbidden.") .mitigation("Please use an Exasol named connection object via CONNECTION_NAME property.") .toString() ) diff --git a/src/test/scala/com/exasol/cloudetl/bucket/AbstractBucketTest.scala b/src/test/scala/com/exasol/cloudetl/bucket/AbstractBucketTest.scala index d3fea9cf..497e01dd 100644 --- a/src/test/scala/com/exasol/cloudetl/bucket/AbstractBucketTest.scala +++ b/src/test/scala/com/exasol/cloudetl/bucket/AbstractBucketTest.scala @@ -54,7 +54,7 @@ class AbstractBucketTest extends AnyFunSuite with BeforeAndAfterEach with Mockit fn } val message = thrown.getMessage() - assert(message.contains("Using credentials as parameters is forbidded")) + assert(message.contains("Using credentials as parameters is forbidden")) assert(message.contains("Please use an Exasol 
named connection object")) () } diff --git a/src/test/scala/com/exasol/cloudetl/bucket/AzureAbfsBucketTest.scala b/src/test/scala/com/exasol/cloudetl/bucket/AzureAbfsBucketTest.scala index d3a08ec7..c5d67c9a 100644 --- a/src/test/scala/com/exasol/cloudetl/bucket/AzureAbfsBucketTest.scala +++ b/src/test/scala/com/exasol/cloudetl/bucket/AzureAbfsBucketTest.scala @@ -36,6 +36,21 @@ class AzureAbfsBucketTest extends AbstractBucketTest { assert(thrown.getMessage().startsWith("E-CSE-20")) assert(thrown.getMessage().contains(s"path '$path' scheme is not valid.")) } + // https://github.com/MicrosoftDocs/fabric-docs/blob/main/docs/onelake/onelake-access-api.md + // Regular ADLS Gen2 form for contrast: "abfss://container1@account1.dfs.core.windows.net/data/" + // For OneLake the account name is always onelake and + // the container name is your workspace name. + // + // From the docs + // -> abfs[s]://WORKSPACE@onelake.dfs.fabric.microsoft.com/ITEM.ITEMTYPE/PATH/FILENAME + test("apply returns an AzureAbfsBucket for an Azure OneLake path") { + val path = "abfss://workspacename@onelake.dfs.fabric.microsoft.com/item.itemtype/path/filename" + val exaMetadata = mockConnectionInfo("", "AZURE_SECRET_KEY=secret") + properties = defaultProperties ++ Map(PATH -> path, "CONNECTION_NAME" -> "connection_info") + + val bucket = getBucket(properties, exaMetadata) + assert(bucket.isInstanceOf[AzureAbfsBucket]) + } test("apply throws if no connection name is provided") { properties = defaultProperties