Fixed vulnerabilities CVE-2023-44981 and CVE-2023-46120 #597

Workflow: broken_links_checker.yml (on: pull_request)
Job: linkChecker (28s)

Annotations: 1 error
AlluxioExportImportIT.alluxio filesystem export and import:
com/exasol/cloudetl/alluxio/AlluxioExportImportIT#L13
Failed executing SQL 'EXPORT "ALLUXIO_SCHEMA"."EXPORTED_ITEMS"
INTO SCRIPT ALLUXIO_SCHEMA.EXPORT_PATH WITH
BUCKET_PATH = 'alluxio://172.18.0.3:19998/data/'
DATA_FORMAT = 'PARQUET'
PARQUET_BLOCK_SIZE = '67108864'
PARALLELISM = 'iproc()';
': VM error: F-UDF-CL-LIB-1127: F-UDF-CL-SL-JAVA-1002: F-UDF-CL-SL-JAVA-1013:
com.exasol.ExaUDFException: F-UDF-CL-SL-JAVA-1080: Exception during run
java.lang.IllegalArgumentException: No Under File System Factory found for: /opt/alluxio/underFSStorage
alluxio.underfs.UnderFileSystem$Factory.createWithRecorder(UnderFileSystem.java:113)
alluxio.underfs.UnderFileSystem$Factory.create(UnderFileSystem.java:93)
alluxio.underfs.UnderFileSystem$Factory.create(UnderFileSystem.java:80)
alluxio.client.file.ufs.UfsBaseFileSystem.lambda$new$0(UfsBaseFileSystem.java:108)
alluxio.underfs.UfsManager$UfsClient.acquireUfsResource(UfsManager.java:62)
alluxio.client.file.ufs.UfsBaseFileSystem.<init>(UfsBaseFileSystem.java:130)
alluxio.client.file.ufs.UfsBaseFileSystem.<init>(UfsBaseFileSystem.java:107)
alluxio.client.file.FileSystem$Factory.create(FileSystem.java:182)
alluxio.client.file.FileSystem$Factory.create(FileSystem.java:165)
alluxio.client.file.FileSystem$Factory.create(FileSystem.java:157)
alluxio.hadoop.AbstractFileSystem.initialize(AbstractFileSystem.java:571)
alluxio.hadoop.AbstractFileSystem.initialize(AbstractFileSystem.java:509)
org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3611)
org.apache.hadoop.fs.FileSystem.access$300(FileSystem.java:174)
org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3712)
org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3663)
org.apache.hadoop.fs.FileSystem.get(FileSystem.java:557)
org.apache.hadoop.fs.Path.getFileSystem(Path.java:365)
org.apache.parquet.hadoop.util.HadoopOutputFile.fromPath(HadoopOutputFile.java:58)
org.apache.parquet.hadoop.ParquetWriter$Builder.build(ParquetWriter.java:677)
com.exasol.cloudetl.parquet.ParquetRowWriter$.apply(ParquetRowWriter.scala:37)
com.exasol.cloudetl.sink.BatchSizedSink$$anon$1.<init>(BatchSizedSink.scala:54)
com.exasol.cloudetl.sink.BatchSizedSink.createWriter(BatchSizedSink.scala:51)
com.exasol.cloudetl.sink.BatchSizedSink.openNewFile(BatchSizedSink.scala:106)
com.exasol.cloudetl.sink.BatchSizedSink.write(BatchSizedSink.scala:75)
com.exasol.cloudetl.scriptclasses.TableDataExporter$.runExport(TableDataExporter.scala:53)
com.exasol.cloudetl.scriptclasses.TableDataExporter$.run(TableDataExporter.scala:36)
com.exasol.cloudetl.scriptclasses.TableDataExporter.run(TableDataExporter.scala)
com.exasol.ExaWrapper.run(ExaWrapper.java:197)
(Session: 1780808294105284608)
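
The trace shows the export UDF failing while Hadoop initializes the alluxio:// file system for the Parquet writer (Path.getFileSystem -> alluxio.hadoop.AbstractFileSystem.initialize). For context, here is a minimal Scala sketch of that initialization step; it is an illustration under stated assumptions, not code from this repository, and the file name used is hypothetical:

```scala
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path

// Minimal reproduction sketch of the failing FileSystem initialization.
// Assumptions (not taken from this log): alluxio-client and hadoop-client are on
// the classpath, and the master address 172.18.0.3:19998 matches the test container.
object AlluxioUfsRepro {
  def main(args: Array[String]): Unit = {
    val conf = new Configuration()
    // Map the alluxio:// scheme to Alluxio's Hadoop-compatible FileSystem.
    conf.set("fs.alluxio.impl", "alluxio.hadoop.FileSystem")
    // The file name part-0.parquet is hypothetical; BUCKET_PATH above only gives the directory.
    val path = new Path("alluxio://172.18.0.3:19998/data/part-0.parquet")
    // Mirrors org.apache.hadoop.fs.Path.getFileSystem in the stack trace; this is the
    // point where the "No Under File System Factory found" IllegalArgumentException surfaces.
    val fs = path.getFileSystem(conf)
    println(s"Initialized file system: ${fs.getUri}")
  }
}
```

The message "No Under File System Factory found for: /opt/alluxio/underFSStorage" suggests the Alluxio client is resolving a local under-file-system path directly, which typically requires the matching Alluxio underfs module on the UDF classpath; treat that as a hypothesis to verify, not a confirmed root cause.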