diff --git a/cli.js b/cli.js
index fbe3170..b802040 100644
--- a/cli.js
+++ b/cli.js
@@ -4,6 +4,7 @@
 import path from 'path';
 import { s3upload, purgeCache } from './index.js';
 import { debugLog } from "./utils.js";
+
 async function start() {
     if (process.argv.length < 3) {
         console.log('USAGE: alt-upload source_path [cdn_path] [version] [sdk_version]');
diff --git a/s3upload.js b/s3upload.js
index cb35cc4..0a7f70c 100644
--- a/s3upload.js
+++ b/s3upload.js
@@ -5,12 +5,14 @@ import { PromisePool } from '@supercharge/promise-pool';
 import path from 'path';
 import fs from 'fs';
 import {debugLog as debugLog_, hashFile, walk} from "./utils.js";
+import { createGzipFileBuffer } from "./zip.js";
 
 const debugLog = (...args) => debugLog_('S3', ...args);
 const AWS_KEY_ID = process.env['AWS_KEY_ID'];
 const SECRET_ACCESS_KEY = process.env['AWS_SECRET_ACCESS_KEY'];
 const BUCKET = process.env['AWS_BUCKET'];
 const ENDPOINT = process.env['AWS_ENDPOINT'];
+const compressionExtensions = [".dll"];
 
 const s3 = new S3({
     credentials: {
@@ -47,7 +49,17 @@ async function _upload(data, cdnPath, contentType) {
     }
 }
 
-async function uploadFile(filePath, cdnPath, attempt = 0) {
+/**
+ * Check whether the file's extension occurs in the given list
+ * @param {string} filePath - Path to the source file
+ * @param {string[]} allowedExtensions - Extensions to match, e.g. [".dll"]
+ */
+function filterFilesByExtension(filePath, allowedExtensions) {
+    const fileExtension = path.extname(filePath).toLowerCase();
+    return allowedExtensions.includes(fileExtension);
+}
+
+async function uploadFile(filePath, cdnPath, attempt = 0, gzipBuffer = {}) {
     debugLog('Upload file', filePath, 'to', cdnPath, 'attempt', attempt);
     try {
         const size = (await fs.promises.stat(filePath)).size;
@@ -60,7 +72,10 @@ async function uploadFile(filePath, cdnPath, attempt = 0) {
                 Key: cdnPath
             }));
 
-            if (+head.ContentLength === size) {
+            if (+head.ContentLength === size) {
+                if (filterFilesByExtension(filePath, compressionExtensions)) {
+                    return await uploadGzipFile(filePath, cdnPath, 0, gzipBuffer);
+                }
                 return true;
             }
 
@@ -79,6 +94,42 @@
     return false;
 }
 
+async function uploadGzipFile(filePath, cdnPath, attempt = 0, gzipBuffer = {}) {
+    debugLog('Upload gzip file', filePath, 'to', cdnPath, 'attempt', attempt);
+
+    const cdnPathGz = cdnPath + '.gz';
+    const filePathGz = filePath + '.gz';
+
+    try {
+        const sizeGz = gzipBuffer.length;
+        const contentType = 'application/gzip';
+        if (await _upload(gzipBuffer, cdnPathGz, contentType)) {
+            console.log(`Uploaded '${filePathGz}' to '${cdnPathGz}'`);
+
+            const head = await s3.send(new HeadObjectCommand({
+                Bucket: BUCKET,
+                Key: cdnPathGz
+            }));
+
+            if (+head.ContentLength === sizeGz) {
+                return true;
+            }
+
+            console.error(`Uploaded gzip file size doesn't match. Local size: ${sizeGz}. Uploaded size: ${head.ContentLength}`);
+        }
+        console.error(`Error uploading '${filePathGz}' to '${cdnPathGz}'`);
+    } catch (e) {
+        console.error(e);
+    }
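+    // Retry with the same compressed buffer so every attempt uploads identical bytes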
+    if (attempt < 3) {
+        return uploadGzipFile(filePath, cdnPath, attempt + 1, gzipBuffer);
+    }
+
+    console.log('Failed to upload', filePathGz, 'to', cdnPathGz, 'after 3 attempts');
+    return false;
+}
+
 async function uploadDir(dirPath, cdnPath, version, sdkVersion) {
     dirPath = path.resolve(dirPath);
     debugLog('Upload dir', dirPath, 'to', cdnPath, version, sdkVersion);
@@ -86,6 +137,7 @@
     const files = await walk(dirPath);
     const hashes = { };
     const sizes = { };
+    const gzipBuffer = { };
 
     let result = true;
 
@@ -93,7 +145,9 @@
 
     console.log(files);
 
+    let filesInfosForServer = [];
     for (let i = 0; i < files.length; ++i) {
+
         const file = files[i];
         const stats = fs.statSync(file);
 
@@ -102,16 +156,34 @@
             const key = (cdnPath.length > 0 ? (cdnPath + '/') : '') + filePath;
 
             hashes[filePath] = await hashFile(file);
-            sizes[filePath] = stats.size;
+            sizes[filePath] = stats.size;
+
+            let fileInfo = {
+                path: "",
+                size: 0,
+                sha1: "",
+                encoding: {},
+            };
 
-            uploadQueue.push({ file, key });
+            fileInfo.path = filePath;
+            fileInfo.size = stats.size;
+            fileInfo.sha1 = hashes[filePath];
+
+            let gzipBuffer = {};
+            if (filterFilesByExtension(filePath, compressionExtensions)) {
+                gzipBuffer = await createGzipFileBuffer(file);
+                fileInfo.encoding["gz"] = gzipBuffer.length;
+            }
+            uploadQueue.push({ file, key, gzipBuffer });
+
+            filesInfosForServer.push(fileInfo);
         }
     }
 
     console.log(uploadQueue);
 
     const { results, errors } = await PromisePool.for(uploadQueue).withConcurrency(10).process(async queueItem => {
-        return await uploadFile(queueItem.file, queueItem.key);
+        return await uploadFile(queueItem.file, queueItem.key, 0, queueItem.gzipBuffer);
     });
 
     for (let i = 0; i < results.length; ++i) {
@@ -123,9 +195,9 @@
     if (version) {
         debugLog('Generate update.json', version);
         const updateData = JSON.stringify({
-            latestBuildNumber: -1,
             version: version,
             sdkVersion: sdkVersion || undefined,
+            files: filesInfosForServer,
             hashList: hashes,
             sizeList: sizes
         });
diff --git a/zip.js b/zip.js
new file mode 100644
index 0000000..2c710fa
--- /dev/null
+++ b/zip.js
@@ -0,0 +1,59 @@
+import fs from 'fs';
+import zlib from 'zlib';
+import { promisify } from 'util';
+import { pipeline, PassThrough } from 'stream';
+import { debugLog } from "./utils.js";
+
+const pipe = promisify(pipeline);
+
+/**
+ * Compress a file with gzip and write the result to disk
+ * @param {string} source - Path to the source file
+ * @param {string} destination - Path to the compressed file
+ */
+async function gzipFile(source, destination) {
+    try {
+        const sourceStream = fs.createReadStream(source);
+        const gzipStream = zlib.createGzip();
+        const destinationStream = fs.createWriteStream(destination);
+
+        await pipe(sourceStream, gzipStream, destinationStream);
+        debugLog(`Archiving completed: ${destination}`);
+    } catch (error) {
+        console.error(`Error archiving file ${source}:`, error);
+    }
+}
+
+/**
+ * Compress a file with gzip in memory, without writing to disk
+ * @param {string} source - Path to the source file
+ * @returns {Promise<Buffer|null>} The compressed data, or null on failure
+ */
+export async function createGzipFileBuffer(source) {
+    try {
+        const sourceStream = fs.createReadStream(source);
+        const gzipStream = zlib.createGzip();
+        const passThrough = new PassThrough();
+
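+        // Awaiting here lets the catch below turn stream errors into a null result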
+        return await new Promise((resolve, reject) => {
+            sourceStream.on('error', reject);
+            gzipStream.on('error', reject);
+            // Collect the compressed chunks as they stream through
+            const compressedChunks = [];
+            passThrough.on('data', (chunk) => {
+                compressedChunks.push(chunk);
+            });
+
+            passThrough.on('end', () => {
+                resolve(Buffer.concat(compressedChunks));
+            });
+
+            sourceStream.pipe(gzipStream);
+            gzipStream.pipe(passThrough);
+        });
+    } catch (error) {
+        console.error(`Error archiving file ${source}:`, error);
+    }
+    return null;
+}
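
For context, a consumer of the generated update.json could use the new files[].encoding.gz field to fetch the compressed variant and verify its size before inflating it. A minimal sketch, assuming files are served relative to the same CDN root as the manifest; fetchBuffer and cdnRoot are illustrative placeholders, not part of this patch:

    import zlib from 'zlib';
    import { promisify } from 'util';

    const gunzip = promisify(zlib.gunzip);

    // fileInfo is one entry of update.json's "files" array
    async function fetchFile(cdnRoot, fileInfo, fetchBuffer) {
        if (fileInfo.encoding && fileInfo.encoding.gz) {
            const gz = await fetchBuffer(`${cdnRoot}/${fileInfo.path}.gz`);
            // The manifest records the compressed size; check it before inflating
            if (gz.length !== fileInfo.encoding.gz) {
                throw new Error(`Size mismatch for ${fileInfo.path}.gz`);
            }
            return await gunzip(gz);
        }
        return fetchBuffer(`${cdnRoot}/${fileInfo.path}`);
    }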