From 56640f570680e2056da62db8b0b28177ee7d72ce Mon Sep 17 00:00:00 2001
From: libragliese581
Date: Thu, 3 Oct 2024 09:20:31 +0300
Subject: [PATCH 1/6] [client] Archive libce2.dll and other DLLs and extract
 them on startup

- Updated the tool to compress DLL files
---
 cli.js |  4 +++
 zip.js | 86 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 90 insertions(+)
 create mode 100644 zip.js

diff --git a/cli.js b/cli.js
index fbe3170..b199583 100644
--- a/cli.js
+++ b/cli.js
@@ -3,6 +3,8 @@ import path from 'path';
 import { s3upload, purgeCache } from './index.js';
 import { debugLog } from "./utils.js";
 
+import { compressDllFiles } from "./zip.js";
+
 async function start() {
 
     if (process.argv.length < 3) {
@@ -14,6 +16,8 @@ async function start() {
     const cdnPath = process.argv[3] ?? "";
     const version = process.argv.length >= 5 ? process.argv[4] : null;
     const sdkVersion = process.argv.length >= 6 ? process.argv[5] : null;
+
+    await compressDllFiles(filePath);
 
     // Upload to our R2 bucket
     if (process.env['AWS_KEY_ID']) {
diff --git a/zip.js b/zip.js
new file mode 100644
index 0000000..021c955
--- /dev/null
+++ b/zip.js
@@ -0,0 +1,86 @@
+import path from 'path';
+import fs from 'fs';
+import zlib from 'zlib';
+import klaw from 'klaw';
+import { promisify } from 'util';
+import { pipeline } from 'stream';
+import { debugLog } from "./utils.js";
+
+const pipe = promisify(pipeline);
+
+/**
+ * Compress a file with gzip
+ * @param {string} source - Path to the source file
+ * @param {string} destination - Path to the compressed file
+ */
+async function gzipFile(source, destination) {
+    try {
+        const sourceStream = fs.createReadStream(source);
+        const gzipStream = zlib.createGzip();
+        const destinationStream = fs.createWriteStream(destination);
+
+        await pipe(sourceStream, gzipStream, destinationStream);
+        debugLog(`Archiving completed: ${destination}`);
+    } catch (error) {
+        console.error(`Error archiving file ${source}:`, error);
+    }
+}
+
+/**
+ * Recursively compress all .dll files in a directory
+ * @param {string} dirPath - Path to the directory
+ */
+async function gzipDirectoryRecursively(dirPath) {
+    return new Promise((resolve, reject) => {
+        const tasks = [];
+        klaw(dirPath)
+            .on('data', item => {
+                if (!item.stats.isFile() || path.extname(item.path) !== ".dll") {
+                    return;
+                }
+                const filePath = item.path;
+                const gzipDestination = `${filePath}.gz`;
+                tasks.push(async () => {
+                    debugLog(`Start archiving file: ${filePath}`);
+                    await gzipFile(filePath, gzipDestination);
+                });
+            })
+            .on('end', async () => {
+                try {
+                    for (const task of tasks) {
+                        await task();
+                    }
+                    debugLog('Recursive archiving of all files is complete');
+                    resolve();
+                } catch (err) {
+                    reject(err);
+                }
+            })
+            .on('error', (err, item) => {
+                console.error(`Error while walking directory ${item.path}:`, err);
+                reject(err);
+            });
+    });
+}
+
+/**
+ * Compress DLL files
+ * @param {string} filePath - Path to a file or directory
+ */
+export function compressDllFiles(filePath) {
+    let gzipTask;
+    try {
+        const stats = fs.statSync(filePath);
+        if (stats.isFile() && path.extname(filePath) === ".dll") {
+            const gzipDestination = `${filePath}.gz`;
+            debugLog(`Start file gzip process: ${filePath}`);
+            gzipTask = gzipFile(filePath, gzipDestination);
+        } else if (stats.isDirectory()) {
+            debugLog(`Start dir gzip process: ${filePath}`);
+            gzipTask = gzipDirectoryRecursively(filePath);
+        }
+    } catch (error) {
+        console.error(`Error processing gzip: ${error.message}`);
+    }
+    return gzipTask;
+}
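Note: the extraction half of this patch lives in the client, not in this repository. A minimal sketch of the startup-side decompression, assuming the archive sits next to the DLL and using only Node core modules (gunzipFile is a hypothetical helper name):

    import fs from 'fs';
    import zlib from 'zlib';
    import { promisify } from 'util';
    import { pipeline } from 'stream';

    const pipe = promisify(pipeline);

    // Hypothetical client-side counterpart to gzipFile() above:
    // restores libce2.dll from libce2.dll.gz on startup.
    async function gunzipFile(source, destination) {
        await pipe(
            fs.createReadStream(source),       // e.g. 'libce2.dll.gz'
            zlib.createGunzip(),               // inverse of zlib.createGzip()
            fs.createWriteStream(destination)  // e.g. 'libce2.dll'
        );
    }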
From 7d370fdddf0f89741e547b26c5faea6eedffad6c Mon Sep 17 00:00:00 2001
From: libragliese581
Date: Thu, 3 Oct 2024 13:40:00 +0300
Subject: [PATCH 2/6] Upload gzipped data without writing .gz files to disk

---
 cli.js      |  3 ---
 s3upload.js | 45 +++++++++++++++++++++++++++++++++++++++++++--
 zip.js      | 35 ++++++++++++++++++++++++++++++++++-
 3 files changed, 77 insertions(+), 6 deletions(-)

diff --git a/cli.js b/cli.js
index b199583..b802040 100644
--- a/cli.js
+++ b/cli.js
@@ -3,7 +3,6 @@ import path from 'path';
 import { s3upload, purgeCache } from './index.js';
 import { debugLog } from "./utils.js";
 
-import { compressDllFiles } from "./zip.js";
 
 async function start() {
     if (process.argv.length < 3) {
@@ -16,8 +15,6 @@ async function start() {
     const cdnPath = process.argv[3] ?? "";
     const version = process.argv.length >= 5 ? process.argv[4] : null;
     const sdkVersion = process.argv.length >= 6 ? process.argv[5] : null;
-
-    await compressDllFiles(filePath);
 
     // Upload to our R2 bucket
     if (process.env['AWS_KEY_ID']) {
diff --git a/s3upload.js b/s3upload.js
index cb35cc4..314fcc2 100644
--- a/s3upload.js
+++ b/s3upload.js
@@ -5,6 +5,7 @@ import { PromisePool } from '@supercharge/promise-pool';
 import path from 'path';
 import fs from 'fs';
 import {debugLog as debugLog_, hashFile, walk} from "./utils.js";
+import { createGzipFileStream } from "./zip.js";
 
 const debugLog = (...args) => debugLog_('S3', ...args);
 const AWS_KEY_ID = process.env['AWS_KEY_ID'];
@@ -60,7 +61,10 @@ async function uploadFile(filePath, cdnPath, attempt = 0) {
             Key: cdnPath
         }));
 
         if (+head.ContentLength === size) {
+            if (path.extname(filePath) === ".dll") {
+                uploadGzipFile(filePath, cdnPath);
+            }
             return true;
         }
 
@@ -79,6 +83,44 @@ async function uploadFile(filePath, cdnPath, attempt = 0) {
     return false;
 }
 
+async function uploadGzipFile(filePath, cdnPath, attempt = 0) {
+    debugLog('Upload gzip file', filePath, 'to', cdnPath, 'attempt', attempt);
+
+    const cdnPathGz = cdnPath + '.gz';
+    const filePathGz = filePath + '.gz';
+
+    try {
+        let wrapperSize = { value: 0 };
+        const gzipFileStream = await createGzipFileStream(filePath, wrapperSize);
+        const sizeGz = wrapperSize.value;
+        const contentType = 'application/gzip';
+        if (await _upload(gzipFileStream, cdnPathGz, contentType)) {
+            console.log(`Uploaded '${filePathGz}' to '${cdnPathGz}'`);
+
+            const head = await s3.send(new HeadObjectCommand({
+                Bucket: BUCKET,
+                Key: cdnPathGz
+            }));
+
+            if (+head.ContentLength === sizeGz) {
+                return true;
+            }
+
+            console.error(`Uploaded gzip file size doesn't match. Local size: ${sizeGz}. Uploaded size: ${head.ContentLength}`);
+        }
+        console.error(`Error uploading '${filePathGz}' to '${cdnPathGz}'`);
+    } catch (e) {
+        console.error(e);
+    }
+
+    if (attempt < 3) {
+        return uploadGzipFile(filePath, cdnPath, attempt + 1);
+    }
+
+    console.log('Failed to upload', filePathGz, 'to', cdnPathGz, 'after 3 attempts');
+    return false;
+}
+
 async function uploadDir(dirPath, cdnPath, version, sdkVersion) {
     dirPath = path.resolve(dirPath);
     debugLog('Upload dir', dirPath, 'to', cdnPath, version, sdkVersion);
@@ -123,7 +165,6 @@ async function uploadDir(dirPath, cdnPath, version, sdkVersion) {
     if (version) {
         debugLog('Generate update.json', version);
         const updateData = JSON.stringify({
-            latestBuildNumber: -1,
             version: version,
             sdkVersion: sdkVersion || undefined,
             hashList: hashes,
diff --git a/zip.js b/zip.js
index 021c955..93b37d3 100644
--- a/zip.js
+++ b/zip.js
@@ -3,7 +3,7 @@ import fs from 'fs';
 import zlib from 'zlib';
 import klaw from 'klaw';
 import { promisify } from 'util';
-import { pipeline } from 'stream';
+import { pipeline, PassThrough } from 'stream';
 import { debugLog } from "./utils.js";
 
 const pipe = promisify(pipeline);
@@ -26,6 +26,37 @@ async function gzipFile(source, destination) {
     }
 }
 
+export async function createGzipFileStream(source, wrapperSize) {
+    try {
+        const sourceStream = fs.createReadStream(source);
+        const gzipStream = zlib.createGzip();
+        const passThrough = new PassThrough();
+
+        return new Promise((resolve, reject) => {
+            gzipStream.on('data', (chunk) => {
+                wrapperSize.value += chunk.length;
+            });
+            gzipStream.on('error', reject);
+
+            let compressedChunks = [];
+            passThrough.on('data', (chunk) => {
+                compressedChunks.push(chunk);
+            });
+
+            passThrough.on('end', () => {
+                const compressedData = Buffer.concat(compressedChunks);
+                resolve(compressedData);
+            });
+
+            sourceStream.pipe(gzipStream);
+            gzipStream.pipe(passThrough);
+        });
+    } catch (error) {
+        console.error(`Error archiving file ${source}:`, error);
+    }
+    return null;
+}
+
 /**
  * Recursively compress all .dll files in a directory
  * @param {string} dirPath - Path to the directory
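Note: despite its name, createGzipFileStream resolves to a Buffer rather than a stream (patch 3 renames it accordingly). For files that fit in memory, the manual stream plumbing can also be replaced with Node's callback API; a minimal sketch under that assumption:

    import fs from 'fs';
    import zlib from 'zlib';
    import { promisify } from 'util';

    const gzip = promisify(zlib.gzip);

    // Compress a whole file in memory; the compressed size is then
    // simply buffer.length, with no wrapperSize bookkeeping needed.
    async function gzipToBuffer(source) {
        const data = await fs.promises.readFile(source);
        return gzip(data);
    }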
From 0286d674e280b88a2ce9cabac4a03067f0d9cf96 Mon Sep 17 00:00:00 2001
From: libragliese581
Date: Thu, 3 Oct 2024 14:43:23 +0300
Subject: [PATCH 3/6] Add per-file metadata to update.json

---
 s3upload.js | 38 +++++++++++++++++++++++++++-----------
 zip.js      |  5 +----
 2 files changed, 28 insertions(+), 15 deletions(-)

diff --git a/s3upload.js b/s3upload.js
index 314fcc2..67bd99b 100644
--- a/s3upload.js
+++ b/s3upload.js
@@ -5,7 +5,7 @@ import { PromisePool } from '@supercharge/promise-pool';
 import path from 'path';
 import fs from 'fs';
 import {debugLog as debugLog_, hashFile, walk} from "./utils.js";
-import { createGzipFileStream } from "./zip.js";
+import { createGzipFileBuffer } from "./zip.js";
 
 const debugLog = (...args) => debugLog_('S3', ...args);
 const AWS_KEY_ID = process.env['AWS_KEY_ID'];
@@ -48,7 +48,7 @@ async function _upload(data, cdnPath, contentType) {
     }
 }
 
-async function uploadFile(filePath, cdnPath, attempt = 0) {
+async function uploadFile(filePath, cdnPath, attempt = 0, gzipBuffer = {}) {
     debugLog('Upload file', filePath, 'to', cdnPath, 'attempt', attempt);
     try {
         const size = (await fs.promises.stat(filePath)).size;
@@ -63,7 +63,7 @@ async function uploadFile(filePath, cdnPath, attempt = 0) {
 
         if (+head.ContentLength === size) {
             if (path.extname(filePath) === ".dll") {
-                uploadGzipFile(filePath, cdnPath);
+                uploadGzipFile(filePath, cdnPath, 0, gzipBuffer);
             }
             return true;
         }
@@ -83,18 +83,16 @@ async function uploadFile(filePath, cdnPath, attempt = 0) {
     return false;
 }
 
-async function uploadGzipFile(filePath, cdnPath, attempt = 0) {
+async function uploadGzipFile(filePath, cdnPath, attempt = 0, gzipBuffer = {}) {
     debugLog('Upload gzip file', filePath, 'to', cdnPath, 'attempt', attempt);
 
     const cdnPathGz = cdnPath + '.gz';
     const filePathGz = filePath + '.gz';
 
     try {
-        let wrapperSize = { value: 0 };
-        const gzipFileStream = await createGzipFileStream(filePath, wrapperSize);
-        const sizeGz = wrapperSize.value;
+        const sizeGz = gzipBuffer.length;
         const contentType = 'application/gzip';
-        if (await _upload(gzipFileStream, cdnPathGz, contentType)) {
+        if (await _upload(gzipBuffer, cdnPathGz, contentType)) {
             console.log(`Uploaded '${filePathGz}' to '${cdnPathGz}'`);
 
             const head = await s3.send(new HeadObjectCommand({
@@ -134,7 +134,8 @@ async function uploadDir(dirPath, cdnPath, version, sdkVersion) {
 
     console.log(files);
 
+    let filesInfosForServer = [];
     for (let i = 0; i < files.length; ++i) {
         const file = files[i];
         const stats = fs.statSync(file);
 
@@ -144,8 +145,17 @@ async function uploadDir(dirPath, cdnPath, version, sdkVersion) {
             const key = (cdnPath.length > 0 ? (cdnPath + '/') : '') + filePath;
 
             hashes[filePath] = await hashFile(file);
             sizes[filePath] = stats.size;
 
-            uploadQueue.push({ file, key });
+            let gzipBuffer = await createGzipFileBuffer(file);
+            uploadQueue.push({ file, key, gzipBuffer });
+
+            let fileInfo = {
+                path: filePath,
+                size: stats.size,
+                sha1: hashes[filePath],
+                encoding: {},
+            };
+            fileInfo.encoding["gz"] = gzipBuffer.length;
+            filesInfosForServer.push(fileInfo);
         }
     }
@@ -157,7 +167,7 @@ async function uploadDir(dirPath, cdnPath, version, sdkVersion) {
     console.log(uploadQueue);
 
     const { results, errors } = await PromisePool.for(uploadQueue).withConcurrency(10).process(async queueItem => {
-        return await uploadFile(queueItem.file, queueItem.key);
+        return await uploadFile(queueItem.file, queueItem.key, 0, queueItem.gzipBuffer);
     });
 
     for (let i = 0; i < results.length; ++i) {
@@ -167,6 +177,7 @@ async function uploadDir(dirPath, cdnPath, version, sdkVersion) {
         const updateData = JSON.stringify({
             version: version,
             sdkVersion: sdkVersion || undefined,
+            files: filesInfosForServer,
             hashList: hashes,
             sizeList: sizes
         });
diff --git a/zip.js b/zip.js
index 93b37d3..d151bce 100644
--- a/zip.js
+++ b/zip.js
@@ -26,16 +26,13 @@ async function gzipFile(source, destination) {
     }
 }
 
-export async function createGzipFileStream(source, wrapperSize) {
+export async function createGzipFileBuffer(source) {
     try {
         const sourceStream = fs.createReadStream(source);
         const gzipStream = zlib.createGzip();
         const passThrough = new PassThrough();
 
         return new Promise((resolve, reject) => {
-            gzipStream.on('data', (chunk) => {
-                wrapperSize.value += chunk.length;
-            });
             gzipStream.on('error', reject);
 
             let compressedChunks = [];
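Note: with the files array added above, update.json now has roughly the following shape (all values are placeholders, not real build output):

    {
        "version": "1.0.0",
        "sdkVersion": "...",
        "files": [
            {
                "path": "libce2.dll",
                "size": 1048576,
                "sha1": "...",
                "encoding": { "gz": 393216 }
            }
        ],
        "hashList": { "libce2.dll": "..." },
        "sizeList": { "libce2.dll": 1048576 }
    }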
From d49fa4d4952afbe1b5b8cde81ea1c03407b4e64e Mon Sep 17 00:00:00 2001
From: libragliese581
Date: Fri, 4 Oct 2024 13:52:28 +0300
Subject: [PATCH 4/6] Update zip.js

---
 zip.js | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/zip.js b/zip.js
index d151bce..2972fc2 100644
--- a/zip.js
+++ b/zip.js
@@ -26,6 +26,10 @@ async function gzipFile(source, destination) {
     }
 }
 
+/**
+ * Compress a file with gzip in memory, without writing a .gz file to disk
+ * @param {string} source - Path to the source file
+ */
 export async function createGzipFileBuffer(source) {
     try {
         const sourceStream = fs.createReadStream(source);
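Note: a quick way to sanity-check createGzipFileBuffer is a round trip through zlib; a minimal sketch (the input path is made up for the example):

    import fs from 'fs';
    import zlib from 'zlib';
    import { createGzipFileBuffer } from './zip.js';

    const source = './libce2.dll'; // hypothetical test input
    const original = fs.readFileSync(source);
    const compressed = await createGzipFileBuffer(source);

    // gunzipSync must reproduce the original bytes exactly.
    console.log('round-trip ok:', zlib.gunzipSync(compressed).equals(original));
    console.log('gz size:', compressed.length, 'raw size:', original.length);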
From c3a39bb8087e607f1207d59d55023393717d5d61 Mon Sep 17 00:00:00 2001
From: libragliese581
Date: Mon, 21 Oct 2024 14:03:34 +0300
Subject: [PATCH 5/6] Refactor file filtering for compression

- Removed old code
- Added a function that filters files by extension for compression
---
 s3upload.js | 15 +++++++++++--
 zip.js      | 61 -----------------------------------------------------
 2 files changed, 13 insertions(+), 63 deletions(-)

diff --git a/s3upload.js b/s3upload.js
index 67bd99b..5881aee 100644
--- a/s3upload.js
+++ b/s3upload.js
@@ -12,6 +12,7 @@ const AWS_KEY_ID = process.env['AWS_KEY_ID'];
 const SECRET_ACCESS_KEY = process.env['AWS_SECRET_ACCESS_KEY'];
 const BUCKET = process.env['AWS_BUCKET'];
 const ENDPOINT = process.env['AWS_ENDPOINT'];
+const compressionExtensions = [".dll"];
 
 const s3 = new S3({
     credentials: {
@@ -49,6 +50,16 @@ async function _upload(data, cdnPath, contentType) {
     }
 }
 
+/**
+ * Check whether a file's extension is in the list of allowed extensions
+ * @param {string} filePath - Path to the source file
+ * @param {string[]} allowedExtensions - Allowed extensions, e.g. [".dll"]
+ */
+async function filterFilesByExtension(filePath, allowedExtensions) {
+    const fileExtension = path.extname(filePath).toLowerCase();
+    return allowedExtensions.includes(fileExtension);
+}
+
 async function uploadFile(filePath, cdnPath, attempt = 0, gzipBuffer = {}) {
     debugLog('Upload file', filePath, 'to', cdnPath, 'attempt', attempt);
     try {
@@ -72,7 +83,7 @@ async function uploadFile(filePath, cdnPath, attempt = 0, gzipBuffer = {}) {
             Key: cdnPath
         }));
 
         if (+head.ContentLength === size) {
-            if (path.extname(filePath) === ".dll") {
+            if (filterFilesByExtension(filePath, compressionExtensions)) {
                 uploadGzipFile(filePath, cdnPath, 0, gzipBuffer);
             }
             return true;
diff --git a/zip.js b/zip.js
index 2972fc2..2c710fa 100644
--- a/zip.js
+++ b/zip.js
@@ -1,7 +1,5 @@
-import path from 'path';
 import fs from 'fs';
 import zlib from 'zlib';
-import klaw from 'klaw';
 import { promisify } from 'util';
 import { pipeline, PassThrough } from 'stream';
 import { debugLog } from "./utils.js";
@@ -59,62 +57,3 @@ export async function createGzipFileBuffer(source) {
     }
     return null;
 }
-
-/**
- * Recursively compress all .dll files in a directory
- * @param {string} dirPath - Path to the directory
- */
-async function gzipDirectoryRecursively(dirPath) {
-    return new Promise((resolve, reject) => {
-        const tasks = [];
-        klaw(dirPath)
-            .on('data', item => {
-                if (!item.stats.isFile() || path.extname(item.path) !== ".dll") {
-                    return;
-                }
-                const filePath = item.path;
-                const gzipDestination = `${filePath}.gz`;
-                tasks.push(async () => {
-                    debugLog(`Start archiving file: ${filePath}`);
-                    await gzipFile(filePath, gzipDestination);
-                });
-            })
-            .on('end', async () => {
-                try {
-                    for (const task of tasks) {
-                        await task();
-                    }
-                    debugLog('Recursive archiving of all files is complete');
-                    resolve();
-                } catch (err) {
-                    reject(err);
-                }
-            })
-            .on('error', (err, item) => {
-                console.error(`Error while walking directory ${item.path}:`, err);
-                reject(err);
-            });
-    });
-}
-
-/**
- * Compress DLL files
- * @param {string} filePath - Path to a file or directory
- */
-export function compressDllFiles(filePath) {
-    let gzipTask;
-    try {
-        const stats = fs.statSync(filePath);
-        if (stats.isFile() && path.extname(filePath) === ".dll") {
-            const gzipDestination = `${filePath}.gz`;
-            debugLog(`Start file gzip process: ${filePath}`);
-            gzipTask = gzipFile(filePath, gzipDestination);
-        } else if (stats.isDirectory()) {
-            debugLog(`Start dir gzip process: ${filePath}`);
-            gzipTask = gzipDirectoryRecursively(filePath);
-        }
-    } catch (error) {
-        console.error(`Error processing gzip: ${error.message}`);
-    }
-    return gzipTask;
-}
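Note: filterFilesByExtension is declared async in this patch, but its callers never await it, so the returned Promise is always truthy and every file would pass the filter; patch 6 below fixes this by making the function synchronous. The pitfall in isolation:

    async function isDll(filePath) {
        return filePath.endsWith('.dll');
    }

    // An async function returns a Promise, and any Promise object is truthy,
    // so this branch runs even though isDll resolves to false.
    if (isDll('readme.txt')) {
        console.log('would gzip readme.txt'); // always prints
    }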
From b0d3f88bf20687fd64d454382806af0a46bfdaa9 Mon Sep 17 00:00:00 2001
From: libragliese581
Date: Mon, 21 Oct 2024 15:38:15 +0300
Subject: [PATCH 6/6] Fix gzip file handling

- Fixed which files get compressed and how update.json is generated
---
 s3upload.js | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/s3upload.js b/s3upload.js
index 5881aee..0a7f70c 100644
--- a/s3upload.js
+++ b/s3upload.js
@@ -54,7 +54,7 @@ async function _upload(data, cdnPath, contentType) {
  * @param {string} filePath - Path to the source file
  * @param {string[]} allowedExtensions - Allowed extensions, e.g. [".dll"]
  */
-async function filterFilesByExtension(filePath, allowedExtensions) {
+function filterFilesByExtension(filePath, allowedExtensions) {
     const fileExtension = path.extname(filePath).toLowerCase();
     return allowedExtensions.includes(fileExtension);
 }
@@ -157,16 +157,20 @@ async function uploadDir(dirPath, cdnPath, version, sdkVersion) {
             hashes[filePath] = await hashFile(file);
             sizes[filePath] = stats.size;
 
-            let gzipBuffer = await createGzipFileBuffer(file);
-            uploadQueue.push({ file, key, gzipBuffer });
-
             let fileInfo = {
                 path: filePath,
                 size: stats.size,
                 sha1: hashes[filePath],
                 encoding: {},
             };
-            fileInfo.encoding["gz"] = gzipBuffer.length;
+
+            let gzipBuffer = {};
+            if (filterFilesByExtension(filePath, compressionExtensions)) {
+                gzipBuffer = await createGzipFileBuffer(file);
+                fileInfo.encoding["gz"] = gzipBuffer.length;
+            }
+            uploadQueue.push({ file, key, gzipBuffer });
+
             filesInfosForServer.push(fileInfo);
         }
     }
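Note: taken together, the series lets a consumer of update.json prefer the gzipped artifact when one is advertised; a minimal sketch of that client-side decision (field names follow the fileInfo structure above, the base URL handling is assumed):

    // Pick the download variant for one entry of update.json's files array.
    function pickDownload(fileInfo, baseUrl) {
        const gzSize = fileInfo.encoding && fileInfo.encoding.gz;
        return gzSize !== undefined
            ? { url: `${baseUrl}/${fileInfo.path}.gz`, size: gzSize, gzip: true }
            : { url: `${baseUrl}/${fileInfo.path}`, size: fileInfo.size, gzip: false };
    }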