ALTV-415 [client] Archive libce2.dll and other DLLs and then extract them on startup #1

Open · wants to merge 6 commits into base: main · Changes from 5 commits
1 change: 1 addition & 0 deletions cli.js
@@ -4,6 +4,7 @@ import path from 'path';
import { s3upload, purgeCache } from './index.js';
import { debugLog } from "./utils.js";


async function start() {
if (process.argv.length < 3) {
console.log('USAGE: alt-upload source_path [cdn_path] [version] [sdk_version]');
80 changes: 74 additions & 6 deletions s3upload.js
@@ -5,12 +5,14 @@ import { PromisePool } from '@supercharge/promise-pool';
import path from 'path';
import fs from 'fs';
import {debugLog as debugLog_, hashFile, walk} from "./utils.js";
import { createGzipFileBuffer } from "./zip.js";
const debugLog = (...args) => debugLog_('S3', ...args);

const AWS_KEY_ID = process.env['AWS_KEY_ID'];
const SECRET_ACCESS_KEY = process.env['AWS_SECRET_ACCESS_KEY'];
const BUCKET = process.env['AWS_BUCKET'];
const ENDPOINT = process.env['AWS_ENDPOINT'];
const compressionExtensions = [".dll"];

const s3 = new S3({
credentials: {
@@ -47,7 +49,17 @@ async function _upload(data, cdnPath, contentType) {
}
}

async function uploadFile(filePath, cdnPath, attempt = 0) {
/**
* Check whether a file's extension is in the list of allowed extensions
* @param {string} filePath - Path to the source file
* @param {string[]} allowedExtensions - Allowed extensions (lowercase, including the leading dot)
*/
function filterFilesByExtension(filePath, allowedExtensions) {
const fileExtension = path.extname(filePath).toLowerCase();
return allowedExtensions.includes(fileExtension);
}

async function uploadFile(filePath, cdnPath, attempt = 0, gzipBuffer = {}) {
debugLog('Upload file', filePath, 'to', cdnPath, 'attempt', attempt);
try {
const size = (await fs.promises.stat(filePath)).size;
@@ -60,7 +72,10 @@ async function uploadFile(filePath, cdnPath, attempt = 0) {
Key: cdnPath
}));

if (+head.ContentLength === size) {
if (filterFilesByExtension(filePath, compressionExtensions)) {
await uploadGzipFile(filePath, cdnPath, 0, gzipBuffer);
}
return true;
}

@@ -79,21 +94,60 @@ async function uploadFile(filePath, cdnPath, attempt = 0) {
return false;
}

async function uploadGzipFile(filePath, cdnPath, attempt = 0, gzipBuffer = {}) {
debugLog('Upload gzip file', filePath, 'to', cdnPath, 'attempt', attempt);

const cdnPathGz = cdnPath + '.gz';
const filePathGz = filePath + '.gz';

try {
const sizeGz = gzipBuffer.length;
const contentType = 'application/gzip';
if (await _upload(gzipBuffer, cdnPathGz, contentType)) {
console.log(`Uploaded '${filePathGz}' to '${cdnPathGz}'`);

const head = await s3.send(new HeadObjectCommand({
Bucket: BUCKET,
Key: cdnPathGz
}));

if (+head.ContentLength === sizeGz) {
return true;
}

console.error(`Uploaded gzip file size doesn't match. Local size: ${sizeGz}. Uploaded size: ${head.ContentLength}`);
}
console.error(`Error uploading '${filePathGz}' to '${cdnPathGz}'`);
} catch(e) {
console.error(e);
}

if (attempt < 3) {
return uploadGzipFile(filePath, cdnPath, attempt + 1, gzipBuffer);
}

console.log('Failed to upload', filePathGz, 'to', cdnPathGz, 'after 3 attempts');
return false;
}

async function uploadDir(dirPath, cdnPath, version, sdkVersion) {
dirPath = path.resolve(dirPath);
debugLog('Upload dir', dirPath, 'to', cdnPath, version, sdkVersion);

const files = await walk(dirPath);
const hashes = { };
const sizes = { };
const gzipBuffer = { };

let result = true;

const uploadQueue = [];

console.log(files);

let filesInfosForServer = [];
for (let i = 0; i < files.length; ++i) {

const file = files[i];

const stats = fs.statSync(file);
@@ -102,16 +156,30 @@ async function uploadDir(dirPath, cdnPath, version, sdkVersion) {
const key = (cdnPath.length > 0 ? (cdnPath + '/') : '') + filePath;

hashes[filePath] = await hashFile(file);
sizes[filePath] = stats.size;
let gzipBuffer = await createGzipFileBuffer(file);

uploadQueue.push({ file, key, gzipBuffer });

let fileInfo = {
path: "",
size: 0,
sha1: "",
encoding: {},
}

uploadQueue.push({ file, key });
fileInfo.path = filePath;
fileInfo.size = stats.size;
fileInfo.sha1 = hashes[filePath];
fileInfo.encoding["gz"] = gzipBuffer.length;
Contributor:

Shouldn't it write that only for files that will actually be uploaded compressed?

Author:

Fixed handling of non-DLL files.
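As a minimal sketch of the gating discussed above (illustrative only; it assumes a synchronous filterFilesByExtension and may differ from the actual follow-up commit):

// Only record a gz size for files that are actually uploaded compressed.
if (filterFilesByExtension(file, compressionExtensions)) {
  fileInfo.encoding["gz"] = gzipBuffer.length;
}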

filesInfosForServer.push(fileInfo);
}
}

console.log(uploadQueue);

const { results, errors } = await PromisePool.for(uploadQueue).withConcurrency(10).process(async queueItem => {
return await uploadFile(queueItem.file, queueItem.key);
return await uploadFile(queueItem.file, queueItem.key, 0, queueItem.gzipBuffer);
});

for (let i = 0; i < results.length; ++i) {
@@ -123,9 +191,9 @@ async function uploadDir(dirPath, cdnPath, version, sdkVersion) {
if (version) {
debugLog('Generate update.json', version);
const updateData = JSON.stringify({
latestBuildNumber: -1,
version: version,
sdkVersion: sdkVersion || undefined,
files: filesInfosForServer,
hashList: hashes,
sizeList: sizes
});
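For reference, the update.json generated by this change would have the shape below; the file name, sizes, and SHA-1 are placeholder values for illustration, not output from a real build:

{
  "latestBuildNumber": -1,
  "version": "15.0",
  "sdkVersion": "abc1234",
  "files": [
    { "path": "libce2.dll", "size": 1048576, "sha1": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "encoding": { "gz": 393216 } }
  ],
  "hashList": { "libce2.dll": "da39a3ee5e6b4b0d3255bfef95601890afd80709" },
  "sizeList": { "libce2.dll": 1048576 }
}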
59 changes: 59 additions & 0 deletions zip.js
@@ -0,0 +1,59 @@
import fs from 'fs';
import zlib from 'zlib';
import { promisify } from 'util';
import { pipeline, PassThrough } from 'stream';
import { debugLog } from "./utils.js";

const pipe = promisify(pipeline);

/**
* Compress a file with gzip and write the result to disk
* @param {string} source - Path to the source file
* @param {string} destination - Path to the compressed file
*/
async function gzipFile(source, destination) {
try {
const sourceStream = fs.createReadStream(source);
const gzipStream = zlib.createGzip();
const destinationStream = fs.createWriteStream(destination);

await pipe(sourceStream, gzipStream, destinationStream);
debugLog(`Archiving completed: ${destination}`);
} catch (error) {
console.error(`Error archiving a file ${source}:`, error);
}
}

/**
* Compress a file with gzip in memory and return the compressed data as a Buffer
* @param {string} source - Path to the source file
*/
export async function createGzipFileBuffer(source) {
try {
const sourceStream = fs.createReadStream(source);
const gzipStream = zlib.createGzip();
const passThrough = new PassThrough();

return await new Promise((resolve, reject) => {
sourceStream.on('error', reject);
gzipStream.on('error', reject);

const compressedChunks = [];
passThrough.on('data', (chunk) => {
compressedChunks.push(chunk);
});

passThrough.on('end', () => {
const compressedData = Buffer.concat(compressedChunks);
resolve(compressedData);
});

sourceStream.pipe(gzipStream);
gzipStream.pipe(passThrough);
});
} catch (error) {
console.error(`Error compressing file ${source}:`, error);
return null;
}
}
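The client-side half of the title ("extract on startup") is not part of this diff. As a rough sketch, assuming the uploaded artifacts are plain gzip streams as produced by createGzipFileBuffer above, extraction could look like this (hypothetical helper, not code from this PR):

import fs from 'fs';
import zlib from 'zlib';

// Hypothetical startup step: restore the original DLL from a downloaded .gz artifact.
export async function extractGzipFile(sourceGz, destination) {
  const compressed = await fs.promises.readFile(sourceGz);
  const data = zlib.gunzipSync(compressed); // plain gzip stream, so gunzip is enough
  await fs.promises.writeFile(destination, data);
}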