Merge pull request #9 from vladaviedov/caching
Caching
vladaviedov authored Jan 4, 2023
2 parents 6b00993 + 98d5a2c commit a9051d2
Showing 7 changed files with 103 additions and 14 deletions.
4 changes: 4 additions & 0 deletions .gitignore
@@ -1,3 +1,7 @@
# Release
node_modules/

# Dev
.vscode/
*.svg
data.json
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "@vladaviedov/gh-lang-stats",
"version": "0.4.0",
"version": "0.5.0",
"description": "Github Lang Stats",
"homepage": "https://github.com/vladaviedov/gh-lang-stats",
"bugs": "https://github.com/vladaviedov/gh-lang-stats/issues",
25 changes: 25 additions & 0 deletions src/cache.js
@@ -0,0 +1,25 @@
import { readFileSync, writeFileSync, existsSync } from "fs";
import { config } from "./config.js";

export const retrieveStorage = () => {
	// Check if store exists
	if (!existsSync(config.storeFile)) {
		return null;
	}

	// Read from store
	const rawData = readFileSync(config.storeFile, { encoding: "utf-8" });
	return JSON.parse(rawData);
};

export const updateStorage = data => {
	// Wrapper to store timestamp
	const dataStorage = {
		analysis: data,
		timestamp: new Date().toISOString()
	};

	// Write to store
	const jsonData = JSON.stringify(dataStorage);
	writeFileSync(config.storeFile, jsonData, { encoding: "utf-8" });
};
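
For reference, a minimal usage sketch of the new module (not part of this commit; the `{ Total: 0 }` payload is a placeholder, not the real analysis shape):

// Hypothetical usage: how the two helpers round-trip the store file.
import { retrieveStorage, updateStorage } from "./cache.js";

const cached = retrieveStorage();
if (cached === null) {
	// First run: no store file yet, so a full scan is needed before writing.
	updateStorage({ Total: 0 });
} else {
	// Later runs: cached.analysis holds the merged stats and cached.timestamp
	// (written by updateStorage) marks where the next incremental scan starts.
	console.log(cached.timestamp, cached.analysis);
}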
3 changes: 2 additions & 1 deletion src/config.js
@@ -2,5 +2,6 @@ export const config = {
	token: process.env.TOKEN,
	maxConcur: process.env.MAX_CONCUR ?? 5,
	inputFile: process.env.INPUT_FILE ?? "template.svg",
	outputFile: process.env.OUTPUT_FILE ?? "generated.svg"
	outputFile: process.env.OUTPUT_FILE ?? "generated.svg",
	storeFile: process.env.STORE_FILE ?? "data.json"
};
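
The new `storeFile` entry can be redirected through the environment. A hypothetical override (path and wrapper are illustrative, assuming ESM top-level await):

// Point the store somewhere else before config.js is loaded.
process.env.STORE_FILE = ".cache/lang-stats.json";

const { config } = await import("./config.js");
console.log(config.storeFile); // ".cache/lang-stats.json"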
32 changes: 27 additions & 5 deletions src/github-api.js
@@ -22,17 +22,30 @@ export const qlUserId = async client => {
 * @returns Array of repositories with commits
 */
export const qlFullList = async (client, id) => {
	return qlListFrom(client, id, new Date(0).toISOString());
};

/**
 * Scan user contributions since a given date
 * @param {Octokit} client
 * @param {string} id
 * @param {string} timestamp ISO-8601 timestamp string
 * @returns Array of repositories with commits
 */
export const qlListFrom = async (client, id, timestamp) => {
	try {
		// Initial scan
		const scan = (await client.graphql(queryScan, {
			id: id
			id: id,
			since: timestamp.toString()
		})).viewer.repositoriesContributedTo;

		// Get all pages of repos
		let pageInfo = scan.pageInfo;
		while (pageInfo.hasNextPage) {
			const scanNext = (await client.graphql(queryScanNext, {
				id: id,
				since: timestamp.toString(),
				after: pageInfo.endCursor
			})).viewer.repositoriesContributedTo;
			pageInfo = scanNext.pageInfo;
@@ -122,6 +135,7 @@ const handleHttpErr = err => {
	if (err.status == 401) {
		console.error("Request is unauthorized.");
	} else {
		console.error(err);
		console.error("An unknown error has occurred.");
	}

@@ -134,7 +148,7 @@ const queryUserId = `{
	viewer { id }
}`;

const queryScan = `query ($id: ID) {
const queryScan = `query ($id: ID, $since: GitTimestamp) {
	viewer {
		repositoriesContributedTo(
			first: 100
@@ -148,7 +162,11 @@ const queryScan = `query ($id: ID) {
				defaultBranchRef {
					target {
						... on Commit {
							history(first: 100, author: {id: $id}) {
							history(
								first: 100,
								author: {id: $id},
								since: $since
							) {
								nodes { oid }
								pageInfo {
									endCursor
@@ -177,7 +195,7 @@ const queryScan = `query ($id: ID) {
	}
}`;

const queryScanNext = `query ($id: ID, $after: String) {
const queryScanNext = `query ($id: ID, $since: GitTimestamp, $after: String) {
	viewer {
		repositoriesContributedTo(
			first: 100
@@ -192,7 +210,11 @@ const queryScanNext = `query ($id: ID, $after: String) {
				defaultBranchRef {
					target {
						... on Commit {
							history(first: 100, author: {id: $id}) {
							history(
								first: 100,
								since: $since,
								author: {id: $id}
							) {
								nodes { oid }
								pageInfo {
									endCursor
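
Both queries now take a `$since` GitTimestamp that scopes each repository's commit `history` to the cached window, so the same scan code serves full and incremental runs. A rough sketch of the two entry points (not from the repo; the throttling plugin from main.js is omitted and the timestamp is made up):

import { Octokit } from "octokit";
import { qlUserId, qlFullList, qlListFrom } from "./github-api.js";

const octokit = new Octokit({ auth: process.env.TOKEN });
const id = await qlUserId(octokit);

// First run: scan from the epoch, i.e. every contributed repository in full.
const everything = await qlFullList(octokit, id);

// Later runs: per-repository history is limited to commits since the cached ISO timestamp.
const recent = await qlListFrom(octokit, id, "2023-01-01T00:00:00.000Z");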
3 changes: 2 additions & 1 deletion src/load-commits.js
@@ -42,7 +42,8 @@ const processQueue = async () => {
	const workers = [];

	// Start initial workers
	for (let i = 0; i < config.maxConcur; i++) {
	const workerCount = Math.min(config.maxConcur, queue.length);
	for (let i = 0; i < workerCount; i++) {
		workers.push(worker(queue.pop(), i));
	}

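
The `Math.min` cap matters for cached runs, where the incremental list can be shorter than `MAX_CONCUR`; without it, the loop would start extra workers whose `queue.pop()` returns `undefined`. A tiny illustration (values invented):

const queue = ["vladaviedov/gh-lang-stats"]; // incremental scans may queue very little work
const maxConcur = 5;                         // default MAX_CONCUR

// Old loop bound: maxConcur, so 4 of the 5 workers would receive undefined jobs.
// New loop bound:
const workerCount = Math.min(maxConcur, queue.length);
console.log(workerCount); // 1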
48 changes: 42 additions & 6 deletions src/main.js
@@ -3,9 +3,10 @@ import { Octokit } from "octokit";
import { throttling } from "@octokit/plugin-throttling";
import { analyzeData } from "./analyze.js";
import { fillTemplate } from "./fill-template.js";
import { qlUserId, qlFullList } from "./github-api.js";
import { qlUserId, qlFullList, qlListFrom } from "./github-api.js";
import { loadCommits } from "./load-commits.js";
import { config } from "./config.js";
import { retrieveStorage, updateStorage } from "./cache.js";

const OctokitPlug = Octokit.plugin(throttling);
const octokit = new OctokitPlug({
@@ -22,8 +23,43 @@ const octokit = new OctokitPlug({
	}
});

qlUserId(octokit)
	.then(id => qlFullList(octokit, id))
	.then(list => loadCommits(octokit, list))
	.then(analyzeData)
	.then(fillTemplate);
const combineData = (oldData, newData) => {
	Object.keys(newData).forEach(lang => {
		if (lang == "Total") {
			oldData.Total += newData.Total;
			return;
		}

		if (oldData[lang]) {
			oldData[lang].changes += newData[lang].changes;
		} else {
			oldData[lang] = newData[lang];
		}
	});

	return oldData;
};

const main = async () => {
	const userId = await qlUserId(octokit);

	const dataStorage = retrieveStorage();
	let analysis;

	if (dataStorage == null) {
		const list = await qlFullList(octokit, userId);
		const commits = await loadCommits(octokit, list);
		analysis = await analyzeData(commits);
	} else {
		const newList = await qlListFrom(octokit, userId, dataStorage.timestamp);
		const newCommits = await loadCommits(octokit, newList);
		const newAnalysis = await analyzeData(newCommits);

		analysis = combineData(dataStorage.analysis, newAnalysis);
	}

	updateStorage(analysis);
	fillTemplate(analysis);
};

main();
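
To make the merge step concrete, a small worked example of what `combineData` produces (language names and counts are invented):

// Cached analysis from the store file vs. analysis of only the new commits.
const cached = { Total: 100, C: { changes: 60 }, Rust: { changes: 40 } };
const fresh = { Total: 25, Rust: { changes: 10 }, Zig: { changes: 15 } };

// combineData(cached, fresh) mutates and returns cached:
// { Total: 125, C: { changes: 60 }, Rust: { changes: 50 }, Zig: { changes: 15 } }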
