From 402126dea40f2f68a9fd1804df1bd435c5ca339a Mon Sep 17 00:00:00 2001
From: Daniel Bachler
Date: Sat, 16 Mar 2024 21:06:42 +0100
Subject: [PATCH] =?UTF-8?q?=E2=9C=A8=20unify=20transaction=20creation=20an?=
 =?UTF-8?q?d=20close=20db=20connection=20in=20same=20call=20for=20scripts?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
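
Scripts previously had to pair their final transaction with a manual call to
db.closeTypeOrmAndKnexConnections(), which was easy to forget and left the
process hanging on an open connection pool. The transaction helpers now take a
TransactionCloseMode argument, so opening the transaction and tearing down the
connection happen in the same call. Long-running processes keep the default
TransactionCloseMode.KeepOpen; one-shot scripts pass
TransactionCloseMode.Close. A sketch of the new calling convention (the script
body below is illustrative, not part of this diff):

    import * as db from "../db/db.js"

    const main = async (): Promise<void> => {
        // Close mode destroys the shared knex pool once the transaction
        // settles, whether it committed, rolled back, or threw.
        const rows = await db.knexReadonlyTransaction(
            (trx) =>
                db.knexRaw<{ n: number }>(
                    trx,
                    "select count(*) as n from charts",
                    []
                ),
            db.TransactionCloseMode.Close
        )
        console.log(rows[0].n)
    }

    void main()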
---
 adminSiteServer/exportGitData.ts         |   4 +-
 baker/algolia/indexChartsToAlgolia.ts    |   7 +-
 baker/algolia/indexExplorersToAlgolia.ts |   7 +-
 baker/algolia/indexToAlgolia.tsx         |   6 +-
 baker/bakeGdocPost.ts                    |   5 +-
 baker/bakeGdocPosts.ts                   |   5 +-
 baker/batchTagWithGpt.ts                 |   6 +-
 baker/buildLocalBake.ts                  |   5 +-
 baker/postUpdatedHook.ts                 |   5 +-
 baker/recalcLatestCountryData.ts         |   6 +-
 baker/runBakeGraphers.ts                 |  12 +-
 baker/startDeployQueueServer.ts          |   5 +-
 baker/syncRedirectsToGrapher.ts          |   6 +-
 db/analyzeWpPosts.ts                     |   4 +-
 db/db.ts                                 |  44 ++++-
 db/migrateWpPostsToArchieMl.ts           |   5 +-
 db/model/Gdoc/GdocBase.ts                |  10 +-
 db/refreshPageviewsFromDatasette.ts      |   6 +-
 db/syncPostsToGrapher.ts                 |   6 +-
 db/tests/basic.test.ts                   | 236 +++++++++++++----------
 devTools/markdownTest/markdown.ts        |   9 +-
 devTools/svgTester/dump-chart-ids.ts     |   9 +-
 devTools/svgTester/dump-data.ts          |  11 +-
 23 files changed, 233 insertions(+), 186 deletions(-)

diff --git a/adminSiteServer/exportGitData.ts b/adminSiteServer/exportGitData.ts
index 51db8b5f34a..4de26d9d803 100644
--- a/adminSiteServer/exportGitData.ts
+++ b/adminSiteServer/exportGitData.ts
@@ -13,9 +13,7 @@ const main = async () => {
                 commitOnly: true,
             })
         }
-    })
-
-    await db.closeTypeOrmAndKnexConnections()
+    }, db.TransactionCloseMode.Close)
 }
 
 void main()
diff --git a/baker/algolia/indexChartsToAlgolia.ts b/baker/algolia/indexChartsToAlgolia.ts
index cd96d1e5f76..7bbafd1f1bd 100644
--- a/baker/algolia/indexChartsToAlgolia.ts
+++ b/baker/algolia/indexChartsToAlgolia.ts
@@ -131,10 +131,11 @@ const indexChartsToAlgolia = async () => {
 
     const index = client.initIndex(SearchIndexName.Charts)
 
-    const records = await db.knexReadonlyTransaction(getChartsRecords)
+    const records = await db.knexReadonlyTransaction(
+        getChartsRecords,
+        db.TransactionCloseMode.Close
+    )
     await index.replaceAllObjects(records)
-
-    await db.closeTypeOrmAndKnexConnections()
 }
 
 process.on("unhandledRejection", (e) => {
diff --git a/baker/algolia/indexExplorersToAlgolia.ts b/baker/algolia/indexExplorersToAlgolia.ts
index 6ea2aa8b095..31177049141 100644
--- a/baker/algolia/indexExplorersToAlgolia.ts
+++ b/baker/algolia/indexExplorersToAlgolia.ts
@@ -199,10 +199,11 @@ const indexExplorersToAlgolia = async () => {
     try {
         const index = client.initIndex(SearchIndexName.Explorers)
 
-        const records = await db.knexReadonlyTransaction(getExplorerRecords)
+        const records = await db.knexReadonlyTransaction(
+            getExplorerRecords,
+            db.TransactionCloseMode.Close
+        )
         await index.replaceAllObjects(records)
-
-        await db.closeTypeOrmAndKnexConnections()
     } catch (e) {
         console.log("Error indexing explorers to Algolia: ", e)
     }
diff --git a/baker/algolia/indexToAlgolia.tsx b/baker/algolia/indexToAlgolia.tsx
index 40108dfe70d..a4a0495c202 100644
--- a/baker/algolia/indexToAlgolia.tsx
+++ b/baker/algolia/indexToAlgolia.tsx
@@ -233,12 +233,14 @@ const indexToAlgolia = async () => {
     }
 
     const index = client.initIndex(SearchIndexName.Pages)
-    const records = await db.knexReadonlyTransaction(getPagesRecords)
+    const records = await db.knexReadonlyTransaction(
+        getPagesRecords,
+        db.TransactionCloseMode.Close
+    )
     await index.replaceAllObjects(records)
 
     await wpdb.singleton.end()
-    await db.closeTypeOrmAndKnexConnections()
 }
 
 process.on("unhandledRejection", (e) => {
diff --git a/baker/bakeGdocPost.ts b/baker/bakeGdocPost.ts
index 3defc7afbf3..97deb5caac3 100644
--- a/baker/bakeGdocPost.ts
+++ b/baker/bakeGdocPost.ts
@@ -19,8 +19,9 @@ void yargs(hideBin(process.argv))
         async ({ slug }) => {
             const baker = new SiteBaker(BAKED_SITE_DIR, BAKED_BASE_URL)
 
-            await db.knexReadonlyTransaction((trx) =>
-                baker.bakeGDocPosts(trx, [slug])
+            await db.knexReadonlyTransaction(
+                (trx) => baker.bakeGDocPosts(trx, [slug]),
+                db.TransactionCloseMode.Close
             )
             process.exit(0)
         }
diff --git a/baker/bakeGdocPosts.ts b/baker/bakeGdocPosts.ts
index 64de00aa80e..1b1638208ea 100644
--- a/baker/bakeGdocPosts.ts
+++ b/baker/bakeGdocPosts.ts
@@ -24,8 +24,9 @@ void yargs(hideBin(process.argv))
         async ({ slugs }) => {
             const baker = new SiteBaker(BAKED_SITE_DIR, BAKED_BASE_URL)
 
-            await db.knexReadonlyTransaction((trx) =>
-                baker.bakeGDocPosts(trx, slugs)
+            await db.knexReadonlyTransaction(
+                (trx) => baker.bakeGDocPosts(trx, slugs),
+                db.TransactionCloseMode.Close
             )
             process.exit(0)
         }
diff --git a/baker/batchTagWithGpt.ts b/baker/batchTagWithGpt.ts
index d7f80a49aec..bd00abb1493 100644
--- a/baker/batchTagWithGpt.ts
+++ b/baker/batchTagWithGpt.ts
@@ -92,11 +92,11 @@ if (require.main === module) {
         },
         async (argv) => {
             try {
-                await db.knexReadonlyTransaction((trx) =>
-                    batchTagChartsWithGpt(trx, argv)
+                await db.knexReadonlyTransaction(
+                    (trx) => batchTagChartsWithGpt(trx, argv),
+                    db.TransactionCloseMode.Close
                 )
             } finally {
-                await db.closeTypeOrmAndKnexConnections()
             }
         }
     )
diff --git a/baker/buildLocalBake.ts b/baker/buildLocalBake.ts
index 5ecbdcec724..f9da8474aff 100644
--- a/baker/buildLocalBake.ts
+++ b/baker/buildLocalBake.ts
@@ -16,7 +16,10 @@ const bakeDomainToFolder = async (
     await fs.mkdirp(dir)
     const baker = new SiteBaker(dir, baseUrl, bakeSteps)
     console.log(`Baking site locally with baseUrl '${baseUrl}' to dir '${dir}'`)
-    await db.knexReadonlyTransaction((trx) => baker.bakeAll(trx))
+    await db.knexReadonlyTransaction(
+        (trx) => baker.bakeAll(trx),
+        db.TransactionCloseMode.Close
+    )
 }
 
 void yargs(hideBin(process.argv))
diff --git a/baker/postUpdatedHook.ts b/baker/postUpdatedHook.ts
index f381fe6b088..738bf4444a6 100644
--- a/baker/postUpdatedHook.ts
+++ b/baker/postUpdatedHook.ts
@@ -200,8 +200,9 @@ const main = async (
 ) => {
     console.log(email, name, postId)
     try {
-        const slug = db.knexReadWriteTransaction((trx) =>
-            syncPostToGrapher(trx, postId)
+        const slug = db.knexReadWriteTransaction(
+            (trx) => syncPostToGrapher(trx, postId),
+            db.TransactionCloseMode.Close
         )
 
         if (BAKE_ON_CHANGE)
diff --git a/baker/recalcLatestCountryData.ts b/baker/recalcLatestCountryData.ts
index 289958aeb32..592847dc876 100644
--- a/baker/recalcLatestCountryData.ts
+++ b/baker/recalcLatestCountryData.ts
@@ -5,8 +5,10 @@ import * as db from "../db/db.js"
 import { denormalizeLatestCountryData } from "../baker/countryProfiles.js"
 
 const main = async () => {
-    await db.knexReadWriteTransaction(denormalizeLatestCountryData)
-    await db.closeTypeOrmAndKnexConnections()
+    await db.knexReadWriteTransaction(
+        denormalizeLatestCountryData,
+        db.TransactionCloseMode.Close
+    )
 }
 
 if (require.main === module) void main()
diff --git a/baker/runBakeGraphers.ts b/baker/runBakeGraphers.ts
index 7c1fbddb735..02f862ea2e9 100755
--- a/baker/runBakeGraphers.ts
+++ b/baker/runBakeGraphers.ts
@@ -9,11 +9,13 @@ import * as db from "../db/db.js"
  */
 
 const main = async (folder: string) => {
-    return db.knexReadonlyTransaction((trx) =>
-        bakeAllChangedGrapherPagesVariablesPngSvgAndDeleteRemovedGraphers(
-            folder,
-            trx
-        )
+    return db.knexReadonlyTransaction(
+        (trx) =>
+            bakeAllChangedGrapherPagesVariablesPngSvgAndDeleteRemovedGraphers(
+                folder,
+                trx
+            ),
+        db.TransactionCloseMode.Close
     )
 }
diff --git a/baker/startDeployQueueServer.ts b/baker/startDeployQueueServer.ts
index 775f204ab4b..5bc45f755d4 100644
--- a/baker/startDeployQueueServer.ts
+++ b/baker/startDeployQueueServer.ts
@@ -33,7 +33,10 @@ const main = async () => {
         setTimeout(deployIfQueueIsNotEmpty, 10 * 1000)
     })
 
-    await db.knexReadonlyTransaction(deployIfQueueIsNotEmpty)
+    await db.knexReadonlyTransaction(
+        deployIfQueueIsNotEmpty,
+        db.TransactionCloseMode.Close
+    )
 }
 
 void main()
diff --git a/baker/syncRedirectsToGrapher.ts b/baker/syncRedirectsToGrapher.ts
index f27e97bb5ea..28b2fe92ea4 100644
--- a/baker/syncRedirectsToGrapher.ts
+++ b/baker/syncRedirectsToGrapher.ts
@@ -67,10 +67,12 @@ export const syncRedirectsToGrapher = async (
 
 const main = async (): Promise<void> => {
     try {
-        await db.knexReadWriteTransaction((trx) => syncRedirectsToGrapher(trx))
+        await db.knexReadWriteTransaction(
+            (trx) => syncRedirectsToGrapher(trx),
+            db.TransactionCloseMode.Close
+        )
     } finally {
         await wpdb.singleton.end()
-        await db.closeTypeOrmAndKnexConnections()
     }
 }
diff --git a/db/analyzeWpPosts.ts b/db/analyzeWpPosts.ts
index 6a33348476b..3255d62ab83 100644
--- a/db/analyzeWpPosts.ts
+++ b/db/analyzeWpPosts.ts
@@ -65,9 +65,7 @@ const analyze = async (): Promise<void> => {
         for (const [tag, count] of sortedTagCount) {
             console.log(`${tag}: ${count}`)
         }
-    })
-
-    await db.closeTypeOrmAndKnexConnections()
+    }, db.TransactionCloseMode.Close)
 }
 
 void analyze()
diff --git a/db/db.ts b/db/db.ts
index dc06fdd50a8..c91bc5cbdaa 100644
--- a/db/db.ts
+++ b/db/db.ts
@@ -76,10 +76,13 @@ export const mysqlFirst = async (
 
 export const closeTypeOrmAndKnexConnections = async (): Promise<void> => {
     if (typeormDataSource) await typeormDataSource.destroy()
-    if (_knexInstance) await _knexInstance.destroy()
+    if (_knexInstance) {
+        await _knexInstance.destroy()
+        _knexInstance = undefined
+    }
 }
 
-let _knexInstance: Knex
+let _knexInstance: Knex | undefined = undefined
 
 export const knexInstance = (): Knex => {
     if (_knexInstance) return _knexInstance
@@ -119,23 +122,46 @@ export type KnexReadWriteTransaction = Knex.Transaction & {
     readonly [__write_capability]: "write"
 }
 
+export enum TransactionCloseMode {
+    Close,
+    KeepOpen,
+}
+
+async function knexTransaction<T, KT>(
+    transactionFn: (trx: KT) => Promise<T>,
+    closeConnection: TransactionCloseMode,
+    readonly: boolean,
+    knex: Knex
+): Promise<T> {
+    try {
+        const options = readonly ? { readOnly: true } : {}
+        const result = await knex.transaction(
+            async (trx) => transactionFn(trx as KT),
+            options
+        )
+        return result
+    } finally {
+        if (closeConnection === TransactionCloseMode.Close) {
+            await knex.destroy()
+            if (knex === _knexInstance) _knexInstance = undefined
+        }
+    }
+}
+
 export async function knexReadonlyTransaction<T>(
     transactionFn: (trx: KnexReadonlyTransaction) => Promise<T>,
+    closeConnection: TransactionCloseMode = TransactionCloseMode.KeepOpen,
     knex: Knex = knexInstance()
 ): Promise<T> {
-    return knex.transaction(
-        async (trx) => transactionFn(trx as KnexReadonlyTransaction),
-        { readOnly: true }
-    )
+    return knexTransaction(transactionFn, closeConnection, true, knex)
 }
 
 export async function knexReadWriteTransaction<T>(
    transactionFn: (trx: KnexReadWriteTransaction) => Promise<T>,
+    closeConnection: TransactionCloseMode = TransactionCloseMode.KeepOpen,
     knex: Knex = knexInstance()
 ): Promise<T> {
-    return knex.transaction(async (trx) =>
-        transactionFn(trx as KnexReadWriteTransaction)
-    )
+    return knexTransaction(transactionFn, closeConnection, false, knex)
 }
 
 export const knexRaw = async (
     knex: Knex,
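Worth noting about knexTransaction above: knex.destroy() runs in a finally
block, so TransactionCloseMode.Close takes effect even when the transaction
callback throws, and the module-level _knexInstance cache is reset so a later
knexInstance() call can build a fresh pool. A minimal sketch of that error path
(a hypothetical script, not part of this diff):

    import * as db from "./db.js"

    async function demo(): Promise<void> {
        try {
            await db.knexReadWriteTransaction(
                async () => {
                    // Throwing rolls the transaction back...
                    throw new Error("boom")
                },
                db.TransactionCloseMode.Close
            )
        } catch {
            // ...and because Close was requested, the connection pool is
            // already destroyed here; the process can exit without further
            // cleanup.
        }
    }

    void demo()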
diff --git a/db/migrateWpPostsToArchieMl.ts b/db/migrateWpPostsToArchieMl.ts
index d2f7d8e5627..a185b4a2328 100644
--- a/db/migrateWpPostsToArchieMl.ts
+++ b/db/migrateWpPostsToArchieMl.ts
@@ -275,7 +275,7 @@ const migrate = async (trx: db.KnexReadWriteTransaction): Promise<void> => {
         await db.knexRaw(trx, insertQuery, [
             JSON.stringify(archieMlFieldContent, null, 2),
             JSON.stringify(archieMlStatsContent, null, 2),
-            markdown,
+            markdown ?? null,
             post.id,
         ])
         console.log("inserted", post.id)
@@ -314,8 +314,7 @@ const migrate = async (trx: db.KnexReadWriteTransaction): Promise<void> => {
 }
 
 async function runMigrate(): Promise<void> {
-    await db.knexReadWriteTransaction(migrate)
-    await db.closeTypeOrmAndKnexConnections()
+    await db.knexReadWriteTransaction(migrate, db.TransactionCloseMode.Close)
 }
 
 void runMigrate()
diff --git a/db/model/Gdoc/GdocBase.ts b/db/model/Gdoc/GdocBase.ts
index cb5c5405f65..8df88fbda59 100644
--- a/db/model/Gdoc/GdocBase.ts
+++ b/db/model/Gdoc/GdocBase.ts
@@ -744,13 +744,9 @@ export class GdocBase implements OwidGdocBaseInterface {
             []
         )
 
-        const { chartIdsBySlug, publishedExplorersBySlug } =
-            await db.knexReadonlyTransaction(async (trx) => {
-                const chartIdsBySlug = await mapSlugsToIds(trx)
-                const publishedExplorersBySlug =
-                    await db.getPublishedExplorersBySlug(trx)
-                return { chartIdsBySlug, publishedExplorersBySlug }
-            })
+        const chartIdsBySlug = await mapSlugsToIds(knex)
+        const publishedExplorersBySlug =
+            await db.getPublishedExplorersBySlug(knex)
 
         const linkErrors: OwidGdocErrorMessage[] = this.links.reduce(
             (errors: OwidGdocErrorMessage[], link): OwidGdocErrorMessage[] => {
diff --git a/db/refreshPageviewsFromDatasette.ts b/db/refreshPageviewsFromDatasette.ts
index 5ad27d0827a..80b47a2b650 100644
--- a/db/refreshPageviewsFromDatasette.ts
+++ b/db/refreshPageviewsFromDatasette.ts
@@ -70,11 +70,13 @@ async function downloadAndInsertCSV(
 
 const main = async (): Promise<void> => {
     try {
-        await db.knexReadWriteTransaction((trx) => downloadAndInsertCSV(trx))
+        await db.knexReadWriteTransaction(
+            (trx) => downloadAndInsertCSV(trx),
+            db.TransactionCloseMode.Close
+        )
     } catch (e) {
         console.error(e)
     } finally {
-        await db.closeTypeOrmAndKnexConnections()
     }
 }
 
diff --git a/db/syncPostsToGrapher.ts b/db/syncPostsToGrapher.ts
index 2ae32e68c6d..785b337adcf 100644
--- a/db/syncPostsToGrapher.ts
+++ b/db/syncPostsToGrapher.ts
@@ -435,10 +435,12 @@ const syncPostsToGrapher = async (
 
 const main = async (): Promise<void> => {
     try {
-        await db.knexReadWriteTransaction((trx) => syncPostsToGrapher(trx))
+        await db.knexReadWriteTransaction(
+            (trx) => syncPostsToGrapher(trx),
+            db.TransactionCloseMode.Close
+        )
     } finally {
         await wpdb.singleton.end()
-        await db.closeTypeOrmAndKnexConnections()
     }
 }
 
diff --git a/db/tests/basic.test.ts b/db/tests/basic.test.ts
index d9f85680e7c..41fb23ec1ec 100644
--- a/db/tests/basic.test.ts
+++ b/db/tests/basic.test.ts
@@ -9,6 +9,7 @@ import {
     KnexReadWriteTransaction,
     knexRawFirst,
     knexReadonlyTransaction,
+    TransactionCloseMode,
 } from "../db.js"
 import { deleteUser, insertUser, updateUser } from "../model/User.js"
 import {
@@ -65,115 +66,128 @@ function sleep(time: number, value: any): Promise<any> {
 }
 
 test("timestamps are automatically created and updated", async () => {
-    await knexReadWriteTransaction(async (trx) => {
-        const chart: DbInsertChart = {
-            config: "{}",
-            lastEditedAt: new Date(),
-            lastEditedByUserId: 1,
-            is_indexable: 0,
-        }
-        await trx.table(ChartsTableName).insert(chart)
-        const created = await knexRawFirst(
-            trx,
-            "select * from charts where id = 1",
-            []
-        )
-        expect(created).not.toBeNull()
-        if (created) {
-            expect(created.createdAt).not.toBeNull()
-            expect(created.updatedAt).toBeNull()
-            await sleep(1000, undefined)
-            await trx
-                .table(ChartsTableName)
-                .where({ id: 1 })
-                .update({ is_indexable: 1 })
-            const updated = await knexRawFirst(
+    await knexReadWriteTransaction(
+        async (trx) => {
+            const chart: DbInsertChart = {
+                config: "{}",
+                lastEditedAt: new Date(),
+                lastEditedByUserId: 1,
+                is_indexable: 0,
+            }
+            await trx.table(ChartsTableName).insert(chart)
+            const created = await knexRawFirst(
                 trx,
                 "select * from charts where id = 1",
                 []
             )
-            expect(updated).not.toBeNull()
-            if (updated) {
-                expect(updated.createdAt).not.toBeNull()
-                expect(updated.updatedAt).not.toBeNull()
-                expect(
-                    updated.updatedAt!.getTime() - updated.createdAt.getTime()
-                ).toBeGreaterThan(800)
-                expect(
-                    updated.updatedAt!.getTime() - updated.createdAt.getTime()
-                ).toBeLessThanOrEqual(2000)
+            expect(created).not.toBeNull()
+            if (created) {
+                expect(created.createdAt).not.toBeNull()
+                expect(created.updatedAt).toBeNull()
+                await sleep(1000, undefined)
+                await trx
+                    .table(ChartsTableName)
+                    .where({ id: 1 })
+                    .update({ is_indexable: 1 })
+                const updated = await knexRawFirst(
+                    trx,
+                    "select * from charts where id = 1",
+                    []
+                )
+                expect(updated).not.toBeNull()
+                if (updated) {
+                    expect(updated.createdAt).not.toBeNull()
+                    expect(updated.updatedAt).not.toBeNull()
+                    expect(
+                        updated.updatedAt!.getTime() -
+                            updated.createdAt.getTime()
+                    ).toBeGreaterThan(800)
+                    expect(
+                        updated.updatedAt!.getTime() -
+                            updated.createdAt.getTime()
+                    ).toBeLessThanOrEqual(2000)
+                }
             }
-        }
-    }, knexInstance)
+        },
+        TransactionCloseMode.KeepOpen,
+        knexInstance
+    )
 })
 
 test("knex interface", async () => {
     if (!knexInstance) throw new Error("Knex connection not initialized")
 
     // Create a transaction and run all tests inside it
-    await knexReadWriteTransaction(async (trx) => {
-        // Fetch all users into memory
-        const users = await trx
-            .from(UsersTableName)
-            .select("isSuperuser", "email")
-        expect(users.length).toBe(1)
-
-        // Fetch all users in a streaming fashion, iterate over them async to avoid having to load everything into memory
-        const usersStream = trx
-            .from(UsersTableName)
-            .select("isSuperuser", "email")
-            .stream()
-
-        for await (const user of usersStream) {
-            expect(user.isSuperuser).toBe(0)
-            expect(user.email).toBe("admin@example.com")
-        }
-
-        // Use the insert helper method
-        await insertUser(trx, {
-            email: "test@example.com",
-            fullName: "Test User",
-        })
-
-        // Use the update helper method
-        await updateUser(trx, 2, { isSuperuser: 1 })
-
-        // Check results after update and insert
-        const afterUpdate = await trx
-            .from(UsersTableName)
-            .select("isSuperuser", "email")
-            .orderBy("id")
-        expect(afterUpdate.length).toBe(2)
-        expect(afterUpdate[1].isSuperuser).toBe(1)
-
-        // The pick type is used to type the result row
-        const usersFromRawQuery: Pick<DbPlainUser, "email">[] = await knexRaw(
-            trx,
-            "select email from users",
-            []
-        )
-        expect(usersFromRawQuery.length).toBe(2)
-
-        // Check if in queries work as expected
-        const usersFromRawQueryWithInClauseAsObj: Pick<DbPlainUser, "email">[] =
-            await knexRaw(trx, "select * from users where email in (:emails)", {
-                emails: [
-                    usersFromRawQuery[0].email,
-                    usersFromRawQuery[1].email,
-                ],
+    await knexReadWriteTransaction(
+        async (trx) => {
+            // Fetch all users into memory
+            const users = await trx
+                .from(UsersTableName)
+                .select("isSuperuser", "email")
+            expect(users.length).toBe(1)
+
+            // Fetch all users in a streaming fashion, iterate over them async to avoid having to load everything into memory
+            const usersStream = trx
+                .from(UsersTableName)
+                .select("isSuperuser", "email")
+                .stream()
+
+            for await (const user of usersStream) {
+                expect(user.isSuperuser).toBe(0)
+                expect(user.email).toBe("admin@example.com")
+            }
+
+            // Use the insert helper method
+            await insertUser(trx, {
+                email: "test@example.com",
+                fullName: "Test User",
             })
-        expect(usersFromRawQueryWithInClauseAsObj.length).toBe(2)
-
-        const usersFromRawQueryWithInClauseAsArray: Pick<
-            DbPlainUser,
-            "email"
-        >[] = await knexRaw(trx, "select * from users where email in (?)", [
-            [usersFromRawQuery[0].email, usersFromRawQuery[1].email],
-        ])
-        expect(usersFromRawQueryWithInClauseAsArray.length).toBe(2)
-
-        await deleteUser(trx, 2)
-    }, knexInstance)
+
+            // Use the update helper method
+            await updateUser(trx, 2, { isSuperuser: 1 })
+
+            // Check results after update and insert
+            const afterUpdate = await trx
+                .from(UsersTableName)
+                .select("isSuperuser", "email")
+                .orderBy("id")
+            expect(afterUpdate.length).toBe(2)
+            expect(afterUpdate[1].isSuperuser).toBe(1)
+
+            // The pick type is used to type the result row
+            const usersFromRawQuery: Pick<DbPlainUser, "email">[] =
+                await knexRaw(trx, "select email from users", [])
+            expect(usersFromRawQuery.length).toBe(2)
+
+            // Check if in queries work as expected
+            const usersFromRawQueryWithInClauseAsObj: Pick<
+                DbPlainUser,
+                "email"
+            >[] = await knexRaw(
+                trx,
+                "select * from users where email in (:emails)",
+                {
+                    emails: [
+                        usersFromRawQuery[0].email,
+                        usersFromRawQuery[1].email,
+                    ],
+                }
+            )
+            expect(usersFromRawQueryWithInClauseAsObj.length).toBe(2)
+
+            const usersFromRawQueryWithInClauseAsArray: Pick<
+                DbPlainUser,
+                "email"
+            >[] = await knexRaw(trx, "select * from users where email in (?)", [
+                [usersFromRawQuery[0].email, usersFromRawQuery[1].email],
+            ])
+            expect(usersFromRawQueryWithInClauseAsArray.length).toBe(2)
+
+            await deleteUser(trx, 2)
+        },
+        TransactionCloseMode.KeepOpen,
+        knexInstance
+    )
 })
 
 export async function testRo(
@@ -198,21 +212,29 @@ export async function testRw(trx: KnexReadWriteTransaction): Promise<void> {
     ])
 }
 test("Transaction setup", async () => {
-    const result = await knexReadWriteTransaction(async (trx) => {
-        const result = await testRo(trx)
-        expect(result.length).toBe(1)
-        expect(result[0].result).toBe(2)
-        await testRw(trx)
-        return await testGetNumUsers(trx)
-    }, knexInstance)
+    const result = await knexReadWriteTransaction(
+        async (trx) => {
+            const result = await testRo(trx)
+            expect(result.length).toBe(1)
+            expect(result[0].result).toBe(2)
+            await testRw(trx)
+            return await testGetNumUsers(trx)
+        },
+        TransactionCloseMode.KeepOpen,
+        knexInstance
+    )
     expect(result.length).toBe(1)
     expect(result[0].userCount).toBe(2)
 })
 
 test("Write actions in read-only transactions fail", async () => {
     await expect(async () => {
-        return knexReadonlyTransaction(async (trx) => {
-            await testRw(trx as KnexReadWriteTransaction) // The cast is necessary to not make TypeScript complain and catch this error :)
-        }, knexInstance)
+        return knexReadonlyTransaction(
+            async (trx) => {
+                await testRw(trx as KnexReadWriteTransaction) // The cast is necessary to not make TypeScript complain and catch this error :)
+            },
+            TransactionCloseMode.KeepOpen,
+            knexInstance
+        )
     }).rejects.toThrow()
 })
diff --git a/devTools/markdownTest/markdown.ts b/devTools/markdownTest/markdown.ts
index 95f5a1f1ac2..febd66e07d7 100644
--- a/devTools/markdownTest/markdown.ts
+++ b/devTools/markdownTest/markdown.ts
@@ -1,7 +1,4 @@
-import {
-    closeTypeOrmAndKnexConnections,
-    knexReadonlyTransaction,
-} from "../../db/db.js"
+import { TransactionCloseMode, knexReadonlyTransaction } from "../../db/db.js"
 import { getPostRawBySlug } from "../../db/model/Post.js"
 import { enrichedBlocksToMarkdown } from "../../db/model/Gdoc/enrichedToMarkdown.js"
 
@@ -47,10 +44,8 @@ async function main(parsedArgs: parseArgs.ParsedArgs) {
                 process.exit(-1)
             }
             console.log(markdown)
-        })
-        await closeTypeOrmAndKnexConnections()
+        }, TransactionCloseMode.Close)
     } catch (error) {
-        await closeTypeOrmAndKnexConnections()
         console.error("Encountered an error: ", error)
         // This call to exit is necessary for some unknown reason to make sure that the process terminates. It
         // was not required before introducing the multiprocessing library.
diff --git a/devTools/svgTester/dump-chart-ids.ts b/devTools/svgTester/dump-chart-ids.ts
index f66ba1db423..e824f5898f9 100644
--- a/devTools/svgTester/dump-chart-ids.ts
+++ b/devTools/svgTester/dump-chart-ids.ts
@@ -3,10 +3,7 @@
 import fs from "fs-extra"
 import parseArgs from "minimist"
 
-import {
-    closeTypeOrmAndKnexConnections,
-    knexReadonlyTransaction,
-} from "../../db/db.js"
+import { TransactionCloseMode, knexReadonlyTransaction } from "../../db/db.js"
 import { getMostViewedGrapherIdsByChartType } from "../../db/model/Chart.js"
 import { CHART_TYPES } from "./utils.js"
 
@@ -27,16 +24,14 @@ async function main(parsedArgs: parseArgs.ParsedArgs) {
             )
             const chartIds = (await Promise.all(promises)).flatMap((ids) => ids)
             return chartIds
-        })
+        }, TransactionCloseMode.Close)
 
         console.log(`Writing ${chartIds.length} chart ids to ${outFile}`)
         fs.writeFileSync(outFile, chartIds.join("\n"))
 
-        await closeTypeOrmAndKnexConnections()
-
         process.exit(0)
     } catch (error) {
-        await closeTypeOrmAndKnexConnections()
         console.error("Encountered an error: ", error)
         process.exit(-1)
     }
diff --git a/devTools/svgTester/dump-data.ts b/devTools/svgTester/dump-data.ts
index 069b014d2ba..0e08ec6983e 100644
--- a/devTools/svgTester/dump-data.ts
+++ b/devTools/svgTester/dump-data.ts
@@ -2,10 +2,7 @@
 
 import { getPublishedGraphersBySlug } from "../../baker/GrapherImageBaker.js"
 
-import {
-    closeTypeOrmAndKnexConnections,
-    knexReadonlyTransaction,
-} from "../../db/db.js"
+import { TransactionCloseMode, knexReadonlyTransaction } from "../../db/db.js"
 
 import fs from "fs-extra"
 
@@ -21,7 +18,8 @@ async function main(parsedArgs: parseArgs.ParsedArgs) {
         const { graphersBySlug } = await knexReadonlyTransaction(
             async (trx) => {
                 return getPublishedGraphersBySlug(trx)
-            }
+            },
+            TransactionCloseMode.Close
         )
         const allGraphers = [...graphersBySlug.values()]
         const saveJobs: utils.SaveGrapherSchemaAndDataJob[] = allGraphers.map(
@@ -31,10 +29,7 @@ async function main(parsedArgs: parseArgs.ParsedArgs) {
         await pMap(saveJobs, utils.saveGrapherSchemaAndData, {
             concurrency: 32,
         })
-
-        await closeTypeOrmAndKnexConnections()
     } catch (error) {
-        await closeTypeOrmAndKnexConnections()
         console.error("Encountered an error: ", error)
         // This call to exit is necessary for some unknown reason to make sure that the process terminates. It
         // was not required before introducing the multiprocessing library.
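Taken together, the script changes above are one mechanical substitution, shown
here as a before/after sketch (abridged from indexChartsToAlgolia.ts; scripts
that also hold a wordpress connection, such as syncRedirectsToGrapher.ts, still
end wpdb explicitly):

    // before: two calls, and the close was easy to forget
    const records = await db.knexReadonlyTransaction(getChartsRecords)
    await db.closeTypeOrmAndKnexConnections()

    // after: the transaction helper tears down the connection itself
    const records = await db.knexReadonlyTransaction(
        getChartsRecords,
        db.TransactionCloseMode.Close
    )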