From b603737cc5ca3cf9738668bfda78fdb546d904ce Mon Sep 17 00:00:00 2001
From: Daniel Bachler
Date: Wed, 28 Feb 2024 21:49:11 +0100
Subject: [PATCH] =?UTF-8?q?=E2=9C=A8=20add=20transaction=20types=20and=20a?=
 =?UTF-8?q?pi=20route=20helpers?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .eslintrc.cjs                                 |    2 +
 adminSiteServer/adminRouter.tsx               |   58 +-
 adminSiteServer/apiRouter.ts                  | 1609 +++++++++--------
 adminSiteServer/exportGitData.ts              |   16 +-
 adminSiteServer/gitDataExport.ts              |    5 +-
 adminSiteServer/mockSiteRouter.tsx            |  102 +-
 adminSiteServer/routerHelpers.tsx             |   98 +
 baker/DeployUtils.ts                          |    8 +-
 baker/ExplorerBaker.tsx                       |    8 +-
 baker/GrapherBaker.tsx                        |   33 +-
 baker/SiteBaker.tsx                           |   67 +-
 baker/algolia/indexChartsToAlgolia.ts         |    7 +-
 baker/algolia/indexExplorersToAlgolia.ts      |    6 +-
 baker/algolia/indexToAlgolia.tsx              |   15 +-
 baker/buildLocalBake.ts                       |    2 +-
 baker/countryProfiles.tsx                     |   79 +-
 baker/formatWordpressPost.tsx                 |    6 +-
 baker/pageOverrides.test.ts                   |    7 +-
 baker/pageOverrides.tsx                       |   14 +-
 baker/postUpdatedHook.ts                      |   48 +-
 baker/recalcLatestCountryData.ts              |    2 +-
 baker/redirects.test.ts                       |   72 +-
 baker/redirects.ts                            |   11 +-
 baker/runBakeGraphers.ts                      |    8 +-
 baker/siteRenderers.test.ts                   |    4 +-
 baker/siteRenderers.tsx                       |   58 +-
 baker/sitemap.ts                              |    7 +-
 baker/startDeployQueueServer.ts               |    2 +-
 baker/syncRedirectsToGrapher.ts               |   72 +-
 db/DEPRECATEDwpdb.ts                          |    7 +-
 db/Variable.test.ts                           |   10 +-
 db/db.ts                                      |   36 +-
 db/migrateWpPostsToArchieMl.ts                |   66 +-
 db/model/Chart.ts                             |   18 +-
 db/model/Dataset.ts                           |   37 +-
 db/model/Gdoc/GdocBase.ts                     |    8 +-
 db/model/Pageview.ts                          |    3 +-
 db/model/Post.ts                              |   74 +-
 db/model/PostLink.ts                          |   18 +-
 db/model/Redirect.ts                          |    3 +-
 db/model/Source.ts                            |    6 +-
 db/model/User.ts                              |   12 +-
 db/model/Variable.ts                          |   25 +-
 db/refreshPageviewsFromDatasette.ts           |    8 +-
 db/syncPostsToGrapher.ts                      |   38 +-
 db/tests/basic.test.ts                        |   54 +-
 devTools/markdownTest/markdown.ts             |   66 +-
 .../@ourworldindata/types/src/NominalType.ts  |   22 +
 packages/@ourworldindata/types/src/index.ts   |    2 +
 49 files changed, 1610 insertions(+), 1329 deletions(-)
 create mode 100644 adminSiteServer/routerHelpers.tsx
 create mode 100644 packages/@ourworldindata/types/src/NominalType.ts

diff --git a/.eslintrc.cjs b/.eslintrc.cjs
index 43432a98055..48943defb30 100644
--- a/.eslintrc.cjs
+++ b/.eslintrc.cjs
@@ -51,6 +51,8 @@ module.exports = {
         "react/no-render-return-value": "warn",
         "react/no-unescaped-entities": ["warn", { forbid: [">", "}"] }],
         "react/prop-types": "warn",
+        // TODO: consider adding this and whitelisting all promises that don't need to be awaited with "void"
+        // "@typescript-eslint/no-floating-promises": "error",
     },
     settings: {
         "import/resolver": {
diff --git a/adminSiteServer/adminRouter.tsx b/adminSiteServer/adminRouter.tsx
index f1cd4495187..5fa2164acfb 100644
--- a/adminSiteServer/adminRouter.tsx
+++ b/adminSiteServer/adminRouter.tsx
@@ -139,7 +139,7 @@ adminRouter.get("/logout", logOut)
 
 adminRouter.get("/datasets/:datasetId.csv", async (req, res) => {
     const datasetId = expectInt(req.params.datasetId)
 
-    await db.knexInstance().transaction(async (t) => {
+    await db.knexReadonlyTransaction(async (t) => {
         const datasetName = (
             await db.knexRawFirst<Pick<DbPlainDataset, "name">>(
                 t,
@@ -166,19 +166,19 @@ adminRouter.get("/datasets/:datasetId/downloadZip", async (req, res) => {
 
     res.attachment("additional-material.zip")
 
-    const knex = db.knexInstance()
-
-    const file = await db.knexRawFirst<
-        Pick<DbPlainDatasetFile, "filename" | "file">
-    >(knex, `SELECT filename, file FROM dataset_files WHERE datasetId=?`, [
-        datasetId,
-    ])
+    const file = await 
db.knexReadonlyTransaction((knex) => + db.knexRawFirst>( + knex, + `SELECT filename, file FROM dataset_files WHERE datasetId=?`, + [datasetId] + ) + ) res.send(file?.file) }) adminRouter.get("/posts/preview/:postId", async (req, res) => { const postId = expectInt(req.params.postId) - const preview = await db.knexInstance().transaction(async (knex) => { + const preview = await db.knexReadonlyTransaction(async (knex) => { return renderPreview(postId, knex) }) res.send(preview) @@ -187,16 +187,16 @@ adminRouter.get("/posts/preview/:postId", async (req, res) => { adminRouter.get("/posts/compare/:postId", async (req, res) => { const postId = expectInt(req.params.postId) - const [wpPage, archieMlText] = await db - .knexInstance() - .transaction(async (t) => { + const [wpPage, archieMlText] = await db.knexReadonlyTransaction( + async (t) => { const wpPage = await renderPreview(postId, t) const archieMlText = await Post.select( "archieml", "archieml_update_statistics" ).from(t(Post.postsTable).where({ id: postId })) return [wpPage, archieMlText] - }) + } + ) if ( archieMlText.length === 0 || @@ -275,7 +275,9 @@ adminRouter.get(`/${GetAllExplorersRoute}`, async (req, res) => { adminRouter.get(`/${GetAllExplorersTagsRoute}`, async (_, res) => { return res.send({ - explorers: await db.getExplorerTags(db.knexInstance()), + explorers: await db.knexReadonlyTransaction((trx) => + db.getExplorerTags(trx) + ), }) }) @@ -283,7 +285,7 @@ adminRouter.get(`/${EXPLORERS_PREVIEW_ROUTE}/:slug`, async (req, res) => { const slug = slugify(req.params.slug) const filename = slug + EXPLORER_FILE_SUFFIX - const explorerPage = await db.knexInstance().transaction(async (knex) => { + const explorerPage = await db.knexReadonlyTransaction(async (knex) => { if (slug === DefaultNewExplorerSlug) return renderExplorerPage( new ExplorerProgram(DefaultNewExplorerSlug, ""), @@ -307,14 +309,16 @@ adminRouter.get("/datapage-preview/:id", async (req, res) => { if (!variableMetadata) throw new JsonError("No such variable", 404) res.send( - await renderDataPageV2( - { - variableId, - variableMetadata, - isPreviewing: true, - useIndicatorGrapherConfigs: true, - }, - db.knexInstance() + await db.knexReadonlyTransaction((trx) => + renderDataPageV2( + { + variableId, + variableMetadata, + isPreviewing: true, + useIndicatorGrapherConfigs: true, + }, + trx + ) ) ) }) @@ -323,11 +327,9 @@ adminRouter.get("/grapher/:slug", async (req, res) => { const entity = await Chart.getBySlug(req.params.slug) if (!entity) throw new JsonError("No such chart", 404) - const previewDataPageOrGrapherPage = db - .knexInstance() - .transaction(async (knex) => - renderPreviewDataPageOrGrapherPage(entity.config, knex) - ) + const previewDataPageOrGrapherPage = db.knexReadonlyTransaction( + async (knex) => renderPreviewDataPageOrGrapherPage(entity.config, knex) + ) res.send(previewDataPageOrGrapherPage) }) diff --git a/adminSiteServer/apiRouter.ts b/adminSiteServer/apiRouter.ts index 29b4646fc65..af9c73756de 100644 --- a/adminSiteServer/apiRouter.ts +++ b/adminSiteServer/apiRouter.ts @@ -14,7 +14,7 @@ import { } from "../settings/serverSettings.js" import { expectInt, isValidSlug } from "../serverUtils/serverUtil.js" import { OldChart, Chart, getGrapherById } from "../db/model/Chart.js" -import { Request, Response, CurrentUser } from "./authentication.js" +import { Request, CurrentUser } from "./authentication.js" import { getMergedGrapherConfigForVariable, fetchS3MetadataByPath, @@ -36,7 +36,6 @@ import { OwidGdocPostInterface, parseIntOrUndefined, 
parseToOperation, - DbEnrichedPost, DbRawPostWithGdocPublishStatus, SuggestedChartRevisionStatus, variableAnnotationAllowedColumnNamesAndTypes, @@ -60,7 +59,9 @@ import { DbRawVariable, DbRawOrigin, parseOriginsRow, - AnalyticsPageviewsTableName, + PostsTableName, + DbRawPost, + DbPlainDataset, } from "@ourworldindata/types" import { getVariableDataRoute, @@ -100,7 +101,12 @@ import { Link } from "../db/model/Link.js" import { In } from "typeorm" import { logErrorAndMaybeSendToBugsnag } from "../serverUtils/errorLog.js" import { GdocFactory } from "../db/model/Gdoc/GdocFactory.js" -import { Knex } from "knex" +import { + getRouteWithROTransaction, + deleteRouteWithRWTransaction, + putRouteWithRWTransaction, + postRouteWithRWTransaction, +} from "./routerHelpers.js" const apiRouter = new FunctionalRouter() @@ -157,7 +163,7 @@ async function getLogsByChartId(chartId: number): Promise { const getReferencesByChartId = async ( chartId: number, - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise => { const postsWordpressPromise = getWordpressPostReferencesByChartId( chartId, @@ -313,8 +319,12 @@ const saveGrapher = async ( // So we can generate country profiles including this chart data if (newConfig.isPublished && referencedVariablesMightChange) - await denormalizeLatestCountryData( - newDimensions.map((d) => d.variableId) + // TODO: remove this ad hoc knex transaction context when we switch the function to knex + await db.knexReadWriteTransaction((trx) => + denormalizeLatestCountryData( + trx, + newDimensions.map((d) => d.variableId) + ) ) if ( @@ -344,7 +354,7 @@ const saveGrapher = async ( return chartId } -apiRouter.get("/charts.json", async (req: Request, res: Response) => { +apiRouter.get("/charts.json", async (req, res) => { const limit = parseIntOrUndefined(req.query.limit as string) ?? 10000 const charts = await db.queryMysql( ` @@ -361,7 +371,7 @@ apiRouter.get("/charts.json", async (req: Request, res: Response) => { return { charts } }) -apiRouter.get("/charts.csv", async (req: Request, res: Response) => { +apiRouter.get("/charts.csv", async (req, res) => { const limit = parseIntOrUndefined(req.query.limit as string) ?? 10000 // note: this query is extended from OldChart.listFields. 
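The four helpers imported above come from the new adminSiteServer/routerHelpers.tsx, whose 98 lines are not part of this excerpt. Judging purely from the call sites in this patch — each helper takes the router, a path, and a handler, and the handler receives the transaction as a third argument — the file plausibly reads roughly as sketched here. The FunctionalRouter method names, import paths, and exact type signatures are assumptions, not confirmed by the diff:

import { Request, Response } from "express"
import * as db from "../db/db.js"
import { FunctionalRouter } from "./FunctionalRouter.js"

// Handlers keep the express (req, res) pair but receive a knex transaction
// as a third argument, so all queries in one request share one transaction.
export function getRouteWithROTransaction<T>(
    router: FunctionalRouter,
    targetPath: string,
    handler: (
        req: Request,
        res: Response,
        trx: db.KnexReadonlyTransaction
    ) => Promise<T>
) {
    return router.get(targetPath, (req: Request, res: Response) =>
        db.knexReadonlyTransaction((trx) => handler(req, res, trx))
    )
}

// The mutating verbs differ only in the router method they delegate to and
// in handing the handler a read-write transaction instead.
export function postRouteWithRWTransaction<T>(
    router: FunctionalRouter,
    targetPath: string,
    handler: (
        req: Request,
        res: Response,
        trx: db.KnexReadWriteTransaction
    ) => Promise<T>
) {
    return router.post(targetPath, (req: Request, res: Response) =>
        db.knexReadWriteTransaction((trx) => handler(req, res, trx))
    )
}

// putRouteWithRWTransaction and deleteRouteWithRWTransaction follow the same
// pattern, delegating to router.put and router.delete respectively.

Under that reading, converting a route is mechanical: apiRouter.get(path, handler) becomes getRouteWithROTransaction(apiRouter, path, handler) and the handler gains a trx parameter — exactly the shape of the conversions in the surrounding hunks. FunctionalRouter also appears to send a handler's return value as the response body, which is why the handlers here simply return plain objects.
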
@@ -415,78 +425,67 @@ apiRouter.get("/charts.csv", async (req: Request, res: Response) => { return csv }) -apiRouter.get( - "/charts/:chartId.config.json", - async (req: Request, res: Response) => expectChartById(req.params.chartId) +apiRouter.get("/charts/:chartId.config.json", async (req, res) => + expectChartById(req.params.chartId) ) -apiRouter.get( - "/editorData/namespaces.json", - async (req: Request, res: Response) => { - const rows = (await db.queryMysql( - `SELECT DISTINCT +apiRouter.get("/editorData/namespaces.json", async (req, res) => { + const rows = (await db.queryMysql( + `SELECT DISTINCT namespace AS name, namespaces.description AS description, namespaces.isArchived AS isArchived FROM active_datasets JOIN namespaces ON namespaces.name = active_datasets.namespace` - )) as { name: string; description?: string; isArchived: boolean }[] + )) as { name: string; description?: string; isArchived: boolean }[] - return { - namespaces: lodash - .sortBy(rows, (row) => row.description) - .map((namespace) => ({ - ...namespace, - isArchived: !!namespace.isArchived, - })), - } + return { + namespaces: lodash + .sortBy(rows, (row) => row.description) + .map((namespace) => ({ + ...namespace, + isArchived: !!namespace.isArchived, + })), } -) +}) -apiRouter.get( - "/charts/:chartId.logs.json", - async (req: Request, res: Response) => ({ - logs: await getLogsByChartId(parseInt(req.params.chartId as string)), - }) -) +apiRouter.get("/charts/:chartId.logs.json", async (req, res) => ({ + logs: await getLogsByChartId(parseInt(req.params.chartId as string)), +})) -apiRouter.get( +getRouteWithROTransaction( + apiRouter, "/charts/:chartId.references.json", - async (req: Request, res: Response) => { - const references = db.knexInstance().transaction(async (knex) => ({ + async (req, res, trx) => { + const references = { references: await getReferencesByChartId( parseInt(req.params.chartId as string), - knex + trx ), - })) + } return references } ) -apiRouter.get( - "/charts/:chartId.redirects.json", - async (req: Request, res: Response) => ({ - redirects: await getRedirectsByChartId( - parseInt(req.params.chartId as string) - ), - }) -) +apiRouter.get("/charts/:chartId.redirects.json", async (req, res) => ({ + redirects: await getRedirectsByChartId( + parseInt(req.params.chartId as string) + ), +})) -apiRouter.get( +getRouteWithROTransaction( + apiRouter, "/charts/:chartId.pageviews.json", - async (req: Request, res: Response) => { + async (req, res, trx) => { const slug = await Chart.getById( parseInt(req.params.chartId as string) ).then((chart) => chart?.config?.slug) if (!slug) return {} const pageviewsByUrl = await db.knexRawFirst( - db.knexInstance(), - "select * from ?? 
where url = ?", - [ - AnalyticsPageviewsTableName, - `https://ourworldindata.org/grapher/${slug}`, - ] + trx, + "select * from analytics_pageviews where url = ?", + [`https://ourworldindata.org/grapher/${slug}`] ) return { @@ -495,15 +494,27 @@ apiRouter.get( } ) -apiRouter.get("/topics.json", async (req: Request, res: Response) => ({ +apiRouter.get("/topics.json", async (req, res) => ({ topics: await DEPRECATEDgetTopics(), })) -apiRouter.get( +getRouteWithROTransaction( + apiRouter, "/editorData/variables.json", - async (req: Request, res: Response) => { + async (req, res, trx) => { const datasets = [] - const rows = await db.queryMysql( - `SELECT + const rows = await db.knexRaw< + Pick & { + datasetId: number + datasetName: string + datasetVersion: string + } & Pick< + DbPlainDataset, + "namespace" | "isPrivate" | "nonRedistributable" + > + >( + trx, + `-- sql + SELECT v.name, v.id, d.id as datasetId, @@ -537,15 +548,15 @@ apiRouter.get( name: row.datasetName, version: row.datasetVersion, namespace: row.namespace, - isPrivate: row.isPrivate, - nonRedistributable: row.nonRedistributable, + isPrivate: !!row.isPrivate, + nonRedistributable: !!row.nonRedistributable, variables: [], } } dataset.variables.push({ id: row.id, - name: row.name, + name: row.name ?? "", }) } @@ -555,26 +566,23 @@ apiRouter.get( } ) -apiRouter.get( - "/data/variables/data/:variableStr.json", - async (req: Request, res: Response) => { - const variableStr = req.params.variableStr as string - if (!variableStr) throw new JsonError("No variable id given") - if (variableStr.includes("+")) - throw new JsonError( - "Requesting multiple variables at the same time is no longer supported" - ) - const variableId = parseInt(variableStr) - if (isNaN(variableId)) throw new JsonError("Invalid variable id") - return await fetchS3DataValuesByPath( - getVariableDataRoute(DATA_API_URL, variableId) + "?nocache" +apiRouter.get("/data/variables/data/:variableStr.json", async (req, res) => { + const variableStr = req.params.variableStr as string + if (!variableStr) throw new JsonError("No variable id given") + if (variableStr.includes("+")) + throw new JsonError( + "Requesting multiple variables at the same time is no longer supported" ) - } -) + const variableId = parseInt(variableStr) + if (isNaN(variableId)) throw new JsonError("Invalid variable id") + return await fetchS3DataValuesByPath( + getVariableDataRoute(DATA_API_URL, variableId) + "?nocache" + ) +}) apiRouter.get( "/data/variables/metadata/:variableStr.json", - async (req: Request, res: Response) => { + async (req, res) => { const variableStr = req.params.variableStr as string if (!variableStr) throw new JsonError("No variable id given") if (variableStr.includes("+")) @@ -589,25 +597,22 @@ apiRouter.get( } ) -apiRouter.post("/charts", async (req: Request, res: Response) => { +apiRouter.post("/charts", async (req, res) => { const chartId = await db.transaction(async (t) => { return saveGrapher(t, res.locals.user, req.body) }) return { success: true, chartId: chartId } }) -apiRouter.post( - "/charts/:chartId/setTags", - async (req: Request, res: Response) => { - const chartId = expectInt(req.params.chartId) +apiRouter.post("/charts/:chartId/setTags", async (req, res) => { + const chartId = expectInt(req.params.chartId) - await Chart.setTags(chartId, req.body.tags) + await Chart.setTags(chartId, req.body.tags) - return { success: true } - } -) + return { success: true } +}) -apiRouter.put("/charts/:chartId", async (req: Request, res: Response) => { 
+apiRouter.put("/charts/:chartId", async (req, res) => { const existingConfig = await expectChartById(req.params.chartId) await db.transaction(async (t) => { @@ -618,7 +623,7 @@ apiRouter.put("/charts/:chartId", async (req: Request, res: Response) => { return { success: true, chartId: existingConfig.id, newLog: logs[0] } }) -apiRouter.delete("/charts/:chartId", async (req: Request, res: Response) => { +apiRouter.delete("/charts/:chartId", async (req, res) => { const chart = await expectChartById(req.params.chartId) const links = await Link.getPublishedLinksTo([chart.slug!]) if (links.length) { @@ -651,59 +656,57 @@ apiRouter.delete("/charts/:chartId", async (req: Request, res: Response) => { return { success: true } }) -apiRouter.get( - "/suggested-chart-revisions", - async (req: Request, res: Response) => { - const isValidSortBy = (sortBy: string) => { - return [ - "updatedAt", - "createdAt", - "suggestedReason", - "id", - "chartId", - "status", - "variableId", - "chartUpdatedAt", - "chartCreatedAt", - ].includes(sortBy) - } - const isValidSortOrder = (sortOrder: string) => { - return ( - sortOrder !== undefined && - sortOrder !== null && - ["ASC", "DESC"].includes(sortOrder.toUpperCase()) - ) - } - const limit = - req.query.limit !== undefined ? expectInt(req.query.limit) : 10000 - const offset = - req.query.offset !== undefined ? expectInt(req.query.offset) : 0 - const sortBy = isValidSortBy(req.query.sortBy as string) - ? req.query.sortBy - : "updatedAt" - const sortOrder = isValidSortOrder(req.query.sortOrder as string) - ? (req.query.sortOrder as string).toUpperCase() - : "DESC" - const status = SuggestedChartRevision.isValidStatus( - req.query.status as SuggestedChartRevisionStatus +apiRouter.get("/suggested-chart-revisions", async (req, res) => { + const isValidSortBy = (sortBy: string) => { + return [ + "updatedAt", + "createdAt", + "suggestedReason", + "id", + "chartId", + "status", + "variableId", + "chartUpdatedAt", + "chartCreatedAt", + ].includes(sortBy) + } + const isValidSortOrder = (sortOrder: string) => { + return ( + sortOrder !== undefined && + sortOrder !== null && + ["ASC", "DESC"].includes(sortOrder.toUpperCase()) ) - ? req.query.status - : null - - let orderBy - if (sortBy === "variableId") { - orderBy = - "CAST(scr.suggestedConfig->>'$.dimensions[0].variableId' as SIGNED)" - } else if (sortBy === "chartUpdatedAt") { - orderBy = "c.updatedAt" - } else if (sortBy === "chartCreatedAt") { - orderBy = "c.createdAt" - } else { - orderBy = `scr.${sortBy}` - } + } + const limit = + req.query.limit !== undefined ? expectInt(req.query.limit) : 10000 + const offset = + req.query.offset !== undefined ? expectInt(req.query.offset) : 0 + const sortBy = isValidSortBy(req.query.sortBy as string) + ? req.query.sortBy + : "updatedAt" + const sortOrder = isValidSortOrder(req.query.sortOrder as string) + ? (req.query.sortOrder as string).toUpperCase() + : "DESC" + const status = SuggestedChartRevision.isValidStatus( + req.query.status as SuggestedChartRevisionStatus + ) + ? 
req.query.status + : null + + let orderBy + if (sortBy === "variableId") { + orderBy = + "CAST(scr.suggestedConfig->>'$.dimensions[0].variableId' as SIGNED)" + } else if (sortBy === "chartUpdatedAt") { + orderBy = "c.updatedAt" + } else if (sortBy === "chartCreatedAt") { + orderBy = "c.createdAt" + } else { + orderBy = `scr.${sortBy}` + } - const suggestedChartRevisions = await db.queryMysql( - `-- sql + const suggestedChartRevisions = await db.queryMysql( + `-- sql SELECT scr.id, scr.chartId, scr.updatedAt, scr.createdAt, scr.suggestedReason, scr.decisionReason, scr.status, scr.suggestedConfig, scr.originalConfig, scr.changesInDataSummary, @@ -722,203 +725,194 @@ apiRouter.get( ORDER BY ${orderBy} ${sortOrder} LIMIT ? OFFSET ? `, - status ? [status, limit, offset] : [limit, offset] - ) + status ? [status, limit, offset] : [limit, offset] + ) - let numTotalRows = ( - await db.queryMysql( - ` + let numTotalRows = ( + await db.queryMysql( + ` SELECT COUNT(*) as count FROM suggested_chart_revisions ${status ? "WHERE status = ?" : ""} `, - status ? [status] : [] - ) - )[0].count - numTotalRows = numTotalRows ? parseInt(numTotalRows) : numTotalRows - - suggestedChartRevisions.map( - (suggestedChartRevision: SuggestedChartRevision) => { - suggestedChartRevision.suggestedConfig = JSON.parse( - suggestedChartRevision.suggestedConfig - ) - suggestedChartRevision.existingConfig = JSON.parse( - suggestedChartRevision.existingConfig - ) - suggestedChartRevision.originalConfig = JSON.parse( - suggestedChartRevision.originalConfig - ) - suggestedChartRevision.experimental = JSON.parse( - suggestedChartRevision.experimental - ) - suggestedChartRevision.canApprove = - SuggestedChartRevision.checkCanApprove( - suggestedChartRevision - ) - suggestedChartRevision.canReject = - SuggestedChartRevision.checkCanReject( - suggestedChartRevision - ) - suggestedChartRevision.canFlag = - SuggestedChartRevision.checkCanFlag(suggestedChartRevision) - suggestedChartRevision.canPending = - SuggestedChartRevision.checkCanPending( - suggestedChartRevision - ) - } + status ? [status] : [] ) + )[0].count + numTotalRows = numTotalRows ? parseInt(numTotalRows) : numTotalRows - return { - suggestedChartRevisions: suggestedChartRevisions, - numTotalRows: numTotalRows, + suggestedChartRevisions.map( + (suggestedChartRevision: SuggestedChartRevision) => { + suggestedChartRevision.suggestedConfig = JSON.parse( + suggestedChartRevision.suggestedConfig + ) + suggestedChartRevision.existingConfig = JSON.parse( + suggestedChartRevision.existingConfig + ) + suggestedChartRevision.originalConfig = JSON.parse( + suggestedChartRevision.originalConfig + ) + suggestedChartRevision.experimental = JSON.parse( + suggestedChartRevision.experimental + ) + suggestedChartRevision.canApprove = + SuggestedChartRevision.checkCanApprove(suggestedChartRevision) + suggestedChartRevision.canReject = + SuggestedChartRevision.checkCanReject(suggestedChartRevision) + suggestedChartRevision.canFlag = + SuggestedChartRevision.checkCanFlag(suggestedChartRevision) + suggestedChartRevision.canPending = + SuggestedChartRevision.checkCanPending(suggestedChartRevision) } + ) + + return { + suggestedChartRevisions: suggestedChartRevisions, + numTotalRows: numTotalRows, } -) +}) -apiRouter.post( - "/suggested-chart-revisions", - async (req: Request, res: Response) => { - const messages: any[] = [] - const status = SuggestedChartRevisionStatus.pending - const suggestedReason = req.body.suggestedReason - ? 
String(req.body.suggestedReason) - : null - const changesInDataSummary = req.body.changesInDataSummary - ? String(req.body.changesInDataSummary) - : null - const convertStringsToNull = - typeof req.body.convertStringsToNull === "boolean" - ? req.body.convertStringsToNull - : true - const suggestedConfigs = req.body.suggestedConfigs as any[] - - // suggestedConfigs must be an array of length > 0 - if (!(Array.isArray(suggestedConfigs) && suggestedConfigs.length > 0)) { - throw new JsonError( - "POST body must contain a `suggestedConfigs` property, which must be an Array with length > 0." - ) - } +apiRouter.post("/suggested-chart-revisions", async (req, res) => { + const messages: any[] = [] + const status = SuggestedChartRevisionStatus.pending + const suggestedReason = req.body.suggestedReason + ? String(req.body.suggestedReason) + : null + const changesInDataSummary = req.body.changesInDataSummary + ? String(req.body.changesInDataSummary) + : null + const convertStringsToNull = + typeof req.body.convertStringsToNull === "boolean" + ? req.body.convertStringsToNull + : true + const suggestedConfigs = req.body.suggestedConfigs as any[] + + // suggestedConfigs must be an array of length > 0 + if (!(Array.isArray(suggestedConfigs) && suggestedConfigs.length > 0)) { + throw new JsonError( + "POST body must contain a `suggestedConfigs` property, which must be an Array with length > 0." + ) + } - // tries to convert each config field to json (e.g. the `map` field - // should be converted to json if it is present). - suggestedConfigs.map((config) => { - Object.keys(config).map((k) => { - try { - const json = JSON.parse(config[k]) - config[k] = json - } catch (error) { - // do nothing. - } - }) + // tries to convert each config field to json (e.g. the `map` field + // should be converted to json if it is present). + suggestedConfigs.map((config) => { + Object.keys(config).map((k) => { + try { + const json = JSON.parse(config[k]) + config[k] = json + } catch (error) { + // do nothing. + } }) + }) - // checks for required keys - const requiredKeys = ["id", "version"] - suggestedConfigs.map((config) => { - requiredKeys.map((k) => { - if (!config.hasOwnProperty(k)) { - throw new JsonError( - `The "${k}" field is required, but one or more chart configs in the POST body does not contain it.` - ) - } - }) + // checks for required keys + const requiredKeys = ["id", "version"] + suggestedConfigs.map((config) => { + requiredKeys.map((k) => { + if (!config.hasOwnProperty(k)) { + throw new JsonError( + `The "${k}" field is required, but one or more chart configs in the POST body does not contain it.` + ) + } }) + }) - // safely sets types of keys that are used in db queries below. - const typeConversions = [ - { key: "id", expectedType: "number", f: expectInt }, - { key: "version", expectedType: "number", f: expectInt }, - ] - suggestedConfigs.map((config) => { - typeConversions.map((obj) => { - config[obj.key] = obj.f(config[obj.key]) - if ( - config[obj.key] !== null && - config[obj.key] !== undefined && - typeof config[obj.key] !== obj.expectedType - ) { - throw new JsonError( - `Expected all "${obj.key}" values to be non-null and of ` + - `type "${obj.expectedType}", but one or more chart ` + - `configs contains a "${obj.key}" value that does ` + - `not meet this criteria.` - ) - } - }) + // safely sets types of keys that are used in db queries below. 
+ const typeConversions = [ + { key: "id", expectedType: "number", f: expectInt }, + { key: "version", expectedType: "number", f: expectInt }, + ] + suggestedConfigs.map((config) => { + typeConversions.map((obj) => { + config[obj.key] = obj.f(config[obj.key]) + if ( + config[obj.key] !== null && + config[obj.key] !== undefined && + typeof config[obj.key] !== obj.expectedType + ) { + throw new JsonError( + `Expected all "${obj.key}" values to be non-null and of ` + + `type "${obj.expectedType}", but one or more chart ` + + `configs contains a "${obj.key}" value that does ` + + `not meet this criteria.` + ) + } }) + }) - // checks for invalid keys - const uniqKeys = new Set() - suggestedConfigs.map((config) => { - Object.keys(config).forEach((item) => { - uniqKeys.add(item) - }) + // checks for invalid keys + const uniqKeys = new Set() + suggestedConfigs.map((config) => { + Object.keys(config).forEach((item) => { + uniqKeys.add(item) }) - const invalidKeys = [...uniqKeys].filter( - (v) => !grapherKeysToSerialize.includes(v as string) + }) + const invalidKeys = [...uniqKeys].filter( + (v) => !grapherKeysToSerialize.includes(v as string) + ) + if (invalidKeys.length > 0) { + throw new JsonError( + `The following fields are not valid chart config fields: ${invalidKeys}` ) - if (invalidKeys.length > 0) { - throw new JsonError( - `The following fields are not valid chart config fields: ${invalidKeys}` - ) - } + } - // checks that no duplicate chart ids are present. - const chartIds = suggestedConfigs.map((config) => config.id) - if (new Set(chartIds).size !== chartIds.length) { - throw new JsonError( - `Found one or more duplicate chart ids in POST body.` - ) - } + // checks that no duplicate chart ids are present. + const chartIds = suggestedConfigs.map((config) => config.id) + if (new Set(chartIds).size !== chartIds.length) { + throw new JsonError( + `Found one or more duplicate chart ids in POST body.` + ) + } - // converts some strings to null - if (convertStringsToNull) { - const isNullString = (value: string): boolean => { - const nullStrings = ["nan", "na"] - return nullStrings.includes(value.toLowerCase()) - } - suggestedConfigs.map((config) => { - for (const key of Object.keys(config)) { - if ( - typeof config[key] === "string" && - isNullString(config[key]) - ) { - config[key] = null - } - } - }) + // converts some strings to null + if (convertStringsToNull) { + const isNullString = (value: string): boolean => { + const nullStrings = ["nan", "na"] + return nullStrings.includes(value.toLowerCase()) } - - // empty strings mean that the field should NOT be overwritten, so we - // remove key-value pairs where value === "" suggestedConfigs.map((config) => { for (const key of Object.keys(config)) { - if (config[key] === "") { - delete config[key] + if ( + typeof config[key] === "string" && + isNullString(config[key]) + ) { + config[key] = null } } }) + } - await db.transaction(async (t) => { - const whereCond1 = suggestedConfigs - .map( - (config) => - `(id = ${escape( - config.id - )} AND config->"$.version" = ${escape(config.version)})` - ) - .join(" OR ") - const whereCond2 = suggestedConfigs - .map( - (config) => - `(chartId = ${escape( - config.id - )} AND config->"$.version" = ${escape(config.version)})` - ) - .join(" OR ") - // retrieves original chart configs - let rows: any[] = await t.query( - ` + // empty strings mean that the field should NOT be overwritten, so we + // remove key-value pairs where value === "" + suggestedConfigs.map((config) => { + for (const key of 
Object.keys(config)) { + if (config[key] === "") { + delete config[key] + } + } + }) + + await db.transaction(async (t) => { + const whereCond1 = suggestedConfigs + .map( + (config) => + `(id = ${escape( + config.id + )} AND config->"$.version" = ${escape(config.version)})` + ) + .join(" OR ") + const whereCond2 = suggestedConfigs + .map( + (config) => + `(chartId = ${escape( + config.id + )} AND config->"$.version" = ${escape(config.version)})` + ) + .join(" OR ") + // retrieves original chart configs + let rows: any[] = await t.query( + ` SELECT id, config, 1 as priority FROM charts WHERE ${whereCond1} @@ -931,161 +925,158 @@ apiRouter.post( ORDER BY priority ` - ) + ) - rows.map((row) => { - row.config = JSON.parse(row.config) - }) + rows.map((row) => { + row.config = JSON.parse(row.config) + }) - // drops duplicate id-version rows (keeping the row from the - // `charts` table when available). - rows = rows.filter( - (v, i, a) => - a.findIndex( - (el) => - el.id === v.id && - el.config.version === v.config.version - ) === i - ) - if (rows.length < suggestedConfigs.length) { - // identifies which particular chartId-version combinations have - // not been found in the DB - const missingConfigs = suggestedConfigs.filter((config) => { - const i = rows.findIndex((row) => { - return ( - row.id === config.id && - row.config.version === config.version - ) - }) - return i === -1 + // drops duplicate id-version rows (keeping the row from the + // `charts` table when available). + rows = rows.filter( + (v, i, a) => + a.findIndex( + (el) => + el.id === v.id && el.config.version === v.config.version + ) === i + ) + if (rows.length < suggestedConfigs.length) { + // identifies which particular chartId-version combinations have + // not been found in the DB + const missingConfigs = suggestedConfigs.filter((config) => { + const i = rows.findIndex((row) => { + return ( + row.id === config.id && + row.config.version === config.version + ) }) - throw new JsonError( - `Failed to retrieve the following chartId-version combinations:\n${missingConfigs - .map((c) => { - return JSON.stringify({ - id: c.id, - version: c.version, - }) + return i === -1 + }) + throw new JsonError( + `Failed to retrieve the following chartId-version combinations:\n${missingConfigs + .map((c) => { + return JSON.stringify({ + id: c.id, + version: c.version, }) - .join( - "\n" - )}\nPlease check that each chartId and version exists.` - ) - } else if (rows.length > suggestedConfigs.length) { - throw new JsonError( - "Retrieved more chart configs than expected. This may be due to a bug on the server." - ) - } - const originalConfigs: Record = - rows.reduce( - (obj: any, row: any) => ({ - ...obj, - [row.id]: row.config, - }), - {} - ) + }) + .join( + "\n" + )}\nPlease check that each chartId and version exists.` + ) + } else if (rows.length > suggestedConfigs.length) { + throw new JsonError( + "Retrieved more chart configs than expected. This may be due to a bug on the server." + ) + } + const originalConfigs: Record = rows.reduce( + (obj: any, row: any) => ({ + ...obj, + [row.id]: row.config, + }), + {} + ) - // some chart configs do not have an `id` field, so we check for it - // and insert the id here as needed. This is important for the - // lodash.isEqual condition later on. - for (const [id, config] of Object.entries(originalConfigs)) { - if (config.id === null || config.id === undefined) { - config.id = parseInt(id) - } + // some chart configs do not have an `id` field, so we check for it + // and insert the id here as needed. 
This is important for the + // lodash.isEqual condition later on. + for (const [id, config] of Object.entries(originalConfigs)) { + if (config.id === null || config.id === undefined) { + config.id = parseInt(id) } + } - // sanity check that each original config also has the required keys. - Object.values(originalConfigs).map((config) => { - requiredKeys.map((k) => { - if (!config.hasOwnProperty(k)) { - throw new JsonError( - `The "${k}" field is required, but one or more ` + - `chart configs in the database does not ` + - `contain it. Please report this issue to a ` + - `developer.` - ) - } - }) + // sanity check that each original config also has the required keys. + Object.values(originalConfigs).map((config) => { + requiredKeys.map((k) => { + if (!config.hasOwnProperty(k)) { + throw new JsonError( + `The "${k}" field is required, but one or more ` + + `chart configs in the database does not ` + + `contain it. Please report this issue to a ` + + `developer.` + ) + } }) + }) - // if a field is null in the suggested config and the field does not - // exist in the original config, then we can delete the field from - // the suggested config b/c the non-existence of the field on the - // original config is equivalent to null. - suggestedConfigs.map((config: any) => { - const chartId = config.id as number - const originalConfig = originalConfigs[chartId] - for (const key of Object.keys(config)) { - if ( - config[key] === null && - !originalConfig.hasOwnProperty(key) - ) { - delete config[key] - } + // if a field is null in the suggested config and the field does not + // exist in the original config, then we can delete the field from + // the suggested config b/c the non-existence of the field on the + // original config is equivalent to null. + suggestedConfigs.map((config: any) => { + const chartId = config.id as number + const originalConfig = originalConfigs[chartId] + for (const key of Object.keys(config)) { + if ( + config[key] === null && + !originalConfig.hasOwnProperty(key) + ) { + delete config[key] } - }) + } + }) - // constructs array of suggested chart revisions to insert. - const values: any[] = [] - suggestedConfigs.map((config) => { - const chartId = config.id as number - const originalConfig = originalConfigs[chartId] - const suggestedConfig: GrapherInterface = Object.assign( - {}, - JSON.parse(JSON.stringify(originalConfig)), - config - ) - if (!lodash.isEqual(suggestedConfig, originalConfig)) { - if (suggestedConfig.version) { - suggestedConfig.version += 1 - } - values.push([ - chartId, - JSON.stringify(suggestedConfig), - JSON.stringify(originalConfig), - suggestedReason, - changesInDataSummary, - status, - res.locals.user.id, - new Date(), - new Date(), - ]) + // constructs array of suggested chart revisions to insert. 
+ const values: any[] = [] + suggestedConfigs.map((config) => { + const chartId = config.id as number + const originalConfig = originalConfigs[chartId] + const suggestedConfig: GrapherInterface = Object.assign( + {}, + JSON.parse(JSON.stringify(originalConfig)), + config + ) + if (!lodash.isEqual(suggestedConfig, originalConfig)) { + if (suggestedConfig.version) { + suggestedConfig.version += 1 } - }) + values.push([ + chartId, + JSON.stringify(suggestedConfig), + JSON.stringify(originalConfig), + suggestedReason, + changesInDataSummary, + status, + res.locals.user.id, + new Date(), + new Date(), + ]) + } + }) - // inserts suggested chart revisions - const result = await t.execute( - ` + // inserts suggested chart revisions + const result = await t.execute( + ` INSERT INTO suggested_chart_revisions (chartId, suggestedConfig, originalConfig, suggestedReason, changesInDataSummary, status, createdBy, createdAt, updatedAt) VALUES ? `, - [values] - ) - if (result.affectedRows > 0) { - messages.push({ - type: "success", - text: `${result.affectedRows} chart revisions have been queued for approval.`, - }) - } - if (suggestedConfigs.length - result.affectedRows > 0) { - messages.push({ - type: "warning", - text: `${ - suggestedConfigs.length - result.affectedRows - } chart revisions have not been queued for approval (e.g. because the chart revision does not contain any changes).`, - }) - } - }) + [values] + ) + if (result.affectedRows > 0) { + messages.push({ + type: "success", + text: `${result.affectedRows} chart revisions have been queued for approval.`, + }) + } + if (suggestedConfigs.length - result.affectedRows > 0) { + messages.push({ + type: "warning", + text: `${ + suggestedConfigs.length - result.affectedRows + } chart revisions have not been queued for approval (e.g. 
because the chart revision does not contain any changes).`, + }) + } + }) - return { success: true, messages } - } -) + return { success: true, messages } +}) apiRouter.get( "/suggested-chart-revisions/:suggestedChartRevisionId", - async (req: Request, res: Response) => { + async (req, res) => { const suggestedChartRevisionId = expectInt( req.params.suggestedChartRevisionId ) @@ -1144,7 +1135,7 @@ apiRouter.get( apiRouter.post( "/suggested-chart-revisions/:suggestedChartRevisionId/update", - async (req: Request, res: Response) => { + async (req, res) => { const suggestedChartRevisionId = expectInt( req.params.suggestedChartRevisionId ) @@ -1266,9 +1257,8 @@ apiRouter.post( } ) -apiRouter.get("/users.json", async (req: Request, res: Response) => ({ - users: await db - .knexInstance() +getRouteWithROTransaction(apiRouter, "/users.json", async (req, res, trx) => ({ + users: await trx .select( "id" satisfies keyof DbPlainUser, "email" satisfies keyof DbPlainUser, @@ -1284,14 +1274,18 @@ apiRouter.get("/users.json", async (req: Request, res: Response) => ({ .orderBy("lastSeen", "desc"), })) -apiRouter.get("/users/:userId.json", async (req: Request, res: Response) => { - const id = parseIntOrUndefined(req.params.userId) - if (!id) throw new JsonError("No user id given") - const user = await getUserById(db.knexInstance(), id) - return { user } -}) +getRouteWithROTransaction( + apiRouter, + "/users/:userId.json", + async (req, res, trx) => { + const id = parseIntOrUndefined(req.params.userId) + if (!id) throw new JsonError("No user id given") + const user = await getUserById(trx, id) + return { user } + } +) -apiRouter.delete("/users/:userId", async (req: Request, res: Response) => { +apiRouter.delete("/users/:userId", async (req, res) => { if (!res.locals.user.isSuperuser) throw new JsonError("Permission denied", 403) @@ -1303,43 +1297,54 @@ apiRouter.delete("/users/:userId", async (req: Request, res: Response) => { return { success: true } }) -apiRouter.put("/users/:userId", async (req: Request, res: Response) => { - if (!res.locals.user.isSuperuser) - throw new JsonError("Permission denied", 403) +putRouteWithRWTransaction( + apiRouter, + "/users/:userId", + async (req, res, trx: db.KnexReadWriteTransaction) => { + if (!res.locals.user.isSuperuser) + throw new JsonError("Permission denied", 403) - return db.knexInstance().transaction(async (t) => { const userId = parseIntOrUndefined(req.params.userId) - const user = userId !== undefined ? await getUserById(t, userId) : null + const user = + userId !== undefined ? 
await getUserById(trx, userId) : null if (!user) throw new JsonError("No such user", 404) user.fullName = req.body.fullName user.isActive = req.body.isActive - await updateUser(t, userId!, pick(user, ["fullName", "isActive"])) + await updateUser(trx, userId!, pick(user, ["fullName", "isActive"])) return { success: true } - }) -}) + } +) -apiRouter.post("/users/add", async (req: Request, res: Response) => { - if (!res.locals.user.isSuperuser) - throw new JsonError("Permission denied", 403) +postRouteWithRWTransaction( + apiRouter, + "/users/add", + async (req, res, trx: db.KnexReadWriteTransaction) => { + if (!res.locals.user.isSuperuser) + throw new JsonError("Permission denied", 403) - const { email, fullName } = req.body + const { email, fullName } = req.body - await insertUser(db.knexInstance(), { - email, - fullName, - }) + await insertUser(trx, { + email, + fullName, + }) - return { success: true } -}) + return { success: true } + } +) -apiRouter.get("/variables.json", async (req) => { - const limit = parseIntOrUndefined(req.query.limit as string) ?? 50 - const query = req.query.search as string - return await searchVariables(query, limit, db.knexInstance()) -}) +getRouteWithROTransaction( + apiRouter, + "/variables.json", + async (req, res, trx) => { + const limit = parseIntOrUndefined(req.query.limit as string) ?? 50 + const query = req.query.search as string + return await searchVariables(query, limit, trx) + } +) apiRouter.get( "/chart-bulk-update", @@ -1525,9 +1530,10 @@ ORDER BY usageCount DESC` }) // Used in VariableEditPage -apiRouter.get( +getRouteWithROTransaction( + apiRouter, "/variables/:variableId.json", - async (req: Request, res: Response) => { + async (req, res, trx) => { const variableId = expectInt(req.params.variableId) const variable = await fetchS3MetadataByPath( @@ -1558,7 +1564,7 @@ apiRouter.get( const grapherConfig = await getMergedGrapherConfigForVariable( variableId, - db.knexInstance() + trx ) if ( grapherConfig && @@ -1589,12 +1595,13 @@ apiRouter.get( } ) -apiRouter.get("/datasets.json", async (req) => { - return db.knexInstance().transaction( - async (trx) => { - const datasets = await db.knexRaw>( - trx, - ` +getRouteWithROTransaction( + apiRouter, + "/datasets.json", + async (req, res, trx) => { + const datasets = await db.knexRaw>( + trx, + ` WITH variable_counts AS ( SELECT v.datasetId, @@ -1624,41 +1631,40 @@ apiRouter.get("/datasets.json", async (req) => { JOIN datasets d ON d.id=ad.id ORDER BY ad.dataEditedAt DESC ` - ) + ) - const tags = await db.knexRaw< - Pick & - Pick - >( - trx, - ` + const tags = await db.knexRaw< + Pick & + Pick + >( + trx, + ` SELECT dt.datasetId, t.id, t.name FROM dataset_tags dt JOIN tags t ON dt.tagId = t.id ` + ) + const tagsByDatasetId = lodash.groupBy(tags, (t) => t.datasetId) + for (const dataset of datasets) { + dataset.tags = (tagsByDatasetId[dataset.id] || []).map((t) => + lodash.omit(t, "datasetId") ) - const tagsByDatasetId = lodash.groupBy(tags, (t) => t.datasetId) - for (const dataset of datasets) { - dataset.tags = (tagsByDatasetId[dataset.id] || []).map((t) => - lodash.omit(t, "datasetId") - ) - } - /*LEFT JOIN variables AS v ON v.datasetId=d.id + } + /*LEFT JOIN variables AS v ON v.datasetId=d.id GROUP BY d.id*/ - return { datasets: datasets } - }, - { readOnly: true } - ) -}) + return { datasets: datasets } + } +) -apiRouter.get("/datasets/:datasetId.json", async (req: Request) => { - const datasetId = expectInt(req.params.datasetId) +getRouteWithROTransaction( + apiRouter, + "/datasets/:datasetId.json", 
+ async (req: Request, res, trx) => { + const datasetId = expectInt(req.params.datasetId) - return db.knexInstance().transaction( - async (trx) => { - const dataset = await db.knexRawFirst>( - trx, - ` + const dataset = await db.knexRawFirst>( + trx, + ` SELECT d.id, d.namespace, d.name, @@ -1681,42 +1687,42 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => { JOIN users mu ON mu.id=d.metadataEditedByUserId WHERE d.id = ? `, - [datasetId] - ) + [datasetId] + ) - if (!dataset) - throw new JsonError(`No dataset by id '${datasetId}'`, 404) + if (!dataset) + throw new JsonError(`No dataset by id '${datasetId}'`, 404) - const zipFile = await db.knexRawFirst( - trx, - `SELECT filename FROM dataset_files WHERE datasetId=?`, - [datasetId] - ) - if (zipFile) dataset.zipFile = zipFile + const zipFile = await db.knexRawFirst( + trx, + `SELECT filename FROM dataset_files WHERE datasetId=?`, + [datasetId] + ) + if (zipFile) dataset.zipFile = zipFile - const variables: Pick< - DbRawVariable, - "id" | "name" | "description" | "display" | "catalogPath" - >[] = await db.knexRaw( - trx, - ` + const variables: Pick< + DbRawVariable, + "id" | "name" | "description" | "display" | "catalogPath" + >[] = await db.knexRaw( + trx, + ` SELECT v.id, v.name, v.description, v.display, v.catalogPath FROM variables AS v WHERE v.datasetId = ? `, - [datasetId] - ) + [datasetId] + ) - for (const v of variables) { - v.display = JSON.parse(v.display) - } + for (const v of variables) { + v.display = JSON.parse(v.display) + } - dataset.variables = variables + dataset.variables = variables - // add all origins - const origins: DbRawOrigin[] = await db.knexRaw( - trx, - ` + // add all origins + const origins: DbRawOrigin[] = await db.knexRaw( + trx, + ` select distinct o.* from origins_variables as ov @@ -1724,36 +1730,36 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => { join variables as v on ov.variableId = v.id where v.datasetId = ? `, - [datasetId] - ) + [datasetId] + ) - const parsedOrigins = origins.map(parseOriginsRow) + const parsedOrigins = origins.map(parseOriginsRow) - dataset.origins = parsedOrigins + dataset.origins = parsedOrigins - const sources = await db.knexRaw( - trx, - ` + const sources = await db.knexRaw( + trx, + ` SELECT s.id, s.name, s.description FROM sources AS s WHERE s.datasetId = ? ORDER BY s.id ASC `, - [datasetId] - ) + [datasetId] + ) - // expand description of sources and add to dataset as variableSources - dataset.variableSources = sources.map((s: any) => { - return { - id: s.id, - name: s.name, - ...JSON.parse(s.description), - } - }) + // expand description of sources and add to dataset as variableSources + dataset.variableSources = sources.map((s: any) => { + return { + id: s.id, + name: s.name, + ...JSON.parse(s.description), + } + }) - const charts = await db.knexRaw( - trx, - ` + const charts = await db.knexRaw( + trx, + ` SELECT ${OldChart.listFields} FROM charts JOIN chart_dimensions AS cd ON cd.chartId = charts.id @@ -1763,51 +1769,50 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => { WHERE v.datasetId = ? GROUP BY charts.id `, - [datasetId] - ) + [datasetId] + ) - dataset.charts = charts + dataset.charts = charts - await Chart.assignTagsForCharts(charts as any) + await Chart.assignTagsForCharts(charts as any) - const tags = await db.knexRaw( - trx, - ` + const tags = await db.knexRaw( + trx, + ` SELECT t.id, t.name FROM tags t JOIN dataset_tags dt ON dt.tagId = t.id WHERE dt.datasetId = ? 
`, - [datasetId] - ) - dataset.tags = tags + [datasetId] + ) + dataset.tags = tags - const availableTags = await db.knexRaw( - trx, - ` + const availableTags = await db.knexRaw( + trx, + ` SELECT t.id, t.name, p.name AS parentName FROM tags AS t JOIN tags AS p ON t.parentId=p.id WHERE p.isBulkImport IS FALSE ` - ) - dataset.availableTags = availableTags + ) + dataset.availableTags = availableTags - return { dataset: dataset } - }, - { readOnly: true } - ) -}) + return { dataset: dataset } + } +) -apiRouter.put("/datasets/:datasetId", async (req: Request, res: Response) => { - // Only updates `nonRedistributable` and `tags`, other fields come from ETL - // and are not editable - const datasetId = expectInt(req.params.datasetId) - const knex = db.knexInstance() - const dataset = await getDatasetById(knex, datasetId) - if (!dataset) throw new JsonError(`No dataset by id ${datasetId}`, 404) +putRouteWithRWTransaction( + apiRouter, + "/datasets/:datasetId", + async (req, res, trx) => { + // Only updates `nonRedistributable` and `tags`, other fields come from ETL + // and are not editable + const datasetId = expectInt(req.params.datasetId) + const dataset = await getDatasetById(trx, datasetId) + if (!dataset) throw new JsonError(`No dataset by id ${datasetId}`, 404) - await knex.transaction(async (trx) => { const newDataset = (req.body as { dataset: any }).dataset await db.knexRaw( trx, @@ -1848,70 +1853,62 @@ apiRouter.put("/datasets/:datasetId", async (req: Request, res: Response) => { logErrorAndMaybeSendToBugsnag(err, req) // Continue } - }) - return { success: true } -}) + return { success: true } + } +) -apiRouter.post( +postRouteWithRWTransaction( + apiRouter, "/datasets/:datasetId/setArchived", - async (req: Request, res: Response) => { + async (req, res, trx) => { const datasetId = expectInt(req.params.datasetId) - const knex = db.knexInstance() - const dataset = await getDatasetById(knex, datasetId) + const dataset = await getDatasetById(trx, datasetId) if (!dataset) throw new JsonError(`No dataset by id ${datasetId}`, 404) - await knex.transaction(async (trx) => { - await db.knexRaw( - trx, - `UPDATE datasets SET isArchived = 1 WHERE id=?`, - [datasetId] - ) - }) + + await db.knexRaw(trx, `UPDATE datasets SET isArchived = 1 WHERE id=?`, [ + datasetId, + ]) return { success: true } } ) -apiRouter.post( +postRouteWithRWTransaction( + apiRouter, "/datasets/:datasetId/setTags", - async (req: Request, res: Response) => { + async (req, res, trx) => { const datasetId = expectInt(req.params.datasetId) - await setTagsForDataset(db.knexInstance(), datasetId, req.body.tagIds) + await setTagsForDataset(trx, datasetId, req.body.tagIds) return { success: true } } ) -apiRouter.delete( +deleteRouteWithRWTransaction( + apiRouter, "/datasets/:datasetId", - async (req: Request, res: Response) => { + async (req, res, trx) => { const datasetId = expectInt(req.params.datasetId) - const knex = db.knexInstance() - const dataset = await getDatasetById(knex, datasetId) + const dataset = await getDatasetById(trx, datasetId) if (!dataset) throw new JsonError(`No dataset by id ${datasetId}`, 404) - await knex.transaction(async (trx) => { - await db.knexRaw( - trx, - `DELETE d FROM country_latest_data AS d JOIN variables AS v ON d.variable_id=v.id WHERE v.datasetId=?`, - [datasetId] - ) - await db.knexRaw( - trx, - `DELETE FROM dataset_files WHERE datasetId=?`, - [datasetId] - ) - await db.knexRaw(trx, `DELETE FROM variables WHERE datasetId=?`, [ - datasetId, - ]) - await db.knexRaw(trx, `DELETE FROM sources WHERE 
datasetId=?`, [ - datasetId, - ]) - await db.knexRaw(trx, `DELETE FROM datasets WHERE id=?`, [ - datasetId, - ]) - }) + await db.knexRaw( + trx, + `DELETE d FROM country_latest_data AS d JOIN variables AS v ON d.variable_id=v.id WHERE v.datasetId=?`, + [datasetId] + ) + await db.knexRaw(trx, `DELETE FROM dataset_files WHERE datasetId=?`, [ + datasetId, + ]) + await db.knexRaw(trx, `DELETE FROM variables WHERE datasetId=?`, [ + datasetId, + ]) + await db.knexRaw(trx, `DELETE FROM sources WHERE datasetId=?`, [ + datasetId, + ]) + await db.knexRaw(trx, `DELETE FROM datasets WHERE id=?`, [datasetId]) try { await removeDatasetFromGitRepo(dataset.name, dataset.namespace, { @@ -1927,20 +1924,19 @@ apiRouter.delete( } ) -apiRouter.post( +postRouteWithRWTransaction( + apiRouter, "/datasets/:datasetId/charts", - async (req: Request, res: Response) => { + async (req, res, trx) => { const datasetId = expectInt(req.params.datasetId) - const knex = db.knexInstance() - const dataset = await getDatasetById(knex, datasetId) + const dataset = await getDatasetById(trx, datasetId) if (!dataset) throw new JsonError(`No dataset by id ${datasetId}`, 404) if (req.body.republish) { - await knex.transaction(async (trx) => { - await db.knexRaw( - trx, - ` + await db.knexRaw( + trx, + ` UPDATE charts SET config = JSON_SET(config, "$.version", config->"$.version" + 1) WHERE id IN ( @@ -1950,9 +1946,8 @@ apiRouter.post( WHERE variables.datasetId = ? ) `, - [datasetId] - ) - }) + [datasetId] + ) } await triggerStaticBuild( @@ -1965,14 +1960,14 @@ apiRouter.post( ) // Get a list of redirects that map old slugs to charts -apiRouter.get("/redirects.json", async (req: Request, res: Response) => ({ +apiRouter.get("/redirects.json", async (req, res) => ({ redirects: await db.queryMysql(` SELECT r.id, r.slug, r.chart_id as chartId, JSON_UNQUOTE(JSON_EXTRACT(charts.config, "$.slug")) AS chartSlug FROM chart_slug_redirects AS r JOIN charts ON charts.id = r.chart_id ORDER BY r.id DESC`), })) -apiRouter.get("/tags/:tagId.json", async (req: Request, res: Response) => { +apiRouter.get("/tags/:tagId.json", async (req, res) => { const tagId = expectInt(req.params.tagId) as number | null // NOTE (Mispy): The "uncategorized" tag is special -- it represents all untagged stuff @@ -2119,7 +2114,7 @@ apiRouter.post("/tags/new", async (req: Request) => { return { success: true, tagId: result.insertId } }) -apiRouter.get("/tags.json", async (req: Request, res: Response) => { +apiRouter.get("/tags.json", async (req, res) => { const tags = await db.queryMysql(` SELECT t.id, t.name, t.parentId, t.specialType FROM tags t LEFT JOIN tags p ON t.parentId=p.id @@ -2132,7 +2127,7 @@ apiRouter.get("/tags.json", async (req: Request, res: Response) => { } }) -apiRouter.delete("/tags/:tagId/delete", async (req: Request, res: Response) => { +apiRouter.delete("/tags/:tagId/delete", async (req, res) => { const tagId = expectInt(req.params.tagId) await db.transaction(async (t) => { @@ -2157,7 +2152,7 @@ apiRouter.post("/charts/:chartId/redirects/new", async (req: Request) => { return { success: true, redirect: redirect } }) -apiRouter.delete("/redirects/:id", async (req: Request, res: Response) => { +apiRouter.delete("/redirects/:id", async (req, res) => { const id = expectInt(req.params.id) const redirect = await db.mysqlFirst( @@ -2223,154 +2218,163 @@ apiRouter.get("/posts.json", async (req) => { return { posts: rows } }) -apiRouter.post( - "/posts/:postId/setTags", - async (req: Request, res: Response) => { - const postId = expectInt(req.params.postId) 
+apiRouter.post("/posts/:postId/setTags", async (req, res) => { + const postId = expectInt(req.params.postId) - await setTagsForPost(postId, req.body.tagIds) + await setTagsForPost(postId, req.body.tagIds) - return { success: true } + return { success: true } +}) + +getRouteWithROTransaction( + apiRouter, + "/posts/:postId.json", + async (req, res, trx) => { + const postId = expectInt(req.params.postId) + const post = (await trx + .table(PostsTableName) + .where({ id: postId }) + .select("*") + .first()) as DbRawPost | undefined + return camelCaseProperties({ ...post }) } ) -apiRouter.get("/posts/:postId.json", async (req: Request, res: Response) => { - const postId = expectInt(req.params.postId) - const post = (await db - .knexTable(postsTable) - .where({ id: postId }) - .select("*") - .first()) as DbEnrichedPost | undefined - return camelCaseProperties({ ...post }) -}) - -apiRouter.post("/posts/:postId/createGdoc", async (req: Request) => { - const postId = expectInt(req.params.postId) - const allowRecreate = !!req.body.allowRecreate - const post = (await db - .knexTable("posts_with_gdoc_publish_status") - .where({ id: postId }) - .select("*") - .first()) as DbRawPostWithGdocPublishStatus | undefined - - if (!post) throw new JsonError(`No post found for id ${postId}`, 404) - const existingGdocId = post.gdocSuccessorId - if (!allowRecreate && existingGdocId) - throw new JsonError("A gdoc already exists for this post", 400) - if (allowRecreate && existingGdocId && post.isGdocPublished) { - throw new JsonError( - "A gdoc already exists for this post and it is already published", - 400 +postRouteWithRWTransaction( + apiRouter, + "/posts/:postId/createGdoc", + async (req: Request, res, trx) => { + const postId = expectInt(req.params.postId) + const allowRecreate = !!req.body.allowRecreate + const post = (await trx + .table("posts_with_gdoc_publish_status") + .where({ id: postId }) + .select("*") + .first()) as DbRawPostWithGdocPublishStatus | undefined + + if (!post) throw new JsonError(`No post found for id ${postId}`, 404) + const existingGdocId = post.gdocSuccessorId + if (!allowRecreate && existingGdocId) + throw new JsonError("A gdoc already exists for this post", 400) + if (allowRecreate && existingGdocId && post.isGdocPublished) { + throw new JsonError( + "A gdoc already exists for this post and it is already published", + 400 + ) + } + if (post.archieml === null) + throw new JsonError( + `ArchieML was not present for post with id ${postId}`, + 500 + ) + const tagsByPostId = await getTagsByPostId(trx) + const tags = + tagsByPostId + .get(postId) + ?.map(({ id }) => TagEntity.create({ id })) || [] + const archieMl = JSON.parse( + // Google Docs interprets ®ion in grapher URLS as ®ion + // So we escape them here + post.archieml.replaceAll("&", "&") + ) as OwidGdocPostInterface + const gdocId = await createGdocAndInsertOwidGdocPostContent( + archieMl.content, + post.gdocSuccessorId ) + // If we did not yet have a gdoc associated with this post, we need to register + // the gdocSuccessorId and create an entry in the posts_gdocs table. Otherwise + // we don't need to make changes to the DB (only the gdoc regeneration was required) + if (!existingGdocId) { + post.gdocSuccessorId = gdocId + // This is not ideal - we are using knex for on thing and typeorm for another + // which means that we can't wrap this in a transaction. 
We should probably
+            // move posts to use typeorm as well or at least have a typeorm alternative for it
+            await trx
+                .table(postsTable)
+                .where({ id: postId })
+                .update("gdocSuccessorId", gdocId)
+
+            const gdoc = new GdocPost(gdocId)
+            gdoc.slug = post.slug
+            gdoc.tags = tags
+            gdoc.content.title = post.title
+            gdoc.content.type = archieMl.content.type || OwidGdocType.Article
+            gdoc.published = false
+            gdoc.createdAt = new Date()
+            gdoc.publishedAt = post.published_at
+            await dataSource.getRepository(GdocPost).save(gdoc)
+        }
+
+        return { googleDocsId: gdocId }
     }
-    if (post.archieml === null)
-        throw new JsonError(
-            `ArchieML was not present for post with id ${postId}`,
-            500
-        )
-    const tagsByPostId = await getTagsByPostId()
-    const tags =
-        tagsByPostId.get(postId)?.map(({ id }) => TagEntity.create({ id })) ||
-        []
-    const archieMl = JSON.parse(
-        // Google Docs interprets &region in grapher URLS as ®ion
-        // So we escape them here
-        post.archieml.replaceAll("&", "&amp;")
-    ) as OwidGdocPostInterface
-    const gdocId = await createGdocAndInsertOwidGdocPostContent(
-        archieMl.content,
-        post.gdocSuccessorId
-    )
-    // If we did not yet have a gdoc associated with this post, we need to register
-    // the gdocSuccessorId and create an entry in the posts_gdocs table. Otherwise
-    // we don't need to make changes to the DB (only the gdoc regeneration was required)
-    if (!existingGdocId) {
-        post.gdocSuccessorId = gdocId
+)
+
+postRouteWithRWTransaction(
+    apiRouter,
+    "/posts/:postId/unlinkGdoc",
+    async (req: Request, res, trx) => {
+        const postId = expectInt(req.params.postId)
+        const post = (await trx
+            .table("posts_with_gdoc_publish_status")
+            .where({ id: postId })
+            .select("*")
+            .first()) as DbRawPostWithGdocPublishStatus | undefined
+
+        if (!post) throw new JsonError(`No post found for id ${postId}`, 404)
+        const existingGdocId = post.gdocSuccessorId
+        if (!existingGdocId)
+            throw new JsonError("No gdoc exists for this post", 400)
+        if (existingGdocId && post.isGdocPublished) {
+            throw new JsonError(
+                "The GDoc is already published - you can't unlink it",
+                400
+            )
+        }
         // This is not ideal - we are using knex for one thing and typeorm for another
         // which means that we can't wrap this in a transaction. 
We should probably // move posts to use typeorm as well or at least have a typeorm alternative for it - await db - .knexTable(postsTable) + await trx + .table(postsTable) .where({ id: postId }) - .update("gdocSuccessorId", gdocId) - - const gdoc = new GdocPost(gdocId) - gdoc.slug = post.slug - gdoc.tags = tags - gdoc.content.title = post.title - gdoc.content.type = archieMl.content.type || OwidGdocType.Article - gdoc.published = false - gdoc.createdAt = new Date() - gdoc.publishedAt = post.published_at - await dataSource.getRepository(GdocPost).save(gdoc) - } + .update("gdocSuccessorId", null) - return { googleDocsId: gdocId } -}) + await dataSource.getRepository(GdocPost).delete(existingGdocId) -apiRouter.post("/posts/:postId/unlinkGdoc", async (req: Request) => { - const postId = expectInt(req.params.postId) - const post = (await db - .knexTable("posts_with_gdoc_publish_status") - .where({ id: postId }) - .select("*") - .first()) as DbRawPostWithGdocPublishStatus | undefined - - if (!post) throw new JsonError(`No post found for id ${postId}`, 404) - const existingGdocId = post.gdocSuccessorId - if (!existingGdocId) - throw new JsonError("No gdoc exists for this post", 400) - if (existingGdocId && post.isGdocPublished) { - throw new JsonError( - "The GDoc is already published - you can't unlink it", - 400 - ) + return { success: true } } - // This is not ideal - we are using knex for on thing and typeorm for another - // which means that we can't wrap this in a transaction. We should probably - // move posts to use typeorm as well or at least have a typeorm alternative for it - await db - .knexTable(postsTable) - .where({ id: postId }) - .update("gdocSuccessorId", null) - - await dataSource.getRepository(GdocPost).delete(existingGdocId) +) - return { success: true } -}) +getRouteWithROTransaction( + apiRouter, + "/sources/:sourceId.json", + async (req: Request, res, trx) => { + const sourceId = expectInt(req.params.sourceId) -apiRouter.get("/sources/:sourceId.json", async (req: Request) => { - const sourceId = expectInt(req.params.sourceId) - return db.knexInstance().transaction( - async (trx) => { - const source = await db.knexRawFirst>( - trx, - ` + const source = await db.knexRawFirst>( + trx, + ` SELECT s.id, s.name, s.description, s.createdAt, s.updatedAt, d.namespace FROM sources AS s JOIN active_datasets AS d ON d.id=s.datasetId WHERE s.id=?`, - [sourceId] - ) - if (!source) - throw new JsonError(`No source by id '${sourceId}'`, 404) - source.variables = await db.knexRaw( - trx, - `SELECT id, name, updatedAt FROM variables WHERE variables.sourceId=?`, - [sourceId] - ) + [sourceId] + ) + if (!source) throw new JsonError(`No source by id '${sourceId}'`, 404) + source.variables = await db.knexRaw( + trx, + `SELECT id, name, updatedAt FROM variables WHERE variables.sourceId=?`, + [sourceId] + ) - return { source: source } - }, - { readOnly: true } - ) -}) + return { source: source } + } +) apiRouter.get("/deploys.json", async () => ({ deploys: await new DeployQueueServer().getDeploys(), })) -apiRouter.put("/deploy", async (req: Request, res: Response) => { +apiRouter.put("/deploy", async (req, res) => { triggerStaticBuild(res.locals.user, "Manually triggered deploy") }) @@ -2510,14 +2514,14 @@ apiRouter.put("/gdocs/:id", async (req, res) => { return nextGdoc }) -apiRouter.delete("/gdocs/:id", async (req, res) => { +deleteRouteWithRWTransaction(apiRouter, "/gdocs/:id", async (req, res, trx) => { const { id } = req.params const gdoc = await GdocPost.findOneBy({ id }) if (!gdoc) throw new 
JsonError(`No Google Doc with id ${id} found`)
 
-    await db
-        .knexTable("posts")
+    await trx
+        .table("posts")
         .where({ gdocSuccessorId: gdoc.id })
         .update({ gdocSuccessorId: null })
 
@@ -2532,22 +2536,19 @@ apiRouter.delete("/gdocs/:id", async (req, res) => {
     return {}
 })
 
-apiRouter.post(
-    "/gdocs/:gdocId/setTags",
-    async (req: Request, res: Response) => {
-        const { gdocId } = req.params
-        const { tagIds } = req.body
+apiRouter.post("/gdocs/:gdocId/setTags", async (req, res) => {
+    const { gdocId } = req.params
+    const { tagIds } = req.body
 
-        const gdoc = await GdocPost.findOneBy({ id: gdocId })
-        if (!gdoc) return Error(`Unable to find Gdoc with ID: ${gdocId}`)
-        const tags = await dataSource
-            .getRepository(TagEntity)
-            .findBy({ id: In(tagIds) })
-        gdoc.tags = tags
-        await gdoc.save()
-        return { success: true }
-    }
-)
+
+    const gdoc = await GdocPost.findOneBy({ id: gdocId })
+    if (!gdoc) return Error(`Unable to find Gdoc with ID: ${gdocId}`)
+    const tags = await dataSource
+        .getRepository(TagEntity)
+        .findBy({ id: In(tagIds) })
+    gdoc.tags = tags
+    await gdoc.save()
+    return { success: true }
+})
 
 apiRouter.get(
     `/gpt/suggest-topics/${TaggableType.Charts}/:chartId.json`,
@@ -2569,27 +2570,35 @@ apiRouter.get(
     }
 )
 
-apiRouter.post("/explorer/:slug/tags", async (req: Request, res: Response) => {
-    const { slug } = req.params
-    const { tagIds } = req.body
-    const explorer = await db.knexTable("explorers").where({ slug }).first()
-    if (!explorer)
-        throw new JsonError(`No explorer found for slug ${slug}`, 404)
+postRouteWithRWTransaction(
+    apiRouter,
+    "/explorer/:slug/tags",
+    async (req, res, trx) => {
+        const { slug } = req.params
+        const { tagIds } = req.body
+        const explorer = await trx.table("explorers").where({ slug }).first()
+        if (!explorer)
+            throw new JsonError(`No explorer found for slug ${slug}`, 404)
 
-    await db.knexInstance().transaction(async (t) => {
-        await t.table("explorer_tags").where({ explorerSlug: slug }).delete()
+        await trx.table("explorer_tags").where({ explorerSlug: slug }).delete()
         for (const tagId of tagIds) {
-            await t.table("explorer_tags").insert({ explorerSlug: slug, tagId })
+            await trx
+                .table("explorer_tags")
+                .insert({ explorerSlug: slug, tagId })
         }
-    })
-    return { success: true }
-})
+        return { success: true }
+    }
+)
 
-apiRouter.delete("/explorer/:slug/tags", async (req: Request) => {
-    const { slug } = req.params
-    await db.knexTable("explorer_tags").where({ explorerSlug: slug }).delete()
-    return { success: true }
-})
+deleteRouteWithRWTransaction(
+    apiRouter,
+    "/explorer/:slug/tags",
+    async (req: Request, res, trx) => {
+        const { slug } = req.params
+        await trx.table("explorer_tags").where({ explorerSlug: slug }).delete()
+        return { success: true }
+    }
+)
 
 export { apiRouter }
diff --git a/adminSiteServer/exportGitData.ts b/adminSiteServer/exportGitData.ts
index d4257dd1582..54251212214 100644
--- a/adminSiteServer/exportGitData.ts
+++ b/adminSiteServer/exportGitData.ts
@@ -3,14 +3,16 @@ import { syncDatasetToGitRepo } from "./gitDataExport.js"
 import { DbPlainDataset, DatasetsTableName } from "@ourworldindata/types"
 
 const main = async () => {
-    const knex = db.knexInstance()
-    const datasets = await knex(DatasetsTableName).where({
-        namespace: "owid",
+    await db.knexReadonlyTransaction(async (knex) => {
+        const datasets = await knex(DatasetsTableName).where({
+            namespace: "owid",
+        })
+        for (const dataset of datasets) {
+            if (!dataset.isPrivate && !dataset.nonRedistributable)
+                await syncDatasetToGitRepo(knex, dataset.id, { commitOnly: true })
+        }
     })
- for (const dataset of datasets) { - if (!dataset.isPrivate && !dataset.nonRedistributable) - await syncDatasetToGitRepo(knex, dataset.id, { commitOnly: true }) - } + await db.closeTypeOrmAndKnexConnections() } diff --git a/adminSiteServer/gitDataExport.ts b/adminSiteServer/gitDataExport.ts index f61484b61cb..f14fcd6643d 100644 --- a/adminSiteServer/gitDataExport.ts +++ b/adminSiteServer/gitDataExport.ts @@ -16,11 +16,10 @@ import filenamify from "filenamify" import { execFormatted } from "../db/execWrapper.js" import { JsonError } from "@ourworldindata/utils" import { DbPlainDataset } from "@ourworldindata/types" -import { Knex } from "knex" import { getSourcesForDataset } from "../db/model/Source.js" const datasetToReadme = async ( - knex: Knex, + knex: db.KnexReadonlyTransaction, dataset: DbPlainDataset ): Promise => { // TODO: add origins here @@ -62,7 +61,7 @@ export async function removeDatasetFromGitRepo( } export async function syncDatasetToGitRepo( - knex: Knex, + knex: db.KnexReadonlyTransaction, datasetId: number, options: { transaction?: db.TransactionContext diff --git a/adminSiteServer/mockSiteRouter.tsx b/adminSiteServer/mockSiteRouter.tsx index 05b06332c3b..7d3bde79c73 100644 --- a/adminSiteServer/mockSiteRouter.tsx +++ b/adminSiteServer/mockSiteRouter.tsx @@ -66,34 +66,38 @@ mockSiteRouter.use(express.json()) mockSiteRouter.get("/sitemap.xml", async (req, res) => { res.set("Content-Type", "application/xml") - const sitemap = await db - .knexInstance() - .transaction(async (knex) => makeSitemap(explorerAdminServer, knex)) + const sitemap = await db.knexReadonlyTransaction(async (knex) => + makeSitemap(explorerAdminServer, knex) + ) res.send(sitemap) }) mockSiteRouter.get("/atom.xml", async (req, res) => { res.set("Content-Type", "application/xml") - const atomFeed = await db - .knexInstance() - .transaction(async (knex) => makeAtomFeed(knex)) + const atomFeed = await db.knexReadonlyTransaction(async (knex) => + makeAtomFeed(knex) + ) res.send(atomFeed) }) mockSiteRouter.get("/atom-no-topic-pages.xml", async (req, res) => { res.set("Content-Type", "application/xml") - const atomFeedNoTopicPages = await db - .knexInstance() - .transaction(async (knex) => makeAtomFeedNoTopicPages(knex)) + const atomFeedNoTopicPages = await db.knexReadonlyTransaction( + async (knex) => makeAtomFeedNoTopicPages(knex) + ) res.send(atomFeedNoTopicPages) }) mockSiteRouter.get("/entries-by-year", async (req, res) => - res.send(await entriesByYearPage()) + res.send(await db.knexReadonlyTransaction((trx) => entriesByYearPage(trx))) ) mockSiteRouter.get(`/entries-by-year/:year`, async (req, res) => - res.send(await entriesByYearPage(parseInt(req.params.year))) + res.send( + await db.knexReadonlyTransaction((trx) => + entriesByYearPage(trx, parseInt(req.params.year)) + ) + ) ) mockSiteRouter.get( @@ -128,11 +132,9 @@ mockSiteRouter.get(`/${EXPLORERS_ROUTE_FOLDER}/:slug`, async (req, res) => { (program) => program.slug === req.params.slug ) if (explorerProgram) { - const explorerPage = await db - .knexInstance() - .transaction(async (knex) => { - return renderExplorerPage(explorerProgram, knex) - }) + const explorerPage = await db.knexReadonlyTransaction(async (knex) => { + return renderExplorerPage(explorerProgram, knex) + }) res.send(explorerPage) } else @@ -149,7 +151,7 @@ mockSiteRouter.get("/*", async (req, res, next) => { const { migrationId, baseQueryStr } = explorerRedirect const { explorerSlug } = explorerUrlMigrationsById[migrationId] const program = await 
explorerAdminServer.getExplorerFromSlug(explorerSlug) - const explorerPage = await db.knexInstance().transaction(async (knex) => { + const explorerPage = await db.knexReadonlyTransaction(async (knex) => { return renderExplorerPage(program, knex, { explorerUrlMigrationId: migrationId, baseQueryStr, @@ -172,18 +174,16 @@ mockSiteRouter.get("/grapher/:slug", async (req, res) => { // XXX add dev-prod parity for this res.set("Access-Control-Allow-Origin", "*") - const previewDataPageOrGrapherPage = await db - .knexInstance() - .transaction(async (knex) => - renderPreviewDataPageOrGrapherPage(entity.config, knex) - ) + const previewDataPageOrGrapherPage = await db.knexReadonlyTransaction( + async (knex) => renderPreviewDataPageOrGrapherPage(entity.config, knex) + ) res.send(previewDataPageOrGrapherPage) }) mockSiteRouter.get("/", async (req, res) => { - const frontPage = await db - .knexInstance() - .transaction(async (knex) => renderFrontPage(knex)) + const frontPage = await db.knexReadonlyTransaction(async (knex) => + renderFrontPage(knex) + ) res.send(frontPage) }) @@ -247,25 +247,25 @@ mockSiteRouter.get("/datapage-preview/:id", async (req, res) => { const variableMetadata = await getVariableMetadata(variableId) res.send( - await renderDataPageV2( - { - variableId, - variableMetadata, - isPreviewing: true, - useIndicatorGrapherConfigs: true, - }, - db.knexInstance() + await db.knexReadonlyTransaction((trx) => + renderDataPageV2( + { + variableId, + variableMetadata, + isPreviewing: true, + useIndicatorGrapherConfigs: true, + }, + trx + ) ) ) }) countryProfileSpecs.forEach((spec) => mockSiteRouter.get(`/${spec.rootPath}/:countrySlug`, async (req, res) => { - const countryPage = await db - .knexInstance() - .transaction(async (knex) => - countryProfileCountryPage(spec, req.params.countrySlug, knex) - ) + const countryPage = await db.knexReadonlyTransaction(async (knex) => + countryProfileCountryPage(spec, req.params.countrySlug, knex) + ) res.send(countryPage) }) ) @@ -275,20 +275,18 @@ mockSiteRouter.get("/search", async (req, res) => ) mockSiteRouter.get("/latest", async (req, res) => { - const latest = await db - .knexInstance() - .transaction(async (knex) => renderBlogByPageNum(1, knex)) + const latest = await db.knexReadonlyTransaction(async (knex) => + renderBlogByPageNum(1, knex) + ) res.send(latest) }) mockSiteRouter.get("/latest/page/:pageno", async (req, res) => { const pagenum = parseInt(req.params.pageno, 10) if (!isNaN(pagenum)) { - const latestPageNum = await db - .knexInstance() - .transaction(async (knex) => - renderBlogByPageNum(isNaN(pagenum) ? 1 : pagenum, knex) - ) + const latestPageNum = await db.knexReadonlyTransaction(async (knex) => + renderBlogByPageNum(isNaN(pagenum) ? 
1 : pagenum, knex) + ) res.send(latestPageNum) } else throw new Error("invalid page number") }) @@ -345,7 +343,11 @@ mockSiteRouter.get("/countries", async (req, res) => ) mockSiteRouter.get("/country/:countrySlug", async (req, res) => - res.send(await countryProfilePage(req.params.countrySlug, BAKED_BASE_URL)) + res.send( + await db.knexReadonlyTransaction((trx) => + countryProfilePage(trx, req.params.countrySlug, BAKED_BASE_URL) + ) + ) ) mockSiteRouter.get("/feedback", async (req, res) => @@ -383,9 +385,9 @@ mockSiteRouter.get("/*", async (req, res) => { } try { - const page = await db - .knexInstance() - .transaction(async (knex) => renderPageBySlug(slug, knex)) + const page = await db.knexReadonlyTransaction(async (knex) => + renderPageBySlug(slug, knex) + ) res.send(page) } catch (e) { console.error(e) diff --git a/adminSiteServer/routerHelpers.tsx b/adminSiteServer/routerHelpers.tsx new file mode 100644 index 00000000000..65537eac6f0 --- /dev/null +++ b/adminSiteServer/routerHelpers.tsx @@ -0,0 +1,98 @@ +import { FunctionalRouter } from "./FunctionalRouter.js" +import { Request, Response } from "express" +import * as db from "../db/db.js" +export function getRouteWithROTransaction( + router: FunctionalRouter, + targetPath: string, + handler: ( + req: Request, + res: Response, + trx: db.KnexReadonlyTransaction + ) => Promise +) { + return router.get(targetPath, (req: Request, res: Response) => { + return db.knexReadonlyTransaction((transaction) => + handler(req, res, transaction) + ) + }) +} + +// Might be needed in the future if we have get requests that e.g. write analytics data or stats to the DB +// function getRouteWithRWTransaction( +// targetPath: string, +// handler: ( +// req: Request, +// res: Response, +// trx: db.KnexReadWriteTransaction +// ) => Promise +// ) { +// return apiRouter.get(targetPath, (req: Request, res: Response) => { +// return db.knexReadWriteTransaction((transaction) => +// handler(req, res, transaction) +// ) +// }) +// } + +export function postRouteWithRWTransaction( + router: FunctionalRouter, + targetPath: string, + handler: ( + req: Request, + res: Response, + trx: db.KnexReadWriteTransaction + ) => Promise +) { + return router.post(targetPath, (req: Request, res: Response) => { + return db.knexReadWriteTransaction((transaction) => + handler(req, res, transaction) + ) + }) +} + +export function putRouteWithRWTransaction( + router: FunctionalRouter, + targetPath: string, + handler: ( + req: Request, + res: Response, + trx: db.KnexReadWriteTransaction + ) => Promise +) { + return router.put(targetPath, (req: Request, res: Response) => { + return db.knexReadWriteTransaction((transaction) => + handler(req, res, transaction) + ) + }) +} + +export function patchRouteWithRWTransaction( + router: FunctionalRouter, + targetPath: string, + handler: ( + req: Request, + res: Response, + trx: db.KnexReadWriteTransaction + ) => Promise +) { + return router.patch(targetPath, (req: Request, res: Response) => { + return db.knexReadWriteTransaction((transaction) => + handler(req, res, transaction) + ) + }) +} + +export function deleteRouteWithRWTransaction( + router: FunctionalRouter, + targetPath: string, + handler: ( + req: Request, + res: Response, + trx: db.KnexReadWriteTransaction + ) => Promise +) { + return router.delete(targetPath, (req: Request, res: Response) => { + return db.knexReadWriteTransaction((transaction) => + handler(req, res, transaction) + ) + }) +} diff --git a/baker/DeployUtils.ts b/baker/DeployUtils.ts index 3e4e5c21384..6df34f718e5 100644 
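For context, the route helpers above delegate to two transaction wrappers and two transaction types exported from db/db.ts, which this patch uses throughout but whose implementation is not part of this excerpt. A minimal sketch of what they plausibly look like follows — the names mirror the patch; the string branding, the declare for knexInstance, and the body details are illustrative assumptions, though the { readOnly: true } transaction config does appear verbatim in code removed elsewhere in this diff:

import { Knex } from "knex"

// Sketch only - not the actual db/db.ts from this commit.
// Branding makes the two kinds of transaction distinct for the compiler even
// though both are a plain Knex.Transaction at runtime, so a handler that
// needs write access cannot be registered via a read-only route helper.
export type KnexReadonlyTransaction = Knex.Transaction & {
    readonly __brand: "knex-readonly"
}
export type KnexReadWriteTransaction = Knex.Transaction & {
    readonly __brand: "knex-readwrite"
}

// Assumed to exist elsewhere in db.ts (the old code in this diff calls it
// as db.knexInstance()).
declare function knexInstance(): Knex

export async function knexReadonlyTransaction<T>(
    handler: (trx: KnexReadonlyTransaction) => Promise<T>
): Promise<T> {
    return knexInstance().transaction(
        (trx) => handler(trx as KnexReadonlyTransaction),
        { readOnly: true }
    )
}

export async function knexReadWriteTransaction<T>(
    handler: (trx: KnexReadWriteTransaction) => Promise<T>
): Promise<T> {
    return knexInstance().transaction((trx) =>
        handler(trx as KnexReadWriteTransaction)
    )
}

With wrappers along these lines, each helper in routerHelpers.tsx is pure glue: it opens the appropriate flavor of transaction per request, hands the branded trx to the handler, and the transaction commits or rolls back when the handler's promise settles.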
--- a/baker/DeployUtils.ts +++ b/baker/DeployUtils.ts @@ -11,7 +11,7 @@ import { import { SiteBaker } from "../baker/SiteBaker.js" import { WebClient } from "@slack/web-api" import { DeployChange, DeployMetadata } from "@ourworldindata/utils" -import { Knex } from "knex" +import { KnexReadonlyTransaction } from "../db/db.js" const deployQueueServer = new DeployQueueServer() @@ -35,7 +35,7 @@ export const defaultCommitMessage = async (): Promise => { */ const triggerBakeAndDeploy = async ( deployMetadata: DeployMetadata, - knex: Knex, + knex: KnexReadonlyTransaction, lightningQueue?: DeployChange[] ) => { // deploy to Buildkite if we're on master and BUILDKITE_API_ACCESS_TOKEN is set @@ -156,7 +156,9 @@ let deploying = false * the end of the current one, as long as there are changes in the queue. * If there are no changes in the queue, a deploy won't be initiated. */ -export const deployIfQueueIsNotEmpty = async (knex: Knex) => { +export const deployIfQueueIsNotEmpty = async ( + knex: KnexReadonlyTransaction +) => { if (deploying) return deploying = true let failures = 0 diff --git a/baker/ExplorerBaker.tsx b/baker/ExplorerBaker.tsx index 7eec1691521..9268476eabc 100644 --- a/baker/ExplorerBaker.tsx +++ b/baker/ExplorerBaker.tsx @@ -5,12 +5,12 @@ import { explorerUrlMigrationsById } from "../explorer/urlMigrations/ExplorerUrl import { ExplorerAdminServer } from "../explorerAdminServer/ExplorerAdminServer.js" import { explorerRedirectTable } from "../explorerAdminServer/ExplorerRedirects.js" import { renderExplorerPage } from "./siteRenderers.js" -import { Knex } from "knex" +import * as db from "../db/db.js" export const bakeAllPublishedExplorers = async ( outputFolder: string, explorerAdminServer: ExplorerAdminServer, - knex: Knex + knex: db.KnexReadonlyTransaction ) => { // remove all existing explorers, since we're re-baking every single one anyway fs.remove(outputFolder) @@ -23,7 +23,7 @@ export const bakeAllPublishedExplorers = async ( const bakeExplorersToDir = async ( directory: string, explorers: ExplorerProgram[] = [], - knex: Knex + knex: db.KnexReadonlyTransaction ) => { for (const explorer of explorers) { await write( @@ -36,7 +36,7 @@ const bakeExplorersToDir = async ( export const bakeAllExplorerRedirects = async ( outputFolder: string, explorerAdminServer: ExplorerAdminServer, - knex: Knex + knex: db.KnexReadonlyTransaction ) => { const explorers = await explorerAdminServer.getAllExplorers() const redirects = explorerRedirectTable.rows diff --git a/baker/GrapherBaker.tsx b/baker/GrapherBaker.tsx index c07d9ac5a2e..10682d74b24 100644 --- a/baker/GrapherBaker.tsx +++ b/baker/GrapherBaker.tsx @@ -58,7 +58,6 @@ import { parseFaqs } from "../db/model/Gdoc/rawToEnriched.js" import { GdocPost } from "../db/model/Gdoc/GdocPost.js" import { getShortPageCitation } from "../site/gdocs/utils.js" import { getSlugForTopicTag, getTagToSlugMap } from "./GrapherBakingUtils.js" -import { Knex } from "knex" import { knexRaw } from "../db/db.js" import { getRelatedChartsForVariable } from "../db/model/Chart.js" import pMap from "p-map" @@ -66,7 +65,7 @@ import pMap from "p-map" const renderDatapageIfApplicable = async ( grapher: GrapherInterface, isPreviewing: boolean, - knex: Knex, + knex: db.KnexReadonlyTransaction, imageMetadataDictionary?: Record ) => { const variable = await getVariableOfDatapageIfApplicable(grapher) @@ -92,7 +91,7 @@ const renderDatapageIfApplicable = async ( */ export const renderDataPageOrGrapherPage = async ( grapher: GrapherInterface, - knex: Knex, + knex: 
db.KnexReadonlyTransaction,
     imageMetadataDictionary?: Record
 ): Promise => {
     const datapage = await renderDatapageIfApplicable(
@@ -133,7 +132,7 @@ export async function renderDataPageV2(
         pageGrapher?: GrapherInterface
         imageMetadataDictionary?: Record
     },
-    knex: Knex
+    knex: db.KnexReadonlyTransaction
 ) {
     const grapherConfigForVariable = await getMergedGrapherConfigForVariable(
         variableId,
@@ -271,7 +270,7 @@ export async function renderDataPageV2(
             citation,
         }
     } else {
-        const post = await getPostEnrichedBySlug(slug)
+        const post = await getPostEnrichedBySlug(knex, slug)
         if (post) {
             const authors = post.authors
             const citation = getShortPageCitation(
@@ -318,7 +317,7 @@ export async function renderDataPageV2(
 */
 export const renderPreviewDataPageOrGrapherPage = async (
     grapher: GrapherInterface,
-    knex: Knex
+    knex: db.KnexReadonlyTransaction
 ) => {
     const datapage = await renderDatapageIfApplicable(grapher, true, knex)
     if (datapage) return datapage
@@ -328,13 +327,17 @@ export const renderPreviewDataPageOrGrapherPage = async (
 
 const renderGrapherPage = async (
     grapher: GrapherInterface,
-    knex: Knex
+    knex: db.KnexReadonlyTransaction
 ) => {
     const postSlug = urlToSlug(grapher.originUrl || "")
-    const post = postSlug ? await getPostEnrichedBySlug(postSlug) : undefined
-    const relatedCharts = post ? await getPostRelatedCharts(post.id) : undefined
+    const post = postSlug
+        ? await getPostEnrichedBySlug(knex, postSlug)
+        : undefined
+    const relatedCharts = post
+        ? await getPostRelatedCharts(knex, post.id)
+        : undefined
     const relatedArticles = grapher.id
-        ? await getRelatedArticles(grapher.id, knex)
+        ? await getRelatedArticles(knex, grapher.id)
         : undefined
 
     return renderToHtmlPage(
@@ -367,7 +370,7 @@ const bakeGrapherPageAndVariablesPngAndSVGIfChanged = async (
     bakedSiteDir: string,
     imageMetadataDictionary: Record,
     grapher: GrapherInterface,
-    knex: Knex
+    knex: db.KnexReadonlyTransaction
 ) => {
     const htmlPath = `${bakedSiteDir}/grapher/${grapher.slug}.html`
     const isSameVersion = await chartIsSameVersion(htmlPath, grapher.version)
@@ -451,7 +454,7 @@ export interface BakeSingleGrapherChartArguments {
 
 export const bakeSingleGrapherChart = async (
     args: BakeSingleGrapherChartArguments,
-    knex: Knex = db.knexInstance()
+    knex: db.KnexReadonlyTransaction
 ) => {
     const grapher: GrapherInterface = JSON.parse(args.config)
     grapher.id = args.id
@@ -473,7 +476,7 @@ export const bakeSingleGrapherChart = async (
 }
 
 export const bakeAllChangedGrapherPagesVariablesPngSvgAndDeleteRemovedGraphers =
-    async (bakedSiteDir: string, knex: Knex) => {
+    async (bakedSiteDir: string, knex: db.KnexReadonlyTransaction) => {
         const chartsToBake: { id: number; config: string; slug: string }[] =
             await knexRaw(
                 knex,
@@ -516,7 +519,9 @@ export const bakeAllChangedGrapherPagesVariablesPngSvgAndDeleteRemovedGraphers =
         await pMap(
             jobs,
             async (job) => {
-                await bakeSingleGrapherChart(job, knex)
+                await db.knexReadonlyTransaction((trx) =>
+                    bakeSingleGrapherChart(job, trx)
+                )
                 progressBar.tick({ name: `slug ${job.slug}` })
             },
             { concurrency: 10 }
diff --git a/baker/SiteBaker.tsx b/baker/SiteBaker.tsx
index 920d12bde03..6854cb0b13c 100644
--- a/baker/SiteBaker.tsx
+++ b/baker/SiteBaker.tsx
@@ -102,7 +102,6 @@ import {
     getVariableMetadata,
     getVariableOfDatapageIfApplicable,
 } from "../db/model/Variable.js"
-import { Knex } from "knex"
 import { getBakePath } from "@ourworldindata/components"
 import { GdocAuthor } from "../db/model/Gdoc/GdocAuthor.js"
 
@@ -191,7 +190,7 @@ export class SiteBaker {
         this.explorerAdminServer = new 
ExplorerAdminServer(GIT_CMS_DIR) } - private async bakeEmbeds(knex: Knex) { + private async bakeEmbeds(knex: db.KnexReadonlyTransaction) { if (!this.bakeSteps.has("embeds")) return // Find all grapher urls used as embeds in all Wordpress posts on the site @@ -205,7 +204,7 @@ export class SiteBaker { this.progressBar.tick({ name: "✅ baked embeds" }) } - private async bakeCountryProfiles(knex: Knex) { + private async bakeCountryProfiles(knex: db.KnexReadonlyTransaction) { if (!this.bakeSteps.has("countryProfiles")) return await Promise.all( countryProfileSpecs.map(async (spec) => { @@ -248,7 +247,7 @@ export class SiteBaker { } // Bake an individual post/page - private async bakePost(post: FullPost, knex: Knex) { + private async bakePost(post: FullPost, knex: db.KnexReadonlyTransaction) { const html = await renderPost( post, knex, @@ -430,7 +429,7 @@ export class SiteBaker { return this._prefetchedAttachmentsCache } - private async removeDeletedPosts(knex: Knex) { + private async removeDeletedPosts(knex: db.KnexReadonlyTransaction) { if (!this.bakeSteps.has("removeDeletedPosts")) return await db.getConnection() @@ -439,7 +438,7 @@ export class SiteBaker { const postSlugs = [] for (const postApi of postsApi) { - const post = await getFullPost(postApi) + const post = await getFullPost(knex, postApi) postSlugs.push(post.slug) } @@ -459,11 +458,11 @@ export class SiteBaker { this.progressBar.tick({ name: "✅ removed deleted posts" }) } - private async bakePosts(knex: Knex) { + private async bakePosts(knex: db.KnexReadonlyTransaction) { if (!this.bakeSteps.has("wordpressPosts")) return // TODO: the knex instance should be handed down as a parameter const alreadyPublishedViaGdocsSlugsSet = - await db.getSlugsWithPublishedGdocsSuccessors(db.knexInstance()) + await db.getSlugsWithPublishedGdocsSuccessors(knex) const postsApi = await getPostsFromSnapshots( knex, @@ -474,7 +473,9 @@ export class SiteBaker { await pMap( postsApi, async (postApi) => - getFullPost(postApi).then((post) => this.bakePost(post, knex)), + getFullPost(knex, postApi).then((post) => + this.bakePost(post, knex) + ), { concurrency: 10 } ) @@ -547,7 +548,7 @@ export class SiteBaker { } // Bake unique individual pages - private async bakeSpecialPages(knex: Knex) { + private async bakeSpecialPages(knex: db.KnexReadonlyTransaction) { if (!this.bakeSteps.has("specialPages")) return await this.stageWrite( `${this.bakedSiteDir}/index.html`, @@ -598,7 +599,7 @@ export class SiteBaker { this.progressBar.tick({ name: "✅ baked special pages" }) } - private async bakeExplorers(knex: Knex) { + private async bakeExplorers(knex: db.KnexReadonlyTransaction) { if (!this.bakeSteps.has("explorers")) return await bakeAllExplorerRedirects( @@ -825,23 +826,21 @@ export class SiteBaker { } // Pages that are expected by google scholar for indexing - private async bakeGoogleScholar() { + private async bakeGoogleScholar(trx: db.KnexReadonlyTransaction) { if (!this.bakeSteps.has("googleScholar")) return await this.stageWrite( `${this.bakedSiteDir}/entries-by-year.html`, - await entriesByYearPage() + await entriesByYearPage(trx) ) - const knex = db.knexInstance() - - const rows = (await db - .knexTable(postsTable) + const rows = (await trx + .table(postsTable) .where({ status: "publish" }) .whereNot({ type: "wp_block" }) .join("post_tags", { "post_tags.post_id": "posts.id" }) .join("tags", { "tags.id": "post_tags.tag_id" }) .where({ "tags.name": "Entries" }) - .select(knex.raw("distinct year(published_at) as year")) + .select(trx.raw("distinct 
year(published_at) as year")) .orderBy("year", "DESC")) as { year: number }[] const years = rows.map((r) => r.year) @@ -849,7 +848,7 @@ export class SiteBaker { for (const year of years) { await this.stageWrite( `${this.bakedSiteDir}/entries-by-year/${year}.html`, - await entriesByYearPage(year) + await entriesByYearPage(trx, year) ) } @@ -857,7 +856,7 @@ export class SiteBaker { } // Bake the blog index - private async bakeBlogIndex(knex: Knex) { + private async bakeBlogIndex(knex: db.KnexReadonlyTransaction) { if (!this.bakeSteps.has("blogIndex")) return const allPosts = await getBlogIndex(knex) const numPages = Math.ceil(allPosts.length / BLOG_POSTS_PER_PAGE) @@ -871,7 +870,7 @@ export class SiteBaker { } // Bake the RSS feed - private async bakeRSS(knex: Knex) { + private async bakeRSS(knex: db.KnexReadonlyTransaction) { if (!this.bakeSteps.has("rss")) return await this.stageWrite( `${this.bakedSiteDir}/atom.xml`, @@ -893,9 +892,9 @@ export class SiteBaker { // We don't have an icon for every single tag (yet), but for the icons that we *do* have, // we want to make sure that we have a corresponding tag in the database. - private async validateTagIcons() { - const allTags = await db - .knexTable("tags") + private async validateTagIcons(trx: db.KnexReadonlyTransaction) { + const allTags = await trx + .table("tags") .select<{ name: string }[]>("name") const tagNames = new Set(allTags.map((tag) => tag.name)) const tagIcons = await fs.readdir("public/images/tag-icons") @@ -910,7 +909,7 @@ export class SiteBaker { } // Bake the static assets - private async bakeAssets() { + private async bakeAssets(trx: db.KnexReadonlyTransaction) { if (!this.bakeSteps.has("assets")) return // do not delete images/published folder so that we don't have to sync gdrive images again @@ -923,7 +922,7 @@ export class SiteBaker { await execWrapper( `rm -rf ${this.bakedSiteDir}/assets && cp -r ${BASE_DIR}/dist/assets ${this.bakedSiteDir}/assets` ) - await this.validateTagIcons() + await this.validateTagIcons(trx) await execWrapper( `rsync -hav --delete ${BASE_DIR}/public/* ${this.bakedSiteDir}/ ${excludes}` ) @@ -938,7 +937,7 @@ export class SiteBaker { this.progressBar.tick({ name: "✅ baked assets" }) } - async bakeRedirects(knex: Knex) { + async bakeRedirects(knex: db.KnexReadonlyTransaction) { if (!this.bakeSteps.has("redirects")) return const redirects = await getRedirects(knex) this.progressBar.tick({ name: "✅ got redirects" }) @@ -956,19 +955,19 @@ export class SiteBaker { this.progressBar.tick({ name: "✅ baked redirects" }) } - async bakeWordpressPages(knex: Knex) { + async bakeWordpressPages(knex: db.KnexReadonlyTransaction) { await this.bakeRedirects(knex) await this.bakeEmbeds(knex) await this.bakeBlogIndex(knex) await this.bakeRSS(knex) - await this.bakeAssets() - await this.bakeGoogleScholar() + await this.bakeAssets(knex) + await this.bakeGoogleScholar(knex) await this.bakePosts(knex) } - private async _bakeNonWordpressPages(knex: Knex) { + private async _bakeNonWordpressPages(knex: db.KnexReadonlyTransaction) { if (this.bakeSteps.has("countries")) { - await bakeCountries(this) + await bakeCountries(this, knex) } await this.bakeSpecialPages(knex) await this.bakeCountryProfiles(knex) @@ -990,7 +989,7 @@ export class SiteBaker { await this.bakeDriveImages() } - async bakeNonWordpressPages(knex: Knex) { + async bakeNonWordpressPages(knex: db.KnexReadonlyTransaction) { await db.getConnection() const progressBarTotal = nonWordpressSteps .map((step) => this.bakeSteps.has(step)) @@ -1004,7 +1003,7 @@ export 
class SiteBaker { await this._bakeNonWordpressPages(knex) } - async bakeAll(knex: Knex) { + async bakeAll(knex: db.KnexReadonlyTransaction) { // Ensure caches are correctly initialized this.flushCache() await this.removeDeletedPosts(knex) diff --git a/baker/algolia/indexChartsToAlgolia.ts b/baker/algolia/indexChartsToAlgolia.ts index 88cc8caa6f2..803ed9ee475 100644 --- a/baker/algolia/indexChartsToAlgolia.ts +++ b/baker/algolia/indexChartsToAlgolia.ts @@ -8,7 +8,6 @@ import { MarkdownTextWrap } from "@ourworldindata/components" import { getAnalyticsPageviewsByUrlObj } from "../../db/model/Pageview.js" import { Link } from "../../db/model/Link.js" import { getRelatedArticles } from "../../db/model/Post.js" -import { Knex } from "knex" import { getIndexName } from "../../site/search/searchClient.js" const computeScore = (record: Omit): number => { @@ -17,7 +16,7 @@ const computeScore = (record: Omit): number => { } const getChartsRecords = async ( - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise => { const chartsToIndex = await db.queryMysql(` SELECT c.id, @@ -68,7 +67,7 @@ const getChartsRecords = async ( // otherwise they will fail when rendered in the search results if (isPathRedirectedToExplorer(`/grapher/${c.slug}`)) continue - const relatedArticles = (await getRelatedArticles(c.id, knex)) ?? [] + const relatedArticles = (await getRelatedArticles(knex, c.id)) ?? [] const linksFromGdocs = await Link.getPublishedLinksTo( c.slug, OwidGdocLinkType.Grapher @@ -118,7 +117,7 @@ const indexChartsToAlgolia = async () => { const index = client.initIndex(getIndexName(SearchIndexName.Charts)) await db.getConnection() - const records = await getChartsRecords(db.knexInstance()) + const records = await db.knexReadonlyTransaction(getChartsRecords) await index.replaceAllObjects(records) await db.closeTypeOrmAndKnexConnections() diff --git a/baker/algolia/indexExplorersToAlgolia.ts b/baker/algolia/indexExplorersToAlgolia.ts index 1fb98d234e6..ce6353f45ac 100644 --- a/baker/algolia/indexExplorersToAlgolia.ts +++ b/baker/algolia/indexExplorersToAlgolia.ts @@ -15,7 +15,6 @@ import { ALGOLIA_INDEXING } from "../../settings/serverSettings.js" import { getAnalyticsPageviewsByUrlObj } from "../../db/model/Pageview.js" import { chunkParagraphs } from "../chunk.js" import { SearchIndexName } from "../../site/search/searchTypes.js" -import { Knex } from "knex" import { getIndexName } from "../../site/search/searchClient.js" type ExplorerBlockColumns = { @@ -116,7 +115,7 @@ function getNullishJSONValueAsPlaintext(value: string): string { } const getExplorerRecords = async ( - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise => { const pageviews = await getAnalyticsPageviewsByUrlObj(knex) @@ -198,8 +197,7 @@ const indexExplorersToAlgolia = async () => { try { const index = client.initIndex(getIndexName(SearchIndexName.Explorers)) - const knex = db.knexInstance() - const records = await getExplorerRecords(knex) + const records = await db.knexReadonlyTransaction(getExplorerRecords) await index.replaceAllObjects(records) await db.closeTypeOrmAndKnexConnections() diff --git a/baker/algolia/indexToAlgolia.tsx b/baker/algolia/indexToAlgolia.tsx index 3f71e8ef527..38c5e8880f8 100644 --- a/baker/algolia/indexToAlgolia.tsx +++ b/baker/algolia/indexToAlgolia.tsx @@ -31,7 +31,6 @@ import { getPostTags, getPostsFromSnapshots, } from "../../db/model/Post.js" -import { Knex } from "knex" import { getIndexName } from "../../site/search/searchClient.js" interface TypeAndImportance { @@ -81,7 +80,7 @@ function 
generateChunksFromHtmlText(htmlString: string) { async function generateWordpressRecords( postsApi: PostRestApi[], pageviews: Record, - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise { const getPostTypeAndImportance = ( post: FormattedPost, @@ -100,7 +99,7 @@ async function generateWordpressRecords( const records: PageRecord[] = [] for (const postApi of postsApi) { - const rawPost = await getFullPost(postApi) + const rawPost = await getFullPost(knex, postApi) if (isEmpty(rawPost.content)) { // we have some posts that are only placeholders (e.g. for a redirect); don't index these console.log( @@ -111,7 +110,7 @@ async function generateWordpressRecords( const post = await formatPost(rawPost, { footnotes: false }, knex) const chunks = generateChunksFromHtmlText(post.html) - const tags = await getPostTags(post.id) + const tags = await getPostTags(knex, post.id) const postTypeAndImportance = getPostTypeAndImportance(post, tags) let i = 0 @@ -196,7 +195,7 @@ function generateGdocRecords( } // Generate records for countries, WP posts (not including posts that have been succeeded by Gdocs equivalents), and Gdocs -const getPagesRecords = async (knex: Knex) => { +const getPagesRecords = async (knex: db.KnexReadonlyTransaction) => { const pageviews = await getAnalyticsPageviewsByUrlObj(knex) const gdocs = await GdocPost.getPublishedGdocPosts() const publishedGdocsBySlug = keyBy(gdocs, "slug") @@ -225,7 +224,7 @@ const getPagesRecords = async (knex: Knex) => { return [...countryRecords, ...wordpressRecords, ...gdocsRecords] } -const indexToAlgolia = async (knex: Knex) => { +const indexToAlgolia = async () => { if (!ALGOLIA_INDEXING) return const client = getAlgoliaClient() @@ -236,7 +235,7 @@ const indexToAlgolia = async (knex: Knex) => { const index = client.initIndex(getIndexName(SearchIndexName.Pages)) await db.getConnection() - const records = await getPagesRecords(knex) + const records = await db.knexReadonlyTransaction(getPagesRecords) index.replaceAllObjects(records) @@ -249,4 +248,4 @@ process.on("unhandledRejection", (e) => { process.exit(1) }) -indexToAlgolia(db.knexInstance()) +indexToAlgolia() diff --git a/baker/buildLocalBake.ts b/baker/buildLocalBake.ts index c6820e5c5ed..155ec076f78 100644 --- a/baker/buildLocalBake.ts +++ b/baker/buildLocalBake.ts @@ -17,7 +17,7 @@ const bakeDomainToFolder = async ( fs.mkdirp(dir) const baker = new SiteBaker(dir, baseUrl, bakeSteps) console.log(`Baking site locally with baseUrl '${baseUrl}' to dir '${dir}'`) - await baker.bakeAll(db.knexInstance()) + await db.knexReadonlyTransaction((trx) => baker.bakeAll(trx)) } yargs(hideBin(process.argv)) diff --git a/baker/countryProfiles.tsx b/baker/countryProfiles.tsx index 2651397323c..c0f1d2b5183 100644 --- a/baker/countryProfiles.tsx +++ b/baker/countryProfiles.tsx @@ -41,11 +41,13 @@ const checkShouldShowIndicator = (grapher: GrapherInterface) => // Find the charts that will be shown on the country profile page (if they have that country) // TODO: make this page per variable instead -const countryIndicatorGraphers = async (): Promise => +const countryIndicatorGraphers = async ( + trx: db.KnexReadonlyTransaction +): Promise => bakeCache(countryIndicatorGraphers, async () => { const graphers = ( - await db - .knexTable("charts") + await trx + .table("charts") .whereRaw( "publishedAt is not null and config->>'$.isPublished' = 'true' and is_indexable is true" ) @@ -54,22 +56,25 @@ const countryIndicatorGraphers = async (): Promise => return graphers.filter(checkShouldShowIndicator) }) -export const 
countryIndicatorVariables = async (): Promise< - DbEnrichedVariable[] -> => +export const countryIndicatorVariables = async ( + trx: db.KnexReadonlyTransaction +): Promise => bakeCache(countryIndicatorVariables, async () => { - const variableIds = (await countryIndicatorGraphers()).map( + const variableIds = (await countryIndicatorGraphers(trx)).map( (c) => c.dimensions![0]!.variableId ) - const rows: DbRawVariable[] = await db - .knexTable(VariablesTableName) + const rows: DbRawVariable[] = await trx + .table(VariablesTableName) .whereIn("id", variableIds) return rows.map(parseVariablesRow) }) -export const denormalizeLatestCountryData = async (variableIds?: number[]) => { - const entities = (await db - .knexTable("entities") +export const denormalizeLatestCountryData = async ( + trx: db.KnexReadWriteTransaction, + variableIds?: number[] +) => { + const entities = (await trx + .table("entities") .select("id", "code") .whereRaw("validated is true and code is not null")) as { id: number @@ -80,7 +85,7 @@ export const denormalizeLatestCountryData = async (variableIds?: number[]) => { const entityIds = countries.map((c) => entitiesByCode[c.code].id) if (!variableIds) { - variableIds = (await countryIndicatorVariables()).map((v) => v.id) + variableIds = (await countryIndicatorVariables(trx)).map((v) => v.id) // exclude variables that are already in country_latest_data // NOTE: we always fetch all variables that don't have country entities because they never @@ -88,17 +93,18 @@ export const denormalizeLatestCountryData = async (variableIds?: number[]) => { // entities used in variables there's no easy way. It's not as bad since this still takes // under a minute to run. const existingVariableIds = ( - await db.queryMysql( + await db.knexRaw<{ variable_id: number }>( + trx, `select variable_id from country_latest_data where variable_id in (?)`, [variableIds] ) - ).map((r: any) => r.variable_id) + ).map((r) => r.variable_id) variableIds = lodash.difference(variableIds, existingVariableIds) } const currentYear = new Date().getUTCFullYear() - const df = (await dataAsDF(variableIds, db.knexInstance())) + const df = (await dataAsDF(variableIds, trx)) .filter( pl .col("entityId") @@ -113,26 +119,27 @@ export const denormalizeLatestCountryData = async (variableIds?: number[]) => { .select("variableId", "entityCode", "year", "value") .rename({ variableId: "variable_id", entityCode: "country_code" }) - await db.knexInstance().transaction(async (t) => { - // Remove existing values - await t - .table("country_latest_data") - .whereIn("variable_id", variableIds as number[]) - .delete() + // Remove existing values + await trx + .table("country_latest_data") + .whereIn("variable_id", variableIds as number[]) + .delete() - // Insert new ones - if (df.height > 0) { - await t.table("country_latest_data").insert(df.toRecords()) - } - }) + // Insert new ones + if (df.height > 0) { + await trx.table("country_latest_data").insert(df.toRecords()) + } } -const countryIndicatorLatestData = async (countryCode: string) => { +const countryIndicatorLatestData = async ( + trx: db.KnexReadonlyTransaction, + countryCode: string +) => { const dataValuesByEntityId = await bakeCache( countryIndicatorLatestData, async () => { - const dataValues = (await db - .knexTable("country_latest_data") + const dataValues = (await trx + .table("country_latest_data") .select( "variable_id AS variableId", "country_code AS code", @@ -153,14 +160,15 @@ const countryIndicatorLatestData = async (countryCode: string) => { } export const 
countryProfilePage = async ( + trx: db.KnexReadonlyTransaction, countrySlug: string, baseUrl: string ) => { const country = getCountryBySlug(countrySlug) if (!country) throw new JsonError(`No such country ${countrySlug}`, 404) - const graphers = await countryIndicatorGraphers() - const dataValues = await countryIndicatorLatestData(country.code) + const graphers = await countryIndicatorGraphers(trx) + const dataValues = await countryIndicatorLatestData(trx, country.code) const valuesByVariableId = lodash.groupBy(dataValues, (v) => v.variableId) @@ -193,14 +201,17 @@ export const countryProfilePage = async ( ) } -export const bakeCountries = async (baker: SiteBaker) => { +export const bakeCountries = async ( + baker: SiteBaker, + trx: db.KnexReadonlyTransaction +) => { const html = await countriesIndexPage(baker.baseUrl) await baker.writeFile("/countries.html", html) await baker.ensureDir("/country") for (const country of countries) { const path = `/country/${country.slug}.html` - const html = await countryProfilePage(country.slug, baker.baseUrl) + const html = await countryProfilePage(trx, country.slug, baker.baseUrl) await baker.writeFile(path, html) } diff --git a/baker/formatWordpressPost.tsx b/baker/formatWordpressPost.tsx index 5c51291b178..ed1261bc5a6 100644 --- a/baker/formatWordpressPost.tsx +++ b/baker/formatWordpressPost.tsx @@ -59,7 +59,7 @@ import { renderKeyInsights, renderProminentLinks } from "./siteRenderers.js" import { KEY_INSIGHTS_CLASS_NAME } from "../site/blocks/KeyInsights.js" import { RELATED_CHARTS_CLASS_NAME } from "../site/blocks/RelatedCharts.js" import { logErrorAndMaybeSendToBugsnag } from "../serverUtils/errorLog.js" -import { Knex } from "knex" +import { KnexReadonlyTransaction } from "../db/db.js" const initMathJax = () => { const adaptor = liteAdaptor() @@ -129,7 +129,7 @@ const formatLatex = async ( export const formatWordpressPost = async ( post: FullPost, formattingOptions: FormattingOptions, - knex: Knex, + knex: KnexReadonlyTransaction, grapherExports?: GrapherExports ): Promise => { let html = post.content @@ -596,7 +596,7 @@ export const formatWordpressPost = async ( export const formatPost = async ( post: FullPost, formattingOptions: FormattingOptions, - knex: Knex, + knex: KnexReadonlyTransaction, grapherExports?: GrapherExports ): Promise => { // No formatting applied, plain source HTML returned diff --git a/baker/pageOverrides.test.ts b/baker/pageOverrides.test.ts index 702d7573bec..64b7e63d88b 100644 --- a/baker/pageOverrides.test.ts +++ b/baker/pageOverrides.test.ts @@ -6,6 +6,7 @@ import { import * as pageOverrides from "./pageOverrides.js" import { jest } from "@jest/globals" +import { KnexReadonlyTransaction } from "../db/db.js" const mockCreatePost = (slug: string): FullPost => { return { @@ -27,7 +28,7 @@ const getPostBySlugLogToSlackNoThrow = jest.spyOn( pageOverrides, "getPostBySlugLogToSlackNoThrow" ) -getPostBySlugLogToSlackNoThrow.mockImplementation((landingSlug) => +getPostBySlugLogToSlackNoThrow.mockImplementation((knex, landingSlug) => Promise.resolve(mockCreatePost(landingSlug)) ) @@ -38,6 +39,7 @@ it("gets parent landing", async () => { await expect( pageOverrides.getLandingOnlyIfParent( + {} as KnexReadonlyTransaction, mockCreatePost("forest-area"), formattingOptions ) @@ -51,6 +53,7 @@ it("does not get parent landing (subnavId invalid)", async () => { await expect( pageOverrides.getLandingOnlyIfParent( + {} as KnexReadonlyTransaction, mockCreatePost("forest-area"), formattingOptions ) @@ -64,6 +67,7 @@ it("does not get parent 
landing (post is already a landing)", async () => { await expect( pageOverrides.getLandingOnlyIfParent( + {} as KnexReadonlyTransaction, mockCreatePost(forestLandingSlug), formattingOptions ) @@ -81,6 +85,7 @@ it("does not get parent landing and logs (landing post not found)", async () => await expect( pageOverrides.getLandingOnlyIfParent( + {} as KnexReadonlyTransaction, mockCreatePost("forest-area"), formattingOptions ) diff --git a/baker/pageOverrides.tsx b/baker/pageOverrides.tsx index c46b69e40b3..cf0e254724e 100644 --- a/baker/pageOverrides.tsx +++ b/baker/pageOverrides.tsx @@ -8,11 +8,15 @@ import { getFullPostBySlugFromSnapshot, isPostSlugCitable, } from "../db/model/Post.js" +import { KnexReadonlyTransaction } from "../db/db.js" -export const getPostBySlugLogToSlackNoThrow = async (slug: string) => { +export const getPostBySlugLogToSlackNoThrow = async ( + knex: KnexReadonlyTransaction, + slug: string +) => { let post try { - post = await getFullPostBySlugFromSnapshot(slug) + post = await getFullPostBySlugFromSnapshot(knex, slug) } catch (err) { logErrorAndMaybeSendToBugsnag(err) } finally { @@ -21,6 +25,7 @@ export const getPostBySlugLogToSlackNoThrow = async (slug: string) => { } export const getLandingOnlyIfParent = async ( + knex: KnexReadonlyTransaction, post: FullPost, formattingOptions: FormattingOptions ) => { @@ -37,7 +42,7 @@ export const getLandingOnlyIfParent = async ( // Using no-throw version to prevent throwing and stopping baking mid-way. // It is more desirable to temporarily deploy with citation overrides // absent, while fixing the issue. - const landing = await getPostBySlugLogToSlackNoThrow(landingSlug) + const landing = await getPostBySlugLogToSlackNoThrow(knex, landingSlug) if (!landing) { // todo: the concept of "citation overrides" does not belong to that // generic function. 
Logging this message should be the responsibility
@@ -58,10 +63,11 @@
 }
 
 export const getPageOverrides = async (
+    knex: KnexReadonlyTransaction,
     post: FullPost,
     formattingOptions: FormattingOptions
 ): Promise => {
-    const landing = await getLandingOnlyIfParent(post, formattingOptions)
+    const landing = await getLandingOnlyIfParent(knex, post, formattingOptions)
     if (!landing) return
 
     const isParentLandingCitable = isPostSlugCitable(landing.slug)
diff --git a/baker/postUpdatedHook.ts b/baker/postUpdatedHook.ts
index 4a997ea1e0a..6cf528882f4 100644
--- a/baker/postUpdatedHook.ts
+++ b/baker/postUpdatedHook.ts
@@ -23,14 +23,13 @@ import {
     getPostLinksBySourceId,
     insertManyPostLinks,
 } from "../db/model/PostLink.js"
-import { Knex } from "knex"
 
 const argv = parseArgs(process.argv.slice(2))
 const zeroDateString = "0000-00-00 00:00:00"
 
 // Sync post from the wordpress database to OWID database
 const syncPostToGrapher = async (
-    knex: Knex,
+    knex: db.KnexReadWriteTransaction,
     postId: number
 ): Promise => {
     const rows = await wpdb.singleton.query(
@@ -103,7 +102,7 @@ const syncPostToGrapher = async (
     const dereferenceReusableBlocksFn = await buildReusableBlocksResolver()
     const dereferenceTablePressFn = await buildTablePressResolver()
 
-    const matchingRows = await db.knexTable(postsTable).where({ id: postId })
+    const matchingRows = await knex.table(postsTable).where({ id: postId })
     const existsInGrapher = !!matchingRows.length
 
     const wpPost = rows[0]
@@ -140,32 +139,27 @@ const syncPostToGrapher = async (
     } as DbEnrichedPost)
         : undefined
 
-    await db.knexInstance().transaction(async (transaction) => {
-        if (!postRow && existsInGrapher)
-            // Delete from grapher
-            await transaction.table(postsTable).where({ id: postId }).delete()
-        else if (postRow) {
-            const contentWithBlocksInlined = dereferenceTablePressFn(
-                dereferenceReusableBlocksFn(postRow.content)
-            )
-            postRow.content = contentWithBlocksInlined
+    if (!postRow && existsInGrapher)
+        // Delete from grapher
+        await knex.table(postsTable).where({ id: postId }).delete()
+    else if (postRow) {
+        const contentWithBlocksInlined = dereferenceTablePressFn(
+            dereferenceReusableBlocksFn(postRow.content)
+        )
+        postRow.content = contentWithBlocksInlined
 
-            const rowForDb = serializePostRow(postRow)
+        const rowForDb = serializePostRow(postRow)
 
-            if (!existsInGrapher)
-                await transaction.table(postsTable).insert(rowForDb)
-            else if (existsInGrapher)
-                await transaction
-                    .table(postsTable)
-                    .where("id", "=", rowForDb.id)
-                    .update(rowForDb)
-        }
-    })
+        if (!existsInGrapher) await knex.table(postsTable).insert(rowForDb)
+        else if (existsInGrapher)
+            await knex
+                .table(postsTable)
+                .where("id", "=", rowForDb.id)
+                .update(rowForDb)
+    }
 
     const newPost = (
-        await select("slug").from(
-            db.knexTable(postsTable).where({ id: postId })
-        )
+        await select("slug").from(knex.table(postsTable).where({ id: postId }))
     )[0]
 
     if (postRow) {
@@ -206,7 +200,9 @@ const main = async (
 ) => {
     console.log(email, name, postId)
     try {
-        const slug = await syncPostToGrapher(db.knexInstance(), postId)
+        const slug = await db.knexReadWriteTransaction((trx) =>
+            syncPostToGrapher(trx, postId)
+        )
 
         if (BAKE_ON_CHANGE)
             await new DeployQueueServer().enqueueChange({
diff --git a/baker/recalcLatestCountryData.ts b/baker/recalcLatestCountryData.ts
index 4e654326d4b..fc3e109e43a 100644
--- a/baker/recalcLatestCountryData.ts
+++ b/baker/recalcLatestCountryData.ts
@@ -5,7 +5,7 @@ import * as db from "../db/db.js"
 import { denormalizeLatestCountryData } from 
"../baker/countryProfiles.js" const main = async () => { - await denormalizeLatestCountryData() + await db.knexReadWriteTransaction(denormalizeLatestCountryData) await db.closeTypeOrmAndKnexConnections() } diff --git a/baker/redirects.test.ts b/baker/redirects.test.ts index 1f7390a107c..20b1a3bf828 100644 --- a/baker/redirects.test.ts +++ b/baker/redirects.test.ts @@ -4,7 +4,7 @@ import * as redirects from "./redirects.js" import { resolveInternalRedirect } from "./redirects.js" import { jest } from "@jest/globals" -import { Knex } from "knex" +import { KnexReadonlyTransaction } from "../db/db.js" type ArrayForMap = [string, string][] @@ -17,8 +17,6 @@ const getFormattedUrl = (url: string): Url => { return Url.fromURL(formatUrls(url)) } -const knex: Knex = {} as Knex - it("resolves pathnames", async () => { const src = "/hello" const target = "/world" @@ -31,9 +29,12 @@ it("resolves pathnames", async () => { Promise.resolve(new Map(redirectsArr)) ) - expect(await resolveInternalRedirect(urlToResolve, knex)).toEqual( - resolvedUrl - ) + expect( + await resolveInternalRedirect( + urlToResolve, + {} as KnexReadonlyTransaction + ) + ).toEqual(resolvedUrl) }) it("does not support query string in redirects map", async () => { @@ -47,9 +48,12 @@ it("does not support query string in redirects map", async () => { Promise.resolve(new Map(redirectsArr)) ) - expect(await resolveInternalRedirect(urlToResolve, knex)).toEqual( - urlToResolve - ) + expect( + await resolveInternalRedirect( + urlToResolve, + {} as KnexReadonlyTransaction + ) + ).toEqual(urlToResolve) }) it("passes query string params when resolving", async () => { @@ -69,9 +73,12 @@ it("passes query string params when resolving", async () => { Promise.resolve(new Map(redirectsArr)) ) - expect(await resolveInternalRedirect(urlToResolve, knex)).toEqual( - resolvedUrl - ) + expect( + await resolveInternalRedirect( + urlToResolve, + {} as KnexReadonlyTransaction + ) + ).toEqual(resolvedUrl) }) it("does not pass query string params when some present on the target", async () => { @@ -88,9 +95,12 @@ it("does not pass query string params when some present on the target", async () Promise.resolve(new Map(redirectsArr)) ) - expect(await resolveInternalRedirect(urlToResolve, knex)).toEqual( - resolvedUrl - ) + expect( + await resolveInternalRedirect( + urlToResolve, + {} as KnexReadonlyTransaction + ) + ).toEqual(resolvedUrl) }) it("resolves self-redirects", async () => { @@ -104,9 +114,12 @@ it("resolves self-redirects", async () => { Promise.resolve(new Map(redirectsArr)) ) - expect(await resolveInternalRedirect(urlToResolve, knex)).toEqual( - urlToResolve - ) + expect( + await resolveInternalRedirect( + urlToResolve, + {} as KnexReadonlyTransaction + ) + ).toEqual(urlToResolve) }) it("does not support query params in self-redirects", async () => { @@ -123,11 +136,17 @@ it("does not support query params in self-redirects", async () => { Promise.resolve(new Map(redirectsArr)) ) - expect(await resolveInternalRedirect(urlToResolve, knex)).toEqual( - urlToResolve - ) expect( - await resolveInternalRedirect(urlToResolveWithQueryParam, knex) + await resolveInternalRedirect( + urlToResolve, + {} as KnexReadonlyTransaction + ) + ).toEqual(urlToResolve) + expect( + await resolveInternalRedirect( + urlToResolveWithQueryParam, + {} as KnexReadonlyTransaction + ) ).toEqual(urlToResolveWithQueryParam) }) @@ -147,7 +166,10 @@ it("resolves circular redirects", async () => { Promise.resolve(new Map(redirectsArr)) ) - expect(await 
resolveInternalRedirect(urlToResolve, knex)).toEqual(
-        urlToResolve
-    )
+    expect(
+        await resolveInternalRedirect(
+            urlToResolve,
+            {} as KnexReadonlyTransaction
+        )
+    ).toEqual(urlToResolve)
 })
diff --git a/baker/redirects.ts b/baker/redirects.ts
index cc7756154d1..cce1a66df56 100644
--- a/baker/redirects.ts
+++ b/baker/redirects.ts
@@ -4,9 +4,8 @@ import { isCanonicalInternalUrl } from "./formatting.js"
 import { resolveExplorerRedirect } from "./replaceExplorerRedirects.js"
 import { logErrorAndMaybeSendToBugsnag } from "../serverUtils/errorLog.js"
 import { getRedirectsFromDb } from "../db/model/Redirect.js"
-import { Knex } from "knex"
 
-export const getRedirects = async (knex: Knex) => {
+export const getRedirects = async (knex: db.KnexReadonlyTransaction) => {
     const staticRedirects = [
         // RSS feed
         "/feed /atom.xml 302",
@@ -92,14 +91,16 @@ export const getGrapherRedirectsMap = async (
     )
 }
 
-export const getWordpressRedirectsMap = async (knex: Knex) => {
+export const getWordpressRedirectsMap = async (
+    knex: db.KnexReadonlyTransaction
+) => {
     const redirectsFromDb = await getRedirectsFromDb(knex)
 
     return new Map(redirectsFromDb.map((row) => [row.source, row.target]))
 }
 
 export const getGrapherAndWordpressRedirectsMap = memoize(
-    async (knex: Knex): Promise> => {
+    async (knex: db.KnexReadonlyTransaction): Promise> => {
         // source: pathnames only (e.g. /transport)
         // target: pathnames with or without origins (e.g. /transport-new or https://ourworldindata.org/transport-new)
 
@@ -161,7 +162,7 @@ export const resolveRedirectFromMap = async (
 
 export const resolveInternalRedirect = async (
     url: Url,
-    knex: Knex
+    knex: db.KnexReadonlyTransaction
 ): Promise => {
     if (!isCanonicalInternalUrl(url)) return url
 
diff --git a/baker/runBakeGraphers.ts b/baker/runBakeGraphers.ts
index d034bea8682..3086bbc538f 100755
--- a/baker/runBakeGraphers.ts
+++ b/baker/runBakeGraphers.ts
@@ -9,9 +9,11 @@ import * as db from "../db/db.js"
  */
 
 const main = async (folder: string) => {
-    await bakeAllChangedGrapherPagesVariablesPngSvgAndDeleteRemovedGraphers(
-        folder,
-        db.knexInstance()
+    await db.knexReadonlyTransaction((trx) =>
+        bakeAllChangedGrapherPagesVariablesPngSvgAndDeleteRemovedGraphers(
+            folder,
+            trx
+        )
     )
 }
 
diff --git a/baker/siteRenderers.test.ts b/baker/siteRenderers.test.ts
index 155a5b6d5bd..d6bbd0dce56 100755
--- a/baker/siteRenderers.test.ts
+++ b/baker/siteRenderers.test.ts
@@ -3,12 +3,12 @@ import {} from "../site/blocks/ProminentLink.js"
 
 import { renderExplorerPage } from "./siteRenderers.js"
 import { ExplorerProgram } from "../explorer/ExplorerProgram.js"
-import { Knex } from "knex"
+import { KnexReadonlyTransaction } from "../db/db.js"
 
 // Note: renderProminentLinks() tests are now e2e (see kitchenSink.js)
 
 it("renders an explorer page with title", async () => {
-    const knex: Knex = {} as Knex
+    const knex: KnexReadonlyTransaction = {} as KnexReadonlyTransaction
 
     expect(
         await renderExplorerPage(
diff --git a/baker/siteRenderers.tsx b/baker/siteRenderers.tsx
index 6ca0b35f100..048e1b5a83d 100644
--- a/baker/siteRenderers.tsx
+++ b/baker/siteRenderers.tsx
@@ -58,7 +58,7 @@ import {
 import { FormattingOptions, GrapherInterface } from "@ourworldindata/types"
 import { CountryProfileSpec } from "../site/countryProfileProjects.js"
 import { formatPost } from "./formatWordpressPost.js"
-import { queryMysql, knexTable, getHomepageId } from "../db/db.js"
+import { queryMysql, getHomepageId, KnexReadonlyTransaction } from "../db/db.js"
 import { getPageOverrides, isPageOverridesCitable } from 
"./pageOverrides.js" import { ProminentLink } from "../site/blocks/ProminentLink.js" import { @@ -90,7 +90,6 @@ import { GdocPost } from "../db/model/Gdoc/GdocPost.js" import { logErrorAndMaybeSendToBugsnag } from "../serverUtils/errorLog.js" import { GdocFactory } from "../db/model/Gdoc/GdocFactory.js" import { SiteNavigationStatic } from "../site/SiteNavigation.js" -import { Knex } from "knex" export const renderToHtmlPage = (element: any) => `${ReactDOMServer.renderToStaticMarkup(element)}` @@ -190,17 +189,17 @@ export const renderGdoc = (gdoc: OwidGdoc, isPreviewing: boolean = false) => { export const renderPageBySlug = async ( slug: string, - knex: Knex + knex: KnexReadonlyTransaction ) => { - const post = await getFullPostBySlugFromSnapshot(slug) + const post = await getFullPostBySlugFromSnapshot(knex, slug) return renderPost(post, knex) } export const renderPreview = async ( postId: number, - knex: Knex + knex: KnexReadonlyTransaction ): Promise => { - const postApi = await getFullPostByIdFromSnapshot(postId) + const postApi = await getFullPostByIdFromSnapshot(knex, postId) return renderPost(postApi, knex) } @@ -210,7 +209,7 @@ export const renderMenuJson = async () => { export const renderPost = async ( post: FullPost, - knex: Knex, + knex: KnexReadonlyTransaction, baseUrl: string = BAKED_BASE_URL, grapherExports?: GrapherExports ) => { @@ -238,7 +237,7 @@ export const renderPost = async ( grapherExports ) - const pageOverrides = await getPageOverrides(post, formattingOptions) + const pageOverrides = await getPageOverrides(knex, post, formattingOptions) const citationStatus = isPostSlugCitable(post.slug) || isPageOverridesCitable(pageOverrides) @@ -253,7 +252,7 @@ export const renderPost = async ( ) } -export const renderFrontPage = async (knex: Knex) => { +export const renderFrontPage = async (knex: KnexReadonlyTransaction) => { const gdocHomepageId = await getHomepageId(knex) if (gdocHomepageId) { @@ -311,7 +310,7 @@ export const renderDataInsightsIndexPage = ( export const renderBlogByPageNum = async ( pageNum: number, - knex: Knex + knex: KnexReadonlyTransaction ) => { const allPosts = await getBlogIndex(knex) @@ -337,7 +336,7 @@ export const renderSearchPage = () => export const renderNotFoundPage = () => renderToHtmlPage() -export async function makeAtomFeed(knex: Knex) { +export async function makeAtomFeed(knex: KnexReadonlyTransaction) { const posts = (await getBlogIndex(knex)).slice(0, 10) return makeAtomFeedFromPosts(posts) } @@ -345,7 +344,7 @@ export async function makeAtomFeed(knex: Knex) { // We don't want to include topic pages in the atom feed that is being consumed // by Mailchimp for sending the "immediate update" newsletter. Instead topic // pages announcements are sent out manually. 
-export async function makeAtomFeedNoTopicPages(knex: Knex) { +export async function makeAtomFeedNoTopicPages(knex: KnexReadonlyTransaction) { const posts = (await getBlogIndex(knex)) .filter((post: IndexPost) => post.type !== OwidGdocType.TopicPage) .slice(0, 10) @@ -393,8 +392,12 @@ ${posts } // These pages exist largely just for Google Scholar -export const entriesByYearPage = async (year?: number) => { - const entries = (await knexTable(postsTable) +export const entriesByYearPage = async ( + trx: KnexReadonlyTransaction, + year?: number +) => { + const entries = (await trx + .table(postsTable) .where({ status: "publish" }) .whereNot({ type: "wp_block" }) .join("post_tags", { "post_tags.post_id": "posts.id" }) @@ -427,11 +430,12 @@ export const feedbackPage = () => const getCountryProfilePost = memoize( async ( profileSpec: CountryProfileSpec, - knex: Knex, + knex: KnexReadonlyTransaction, grapherExports?: GrapherExports ): Promise<[FormattedPost, FormattingOptions]> => { // Get formatted content from generic covid country profile page. const genericCountryProfilePost = await getFullPostBySlugFromSnapshot( + knex, profileSpec.genericProfileSlug ) @@ -451,15 +455,15 @@ const getCountryProfilePost = memoize( // todo: we used to flush cache of this thing. const getCountryProfileLandingPost = memoize( - async (profileSpec: CountryProfileSpec) => { - return getFullPostBySlugFromSnapshot(profileSpec.landingPageSlug) + async (knex: KnexReadonlyTransaction, profileSpec: CountryProfileSpec) => { + return getFullPostBySlugFromSnapshot(knex, profileSpec.landingPageSlug) } ) export const renderCountryProfile = async ( profileSpec: CountryProfileSpec, country: Country, - knex: Knex, + knex: KnexReadonlyTransaction, grapherExports?: GrapherExports ) => { const [formatted, formattingOptions] = await getCountryProfilePost( @@ -470,7 +474,7 @@ export const renderCountryProfile = async ( const formattedCountryProfile = formatCountryProfile(formatted, country) - const landing = await getCountryProfileLandingPost(profileSpec) + const landing = await getCountryProfileLandingPost(knex, profileSpec) const overrides: PageOverrides = { pageTitle: `${country.name}: ${profileSpec.pageTitle} Country Profile`, @@ -496,7 +500,7 @@ export const renderCountryProfile = async ( export const countryProfileCountryPage = async ( profileSpec: CountryProfileSpec, countrySlug: string, - knex: Knex + knex: KnexReadonlyTransaction ) => { const country = getCountryBySlug(countrySlug) if (!country) throw new JsonError(`No such country ${countrySlug}`, 404) @@ -508,13 +512,14 @@ export const countryProfileCountryPage = async ( export const flushCache = () => getCountryProfilePost.cache.clear?.() const renderPostThumbnailBySlug = async ( + knex: KnexReadonlyTransaction, slug: string | undefined ): Promise => { if (!slug) return let post try { - post = await getFullPostBySlugFromSnapshot(slug) + post = await getFullPostBySlugFromSnapshot(knex, slug) } catch (err) { // if no post is found, then we return early instead of throwing } @@ -528,7 +533,7 @@ const renderPostThumbnailBySlug = async ( export const renderProminentLinks = async ( $: CheerioStatic, containerPostId: number, - knex: Knex + knex: KnexReadonlyTransaction ) => { const blocks = $("block[type='prominent-link']").toArray() await Promise.all( @@ -560,6 +565,7 @@ export const renderProminentLinks = async ( : resolvedUrl.slug && ( await getFullPostBySlugFromSnapshot( + knex, resolvedUrl.slug ) ).title) @@ -587,7 +593,7 @@ export const renderProminentLinks = async ( ? 
renderGrapherThumbnailByResolvedChartSlug( resolvedUrl.slug ) - : await renderPostThumbnailBySlug(resolvedUrl.slug)) + : await renderPostThumbnailBySlug(knex, resolvedUrl.slug)) const rendered = ReactDOMServer.renderToStaticMarkup(
@@ -609,7 +615,7 @@ export const renderProminentLinks = async ( export const renderReusableBlock = async ( html: string | undefined, containerPostId: number, - knex: Knex + knex: KnexReadonlyTransaction ): Promise => { if (!html) return @@ -621,7 +627,7 @@ export const renderReusableBlock = async ( export const renderExplorerPage = async ( program: ExplorerProgram, - knex: Knex, + knex: KnexReadonlyTransaction, urlMigrationSpec?: ExplorerPageUrlMigrationSpec ) => { const { requiredGrapherIds, requiredVariableIds } = program.decisionMatrix @@ -674,7 +680,7 @@ export const renderExplorerPage = async ( const wpContent = program.wpBlockId ? await renderReusableBlock( - await getBlockContentFromSnapshot(program.wpBlockId), + await getBlockContentFromSnapshot(knex, program.wpBlockId), program.wpBlockId, knex ) diff --git a/baker/sitemap.ts b/baker/sitemap.ts index 9b0a36efead..d1f8adafe51 100644 --- a/baker/sitemap.ts +++ b/baker/sitemap.ts @@ -13,7 +13,6 @@ import { EXPLORERS_ROUTE_FOLDER } from "../explorer/ExplorerConstants.js" import { ExplorerProgram } from "../explorer/ExplorerProgram.js" import { GdocPost } from "../db/model/Gdoc/GdocPost.js" import { getPostsFromSnapshots } from "../db/model/Post.js" -import { Knex } from "knex" import { calculateDataInsightIndexPageCount } from "../db/model/Gdoc/gdocUtils.js" interface SitemapUrl { @@ -61,7 +60,7 @@ const explorerToSitemapUrl = (program: ExplorerProgram): SitemapUrl[] => { export const makeSitemap = async ( explorerAdminServer: ExplorerAdminServer, - knex: Knex + knex: db.KnexReadonlyTransaction ) => { const alreadyPublishedViaGdocsSlugsSet = await db.getSlugsWithPublishedGdocsSuccessors(knex) @@ -77,8 +76,8 @@ export const makeSitemap = async ( publishedDataInsights.length ) - const charts = (await db - .knexTable(Chart.table) + const charts = (await knex + .table(Chart.table) .select(knex.raw(`updatedAt, config->>"$.slug" AS slug`)) .whereRaw('config->"$.isPublished" = true')) as { updatedAt: Date diff --git a/baker/startDeployQueueServer.ts b/baker/startDeployQueueServer.ts index 2e82864cbe5..82e7a011925 100644 --- a/baker/startDeployQueueServer.ts +++ b/baker/startDeployQueueServer.ts @@ -35,7 +35,7 @@ const main = async () => { setTimeout(deployIfQueueIsNotEmpty, 10 * 1000) }) - deployIfQueueIsNotEmpty(db.knexInstance()) + await db.knexReadonlyTransaction(deployIfQueueIsNotEmpty) } main() diff --git a/baker/syncRedirectsToGrapher.ts b/baker/syncRedirectsToGrapher.ts index ae57eb7e5b7..bd5bdc38ad8 100644 --- a/baker/syncRedirectsToGrapher.ts +++ b/baker/syncRedirectsToGrapher.ts @@ -18,7 +18,9 @@ const stripTrailingSlash = (url: string) => { return url.replace(/\/$/, "") // remove trailing slash: /abc/ -> /abc } -export const syncRedirectsToGrapher = async (): Promise => { +export const syncRedirectsToGrapher = async ( + trx: db.KnexReadWriteTransaction +): Promise => { const allWordpressRedirectsRaw = await wpdb.FOR_SYNC_ONLY_getRedirects() const allWordpressRedirects = allWordpressRedirectsRaw.map((r) => ({ @@ -27,52 +29,46 @@ export const syncRedirectsToGrapher = async (): Promise => { target: stripTrailingSlash(r.target), })) - await db.knexInstance().transaction(async (knex) => { - const existingRedirectsFromDb = await getRedirectsFromDb(knex) + const existingRedirectsFromDb = await getRedirectsFromDb(trx) - for (const { source, code } of allWordpressRedirects) { - // We only want to insert redirects for which we don't have a redirected source yet - if (existingRedirectsFromDb.some((r) => r.source === source)) { - console.log( 
- `Skipping, a redirect already exists for "${source}"` - ) - continue - } + for (const { source, code } of allWordpressRedirects) { + // We only want to insert redirects for which we don't have a redirected source yet + if (existingRedirectsFromDb.some((r) => r.source === source)) { + console.log(`Skipping, a redirect already exists for "${source}"`) + continue + } - // Resolve the target of the redirect, recursively following any - // Wordpress redirects until we reach a final target - const resolvedUrl = await resolveRedirectFromMap( - Url.fromURL(source), - getWordpressRedirectsMapFromRedirects(allWordpressRedirects) - ) + // Resolve the target of the redirect, recursively following any + // Wordpress redirects until we reach a final target + const resolvedUrl = await resolveRedirectFromMap( + Url.fromURL(source), + getWordpressRedirectsMapFromRedirects(allWordpressRedirects) + ) - // Use the no-trailing slash version of the resolved target URL. - // This is to handle the case where parsing a URL with no pathname - // (e.g. https://africaindata.org) still results in a trailing slash - // being added by url-parse when creating a new Url object. This - // solves the issue at hand, although it's debatable whether the - // issue needed fixing for the one case this script will ever come - // across (https://africaindata.org). It might just make more sense - // to fix this edge case manually and avoid polluting the Url class - // with code that won't be used after the migration. - const resolvedTarget = resolvedUrl.fullUrlNoTrailingSlash + // Use the no-trailing slash version of the resolved target URL. + // This is to handle the case where parsing a URL with no pathname + // (e.g. https://africaindata.org) still results in a trailing slash + // being added by url-parse when creating a new Url object. This + // solves the issue at hand, although it's debatable whether the + // issue needed fixing for the one case this script will ever come + // across (https://africaindata.org). It might just make more sense + // to fix this edge case manually and avoid polluting the Url class + // with code that won't be used after the migration. + const resolvedTarget = resolvedUrl.fullUrlNoTrailingSlash - console.log( - `Adding redirect: ${source} -> ${resolvedTarget} (${code})` - ) - await db.knexRaw( - knex, - `INSERT INTO redirects (source, target, code) VALUES (?, ?, ?)`, - [source, resolvedTarget, code] - ) - } - }) + console.log(`Adding redirect: ${source} -> ${resolvedTarget} (${code})`) + await db.knexRaw( + trx, + `INSERT INTO redirects (source, target, code) VALUES (?, ?, ?)`, + [source, resolvedTarget, code] + ) + } } const main = async (): Promise => { try { await db.getConnection() - await syncRedirectsToGrapher() + await db.knexReadWriteTransaction((trx) => syncRedirectsToGrapher(trx)) } finally { await wpdb.singleton.end() await db.closeTypeOrmAndKnexConnections() diff --git a/db/DEPRECATEDwpdb.ts b/db/DEPRECATEDwpdb.ts index a0dea29f82b..4ec7329a868 100644 --- a/db/DEPRECATEDwpdb.ts +++ b/db/DEPRECATEDwpdb.ts @@ -27,6 +27,7 @@ import { FOR_SYNC_ONLY_graphqlQuery, } from "./wpdb.js" import { getFullPost } from "./model/Post.js" +import { KnexReadonlyTransaction } from "./db.js" const DEPRECATED_ENTRIES_CATEGORY_ID = 44 const DEPRECATED_OWID_API_ENDPOINT = `${WORDPRESS_URL}/wp-json/owid/v1` @@ -88,6 +89,7 @@ export const DEPRECATEDgetPosts = async ( // We might want to cache this as the network of prominent links densifies and // multiple requests to the same posts are happening. 
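 // (Review sketch, illustrative and not part of the original file: helpers like
 // the one below now take the transaction as their first argument, so the
 // caller owns the transaction and threads it through, e.g.
 //     const post = await db.knexReadonlyTransaction((trx) =>
 //         DEPRECATEDgetPostBySlugFromApi(trx, slug)
 //     )
 // where `slug` is whichever slug the caller wants to fetch.)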
export const DEPRECATEDgetPostBySlugFromApi = async (
+    knex: KnexReadonlyTransaction,
     slug: string
 ): Promise<FullPost> => {
     if (!isWordpressAPIEnabled) {
@@ -96,12 +98,13 @@
 
     const postApi = await FOR_SYNC_ONLY_getPostApiBySlugFromApi(slug)
 
-    return getFullPost(postApi)
+    return getFullPost(knex, postApi)
 }
 
 // the /revisions endpoint does not send back all the metadata required for
 // the proper rendering of the post (e.g. authors), hence the double request.
 export const DEPRECATEDgetLatestPostRevision = async (
+    knex: KnexReadonlyTransaction,
     id: number
 ): Promise<FullPost> => {
     const type = await DEPRECATEDgetPostType(id)
@@ -125,7 +128,7 @@
     // and could have been modified in the sidebar.)
     // - authors
     // ...
-    return getFullPost({
+    return getFullPost(knex, {
         ...postApi,
         content: revision.content,
         title: revision.title,
diff --git a/db/Variable.test.ts b/db/Variable.test.ts
index 30cf95002f9..1f51b46ae2c 100644
--- a/db/Variable.test.ts
+++ b/db/Variable.test.ts
@@ -46,7 +46,13 @@ describe("writeVariableCSV", () => {
                 callback(null)
             },
         })
-        await writeVariableCSV(variableIds, writeStream, db.knexInstance())
+
+        await writeVariableCSV(
+            variableIds,
+            writeStream,
+            {} as db.KnexReadonlyTransaction
+        )
+
         return out
     }
 
@@ -165,7 +171,7 @@ describe("_dataAsDFfromS3", () => {
             },
         }
         mockS3data(s3data)
-        const df = await _dataAsDFfromS3([1], db.knexInstance())
+        const df = await _dataAsDFfromS3([1], {} as db.KnexReadonlyTransaction)
         expect(df.toObject()).toEqual({
             entityCode: ["code", "code"],
             entityId: [1, 1],
diff --git a/db/db.ts b/db/db.ts
index 2d41999b8c3..f43e2f6ed82 100644
--- a/db/db.ts
+++ b/db/db.ts
@@ -111,9 +111,35 @@ export const knexInstance = (): Knex => {
     return _knexInstance
 }
 
-export const knexTable = (table: string): Knex.QueryBuilder =>
-    knexInstance().table(table)
+declare const __read_capability: unique symbol
+declare const __write_capability: unique symbol
+export type KnexReadonlyTransaction = Knex.Transaction & {
+    readonly [__read_capability]: "read"
+}
+
+export type KnexReadWriteTransaction = Knex.Transaction & {
+    readonly [__read_capability]: "read"
+    readonly [__write_capability]: "write"
+}
+
+export async function knexReadonlyTransaction<T>(
+    transactionFn: (trx: KnexReadonlyTransaction) => Promise<T>,
+    knex: Knex = knexInstance()
+): Promise<T> {
+    return knex.transaction(
+        async (trx) => transactionFn(trx as KnexReadonlyTransaction),
+        { readOnly: true }
+    )
+}
+
+export async function knexReadWriteTransaction<T>(
+    transactionFn: (trx: KnexReadWriteTransaction) => Promise<T>,
+    knex: Knex = knexInstance()
+): Promise<T> {
+    return knex.transaction(async (trx) =>
+        transactionFn(trx as KnexReadWriteTransaction)
+    )
+}
 export const knexRaw = async <TRow = unknown>(
     knex: Knex,
     str: string,
@@ -137,7 +163,7 @@ export const knexRawFirst = async <TRow = unknown>(
 * to help us exclude them from the baking process. 
 */
 export const getSlugsWithPublishedGdocsSuccessors = async (
-    knex: Knex
+    knex: KnexReadonlyTransaction
 ): Promise<Set<string>> => {
     return knexRaw(
         knex,
@@ -148,7 +174,7 @@ }
 
 export const getExplorerTags = async (
-    knex: Knex
+    knex: KnexReadonlyTransaction
 ): Promise<{ slug: string; tags: DbChartTagJoin[] }[]> => {
     return knexRaw<{ slug: string; tags: string }>(
         knex,
@@ -174,7 +200,7 @@ }
 
 export const getPublishedExplorersBySlug = async (
-    knex: Knex
+    knex: KnexReadonlyTransaction
 ): Promise<{
     [slug: string]: {
         slug: string
diff --git a/db/migrateWpPostsToArchieMl.ts b/db/migrateWpPostsToArchieMl.ts
index b547b63d6a0..97960585076 100644
--- a/db/migrateWpPostsToArchieMl.ts
+++ b/db/migrateWpPostsToArchieMl.ts
@@ -9,6 +9,7 @@ import {
     RelatedChart,
     EnrichedBlockAllCharts,
     parsePostAuthors,
+    DbRawPost,
 } from "@ourworldindata/utils"
 import * as Post from "./model/Post.js"
 import fs from "fs"
@@ -78,24 +79,38 @@ const entries = new Set([
     "trust",
 ])
 
-const migrate = async (): Promise<void> => {
+const migrate = async (trx: db.KnexReadWriteTransaction): Promise<void> => {
     const writeToFile = false
     const severeErrors: any[] = []
 
-    await db.getConnection()
-    const rawPosts = await Post.select(
-        "id",
-        "slug",
-        "title",
-        "content",
-        "published_at",
-        "updated_at_in_wordpress",
-        "authors",
-        "excerpt",
-        "created_at_in_wordpress",
-        "updated_at",
-        "featured_image"
-    ).from(db.knexTable(Post.postsTable)) //.where("id", "=", "54759"))
+    const rawPosts: Pick<
+        DbRawPost,
+        | "id"
+        | "slug"
+        | "title"
+        | "content"
+        | "published_at"
+        | "updated_at_in_wordpress"
+        | "authors"
+        | "excerpt"
+        | "created_at_in_wordpress"
+        | "updated_at"
+        | "featured_image"
+    >[] = await trx
+        .select(
+            "id" satisfies keyof DbRawPost,
+            "slug" satisfies keyof DbRawPost,
+            "title" satisfies keyof DbRawPost,
+            "content" satisfies keyof DbRawPost,
+            "published_at" satisfies keyof DbRawPost,
+            "updated_at_in_wordpress" satisfies keyof DbRawPost,
+            "authors" satisfies keyof DbRawPost,
+            "excerpt" satisfies keyof DbRawPost,
+            "created_at_in_wordpress" satisfies keyof DbRawPost,
+            "updated_at" satisfies keyof DbRawPost,
+            "featured_image" satisfies keyof DbRawPost
+        )
+        .table(Post.postsTable) //.where("id", "=", "54759"))
 
     for (const postRaw of rawPosts) {
         try {
@@ -109,7 +124,7 @@ const migrate = async (): Promise<void> => {
             const text = post.content
             let relatedCharts: RelatedChart[] = []
             if (isEntry) {
-                relatedCharts = await getPostRelatedCharts(post.id)
+                relatedCharts = await getPostRelatedCharts(trx, post.id)
             }
 
             const shouldIncludeMaxAsAuthor = isPostSlugCitable(post.slug)
@@ -256,7 +271,7 @@ const migrate = async (): Promise<void> => {
             const insertQuery = `
         UPDATE posts SET archieml = ?, archieml_update_statistics = ?, markdown = ? WHERE id = ? 
`
-        await db.queryMysql(insertQuery, [
+        await db.knexRaw(trx, insertQuery, [
             JSON.stringify(archieMlFieldContent, null, 2),
             JSON.stringify(archieMlStatsContent, null, 2),
             markdown,
@@ -291,20 +306,15 @@ const migrate = async (): Promise<void> => {
         }
     }
 
-    // const sortedTagCount = _.sortBy(
-    //     Array.from(tagCounts.entries()),
-    //     ([tag, count]) => tag
-    // )
-    // for (const [tag, count] of sortedTagCount) {
-    //     console.log(`${tag}: ${count}`)
-    // }
-
-    await db.closeTypeOrmAndKnexConnections()
-
     if (severeErrors.length > 0) {
         console.error("Errors", severeErrors)
         throw new Error(`${severeErrors.length} items had errors`)
     }
 }
 
-migrate()
+async function runMigrate(): Promise<void> {
+    await db.knexReadWriteTransaction(migrate)
+    await db.closeTypeOrmAndKnexConnections()
+}
+
+runMigrate()
diff --git a/db/model/Chart.ts b/db/model/Chart.ts
index f9d073cee5e..a988b4ab41e 100644
--- a/db/model/Chart.ts
+++ b/db/model/Chart.ts
@@ -30,7 +30,6 @@ import {
     BAKED_BASE_URL,
     OPENAI_API_KEY,
 } from "../../settings/serverSettings.js"
-import { Knex } from "knex"
 
 // XXX hardcoded filtering to public parent tags
 export const PUBLIC_TAG_PARENT_IDS = [
@@ -258,21 +257,6 @@ WHERE c.config -> "$.isPublished" = true
         return []
         }
     }
-
-    static async all(): Promise<ChartRow[]> {
-        const rows = await db.knexTable(Chart.table)
-
-        for (const row of rows) {
-            row.config = JSON.parse(row.config)
-        }
-
-        return rows as ChartRow[] // This cast might be a lie?
-    }
-}
-
-interface ChartRow {
-    id: number
-    config: any
 }
 
 // TODO integrate this old logic with typeorm
@@ -384,7 +368,7 @@ export const getRelatedChartsForVariable = async (
 }
 
 export const getChartEmbedUrlsInPublishedWordpressPosts = async (
-    knex: Knex
+    knex: db.KnexReadonlyTransaction
 ): Promise<string[]> => {
     const chartSlugQueryString: Pick<
         DbPlainPostLink,
diff --git a/db/model/Dataset.ts b/db/model/Dataset.ts
index 22a2a3ca794..68110afcab6 100644
--- a/db/model/Dataset.ts
+++ b/db/model/Dataset.ts
@@ -22,7 +22,6 @@ import {
     VariablesTableName,
     DbRawVariable,
 } from "@ourworldindata/types"
-import { Knex } from "knex"
 
 @Entity("datasets")
 @Unique(["name", "namespace"])
@@ -45,7 +44,7 @@ export class Dataset extends BaseEntity {
 }
 
 export async function getDatasetById(
-    knex: Knex,
+    knex: db.KnexReadonlyTransaction,
     datasetId: number
 ): Promise<Dataset | undefined> {
     const dataset = await knex(DatasetsTableName)
@@ -62,7 +61,7 @@
 
 // Export dataset variables to CSV (not including metadata)
 export async function writeDatasetCSV(
-    knex: Knex,
+    knex: db.KnexReadonlyTransaction,
     datasetId: number,
     stream: Writable
 ): Promise<void> {
@@ -81,7 +80,7 @@ }
 
 export async function datasetToCSV(
-    knex: Knex,
+    knex: db.KnexReadonlyTransaction,
     datasetId: number
 ): Promise<string> {
     let csv = ""
@@ -93,34 +92,32 @@ }
 
 export async function setTagsForDataset(
-    knex: Knex,
+    trx: db.KnexReadWriteTransaction,
     datasetId: number,
     tagIds: number[]
 ): Promise<void> {
-    await knex.transaction(async (trx: Knex) => {
-        const tagRows = tagIds.map((tagId) => [tagId, datasetId])
-        await db.knexRaw(trx, `DELETE FROM dataset_tags WHERE datasetId=?`, [
-            datasetId,
-        ])
-        if (tagRows.length)
-            await db.knexRaw(
-                trx,
-                `INSERT INTO dataset_tags (tagId, datasetId) VALUES ?`,
+    const tagRows = tagIds.map((tagId) => [tagId, datasetId])
+    await db.knexRaw(trx, `DELETE FROM dataset_tags WHERE datasetId=?`, [
+        datasetId,
+    ])
+    if (tagRows.length)
+        await db.knexRaw(
+            trx,
+            `INSERT INTO dataset_tags (tagId, datasetId) VALUES ?`,
 
-            [tagRows]
) - }) + [tagRows] + ) } // Return object representing datapackage.json for this dataset export async function datasetToDatapackage( - knex: Knex, + knex: db.KnexReadonlyTransaction, datasetId: number ): Promise { const datasetName = (await getDatasetById(knex, datasetId))?.name const sources = await getSourcesForDataset(knex, datasetId) - const variables = (await db - .knexTable(VariablesTableName) + const variables = (await knex + .table(VariablesTableName) .where({ datasetId })) as DbRawVariable[] const tags = await db.knexRaw>( knex, diff --git a/db/model/Gdoc/GdocBase.ts b/db/model/Gdoc/GdocBase.ts index 0f0826ce54f..0ac51610f54 100644 --- a/db/model/Gdoc/GdocBase.ts +++ b/db/model/Gdoc/GdocBase.ts @@ -646,8 +646,8 @@ export class GdocBase extends BaseEntity implements OwidGdocBaseInterface { ) ).then(excludeNullish) - const publishedExplorersBySlug = await db.getPublishedExplorersBySlug( - db.knexInstance() + const publishedExplorersBySlug = await db.knexReadonlyTransaction( + (trx) => db.getPublishedExplorersBySlug(trx) ) const linkedExplorerCharts = await Promise.all( @@ -773,8 +773,8 @@ export class GdocBase extends BaseEntity implements OwidGdocBaseInterface { ) const chartIdsBySlug = await Chart.mapSlugsToIds() - const publishedExplorersBySlug = await db.getPublishedExplorersBySlug( - db.knexInstance() + const publishedExplorersBySlug = await db.knexReadonlyTransaction( + (trx) => db.getPublishedExplorersBySlug(trx) ) const linkErrors: OwidGdocErrorMessage[] = this.links.reduce( diff --git a/db/model/Pageview.ts b/db/model/Pageview.ts index b31d39bde50..d2a1d92f9b9 100644 --- a/db/model/Pageview.ts +++ b/db/model/Pageview.ts @@ -2,7 +2,6 @@ import { keyBy } from "lodash" import { Entity, Column, BaseEntity } from "typeorm" import { RawPageview } from "@ourworldindata/utils" import * as db from "../db.js" -import { Knex } from "knex" import { DbPlainAnalyticsPageview, AnalyticsPageviewsTableName, @@ -27,7 +26,7 @@ export class Pageview extends BaseEntity implements RawPageview { } export async function getAnalyticsPageviewsByUrlObj( - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise<{ [url: string]: DbPlainAnalyticsPageview }> { diff --git a/db/model/Post.ts b/db/model/Post.ts index 29a125014df..bfd96d7e19b 100644 --- a/db/model/Post.ts +++ b/db/model/Post.ts @@ -43,14 +43,21 @@ export const select = ( from: (query) => query.select(...args) as any, }) -export const getTagsByPostId = async (): Promise< - Map -> => { - const postTags = await db.queryMysql(` +export const getTagsByPostId = async ( + knex: db.KnexReadonlyTransaction +): Promise> => { + const postTags = await db.knexRaw<{ + postId: number + tagId: number + tagName: string + }>( + knex, + ` SELECT pt.post_id AS postId, pt.tag_id AS tagId, t.name as tagName FROM post_tags pt JOIN posts p ON p.id=pt.post_id JOIN tags t ON t.id=pt.tag_id - `) + ` + ) const tagsByPostId: Map = new Map() @@ -92,55 +99,61 @@ export const setTagsForPost = async ( }) export const getPostRawBySlug = async ( + trx: db.KnexReadonlyTransaction, slug: string ): Promise => - (await db.knexTable(postsTable).where({ slug }))[0] + (await trx.table(postsTable).where({ slug }))[0] export const getPostRawById = async ( + trx: db.KnexReadonlyTransaction, id: number ): Promise => - (await db.knexTable(postsTable).where({ id }))[0] + (await trx.table(postsTable).where({ id }))[0] export const getPostEnrichedBySlug = async ( + trx: db.KnexReadonlyTransaction, slug: string ): Promise => { - const post = await getPostRawBySlug(slug) + const post = await 
getPostRawBySlug(trx, slug) if (!post) return undefined return parsePostRow(post) } export const getPostEnrichedById = async ( + trx: db.KnexReadonlyTransaction, id: number ): Promise => { - const post = await getPostRawById(id) + const post = await getPostRawById(trx, id) if (!post) return undefined return parsePostRow(post) } export const getFullPostBySlugFromSnapshot = async ( + trx: db.KnexReadonlyTransaction, slug: string ): Promise => { - const postEnriched = await getPostEnrichedBySlug(slug) + const postEnriched = await getPostEnrichedBySlug(trx, slug) if ( !postEnriched?.wpApiSnapshot || !snapshotIsPostRestApi(postEnriched.wpApiSnapshot) ) throw new JsonError(`No page snapshot found by slug ${slug}`, 404) - return getFullPost(postEnriched.wpApiSnapshot) + return getFullPost(trx, postEnriched.wpApiSnapshot) } export const getFullPostByIdFromSnapshot = async ( + trx: db.KnexReadonlyTransaction, id: number ): Promise => { - const postEnriched = await getPostEnrichedById(id) + const postEnriched = await getPostEnrichedById(trx, id) if ( !postEnriched?.wpApiSnapshot || !snapshotIsPostRestApi(postEnriched.wpApiSnapshot) ) throw new JsonError(`No page snapshot found by id ${id}`, 404) - return getFullPost(postEnriched.wpApiSnapshot) + return getFullPost(trx, postEnriched.wpApiSnapshot) } export const isPostSlugCitable = (slug: string): boolean => { @@ -160,7 +173,7 @@ export const isPostSlugCitable = (slug: string): boolean => { } export const getPostsFromSnapshots = async ( - knex: Knex, + knex: db.KnexReadonlyTransaction, postTypes: string[] = [WP_PostType.Post, WP_PostType.Page], filterFunc?: FilterFnPostRestApi ): Promise => { @@ -194,9 +207,12 @@ export const getPostsFromSnapshots = async ( } export const getPostRelatedCharts = async ( + knex: db.KnexReadonlyTransaction, postId: number ): Promise => - db.queryMysql(` + db.knexRaw( + knex, + `-- sql SELECT DISTINCT charts.config->>"$.slug" AS slug, charts.config->>"$.title" AS title, @@ -208,9 +224,11 @@ export const getPostRelatedCharts = async ( WHERE post_tags.post_id=${postId} AND charts.config->>"$.isPublished" = "true" ORDER BY title ASC - `) + ` + ) export const getFullPost = async ( + knex: db.KnexReadonlyTransaction, postApi: PostRestApi, excludeContent?: boolean ): Promise => ({ @@ -233,7 +251,7 @@ export const getFullPost = async ( imageId: postApi.featured_media, relatedCharts: postApi.type === "page" - ? await getPostRelatedCharts(postApi.id) + ? 
await getPostRelatedCharts(knex, postApi.id) : undefined, }) @@ -241,7 +259,7 @@ const selectHomepagePosts: FilterFnPostRestApi = (post) => post.meta?.owid_publication_context_meta_field?.homepage === true export const getBlogIndex = memoize( - async (knex: Knex): Promise => { + async (knex: db.KnexReadonlyTransaction): Promise => { await db.getConnection() // side effect: ensure connection is established const gdocPosts = await GdocPost.getListedGdocPosts() const wpPosts = await Promise.all( @@ -249,7 +267,9 @@ export const getBlogIndex = memoize( knex, [WP_PostType.Post], selectHomepagePosts - ).then((posts) => posts.map((post) => getFullPost(post, true))) + ).then((posts) => + posts.map((post) => getFullPost(knex, post, true)) + ) ) const gdocSlugs = new Set(gdocPosts.map(({ slug }) => slug)) @@ -289,9 +309,10 @@ export const postsFlushCache = (): void => { } export const getBlockContentFromSnapshot = async ( + trx: db.KnexReadonlyTransaction, id: number ): Promise => { - const enrichedBlock = await getPostEnrichedById(id) + const enrichedBlock = await getPostEnrichedById(trx, id) if ( !enrichedBlock?.wpApiSnapshot || !snapshotIsBlockGraphQlApi(enrichedBlock.wpApiSnapshot) @@ -303,7 +324,7 @@ export const getBlockContentFromSnapshot = async ( export const getWordpressPostReferencesByChartId = async ( chartId: number, - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise => { const relatedWordpressPosts: PostReference[] = await db.knexRaw( knex, @@ -351,7 +372,7 @@ export const getWordpressPostReferencesByChartId = async ( export const getGdocsPostReferencesByChartId = async ( chartId: number, - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise => { const relatedGdocsPosts: PostReference[] = await db.knexRaw( knex, @@ -393,8 +414,8 @@ export const getGdocsPostReferencesByChartId = async ( * Get all the gdocs and Wordpress posts mentioning a chart */ export const getRelatedArticles = async ( - chartId: number, - knex: Knex + knex: db.KnexReadonlyTransaction, + chartId: number ): Promise => { const wordpressPosts = await getWordpressPostReferencesByChartId( chartId, @@ -409,10 +430,11 @@ export const getRelatedArticles = async ( } export const getPostTags = async ( + trx: db.KnexReadonlyTransaction, postId: number ): Promise[]> => { - return await db - .knexTable("post_tags") + return await trx + .table("post_tags") .select("tags.id", "tags.name") .where({ post_id: postId }) .join("tags", "tags.id", "=", "post_tags.tag_id") diff --git a/db/model/PostLink.ts b/db/model/PostLink.ts index 5f30e6951e2..57a56a9c855 100644 --- a/db/model/PostLink.ts +++ b/db/model/PostLink.ts @@ -6,7 +6,7 @@ import { Url, } from "@ourworldindata/utils" import { getLinkType, getUrlTarget } from "@ourworldindata/components" -import { Knex } from "knex" +import { KnexReadWriteTransaction, KnexReadonlyTransaction } from "../db.js" export function postLinkCreateFromUrl({ url, sourceId, @@ -36,41 +36,41 @@ export function postLinkCreateFromUrl({ } export async function getPostLinkById( - knex: Knex, + knex: KnexReadonlyTransaction, id: number ): Promise { return knex(PostsLinksTableName).where({ id }).first() } export async function getAllPostLinks( - knex: Knex + knex: KnexReadonlyTransaction ): Promise { return knex(PostsLinksTableName) } export async function getPostLinksBySourceId( - knex: Knex, + knex: KnexReadonlyTransaction, sourceId: number ): Promise { return knex(PostsLinksTableName).where({ sourceId }) } export async function insertPostLink( - knex: Knex, + knex: KnexReadWriteTransaction, 
postLink: DbInsertPostLink ): Promise<{ id: number }> { return knex(PostsLinksTableName).returning("id").insert(postLink) } export async function insertManyPostLinks( - knex: Knex, + knex: KnexReadWriteTransaction, postLinks: DbInsertPostLink[] ): Promise { return knex.batchInsert(PostsLinksTableName, postLinks) } export async function updatePostLink( - knex: Knex, + knex: KnexReadWriteTransaction, id: number, postLink: DbInsertPostLink ): Promise { @@ -78,14 +78,14 @@ export async function updatePostLink( } export async function deletePostLink( - knex: Knex, + knex: KnexReadWriteTransaction, id: number ): Promise { return knex(PostsLinksTableName).where({ id }).delete() } export async function deleteManyPostLinks( - knex: Knex, + knex: KnexReadWriteTransaction, ids: number[] ): Promise { return knex(PostsLinksTableName).whereIn("id", ids).delete() diff --git a/db/model/Redirect.ts b/db/model/Redirect.ts index 6c1ad4f609d..d0c60439bfe 100644 --- a/db/model/Redirect.ts +++ b/db/model/Redirect.ts @@ -1,9 +1,8 @@ -import { Knex } from "knex" import { DbPlainRedirect } from "@ourworldindata/types" import * as db from "../db" export const getRedirectsFromDb = async ( - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise => { const redirectsFromDb: DbPlainRedirect[] = await db.knexRaw( knex, diff --git a/db/model/Source.ts b/db/model/Source.ts index 0ae51f96465..7c58b3d83d0 100644 --- a/db/model/Source.ts +++ b/db/model/Source.ts @@ -1,11 +1,11 @@ import { Entity, PrimaryGeneratedColumn, Column, BaseEntity } from "typeorm" -import { Knex } from "knex" import { DbEnrichedSource, DbRawSource, SourcesTableName, parseSourcesRow, } from "@ourworldindata/types" +import { KnexReadonlyTransaction } from "../db.js" @Entity("sources") export class Source extends BaseEntity { @@ -16,7 +16,7 @@ export class Source extends BaseEntity { } export async function getSourceById( - knex: Knex, + knex: KnexReadonlyTransaction, sourceId: number ): Promise { const rawSource: DbRawSource | undefined = await knex( @@ -34,7 +34,7 @@ export async function getSourceById( } export async function getSourcesForDataset( - knex: Knex, + knex: KnexReadonlyTransaction, datasetId: number ): Promise { const rawSources: DbRawSource[] = await knex( diff --git a/db/model/User.ts b/db/model/User.ts index 4964b8198e4..8901c31d2f0 100644 --- a/db/model/User.ts +++ b/db/model/User.ts @@ -15,7 +15,7 @@ import { DbInsertUser, UsersTableName, } from "@ourworldindata/types" -import { Knex } from "knex" +import { KnexReadWriteTransaction, KnexReadonlyTransaction } from "../db.js" @Entity("users") export class User extends BaseEntity { @@ -44,7 +44,7 @@ export class User extends BaseEntity { } export async function setPassword( - knex: Knex, + knex: KnexReadWriteTransaction, id: number, password: string ): Promise { @@ -54,21 +54,21 @@ export async function setPassword( } export async function getUserById( - knex: Knex, + knex: KnexReadonlyTransaction, id: number ): Promise { return knex(UsersTableName).where({ id }).first() } export async function insertUser( - knex: Knex, + knex: KnexReadWriteTransaction, user: DbInsertUser ): Promise<{ id: number }> { return knex(UsersTableName).returning("id").insert(user) } export async function updateUser( - knex: Knex, + knex: KnexReadWriteTransaction, id: number, user: Partial ): Promise { @@ -76,7 +76,7 @@ export async function updateUser( } export async function deleteUser( - knex: Knex, + knex: KnexReadWriteTransaction, id: number ): Promise { return knex(UsersTableName).where({ id }).delete() 
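Review sketch (illustrative usage, not part of the patch): the unique-symbol
capability markers on KnexReadonlyTransaction and KnexReadWriteTransaction are
what make signatures like the ones above enforceable. A read-write transaction
satisfies a read-only parameter, but not the other way around. Assuming the
helpers added in db.ts and db/model/User.ts, a caller might look like this
(renameUser is hypothetical):

    import * as db from "./db/db.js"
    import { getUserById, updateUser } from "./db/model/User.js"

    async function renameUser(id: number, fullName: string): Promise<void> {
        // Read-only work: a KnexReadonlyTransaction is all getUserById needs.
        const user = await db.knexReadonlyTransaction((trx) =>
            getUserById(trx, id)
        )
        if (!user) return

        // Write work must go through the read-write helper; handing a
        // readonly trx to updateUser fails to compile because that type
        // lacks the __write_capability marker.
        await db.knexReadWriteTransaction((trx) =>
            updateUser(trx, id, { fullName })
        )
    }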
diff --git a/db/model/Variable.ts b/db/model/Variable.ts index 851ccc83fb5..06a8da01728 100644 --- a/db/model/Variable.ts +++ b/db/model/Variable.ts @@ -26,7 +26,6 @@ import { OwidProcessingLevel, DbRawVariable, } from "@ourworldindata/types" -import { Knex } from "knex/types" import { knexRaw, knexRawFirst } from "../db.js" export interface VariableRow { @@ -137,7 +136,7 @@ export function parseVariableRows( export async function getMergedGrapherConfigForVariable( variableId: number, - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise { const rows: Pick< DbRawVariable, @@ -200,7 +199,7 @@ export async function getDataForMultipleVariables( export async function writeVariableCSV( variableIds: number[], stream: Writable, - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise { // get variables as dataframe const variablesDF = ( @@ -245,7 +244,7 @@ export async function writeVariableCSV( export const getDataValue = async ( { variableId, entityId, year }: DataValueQueryArgs, - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise => { if (!variableId || !entityId) return @@ -290,7 +289,7 @@ export const getDataValue = async ( export const getOwidChartDimensionConfigForVariable = async ( variableId: OwidVariableId, chartId: number, - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise => { const row = await db.knexRawFirst<{ dimensions: string }>( knex, @@ -311,7 +310,7 @@ export const getOwidChartDimensionConfigForVariable = async ( export const getOwidVariableDisplayConfig = async ( variableId: OwidVariableId, - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise => { const row = await knexRawFirst>( knex, @@ -324,7 +323,7 @@ export const getOwidVariableDisplayConfig = async ( export const entitiesAsDF = async ( entityIds: number[], - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise => { return ( await readSQLasDF( @@ -371,7 +370,7 @@ const emptyDataDF = (): pl.DataFrame => { export const _dataAsDFfromS3 = async ( variableIds: OwidVariableId[], - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise => { if (variableIds.length === 0) { return emptyDataDF() @@ -418,7 +417,7 @@ export const _dataAsDFfromS3 = async ( export const dataAsDF = async ( variableIds: OwidVariableId[], - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise => { return _dataAsDFfromS3(variableIds, knex) } @@ -514,7 +513,7 @@ export const createDataFrame = (data: unknown): pl.DataFrame => { export const readSQLasDF = async ( sql: string, params: any[], - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise => { return createDataFrame(await db.knexRaw(knex, sql, params)) } @@ -572,7 +571,7 @@ export async function getVariableOfDatapageIfApplicable( export const searchVariables = async ( query: string, limit: number, - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise => { const whereClauses = buildWhereClauses(query) @@ -726,7 +725,7 @@ const buildWhereClauses = (query: string): string[] => { */ const queryRegexSafe = async ( query: string, - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise => { // catch regular expression failures in MySQL and return empty result return await knexRaw(knex, query).catch((err) => { @@ -739,7 +738,7 @@ const queryRegexSafe = async ( const queryRegexCount = async ( query: string, - knex: Knex + knex: db.KnexReadonlyTransaction ): Promise => { const results = await queryRegexSafe(query, knex) if (!results.length) { diff --git a/db/refreshPageviewsFromDatasette.ts b/db/refreshPageviewsFromDatasette.ts index ff9a60fca57..cb32bd7a824 
100644
--- a/db/refreshPageviewsFromDatasette.ts
+++ b/db/refreshPageviewsFromDatasette.ts
@@ -4,7 +4,6 @@ import Papa from "papaparse"
 import * as db from "./db.js"
 import { DbPlainAnalyticsPageview } from "@ourworldindata/types"
 import { omitUndefinedValues } from "@ourworldindata/utils"
-import { Knex } from "knex"
 
 const analyticsPageviewsColumnNames: Array<keyof DbPlainAnalyticsPageview> = [
     "day",
@@ -17,7 +16,9 @@ const analyticsPageviewsColumnNames: Array<keyof DbPlainAnalyticsPageview> = [
 
 const emojiRegex =
     /[\u{1f300}-\u{1f5ff}\u{1f900}-\u{1f9ff}\u{1f600}-\u{1f64f}\u{1f680}-\u{1f6ff}\u{2600}-\u{26ff}\u{2700}-\u{27bf}\u{1f1e6}-\u{1f1ff}\u{1f191}-\u{1f251}\u{1f004}\u{1f0cf}\u{1f170}-\u{1f171}\u{1f17e}-\u{1f17f}\u{1f18e}\u{3030}\u{2b50}\u{2b55}\u{2934}-\u{2935}\u{2b05}-\u{2b07}\u{2b1b}-\u{2b1c}\u{3297}\u{3299}\u{303d}\u{00a9}\u{00ae}\u{2122}\u{23f3}\u{24c2}\u{23e9}-\u{23ef}\u{25b6}\u{23f8}-\u{23fa}]/gu
 
-async function downloadAndInsertCSV(knex: Knex): Promise<void> {
+async function downloadAndInsertCSV(
+    knex: db.KnexReadWriteTransaction
+): Promise<void> {
     // Fetch CSV from private Datasette and insert it to a local MySQL. This function
     // exists because `make refresh` uses MySQL dump that excludes analytics_pageviews
     // table. That's why it's necessary to call `make refresh.pageviews` separately.
@@ -69,8 +70,7 @@ async function downloadAndInsertCSV(knex: Knex): Promise<void> {
 
 const main = async (): Promise<void> => {
     try {
-        const knex = db.knexInstance()
-        await downloadAndInsertCSV(knex)
+        await db.knexReadWriteTransaction((trx) => downloadAndInsertCSV(trx))
     } catch (e) {
         console.error(e)
     } finally {
diff --git a/db/syncPostsToGrapher.ts b/db/syncPostsToGrapher.ts
index 936f8d3f3c8..3cc1390ea44 100644
--- a/db/syncPostsToGrapher.ts
+++ b/db/syncPostsToGrapher.ts
@@ -23,7 +23,6 @@ import {
 } from "./model/PostLink.js"
 import { renderTablePress } from "../site/Tablepress.js"
 import pMap from "p-map"
-import { Knex } from "knex"
 
 const zeroDateString = "0000-00-00 00:00:00"
 
@@ -241,7 +240,9 @@ export function getLinksToAddAndRemoveForPost(
     return { linksToAdd, linksToDelete }
 }
 
-const syncPostsToGrapher = async (knex: Knex): Promise<void> => {
+const syncPostsToGrapher = async (
+    knex: db.KnexReadWriteTransaction
+): Promise<void> => {
     const dereferenceReusableBlocksFn = await buildReusableBlocksResolver()
     const dereferenceTablePressFn = await buildTablePressResolver()
 
@@ -331,9 +332,7 @@ const syncPostsToGrapher = async (knex: Knex): Promise<void> => {
     )
     const doesExistInWordpress = keyBy(rows, "ID")
-    const existsInGrapher = await select("id").from(
-        db.knexInstance().from(postsTable)
-    )
+    const existsInGrapher = await select("id").from(knex.from(postsTable))
     const doesExistInGrapher = keyBy(existsInGrapher, "id")
 
     const toDelete = existsInGrapher
@@ -406,20 +405,18 @@ const syncPostsToGrapher = async (knex: Knex): Promise<void> => {
         linksToDelete.push(...linksToModify.linksToDelete)
     }
 
-    await db.knexInstance().transaction(async (t) => {
-        if (toDelete.length)
-            await t.whereIn("id", toDelete).delete().from(postsTable)
-
-        for (const row of toInsert) {
-            const rowForDb = serializePostRow(row)
-            if (doesExistInGrapher[row.id])
-                await t
-                    .update(rowForDb)
-                    .where("id", "=", rowForDb.id)
-                    .into(postsTable)
-            else await t.insert(rowForDb).into(postsTable)
-        }
-    })
+    if (toDelete.length)
+        await knex.whereIn("id", toDelete).delete().from(postsTable)
+
+    for (const row of toInsert) {
+        const rowForDb = serializePostRow(row)
+        if (doesExistInGrapher[row.id])
+            await knex
+                .update(rowForDb)
+                .where("id", "=", rowForDb.id)
+                .into(postsTable)
+        else await knex.insert(rowForDb).into(postsTable)
+    }
 
     // TODO: unify our DB access and then do everything in one transaction
     if (linksToAdd.length) {
@@ -439,8 +436,7 @@ const syncPostsToGrapher = async (knex: Knex): Promise<void> => {
 const main = async (): Promise<void> => {
     try {
         await db.getConnection()
-        const knex = db.knexInstance()
-        await syncPostsToGrapher(knex)
+        await db.knexReadWriteTransaction((trx) => syncPostsToGrapher(trx))
     } finally {
         await wpdb.singleton.end()
         await db.closeTypeOrmAndKnexConnections()
diff --git a/db/tests/basic.test.ts b/db/tests/basic.test.ts
index e2afa6a4549..784686d470c 100644
--- a/db/tests/basic.test.ts
+++ b/db/tests/basic.test.ts
@@ -3,7 +3,14 @@ import sqlFixtures from "sql-fixtures"
 import { dbTestConfig } from "./dbTestConfig.js"
 import { dataSource } from "./dataSource.dbtests.js"
 import { knex, Knex } from "knex"
-import { getConnection, knexRaw } from "../db.js"
+import {
+    getConnection,
+    knexRaw,
+    knexReadWriteTransaction,
+    KnexReadonlyTransaction,
+    KnexReadWriteTransaction,
+    knexReadonlyTransaction,
+} from "../db.js"
 import { DataSource } from "typeorm"
 import { deleteUser, insertUser, updateUser, User } from "../model/User.js"
 import { Chart } from "../model/Chart.js"
@@ -93,7 +100,7 @@ test("knex interface", async () => {
     if (!knexInstance) throw new Error("Knex connection not initialized")
 
     // Create a transaction and run all tests inside it
-    await knexInstance.transaction(async (trx) => {
+    await knexReadWriteTransaction(async (trx) => {
         // Fetch all users into memory
         const users = await trx
             .from(UsersTableName)
@@ -137,5 +144,46 @@ test("knex interface", async () => {
         )
         expect(usersFromRawQuery.length).toBe(2)
         await deleteUser(trx, 2)
-    })
+    }, knexInstance)
+})
+
+export async function testRo(
+    trx: KnexReadonlyTransaction
+): Promise<{ result: number }[]> {
+    return knexRaw<{ result: number }>(trx, "SELECT 1 + 1 as result")
+}
+
+export async function testGetNumUsers(
+    trx: KnexReadonlyTransaction
+): Promise<{ userCount: number }[]> {
+    return knexRaw<{ userCount: number }>(
+        trx,
+        "SELECT count(*) as userCount from users"
+    )
+}
+
+export async function testRw(trx: KnexReadWriteTransaction): Promise<void> {
+    await knexRaw(trx, "INSERT INTO users (email, fullName) VALUES (?, ?)", [
+        "test2@ourworldindata.org",
+        "Test User 2",
+    ])
+}
+
+test("Transaction setup", async () => {
+    const result = await knexReadWriteTransaction(async (trx) => {
+        const result = await testRo(trx)
+        expect(result.length).toBe(1)
+        expect(result[0].result).toBe(2)
+        await testRw(trx)
+        return await testGetNumUsers(trx)
+    }, knexInstance)
+    expect(result.length).toBe(1)
+    expect(result[0].userCount).toBe(2)
+})
+
+test("Write actions in read-only transactions fail", async () => {
+    await expect(async () => {
+        return knexReadonlyTransaction(async (trx) => {
+            await testRw(trx as KnexReadWriteTransaction) // The cast is necessary to not make TypeScript complain and catch this error :)
+        }, knexInstance)
+    }).rejects.toThrow()
 })
diff --git a/devTools/markdownTest/markdown.ts b/devTools/markdownTest/markdown.ts
index 94084262e06..28139c9cd61 100644
--- a/devTools/markdownTest/markdown.ts
+++ b/devTools/markdownTest/markdown.ts
@@ -1,4 +1,8 @@
-import { closeTypeOrmAndKnexConnections, getConnection } from "../../db/db.js"
+import {
+    closeTypeOrmAndKnexConnections,
+    getConnection,
+    knexReadonlyTransaction,
+} from "../../db/db.js"
 import { getPostRawBySlug } from "../../db/model/Post.js"
 import { enrichedBlocksToMarkdown } from "../../db/model/Gdoc/enrichedToMarkdown.js"
 import { GdocBase } from 
"../../db/model/Gdoc/GdocBase.js" @@ -12,36 +16,40 @@ import { parsePostArchieml } from "@ourworldindata/utils" async function main(parsedArgs: parseArgs.ParsedArgs) { try { - const connection = await getConnection() - const gdoc = await GdocBase.findOneBy({ slug: parsedArgs._[0] }) - let archieMlContent: OwidEnrichedGdocBlock[] | null - let contentToShowOnError: any - if (!gdoc) { - const post = await getPostRawBySlug(parsedArgs._[0]) - if (!post) { - console.error("No post found") - process.exit(-1) + await knexReadonlyTransaction(async (trx) => { + const gdoc = await GdocBase.findOneBy({ slug: parsedArgs._[0] }) + let archieMlContent: OwidEnrichedGdocBlock[] | null + let contentToShowOnError: any + if (!gdoc) { + const post = await getPostRawBySlug(trx, parsedArgs._[0]) + if (!post) { + console.error("No post found") + process.exit(-1) + } + archieMlContent = post?.archieml + ? parsePostArchieml(post?.archieml)?.content?.body + : null + contentToShowOnError = post?.archieml + } else { + archieMlContent = gdoc.enrichedBlockSources.flat() + contentToShowOnError = gdoc?.content } - archieMlContent = post?.archieml - ? parsePostArchieml(post?.archieml)?.content?.body - : null - contentToShowOnError = post?.archieml - } else { - archieMlContent = gdoc.enrichedBlockSources.flat() - contentToShowOnError = gdoc?.content - } - if (!archieMlContent) { - console.error("No archieMl found") - process.exit(-1) - } - const markdown = enrichedBlocksToMarkdown(archieMlContent ?? [], true) - if (!markdown) { - console.error("No markdown found") - console.log(contentToShowOnError) - process.exit(-1) - } - console.log(markdown) + if (!archieMlContent) { + console.error("No archieMl found") + process.exit(-1) + } + const markdown = enrichedBlocksToMarkdown( + archieMlContent ?? [], + true + ) + if (!markdown) { + console.error("No markdown found") + console.log(contentToShowOnError) + process.exit(-1) + } + console.log(markdown) + }) await closeTypeOrmAndKnexConnections() } catch (error) { await closeTypeOrmAndKnexConnections() diff --git a/packages/@ourworldindata/types/src/NominalType.ts b/packages/@ourworldindata/types/src/NominalType.ts new file mode 100644 index 00000000000..f3487f54232 --- /dev/null +++ b/packages/@ourworldindata/types/src/NominalType.ts @@ -0,0 +1,22 @@ +declare const __nominal__type: unique symbol +/** Typescript is structurally typed, not nominally typed. This means that + * two types are considered equivalent if their members are equivalent. + * This is often useful but sometimes you want to distingish types based on their + * name alone - e.g. if you have an identifier that is just a string but you'd like + * some type safety when using it. This is where this nominal type comes in. 
+ * @example
+ * type UserName = Nominal<string, "UserName">
+ * type UserId = Nominal<string, "UserId">
+ *
+ * function getUserName(name: UserName) {
+ *     return name
+ * }
+ *
+ * const name = getUserName("123" as UserName) // OK
+ * const name2 = getUserName("123") // Error
+ * const name3 = getUserName("123" as UserId) // Error
+ * // of course the main benefit comes when the UserName and UserId types are used in a more complex call hierarchy
+ */
+export type Nominal<Type, Identifier> = Type & {
+    readonly [__nominal__type]: Identifier
+}
diff --git a/packages/@ourworldindata/types/src/index.ts b/packages/@ourworldindata/types/src/index.ts
index a5de6e9dbdb..3759e4d2e3a 100644
--- a/packages/@ourworldindata/types/src/index.ts
+++ b/packages/@ourworldindata/types/src/index.ts
@@ -635,6 +635,8 @@ export {
 
 export { RedirectCode, type DbPlainRedirect } from "./dbTypes/Redirects.js"
 
+export type { Nominal } from "./NominalType.js"
+
 export {
     type DbRawLatestWork,
     type DbEnrichedLatestWork,
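Review sketch (illustrative, not part of the patch): the exported Nominal helper
generalizes the same unique-symbol branding that db.ts uses for its transaction
types. The brand exists only at the type level, so it has no runtime cost, and
plain values are admitted only via an explicit cast at the boundary (ChartSlug
and chartUrl below are hypothetical):

    import { Nominal } from "@ourworldindata/types"

    type ChartSlug = Nominal<string, "ChartSlug">

    function chartUrl(slug: ChartSlug): string {
        return `https://ourworldindata.org/grapher/${slug}`
    }

    const slug = "life-expectancy" as ChartSlug // cast once at the boundary
    chartUrl(slug) // OK
    // chartUrl("life-expectancy") // compile error: plain string lacks the brand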