diff --git a/adminSiteServer/adminRouter.tsx b/adminSiteServer/adminRouter.tsx
index 331e79f51f1..f1cd4495187 100644
--- a/adminSiteServer/adminRouter.tsx
+++ b/adminSiteServer/adminRouter.tsx
@@ -142,8 +142,8 @@ adminRouter.get("/datasets/:datasetId.csv", async (req, res) => {
     await db.knexInstance().transaction(async (t) => {
         const datasetName = (
             await db.knexRawFirst>(
-                `SELECT name FROM datasets WHERE id=?`,
                 t,
+                `SELECT name FROM datasets WHERE id=?`,
                 [datasetId]
             )
         )?.name
@@ -170,7 +170,7 @@ adminRouter.get("/datasets/:datasetId/downloadZip", async (req, res) => {
     const file = await db.knexRawFirst<
         Pick
-    >(`SELECT filename, file FROM dataset_files WHERE datasetId=?`, knex, [
+    >(knex, `SELECT filename, file FROM dataset_files WHERE datasetId=?`, [
         datasetId,
     ])
     res.send(file?.file)
diff --git a/adminSiteServer/apiRouter.ts b/adminSiteServer/apiRouter.ts
index 44ed7a48407..29b4646fc65 100644
--- a/adminSiteServer/apiRouter.ts
+++ b/adminSiteServer/apiRouter.ts
@@ -481,8 +481,8 @@ apiRouter.get(
         if (!slug) return {}
         const pageviewsByUrl = await db.knexRawFirst(
-            "select * from ?? where url = ?",
             db.knexInstance(),
+            "select * from ?? where url = ?",
             [
                 AnalyticsPageviewsTableName,
                 `https://ourworldindata.org/grapher/${slug}`,
@@ -1593,6 +1593,7 @@ apiRouter.get("/datasets.json", async (req) => {
     return db.knexInstance().transaction(
         async (trx) => {
             const datasets = await db.knexRaw>(
+                trx,
                 `
                 WITH variable_counts AS (
                     SELECT
@@ -1622,19 +1623,18 @@ apiRouter.get("/datasets.json", async (req) => {
                 JOIN users mu ON mu.id=ad.metadataEditedByUserId
                 JOIN datasets d ON d.id=ad.id
                 ORDER BY ad.dataEditedAt DESC
-            `,
-                trx
+            `
             )
             const tags = await db.knexRaw<
                 Pick &
                     Pick
             >(
+                trx,
                 `
                 SELECT dt.datasetId, t.id, t.name FROM dataset_tags dt
                 JOIN tags t ON dt.tagId = t.id
-            `,
-                trx
+            `
             )
             const tagsByDatasetId = lodash.groupBy(tags, (t) => t.datasetId)
             for (const dataset of datasets) {
@@ -1657,6 +1657,7 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
     return db.knexInstance().transaction(
         async (trx) => {
             const dataset = await db.knexRawFirst>(
+                trx,
                 `
                 SELECT d.id,
                     d.namespace,
@@ -1680,7 +1681,6 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
                 JOIN users mu ON mu.id=d.metadataEditedByUserId
                 WHERE d.id = ?
                 `,
-                trx,
                 [datasetId]
             )
@@ -1688,8 +1688,8 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
                 throw new JsonError(`No dataset by id '${datasetId}'`, 404)

             const zipFile = await db.knexRawFirst(
-                `SELECT filename FROM dataset_files WHERE datasetId=?`,
                 trx,
+                `SELECT filename FROM dataset_files WHERE datasetId=?`,
                 [datasetId]
             )
             if (zipFile) dataset.zipFile = zipFile
@@ -1698,12 +1698,12 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
                 DbRawVariable,
                 "id" | "name" | "description" | "display" | "catalogPath"
             >[] = await db.knexRaw(
+                trx,
                 `
                 SELECT v.id, v.name, v.description, v.display, v.catalogPath
                 FROM variables AS v
                 WHERE v.datasetId = ?
             `,
-                trx,
                 [datasetId]
             )
@@ -1715,6 +1715,7 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {

             // add all origins
             const origins: DbRawOrigin[] = await db.knexRaw(
+                trx,
                 `
                 select distinct
                     o.*
@@ -1723,7 +1724,6 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
                 join variables as v on ov.variableId = v.id
                 where v.datasetId = ?
             `,
-                trx,
                 [datasetId]
             )
@@ -1732,13 +1732,13 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
             dataset.origins = parsedOrigins

             const sources = await db.knexRaw(
+                trx,
                 `
                 SELECT s.id, s.name, s.description
                 FROM sources AS s
                 WHERE s.datasetId = ?
                 ORDER BY s.id ASC
             `,
-                trx,
                 [datasetId]
             )
@@ -1752,6 +1752,7 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
             })

             const charts = await db.knexRaw(
+                trx,
                 `
                 SELECT ${OldChart.listFields}
                 FROM charts
@@ -1762,7 +1763,6 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
                 WHERE v.datasetId = ?
                 GROUP BY charts.id
             `,
-                trx,
                 [datasetId]
             )
@@ -1771,25 +1771,25 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
             await Chart.assignTagsForCharts(charts as any)

             const tags = await db.knexRaw(
+                trx,
                 `
                 SELECT t.id, t.name
                 FROM tags t
                 JOIN dataset_tags dt ON dt.tagId = t.id
                 WHERE dt.datasetId = ?
             `,
-                trx,
                 [datasetId]
             )
             dataset.tags = tags

             const availableTags = await db.knexRaw(
+                trx,
                 `
                 SELECT t.id, t.name, p.name AS parentName
                 FROM tags AS t
                 JOIN tags AS p ON t.parentId=p.id
                 WHERE p.isBulkImport IS FALSE
-            `,
-                trx
+            `
             )
             dataset.availableTags = availableTags
@@ -1810,6 +1810,7 @@ apiRouter.put("/datasets/:datasetId", async (req: Request, res: Response) => {
     await knex.transaction(async (trx) => {
         const newDataset = (req.body as { dataset: any }).dataset
         await db.knexRaw(
+            trx,
             `
             UPDATE datasets
             SET
@@ -1818,7 +1819,6 @@ apiRouter.put("/datasets/:datasetId", async (req: Request, res: Response) => {
                 metadataEditedByUserId=?
             WHERE id=?
             `,
-            trx,
             [
                 newDataset.nonRedistributable,
                 new Date(),
@@ -1828,13 +1828,13 @@ apiRouter.put("/datasets/:datasetId", async (req: Request, res: Response) => {
         )

         const tagRows = newDataset.tags.map((tag: any) => [tag.id, datasetId])
-        await db.knexRaw(`DELETE FROM dataset_tags WHERE datasetId=?`, trx, [
+        await db.knexRaw(trx, `DELETE FROM dataset_tags WHERE datasetId=?`, [
             datasetId,
         ])
         if (tagRows.length)
             await db.knexRaw(
-                `INSERT INTO dataset_tags (tagId, datasetId) VALUES ?`,
                 trx,
+                `INSERT INTO dataset_tags (tagId, datasetId) VALUES ?`,
                 [tagRows]
             )
@@ -1862,8 +1862,8 @@ apiRouter.post(
         if (!dataset) throw new JsonError(`No dataset by id ${datasetId}`, 404)
         await knex.transaction(async (trx) => {
             await db.knexRaw(
-                `UPDATE datasets SET isArchived = 1 WHERE id=?`,
                 trx,
+                `UPDATE datasets SET isArchived = 1 WHERE id=?`,
                 [datasetId]
             )
         })
@@ -1893,22 +1893,22 @@ apiRouter.delete(
         await knex.transaction(async (trx) => {
             await db.knexRaw(
-                `DELETE d FROM country_latest_data AS d JOIN variables AS v ON d.variable_id=v.id WHERE v.datasetId=?`,
                 trx,
+                `DELETE d FROM country_latest_data AS d JOIN variables AS v ON d.variable_id=v.id WHERE v.datasetId=?`,
                 [datasetId]
             )
             await db.knexRaw(
-                `DELETE FROM dataset_files WHERE datasetId=?`,
                 trx,
+                `DELETE FROM dataset_files WHERE datasetId=?`,
                 [datasetId]
             )
-            await db.knexRaw(`DELETE FROM variables WHERE datasetId=?`, trx, [
+            await db.knexRaw(trx, `DELETE FROM variables WHERE datasetId=?`, [
                 datasetId,
             ])
-            await db.knexRaw(`DELETE FROM sources WHERE datasetId=?`, trx, [
+            await db.knexRaw(trx, `DELETE FROM sources WHERE datasetId=?`, [
                 datasetId,
             ])
-            await db.knexRaw(`DELETE FROM datasets WHERE id=?`, trx, [
+            await db.knexRaw(trx, `DELETE FROM datasets WHERE id=?`, [
                 datasetId,
             ])
         })
@@ -1939,6 +1939,7 @@ apiRouter.post(
         if (req.body.republish) {
             await knex.transaction(async (trx) => {
                 await db.knexRaw(
+                    trx,
                     `
                     UPDATE charts
                     SET config = JSON_SET(config, "$.version", config->"$.version" + 1)
@@ -1949,7 +1950,6 @@ apiRouter.post(
                         WHERE variables.datasetId = ?
                     )
                 `,
-                    trx,
                     [datasetId]
                 )
             })
@@ -2344,19 +2344,19 @@ apiRouter.get("/sources/:sourceId.json", async (req: Request) => {
     return db.knexInstance().transaction(
         async (trx) => {
             const source = await db.knexRawFirst>(
+                trx,
                 `
                 SELECT s.id, s.name, s.description, s.createdAt, s.updatedAt, d.namespace
                 FROM sources AS s
                 JOIN active_datasets AS d ON d.id=s.datasetId
                 WHERE s.id=?`,
-                trx,
                 [sourceId]
             )
             if (!source)
                 throw new JsonError(`No source by id '${sourceId}'`, 404)

             source.variables = await db.knexRaw(
-                `SELECT id, name, updatedAt FROM variables WHERE variables.sourceId=?`,
                 trx,
+                `SELECT id, name, updatedAt FROM variables WHERE variables.sourceId=?`,
                 [sourceId]
             )
diff --git a/baker/GrapherBaker.tsx b/baker/GrapherBaker.tsx
index 36210949c2e..c07d9ac5a2e 100644
--- a/baker/GrapherBaker.tsx
+++ b/baker/GrapherBaker.tsx
@@ -476,13 +476,13 @@ export const bakeAllChangedGrapherPagesVariablesPngSvgAndDeleteRemovedGraphers =
    async (bakedSiteDir: string, knex: Knex) => {
        const chartsToBake: { id: number; config: string; slug: string }[] =
            await knexRaw(
+                knex,
                `
                SELECT id, config, config->>'$.slug' as slug
                FROM charts
                WHERE JSON_EXTRACT(config, "$.isPublished")=true
                ORDER BY JSON_EXTRACT(config, "$.slug") ASC
-                `,
-                knex
+                `
            )

        const newSlugs = chartsToBake.map((row) => row.slug)
diff --git a/baker/algolia/indexExplorersToAlgolia.ts b/baker/algolia/indexExplorersToAlgolia.ts
index 8a4eefe75bf..1fb98d234e6 100644
--- a/baker/algolia/indexExplorersToAlgolia.ts
+++ b/baker/algolia/indexExplorersToAlgolia.ts
@@ -123,20 +123,21 @@ const getExplorerRecords = async (
     // Fetch info about all charts used in explorers, as linked by the explorer_charts table
     const graphersUsedInExplorers = await db
         .knexRaw(
+            knex,
             `
             SELECT * FROM charts
             INNER JOIN (
                 SELECT DISTINCT chartId AS id FROM explorer_charts
             ) AS ec
             USING (id)
-            `,
-            knex
+            `
         )
         .then((charts) => charts.map((c) => parseChartsRow(c)))
         .then((charts) => keyBy(charts, "id"))

     const explorerRecords = await db
         .knexRaw>(
+            knex,
             `
             SELECT slug,
                 COALESCE(config->>"$.explorerSubtitle", "null") AS subtitle,
@@ -144,8 +145,7 @@ const getExplorerRecords = async (
                 COALESCE(config->>"$.blocks", "null") AS blocks
             FROM explorers
             WHERE isPublished = true
-            `,
-            knex
+            `
         )
         .then((results) =>
             results.flatMap(({ slug, title, subtitle, blocks }) =>
diff --git a/baker/syncRedirectsToGrapher.ts b/baker/syncRedirectsToGrapher.ts
index 2a8d188dbb7..ae57eb7e5b7 100644
--- a/baker/syncRedirectsToGrapher.ts
+++ b/baker/syncRedirectsToGrapher.ts
@@ -61,8 +61,8 @@ export const syncRedirectsToGrapher = async (): Promise => {
                 `Adding redirect: ${source} -> ${resolvedTarget} (${code})`
             )
             await db.knexRaw(
-                `INSERT INTO redirects (source, target, code) VALUES (?, ?, ?)`,
                 knex,
+                `INSERT INTO redirects (source, target, code) VALUES (?, ?, ?)`,
                 [source, resolvedTarget, code]
             )
         }
diff --git a/db/db.ts b/db/db.ts
index a387ad0445f..2d41999b8c3 100644
--- a/db/db.ts
+++ b/db/db.ts
@@ -115,17 +115,17 @@ export const knexTable = (table: string): Knex.QueryBuilder =>
     knexInstance().table(table)

 export const knexRaw = async (
-    str: string,
     knex: Knex,
+    str: string,
     params?: any[]
 ): Promise => (await knex.raw(str, params ?? []))[0]

 export const knexRawFirst = async (
-    str: string,
     knex: Knex,
+    str: string,
     params?: any[]
 ): Promise => {
-    const results = await knexRaw(str, knex, params)
+    const results = await knexRaw(knex, str, params)
     if (results.length === 0) return undefined
     return results[0]
 }
@@ -140,10 +140,10 @@ export const getSlugsWithPublishedGdocsSuccessors = async (
     knex: Knex
 ): Promise> => {
     return knexRaw(
+        knex,
         `-- sql
         select slug from posts_with_gdoc_publish_status
-        where isGdocPublished = TRUE`,
-        knex
+        where isGdocPublished = TRUE`
     ).then((rows) => new Set(rows.map((row: any) => row.slug)))
 }
@@ -151,6 +151,7 @@ export const getExplorerTags = async (
     knex: Knex
 ): Promise<{ slug: string; tags: DbChartTagJoin[] }[]> => {
     return knexRaw<{ slug: string; tags: string }>(
+        knex,
         `-- sql
         SELECT
             ext.explorerSlug as slug,
@@ -163,8 +164,7 @@ export const getExplorerTags = async (
             LEFT JOIN tags t ON ext.tagId = t.id
         GROUP BY
-            ext.explorerSlug`,
-        knex
+            ext.explorerSlug`
     ).then((rows) =>
         rows.map((row) => ({
             slug: row.slug,
@@ -186,6 +186,7 @@ export const getPublishedExplorersBySlug = async (
     const tags = await getExplorerTags(knex)
     const tagsBySlug = keyBy(tags, "slug")
     return knexRaw(
+        knex,
         `-- sql
         SELECT
             slug,
@@ -194,8 +195,7 @@ export const getPublishedExplorersBySlug = async (
         FROM
             explorers
         WHERE
-            isPublished = TRUE`,
-        knex
+            isPublished = TRUE`
     ).then((rows) => {
         const processed = rows.map((row: any) => {
             return {
@@ -214,6 +214,7 @@ export const getPublishedDataInsights = (
     limit = Number.MAX_SAFE_INTEGER // default to no limit
 ): Promise => {
     return knexRaw(
+        knex,
         `
         SELECT
             content->>'$.title' AS title,
@@ -227,7 +228,6 @@ export const getPublishedDataInsights = (
             AND publishedAt < NOW()
         ORDER BY publishedAt DESC
         LIMIT ?`,
-        knex,
         [limit]
     ).then((results) =>
         results.map((record: any) => ({
@@ -237,26 +237,38 @@ export const getPublishedDataInsights = (
         ) as Promise
 }

+export const getPublishedDataInsightCount = (): Promise => {
+    return knexRawFirst<{ count: number }>(
+        knexInstance(),
+        `
+        SELECT COUNT(*) AS count
+        FROM posts_gdocs
+        WHERE content->>'$.type' = '${OwidGdocType.DataInsight}'
+            AND published = TRUE
+            AND publishedAt < NOW()`
+    ).then((res) => res?.count ?? 0)
+}
+
 export const getTotalNumberOfCharts = (): Promise => {
     return knexRawFirst<{ count: number }>(
+        knexInstance(),
         `
         SELECT COUNT(*) AS count
         FROM charts
-        WHERE config->"$.isPublished" = TRUE`,
-        knexInstance()
+        WHERE config->"$.isPublished" = TRUE`
     ).then((res) => res?.count ?? 0)
 }

 export const getTotalNumberOfInUseGrapherTags = (): Promise => {
     return knexRawFirst<{ count: number }>(
+        knexInstance(),
         `
         SELECT COUNT(DISTINCT(tagId)) AS count
         FROM chart_tags
         WHERE chartId IN (
             SELECT id FROM charts
-            WHERE publishedAt IS NOT NULL)`,
-        knexInstance()
+            WHERE publishedAt IS NOT NULL)`
     ).then((res) => res?.count ?? 0)
 }
@@ -267,6 +279,7 @@ export const getHomepageId = (
     knex: Knex
 ): Promise => {
     return knexRawFirst<{ id: string }>(
+        knex,
         `-- sql
         SELECT
             posts_gdocs.id
@@ -274,7 +287,6 @@ export const getHomepageId = (
             posts_gdocs
         WHERE
             content->>'$.type' = '${OwidGdocType.Homepage}'
-            AND published = TRUE`,
-        knex
+            AND published = TRUE`
     ).then((result) => result?.id)
 }
diff --git a/db/model/Chart.ts b/db/model/Chart.ts
index b8a105e73f4..f9d073cee5e 100644
--- a/db/model/Chart.ts
+++ b/db/model/Chart.ts
@@ -390,6 +390,7 @@ export const getChartEmbedUrlsInPublishedWordpressPosts = async (
         DbPlainPostLink,
         "target" | "queryString"
     >[] = await db.knexRaw(
+        knex,
         `
         SELECT
             pl.target,
@@ -427,8 +428,7 @@ export const getChartEmbedUrlsInPublishedWordpressPosts = async (
         -- AND pgl.componentType = "chart"
         -- AND pg.content ->> '$.type' <> 'fragment'
         -- AND pg.published = 1
-        `,
-        knex
+        `
     )

     return chartSlugQueryString.map((row) => {
diff --git a/db/model/Dataset.ts b/db/model/Dataset.ts
index c059234ab47..22a2a3ca794 100644
--- a/db/model/Dataset.ts
+++ b/db/model/Dataset.ts
@@ -69,10 +69,10 @@ export async function writeDatasetCSV(
     // get variables of a dataset
     const variableIds = (
         await db.knexRaw<{ variableId: number }>(
+            knex,
             `SELECT id as variableId
             FROM variables v
             WHERE datasetId=?`,
-            knex,
             [datasetId]
         )
     ).map((row) => row.variableId)
@@ -99,13 +99,14 @@ export async function setTagsForDataset(
 ): Promise {
     await knex.transaction(async (trx: Knex) => {
         const tagRows = tagIds.map((tagId) => [tagId, datasetId])
-        await db.knexRaw(`DELETE FROM dataset_tags WHERE datasetId=?`, trx, [
+        await db.knexRaw(trx, `DELETE FROM dataset_tags WHERE datasetId=?`, [
             datasetId,
         ])
         if (tagRows.length)
             await db.knexRaw(
-                `INSERT INTO dataset_tags (tagId, datasetId) VALUES ?`,
                 trx,
+                `INSERT INTO dataset_tags (tagId, datasetId) VALUES ?`,
+                [tagRows]
             )
     })
@@ -122,8 +123,8 @@ export async function datasetToDatapackage(
         .knexTable(VariablesTableName)
         .where({ datasetId })) as DbRawVariable[]
     const tags = await db.knexRaw>(
-        `SELECT t.id, t.name FROM dataset_tags dt JOIN tags t ON t.id=dt.tagId WHERE dt.datasetId=?`,
         knex,
+        `SELECT t.id, t.name FROM dataset_tags dt JOIN tags t ON t.id=dt.tagId WHERE dt.datasetId=?`,
         [datasetId]
     )
diff --git a/db/model/Gdoc/GdocHomepage.ts b/db/model/Gdoc/GdocHomepage.ts
index 66aeb074b67..198f8dca111 100644
--- a/db/model/Gdoc/GdocHomepage.ts
+++ b/db/model/Gdoc/GdocHomepage.ts
@@ -34,14 +34,14 @@ export class GdocHomepage
     _validateSubclass = async (): Promise => {
         const errors: OwidGdocErrorMessage[] = []
         const otherPublishedHomepages = await db.knexRaw<{ id: string }>(
+            db.knexInstance(),
             `
-            SELECT 
+            SELECT
                 id
             FROM posts_gdocs
             WHERE content->>"$.type" = "${OwidGdocType.Homepage}"
             AND published = TRUE
             AND id != ?`,
-            db.knexInstance(),
             [this.id]
         )
         if (otherPublishedHomepages.length > 0) {
diff --git a/db/model/Pageview.ts b/db/model/Pageview.ts
index 5407e41bf3f..b31d39bde50 100644
--- a/db/model/Pageview.ts
+++ b/db/model/Pageview.ts
@@ -32,8 +32,8 @@ export async function getAnalyticsPageviewsByUrlObj(
     [url: string]: DbPlainAnalyticsPageview
 }> {
     const pageviews = await db.knexRaw(
-        "SELECT * FROM ??",
         knex,
+        "SELECT * FROM ??",
         [AnalyticsPageviewsTableName]
     )
diff --git a/db/model/Post.ts b/db/model/Post.ts
index 15ea71de098..29a125014df 100644
--- a/db/model/Post.ts
+++ b/db/model/Post.ts
@@ -165,6 +165,7 @@ export const getPostsFromSnapshots = async (
     filterFunc?: FilterFnPostRestApi
 ): Promise => {
     const rawPosts: Pick[] = await db.knexRaw(
+        knex,
         `
             SELECT wpApiSnapshot FROM ${postsTable}
             WHERE wpApiSnapshot IS NOT NULL
@@ -172,7 +173,6 @@ export const getPostsFromSnapshots = async (
             AND type IN (?)
             ORDER BY wpApiSnapshot->>'$.date' DESC;
         `,
-        knex,
         [postTypes]
     )
@@ -306,6 +306,7 @@ export const getWordpressPostReferencesByChartId = async (
     knex: Knex
 ): Promise => {
     const relatedWordpressPosts: PostReference[] = await db.knexRaw(
+        knex,
         `
         SELECT DISTINCT
             p.title,
@@ -342,7 +343,6 @@ export const getWordpressPostReferencesByChartId = async (
         ORDER BY
             p.title ASC
         `,
-        knex,
         [chartId]
     )
@@ -354,6 +354,7 @@ export const getGdocsPostReferencesByChartId = async (
     knex: Knex
 ): Promise => {
     const relatedGdocsPosts: PostReference[] = await db.knexRaw(
+        knex,
         `
         SELECT DISTINCT
             pg.content ->> '$.title' AS title,
@@ -382,7 +383,6 @@ export const getGdocsPostReferencesByChartId = async (
         ORDER BY
             pg.content ->> '$.title' ASC
         `,
-        knex,
         [chartId]
     )
@@ -558,6 +558,7 @@ export const getLatestWorkByAuthor = async (
     author: string
 ): Promise => {
     const rawLatestWorkLinks: DbRawLatestWork[] = await db.knexRaw(
+        knex,
         `
         SELECT
             pg.id,
@@ -574,7 +575,6 @@ export const getLatestWorkByAuthor = async (
             AND pg.published = TRUE
             AND pg.content->>'$.type' = "${OwidGdocType.Article}"
         `,
-        knex,
         [`%${author}%`]
     )
diff --git a/db/model/Redirect.ts b/db/model/Redirect.ts
index 9a29e3839b7..6c1ad4f609d 100644
--- a/db/model/Redirect.ts
+++ b/db/model/Redirect.ts
@@ -6,10 +6,10 @@ export const getRedirectsFromDb = async (
     knex: Knex
 ): Promise => {
     const redirectsFromDb: DbPlainRedirect[] = await db.knexRaw(
+        knex,
         `
         SELECT source, target, code FROM redirects
-        `,
-        knex
+        `
     )

     return redirectsFromDb
diff --git a/db/model/Variable.ts b/db/model/Variable.ts
index 4dd96016906..851ccc83fb5 100644
--- a/db/model/Variable.ts
+++ b/db/model/Variable.ts
@@ -143,8 +143,8 @@ export async function getMergedGrapherConfigForVariable(
         DbRawVariable,
         "grapherConfigAdmin" | "grapherConfigETL"
     >[] = await knexRaw(
-        `SELECT grapherConfigAdmin, grapherConfigETL FROM variables WHERE id = ?`,
         knex,
+        `SELECT grapherConfigAdmin, grapherConfigETL FROM variables WHERE id = ?`,
         [variableId]
     )
     if (!rows.length) return
@@ -255,11 +255,11 @@ export const getDataValue = async (
     const unit = (
         await knexRawFirst>(
+            knex,
             `-- sql
            SELECT unit
            FROM variables
            WHERE id = ?
            `,
-            knex,
             [variableId]
         )
     )?.unit
@@ -293,12 +293,12 @@ export const getOwidChartDimensionConfigForVariable = async (
     knex: Knex
 ): Promise => {
     const row = await db.knexRawFirst<{ dimensions: string }>(
+        knex,
         `
         SELECT config->"$.dimensions" AS dimensions
         FROM charts
         WHERE id = ?
        `,
-        knex,
         [chartId]
     )
     if (!row?.dimensions) return
@@ -314,8 +314,8 @@ export const getOwidVariableDisplayConfig = async (
     knex: Knex
 ): Promise => {
     const row = await knexRawFirst>(
-        `SELECT display FROM variables WHERE id = ?`,
         knex,
+        `SELECT display FROM variables WHERE id = ?`,
         [variableId]
     )
     if (!row?.display) return
@@ -516,7 +516,7 @@ export const readSQLasDF = async (
     params: any[],
     knex: Knex
 ): Promise => {
-    return createDataFrame(await db.knexRaw(sql, knex, params))
+    return createDataFrame(await db.knexRaw(knex, sql, params))
 }

 export async function getVariableOfDatapageIfApplicable(
@@ -729,7 +729,7 @@ const queryRegexSafe = async (
     knex: Knex
 ): Promise => {
     // catch regular expression failures in MySQL and return empty result
-    return await knexRaw(query, knex).catch((err) => {
+    return await knexRaw(knex, query).catch((err) => {
         if (err.message.includes("regular expression")) {
             return []
         }
diff --git a/db/refreshPageviewsFromDatasette.ts b/db/refreshPageviewsFromDatasette.ts
index 89d9e185e88..ff9a60fca57 100644
--- a/db/refreshPageviewsFromDatasette.ts
+++ b/db/refreshPageviewsFromDatasette.ts
@@ -60,7 +60,7 @@ async function downloadAndInsertCSV(knex: Knex): Promise {
     console.log("Parsed CSV data:", onlyValidRows.length, "rows")
     console.log("Columns:", analyticsPageviewsColumnNames.join(", "))

-    await db.knexRaw("TRUNCATE TABLE analytics_pageviews", knex)
+    await db.knexRaw(knex, "TRUNCATE TABLE analytics_pageviews")
     await knex.transaction(async (trx) => {
         await trx.batchInsert("analytics_pageviews", onlyValidRows)
     })
diff --git a/db/tests/basic.test.ts b/db/tests/basic.test.ts
index ce30c6b6af7..e2afa6a4549 100644
--- a/db/tests/basic.test.ts
+++ b/db/tests/basic.test.ts
@@ -131,8 +131,8 @@ test("knex interface", async () => {
     // Use raw queries, using ?? to specify the table name using the shared const value
     // The pick type is used to type the result row
     const usersFromRawQuery: Pick[] = await knexRaw(
-        "select email from users",
         trx,
+        "select email from users",
         []
     )
     expect(usersFromRawQuery.length).toBe(2)
diff --git a/devTools/updateImageHeights/update-image-heights.ts b/devTools/updateImageHeights/update-image-heights.ts
index 3178c801418..e8938c91ea9 100644
--- a/devTools/updateImageHeights/update-image-heights.ts
+++ b/devTools/updateImageHeights/update-image-heights.ts
@@ -7,10 +7,10 @@ async function updateImageHeights() {
     const transaction = await db.knexInstance().transaction()
     const filenames = await db
         .knexRaw<{ filename: string }>(
+            transaction,
             `SELECT DISTINCT filename
             FROM posts_gdocs_x_images pgxi
-            LEFT JOIN images i ON pgxi.imageId = i.id`,
-            transaction
+            LEFT JOIN images i ON pgxi.imageId = i.id`
         )
         .then((rows) => rows.map((row) => row.filename))
@@ -32,12 +32,12 @@ async function updateImageHeights() {
         if (image && image.originalHeight) {
             promises.push(
                 db.knexRaw(
+                    transaction,
                     `
                     UPDATE images
                     SET originalHeight = ?
                     WHERE filename = ?
                 `,
-                    transaction,
                     [image.originalHeight, filename]
                 )
             )
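
Note on the calling convention this patch introduces: `knexRaw` and `knexRawFirst` now take the Knex connection (or transaction) as the first argument, followed by the SQL string and optional bind parameters. A minimal sketch of the new argument order, not part of the patch itself — the import path, helper name, and the specific query below are illustrative assumptions; only the `knexRaw` signature and the `redirects` table columns (source, target, code) come from the changes above:

import { Knex } from "knex"
import * as db from "./db/db.js" // assumed import path for the db helpers

// Hypothetical helper showing the post-refactor call shape:
// connection/transaction first, then the SQL string, then bind parameters.
async function getRedirectTargets(knex: Knex): Promise<string[]> {
    const rows = await db.knexRaw<{ target: string }>(
        knex,
        `SELECT target FROM redirects WHERE code = ?`,
        [301]
    )
    return rows.map((row) => row.target)
}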