Skip to content

Commit

Permalink
Swap to postgres.js so it is compatible with Cloudflare Workers
Browse files Browse the repository at this point in the history
  • Loading branch information
samwillis committed Nov 28, 2024
1 parent 0d1e75c commit 2a70daa
Show file tree
Hide file tree
Showing 4 changed files with 91 additions and 122 deletions.
93 changes: 37 additions & 56 deletions demos/linearlite/db/load_data.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import createPool, { sql } from '@databases/pg'
import postgres from 'postgres'
import { generateIssues } from './generate_data.js'

if (!process.env.DATABASE_URL) {
Expand All @@ -10,66 +10,47 @@ const ISSUES_TO_LOAD = process.env.ISSUES_TO_LOAD || 512
const issues = generateIssues(ISSUES_TO_LOAD)

console.info(`Connecting to Postgres at ${DATABASE_URL}`)
const db = createPool(DATABASE_URL)
const sql = postgres(DATABASE_URL)

function createBatchInsertQuery(table, columns, dataArray) {
const valuesSql = dataArray.map(
(data) =>
sql`(${sql.join(
columns.map((column) => sql.value(data[column])),
sql`, `
)})`
)
async function batchInsert(sql, table, columns, dataArray, batchSize = 1000) {
for (let i = 0; i < dataArray.length; i += batchSize) {
const batch = dataArray.slice(i, i + batchSize)

return sql`
INSERT INTO ${sql.ident(table)} (${sql.join(
columns.map((col) => sql.ident(col)),
sql`, `
)})
VALUES ${sql.join(valuesSql, sql`, `)}
`
}

const issueCount = issues.length
let commentCount = 0

await db.tx(async (db) => {
await db.query(sql`SET CONSTRAINTS ALL DEFERRED;`) // disable FK checks

const batchSize = 1000
for (let i = 0; i < issueCount; i += batchSize) {
const issueBatch = issues
.slice(i, i + batchSize)
.map(({ comments: _, ...rest }) => rest)
await db.query(
createBatchInsertQuery('issue', Object.keys(issueBatch[0]), issueBatch)
)

process.stdout.write(
`Loaded ${Math.min(i + batchSize, issueCount)} of ${issueCount} issues\r`
)
}

const allComments = issues.flatMap((issue) => issue.comments)
commentCount = allComments.length

for (let i = 0; i < allComments.length; i += batchSize) {
const commentBatch = allComments.slice(i, i + batchSize)
await db.query(
createBatchInsertQuery(
'comment',
Object.keys(commentBatch[0]),
commentBatch
)
)
await sql`
INSERT INTO ${sql(table)} ${sql(batch, columns)}
`

process.stdout.write(
`Loaded ${Math.min(i + batchSize, commentCount)} of ${commentCount} comments\r`
`Loaded ${Math.min(i + batchSize, dataArray.length)} of ${dataArray.length} ${table}s\r`
)
}
})
}

const issueCount = issues.length
let commentCount = 0

try {
  await sql.begin(async (sql) => {
    // Defer FK constraint checks so rows can be inserted in any order
    // within the transaction (comments reference issues).
    await sql`SET CONSTRAINTS ALL DEFERRED`

    // Insert issues, stripping the nested comments array before insert.
    const issuesData = issues.map(({ comments: _, ...rest }) => rest)
    if (issuesData.length > 0) {
      const issueColumns = Object.keys(issuesData[0])
      await batchInsert(sql, 'issue', issueColumns, issuesData)
    }

    // Insert the comments collected from every issue.
    const allComments = issues.flatMap((issue) => issue.comments)
    commentCount = allComments.length
    if (allComments.length > 0) {
      const commentColumns = Object.keys(allComments[0])
      await batchInsert(sql, 'comment', commentColumns, allComments)
    }
  })

  process.stdout.write(`\n`)
  console.info(`Loaded ${issueCount} issues with ${commentCount} comments.`)
} catch (error) {
  console.error('Error loading data:', error)
  throw error
} finally {
  // Always close the connection pool so the script can exit cleanly.
  await sql.end()
}
3 changes: 1 addition & 2 deletions demos/linearlite/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@
"fractional-indexing": "^3.2.0",
"jsonwebtoken": "^9.0.2",
"lodash.debounce": "^4.0.8",
"pg": "^8.13.0",
"postgres": "^3.4.3",
"react": "^18.3.1",
"react-beautiful-dnd": "^13.1.1",
"react-dom": "^18.3.1",
Expand All @@ -65,7 +65,6 @@
"@hono/node-server": "^1.8.0"
},
"devDependencies": {
"@databases/pg": "^5.5.0",
"@databases/pg-migrations": "^5.0.3",
"@faker-js/faker": "^8.4.1",
"@tailwindcss/typography": "^0.5.10",
Expand Down
102 changes: 44 additions & 58 deletions demos/linearlite/server.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { Hono } from 'hono'
import { cors } from 'hono/cors'
import pg from 'pg'
import postgres from 'postgres'
import {
ChangeSet,
changeSetSchema,
Expand All @@ -9,11 +9,12 @@ import {
} from './src/utils/changes'
import { serve } from '@hono/node-server'

const DATABASE_URL = process.env.DATABASE_URL
const DATABASE_URL =
process.env.DATABASE_URL ??
'postgresql://postgres:password@localhost:54321/linearlite'

const { Client } = pg
const client = new Client(DATABASE_URL)
client.connect()
// Create postgres connection
const sql = postgres(DATABASE_URL)

const app = new Hono()

Expand All @@ -22,10 +23,10 @@ app.use('/*', cors())

// Routes
app.get('/', async (c) => {
const result = await client.query(
"SELECT 'ok' as status, version() as postgres_version, now() as server_time"
)
return c.json(result.rows[0])
const result = await sql`
SELECT 'ok' as status, version() as postgres_version, now() as server_time
`
return c.json(result[0])
})

app.post('/apply-changes', async (c) => {
Expand All @@ -48,72 +49,57 @@ console.log(`Server is running on port ${port}`)

serve({
fetch: app.fetch,
port
port,
})

/**
 * Apply a ChangeSet atomically: every issue and comment change runs inside a
 * single transaction. postgres.js `sql.begin` commits when the callback
 * resolves and rolls back automatically when it throws, so no explicit
 * BEGIN/COMMIT/ROLLBACK statements (leftovers from the old `pg` client) are
 * needed, and a catch that only rethrows adds nothing.
 *
 * @param changes The validated ChangeSet with issue and comment changes
 * @returns `{ success: true }` once the transaction commits
 * @throws Whatever the underlying statements throw (transaction rolled back)
 */
async function applyChanges(changes: ChangeSet): Promise<{ success: boolean }> {
  const { issues, comments } = changes
  await sql.begin(async (sql) => {
    for (const issue of issues) {
      await applyTableChange('issue', issue, sql)
    }
    for (const comment of comments) {
      await applyTableChange('comment', comment, sql)
    }
  })
  return { success: true }
}

/**
* Apply a change to the specified table in the database.
* @param tableName The name of the table to apply the change to
* @param change The change object containing the data to be applied
*/
async function applyTableChange(
tableName: 'issue' | 'comment',
change: IssueChange | CommentChange
change: IssueChange | CommentChange,
sql: postgres.TransactionSql
): Promise<void> {
const {
id,
modified_columns,
new: isNew,
deleted,
} = change
const { id, modified_columns, new: isNew, deleted } = change

if (deleted) {
await client.query(
`
DELETE FROM ${tableName} WHERE id = $1
-- ON CONFLICT (id) DO NOTHING
`,
[id]
)
await sql`
DELETE FROM ${sql(tableName)} WHERE id = ${id}
`
} else if (isNew) {
const columns = modified_columns || [];
const values = columns.map(col => change[col]);
await client.query(
`
INSERT INTO ${tableName} (id, ${columns.join(', ')})
VALUES ($1, ${columns.map((_, index) => `$${index + 2}`).join(', ')})
-- ON CONFLICT (id) DO NOTHING
`,
[id, ...values]
);
const columns = modified_columns || []
const values = columns.map((col) => change[col])

await sql`
INSERT INTO ${sql(tableName)} (id, ${sql(columns)})
VALUES (${id}, ${sql(values)})
`
} else {
const columns = modified_columns || [];
const values = columns.map(col => change[col]);
const updateSet = columns.map((col, index) => `${col} = $${index + 2}`).join(', ');
await client.query(
`
UPDATE ${tableName} SET ${updateSet} WHERE id = $1
-- ON CONFLICT (id) DO NOTHING
`,
[id, ...values]
);
const columns = modified_columns || []
const values = columns.map((col) => change[col])
const updates = columns
.map((col) => ({ [col]: change[col] }))
.reduce((acc, curr) => ({ ...acc, ...curr }), {})

await sql`
UPDATE ${sql(tableName)}
SET ${sql(updates)}
WHERE id = ${id}
`
}
}
15 changes: 9 additions & 6 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

0 comments on commit 2a70daa

Please sign in to comment.