From 6de7c89983429837c291ffd255c9ff07bf58c4f8 Mon Sep 17 00:00:00 2001 From: Sam Willis Date: Tue, 17 Sep 2024 11:22:18 +0100 Subject: [PATCH 1/5] WIP WIP wip --- cibuild/linkimports.sh | 1 + packages/pglite/examples/basic.html | 2 +- packages/pglite/src/fs/tarUtils.ts | 9 +- packages/pglite/src/pglite.ts | 202 +++++++++++++++--- packages/pglite/src/postgresMod.ts | 1 + patches/pg_main.c | 10 +- .../src-include-port-wasm_common.h.diff | 8 +- 7 files changed, 197 insertions(+), 36 deletions(-) diff --git a/cibuild/linkimports.sh b/cibuild/linkimports.sh index 0f706f2a..55a4ab88 100644 --- a/cibuild/linkimports.sh +++ b/cibuild/linkimports.sh @@ -59,6 +59,7 @@ _getenv _interactive_one _interactive_read _interactive_write +_use_socketfile _lowerstr _main _main_repl diff --git a/packages/pglite/examples/basic.html b/packages/pglite/examples/basic.html index d46ee705..8f9ec1c8 100644 --- a/packages/pglite/examples/basic.html +++ b/packages/pglite/examples/basic.html @@ -21,7 +21,7 @@

PGlite Basic Example

console.log("Starting..."); // In-memory database: -const pg = new PGlite(); +const pg = new PGlite({ debug: 2 }); // Or, stored in indexedDB: // const pg = new PGlite('pgdata'); diff --git a/packages/pglite/src/fs/tarUtils.ts b/packages/pglite/src/fs/tarUtils.ts index c4829e03..27b79024 100644 --- a/packages/pglite/src/fs/tarUtils.ts +++ b/packages/pglite/src/fs/tarUtils.ts @@ -50,9 +50,14 @@ export async function loadTar( const files = untar(tarball) for (const file of files) { const filePath = pgDataDir + file.name + const dirPath = filePath.split('/') + const fileName = dirPath.pop() + + if (fileName?.startsWith('.')) { + continue + } // Ensure the directory structure exists - const dirPath = filePath.split('/').slice(0, -1) for (let i = 1; i <= dirPath.length; i++) { const dir = dirPath.slice(0, i).join('/') if (!FS.analyzePath(dir).exists) { @@ -80,7 +85,7 @@ function readDirectory(FS: FS, path: string) { const traverseDirectory = (currentPath: string) => { const entries = FS.readdir(currentPath) entries.forEach((entry) => { - if (entry === '.' || entry === '..') { + if (entry === '.' || entry === '..' 
|| entry.startsWith('.')) { return } const fullPath = currentPath + '/' + entry diff --git a/packages/pglite/src/pglite.ts b/packages/pglite/src/pglite.ts index ea76d725..a36c587a 100644 --- a/packages/pglite/src/pglite.ts +++ b/packages/pglite/src/pglite.ts @@ -25,6 +25,13 @@ import { NotificationResponseMessage, } from '@electric-sql/pg-protocol/messages' +const SOCKET_FILE = { + ILOCK: '/tmp/pglite/.s.PGSQL.5432.lock.in', + IN: '/tmp/pglite/.s.PGSQL.5432.in', + OLOCK: '/tmp/pglite/.s.PGSQL.5432.lock.out', + OUT: '/tmp/pglite/.s.PGSQL.5432.out', +} + export class PGlite extends BasePGlite implements PGliteInterface, AsyncDisposable @@ -54,14 +61,20 @@ export class PGlite #protocolParser = new ProtocolParser() - // These are the current ArrayBuffer that is being read or written to + #queryInBuffer?: ArrayBuffer + #queryOutChunks?: Uint8Array[] + + // These are the current /dev/blob ArrayBuffer that is being read or written to // during a query, such as COPY FROM or COPY TO. - #queryReadBuffer?: ArrayBuffer - #queryWriteChunks?: Uint8Array[] + #devBlobReadBuffer?: ArrayBuffer + #devBlobWriteChunks?: Uint8Array[] #notifyListeners = new Map void>>() #globalNotifyListeners = new Set<(channel: string, payload: string) => void>() + #socketInDevId?: number + #socketOutDevId?: number + /** * Create a new PGlite instance * @param dataDir The directory to store the database files @@ -227,6 +240,11 @@ export class PGlite throw new Error(`Unknown package: ${remotePackageName}`) }, preRun: [ + (mod: any) => { + console.log('preRun: clearing socket files') + this.#clearSocketFiles(mod) + console.log('preRun: socket files cleared') + }, (mod: any) => { // Register /dev/blob device // This is used to read and write blobs when used in COPY TO/FROM @@ -243,7 +261,7 @@ export class PGlite length: number, position: number, ) => { - const buf = this.#queryReadBuffer + const buf = this.#devBlobReadBuffer if (!buf) { throw new Error( 'No /dev/blob File or Blob provided to read from', @@ 
-264,12 +282,14 @@ export class PGlite length: number, _position: number, ) => { - this.#queryWriteChunks ??= [] - this.#queryWriteChunks.push(buffer.slice(offset, offset + length)) + this.#devBlobWriteChunks ??= [] + this.#devBlobWriteChunks.push( + buffer.slice(offset, offset + length), + ) return length }, llseek: (stream: any, offset: number, whence: number) => { - const buf = this.#queryReadBuffer + const buf = this.#devBlobReadBuffer if (!buf) { throw new Error('No /dev/blob File or Blob provided to llseek') } @@ -355,6 +375,11 @@ export class PGlite await loadTar(this.mod.FS, options.loadDataDir, PGDATA) } + // Clear the socket files + console.log('init: clearing socket files') + this.#clearSocketFiles() + console.log('init: socket files cleared') + // Check and log if the database exists if (this.mod.FS.analyzePath(PGDATA + '/PG_VERSION').exists) { this.#log('pglite: found DB, resuming') @@ -412,6 +437,9 @@ export class PGlite // TODO: only sync here if initdb did init db. await this.syncToFs() + // Setup the socket files for the query protocol IO + this.#setupSocketDevices() + this.#ready = true // Set the search path to public for this connection @@ -426,6 +454,126 @@ export class PGlite } } + #clearSocketFiles(mod?: PostgresMod) { + mod = mod ?? this.mod! + // Remove any existing socket files - could be left over from a previous run + if (mod.FS.analyzePath(SOCKET_FILE.OLOCK).exists) { + mod.FS.unlink(SOCKET_FILE.OLOCK) + } + if (mod.FS.analyzePath(SOCKET_FILE.OLOCK).exists) { + mod.FS.unlink(SOCKET_FILE.OLOCK) + } + if (mod.FS.analyzePath(SOCKET_FILE.IN).exists) { + mod.FS.unlink(SOCKET_FILE.IN) + } + if (mod.FS.analyzePath(SOCKET_FILE.OUT).exists) { + mod.FS.unlink(SOCKET_FILE.OUT) + } + } + + #setupSocketDevices() { + const mod = this.mod! 
+ + // Register SOCKET_FILE.IN device + this.#socketInDevId = mod.FS.makedev(63, 0) + const inDevOpt = { + open: (_stream: any) => {}, + close: (_stream: any) => {}, + read: ( + _stream: any, + buffer: Uint8Array, + offset: number, + length: number, + position: number, + ) => { + const buf = this.#queryInBuffer + if (!buf) { + throw new Error(`No ${SOCKET_FILE.IN} Buffer provided to read from`) + } + const contents = new Uint8Array(buf) + if (position >= contents.length) return 0 + const size = Math.min(contents.length - position, length) + for (let i = 0; i < size; i++) { + buffer[offset + i] = contents[position + i] + } + return size + }, + write: ( + _stream: any, + _buffer: Uint8Array, + _offset: number, + _length: number, + _position: number, + ) => { + throw new Error('Not implemented') + }, + llseek: (stream: any, offset: number, whence: number) => { + const buf = this.#queryInBuffer + if (!buf) { + throw new Error(`No ${SOCKET_FILE.IN} Buffer provided to llseek`) + } + let position = offset + if (whence === 1) { + position += stream.position + } else if (whence === 2) { + position = new Uint8Array(buf).length + } + if (position < 0) { + throw new mod.FS.ErrnoError(28) + } + return position + }, + } + mod.FS.registerDevice(this.#socketInDevId!, inDevOpt) + + // Register SOCKET_FILE.OUT device + this.#socketOutDevId = mod.FS.makedev(62, 0) + const outDevOpt = { + open: (_stream: any) => {}, + close: (_stream: any) => {}, + read: ( + _stream: any, + _buffer: Uint8Array, + _offset: number, + _length: number, + _position: number, + ) => { + throw new Error('Not implemented') + }, + write: ( + _stream: any, + buffer: Uint8Array, + offset: number, + length: number, + _position: number, + ) => { + this.#queryOutChunks ??= [] + this.#queryOutChunks.push(buffer.slice(offset, offset + length)) + return length + }, + llseek: (_stream: any, _offset: number, _whence: number) => { + throw new Error('Not implemented') + }, + } + 
mod.FS.registerDevice(this.#socketOutDevId!, outDevOpt) + + this.#makeSocketFiles() + + mod._use_socketfile() + } + + #makeSocketFiles() { + console.log('pglite: making socket files') + const mod = this.mod! + if (!mod.FS.analyzePath(SOCKET_FILE.IN).exists) { + mod.FS.mkdev(SOCKET_FILE.IN, this.#socketInDevId!) + } + if (!mod.FS.analyzePath(SOCKET_FILE.OUT).exists) { + mod.FS.mkdev(SOCKET_FILE.OUT, this.#socketOutDevId!) + } + console.log('pglite: socket files made') + } + /** * The Postgres Emscripten Module */ @@ -496,14 +644,14 @@ export class PGlite * @param file The file to handle */ async _handleBlob(blob?: File | Blob) { - this.#queryReadBuffer = blob ? await blob.arrayBuffer() : undefined + this.#devBlobReadBuffer = blob ? await blob.arrayBuffer() : undefined } /** * Cleanup the current file */ async _cleanupBlob() { - this.#queryReadBuffer = undefined + this.#devBlobReadBuffer = undefined } /** @@ -511,11 +659,11 @@ export class PGlite * @returns The written blob */ async _getWrittenBlob(): Promise { - if (!this.#queryWriteChunks) { + if (!this.#devBlobWriteChunks) { return undefined } - const blob = new Blob(this.#queryWriteChunks) - this.#queryWriteChunks = undefined + const blob = new Blob(this.#devBlobWriteChunks) + this.#devBlobWriteChunks = undefined return blob } @@ -551,29 +699,33 @@ export class PGlite message: Uint8Array, { syncToFs = true }: ExecProtocolOptions = {}, ) { - const msg_len = message.length - const mod = this.mod! + // Make query available at /dev/query-in + this.#queryInBuffer = message - // >0 set buffer content type to wire protocol - // set buffer size so answer will be at size+0x2 pointer addr - mod._interactive_write(msg_len) + // Remove the lock files if they exist + const mod = this.mod! 
+ if (mod.FS.analyzePath(SOCKET_FILE.OLOCK).exists) { + mod.FS.unlink(SOCKET_FILE.OLOCK) + } + if (mod.FS.analyzePath(SOCKET_FILE.ILOCK).exists) { + mod.FS.unlink(SOCKET_FILE.ILOCK) + } - // copy whole buffer at addr 0x1 - mod.HEAPU8.set(message, 1) + this.#makeSocketFiles() // execute the message - mod._interactive_one() + this.#queryOutChunks = [] + this.mod!._interactive_one() - // Read responses from the buffer - const msg_start = msg_len + 2 - const msg_end = msg_start + mod._interactive_read() - const data = mod.HEAPU8.subarray(msg_start, msg_end) + // Read responses from SOCKET_FILE.OUT + const data = await new Blob(this.#queryOutChunks).arrayBuffer() + this.#queryOutChunks = undefined if (syncToFs) { await this.syncToFs() } - return data + return new Uint8Array(data) } /** diff --git a/packages/pglite/src/postgresMod.ts b/packages/pglite/src/postgresMod.ts index 5eb7a1ea..7ff99b4f 100644 --- a/packages/pglite/src/postgresMod.ts +++ b/packages/pglite/src/postgresMod.ts @@ -28,6 +28,7 @@ export interface PostgresMod _interactive_write: (msgLength: number) => void _interactive_one: () => void _interactive_read: () => number + _use_socketfile: () => void } type PostgresFactory = ( diff --git a/patches/pg_main.c b/patches/pg_main.c index 4ac51bec..2dbae2a9 100644 --- a/patches/pg_main.c +++ b/patches/pg_main.c @@ -1182,13 +1182,15 @@ extern void AsyncPostgresSingleUserMain(int single_argc, char *single_argv[], co #include "../postgresql/src/bin/initdb/initdb.c" - void use_socketfile(void) { - is_repl = true; - is_node = true; - } #undef PG_INITDB_MAIN #undef PG_MAIN #endif // __wasi__ + +EMSCRIPTEN_KEEPALIVE void use_socketfile(void) { + is_repl = true; + is_node = true; +} + EMSCRIPTEN_KEEPALIVE int main_repl(); EMSCRIPTEN_KEEPALIVE int main_repl() { diff --git a/patches/postgresql-wasm/src-include-port-wasm_common.h.diff b/patches/postgresql-wasm/src-include-port-wasm_common.h.diff index 01dc24fd..30f97490 100644 --- 
a/patches/postgresql-wasm/src-include-port-wasm_common.h.diff +++ b/patches/postgresql-wasm/src-include-port-wasm_common.h.diff @@ -24,10 +24,10 @@ +/* --------------- how to configure those when installed ? ---------------- */ + +// socket emulation via file, need to go in PGDATA for nodefs mount in web mode -+#define PGS_ILOCK "/tmp/pglite/base/.s.PGSQL.5432.lock.in" -+#define PGS_IN "/tmp/pglite/base/.s.PGSQL.5432.in" -+#define PGS_OLOCK "/tmp/pglite/base/.s.PGSQL.5432.lock.out" -+#define PGS_OUT "/tmp/pglite/base/.s.PGSQL.5432.out" ++#define PGS_ILOCK "/tmp/pglite/.s.PGSQL.5432.lock.in" ++#define PGS_IN "/tmp/pglite/.s.PGSQL.5432.in" ++#define PGS_OLOCK "/tmp/pglite/.s.PGSQL.5432.lock.out" ++#define PGS_OUT "/tmp/pglite/.s.PGSQL.5432.out" + + +#define PG_DEBUG_HEADER From 64b2b16bc89398aa11bf48bcbb9b0c4240c384af Mon Sep 17 00:00:00 2001 From: Sam Willis Date: Tue, 8 Oct 2024 15:00:24 +0100 Subject: [PATCH 2/5] Working --- packages/pglite/src/pglite.ts | 29 ----------------------------- 1 file changed, 29 deletions(-) diff --git a/packages/pglite/src/pglite.ts b/packages/pglite/src/pglite.ts index a36c587a..5018d1f0 100644 --- a/packages/pglite/src/pglite.ts +++ b/packages/pglite/src/pglite.ts @@ -240,11 +240,6 @@ export class PGlite throw new Error(`Unknown package: ${remotePackageName}`) }, preRun: [ - (mod: any) => { - console.log('preRun: clearing socket files') - this.#clearSocketFiles(mod) - console.log('preRun: socket files cleared') - }, (mod: any) => { // Register /dev/blob device // This is used to read and write blobs when used in COPY TO/FROM @@ -375,11 +370,6 @@ export class PGlite await loadTar(this.mod.FS, options.loadDataDir, PGDATA) } - // Clear the socket files - console.log('init: clearing socket files') - this.#clearSocketFiles() - console.log('init: socket files cleared') - // Check and log if the database exists if (this.mod.FS.analyzePath(PGDATA + '/PG_VERSION').exists) { this.#log('pglite: found DB, resuming') @@ -454,23 +444,6 @@ export 
class PGlite } } - #clearSocketFiles(mod?: PostgresMod) { - mod = mod ?? this.mod! - // Remove any existing socket files - could be left over from a previous run - if (mod.FS.analyzePath(SOCKET_FILE.OLOCK).exists) { - mod.FS.unlink(SOCKET_FILE.OLOCK) - } - if (mod.FS.analyzePath(SOCKET_FILE.OLOCK).exists) { - mod.FS.unlink(SOCKET_FILE.OLOCK) - } - if (mod.FS.analyzePath(SOCKET_FILE.IN).exists) { - mod.FS.unlink(SOCKET_FILE.IN) - } - if (mod.FS.analyzePath(SOCKET_FILE.OUT).exists) { - mod.FS.unlink(SOCKET_FILE.OUT) - } - } - #setupSocketDevices() { const mod = this.mod! @@ -563,7 +536,6 @@ export class PGlite } #makeSocketFiles() { - console.log('pglite: making socket files') const mod = this.mod! if (!mod.FS.analyzePath(SOCKET_FILE.IN).exists) { mod.FS.mkdev(SOCKET_FILE.IN, this.#socketInDevId!) @@ -571,7 +543,6 @@ export class PGlite if (!mod.FS.analyzePath(SOCKET_FILE.OUT).exists) { mod.FS.mkdev(SOCKET_FILE.OUT, this.#socketOutDevId!) } - console.log('pglite: socket files made') } /** From 0c0541c333895b1f3063fe5d38db3e8881f8d20e Mon Sep 17 00:00:00 2001 From: Sam Willis Date: Tue, 8 Oct 2024 15:09:14 +0100 Subject: [PATCH 3/5] revert change to example --- packages/pglite/examples/basic.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pglite/examples/basic.html b/packages/pglite/examples/basic.html index 8f9ec1c8..d46ee705 100644 --- a/packages/pglite/examples/basic.html +++ b/packages/pglite/examples/basic.html @@ -21,7 +21,7 @@

PGlite Basic Example

console.log("Starting..."); // In-memory database: -const pg = new PGlite({ debug: 2 }); +const pg = new PGlite(); // Or, stored in indexedDB: // const pg = new PGlite('pgdata'); From 4df00fbc293434ca442aa24cd28887b1c8194ea8 Mon Sep 17 00:00:00 2001 From: Sam Willis Date: Tue, 8 Oct 2024 17:37:11 +0100 Subject: [PATCH 4/5] Tweeks and tests (failing) --- packages/pglite/src/pglite.ts | 10 ++- packages/pglite/tests/large-query.test.ts | 100 ++++++++++++++++++++++ 2 files changed, 108 insertions(+), 2 deletions(-) create mode 100644 packages/pglite/tests/large-query.test.ts diff --git a/packages/pglite/src/pglite.ts b/packages/pglite/src/pglite.ts index 5018d1f0..91377ee2 100644 --- a/packages/pglite/src/pglite.ts +++ b/packages/pglite/src/pglite.ts @@ -31,6 +31,7 @@ const SOCKET_FILE = { OLOCK: '/tmp/pglite/.s.PGSQL.5432.lock.out', OUT: '/tmp/pglite/.s.PGSQL.5432.out', } +const PAGE_SIZE = 8192 export class PGlite extends BasePGlite @@ -465,7 +466,10 @@ export class PGlite } const contents = new Uint8Array(buf) if (position >= contents.length) return 0 - const size = Math.min(contents.length - position, length) + const size = Math.min( + PAGE_SIZE, + Math.min(contents.length - position, length), + ) for (let i = 0; i < size; i++) { buffer[offset + i] = contents[position + i] } @@ -521,7 +525,9 @@ export class PGlite _position: number, ) => { this.#queryOutChunks ??= [] - this.#queryOutChunks.push(buffer.slice(offset, offset + length)) + if (length > 0) { + this.#queryOutChunks.push(buffer.slice(offset, offset + length)) + } return length }, llseek: (_stream: any, _offset: number, _whence: number) => { diff --git a/packages/pglite/tests/large-query.test.ts b/packages/pglite/tests/large-query.test.ts new file mode 100644 index 00000000..f91bf9a9 --- /dev/null +++ b/packages/pglite/tests/large-query.test.ts @@ -0,0 +1,100 @@ +import { describe, it, expect } from 'vitest' +import { PGlite } from '../dist/index.js' + +describe('large query', () => { + it('8192 byte wire 
message', async () => { + const pg = await PGlite.create() + + await pg.exec('CREATE TABLE test (id SERIAL PRIMARY KEY, data TEXT);') + + const value = 'a'.repeat(8150) + const sql = `INSERT INTO test (data) VALUES ('${value}');` + + // 8192 page size, 6 bytes for the wire protocol header + expect(sql.length).toBe(8192 - 6) + + await pg.exec(sql) + + const res = await pg.query<{ data: string }>(` + SELECT * FROM test; + `) + + expect(res.rows.length).toBe(1) + expect(res.rows[0].data).toBe(value) + + pg.close() + }) + + it('8193 byte wire message', async () => { + const pg = await PGlite.create() + + await pg.exec('CREATE TABLE test (id SERIAL PRIMARY KEY, data TEXT);') + + const value = 'a'.repeat(8151) + const sql = `INSERT INTO test (data) VALUES ('${value}');` + + // 1 more than 8192 page size, 6 bytes for the wire protocol header + expect(sql.length).toBe(8193 - 6) + + await pg.exec(sql) + + const res = await pg.query<{ data: string }>(` + SELECT * FROM test; + `) + + expect(res.rows.length).toBe(1) + expect(res.rows[0].data).toBe(value) + + pg.close() + }) + + it('1mb value in insert and select', async () => { + const pg = await PGlite.create() + + await pg.exec(` + CREATE TABLE test (id SERIAL PRIMARY KEY, data TEXT); + `) + + // 1mb value + const value = 'a'.repeat(1_000_000) + + await pg.query('INSERT INTO test (data) VALUES ($1);', [value]) + + await pg.exec(` + INSERT INTO test (data) VALUES (${value}); + `) + + const res = await pg.query<{ data: string }>(` + SELECT * FROM test; + `) + + expect(res.rows.length).toBe(2) + expect(res.rows[0].data).toBe(value) + + pg.close() + + // sleep for GC to collect + await new Promise((resolve) => setTimeout(resolve, 100)) + }) + + it('1mb of SQL in query', async () => { + const pg = await PGlite.create() + + await pg.exec(` + CREATE TABLE test (id SERIAL PRIMARY KEY, data TEXT); + `) + + let sql = '' + for (let i = 0; i < 26316; i++) { + // 26316 * 38 = 1,000,008 bytes + sql += `INSERT INTO test (data) VALUES 
('a');\n` // 38b statement + } + + await pg.exec(sql) + + pg.close() + + // sleep for GC to collect + await new Promise((resolve) => setTimeout(resolve, 100)) + }) +}) From 2553edc0ddbf7f3976d9629e9aafd4c2950df7a4 Mon Sep 17 00:00:00 2001 From: Sam Willis Date: Tue, 8 Oct 2024 18:56:34 +0100 Subject: [PATCH 5/5] Add tests for large result sets --- packages/pglite/tests/large-query.test.ts | 80 +++++++++++++++++++++++ 1 file changed, 80 insertions(+) diff --git a/packages/pglite/tests/large-query.test.ts b/packages/pglite/tests/large-query.test.ts index f91bf9a9..64fa0a77 100644 --- a/packages/pglite/tests/large-query.test.ts +++ b/packages/pglite/tests/large-query.test.ts @@ -97,4 +97,84 @@ describe('large query', () => { // sleep for GC to collect await new Promise((resolve) => setTimeout(resolve, 100)) }) + + it('select 10k rows ~ 10mb', async () => { + const pg = await PGlite.create() + + await pg.exec(` + CREATE TABLE test (id SERIAL PRIMARY KEY, data TEXT); + `) + + // 1kb + const value = 'a'.repeat(1000) + + await pg.exec(` + INSERT INTO test (data) + SELECT 'Row ' || generate_series || '${value}' + FROM generate_series(1, 10000); + `) + + const res = await pg.query<{ data: string }>(` + SELECT * FROM test; + `) + + expect(res.rows.length).toBe(10000) + + pg.close() + + // sleep for GC to collect + await new Promise((resolve) => setTimeout(resolve, 100)) + }) + + it('select 100k rows ~ 100mb', async () => { + const pg = await PGlite.create() + + await pg.exec(` + CREATE TABLE test (id SERIAL PRIMARY KEY, data TEXT); + `) + + // 1kb + const value = 'a'.repeat(1000) + + await pg.exec(` + INSERT INTO test (data) + SELECT 'Row ' || generate_series || '${value}' + FROM generate_series(1, 100000); + `) + + const res = await pg.query<{ data: string }>(` + SELECT * FROM test; + `) + + expect(res.rows.length).toBe(100000) + + pg.close() + + // sleep for GC to collect + await new Promise((resolve) => setTimeout(resolve, 100)) + }) + + it('select 1m rows ~ 1gb', async 
() => { + const pg = await PGlite.create() + + await pg.exec(` + CREATE TABLE test (id SERIAL PRIMARY KEY, data TEXT); + `) + + // 1kb + const value = 'a'.repeat(1000) + + // This time only select from the series, not insert into the table + const res = await pg.query(` + SELECT 'Row ' || generate_series || '${value}' + FROM generate_series(1, 1000000); + `) + + expect(res.rows.length).toBe(1000000) + + pg.close() + + // sleep for GC to collect + await new Promise((resolve) => setTimeout(resolve, 100)) + }) })