diff --git a/bin/node-pg-migrate.ts b/bin/node-pg-migrate.ts index 5983165e..5dca9c28 100755 --- a/bin/node-pg-migrate.ts +++ b/bin/node-pg-migrate.ts @@ -43,6 +43,7 @@ const schemaArg = 'schema'; const createSchemaArg = 'create-schema'; const databaseUrlVarArg = 'database-url-var'; const migrationsDirArg = 'migrations-dir'; +const migrationsSubdirsArg = 'migrations-subdirs'; const migrationsTableArg = 'migrations-table'; const migrationsSchemaArg = 'migrations-schema'; const createMigrationsSchemaArg = 'create-migrations-schema'; @@ -80,6 +81,11 @@ const parser = yargs(process.argv.slice(2)) describe: 'The directory containing your migration files', type: 'string', }, + [migrationsSubdirsArg]: { + defaultDescription: 'false', + describe: 'Search for migrations in subdirectories', + type: 'boolean', + }, [migrationsTableArg]: { alias: 't', defaultDescription: '"pgmigrations"', @@ -149,7 +155,7 @@ const parser = yargs(process.argv.slice(2)) }, [migrationFilenameFormatArg]: { defaultDescription: '"timestamp"', - choices: ['timestamp', 'utc'], + choices: ['timestamp', 'utc', 'year/utc', 'year/timestamp'], describe: 'Prefix type of migration filename (Only valid with the create action)', type: 'string', @@ -236,6 +242,7 @@ if (dotenv) { } let MIGRATIONS_DIR = argv[migrationsDirArg]; +let MIGRATIONS_SUBDIRS = argv[migrationsSubdirsArg]; let DB_CONNECTION: string | ConnectionParameters | ClientConfig | undefined = process.env[argv[databaseUrlVarArg]]; let IGNORE_PATTERN = argv[ignorePatternArg]; @@ -352,6 +359,12 @@ function readJson(json: unknown): void { ); CREATE_SCHEMA = applyIf(CREATE_SCHEMA, createSchemaArg, json, isBoolean); MIGRATIONS_DIR = applyIf(MIGRATIONS_DIR, migrationsDirArg, json, isString); + MIGRATIONS_SUBDIRS = applyIf( + MIGRATIONS_SUBDIRS, + migrationsSubdirsArg, + json, + isBoolean + ); MIGRATIONS_SCHEMA = applyIf( MIGRATIONS_SCHEMA, migrationsSchemaArg, @@ -381,7 +394,11 @@ function readJson(json: unknown): void { MIGRATIONS_FILENAME_FORMAT, 
migrationFilenameFormatArg, json, - (val): val is `${FilenameFormat}` => val === 'timestamp' || val === 'utc' + (val): val is `${FilenameFormat}` => + val === 'timestamp' || + val === 'utc' || + val === 'year/utc' || + val === 'year/timestamp' ); TEMPLATE_FILE_NAME = applyIf( TEMPLATE_FILE_NAME, @@ -444,6 +461,7 @@ const action = argv._.shift(); // defaults MIGRATIONS_DIR ??= join(cwd(), 'migrations'); +MIGRATIONS_SUBDIRS ??= false; MIGRATIONS_FILE_LANGUAGE ??= 'js'; MIGRATIONS_FILENAME_FORMAT ??= 'timestamp'; MIGRATIONS_TABLE ??= 'pgmigrations'; @@ -558,6 +576,7 @@ if (action === 'create') { }, // eslint-disable-next-line @typescript-eslint/no-non-null-assertion dir: MIGRATIONS_DIR!, + migrationSubdirs: MIGRATIONS_SUBDIRS, ignorePattern: IGNORE_PATTERN, schema: SCHEMA, createSchema: CREATE_SCHEMA, diff --git a/docs/src/cli.md b/docs/src/cli.md index 9bc20e72..909e4c5e 100644 --- a/docs/src/cli.md +++ b/docs/src/cli.md @@ -80,31 +80,32 @@ More on that below. You can adjust defaults by passing arguments to `node-pg-migrate`: -| Argument | Aliases | Default | Description | -| --------------------------- | ------- | ------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `config-file` | `f` | `undefined` | The file with migration JSON config | -| `config-value` | | `db` | Name of config section with db options | -| `schema` | `s` | `public` | The schema(s) on which migration will be run, used to set `search_path` | -| `create-schema` | | `false` | Create the configured schema if it doesn't exist | -| `database-url-var` | `d` | `DATABASE_URL` | Name of env variable with database url string | -| `migrations-dir` | `m` | `migrations` | The directory containing your migration files | -| `migrations-schema` | | same value as `schema` | The schema storing table which migrations have been run | -| `create-migrations-schema` | | 
`false` | Create the configured migrations schema if it doesn't exist | -| `migrations-table` | `t` | `pgmigrations` | The table storing which migrations have been run | -| `ignore-pattern` | | `undefined` | Regex pattern for file names to ignore | -| `migration-filename-format` | | `utc` | Choose prefix of file, `utc` (`20200605075829074`) or `timestamp` (`1591343909074`) | -| `migration-file-language` | `j` | `js` | Language of the migration file to create (`js`, `ts` or `sql`) | -| `template-file-name` | | `undefined` | Utilize a custom migration template file with language inferred from its extension. The file should export the up method, accepting a MigrationBuilder instance. | -| `tsconfig` | | `undefined` | Path to tsconfig.json. Used to setup transpiling of TS migration files. (Also sets `migration-file-language` to typescript, if not overridden) | -| `envPath` | | `same level where it's invoked` | Retrieve the path to a .env file. This feature proves handy when dealing with nested projects or when referencing a global .env file. | -| `timestamp` | | `false` | Treats number argument to up/down migration as timestamp (running up migrations less or equal to timestamp or down migrations greater or equal to timestamp) | -| `check-order` | | `true` | Check order of migrations before running them, to switch it off supply `--no-check-order` | -| `single-transaction` | | `true` | Combines all pending migrations into a single transaction so that if any migration fails, all will be rolled back, to switch it off supply `--no-single-transaction` | -| `no-lock` | | `false` | Disables locking mechanism and checks | -| `fake` | | `false` | Mark migrations as run without actually performing them, (use with caution!) | -| `decamelize` | | `false` | Runs `decamelize` on table/column/etc. 
names | -| `verbose` | | `true` | Print all debug messages like DB queries run, to switch it off supply `--no-verbose` | -| `reject-unauthorized` | | `undefined` | Sets ssl `rejectUnauthorized` parameter. Use for e.g. self-signed certificates on the server. [see](https://node-postgres.com/announcements#2020-02-25) | +| Argument | Aliases | Default | Description | +| --------------------------- | ------- | ------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `config-file` | `f` | `undefined` | The file with migration JSON config | +| `config-value` | | `db` | Name of config section with db options | +| `schema` | `s` | `public` | The schema(s) on which migration will be run, used to set `search_path` | +| `create-schema` | | `false` | Create the configured schema if it doesn't exist | +| `database-url-var` | `d` | `DATABASE_URL` | Name of env variable with database url string | +| `migrations-dir` | `m` | `migrations` | The directory containing your migration files | +| `migrations-subdirs` | | `false` | Search for migration files in subdirectories | +| `migrations-schema` | | same value as `schema` | The schema storing table which migrations have been run | +| `create-migrations-schema` | | `false` | Create the configured migrations schema if it doesn't exist | +| `migrations-table` | `t` | `pgmigrations` | The table storing which migrations have been run | +| `ignore-pattern` | | `undefined` | Regex pattern for file names to ignore | +| `migration-filename-format` | | `utc` | Choose prefix of file, `utc` (`20200605075829074`) or `timestamp` (`1591343909074`), or either format prefixed with `year/` (`2020/20200605075829074` or `2020/1591343909074`) | +| `migration-file-language` | `j` | `js` | Language of the migration file to create (`js`, `ts` or `sql`) | +| `template-file-name` | | `undefined` | Utilize 
a custom migration template file with language inferred from its extension. The file should export the up method, accepting a MigrationBuilder instance. | +| `tsconfig` | | `undefined` | Path to tsconfig.json. Used to setup transpiling of TS migration files. (Also sets `migration-file-language` to typescript, if not overridden) | +| `envPath` | | `same level where it's invoked` | Retrieve the path to a .env file. This feature proves handy when dealing with nested projects or when referencing a global .env file. | +| `timestamp` | | `false` | Treats number argument to up/down migration as timestamp (running up migrations less or equal to timestamp or down migrations greater or equal to timestamp) | +| `check-order` | | `true` | Check order of migrations before running them, to switch it off supply `--no-check-order` | +| `single-transaction` | | `true` | Combines all pending migrations into a single transaction so that if any migration fails, all will be rolled back, to switch it off supply `--no-single-transaction` | +| `no-lock` | | `false` | Disables locking mechanism and checks | +| `fake` | | `false` | Mark migrations as run without actually performing them, (use with caution!) | +| `decamelize` | | `false` | Runs `decamelize` on table/column/etc. names | +| `verbose` | | `true` | Print all debug messages like DB queries run, to switch it off supply `--no-verbose` | +| `reject-unauthorized` | | `undefined` | Sets ssl `rejectUnauthorized` parameter. Use for e.g. self-signed certificates on the server. 
[see](https://node-postgres.com/announcements#2020-02-25) | For SSL connection to DB you can set `PGSSLMODE` environment variable to value from [list](https://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-CONNECT-SSLMODE) other diff --git a/src/migration.ts b/src/migration.ts index 6900b0f2..10b32773 100644 --- a/src/migration.ts +++ b/src/migration.ts @@ -8,7 +8,7 @@ import { createReadStream, createWriteStream } from 'node:fs'; import { mkdir, readdir } from 'node:fs/promises'; -import { basename, extname, join, resolve } from 'node:path'; +import { extname, join, relative, resolve } from 'node:path'; import { cwd } from 'node:process'; import type { QueryResult } from 'pg'; import type { DBConnection } from './db'; @@ -32,6 +32,8 @@ export interface RunMigration { export enum FilenameFormat { timestamp = 'timestamp', utc = 'utc', + year_timestamp = 'year/timestamp', + year_utc = 'year/utc', } export interface CreateOptionsTemplate { @@ -44,20 +46,24 @@ export interface CreateOptionsDefault { } export type CreateOptions = { - filenameFormat?: FilenameFormat | `${FilenameFormat}`; + filenameFormat?: FilenameFormat; } & (CreateOptionsTemplate | CreateOptionsDefault); const SEPARATOR = '_'; export async function loadMigrationFiles( dir: string, + subdirs: boolean, ignorePattern?: string ): Promise { - const dirContent = await readdir(`${dir}/`, { withFileTypes: true }); + const dirContent = await readdir(`${dir}/`, { + withFileTypes: true, + recursive: subdirs, + }); const files = dirContent - .map((file) => (file.isFile() || file.isSymbolicLink() ? file.name : null)) - .filter((file): file is string => Boolean(file)) - .sort(); + .flatMap((file) => (file.isFile() || file.isSymbolicLink() ? [file] : [])) + .map((file) => relative(dir, join(file.parentPath ?? file.path, file.name))) + .sort((a, b) => a.localeCompare(b, 'en', { numeric: true })); const filter = new RegExp(`^(${ignorePattern})$`); return ignorePattern === undefined ? 
files @@ -70,10 +76,11 @@ function getSuffixFromFileName(fileName: string): string { async function getLastSuffix( dir: string, + subdirs: boolean, ignorePattern?: string ): Promise { try { - const files = await loadMigrationFiles(dir, ignorePattern); + const files = await loadMigrationFiles(dir, subdirs, ignorePattern); return files.length > 0 ? getSuffixFromFileName(files[files.length - 1]) : undefined; @@ -83,7 +90,11 @@ async function getLastSuffix( } export function getTimestamp(logger: Logger, filename: string): number { - const prefix = filename.split(SEPARATOR)[0]; + let prefix = filename.split(SEPARATOR)[0]; + + // Strip off a leading year, if present + prefix = prefix?.replace(/^\d{4}\//, ''); + if (prefix && /^\d+$/.test(prefix)) { if (prefix.length === 13) { // timestamp: 1391877300255 @@ -111,10 +122,13 @@ export function getTimestamp(logger: Logger, filename: string): number { async function resolveSuffix( directory: string, + subdirs: boolean, options: CreateOptionsDefault ): Promise { const { language, ignorePattern } = options; - return language || (await getLastSuffix(directory, ignorePattern)) || 'js'; + return ( + language || (await getLastSuffix(directory, subdirs, ignorePattern)) || 'js' + ); } export class Migration implements RunMigration { @@ -122,6 +136,7 @@ export class Migration implements RunMigration { static async create( name: string, directory: string, + subdirs: boolean, options: CreateOptions = {} ): Promise { const { filenameFormat = FilenameFormat.timestamp } = options; @@ -129,22 +144,17 @@ export class Migration implements RunMigration { // ensure the migrations directory exists await mkdir(directory, { recursive: true }); - const now = new Date(); - const time = - filenameFormat === FilenameFormat.utc - ? now.toISOString().replace(/\D/g, '') - : now.valueOf(); - const templateFileName = 'templateFileName' in options ? 
resolve(cwd(), options.templateFileName) : resolve( join('node_modules', 'node-pg-migrate', 'templates'), - `migration-template.${await resolveSuffix(directory, options)}` + `migration-template.${await resolveSuffix(directory, subdirs, options)}` ); const suffix = getSuffixFromFileName(templateFileName); // file name looks like migrations/1391877300255_migration-title.js + const time = Migration.filenameTime(filenameFormat); const newFile = join(directory, `${time}${SEPARATOR}${name}.${suffix}`); // copy the default migration template to the new file location @@ -186,7 +196,10 @@ export class Migration implements RunMigration { ) { this.db = db; this.path = migrationPath; - this.name = basename(migrationPath, extname(migrationPath)); + this.name = relative(options.dir, migrationPath).replace( + new RegExp(`${extname(migrationPath)}$`), + '' + ); this.timestamp = getTimestamp(logger, this.name); this.up = up; this.down = down; @@ -195,6 +208,34 @@ export class Migration implements RunMigration { this.logger = logger; } + private static filenameTime(filenameFormat: FilenameFormat): string { + const now = new Date(); + switch (filenameFormat) { + case FilenameFormat.utc: { + return now.toISOString().replace(/\D/g, ''); + } + + case FilenameFormat.year_utc: { + return join( + now.getFullYear().toString(), + now.toISOString().replace(/\D/g, '') + ); + } + + case FilenameFormat.timestamp: { + return now.valueOf().toString(); + } + + case FilenameFormat.year_timestamp: { + return join(now.getFullYear().toString(), now.valueOf().toString()); + } + + default: { + throw new Error(`unrecognized filenameFormat: ${filenameFormat}`); + } + } + } + _getMarkAsRun(action: MigrationAction): string { const schema = getMigrationTableSchema(this.options); diff --git a/src/runner.ts b/src/runner.ts index 729da38d..ecc83ff1 100644 --- a/src/runner.ts +++ b/src/runner.ts @@ -32,7 +32,11 @@ async function loadMigrations( ): Promise { try { let shorthands: ColumnDefinitions = {}; - const 
files = await loadMigrationFiles(options.dir, options.ignorePattern); + const files = await loadMigrationFiles( + options.dir, + options.migrationSubdirs ?? false, + options.ignorePattern + ); const migrations = await Promise.all( files.map(async (file) => { diff --git a/src/types.ts b/src/types.ts index d0b59684..4f821f47 100644 --- a/src/types.ts +++ b/src/types.ts @@ -787,6 +787,11 @@ export interface RunnerOptionConfig { */ dir: string; + /** + * If migration discovery should consider subdirectories. + */ + migrationSubdirs?: boolean; + /** * Check order of migrations before running them. */ diff --git a/test/__snapshots__/migration.spec.ts.snap b/test/__snapshots__/migration.spec.ts.snap new file mode 100644 index 00000000..93f45802 --- /dev/null +++ b/test/__snapshots__/migration.spec.ts.snap @@ -0,0 +1,122 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`migration > loadMigrationFiles > should load cockroach migration files 1`] = ` +[ + "004_table.js", + "006_table_rename.js", + "008_column_drop.js", + "009_column.js", + "011_column_rename.js", + "012_column_alter.js", + "014_add_constraint.js", + "017_drop_constraint.js", + "019_add_index.js", + "020_create_index.js", + "062_view_test.js", + "062_view.js", +] +`; + +exports[`migration > loadMigrationFiles > should load migration files from subdirectories 1`] = ` +[ + "001/004_table.js", + "001/006_table_rename.js", + "002/008_column_drop.js", + "002/009_column.js", +] +`; + +exports[`migration > loadMigrationFiles > should load normal migration files 1`] = ` +[ + "001_noop.js", + "002_callback.js", + "003_promise.js", + "004_table.js", + "005_table_test.js", + "006_table_rename.js", + "007_table_rename_test.js", + "008_column_drop.js", + "009_column.js", + "010_column_test.js", + "011_column_rename.js", + "012_column_alter.js", + "013_column_alter_test.js", + "014_add_constraint.js", + "015_add_constraint_test.js", + "016_rename_constraint.js", + "017_drop_constraint.js", + 
"018_drop_constraint_test.js", + "019_add_index.js", + "020_drop_index.js", + "021_add_type.js", + "022_add_type_test.js", + "023_add_type_attribute.js", + "024_add_type_attribute_test.js", + "025_set_type_attribute.js", + "026_set_type_attribute_test.js", + "027_add_type_value.js", + "028_add_type_value_test.js", + "029_rename_type_attribute.js", + "030_rename_type_attribute_test.js", + "031_drop_type_attribute.js", + "032_drop_type_attribute_test.js", + "033_drop_type.js", + "034_drop_type_test.js", + "035_role_add.js", + "036_role_alter.js", + "037_role_rename.js", + "038_role_drop.js", + "039_function_create.js", + "040_function_rename.js", + "041_function_test.js", + "042_function_drop.js", + "043_trigger_create_rename.js", + "044_trigger_test.js", + "045_trigger_drop.js", + "046_domain_create_rename.js", + "047_domain_check.js", + "048_domain_drop.js", + "049_sequence_create_rename.js", + "050_sequence_test.js", + "051_sequence_alter.js", + "052_sequence_alter_test.js", + "053_sequence_drop.js", + "054_operator_create.js", + "055_operator_test.js", + "056_operator_drop.js", + "057_policy_create.js", + "058_policy_test.js", + "059_policy_drop.js", + "060_column_comment.js", + "061_column_comment_test.js", + "062_view.js", + "063_view_test.js", + "064_alter_view_column.js", + "065_materialized_view.js", + "066_materialized_view_test.js", + "067_materialized_view_alter.js", + "068_extension.js", + "069_comments.js", + "070_extension_schema.js", + "071_constraint_name_for_foreign_key.js", + "072_alter_column_comment.js", + "073_alter_column_comment_test.js", + "074_rename_type_value.js", + "075_drop_index_schema.js", + "076_create_table_like.js", + "077_create_table_generated_column.js", + "078_add_column_if_not_exists.js", + "079_drop_index_schema.js", + "080_create_table_generated_column_take_2.js", + "081_temporary_table.js", + "082_view_options.js", + "083_alter_column_sequence.js", + "084_drop_unique_index.js", + "085_grant_tables_schemas_roles.js", + 
"086_grant_test.js", + "087_revoke_tables_schemas_roles.js", + "088_revoke_test.js", + "089_grant_reverse.js", + "090_create_cast.js", +] +`; diff --git a/test/__snapshots__/runner.spec.ts.snap b/test/__snapshots__/runner.spec.ts.snap new file mode 100644 index 00000000..65153f2e --- /dev/null +++ b/test/__snapshots__/runner.spec.ts.snap @@ -0,0 +1,93 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`runner > should execute a basic down migration 1`] = `[]`; + +exports[`runner > should execute a basic up migration 1`] = ` +[ + { + "id": 1, + "name": "004_table", + "run_on": 2024-01-01T00:00:00.000Z, + }, + { + "id": 2, + "name": "006_table_rename", + "run_on": 2024-01-02T00:00:00.000Z, + }, + { + "id": 3, + "name": "008_column_drop", + "run_on": 2024-01-03T00:00:00.000Z, + }, + { + "id": 4, + "name": "009_column", + "run_on": 2024-01-04T00:00:00.000Z, + }, + { + "id": 5, + "name": "011_column_rename", + "run_on": 2024-01-05T00:00:00.000Z, + }, + { + "id": 6, + "name": "012_column_alter", + "run_on": 2024-01-06T00:00:00.000Z, + }, + { + "id": 7, + "name": "014_add_constraint", + "run_on": 2024-01-07T00:00:00.000Z, + }, + { + "id": 8, + "name": "017_drop_constraint", + "run_on": 2024-01-08T00:00:00.000Z, + }, + { + "id": 9, + "name": "019_add_index", + "run_on": 2024-01-09T00:00:00.000Z, + }, + { + "id": 10, + "name": "020_create_index", + "run_on": 2024-01-10T00:00:00.000Z, + }, + { + "id": 11, + "name": "062_view", + "run_on": 2024-01-11T00:00:00.000Z, + }, + { + "id": 12, + "name": "062_view_test", + "run_on": 2024-01-12T00:00:00.000Z, + }, +] +`; + +exports[`runner > should execute a basic up migration in nested directories 1`] = ` +[ + { + "id": 1, + "name": "001/004_table", + "run_on": 2024-01-01T00:00:00.000Z, + }, + { + "id": 2, + "name": "001/006_table_rename", + "run_on": 2024-01-02T00:00:00.000Z, + }, + { + "id": 3, + "name": "002/008_column_drop", + "run_on": 2024-01-03T00:00:00.000Z, + }, + { + "id": 4, + "name": 
"002/009_column", + "run_on": 2024-01-04T00:00:00.000Z, + }, +] +`; diff --git a/test/migration.spec.ts b/test/migration.spec.ts index a2e2ab59..6f95dae8 100644 --- a/test/migration.spec.ts +++ b/test/migration.spec.ts @@ -1,7 +1,7 @@ import type { Mock } from 'vitest'; import { beforeEach, describe, expect, it, vi } from 'vitest'; import type { DBConnection } from '../src/db'; -import { getTimestamp, Migration } from '../src/migration'; +import { getTimestamp, loadMigrationFiles, Migration } from '../src/migration'; import type { Logger, RunnerOption } from '../src/types'; const callbackMigration = '1414549381268_names.js'; @@ -20,7 +20,7 @@ describe('migration', () => { error: () => null, }; - const options = { migrationsTable } as RunnerOption; + const options = { migrationsTable, dir: '' } as RunnerOption; let queryMock: Mock; @@ -32,16 +32,26 @@ describe('migration', () => { describe('getTimestamp', () => { it('should get timestamp for normal timestamp', () => { const now = Date.now(); - - expect(getTimestamp(logger, String(now))).toBe(now); + const timestamp = String(now); + expect(getTimestamp(logger, timestamp)).toBe(now); }); it('should get timestamp for shortened iso format', () => { const now = new Date(); + const timestamp = now.toISOString().replace(/\D/g, ''); + expect(getTimestamp(logger, timestamp)).toBe(now.valueOf()); + }); - expect(getTimestamp(logger, now.toISOString().replace(/\D/g, ''))).toBe( - now.valueOf() - ); + it('should get timestamp for normal timestamp with year prefix', () => { + const now = Date.now(); + const timestamp = `${new Date(now).getFullYear()}/${now}`; + expect(getTimestamp(logger, timestamp)).toBe(now); + }); + + it('should get timestamp for shortened iso format with year prefix', () => { + const now = new Date(); + const timestamp = `${new Date(now).getFullYear()}/${now.toISOString().replace(/\D/g, '')}`; + expect(getTimestamp(logger, timestamp)).toBe(now.valueOf()); }); }); @@ -194,4 +204,41 @@ describe('migration', () 
=> { expect(queryMock).toHaveBeenNthCalledWith(4, 'COMMIT;'); }); }); + + describe('loadMigrationFiles', () => { + it('should load cockroach migration files', async () => { + const migrations = await loadMigrationFiles('test/cockroach', false); + expect(migrations).toHaveLength(12); + expect(migrations).toMatchSnapshot(); + }); + + it('should load normal migration files', async () => { + const migrations = await loadMigrationFiles('test/migrations', false); + expect(migrations).toHaveLength(90); + expect(migrations).toMatchSnapshot(); + }); + + it('should load migration files from subdirectories', async () => { + const migrations = await loadMigrationFiles( + 'test/migrations-subdir', + true + ); + expect(migrations).toHaveLength(4); + expect(migrations).toMatchSnapshot(); + }); + + it('should sort migration files correctly', async () => { + const migrations = await loadMigrationFiles( + 'test/migrations-subdir-numeric-ordering', + true + ); + expect(migrations).toStrictEqual([ + '3/a.sql', + '123/c.sql', + '123/test/b.sql', + '2014/d.sql', + 'test/e.sql', + ]); + }); + }); }); diff --git a/test/migrations-subdir-numeric-ordering/123/c.sql b/test/migrations-subdir-numeric-ordering/123/c.sql new file mode 100644 index 00000000..e69de29b diff --git a/test/migrations-subdir-numeric-ordering/123/test/b.sql b/test/migrations-subdir-numeric-ordering/123/test/b.sql new file mode 100644 index 00000000..e69de29b diff --git a/test/migrations-subdir-numeric-ordering/2014/d.sql b/test/migrations-subdir-numeric-ordering/2014/d.sql new file mode 100644 index 00000000..e69de29b diff --git a/test/migrations-subdir-numeric-ordering/3/a.sql b/test/migrations-subdir-numeric-ordering/3/a.sql new file mode 100644 index 00000000..e69de29b diff --git a/test/migrations-subdir-numeric-ordering/test/e.sql b/test/migrations-subdir-numeric-ordering/test/e.sql new file mode 100644 index 00000000..e69de29b diff --git a/test/migrations-subdir/001/004_table.js 
b/test/migrations-subdir/001/004_table.js new file mode 100644 index 00000000..bca0b86c --- /dev/null +++ b/test/migrations-subdir/001/004_table.js @@ -0,0 +1,21 @@ +exports.up = (pgm) => { + pgm.createTable('t1', { + id: 'id', + string: { type: 'text', notNull: true }, + created: { + type: 'timestamp', + notNull: true, + default: pgm.func('current_timestamp'), + }, + }); + pgm.createTable( + 't2', + { + id1: 'id', + id2: { type: 'integer', primaryKey: true }, + }, + { + ifNotExists: true, + } + ); +}; diff --git a/test/migrations-subdir/001/006_table_rename.js b/test/migrations-subdir/001/006_table_rename.js new file mode 100644 index 00000000..99958a4d --- /dev/null +++ b/test/migrations-subdir/001/006_table_rename.js @@ -0,0 +1,3 @@ +exports.up = (pgm) => { + pgm.renameTable('t2', 't2r'); +}; diff --git a/test/migrations-subdir/002/008_column_drop.js b/test/migrations-subdir/002/008_column_drop.js new file mode 100644 index 00000000..e563ad34 --- /dev/null +++ b/test/migrations-subdir/002/008_column_drop.js @@ -0,0 +1,9 @@ +exports.up = (pgm) => { + pgm.dropColumns('t1', 'string'); +}; + +exports.down = (pgm) => { + pgm.addColumns('t1', { + string: { type: 'text', notNull: false }, + }); +}; diff --git a/test/migrations-subdir/002/009_column.js b/test/migrations-subdir/002/009_column.js new file mode 100644 index 00000000..0a86b8dd --- /dev/null +++ b/test/migrations-subdir/002/009_column.js @@ -0,0 +1,5 @@ +exports.up = (pgm) => { + pgm.addColumns('t1', { + nr: { type: 'integer', unique: true }, + }); +}; diff --git a/test/runner.spec.ts b/test/runner.spec.ts index 2886d262..5c659c12 100644 --- a/test/runner.spec.ts +++ b/test/runner.spec.ts @@ -2,6 +2,86 @@ import type { ClientBase } from 'pg'; import { describe, expect, it, vi } from 'vitest'; import { runner } from '../src'; +type ExecutedMigrations = Array<{ id: number; name: string; run_on: Date }>; + +function mockMigrationClient( + getExecutedMigrations: () => ExecutedMigrations +): ClientBase { + let id 
= 1; +  return { +    query: vi.fn((query) => { +      switch (query) { +        case 'SELECT pg_try_advisory_lock(7241865325823964) AS "lockObtained"': { +          return Promise.resolve({ +            rows: [{ lockObtained: true }], // lock obtained +          }); +        } + +        case "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'pgmigrations'": { +          return Promise.resolve({ +            rows: [{}], // migrations table exists +          }); +        } + +        case 'CREATE TABLE "public"."pgmigrations" ( id SERIAL PRIMARY KEY, name varchar(255) NOT NULL, run_on timestamp NOT NULL)': { +          return Promise.resolve([{}]); // migration table created +        } + +        case 'SELECT name FROM "public"."pgmigrations" ORDER BY run_on, id': { +          return Promise.resolve({ +            rows: getExecutedMigrations(), +          }); +        } + +        case 'BEGIN;': { +          return Promise.resolve({}); // transaction started +        } + +        case 'COMMIT;': { +          return Promise.resolve({}); // transaction committed +        } + +        default: { +          if ( +            query.startsWith( +              'INSERT INTO "public"."pgmigrations" (name, run_on) VALUES' +            ) +          ) { +            const name: string = +              /VALUES \('([^']+)'/.exec(query as string)?.[1] ?? 
'failed'; // migration name + +            // insert migration +            getExecutedMigrations().push({ +              name, +              run_on: new Date( +                +new Date('2024-01-01T00:00:00Z') + +                  (id - 1) * (24 * 60 * 60 * 1000) +              ), +              id: id++, +            }); + +            return Promise.resolve({}); // migration inserted +          } + +          if ( +            query.startsWith('DELETE FROM "public"."pgmigrations" WHERE name=') +          ) { +            const name = /WHERE name='([^']+)'/.exec(query as string)?.[1]; +            const migrations = getExecutedMigrations(); +            const index = migrations.findIndex((m) => m.name === name); +            if (index === -1) throw new Error(`migration not found: ${name}`); +            migrations.splice(index, 1); +            return Promise.resolve({}); // migration deleted +          } +        } +      } + +      // bypass migration queries +      return Promise.resolve({ rows: [{}] }); +    }), +  } as unknown as ClientBase; +} + describe('runner', () => {   it('should return a function', () => {     expect(runner).toBeTypeOf('function');   }); @@ -31,73 +111,9 @@ describe('runner', () => {   );   it('should execute a basic up migration', async () => { -    const executedMigrations: Array<{ -      id: number; -      name: string; -      run_on: Date; -    }> = []; -    let id = 1; - -    const dbClient = { -      query: vi.fn((query) => { -        switch (query) { -          case 'SELECT pg_try_advisory_lock(7241865325823964) AS "lockObtained"': { -            return Promise.resolve({ -              rows: [{ lockObtained: true }], // lock obtained -            }); -          } - -          case "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'pgmigrations'": { -            return Promise.resolve({ -              rows: [], // no migration table -            }); -          } - -          case 'CREATE TABLE "public"."pgmigrations" ( id SERIAL PRIMARY KEY, name varchar(255) NOT NULL, run_on timestamp NOT NULL)': { -            return Promise.resolve({}); // migration table created -          } - -          case 'SELECT name FROM "public"."pgmigrations" ORDER BY run_on, id': { -            return Promise.resolve({ -              rows: executedMigrations, -            }); -          } - -          case 'BEGIN;': { -            return Promise.resolve({}); // transaction started -          } - -          case 'COMMIT;': { -            return 
Promise.resolve({}); // transaction committed - } - - default: { - if ( - query.startsWith( - 'INSERT INTO "public"."pgmigrations" (name, run_on) VALUES' - ) - ) { - const name: string = - /VALUES \('([^']+)'/.exec(query as string)?.[1] ?? 'failed'; // migration name - - // insert migration - executedMigrations.push({ - id: id++, - name, - run_on: new Date(), - }); - - return Promise.resolve({}); // migration inserted - } - - break; - } - } + const executedMigrations: ExecutedMigrations = []; - // bypass migration queries - return Promise.resolve({ rows: [{}] }); - }), - } as unknown as ClientBase; + const dbClient = mockMigrationClient(() => executedMigrations); await expect( runner({ @@ -110,75 +126,16 @@ describe('runner', () => { }) ).resolves.not.toThrow(); expect(executedMigrations).toHaveLength(12); + expect(executedMigrations).toMatchSnapshot(); }); it('should execute a basic down migration', async () => { - const executedMigrations: Array<{ - id: number; - name: string; - run_on: Date; - }> = [ + const executedMigrations: ExecutedMigrations = [ { id: 1, name: '004_table', run_on: new Date() }, { id: 2, name: '006_table_rename', run_on: new Date() }, ]; - const dbClient = { - query: vi.fn((query) => { - switch (query) { - case 'SELECT pg_try_advisory_lock(7241865325823964) AS "lockObtained"': { - return Promise.resolve({ - rows: [{ lockObtained: true }], // lock obtained - }); - } - - case "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'pgmigrations'": { - return Promise.resolve({ - rows: [{}], // migration table exists - }); - } - - case "SELECT constraint_name FROM information_schema.table_constraints WHERE table_schema = 'public' AND table_name = 'pgmigrations' AND constraint_type = 'PRIMARY KEY'": { - return Promise.resolve({}); // no primary key constraint found - } - - case 'ALTER TABLE "public"."pgmigrations" ADD PRIMARY KEY (id)': { - return Promise.resolve({}); // primary key constraint added - } - 
- case 'SELECT name FROM "public"."pgmigrations" ORDER BY run_on, id': { - return Promise.resolve({ - rows: executedMigrations, - }); - } - - case 'BEGIN;': { - return Promise.resolve({}); // transaction started - } - - case 'COMMIT;': { - return Promise.resolve({}); // transaction committed - } - - default: { - if ( - query.startsWith( - 'DELETE FROM "public"."pgmigrations" WHERE name=' - ) - ) { - // delete migration - executedMigrations.pop(); - - return Promise.resolve({}); // migration deleted - } - - break; - } - } - - // bypass migration queries - return Promise.resolve({ rows: [{}] }); - }), - } as unknown as ClientBase; + const dbClient = mockMigrationClient(() => executedMigrations); await expect( runner({ @@ -192,5 +149,26 @@ describe('runner', () => { }) ).resolves.not.toThrow(); expect(executedMigrations).toHaveLength(0); + expect(executedMigrations).toMatchSnapshot(); + }); + + it('should execute a basic up migration in nested directories', async () => { + const executedMigrations: ExecutedMigrations = []; + + const dbClient = mockMigrationClient(() => executedMigrations); + + await expect( + runner({ + dbClient, + migrationsTable: 'pgmigrations', + // We use cockroach migrations for now, as they are more simple + // We either could mock the migration files later or define specific migrations for unit-testing + dir: 'test/migrations-subdir', + migrationSubdirs: true, + direction: 'up', + }) + ).resolves.not.toThrow(); + expect(executedMigrations).toHaveLength(4); + expect(executedMigrations).toMatchSnapshot(); }); });