
Add migrations tracking to database
This commit introduces a new 'migrations' table in the database to keep track of executed database migrations, and adds corresponding migration handling to 'DatabaseDriver.ts'. The migration files in the 'migrations' directory are now sorted and executed in order, and after a successful execution the migration file's name is recorded in the 'migrations' table. This change was necessary to maintain the order and consistency of database migrations, and to prevent the re-execution of an already executed migration script.
sopyb committed Oct 6, 2023
1 parent d2c472d commit 6d81e08
Showing 3 changed files with 142 additions and 8 deletions.
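
As an illustration of the tracking behaviour described above: once the driver has run its setup, the applied scripts can be listed straight from the new table. This is a hypothetical check for reference only, using just the columns the commit defines:

-- list the migration scripts the driver has recorded, in the order they ran
SELECT id, createdAt, filename
FROM migrations
ORDER BY id;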
15 changes: 15 additions & 0 deletions src/sql/migrations/0-example.sql
@@ -0,0 +1,15 @@
-- example entry
-- please use {number}-{name} as your format to keep migrations in a consistent order
-- Top comment explaining why this is needed
-- 'migrations' table to track executed DB migrations
create table if not exists migrations
(
-- Each migration gets a unique ID
id BIGINT AUTO_INCREMENT PRIMARY KEY,

-- Timestamp of when the migration was executed
createdAt timestamp default current_timestamp() not null,

-- Name of the executed migration script
filename varchar(255) not null
)
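
For comparison, a follow-up migration under the same naming convention might look like the sketch below. The filename and index name are invented for illustration; only sessionTable and its expires column come from this repository's setup.sql:

-- 1-add-session-expires-index.sql (hypothetical example, not part of this commit)
-- Top comment explaining why this is needed:
-- speed up the expiry check on sessionTable; sorts after 0-example.sql by its leading number
create index if not exists sessionTable_expires_idx
    on sessionTable (expires);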
10 changes: 9 additions & 1 deletion src/sql/setup.sql
@@ -169,4 +169,12 @@ select `navigo`.`sessionTable`.`id` AS `id`,
`navigo`.`sessionTable`.`token` AS `token`,
`navigo`.`sessionTable`.`expires` AS `expires`
from `navigo`.`sessionTable`
where `navigo`.`sessionTable`.`expires` >= current_timestamp();

create table if not exists migrations
(
id BIGINT AUTO_INCREMENT
PRIMARY KEY,
createdAt timestamp default current_timestamp() not null,
filename varchar(255) not null
)
125 changes: 118 additions & 7 deletions src/util/Database/DatabaseDriver.ts
@@ -1,10 +1,10 @@
import EnvVars from '@src/constants/EnvVars';
- import { createPool, Pool } from 'mariadb';
+ import {createPool, Pool} from 'mariadb';
import path from 'path';
import logger from 'jet-logger';
- import { User } from '@src/types/models/User';
- import { GenericModelClass } from '@src/types/models/GenericModelClass';
- import {readFileSync} from 'fs';
+ import {User} from '@src/types/models/User';
+ import {GenericModelClass} from '@src/types/models/GenericModelClass';
+ import {readdirSync, readFileSync} from 'fs';

// database credentials
const { DBCred } = EnvVars;
@@ -59,6 +59,9 @@ const trustedColumns = [
// session
'token',
'expires',

// migrations
'filename',
];

// data interface
@@ -90,6 +93,12 @@ interface CountQueryPacket extends RowDataPacket {
result: bigint;
}

interface IMigrations {
id: bigint;
createdAt: Date;
filename: string;
}

export interface ResultSetHeader {
fieldCount: number;
affectedRows: number;
@@ -418,6 +427,59 @@ class Database {
}
}

// TODO: less duplicate code
protected async _insertUnsafe(
table: string,
data: object | Record<string, DataType>,
discardId = true,
): Promise<bigint> {
const { keys, values } = processData(data, discardId);

// create sql query - insert into table (keys) values (values)
// ? for values to be replaced by params
const sql = `INSERT INTO ${table} (${keys.join(',')})
VALUES (${values.map(() => '?').join(',')})`;
// execute query
const result = (await this._queryUnsafe(sql, values)) as ResultSetHeader;

// return insert id
let insertId = -1n;
if (result) {
insertId = BigInt(result.insertId);
}
return insertId;
}

protected async getAllUnsafe<T>(table: string): Promise<T[] | null> {
// create sql query - select * from table
const sql = `SELECT *
FROM ${table}`;

// execute query
const result = await this._queryUnsafe(sql);

// check if T has any properties that are JSON
// if so parse them
return parseResult(result) as T[] | null;
}

protected async _queryUnsafe(
sql: string,
params?: unknown[],
): Promise<ResultSetHeader | RowDataPacket[]> {
if (!sql) return Promise.reject(new Error('No SQL query'));

// get connection from pool
const conn = await Database.pool.getConnection();
try {
// execute query and return result
return await conn.query(sql, params);
} finally {
// release connection
await conn.release();
}
}

protected async _getWhere<T>(
table: string,
like: boolean,
@@ -465,7 +527,7 @@ class Database {
const result = await this._query(sql, queryBuilderResult.params);

return BigInt(
- (result as CountDataPacket[])[0][`SUM(${column})`] as number || 0,
+ ((result as CountDataPacket[])[0][`SUM(${column})`] as number) || 0,
);
}

@@ -488,8 +550,57 @@
protected async _setup() {
const pathToSQLScripts = path.join(__dirname, '..', '..', 'sql');

const numberRegex = /^\d+/gi;

// run the setup sql
await this._executeFile(path.join(pathToSQLScripts, 'setup.sql'));

// read the migrations directory and sort files by number id
const migrationFiles = readdirSync(
path.join(pathToSQLScripts, 'migrations'),
)
.filter((file) => file.endsWith('.sql'))
.sort((fileA, fileB) => {
const numberA = parseInt(fileA.match(numberRegex)?.[0] || '');
const numberB = parseInt(fileB.match(numberRegex)?.[0] || '');

return numberA - numberB;
});

try {
await this._executeFile(path.join(pathToSQLScripts, 'setup.sql'));
} catch (e) {
logger.err(e);
}

const migrations = await this.getAllUnsafe<IMigrations>('migrations');

if (migrations) {
migrations.forEach((migration) => {
const index = migrationFiles.indexOf(migration.filename);
if (index > -1) migrationFiles.splice(index, 1);
});
}

try {
await this._executeFile(path.join(pathToSQLScripts, 'create.sql'));
if (migrationFiles.length) logger.info('Starting migrations...');
for (const migrationFile of migrationFiles) {
logger.info(`Now running: ${migrationFile}...`);
await this._executeFile(
path.join(pathToSQLScripts, 'migrations', migrationFile),
);

const success = await this._insertUnsafe('migrations', {
id: -1n,
createdAt: new Date(),
filename: migrationFile,
});

if (success < 0n) {
throw new Error('Failed to insert migration file into database');
}
logger.info(`Successfully ran: ${migrationFile}`);
}
} catch (e) {
logger.err(e);
} finally {
@@ -529,7 +640,7 @@ class Database {
// get connection from pool
const conn = await Database.pool.getConnection();

- for(const query of queries) {
+ for (const query of queries) {
if (query) await conn.query(query);
}

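Tying the driver changes together: after a migration script succeeds, _setup calls _insertUnsafe('migrations', ...), which builds a parameterised INSERT from the object's keys. Roughly the statement it sends is sketched below; the two ? placeholders are bound to new Date() and the migration's filename, and the id field is dropped because discardId defaults to true:

INSERT INTO migrations (createdAt, filename)
VALUES (?, ?);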
