Skip to content

Commit

Permalink
Cast REAL values to text in oplog triggers to work around the bug in SQLite's json_object

Browse files Browse the repository at this point in the history
  • Loading branch information
kevin-dp committed Oct 26, 2023
1 parent 2c6fa6b commit 55af617
Show file tree
Hide file tree
Showing 8 changed files with 421 additions and 68 deletions.
120 changes: 92 additions & 28 deletions clients/typescript/src/migrators/triggers.ts
Original file line number Diff line number Diff line change
@@ -1,17 +1,23 @@
import { Statement } from '../util'
import { dedent } from 'ts-dedent'

type ForeignKey = {
table: string
childKey: string
parentKey: string
}

type Table = {
type ColumnName = string
type SQLiteType = string
type ColumnTypes = Record<ColumnName, SQLiteType>

export type Table = {
tableName: string
namespace: string
columns: string[]
primary: string[]
columns: ColumnName[]
primary: ColumnName[]
foreignKeys: ForeignKey[]
columnTypes: ColumnTypes
}

type TableFullName = string
Expand All @@ -36,25 +42,25 @@ export function generateOplogTriggers(
tableFullName: TableFullName,
table: Omit<Table, 'foreignKeys'>
): Statement[] {
const { tableName, namespace, columns, primary } = table
const { tableName, namespace, columns, primary, columnTypes } = table

const newPKs = joinColsForJSON(primary, 'new')
const oldPKs = joinColsForJSON(primary, 'old')
const newRows = joinColsForJSON(columns, 'new')
const oldRows = joinColsForJSON(columns, 'old')
const newPKs = joinColsForJSON(primary, columnTypes, 'new')
const oldPKs = joinColsForJSON(primary, columnTypes, 'old')
const newRows = joinColsForJSON(columns, columnTypes, 'new')
const oldRows = joinColsForJSON(columns, columnTypes, 'old')

return [
`
dedent`
-- Toggles for turning the triggers on and off
INSERT OR IGNORE INTO _electric_trigger_settings(tablename,flag) VALUES ('${tableFullName}', 1);
`,
`
dedent`
/* Triggers for table ${tableName} */
-- ensures primary key is immutable
DROP TRIGGER IF EXISTS update_ensure_${namespace}_${tableName}_primarykey;
`,
`
dedent`
CREATE TRIGGER update_ensure_${namespace}_${tableName}_primarykey
BEFORE UPDATE ON ${tableFullName}
BEGIN
Expand All @@ -69,11 +75,11 @@ export function generateOplogTriggers(
END;
END;
`,
`
dedent`
-- Triggers that add INSERT, UPDATE, DELETE operation to the _opslog table
DROP TRIGGER IF EXISTS insert_${namespace}_${tableName}_into_oplog;
`,
`
dedent`
CREATE TRIGGER insert_${namespace}_${tableName}_into_oplog
AFTER INSERT ON ${tableFullName}
WHEN 1 == (SELECT flag from _electric_trigger_settings WHERE tablename == '${tableFullName}')
Expand All @@ -82,10 +88,10 @@ export function generateOplogTriggers(
VALUES ('${namespace}', '${tableName}', 'INSERT', json_object(${newPKs}), json_object(${newRows}), NULL, NULL);
END;
`,
`
dedent`
DROP TRIGGER IF EXISTS update_${namespace}_${tableName}_into_oplog;
`,
`
dedent`
CREATE TRIGGER update_${namespace}_${tableName}_into_oplog
AFTER UPDATE ON ${tableFullName}
WHEN 1 == (SELECT flag from _electric_trigger_settings WHERE tablename == '${tableFullName}')
Expand All @@ -94,10 +100,10 @@ export function generateOplogTriggers(
VALUES ('${namespace}', '${tableName}', 'UPDATE', json_object(${newPKs}), json_object(${newRows}), json_object(${oldRows}), NULL);
END;
`,
`
dedent`
DROP TRIGGER IF EXISTS delete_${namespace}_${tableName}_into_oplog;
`,
`
dedent`
CREATE TRIGGER delete_${namespace}_${tableName}_into_oplog
AFTER DELETE ON ${tableFullName}
WHEN 1 == (SELECT flag from _electric_trigger_settings WHERE tablename == '${tableFullName}')
Expand Down Expand Up @@ -128,24 +134,24 @@ function generateCompensationTriggers(
tableFullName: TableFullName,
table: Table
): Statement[] {
const { tableName, namespace, foreignKeys } = table
const { tableName, namespace, foreignKeys, columnTypes } = table

const makeTriggers = (foreignKey: ForeignKey) => {
const { childKey } = foreignKey

const fkTableNamespace = 'main' // currently, Electric always uses the 'main' namespace
const fkTableName = foreignKey.table
const fkTablePK = foreignKey.parentKey // primary key of the table pointed at by the FK.
const joinedFkPKs = joinColsForJSON([fkTablePK])
const joinedFkPKs = joinColsForJSON([fkTablePK], columnTypes)

return [
`-- Triggers for foreign key compensations
dedent`-- Triggers for foreign key compensations
DROP TRIGGER IF EXISTS compensation_insert_${namespace}_${tableName}_${childKey}_into_oplog;`,
// The compensation trigger inserts a row in `_electric_oplog` if the row pointed at by the FK exists
// The way how this works is that the values for the row are passed to the nested SELECT
// which will return those values for every record that matches the query
// which can be at most once since we filter on the foreign key which is also the primary key and thus is unique.
`
dedent`
CREATE TRIGGER compensation_insert_${namespace}_${tableName}_${childKey}_into_oplog
AFTER INSERT ON ${tableFullName}
WHEN 1 == (SELECT flag from _electric_trigger_settings WHERE tablename == '${fkTableNamespace}.${fkTableName}') AND
Expand All @@ -156,8 +162,8 @@ function generateCompensationTriggers(
FROM ${fkTableNamespace}.${fkTableName} WHERE ${foreignKey.parentKey} = new.${foreignKey.childKey};
END;
`,
`DROP TRIGGER IF EXISTS compensation_update_${namespace}_${tableName}_${foreignKey.childKey}_into_oplog;`,
`
dedent`DROP TRIGGER IF EXISTS compensation_update_${namespace}_${tableName}_${foreignKey.childKey}_into_oplog;`,
dedent`
CREATE TRIGGER compensation_update_${namespace}_${tableName}_${foreignKey.childKey}_into_oplog
AFTER UPDATE ON ${namespace}.${tableName}
WHEN 1 == (SELECT flag from _electric_trigger_settings WHERE tablename == '${fkTableNamespace}.${fkTableName}') AND
Expand Down Expand Up @@ -186,7 +192,7 @@ export function generateTableTriggers(
table: Table
): Statement[] {
const oplogTriggers = generateOplogTriggers(tableFullName, table)
const fkTriggers = generateCompensationTriggers(tableFullName, table) //, tables)
const fkTriggers = generateCompensationTriggers(tableFullName, table)
return oplogTriggers.concat(fkTriggers)
}

Expand All @@ -198,7 +204,7 @@ export function generateTableTriggers(
export function generateTriggers(tables: Tables): Statement[] {
const tableTriggers: Statement[] = []
tables.forEach((table, tableFullName) => {
const triggers = generateTableTriggers(tableFullName, table) //, tables)
const triggers = generateTableTriggers(tableFullName, table)
tableTriggers.push(...triggers)
})

Expand All @@ -213,16 +219,74 @@ export function generateTriggers(tables: Tables): Statement[] {
return stmts
}

function joinColsForJSON(cols: string[], target?: 'new' | 'old') {
/**
* Joins the column names and values into a string of pairs of the form `'col1', val1, 'col2', val2, ...`
* that can be used to build a JSON object in a SQLite `json_object` function call.
* Values of type REAL are cast to text to avoid a bug in SQLite's `json_object` function (see below).
*
* NOTE: There is a bug with SQLite's `json_object` function up to version 3.41.2
* that causes it to return an invalid JSON object if some value is +Infinity or -Infinity.
* @example
* sqlite> SELECT json_object('a',2e370,'b',-3e380);
* {"a":Inf,"b":-Inf}
*
* The returned JSON is not valid because JSON does not support `Inf` nor `-Inf`.
* @example
* sqlite> SELECT json_valid((SELECT json_object('a',2e370,'b',-3e380)));
* 0
*
* This is fixed in version 3.42.0 and on:
* @example
* sqlite> SELECT json_object('a',2e370,'b',-3e380);
* {"a":9e999,"b":-9e999}
*
* The returned JSON now is valid, the numbers 9e999 and -9e999
* are out of range of floating points and thus will be converted
* to `Infinity` and `-Infinity` when parsed with `JSON.parse`.
*
* Nevertheless version SQLite version 3.42.0 is very recent (May 2023)
* and users may be running older versions so we want to support them.
* Therefore we introduce the following workaround:
* @example
* sqlite> SELECT json_object('a', cast(2e370 as TEXT),'b', cast(-3e380 as TEXT));
* {"a":"Inf","b":"-Inf"}
*
* By casting the values to TEXT, infinity values are turned into their string representation.
* As such, the resulting JSON is valid.
* This means that the values will be stored as strings in the oplog,
* thus, we must be careful when parsing the oplog to convert those values back to their numeric type.
*
* For reference:
* - https://discord.com/channels/933657521581858818/1163829658236760185
* - https://www.sqlite.org/src/info/b52081d0acd07dc5bdb4951a3e8419866131965260c1e3a4c9b6e673bfe3dfea
*
* @param cols The column names
* @param target The target to use for the column values (new or old value provided by the trigger).
*/
function joinColsForJSON(
cols: string[],
colTypes: ColumnTypes,
target?: 'new' | 'old'
) {
// casts the value to TEXT if it is of type REAL
// to work around the bug in SQLite's `json_object` function
const castIfNeeded = (col: string, targettedCol: string) => {
if (colTypes[col] === 'REAL') {
return `cast(${targettedCol} as TEXT)`
} else {
return targettedCol
}
}

if (typeof target === 'undefined') {
return cols
.sort()
.map((col) => `'${col}', ${col}`)
.map((col) => `'${col}', ${castIfNeeded(col, col)}`)
.join(', ')
} else {
return cols
.sort()
.map((col) => `'${col}', ${target}.${col}`)
.map((col) => `'${col}', ${castIfNeeded(col, `${target}.${col}`)}`)
.join(', ')
}
}
54 changes: 26 additions & 28 deletions clients/typescript/src/satellite/oplog.ts
Original file line number Diff line number Diff line change
Expand Up @@ -120,18 +120,21 @@ export const localEntryToChanges = (
tag: Tag,
relations: RelationsCache
): OplogEntryChanges => {
const relation = relations[entry.tablename]

const result: OplogEntryChanges = {
namespace: entry.namespace,
tablename: entry.tablename,
primaryKeyCols: JSON.parse(entry.primaryKey),
primaryKeyCols: deserialiseRow(entry.primaryKey, relation) as Record<
string,
string | number
>,
optype: entry.optype === OPTYPES.delete ? OPTYPES.delete : OPTYPES.upsert,
changes: {},
tag: entry.optype == OPTYPES.delete ? null : tag,
clearTags: decodeTags(entry.clearTags),
}

const relation = relations[entry.tablename]

const oldRow: Row = entry.oldRow
? (deserialiseRow(entry.oldRow, relation) as Row)
: {}
Expand Down Expand Up @@ -167,7 +170,10 @@ export const remoteEntryToChanges = (
const result: ShadowEntryChanges = {
namespace: entry.namespace,
tablename: entry.tablename,
primaryKeyCols: JSON.parse(entry.primaryKey),
primaryKeyCols: deserialiseRow(entry.primaryKey, relation) as Record<
string,
string | number
>,
optype: entry.optype === OPTYPES.delete ? OPTYPES.delete : OPTYPES.upsert,
changes: {},
// if it is a delete, then `newRow` is empty so the full row is the old row
Expand Down Expand Up @@ -295,7 +301,7 @@ function serialiseRow(row?: Rec): string {
if (Number.isNaN(value)) {
return 'NaN'
} else if (value === Infinity) {
return '+Inf'
return 'Inf'
} else if (value === -Infinity) {
return '-Inf'
}
Expand Down Expand Up @@ -324,10 +330,12 @@ function deserialiseRow(str: string, rel: Pick<Relation, 'columns'>): Rec {
) {
if (value === 'NaN') {
return NaN
} else if (value === '+Inf') {
} else if (value === 'Inf') {
return Infinity
} else if (value === '-Inf') {
return -Infinity
} else {
return Number(value)
}
}
return value
Expand Down Expand Up @@ -405,15 +413,6 @@ export const toTransactions = (
)
}

export const newShadowEntry = (oplogEntry: OplogEntry): ShadowEntry => {
return {
namespace: oplogEntry.namespace,
tablename: oplogEntry.tablename,
primaryKey: primaryKeyToStr(JSON.parse(oplogEntry.primaryKey)),
tags: shadowTagsDefault,
}
}

export const getShadowPrimaryKey = (
oplogEntry: OplogEntry | OplogEntryChanges | ShadowEntryChanges
): ShadowKey => {
Expand Down Expand Up @@ -469,19 +468,18 @@ export const opLogEntryToChange = (
* @param primaryKeyObj object representing all columns of a primary key
* @returns a stringified JSON with stable sorting on column names
*/
export const primaryKeyToStr = (primaryKeyObj: {
[key: string]: string | number
}): string => {
const keys = Object.keys(primaryKeyObj).sort()
if (keys.length === 0) return '{}'

let json = '{'
for (const key of keys) {
json += JSON.stringify(key) + ':' + JSON.stringify(primaryKeyObj[key]) + ','
}

// Remove the last appended comma and close the object
return json.slice(0, -1) + '}'
export const primaryKeyToStr = <T extends Record<string, string | number>>(
primaryKeyObj: T
): string => {
// Sort the keys then insert them in order in a fresh object
// cf. https://stackoverflow.com/questions/5467129/sort-javascript-object-by-key
const keys: Array<keyof T> = Object.keys(primaryKeyObj).sort()
const sortedObj = keys.reduce((obj, key) => {
obj[key] = primaryKeyObj[key]
return obj
}, {} as T)

return serialiseRow(sortedObj)
}

export const generateTag = (
Expand Down
3 changes: 3 additions & 0 deletions clients/typescript/src/satellite/process.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1526,6 +1526,9 @@ export function generateTriggersForTable(tbl: MigrationTable): Statement[] {
parentKey: fk.pkCols[0],
}
}),
columnTypes: Object.fromEntries(
tbl.columns.map((col) => [col.name, col.sqliteType.toUpperCase()])
),
}
const fullTableName = table.namespace + '.' + table.tableName
return generateTableTriggers(fullTableName, table)
Expand Down
2 changes: 1 addition & 1 deletion clients/typescript/src/util/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ export type DataChange = {

// `$type` and `pgType` are omitted from columns because they are not
// currently used. `sqliteType` is kept: it is needed to build the
// `columnTypes` map used when generating oplog triggers.
export type MigrationTable = Omit<SatOpMigrate_Table, '$type' | 'columns'> & {
  columns: Omit<SatOpMigrate_Column, '$type' | 'pgType'>[]
}

export type SchemaChange = {
Expand Down
Loading

0 comments on commit 55af617

Please sign in to comment.