Skip to content

Commit

Permalink
feat(DAL): support float8 (#574)
Browse files Browse the repository at this point in the history
This PR introduces client-side support for float8 values.

---------

Co-authored-by: Oleksii Sholik <[email protected]>
  • Loading branch information
kevin-dp and alco authored Oct 30, 2023
1 parent 88a5375 commit 00eb469
Show file tree
Hide file tree
Showing 33 changed files with 1,557 additions and 157 deletions.
7 changes: 7 additions & 0 deletions .changeset/hungry-pandas-refuse.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
---
"@core/electric": patch
"electric-sql": patch
"@electric-sql/prisma-generator": patch
---

Adds client-side support for float8 data type.
5 changes: 5 additions & 0 deletions clients/typescript/src/cli/migrations/migrate.ts
Original file line number Diff line number Diff line change
Expand Up @@ -349,12 +349,14 @@ function addValidator(ln: string): string {

if (field) {
const intValidator = '@zod.number.int().gte(-2147483648).lte(2147483647)'
const float8Validator = '@zod.custom.use(z.number().or(z.nan()))'

// Map attributes to validators
const attributeValidatorMapping = new Map([
['@db.Uuid', '@zod.string.uuid()'],
['@db.SmallInt', '@zod.number.int().gte(-32768).lte(32767)'],
['@db.Int', intValidator],
['@db.DoublePrecision', float8Validator],
])
const attribute = field.attributes
.map((a) => a.type)
Expand All @@ -369,6 +371,9 @@ function addValidator(ln: string): string {
['Int', intValidator],
['Int?', intValidator],
['Int[]', intValidator],
['Float', float8Validator],
['Float?', float8Validator],
['Float[]', float8Validator],
])
const typeValidator = typeValidatorMapping.get(field.type)

Expand Down
11 changes: 11 additions & 0 deletions clients/typescript/src/client/conversions/sqlite.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,11 @@ export function toSqlite(v: any, pgType: PgType): any {
return serialiseDate(v, pgType as PgDateType)
} else if (pgType === PgBasicType.PG_BOOL) {
return serialiseBoolean(v)
} else if (Number.isNaN(v)) {
// Since SQLite does not support `NaN` we serialise `NaN` into the string `'NaN'`
// and deserialise it back to `NaN` when reading from the DB.
// cf. https://github.com/WiseLibs/better-sqlite3/issues/1088
return 'NaN'
} else {
return v
}
Expand All @@ -39,6 +44,12 @@ export function fromSqlite(v: any, pgType: PgType): any {
} else if (pgType === PgBasicType.PG_BOOL) {
// it's a serialised boolean
return deserialiseBoolean(v)
} else if (
v === 'NaN' &&
(pgType === PgBasicType.PG_FLOAT8 || pgType === PgBasicType.PG_FLOAT4)
) {
// it's a serialised NaN
return NaN
} else {
return v
}
Expand Down
120 changes: 92 additions & 28 deletions clients/typescript/src/migrators/triggers.ts
Original file line number Diff line number Diff line change
@@ -1,17 +1,23 @@
import { Statement } from '../util'
import { dedent } from 'ts-dedent'

type ForeignKey = {
table: string
childKey: string
parentKey: string
}

type Table = {
type ColumnName = string
type SQLiteType = string
type ColumnTypes = Record<ColumnName, SQLiteType>

export type Table = {
tableName: string
namespace: string
columns: string[]
primary: string[]
columns: ColumnName[]
primary: ColumnName[]
foreignKeys: ForeignKey[]
columnTypes: ColumnTypes
}

type TableFullName = string
Expand All @@ -36,25 +42,25 @@ export function generateOplogTriggers(
tableFullName: TableFullName,
table: Omit<Table, 'foreignKeys'>
): Statement[] {
const { tableName, namespace, columns, primary } = table
const { tableName, namespace, columns, primary, columnTypes } = table

const newPKs = joinColsForJSON(primary, 'new')
const oldPKs = joinColsForJSON(primary, 'old')
const newRows = joinColsForJSON(columns, 'new')
const oldRows = joinColsForJSON(columns, 'old')
const newPKs = joinColsForJSON(primary, columnTypes, 'new')
const oldPKs = joinColsForJSON(primary, columnTypes, 'old')
const newRows = joinColsForJSON(columns, columnTypes, 'new')
const oldRows = joinColsForJSON(columns, columnTypes, 'old')

return [
`
dedent`
-- Toggles for turning the triggers on and off
INSERT OR IGNORE INTO _electric_trigger_settings(tablename,flag) VALUES ('${tableFullName}', 1);
`,
`
dedent`
/* Triggers for table ${tableName} */
-- ensures primary key is immutable
DROP TRIGGER IF EXISTS update_ensure_${namespace}_${tableName}_primarykey;
`,
`
dedent`
CREATE TRIGGER update_ensure_${namespace}_${tableName}_primarykey
BEFORE UPDATE ON ${tableFullName}
BEGIN
Expand All @@ -69,11 +75,11 @@ export function generateOplogTriggers(
END;
END;
`,
`
dedent`
-- Triggers that add INSERT, UPDATE, DELETE operation to the _opslog table
DROP TRIGGER IF EXISTS insert_${namespace}_${tableName}_into_oplog;
`,
`
dedent`
CREATE TRIGGER insert_${namespace}_${tableName}_into_oplog
AFTER INSERT ON ${tableFullName}
WHEN 1 == (SELECT flag from _electric_trigger_settings WHERE tablename == '${tableFullName}')
Expand All @@ -82,10 +88,10 @@ export function generateOplogTriggers(
VALUES ('${namespace}', '${tableName}', 'INSERT', json_object(${newPKs}), json_object(${newRows}), NULL, NULL);
END;
`,
`
dedent`
DROP TRIGGER IF EXISTS update_${namespace}_${tableName}_into_oplog;
`,
`
dedent`
CREATE TRIGGER update_${namespace}_${tableName}_into_oplog
AFTER UPDATE ON ${tableFullName}
WHEN 1 == (SELECT flag from _electric_trigger_settings WHERE tablename == '${tableFullName}')
Expand All @@ -94,10 +100,10 @@ export function generateOplogTriggers(
VALUES ('${namespace}', '${tableName}', 'UPDATE', json_object(${newPKs}), json_object(${newRows}), json_object(${oldRows}), NULL);
END;
`,
`
dedent`
DROP TRIGGER IF EXISTS delete_${namespace}_${tableName}_into_oplog;
`,
`
dedent`
CREATE TRIGGER delete_${namespace}_${tableName}_into_oplog
AFTER DELETE ON ${tableFullName}
WHEN 1 == (SELECT flag from _electric_trigger_settings WHERE tablename == '${tableFullName}')
Expand Down Expand Up @@ -128,24 +134,24 @@ function generateCompensationTriggers(
tableFullName: TableFullName,
table: Table
): Statement[] {
const { tableName, namespace, foreignKeys } = table
const { tableName, namespace, foreignKeys, columnTypes } = table

const makeTriggers = (foreignKey: ForeignKey) => {
const { childKey } = foreignKey

const fkTableNamespace = 'main' // currently, Electric always uses the 'main' namespace
const fkTableName = foreignKey.table
const fkTablePK = foreignKey.parentKey // primary key of the table pointed at by the FK.
const joinedFkPKs = joinColsForJSON([fkTablePK])
const joinedFkPKs = joinColsForJSON([fkTablePK], columnTypes)

return [
`-- Triggers for foreign key compensations
dedent`-- Triggers for foreign key compensations
DROP TRIGGER IF EXISTS compensation_insert_${namespace}_${tableName}_${childKey}_into_oplog;`,
// The compensation trigger inserts a row in `_electric_oplog` if the row pointed at by the FK exists
// The way how this works is that the values for the row are passed to the nested SELECT
// which will return those values for every record that matches the query
// which can be at most once since we filter on the foreign key which is also the primary key and thus is unique.
`
dedent`
CREATE TRIGGER compensation_insert_${namespace}_${tableName}_${childKey}_into_oplog
AFTER INSERT ON ${tableFullName}
WHEN 1 == (SELECT flag from _electric_trigger_settings WHERE tablename == '${fkTableNamespace}.${fkTableName}') AND
Expand All @@ -156,8 +162,8 @@ function generateCompensationTriggers(
FROM ${fkTableNamespace}.${fkTableName} WHERE ${foreignKey.parentKey} = new.${foreignKey.childKey};
END;
`,
`DROP TRIGGER IF EXISTS compensation_update_${namespace}_${tableName}_${foreignKey.childKey}_into_oplog;`,
`
dedent`DROP TRIGGER IF EXISTS compensation_update_${namespace}_${tableName}_${foreignKey.childKey}_into_oplog;`,
dedent`
CREATE TRIGGER compensation_update_${namespace}_${tableName}_${foreignKey.childKey}_into_oplog
AFTER UPDATE ON ${namespace}.${tableName}
WHEN 1 == (SELECT flag from _electric_trigger_settings WHERE tablename == '${fkTableNamespace}.${fkTableName}') AND
Expand Down Expand Up @@ -186,7 +192,7 @@ export function generateTableTriggers(
table: Table
): Statement[] {
const oplogTriggers = generateOplogTriggers(tableFullName, table)
const fkTriggers = generateCompensationTriggers(tableFullName, table) //, tables)
const fkTriggers = generateCompensationTriggers(tableFullName, table)
return oplogTriggers.concat(fkTriggers)
}

Expand All @@ -198,7 +204,7 @@ export function generateTableTriggers(
export function generateTriggers(tables: Tables): Statement[] {
const tableTriggers: Statement[] = []
tables.forEach((table, tableFullName) => {
const triggers = generateTableTriggers(tableFullName, table) //, tables)
const triggers = generateTableTriggers(tableFullName, table)
tableTriggers.push(...triggers)
})

Expand All @@ -213,16 +219,74 @@ export function generateTriggers(tables: Tables): Statement[] {
return stmts
}

function joinColsForJSON(cols: string[], target?: 'new' | 'old') {
/**
* Joins the column names and values into a string of pairs of the form `'col1', val1, 'col2', val2, ...`
* that can be used to build a JSON object in a SQLite `json_object` function call.
* Values of type REAL are cast to text to avoid a bug in SQLite's `json_object` function (see below).
*
* NOTE: There is a bug with SQLite's `json_object` function up to version 3.41.2
* that causes it to return an invalid JSON object if some value is +Infinity or -Infinity.
* @example
* sqlite> SELECT json_object('a',2e370,'b',-3e380);
* {"a":Inf,"b":-Inf}
*
* The returned JSON is not valid because JSON does not support `Inf` nor `-Inf`.
* @example
* sqlite> SELECT json_valid((SELECT json_object('a',2e370,'b',-3e380)));
* 0
*
* This is fixed in version 3.42.0 and on:
* @example
* sqlite> SELECT json_object('a',2e370,'b',-3e380);
* {"a":9e999,"b":-9e999}
*
* The returned JSON now is valid, the numbers 9e999 and -9e999
* are out of range of floating points and thus will be converted
* to `Infinity` and `-Infinity` when parsed with `JSON.parse`.
*
Nevertheless, SQLite version 3.42.0 is very recent (May 2023)
* and users may be running older versions so we want to support them.
* Therefore we introduce the following workaround:
* @example
* sqlite> SELECT json_object('a', cast(2e370 as TEXT),'b', cast(-3e380 as TEXT));
* {"a":"Inf","b":"-Inf"}
*
* By casting the values to TEXT, infinity values are turned into their string representation.
* As such, the resulting JSON is valid.
* This means that the values will be stored as strings in the oplog,
* thus, we must be careful when parsing the oplog to convert those values back to their numeric type.
*
* For reference:
* - https://discord.com/channels/933657521581858818/1163829658236760185
* - https://www.sqlite.org/src/info/b52081d0acd07dc5bdb4951a3e8419866131965260c1e3a4c9b6e673bfe3dfea
*
* @param cols The column names
* @param target The target to use for the column values (new or old value provided by the trigger).
*/
function joinColsForJSON(
  cols: string[],
  colTypes: ColumnTypes,
  target?: 'new' | 'old'
) {
  // Casts the value to TEXT if the column is of type REAL
  // to work around the bug in SQLite's `json_object` function
  // (see the doc comment above for details).
  const castIfNeeded = (col: string, targettedCol: string) => {
    if (colTypes[col] === 'REAL') {
      return `cast(${targettedCol} as TEXT)`
    } else {
      return targettedCol
    }
  }

  // References the column through the trigger-provided `new`/`old` row
  // when a target is given, otherwise references the column directly.
  const colRef = (col: string) =>
    typeof target === 'undefined' ? col : `${target}.${col}`

  // Copy before sorting: `Array.prototype.sort` sorts in place and we must
  // not reorder the caller's `cols` array (e.g. a table's `columns`/`primary`
  // arrays) as a side effect of building the JSON pairs.
  return [...cols]
    .sort()
    .map((col) => `'${col}', ${castIfNeeded(col, colRef(col))}`)
    .join(', ')
}
17 changes: 7 additions & 10 deletions clients/typescript/src/satellite/client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1080,25 +1080,26 @@ function deserializeColumnData(
): string | number {
switch (columnType) {
case PgBasicType.PG_CHAR:
case PgBasicType.PG_TEXT:
case PgBasicType.PG_UUID:
case PgBasicType.PG_VARCHAR:
case PgDateType.PG_DATE:
case PgBasicType.PG_TEXT:
case PgDateType.PG_TIME:
case PgDateType.PG_TIMESTAMP:
case PgDateType.PG_TIMESTAMPTZ:
case PgBasicType.PG_UUID:
case PgBasicType.PG_VARCHAR:
return typeDecoder.text(column)
case PgBasicType.PG_BOOL:
return typeDecoder.bool(column)
case PgBasicType.PG_REAL:
case PgBasicType.PG_FLOAT4:
case PgBasicType.PG_FLOAT8:
case PgBasicType.PG_INT:
case PgBasicType.PG_INT2:
case PgBasicType.PG_INT4:
case PgBasicType.PG_INT8:
case PgBasicType.PG_INTEGER:
return Number(typeDecoder.text(column))
case PgBasicType.PG_FLOAT4:
case PgBasicType.PG_FLOAT8:
case PgBasicType.PG_REAL:
return typeDecoder.float(column)
case PgDateType.PG_TIMETZ:
return typeDecoder.timetz(column)
default:
Expand All @@ -1118,10 +1119,6 @@ function serializeColumnData(
switch (columnType) {
case PgBasicType.PG_BOOL:
return typeEncoder.bool(columnValue as number)
case PgBasicType.PG_REAL:
case PgBasicType.PG_FLOAT4:
case PgBasicType.PG_FLOAT8:
return typeEncoder.real(columnValue as number)
case PgDateType.PG_TIMETZ:
return typeEncoder.timetz(columnValue as string)
default:
Expand Down
13 changes: 9 additions & 4 deletions clients/typescript/src/satellite/merge.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import {
OPTYPES,
} from './oplog'
import { difference, union } from '../util/sets'
import { Row } from '../util'
import { RelationsCache, Row } from '../util'

/**
* Merge server-sent operation with local pending oplog to arrive at the same row state the server is at.
Expand All @@ -25,15 +25,20 @@ export function mergeEntries(
localOrigin: string,
local: OplogEntry[],
incomingOrigin: string,
incoming: OplogEntry[]
incoming: OplogEntry[],
relations: RelationsCache
): PendingChanges {
const localTableChanges = localOperationsToTableChanges(
local,
(timestamp: Date) => {
return generateTag(localOrigin, timestamp)
}
},
relations
)
const incomingTableChanges = remoteOperationsToTableChanges(
incoming,
relations
)
const incomingTableChanges = remoteOperationsToTableChanges(incoming)

for (const [tablename, incomingMapping] of Object.entries(
incomingTableChanges
Expand Down
Loading

0 comments on commit 00eb469

Please sign in to comment.