diff --git a/examples/write-patterns/Dockerfile b/examples/write-patterns/Dockerfile
new file mode 100644
index 0000000000..6f1297efed
--- /dev/null
+++ b/examples/write-patterns/Dockerfile
@@ -0,0 +1,33 @@
+FROM node:lts-alpine AS base
+
+# Stage 1: Install dependencies
+FROM base AS deps
+WORKDIR /app
+
+RUN npm install -g pnpm
+
+COPY pnpm-*.yaml ./
+COPY package.json ./
+COPY tsconfig.build.json ./
+COPY tsconfig.base.json ./
+COPY packages/typescript-client packages/typescript-client/
+COPY packages/react-hooks packages/react-hooks/
+COPY packages/experimental packages/experimental/
+COPY examples/write-patterns/ examples/write-patterns/
+
+# Install dependencies
+RUN pnpm install --frozen-lockfile
+RUN pnpm run -r build
+
+
+# Stage 2: Production image (TODO: slim this down)
+FROM node:lts-alpine AS prod
+WORKDIR /app
+
+ENV NODE_ENV=production
+COPY --from=deps /app/ ./
+
+WORKDIR /app/examples/write-patterns
+
+EXPOSE 3001
+ENTRYPOINT ["node", "shared/backend/api.js"]
diff --git a/examples/write-patterns/package.json b/examples/write-patterns/package.json
index fa1108790b..11fae239c0 100644
--- a/examples/write-patterns/package.json
+++ b/examples/write-patterns/package.json
@@ -1,30 +1,16 @@
 {
   "name": "@electric-examples/write-patterns",
-  "private": true,
   "version": "0.0.1",
   "author": "ElectricSQL",
-  "license": "Apache-2.0",
   "bugs": {
     "url": "https://github.com/electric-sql/electric/issues"
   },
-  "type": "module",
-  "scripts": {
-    "backend:up": "PROJECT_NAME=write-patterns pnpm -C ../../ run example-backend:up && pnpm db:migrate",
-    "backend:down": "PROJECT_NAME=write-patterns pnpm -C ../../ run example-backend:down",
-    "db:migrate": "dotenv -e ../../.env.dev -- pnpm exec pg-migrations apply --directory ./shared/migrations",
-    "dev": "concurrently \"vite\" \"node shared/backend/api.js\"",
-    "build": "vite build",
-    "format": "eslint . --ext ts,tsx --fix",
-    "stylecheck": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0",
-    "preview": "vite preview",
-    "typecheck": "tsc --noEmit"
-  },
   "dependencies": {
     "@electric-sql/client": "workspace:*",
     "@electric-sql/experimental": "workspace:*",
     "@electric-sql/pglite": "^0.2.14",
     "@electric-sql/pglite-react": "^0.2.14",
-    "@electric-sql/pglite-sync": "^0.2.16",
+    "@electric-sql/pglite-sync": "^0.2.17",
     "@electric-sql/react": "workspace:*",
     "body-parser": "^1.20.2",
     "cors": "^2.8.5",
@@ -32,6 +18,7 @@
     "pg": "^8.12.0",
     "react": "19.0.0-rc.1",
     "react-dom": "19.0.0-rc.1",
+    "sst": "3.3.64",
     "uuid": "^10.0.0",
     "valtio": "^2.1.2",
     "zod": "^3.23.8"
@@ -50,10 +37,24 @@
     "vite": "^5.3.4",
     "vite-plugin-pwa": "^0.21.0"
   },
+  "license": "Apache-2.0",
   "overrides": {
     "@types/react": "npm:types-react@rc",
     "@types/react-dom": "npm:types-react-dom@rc",
     "react": "19.0.0-rc.1",
     "react-dom": "19.0.0-rc.1"
-  }
+  },
+  "private": true,
+  "scripts": {
+    "backend:down": "PROJECT_NAME=write-patterns pnpm -C ../../ run example-backend:down",
+    "backend:up": "PROJECT_NAME=write-patterns pnpm -C ../../ run example-backend:up && pnpm db:migrate",
+    "build": "vite build",
+    "db:migrate": "dotenv -e ../../.env.dev -- pnpm exec pg-migrations apply --directory ./shared/migrations",
+    "dev": "concurrently \"vite\" \"node shared/backend/api.js\"",
+    "format": "eslint . --fix",
+    "preview": "vite preview",
+    "stylecheck": "eslint . --quiet",
+    "typecheck": "tsc --noEmit"
+  },
+  "type": "module"
 }
diff --git a/examples/write-patterns/patterns/1-online-writes/index.tsx b/examples/write-patterns/patterns/1-online-writes/index.tsx
index 10d9b79938..af7d2255c0 100644
--- a/examples/write-patterns/patterns/1-online-writes/index.tsx
+++ b/examples/write-patterns/patterns/1-online-writes/index.tsx
@@ -1,9 +1,9 @@
 import React from 'react'
 import { v4 as uuidv4 } from 'uuid'
 import { useShape } from '@electric-sql/react'
-import api from '../../shared/app/client'
 
-const ELECTRIC_URL = import.meta.env.ELECTRIC_URL || 'http://localhost:3000'
+import api from '../../shared/app/client'
+import { ELECTRIC_URL, envParams } from '../../shared/app/config'
 
 type Todo = {
   id: string
@@ -19,6 +19,7 @@ export default function OnlineWrites() {
     url: `${ELECTRIC_URL}/v1/shape`,
     params: {
       table: 'todos',
+      ...envParams,
     },
     parser: {
       timestamptz: (value: string) => new Date(value),
diff --git a/examples/write-patterns/patterns/2-optimistic-state/index.tsx b/examples/write-patterns/patterns/2-optimistic-state/index.tsx
index 1c8a3d221e..823280e0e9 100644
--- a/examples/write-patterns/patterns/2-optimistic-state/index.tsx
+++ b/examples/write-patterns/patterns/2-optimistic-state/index.tsx
@@ -2,9 +2,9 @@ import React, { useOptimistic, useTransition } from 'react'
 import { v4 as uuidv4 } from 'uuid'
 import { matchBy, matchStream } from '@electric-sql/experimental'
 import { useShape } from '@electric-sql/react'
-import api from '../../shared/app/client'
 
-const ELECTRIC_URL = import.meta.env.ELECTRIC_URL || 'http://localhost:3000'
+import api from '../../shared/app/client'
+import { ELECTRIC_URL, envParams } from '../../shared/app/config'
 
 type Todo = {
   id: string
@@ -34,6 +34,7 @@ export default function OptimisticState() {
     url: `${ELECTRIC_URL}/v1/shape`,
     params: {
       table: 'todos',
+      ...envParams,
     },
     parser: {
       timestamptz: (value: string) => new Date(value),
diff --git a/examples/write-patterns/patterns/3-shared-persistent/index.tsx b/examples/write-patterns/patterns/3-shared-persistent/index.tsx
index f63cadedc2..b7aa65f931 100644
--- a/examples/write-patterns/patterns/3-shared-persistent/index.tsx
+++ b/examples/write-patterns/patterns/3-shared-persistent/index.tsx
@@ -8,8 +8,8 @@ import { matchBy, matchStream } from '@electric-sql/experimental'
 import { useShape } from '@electric-sql/react'
 
 import api from '../../shared/app/client'
+import { ELECTRIC_URL, envParams } from '../../shared/app/config'
 
-const ELECTRIC_URL = import.meta.env.ELECTRIC_URL || 'http://localhost:3000'
 const KEY = 'electric-sql/examples/write-patterns/shared-persistent'
 
 type Todo = {
@@ -112,6 +112,7 @@ export default function SharedPersistent() {
     url: `${ELECTRIC_URL}/v1/shape`,
     params: {
       table: 'todos',
+      ...envParams,
     },
     parser: {
       timestamptz: (value: string) => new Date(value),
@@ -123,25 +124,30 @@ export default function SharedPersistent() {
   // Get the local optimistic state.
   const localWrites = useSnapshot<Map<string, LocalWrite>>(optimisticState)
 
-  // Merge the synced state with the local state.
-  const todos = localWrites
-    .values()
-    .reduce((synced: Todo[], { operation, value }: LocalWrite) => {
-      switch (operation) {
-        case 'insert':
-          return synced.some((todo) => todo.id === value.id) ?
-            synced
-            : [...synced, value as Todo]
-
-        case 'update':
-          return synced.map((todo) =>
-            todo.id === value.id ? { ...todo, ...value } : todo
-          )
-
-        case 'delete':
-          return synced.filter((todo) => todo.id !== value.id)
-      }
-    }, sorted)
+  const computeOptimisticState = (
+    synced: Todo[],
+    writes: LocalWrite[]
+  ): Todo[] => {
+    return writes.reduce(
+      (synced: Todo[], { operation, value }: LocalWrite): Todo[] => {
+        switch (operation) {
+          case 'insert':
+            return [...synced, value as Todo]
+          case 'update':
+            return synced.map((todo) =>
+              todo.id === value.id ? { ...todo, ...value } : todo
+            )
+          case 'delete':
+            return synced.filter((todo) => todo.id !== value.id)
+          default:
+            return synced
+        }
+      },
+      synced
+    )
+  }
+
+  const todos = computeOptimisticState(sorted, [...localWrites.values()])
 
   // These are the same event handler functions from the previous optimistic
   // state pattern, adapted to add the state to the shared, persistent store.
diff --git a/examples/write-patterns/patterns/4-through-the-db/db.ts b/examples/write-patterns/patterns/4-through-the-db/db.ts
index 3ac8c56162..20ee19d039 100644
--- a/examples/write-patterns/patterns/4-through-the-db/db.ts
+++ b/examples/write-patterns/patterns/4-through-the-db/db.ts
@@ -4,16 +4,19 @@ import { electricSync } from '@electric-sql/pglite-sync'
 
 import localSchemaMigrations from './local-schema.sql?raw'
 
+import { ELECTRIC_URL, envParams } from '../../shared/app/config'
+
 const DATA_DIR = 'idb://electric-write-patterns-example'
-const ELECTRIC_URL = import.meta.env.ELECTRIC_URL || 'http://localhost:3000'
 
 const registry = new Map>()
 
 export default async function loadPGlite(): Promise {
-  const loadingPromise = registry.get('loadingPromise')
+  let loadingPromise = registry.get('loadingPromise')
 
   if (loadingPromise === undefined) {
-    registry.set('loadingPromise', _loadPGlite())
+    loadingPromise = _loadPGlite()
+
+    registry.set('loadingPromise', loadingPromise)
   }
 
   return loadingPromise as Promise
@@ -32,7 +35,10 @@ async function _loadPGlite(): Promise {
   await pglite.electric.syncShapeToTable({
     shape: {
       url: `${ELECTRIC_URL}/v1/shape`,
-      table: 'todos',
+      params: {
+        table: 'todos',
+        ...envParams,
+      },
     },
     shapeKey: 'todos',
     table: 'todos_synced',
diff --git a/examples/write-patterns/patterns/4-through-the-db/local-schema.sql b/examples/write-patterns/patterns/4-through-the-db/local-schema.sql
index d20a2133e1..7d8b8827b1 100644
--- a/examples/write-patterns/patterns/4-through-the-db/local-schema.sql
+++ b/examples/write-patterns/patterns/4-through-the-db/local-schema.sql
@@ -78,12 +78,7 @@ END;
 $$ LANGUAGE plpgsql;
 
 CREATE OR REPLACE TRIGGER delete_local_on_synced_insert
-AFTER INSERT ON todos_synced
-FOR EACH ROW
-EXECUTE FUNCTION delete_local_on_synced_insert_trigger();
-
-CREATE OR REPLACE TRIGGER delete_local_on_synced_insert_and_update
-AFTER UPDATE ON todos_synced
+AFTER INSERT OR UPDATE ON todos_synced
 FOR EACH ROW
 EXECUTE FUNCTION delete_local_on_synced_insert_and_update_trigger();
 
diff --git a/examples/write-patterns/shared/app/client.ts b/examples/write-patterns/shared/app/client.ts
index cde5d3d291..f779aaf4e6 100644
--- a/examples/write-patterns/shared/app/client.ts
+++ b/examples/write-patterns/shared/app/client.ts
@@ -1,4 +1,4 @@
-const API_URL = import.meta.env.API_URL || 'http://localhost:3001'
+const API_URL = import.meta.env.VITE_SERVER_URL || 'http://localhost:3001'
 
 type RequestOptions = {
   method: string
diff --git a/examples/write-patterns/shared/app/config.ts b/examples/write-patterns/shared/app/config.ts
new file mode 100644
index 0000000000..731e8fe6a1
--- /dev/null
+++ b/examples/write-patterns/shared/app/config.ts
@@ -0,0 +1,11 @@
+export const ELECTRIC_URL =
+  import.meta.env.VITE_ELECTRIC_URL || 'http://localhost:3000'
+
+export const envParams: { database_id?: string; token?: string } =
+  import.meta.env.VITE_ELECTRIC_TOKEN &&
+  import.meta.env.VITE_ELECTRIC_DATABASE_ID
+    ? {
+        database_id: import.meta.env.VITE_ELECTRIC_DATABASE_ID,
+        token: import.meta.env.VITE_ELECTRIC_TOKEN,
+      }
+    : {}
diff --git a/examples/write-patterns/shared/backend/api.js b/examples/write-patterns/shared/backend/api.js
index 0d4f6a6b76..b742f5027f 100644
--- a/examples/write-patterns/shared/backend/api.js
+++ b/examples/write-patterns/shared/backend/api.js
@@ -6,10 +6,10 @@ import pg from 'pg'
 import { z } from 'zod'
 
 // Connect to Postgres.
-const DATABASE_URL = process.env.DATABASE_URL || 'postgresql://postgres:password@localhost:54321/electric'
-const DATABASE_USE_SSL = process.env.DATABASE_USE_SSL === 'true' || false
-const pool = new pg.Pool({connectionString: DATABASE_URL, ssl: DATABASE_USE_SSL})
-const db = await pool.connect()
+const DATABASE_URL =
+  process.env.DATABASE_URL ||
+  'postgresql://postgres:password@localhost:54321/electric'
+const pool = new pg.Pool({ connectionString: DATABASE_URL })
 
 // Expose an HTTP server.
 const PORT = parseInt(process.env.PORT || '3001')
@@ -23,11 +23,11 @@ const createSchema = z.object({
   id: z.string().uuid(),
   title: z.string(),
   created_at: z.string(),
-  write_id: z.string().optional()
+  write_id: z.string().optional(),
 })
 const updateSchema = z.object({
   completed: z.boolean(),
-  write_id: z.string().optional()
+  write_id: z.string().optional(),
 })
 
 // Define functions to create, update and delete todos
@@ -39,14 +39,9 @@ const createTodo = async (id, title, created_at, write_id) => {
     VALUES ($1, $2, false, $3, $4)
   `
 
-  const params = [
-    id,
-    title,
-    created_at,
-    write_id || null
-  ]
+  const params = [id, title, created_at, write_id || null]
 
-  await db.query(sql, params)
+  await pool.query(sql, params)
 }
 
 const updateTodo = async (id, completed, write_id) => {
@@ -55,75 +50,65 @@ const updateTodo = async (id, completed, write_id) => {
     WHERE id = $3
   `
 
-  const params = [
-    completed ? '1' : '0',
-    write_id || null,
-    id
-  ]
+  const params = [completed ? '1' : '0', write_id || null, id]
 
-  await db.query(sql, params)
+  await pool.query(sql, params)
 }
 
 const deleteTodo = async (id) => {
-  const sql = `DELETE from todos where id = $1`
+  const sql = 'DELETE from todos where id = $1'
   const params = [id]
 
-  await db.query(sql, params)
+  await pool.query(sql, params)
 }
 
 // Expose the shared REST API to create, update and delete todos.
-app.post(`/todos`, async (req, res) => {
+app.post('/todos', async (req, res) => {
   let data
   try {
     data = createSchema.parse(req.body)
-  }
-  catch (err) {
+  } catch (err) {
     return res.status(400).json({ errors: err.errors })
   }
 
   try {
     await createTodo(data.id, data.title, data.created_at, data.write_id)
-  }
-  catch (err) {
+  } catch (err) {
     return res.status(500).json({ errors: err })
   }
 
   return res.status(200).json({ status: 'OK' })
 })
 
-app.put(`/todos/:id`, async (req, res) => {
+app.put('/todos/:id', async (req, res) => {
   let id, data
   try {
     id = idSchema.parse(req.params.id)
     data = updateSchema.parse(req.body)
-  }
-  catch (err) {
+  } catch (err) {
     return res.status(400).json({ errors: err.errors })
   }
 
   try {
     await updateTodo(id, data.completed, data.write_id)
-  }
-  catch (err) {
+  } catch (err) {
     return res.status(500).json({ errors: err })
   }
 
   return res.status(200).json({ status: 'OK' })
 })
 
-app.delete(`/todos/:id`, async (req, res) => {
+app.delete('/todos/:id', async (req, res) => {
   let id
   try {
     id = idSchema.parse(req.params.id)
-  }
-  catch (err) {
+  } catch (err) {
     return res.status(400).json({ errors: err.errors })
   }
 
   try {
     await deleteTodo(id)
-  }
-  catch (err) {
+  } catch (err) {
     return res.status(500).json({ errors: err })
   }
 
@@ -145,26 +130,26 @@ const transactionsSchema = z.array(
           completed: z.boolean().optional(),
           created_at: z.string().optional(),
         }),
-        write_id: z.string()
+        write_id: z.string(),
       })
-    )
+    ),
   })
 )
 
-app.post(`/changes`, async (req, res) => {
+app.post('/changes', async (req, res) => {
   let data
   try {
     data = transactionsSchema.parse(req.body)
-  }
-  catch (err) {
+  } catch (err) {
     return res.status(400).json({ errors: err.errors })
   }
 
+  const client = await pool.connect()
   try {
-    await db.query('BEGIN')
+    await client.query('BEGIN')
 
     data.forEach((tx) => {
-      tx.changes.forEach(({operation, value, write_id}) => {
+      tx.changes.forEach(({ operation, value, write_id }) => {
         switch (operation) {
           case 'insert':
             createTodo(value.id, value.title, value.created_at, write_id)
@@ -181,12 +166,13 @@ app.post(`/changes`, async (req, res) => {
       })
     })
 
-    await db.query('COMMIT')
-  }
-  catch (err) {
-    await db.query('ROLLBACK')
+    await client.query('COMMIT')
+  } catch (err) {
+    await client.query('ROLLBACK')
 
     return res.status(500).json({ errors: err })
+  } finally {
+    await client.release()
   }
 
   return res.status(200).json({ status: 'OK' })
@@ -194,5 +180,5 @@ app.post(`/changes`, async (req, res) => {
 
 // Start the server
 app.listen(PORT, () => {
-  console.log(`Server listening at http://localhost:${PORT}`)
+  console.log(`Server listening at port ${PORT}`)
 })
diff --git a/examples/write-patterns/sst-env.d.ts b/examples/write-patterns/sst-env.d.ts
new file mode 100644
index 0000000000..53070092d9
--- /dev/null
+++ b/examples/write-patterns/sst-env.d.ts
@@ -0,0 +1,22 @@
+/* This file is auto-generated by SST. Do not edit. */
+/* tslint:disable */
+/* eslint-disable */
+/* deno-fmt-ignore-file */
+import "sst"
+export {}
+declare module "sst" {
+  export interface Resource {
+    "write-patterns-production-vpc": {
+      "type": "sst.aws.Vpc"
+    }
+    "write-patterns-service-production": {
+      "service": string
+      "type": "sst.aws.Service"
+      "url": string
+    }
+    "write-patterns-website": {
+      "type": "sst.aws.StaticSite"
+      "url": string
+    }
+  }
+}
diff --git a/examples/write-patterns/sst.config.ts b/examples/write-patterns/sst.config.ts
new file mode 100644
index 0000000000..90ff2b76e5
--- /dev/null
+++ b/examples/write-patterns/sst.config.ts
@@ -0,0 +1,182 @@
+// eslint-disable-next-line @typescript-eslint/triple-slash-reference
+/// <reference path="./.sst/platform/config.d.ts" />
+
+import { execSync } from 'child_process'
+
+const isProduction = (stage) => stage.toLocaleLowerCase() === 'production'
+
+export default $config({
+  app(input) {
+    return {
+      name: 'write-patterns',
+      removal: input?.stage === 'production' ? 'retain' : 'remove',
+      home: 'aws',
+      providers: {
+        cloudflare: '5.42.0',
+        aws: {
+          version: '6.57.0',
+        },
+        neon: '0.6.3',
+      },
+    }
+  },
+  async run() {
+    const project = neon.getProjectOutput({ id: process.env.NEON_PROJECT_ID! })
+    const base = {
+      projectId: project.id,
+      branchId: project.defaultBranchId,
+    }
+
+    const db = new neon.Database('write-patterns', {
+      ...base,
+      name: isProduction($app.stage)
+        ? 'write-patterns-production'
+        : `write-patterns-${$app.stage}`,
+      ownerName: 'neondb_owner',
+    })
+
+    const databaseUri = getNeonDbUri(project, db, false)
+    try {
+      databaseUri.apply(applyMigrations)
+
+      const electricInfo = databaseUri.apply((uri) =>
+        addDatabaseToElectric(uri)
+      )
+
+      const vpc = new sst.aws.Vpc(`write-patterns-${$app.stage}-vpc`)
+      const cluster = new sst.aws.Cluster(
+        `write-patterns-${$app.stage}-cluster`,
+        {
+          vpc,
+        }
+      )
+
+      const service = cluster.addService(
+        `write-patterns-service-${$app.stage}`,
+        {
+          loadBalancer: {
+            ports: [{ listen: '443/https', forward: '3001/http' }],
+            domain: {
+              name: `write-patterns-backend${
+                $app.stage === 'production' ? '' : `-stage-${$app.stage}`
+              }.examples.electric-sql.com`,
+              dns: sst.cloudflare.dns(),
+            },
+          },
+          environment: {
+            DATABASE_URL: databaseUri,
+          },
+          image: {
+            context: '../..',
+            dockerfile: 'Dockerfile',
+          },
+          dev: {
+            command: 'node server.js',
+          },
+        }
+      )
+
+      if (!process.env.ELECTRIC_API) {
+        throw new Error('ELECTRIC_API environment variable is required')
+      }
+
+      const website = new sst.aws.StaticSite('write-patterns-website', {
+        build: {
+          command: 'npm run build',
+          output: 'dist',
+        },
+        environment: {
+          VITE_SERVER_URL: service.url.apply((url) =>
+            url.slice(0, url.length - 1)
+          ),
+          VITE_ELECTRIC_URL: process.env.ELECTRIC_API,
+          VITE_ELECTRIC_DATABASE_ID: electricInfo.id,
+          VITE_ELECTRIC_TOKEN: electricInfo.token,
+        },
+        domain: {
+          name: `write-patterns${
+            isProduction($app.stage) ? '' : `-stage-${$app.stage}`
+          }.examples.electric-sql.com`,
+          dns: sst.cloudflare.dns(),
+        },
+        dev: {
+          command: 'npm run vite',
+        },
+      })
+
+      return {
+        databaseUri,
+        database_id: electricInfo.id,
+        electric_token: electricInfo.token,
+        server: service.url,
+        website: website.url,
+      }
+    } catch (e) {
+      console.error('Failed to deploy todo app example stack', e)
+    }
+  },
+})
+
+function applyMigrations(uri: string) {
+  execSync('pnpm exec pg-migrations apply --directory ./shared/migrations', {
+    env: {
+      ...process.env,
+      DATABASE_URL: uri,
+    },
+  })
+}
+
+function getNeonDbUri(
+  project: $util.Output<neon.GetProjectResult>,
+  db: neon.Database,
+  pooled: boolean
+) {
+  const passwordOutput = neon.getBranchRolePasswordOutput({
+    projectId: project.id,
+    branchId: project.defaultBranchId,
+    roleName: db.ownerName,
+  })
+
+  const endpoint = neon.getBranchEndpointsOutput({
+    projectId: project.id,
+    branchId: project.defaultBranchId,
+  })
+
+  const databaseHost = pooled
+    ? endpoint.endpoints?.apply((endpoints) =>
+        endpoints![0].host.replace(
+          endpoints![0].id,
+          endpoints![0].id + '-pooler'
+        )
+      )
+    : project.databaseHost
+
+  const url = $interpolate`postgresql://${passwordOutput.roleName}:${passwordOutput.password}@${databaseHost}/${db.name}?sslmode=require`
+  return url
+}
+
+async function addDatabaseToElectric(
+  database_url: string,
+  region: 'us-east-1' | 'eu-west-1' = 'us-east-1'
+): Promise<{ id: string; token: string }> {
+  const adminApi = process.env.ELECTRIC_ADMIN_API
+
+  const result = await fetch(new URL('v1/databases', adminApi), {
+    method: 'PUT',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify({
+      database_url,
+      region,
+    }),
+  })
+
+  if (!result.ok) {
+    throw new Error(
+      `Could not add database to Electric (${
+        result.status
+      }): ${await result.text()}`
+    )
+  }
+
+  return await result.json()
+}
diff --git a/examples/write-patterns/tsconfig.json b/examples/write-patterns/tsconfig.json
index 27245b362f..cca57aeca7 100644
--- a/examples/write-patterns/tsconfig.json
+++ b/examples/write-patterns/tsconfig.json
@@ -20,5 +20,5 @@
     "noUnusedParameters": true,
     "noFallthroughCasesInSwitch": true
   },
-  "include": ["shared/app", "patterns"]
+  "include": ["src", "patterns", "shared", "types"]
 }
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index d73f9abb48..207e5d6ff9 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -977,8 +977,8 @@ importers:
         specifier: ^0.2.14
         version: 0.2.14(@electric-sql/pglite@0.2.14)(react@19.0.0-rc.1)
       '@electric-sql/pglite-sync':
-        specifier: ^0.2.16
-        version: 0.2.16(@electric-sql/pglite@0.2.14)
+        specifier: ^0.2.17
+        version: 0.2.17(@electric-sql/pglite@0.2.14)
       '@electric-sql/react':
         specifier: workspace:*
         version: link:../../packages/react-hooks
@@ -1000,6 +1000,9 @@ importers:
      react-dom:
         specifier: 19.0.0-rc.1
         version: 19.0.0-rc.1(react@19.0.0-rc.1)
+      sst:
+        specifier: 3.3.64
+        version: 3.3.64(hono@4.6.13)
       uuid:
         specifier: ^10.0.0
         version: 10.0.0
@@ -2154,9 +2157,6 @@ packages:
       search-insights:
         optional: true
 
-  '@electric-sql/client@0.8.0':
-    resolution: {integrity: sha512-M4VnuL2q2i1yhsjc9DEQtf4GEkXoaMjlfm0Lq7KqLDjj2nqPhbUTo8IeWhf3OJSZ7j+GyFd/YlLg4rlBDrE/6Q==}
-
   '@electric-sql/client@0.9.1':
     resolution: {integrity: sha512-Lpo+GjnXW7i0uyITlkSTDGPH/fVYYln1/HQkgZAf89F3i/QeAVKT7UlWa7la1G0dGNSQaZuOypFvqENnHHJUKg==}
 
@@ -2180,11 +2180,6 @@ packages:
       '@electric-sql/pglite':
         optional: true
 
-  '@electric-sql/pglite-sync@0.2.16':
-    resolution: {integrity: sha512-jv/86TRBAPHA273Dt0af/mf/kARHkzsmjTMVhmSs8FAn6UKR1fcqIIAeC3EyI0ihP2H3E2M3ivpMMvO8DULGxQ==}
-    peerDependencies:
-      '@electric-sql/pglite': ^0.2.14
-
   '@electric-sql/pglite-sync@0.2.17':
     resolution: {integrity: sha512-lawbWkSKdCO+ETKQjseEefSgltwB5jXBzwSx0YMI2zmO+rf7gl5DP1hqEz5drqrzACS2mNRldah7p+0QcFEjLg==}
     peerDependencies:
@@ -8820,6 +8815,11 @@ packages:
     cpu: [arm64]
     os: [darwin]
 
+  sst-darwin-arm64@3.3.64:
+    resolution: {integrity: sha512-sGlGX66vjWuN/501zjjEdqIalteyuawuoUvbIQsYFZ3yYg+THSPsXR5e3ADo2LgrOelaMhOOAZ8jabGGEsptnw==}
+    cpu: [arm64]
+    os: [darwin]
+
   sst-darwin-arm64@3.3.7:
     resolution: {integrity: sha512-2CQh78YIdvrRpO8enZ/Jx51JsUSFtk564u9w4ldcu5SsMMDY1ocdw5p/XIwBy1eKeRtrXLizd35sYbtSfSy6sw==}
     cpu: [arm64]
     os: [darwin]
 
@@ -8830,6 +8830,11 @@
     cpu: [x64]
     os: [darwin]
 
+  sst-darwin-x64@3.3.64:
+    resolution: {integrity: sha512-qH8vDyPMbRASBt5T5+dIyheJ7DoGjHaeGBWzloDVN9Ukz+H3W37rSR4cWEkZlYAwxx32DoiFl6hNIhH39TFpAw==}
+    cpu: [x64]
+    os: [darwin]
+
   sst-darwin-x64@3.3.7:
     resolution: {integrity: sha512-+hiDT3+am+CBO3xBy8yl3bmFeTjGXUT/+7V6NFOV2yxlRP3A8J65nEjWdzPTU/u7hRl+leE8EBu14j0grt/7/A==}
     cpu: [x64]
     os: [darwin]
 
@@ -8840,6 +8845,11 @@
     cpu: [arm64]
     os: [linux]
 
+  sst-linux-arm64@3.3.64:
+    resolution: {integrity: sha512-MUd0hHMap0SXAdU+UniAAlAzEzuJNIPbnQ80SHFGKatJNgFlQmmp7jq4qR1GXjaB3/NttLKIroQ8RBXuydtUzA==}
+    cpu: [arm64]
+    os: [linux]
+
   sst-linux-arm64@3.3.7:
     resolution: {integrity: sha512-dYolpXAjq0S8QjL8sTKzcRpPNgZDeMcJ9PHnt/8GpdqxNxEpGlNF9gMl2cB7mleJyJYBNMPvi4YEeCGtcazmeQ==}
     cpu: [arm64]
     os: [linux]
 
@@ -8850,6 +8860,11 @@
     cpu: [x64]
     os: [linux]
 
+  sst-linux-x64@3.3.64:
+    resolution: {integrity: sha512-8V42iyy9hc2bcW570DCzFIZrL1wOlSx0PF2M+Zlq7jhVGbhBTTZ13HHIo//W7ETtsAhItBeknZil6LidLz4ZDg==}
+    cpu: [x64]
+    os: [linux]
+
   sst-linux-x64@3.3.7:
     resolution: {integrity: sha512-K2vPOZ5DS8mJmE4QtffgZN5Nem1MIBhoVozNtZ0NoufeKHbFz0Hyw9wbqxYSbs2MOoVNKvG8qwcX99ojVXTFKw==}
     cpu: [x64]
     os: [linux]
 
@@ -8860,6 +8875,11 @@
     cpu: [x86]
     os: [linux]
 
+  sst-linux-x86@3.3.64:
+    resolution: {integrity: sha512-TuJ5Zf9Lt5nTUOFKR+/EqPbpmzvtngHzQ5qvVq0k0wgPFC/ZgZItJtgZlspz8LL7wFl5dt2Y7VTVUHpov4PKNQ==}
+    cpu: [x86]
+    os: [linux]
+
   sst-linux-x86@3.3.7:
     resolution: {integrity: sha512-4rXj54+UJd+HLmrhCHQ0k9AOkugHZhhh6sCUnkUNChJr5ei62pRscUQ7ge8/jywvfzHZGZw3eXXJWCCsjilXFA==}
     cpu: [x86]
     os: [linux]
 
@@ -8877,6 +8897,18 @@
     valibot:
       optional: true
 
+  sst@3.3.64:
+    resolution: {integrity: sha512-Mvgbz/ylG2UOzDir9t0Qk8M0wbzQMzwm0/lk5+lywqpK8emAea43+yOIgPvaJEeGiv9i9T97XRnYLbddUGZAFA==}
+    hasBin: true
+    peerDependencies:
+      hono: 4.x
+      valibot: 0.30.x
+    peerDependenciesMeta:
+      hono:
+        optional: true
+      valibot:
+        optional: true
+
   sst@3.3.7:
     resolution: {integrity: sha512-qIJPQnGeIHarWZoUvphwi6R1nu6Pccd3Q2Qy9ltBLs4Z47TkSdwBNeqCBhgAzWA0eLDwStTXliexyQCcNM6gDQ==}
     hasBin: true
     peerDependencies:
@@ -11128,10 +11160,6 @@ snapshots:
     transitivePeerDependencies:
       - '@algolia/client-search'
 
-  '@electric-sql/client@0.8.0':
-    optionalDependencies:
-      '@rollup/rollup-darwin-arm64': 4.24.4
-
   '@electric-sql/client@0.9.1':
     optionalDependencies:
       '@rollup/rollup-darwin-arm64': 4.24.4
 
@@ -11172,9 +11200,9 @@ snapshots:
     transitivePeerDependencies:
      - '@lezer/common'
       - codemirror
 
-  '@electric-sql/pglite-sync@0.2.16(@electric-sql/pglite@0.2.14)':
+  '@electric-sql/pglite-sync@0.2.17(@electric-sql/pglite@0.2.14)':
     dependencies:
-      '@electric-sql/client': 0.8.0
+      '@electric-sql/client': 0.9.1
       '@electric-sql/pglite': 0.2.14
 
   '@electric-sql/pglite-sync@0.2.17(@electric-sql/pglite@0.2.15)':
@@ -18549,30 +18577,45 @@ snapshots:
   sst-darwin-arm64@3.3.59:
     optional: true
 
+  sst-darwin-arm64@3.3.64:
+    optional: true
+
   sst-darwin-arm64@3.3.7:
     optional: true
 
   sst-darwin-x64@3.3.59:
     optional: true
 
+  sst-darwin-x64@3.3.64:
+    optional: true
+
   sst-darwin-x64@3.3.7:
     optional: true
 
   sst-linux-arm64@3.3.59:
     optional: true
 
+  sst-linux-arm64@3.3.64:
+    optional: true
+
   sst-linux-arm64@3.3.7:
     optional: true
 
   sst-linux-x64@3.3.59:
     optional: true
 
+  sst-linux-x64@3.3.64:
+    optional: true
+
   sst-linux-x64@3.3.7:
     optional: true
 
   sst-linux-x86@3.3.59:
     optional: true
 
+  sst-linux-x86@3.3.64:
+    optional: true
+
   sst-linux-x86@3.3.7:
     optional: true
 
@@ -18589,6 +18632,19 @@ snapshots:
       sst-linux-x64: 3.3.59
       sst-linux-x86: 3.3.59
 
+  sst@3.3.64(hono@4.6.13):
+    dependencies:
+      aws4fetch: 1.0.20
+      jose: 5.2.3
+      openid-client: 5.6.4
+    optionalDependencies:
+      hono: 4.6.13
+      sst-darwin-arm64: 3.3.64
+      sst-darwin-x64: 3.3.64
+      sst-linux-arm64: 3.3.64
+      sst-linux-x64: 3.3.64
+      sst-linux-x86: 3.3.64
+
   sst@3.3.7(hono@4.6.13):
     dependencies:
       aws4fetch: 1.0.20