diff --git a/website/.vitepress/config.mts b/website/.vitepress/config.mts index c13f7f2a6d..02e15c36d4 100644 --- a/website/.vitepress/config.mts +++ b/website/.vitepress/config.mts @@ -46,6 +46,7 @@ export default defineConfig({ 'css', 'javascript', 'jsx', + 'nginx', 'shellscript', 'sql', 'tsx', @@ -147,8 +148,8 @@ export default defineConfig({ items: [ { text: 'Auth', link: '/docs/guides/auth' }, { text: 'Shapes', link: '/docs/guides/shapes' }, - { text: 'Deployment', link: '/docs/guides/deployment' }, { text: 'Installation', link: '/docs/guides/installation' }, + { text: 'Deployment', link: '/docs/guides/deployment' }, { text: 'Troubleshooting', link: '/docs/guides/troubleshooting' }, { text: 'Writing your own client', link: '/docs/guides/writing-your-own-client' }, ] @@ -166,21 +167,46 @@ export default defineConfig({ ], collapsed: false }, + { text: 'Config', link: '/docs/api/config' } + ] + }, + { + text: 'Integrations', + collapsed: false, + items: [ { - text: 'Integrations', + text: 'Frameworks', items: [ - // { text: 'MobX', link: '/docs/api/integrations/mobx' }, - { text: 'React', link: '/docs/api/integrations/react' }, - // { text: 'Redis', link: '/docs/api/integrations/redis' }, - // { text: 'TanStack', link: '/docs/api/integrations/tanstack' }, + { text: 'LiveStore', link: '/docs/integrations/livestore' }, + { text: 'MobX', link: '/docs/integrations/mobx' }, + { text: 'Next.js', link: '/docs/integrations/next' }, + { text: 'Phoenix', link: '/docs/integrations/phoenix' }, + { text: 'React', link: '/docs/integrations/react' }, + { text: 'Redis', link: '/docs/integrations/redis' }, + { text: 'TanStack', link: '/docs/integrations/tanstack' }, ], - collapsed: false + }, + { + text: 'Platforms', + items: [ + { text: 'AWS', link: '/docs/integrations/aws' }, + { text: 'Cloudflare', link: '/docs/integrations/cloudflare' }, + { text: 'Crunchy', link: '/docs/integrations/crunchy' }, + { text: 'Digital Ocean', link: '/docs/integrations/digital-ocean' }, + { text: 
'Expo', link: '/docs/integrations/expo' }, + { text: 'Fly.io', link: '/docs/integrations/fly' }, + { text: 'GCP', link: '/docs/integrations/gcp' }, + { text: 'Neon', link: '/docs/integrations/neon' }, + { text: 'Netlify', link: '/docs/integrations/netlify' }, + { text: 'Render', link: '/docs/integrations/render' }, + { text: 'Supabase', link: '/docs/integrations/supabase' } + ] } ] }, { text: 'Reference', - collapsed: true, + collapsed: false, items: [ { text: 'Alternatives', link: '/docs/reference/alternatives' }, { text: 'Benchmarks', link: '/docs/reference/benchmarks' }, diff --git a/website/.vitepress/theme/custom.css b/website/.vitepress/theme/custom.css index c56f540594..428f37c1b0 100644 --- a/website/.vitepress/theme/custom.css +++ b/website/.vitepress/theme/custom.css @@ -87,7 +87,9 @@ --ddn-color: #D0BCFF; --pglite-color: #F6F95C; - --vp-code-font-size: 0.925em; + --vp-code-bg: var(--vp-c-bg-elv); + --vp-code-color: #9ECBFF; + --vp-code-font-size: 0.875em; --vp-font-family-base: OpenSauceOne, ui-sans-serif, system-ui, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"; --vp-font-family-mono: SourceCodePro, ui-monospace, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace; } @@ -421,22 +423,24 @@ a.no-visual:hover { text-align: center; font-weight: 600; white-space: nowrap; - transition: color 0.25s, border-color 0.25s, background-color 0.25s; + transition: color 0.25s, border-color 0.25s, background-color 0.25s !important; } .VPButton.brand { background-color: var(--vp-button-brand-bg); border-color: var(--vp-button-brand-border); - color: var(--vp-button-brand-text); + color: var(--vp-button-brand-text) !important; } .VPButton.brand:hover { background-color: var(--vp-button-brand-hover-bg); border-color: var(--vp-button-brand-hover-border); - color: var(--vp-button-brand-hover-text); + color: var(--vp-button-brand-hover-text) !important; + opacity: 1 !important; } .VPButton.brand:active { 
background-color: var(--vp-button-brand-active-bg); border-color: var(--vp-button-brand-active-border); - color: var(--vp-button-brand-active-text); + color: var(--vp-button-brand-active-text) !important; + opacity: 1 !important; } .VPButton.electric, @@ -451,6 +455,16 @@ a.no-visual:hover { border-color: var(--pglite-color); color: var(--vp-c-bg); } +.VPButton.small { + border-radius: 16px; + padding: 0 16px; + line-height: 34px; + font-size: 13.5px; +} +.VPButton.vspace { + margin-top: 6px; + margin-bottom: 6px; +} .img-row { display: grid; @@ -560,7 +574,10 @@ iframe { .vp-doc .custom-block { margin: 24px 0; } - +.custom-block { + border-radius: 12px; + padding: 19px 21px 11px; +} .custom-block-no-title { margin-bottom: 10px !important; } @@ -576,6 +593,11 @@ iframe { padding: 24px 0 !important; } +pre { + max-width: 100%; + overflow-x: scroll; +} + ul.benefits { color: var(--vp-c-text-1) !important; diff --git a/website/blog/posts/2024-07-17-electric-next.md b/website/blog/posts/2024-07-17-electric-next.md index 10b51ecaa4..553ec5e75d 100644 --- a/website/blog/posts/2024-07-17-electric-next.md +++ b/website/blog/posts/2024-07-17-electric-next.md @@ -27,7 +27,7 @@ Electric Next was a clean rebuild of the Electric sync engine that now forms the We created a new repo and started by porting the absolute minimum code necessary from the [previous repo](https://github.com/electric-sql/electric-old). Once we were confident that Electric Next was the way forward, we froze the old system and moved the new code into our main repo at [https://github.com/electric-sql/electric](https://github.com/electric-sql/electric). -The new approach provides an [HTTP API](/docs/api/http) for syncing [Shapes](/docs/guides/shapes) of data from Postgres. This can be used directly or via [client libraries](/docs/api/clients/typescript) and [integrations](/docs/api/integrations/react). It's also simple to write your own client in any language. 
+The new approach provides an [HTTP API](/docs/api/http) for syncing [Shapes](/docs/guides/shapes) of data from Postgres. This can be used directly or via [client libraries](/docs/api/clients/typescript) and [integrations](/docs/integrations/react). It's also simple to write your own client in any language. ## Why build a new system? @@ -237,7 +237,7 @@ You can sync data into: - apps, replacing data fetching with data sync - development environments, for example syncing data into [an embedded PGlite](/product/pglite) -- edge workers and services, for example maintaining a low-latency [edge data cache](/docs/api/integrations/redis) +- edge workers and services, for example maintaining a low-latency [edge data cache](/docs/integrations/redis) - local AI systems running RAG, as per the example below
@@ -273,7 +273,7 @@ At the time of writing this document, we are early in the development of Electri However, even just with the first release of Electric Next you can already sync partial subsets of data from a Postgres database into a wide variety of clients and environments, for example: - syncing data into local apps using the [TypeScript](/docs/api/clients/typescript) and [Elixir](/docs/api/clients/elixir) clients -- replacing hot-path data fetching and database queries in apps using [React](/docs/api/integrations/react), [MobX](/docs/api/integrations/react) and [TanStack](/docs/api/integrations/tanstack) +- replacing hot-path data fetching and database queries in apps using [React](/docs/integrations/react), [MobX](/docs/integrations/mobx) and [TanStack](/docs/integrations/tanstack) - maintain live caches with automatic invalidation, as per [our Redis example](https://github.com/electric-sql/electric/blob/main/examples/redis-sync/src/index.ts) ### Roadmap diff --git a/website/docs/api/clients/elixir.md index 7502aa30ba..06ca35f246 100644 --- a/website/docs/api/clients/elixir.md +++ b/website/docs/api/clients/elixir.md @@ -2,24 +2,51 @@ outline: deep --- -# Elixir Client +# Elixir client -The Elixir client is being developed in [electric-sql/electric-next/pull/38](https://github.com/electric-sql/electric-next/pull/38). At the moment it provides a GenStage producer that can be used to stream a Shape as per: +Electric provides an [Elixir client](#how-to-use) that wraps the [HTTP API](/docs/api/http) into a higher-level stream interface and a [Phoenix integration](#phoenix-integration) that adds sync to your Phoenix application. + +## How to use + +The [`Electric.Client`](https://hex.pm/packages/electric_client) library allows you to stream [Shapes](/docs/guides/shapes) into your Elixir application. It's published to Hex as the [`electric_client`](https://hex.pm/packages/electric_client) package. 
+ +### Stream + +The client exposes a [`stream/3`](https://hexdocs.pm/electric_client/Electric.Client.html#stream/3) that streams a [Shape Log](/docs/api/http#shape-log) into an [`Enumerable`](https://hexdocs.pm/elixir/Enumerable.html): ```elixir -opts = [ - base_url: "http://...", - shape_definition: %Electric.Client.ShapeDefinition{ - table: "..." - } -] +Mix.install([:electric_client]) -{:ok, pid, stream} = Electric.Client.ShapeStream.stream(opts) +{:ok, client} = Electric.Client.new(base_url: "http://localhost:3000") + +stream = Electric.Client.stream(client, "my_table", where: "something = true") stream |> Stream.each(&IO.inspect/1) |> Stream.run() ``` -See the [shape_stream_test.exs](https://github.com/electric-sql/electric-next/blob/thruflo/elixir-client/elixir_client/test/electric/client/shape_stream_test.exs) for more details. +You can materialise the shape stream into a variety of data structures. For example by matching on insert, update and delete operations and applying them to a Map or an Ecto struct. (See the [redis-sync](https://github.com/electric-sql/electric/blob/main/examples/redis-sync/src/index.ts) example and Typescript [Shape class](https://github.com/electric-sql/electric/blob/main/packages/typescript-client/src/shape.ts) for reference). + +### Ecto queries + +The `stream/3` function also supports deriving the shape definition from an [`Ecto.Query`](https://hexdocs.pm/ecto/Ecto.Query.html): + +```elixir +import Ecto.Query, only: [from: 2] + +query = from(t in MyTable, where: t.something == true) + +stream = Electric.Client.stream(client, query) +``` + +See the documentation at [hexdocs.pm/electric_client](https://hexdocs.pm/electric_client) for more details. 
+ +## Phoenix integration + +Electric also provides an [`Electric.Phoenix`](https://hex.pm/packages/electric_phoenix) integration allows you to: + +- sync data into a [front-end app](/docs/integrations/phoenix#front-end-sync) from a Postgres-backed Phoenix application; and +- add real-time streaming from Postgres into Phoenix LiveView via [Phoenix.Streams](/docs/integrations/phoenix#liveview-sync) +See the [Phoenix framework integration page](/docs/integrations/phoenix) for more details. diff --git a/website/docs/api/clients/typescript.md b/website/docs/api/clients/typescript.md index f7ac8af1be..dcfc263aa2 100644 --- a/website/docs/api/clients/typescript.md +++ b/website/docs/api/clients/typescript.md @@ -1,5 +1,5 @@ --- -outline: deep +outline: [2, 4] --- # TypeScript client @@ -18,9 +18,31 @@ npm i @electric-sql/client ## How to use -The client exports a `ShapeStream` class for getting updates to shapes on a row-by-row basis as well as a `Shape` class for getting updates to the entire shape. +The client exports: -### `ShapeStream` +- a [`ShapeStream`](#shapestream) class for consuming a [Shape Log](../http#shape-log); and +- a [`Shape`](#shape) class for materialising the log stream into a shape object + +These compose together, e.g.: + +```ts +import { ShapeStream } from '@electric-sql/client' + +const stream = new ShapeStream({ + url: `http://localhost:3000/v1/shape`, + table: 'items' +}) +const shape = new Shape(stream) + +// The callback runs every time the Shape data changes. +shape.subscribe(data => console.log(data)) +``` + +### ShapeStream + +The [`ShapeStream`](https://github.com/electric-sql/electric/blob/main/packages/typescript-client/src/client.ts#L163) is a low-level primitive for consuming a [Shape Log](../http#shape-log). + +Construct with a shape definition and options and then either subscribe to the shape log messages directly or pass into a [`Shape`](#shape) to materialise the stream into an object. 
```tsx import { ShapeStream } from '@electric-sql/client' @@ -38,9 +60,101 @@ stream.subscribe(messages => { }) ``` -#### Custom parsing +#### Options -By default, `ShapeStream` parses the following Postgres types into native JavaScript values: +The `ShapeStream` constructor takes [the following options](https://github.com/electric-sql/electric/blob/main/packages/typescript-client/src/client.ts#L39): + +```ts +/** + * Options for constructing a ShapeStream. + */ +export interface ShapeStreamOptions { + /** + * The full URL to where the Shape is hosted. This can either be the Electric + * server directly or a proxy. E.g. for a local Electric instance, you might + * set `http://localhost:3000/v1/shape` + */ + url: string + + /** + * Which database to use. + * This is optional unless Electric is used with multiple databases. + */ + databaseId?: string + + /** + * The root table for the shape. + */ + table: string + + /** + * The where clauses for the shape. + */ + where?: string + + /** + * The columns to include in the shape. + * Must include primary keys, and can only inlude valid columns. + */ + columns?: string[] + + /** + * The "offset" on the shape log. This is typically not set as the ShapeStream + * will handle this automatically. A common scenario where you might pass an offset + * is if you're maintaining a local cache of the log. If you've gone offline + * and are re-starting a ShapeStream to catch-up to the latest state of the Shape, + * you'd pass in the last offset and shapeId you'd seen from the Electric server + * so it knows at what point in the shape to catch you up from. + */ + offset?: Offset + + /** + * Similar to `offset`, this isn't typically used unless you're maintaining + * a cache of the shape log. + */ + shapeId?: string + + /** + * HTTP headers to attach to requests made by the client. + * Can be used for adding authentication headers. + */ + headers?: Record + + /** + * Alternatively you can override the fetch function. 
+ */ + fetchClient?: typeof fetch + + /** + * Automatically fetch updates to the Shape. If you just want to sync the current + * shape and stop, pass false. + */ + subscribe?: boolean + + backoffOptions?: BackoffOptions + parser?: Parser + signal?: AbortSignal +} +``` + +#### Messages + +A `ShapeStream` consumes and emits a stream of messages. These messages can either be a `ChangeMessage` representing a change to the shape data: + +```ts +export type ChangeMessage = Row> = { + key: string + value: T + headers: Header & { operation: `insert` | `update` | `delete` } + offset: Offset +} +```` + +Or a `ControlMessage`, representing an instruction to the client, as [documented here](../http#control-messages). + +#### Parsing + +By default, when constructing a `ChangeMessage.value`, `ShapeStream` parses the following Postgres types into native JavaScript values: - `int2`, `int4`, `float4`, and `float8` are parsed into JavaScript `Number` - `int8` is parsed into a JavaScript `BigInt` @@ -50,8 +164,9 @@ By default, `ShapeStream` parses the following Postgres types into native JavaSc All other types aren't parsed and are left in the string format as they were served by the HTTP endpoint. -The `ShapeStream` can be configured with a custom parser that is an object mapping Postgres types to parsing functions for those types. -For example, we can extend the [default parser](https://github.com/electric-sql/electric/blob/main/packages/typescript-client/src/parser.ts#L14-L22) to parse booleans into `1` or `0` instead of `true` or `false`: +##### Custom parsing + +You can extend this behaviour by configuring a custom parser. This is an object mapping Postgres types to parsing functions for those types. 
For example, we can extend the [default parser](https://github.com/electric-sql/electric/blob/main/packages/typescript-client/src/parser.ts#L28-L37) to parse booleans into `1` or `0` instead of `true` or `false`: ```ts const stream = new ShapeStream({ @@ -63,11 +178,15 @@ const stream = new ShapeStream({ }) ``` -#### `Replica` +#### Replica full -By default Electric only sends the modified columns in an update message, not -the complete row. If your use case requires the receipt of the full row, not just -the modified columns, then set the `replica` of your `ShapeStream` to `full`: +By default Electric sends the modified columns in an update message, not the complete row. To be specific: + +- an `insert` operation contains the full row +- an `update` operation contains the primary key column(s) and the changed columns +- a `delete` operation contains just the primary key column(s) + +If you'd like to recieve the full row value for updates and deletes, you can set the `replica` option of your `ShapeStream` to `full`: ```tsx import { ShapeStream } from "@electric-sql/client" @@ -79,13 +198,13 @@ const stream = new ShapeStream({ }) ``` -This is less efficient and will use much more bandwidth for the same shape, -especially for tables with large static column values. +This is less efficient and will use more bandwidth for the same shape (especially for tables with large static column values). Note also that shapes with different `replica` settings are distinct, even for the same table and where clause combination. + +### Shape -Shapes with differing `replica` settings are distinct, even for the same table -and where clause combination. +The [`Shape`](https://github.com/electric-sql/electric/blob/main/packages/typescript-client/src/shape.ts) is the main primitive for working with synced data. -### `Shape` +It takes a [`ShapeStream`](#shapestream), consumes the stream, materialises it into a Shape object and notifies you when this changes. 
```tsx import { ShapeStream, Shape } from '@electric-sql/client' @@ -105,4 +224,4 @@ shape.subscribe(({ rows }) => { }) ``` -See the [Examples](https://github.com/electric-sql/electric/tree/main/examples) and [integrations](/docs/api/integrations/react) for more usage examples. +See the [Examples](https://github.com/electric-sql/electric/tree/main/examples) and [integrations](/docs/integrations/react) for more usage examples. diff --git a/website/docs/api/config.md b/website/docs/api/config.md new file mode 100644 index 0000000000..5d99c03ca5 --- /dev/null +++ b/website/docs/api/config.md @@ -0,0 +1,299 @@ +--- +title: Sync service +description: >- + Configuration options for the Electric sync engine. +outline: deep +--- + + + +# Sync service configuration + +This page documents the config options for [self-hosting](/docs/guides/deployment) the [Electric sync engine](/product/sync). + +> [!Warning] Advanced only +> You don't need to worry about this if you're using [Electric Cloud](/product/cloud). +> +> Also, the only required configuration is `DATABASE_URL`. + +## Configuration + +The sync engine is an [Elixir](https://elixir-lang.org) application developed at [packages/sync-service](https://github.com/electric-sql/electric/tree/main/packages/sync-service) and published as a [Docker](https://docs.docker.com/get-started/docker-overview) image at [electricsql/electric](https://hub.docker.com/r/electricsql/electric). + +Configuration options can be provided as environment variables, e.g.: + +```shell +docker run \ + -e "DATABASE_URL=postgresql://..." \ + -e "ELECTRIC_DB_POOL_SIZE=10" \ + -p 3000:3000 \ + electricsql/electric +``` + +These are passed into the application via [config/runtime.exs](https://github.com/electric-sql/electric/blob/main/packages/sync-service/config/runtime.exs). + +## Database + +### DATABASE_URL + + + +Postgres connection string. Used to connect to the Postgres database. 
+ +The connection string must be in the [libpg Connection URI format](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNSTRING-URIS) of `postgresql://[userspec@][hostspec][/dbname][?sslmode=]`. + +The `userspec` section of the connection string specifies the database user that Electric connects to Postgres as. They must have the `REPLICATION` role. + +For a secure connection, set the `sslmode` query parameter to `require`. + + + +### ELECTRIC_DATABASE_USE_IPV6 + + + +Set to `true` to prioritise connecting to the database over IPv6. Electric will fall back to an IPv4 DNS lookup if the IPv6 lookup fails. + + + +### ELECTRIC_DB_POOL_SIZE + + + +How many connections Electric opens as a pool for handling shape queries. + + + +### ELECTRIC_REPLICATION_STREAM_ID + + + +Suffix for the logical replication publication and slot name. + + + +## Electric + +### ELECTRIC_INSTANCE_ID + + + +A unique identifier for the Electric instance. Defaults to a randomly generated UUID. + + + +### ELECTRIC_SERVICE_NAME + + + +Name of the electric service. Used as a resource identifier and namespace. + + + +### ELECTRIC_ENABLE_INTEGRATION_TESTING + + + +Expose some unsafe operations that faciliate integration testing. +Do not enable this production. + + + +### ELECTRIC_LISTEN_ON_IPV6 + + + +By default, Electric binds to IPv4. Enable this to listen on IPv6 addresses as well. + + + +### ELECTRIC_LOG_CHUNK_BYTES_THRESHOLD + + + +Limit the maximum size of a shape log response, to ensure they are cached by +upstream caches. Defaults to 10MB (10 * 1024 * 1024). See [#1581](https://github.com/electric-sql/electric/issues/1581) for context. + + + +### ELECTRIC_LOG_OTP_REPORTS + + + +Enable [OTP SASL](https://www.erlang.org/doc/apps/sasl/sasl_app.html) reporting at runtime. + + + +### ELECTRIC_PORT + + + +Port that the [HTTP API](/docs/api/http) is exposed on. + + + +## Caching + +### ELECTRIC_CACHE_MAX_AGE + + + +Default `max-age` for the cache headers of the HTTP API. 
+ + + +### ElECTRIC_CACHE_STALE_AGE + + + +Default `stale-age` for the cache headers of the HTTP API. + + + +## Storage + +### ELECTRIC_PERSISTENT_STATE + + + +Where to store shape metadata. Defaults to storing on the filesystem. +If provided must be one of `MEMORY` or `FILE`. + + + +### ELECTRIC_STORAGE + + + +Where to store shape logs. Defaults to storing on the filesystem. +If provided must be one of `MEMORY` or `FILE`. + + + +### ELECTRIC_STORAGE_DIR + + + +Path to root folder for storing data on the filesystem. + + + +## Telemetry + +### ELECTRIC_OTLP_ENDPOINT + + + +Set an [OpenTelemetry](https://opentelemetry.io/docs/what-is-opentelemetry/) endpoint URL +to enable telemetry. + + + +### ELECTRIC_OTEL_DEBUG + + + +Debug tracing by printing spans to stdout, without batching. + + + +### ELECTRIC_HNY_API_KEY + + + +[Honeycomb.io](https://www.honeycomb.io) api key. Specify along with `HNY_DATASET` to +export traces directly to Honeycomb, without the need to run an OpenTelemetry Collector. + + + +### ELECTRIC_HNY_DATASET + + + +Name of your Honeycomb Dataset. + + + +### ELECTRIC_PROMETHEUS_PORT + + + +Expose a prometheus reporter for telemetry data on the specified port. + + + +### ELECTRIC_STATSD_HOST + + + +Enable sending telemetry data to a StatsD reporting endpoint. + + diff --git a/website/docs/api/http.md b/website/docs/api/http.md index a7210fb563..1a4b8244a3 100644 --- a/website/docs/api/http.md +++ b/website/docs/api/http.md @@ -5,11 +5,20 @@ description: >- outline: deep --- + + # HTTP API The HTTP API is the primary, low level API for syncing data with Electric. 
-Normative API documentation is published as an [OpenAPI](https://www.openapis.org/what-is-openapi) specification: +## HTTP API specification + +API documentation is published as an [OpenAPI](https://www.openapis.org/what-is-openapi) specification: - [download the specification file](https://github.com/electric-sql/electric/blob/main/website/electric-api.yaml) to view or use with other OpenAPI [tooling](https://tools.openapis.org/) - view the HTML documentation generated using [Redocly](https://redocly.com) @@ -41,7 +50,7 @@ When you sync a shape from Electric, you get the data in the form of a log of lo The `offset` that you see in the messages and provide as the `?offset=...` query parameter in your request identifies a position in the log. The messages you see in the response are shape log entries (the ones with `value`s and `action` headers) and control messages (the ones with `control` headers). -The Shape Log is similar conceptually to the logical replication stream from Postgres. Except that instead of getting all the database operations, you're getting the ones that affect the data in your Shape. It's then the responsibility of the client to consume the log and materialize out the current value of the shape. The values included in the shape log are strings formatted according to Postgres' display settings. The [OpenAPI](https://www.openapis.org/what-is-openapi) specification defines the display settings the HTTP API adheres to. +The Shape Log is similar conceptually to the logical replication stream from Postgres. Except that instead of getting all the database operations, you're getting the ones that affect the data in your Shape. It's then the responsibility of the client to consume the log and materialize out the current value of the shape.
@@ -56,6 +65,8 @@ The Shape Log is similar conceptually to the logical replication stream from Pos
+The values included in the shape log are strings formatted according to Postgres' display settings. The OpenAPI specification defines the display settings the HTTP API adheres to. + ### Initial sync request When you make an initial sync request, with `offset=-1`, you're telling the server that you want the whole log, from the start for a given shape. @@ -99,3 +110,73 @@ The server holds open the request until either a timeout (returning `204 No cont The algorithm for consuming the HTTP API described above can be implemented from scratch for your application. Howerver, it's typically implemented by clients that can be re-used and provide a simpler interface for application code. There are a number of existing clients, such as the [TypeScript](/docs/api/clients/typescript) and [Elixir](/docs/api/clients/elixir) clients. If one doesn't exist for your language or environment, we hope that the pattern is simple enough that you should be able to write your own client quite simply. + +## Caching + +HTTP API responses contain cache headers, including `cache-control` with `max-age` and `stale-age` and `etag`. These work out-of-the-box with caching proxies, such as [Nginx](https://nginx.org/en), [Caddy](https://caddyserver.com) or [Varnish](https://varnish-cache.org), or a CDN like [Cloudflare](https://www.cloudflare.com/en-gb/application-services/products/cdn) or [Fastly](https://www.fastly.com/products/cdn). + +There are three aspects to caching: + +1. [accelerating initial sync](#accelerating-initial-sync) +2. [caching in the browser](#caching-in-the-browser) +3. [coalescing live requests](#coalescing-live-requests) + +### Accelerating initial sync + +When a client makes a `GET` request to fetch shape data at a given `offset`, the response can be cached. Subsequent clients requesting the same data can be served from the proxy or CDN. This removes load from Electric (and from Postgres) and allows data to be served extremely quickly, at the edge by an optimised CDN. 
+ +You can see an example Nginx config at [packages/sync-service/dev/nginx.conf](https://github.com/electric-sql/electric/blob/main/packages/sync-service/dev/nginx.conf): + +<<< @../../packages/sync-service/dev/nginx.conf{nginx} + +### Caching in the browser + +Requests are also designed to be cached by the browser. This allows apps to cache and avoid re-fetching data. + +For example, say a page loads data by syncing a shape. + +
+ + + Console showing initial request loading from the network + +
+ +The next time the user navigates to the same page, the data is in the browser file cache. + +
+ + + Console showing subsequent requests loading from the browser's file cache + +
+ +This can make data access instant and available offline, even without using a persistent local store. + +### Coalescing live requests + +Once a client has requested the initial data for a shape, it switches into [live mode](#live-mode), using long polling to wait for new data. When new data arrives, the client reconnects to wait for more data, and so on. + +Most caching proxies and CDNs support a feature called [request coalescing](https://info.varnish-software.com/blog/two-minutes-tech-tuesdays-request-coalescing). This identifies requests to the same resource, queues them on a waiting list, and only sends a single request to the origin. + +
+
+ +
+
+ +Electric takes advantage of this to optimise realtime delivery to large numbers of concurrent clients. Instead of Electric holding open a connection per client, this is handled at the CDN level and allows us to coalesce concurrent long-polling requests in live mode. + +This is how Electric can support millions of concurrent clients with minimal load on the sync service and no load on the source Postgres. \ No newline at end of file diff --git a/website/docs/api/integrations/mobx.md b/website/docs/api/integrations/mobx.md deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/website/docs/api/integrations/react.md b/website/docs/api/integrations/react.md deleted file mode 100644 index ced62680da..0000000000 --- a/website/docs/api/integrations/react.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -outline: deep ---- - -# React integration - -To use Electric with React, we maintain a React provider and hook to simplify reading shape data in components. - -Example usage in a component. -```tsx -import { useShape } from "@electric-sql/react" - -export default function MyComponent() { - const { isLoading, lastSyncedAt, data } = useShape<{ title: string}>({ - url: `http://localhost:3000/v1/shape`, - table: `foo`, - }) - - if (isLoading) { - return
loading
- } - - return ( -
- {data.map(foo =>
{foo.title}
)} -
- ) -} -``` -Other helpful functions: - -- `preloadShape` — useful to call in route loading functions or elsewhere when you want to ensure Shape data is loaded before rendering a route or component. -- `getShapeStream` — get (or create) a ShapeStream off the global cache -- `getShape` — get (or create) a Shape off the global cache - diff --git a/website/docs/api/integrations/redis.md b/website/docs/api/integrations/redis.md deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/website/docs/api/integrations/tanstack.md b/website/docs/api/integrations/tanstack.md deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/website/docs/guides/deployment.md b/website/docs/guides/deployment.md index 01dddab87c..fc48ab480a 100644 --- a/website/docs/guides/deployment.md +++ b/website/docs/guides/deployment.md @@ -1,34 +1,151 @@ --- title: Deployment - Guide description: >- - Considerations to take into account when deploying Electric. -outline: deep + How to deploy the Electric sync engine. +outline: [2, 3] --- # Deployment -This page is under construction. +How to deploy the [Electric sync engine](/product/sync), with links to integration docs for specific platforms like [Supabase](/docs/integrations/supabase), [Neon](/docs/integrations/neon), [Render](/docs/integrations/render) and [AWS](/docs/integrations/aws). -Below you'll find basic information about where Electric keeps persistent state that is necessary for it to work correctly and to resume syncing after a restart of the sync service. +> [!TIP] Electric Cloud – the simplest way to use Electric +> The simplest way to use Electric is via the [Electric Cloud](/product/cloud), which is a simple, scalable, low-cost, managed Electric hosting service. +> +>

+> +> View Cloud +>

-## Data persistence +## The ingredients of a successful deployment -### Postgres +An Electric deployment has three main components. Your Postgres database, the Electric sync service and your app. -When running, Electric keeps a pool of active database connections for processing new shape requests and for streaming transactions from Postgres to shape consumers in realtime. It creates a replication slot and a publication inside the Postgres database configured via `DATABASE_URL`, the two of which ensure continuous replication of changes even in the face of restarts of the sync service or Postgres. +Electric connects to your Postgres using a `DATABASE_URL`. Your app connects to Electric [over HTTP](/docs/api/http), usually using a [Client library](/docs/api/clients/typescript). -If you decide to stop using Electric with a given Postgres database or switch to a different database but keep the old one around, make sure to clean up both the publication and the replication slot. See this [troubleshooting advice](./troubleshooting#wal-growth-mdash-why-is-my-postgres-database-storage-filling-up) for details. +
Illustration of the main components of a successful deployment
-### Shape data storage +As a result, there are three ingredients to a successful Electric deployment: -Electric uses persistent storage outside of Postgres to store shape metadata and [shape logs](/docs/api/http#shape-log). By default, it creates a directory named `persistent` in the currrent working directory where it's running. This is fine for development, but not suitable for a production setup. +1. you need to be [running a Postgres database](#_1-running-postgres) +2. you need to [run and connect the Electric sync service](#_2-running-electric) +3. you need your app/client to [connect to Electric over HTTP](#_3-connecting-your-app) -The path to Electric's persistent storage can be configured via the `ELECTRIC_STORAGE_DIR` environment variable, e.g. `ELECTRIC_STORAGE_DIR=/var/lib/electric/persistent`. Electric will create the directory at that path if it doesn't exist yet but you need to make sure that the OS user that it's running as has the necessary permissions in the parent directory. +### Proxying requests to Electric -Naturally, the file system location configured via `ELECTRIC_STORAGE_DIR` and the data Electric stores there must survive sync service's restarts. When using Docker as the runtime environment, you can create a volume and use a path inside it as `ELECTRIC_STORAGE_DIR`. When using Kubernetes, you'll want to create a persistent volume and attach it to your Electric deployment. +You also often want to proxy requests to Electric through your API, or other proxy. For example to implement [auth](./auth) and/or [caching](/docs/api/http#caching). In these cases, you'll also need to deploy your API and/or proxy layer in front of Electric. -### Maintaining shape consistency +Note also that, when running Electric behind a CDN, you may want your proxy in front of the CDN. This is where primitives like [edge functions](/docs/integrations/supabase#sync-into-edge-function) and [edge workers](/docs/integrations/cloudflare#workers) can very useful. 
-To ensure consistent syncing of a subset of data from Postgres to a shape consumer, Electric needs to look at every single transaction committed in Postgres that touches any of the tables included in its active shapes. That's the reason for creating a publication, instructing Postgres to start replicating operations on specified tables, and a replication slot, instructing Postgres to hold on to WAL files until Electric processes all transactions contained in them. +## 1. Running Postgres -The persistent state that Electric maintains in Postgres must stay in sync with the shape data stored on disk, outside of the database cluster. If you change the value of `ELECTRIC_STORAGE_DIR` or switch to a different `DATABASE_URL` at any point, you must clean up the other location by hand, whether it's removing a directory tree on disk or dropping the replication slot and publication in Postgres. +You can use ***any standard Postgres***, version 14 and above. + +This includes Postgres you host yourself, or Postgres hosted by managed database hosting providers, including: + +- [Supabase](/docs/integrations/supabase) +- [Neon](/docs/integrations/neon) +- [AWS (RDS and Aurora)](/docs/integrations/aws) +- [GCP (Cloud SQL and Alloy)](/docs/integrations/gcp) +- [Digital Ocean](/docs/integrations/digital-ocean) +- [Crunchy](/docs/integrations/crunchy) + +Postgres must have [logical replication](https://www.postgresql.org/docs/current/logical-replication-config.html) enabled. You also need to connect as a database user that has the [`REPLICATION`](https://www.postgresql.org/docs/current/logical-replication-security.html) role. + +### Data model compatibility + +Electric is compatible with ***any Postgres data model***. + +Electric will work as a drop on to any existing data model. There are no limitations on the database features, data types or extensions you can use. 
+ +### Connecting to Postgres + +You connect to Postgres using a [`DATABASE_URL`](/docs/api/config#database-url) env var. This connection string contains your user credentials and an `sslmode` parameter. + +You usually want to connect directly to Postgres and not via a connection pool. This is because Electric uses logical replication and most connection poolers don't support it. (pgBouncer does support logical replication, [as of version 1.23](https://www.pgbouncer.org/changelog.html#pgbouncer-123x) so this may change in future). + +> [!Tip] Troubleshooting common errors +> If you get a TCP connection error saying `non-existing domain - :nxdomain` or `network is unreachable - :enetunreach` then you may need to connect using IPv6. You can enable this by setting [`ELECTRIC_DATABASE_USE_IPV6=true`](/docs/api/config#database-use-ipv6). +> +> If you get a TCP connection `timeout` error then make sure you're connecting directly to Postgres and not via a connection pool. For example, when using [Supabase](/docs/integrations/supabase) you need to untick their "Use connection pooling" option on the database settings page. +> +> If you're using IPv6 with Docker, then assuming the machine you're running Electric on has IPv6 connectivity, you may also need to enable IPv6 for the Docker daemon. You can do this by [defining an IpV6-capable network](https://docs.docker.com/engine/daemon/ipv6/#create-an-ipv6-network)) in your Compose file and then adding the `networks` key to the Electric service definition. + +### Database resources + +Electric creates a logical replication [publication](https://www.postgresql.org/docs/current/logical-replication-publication.html) and [replication slot](https://www.postgresql.org/docs/current/logical-replication-subscription.html#LOGICAL-REPLICATION-SUBSCRIPTION-SLOT) inside Postgres. These are called `electric_publication_default` and `electric_slot_default` by default. 
You can configure the name suffix using the [`ELECTRIC_REPLICATION_STREAM_ID`](/docs/api/config#replication-stream-id) env var. + +When running, Electric also keeps a pool of active database connections open. The size of this pool defaults to `20` and can be configured using [`ELECTRIC_DB_POOL_SIZE`](/docs/api/config#electric-db-pool-size). + +> [!Tip] Cleaning up resources +> If you decide to stop using Electric with a given Postgres database or switch to a different database but keep the old one around, make sure to clean up both the publication and the replication slot. +> +> See this [troubleshooting advice](./troubleshooting#wal-growth-mdash-why-is-my-postgres-database-storage-filling-up) for details. + +## 2. Running Electric + +The [Electric sync engine](/product/sync) is an Elixir web service, packaged using Docker. + +You can deploy it anywhere you can run a container with a filesystem and exposed HTTP port. This includes cloud and application hosting platforms like: + +- [AWS](/docs/integrations/aws) +- [GCP](/docs/integrations/gcp) +- [Digital Ocean](/docs/integrations/digital-ocean) +- [Fly.io](/docs/integrations/fly) +- [Render](/docs/integrations/render) + +### Docker container + +Images are deployed to Docker Hub at [electricsql/electric](https://hub.docker.com/r/electricsql/electric). + +### Disk storage + +Electric caches [Shape logs](/docs/api/http#shape-log) and metadata on the filesystem. Your web hosting should provide a locally mounted, persistent filesystem. + +The path to Electric's persistent storage can be configured via the [`ELECTRIC_STORAGE_DIR`](/docs/api/config#electric-storage-dir) environment variable, e.g. `ELECTRIC_STORAGE_DIR=/var/lib/electric/persistent`. Electric will create the directory at that path if it doesn't exist yet. However, you need to make sure that the OS user that Electric is running as has the necessary permissions in the parent directory. 
Naturally, the file system location configured via `ELECTRIC_STORAGE_DIR` and the data Electric stores there must survive the sync service's restarts.
Connecting your app + +You can then connect your app to Electric [over HTTP](/docs/api/http). Typically you use a [Client library](/docs/api/clients/typescript) and configure the URL in the constructor, e.g.: + +```ts +const stream = new ShapeStream({ + url: `https://your-electric-service.example.com/v1/shape`, + table: 'foo' +}) +const shape = new Shape(stream) +``` + +You can connect to Electric from any language/environment that speaks HTTP. See the [HTTP API](/docs/api/http) and [Client docs](/docs/api/clients/typescript) for more information. diff --git a/website/docs/guides/installation.md b/website/docs/guides/installation.md index 22bd9d3fd6..47316c5775 100644 --- a/website/docs/guides/installation.md +++ b/website/docs/guides/installation.md @@ -42,7 +42,7 @@ docker run \ ### Postgres requirements -You can use any Postgres (new or existing) that has [logical replication](https://www.postgresql.org/docs/current/logical-replication-config.html) enabled. You also need to connect as a database user that has the [`REPLICATION`](https://www.postgresql.org/docs/current/logical-replication-security.html) privilege. +You can use any Postgres (new or existing) that has [logical replication](https://www.postgresql.org/docs/current/logical-replication-config.html) enabled. You also need to connect as a database user that has the [`REPLICATION`](https://www.postgresql.org/docs/current/logical-replication-security.html) role. ## Advanced @@ -83,7 +83,7 @@ mix run --no-halt This will try to connect to Postgres using the `DATABASE_URL` configured in [packages/sync-service/.env.dev](https://github.com/electric-sql/electric/blob/main/packages/sync-service/.env.dev), which defaults to: -<<< @/../packages/sync-service/.env.dev +<<< @/../packages/sync-service/.env.dev{shell} You can edit this file to change the configuration. 
To run the tests, you'll need a Postgres running that matches the `:test` env config in [config/runtime.exs](https://github.com/electric-sql/electric/blob/main/packages/sync-service/config/runtime.exs) and then: diff --git a/website/docs/guides/shapes.md b/website/docs/guides/shapes.md index 704f21da08..26f4495174 100644 --- a/website/docs/guides/shapes.md +++ b/website/docs/guides/shapes.md @@ -83,10 +83,9 @@ You can use logical operators like `AND` and `OR` to group multiple conditions, ## Subscribing to shapes -Local clients establish shape subscriptions, typically using [client libraries](/docs/api/clients/typescript). These sync data from the [Electric sync service](/product/sync) into the client using the [HTTP API](/docs/api/http). +Local clients establish shape subscriptions, typically using [client libraries](/docs/api/clients/typescript). These sync data from the [Electric sync engine](/product/sync) into the client using the [HTTP API](/docs/api/http). -The sync service maintains shape subscriptions and streams any new data and data changes to the local -client. In the client, shapes can be held as objects in memory, for example using a [`useShape`](/docs/api/integrations/react) hook, or in a normalised store or database like [PGlite](/product/pglite). +The sync service maintains shape subscriptions and streams any new data and data changes to the local client. In the client, shapes can be held as objects in memory, for example using a [`useShape`](/docs/integrations/react) hook, or in a normalised store or database like [PGlite](/product/pglite). ### HTTP @@ -140,7 +139,7 @@ shape.subscribe(({ rows }) => { }) ``` -Or you can use framework integrations like the [`useShape`](/docs/api/integrations/react) hook to automatically bind materialised shapes to your components. +Or you can use framework integrations like the [`useShape`](/docs/integrations/react) hook to automatically bind materialised shapes to your components. 
See the [Quickstart](/docs/quickstart) and [HTTP API](/docs/api/http) docs for more information. diff --git a/website/docs/integrations/aws.md b/website/docs/integrations/aws.md new file mode 100644 index 0000000000..f418ff30ff --- /dev/null +++ b/website/docs/integrations/aws.md @@ -0,0 +1,52 @@ +--- +outline: deep +title: Amazon Web Services (AWS) - Integrations +image: /img/integrations/electric-aws.jpg +--- + + + +# Amazon Web Services (AWS) + +AWS is a cloud infrastructure platform. + +## Electric and AWS + +You can use AWS to deploy any or all components of the Electric stack: + +- [deploy a Postgres database](#deploy-postgres) +- [an Electric sync service](#deploy-electric) +- [your client application](#deploy-your-app) + +If you already run Postgres in AWS, potentially using RDS or Aurora, then it's a great idea to also deploy Electric within the same network. + +> [!Tip] Need context? +> See the [Deployment guide](/docs/guides/deployment) for more details. + +### Deploy Postgres + +AWS provides Postgres hosting via RDS and Aurora. Electric works with either. You need to configure them to enable logical replication and connect with the right user. + +The default `wal_level` is `minimal` for RDS and `replica` for Aurora. It can be changed to `logical` by creating a [custom parameter group](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_WorkingWithParamGroups.html) for the RDS instance (or the Aurora DB cluster) and setting the value of the `rds.logical_replication` parameter to `1` and rebooting the instance. + +The default `postgres` user has the `REPLICATION` role. If you need to add it to another user you can do so by granting the `rds_replication` role, e.g.: + +```sql +GRANT rds_replication TO someuser; +``` + +### Deploy Electric + +AWS provides a [wide range of container hosting](https://aws.amazon.com/containers). 
For example, you can deploy Electric to [AWS Elastic Container Service](https://aws.amazon.com/ecs) using [AWS Fargate](https://aws.amazon.com/fargate).
+
+You should store Shape logs to a persistent disk (not an ephemeral filesystem). For example using [Amazon Elastic File System](https://aws.amazon.com/efs).
+ +### Workers + +You can also use [Cloudflare Workers](https://workers.cloudflare.com) in front of the CDN to handle concerns like authorisation and routing. + +#### Auth example + +For example, you could validate an auth token to protect access to a shape and then proxy the request through to Electric: + +```ts +export default { + async fetch(request): Promise { + const ELECTRIC_URL = 'https://my-electric.example.com' + + const headers = request.headers + const authHeader = request.headers.get('Authorization') + const isValid = (header) => { /* ... e.g.: verify JWT ... */ } + if (!isValid(authHeader)) { + return new Response('Forbidden', {status: 403}) + } + + if (request.method != `GET`) { + return new Response('Method Not Allowed', {status: 405}) + } + + const url = new URL(request.url) + const shapeUrl = `${ELECTRIC_URL}${url.pathname}` + const clonedHeaders = new Headers(new Request(request).headers) + + return await fetch( + shapeUrl, { + headers: clonedHeaders, + cf: { cacheEverything: true } + } + ) + }, +} satisfies ExportedHandler; +``` + +#### Syncing data into the worker + +Or you can use Electric to hydrate data quickly into an edge worker. 
For example, you could sync data into an edge worker to dynamically redirect the request: + +```ts +import { ShapeStream, Shape } from '@electric-sql/client' + +export default { + async fetch(request): Promise { + const ELECTRIC_URL = 'https://my-electric.example.com' + + const stream = new ShapeStream({ + url: `${ELECTRIC_URL}/v1/shape`, + table: 'routes' + }) + const shape = new Shape(stream) + const routes = await shape.value + + const url = new URL(request.url) + const match = routes.find(x => x.path == url.pathname) + + if (!match) { + return new Response('Not Found', {status: 404}) + } + + return Response.redirect(match.redirect, 301) + }, +} satisfies ExportedHandler; +``` + +### Durable Objects + +You can implement a similar pattern to the [sync example above](#syncing-data-into-the-worker) to sync data into a Durable Object. + +The key difference is that with a [Durable Object](https://developers.cloudflare.com/durable-objects/), the data can be persisted across requests. This allows you to sync a shape log into the Durable Object, materialise the shape into persistent storage and then re-sync the latest changes whenver the Durable Object is accessed. + +You can see a demo of this pattern, using SQLite to persist the Shape data, at [KyleAMathews/electric-demo-cloudflare-sqlite](https://github.com/KyleAMathews/electric-demo-cloudflare-sqlite): + + + +> [!Tip] Combining CDN and Durable Objects +> Note that if you sync data into a Durable Object (or a Worker) [from Cloudflare's CDN](#cdn) it can be *extremely fast* — with high bandwidth and low network latency. + + + example apps using Cloudflare and/or wrap up the code samples above into a library. 
+ \ No newline at end of file diff --git a/website/docs/integrations/crunchy.md b/website/docs/integrations/crunchy.md new file mode 100644 index 0000000000..389d988ef4 --- /dev/null +++ b/website/docs/integrations/crunchy.md @@ -0,0 +1,43 @@ +--- +outline: deep +title: Crunchy Data - Integrations +image: /img/integrations/electric-crunchy.jpg +--- + + + +# Crunchy Data + +Crunchy is a Postgres hosting provider. + +## Electric and Crunchy + +You can use Electric with [Crunchy Bridge](https://www.crunchydata.com/products/crunchy-bridge), their managed cloud Postgres product. + +> [!Tip] Need context? +> See the [Deployment guide](/docs/guides/deployment) for more details. + +### Deploy Postgres + +[Sign up to Crunchy Bridge](https://crunchybridge.com/register) and go through the steps to create a cluster. + +Go to the "Connection" tab, select "URL", set the role to "postgres (superuser)" and copy the connection string. + +You can then run Electric with this connection string as the `DATABASE_URL`, e.g.: + +```shell +docker run -it \ + -e "DATABASE_URL=postgres://postgres:****@p.YOUR_CLUSTER_ID.db.postgresbridge.com:5432/postgres" \ + electricsql/electric:latest +``` + +You can also use the `postgres` superuser to create other users with the `REPLICATION` role, e.g.: + +```sql +CREATE ROLE electric WITH REPLICATION LOGIN PASSWORD '...'; +GRANT ALL PRIVILEGES ON DATABASE "postgres" to electric; +``` + +You can then connect +> [!Tip] Need somewhere to host Electric? +> If you need somewhere to deploy Electric then [Crunchy works well](https://neon.tech/docs/guides/render) with [Render](./render#deploy-electric). 
\ No newline at end of file diff --git a/website/docs/integrations/digital-ocean.md b/website/docs/integrations/digital-ocean.md new file mode 100644 index 0000000000..abdc29c904 --- /dev/null +++ b/website/docs/integrations/digital-ocean.md @@ -0,0 +1,139 @@ +--- +outline: deep +title: Digital Ocean - Integrations +image: /img/integrations/electric-digital-ocean.jpg +--- + + + +# Digital Ocean + +Digital Ocean is a cloud hosting platform. + +## Electric and Digital Ocean + +You can use Digital Ocean to deploy any or all components of the Electric stack: + +- [deploy a Postgres database](#deploy-postgres) +- [an Electric sync service](#deploy-electric) +- [your client application](#deploy-your-app) + +If you already run a Managed Postgres in Digital Ocean, then it's a great idea to also deploy Electric within the same network. + +> [!Tip] Need context? +> See the [Deployment guide](/docs/guides/deployment) for more details. + +### Deploy Postgres + +Digital Ocean provides [Managed Postgres](https://docs.digitalocean.com/products/databases/postgresql/). This has logical replication enabled and works with Electric out of the box. + +> [!Tip] Use doadmin for older Postgres versions +> If you're using Postgres version 15 or lower, you will need to connect to your Managed Postgres as the `doadmin` user. This is the default user and the only user with the `REPLICATION` role. +> +> (With later Postgres versions its fine to create other users and use the `doadmin` user to grant them the `REPLICATION` role). + +### Deploy Electric + +Digital Ocean has a number of different ways to deploy web services. We recommend using a [Docker Droplet](https://marketplace.digitalocean.com/apps/docker). + +Below we walk through the steps to deploy Electric using a Docker Droplet. First you create the Droplet. Then setup some Docker / SSH networking so your local Docker can talk to it. Then use Docker Compose to run Electric inside the Droplet. 
+ +> [!Warning] Don't use App Platform +> We **don't recommend** that you use [App Platform](https://docs.digitalocean.com/products/app-platform/) to deploy the Electric sync service because App Platform does not provide persistent file storage for Shape logs. + +#### Create Droplet + +Go to the [Docker marketplace page](https://marketplace.digitalocean.com/apps/docker) and click on the "Create Docker Droplet" button. Follow the prompts. You **must** use key-based SSH authentication (so that you can set up your local Docker to talk to the remote daemon). It's a good idea to change the hostname to something like `electric-sync` as well. + +Create the Droplet and wait until its ready with an IPv4 address. Copy the address and use it in place of `YOUR_IP_ADDRESS` in the instructions that follow. + +#### Connect Docker + +Connect to your new Droplet using `ssh` in order to verify the authenticity of the host and add its public key to your local `known_hosts` file. + +```console +$ ssh root@YOUR_IP_ADDRESS +... +Are you sure you want to continue connecting (yes/no/[fingerprint])? yes +Warning: Permanently added 'YOUR_IP_ADDRESS' (ED25519) to the list of known hosts. +``` + +> [!Warning] Permission denied? +> If the output from the command above ends with: +> +> ... Permission denied (publickey). +> +> Then you need to add a section to your `~/.ssg/config` to tell it to use your SSH key +> when connecting to `YOUR_IP_ADDRESS`. 
Something like this will do: +> +> ``` +> Host YOUR_IP_ADDRESS +> Port 22 +> Hostname YOUR_IP_ADDRESS +> AddKeysToAgent yes +> IdentitiesOnly yes +> IdentityFile ~/.ssh/path_to_your_private_ssh_key +> TCPKeepAlive yes +> UseKeychain yes +> ``` + +Now set the `DOCKER_HOST` environment variable to point to your Droplet's IP address: + +```shell +export DOCKER_HOST=ssh://root@YOUR_IP_ADDRESS +``` + +#### Deploy + +Save the following contents into a file called `compose.yaml`, changing the `DATABASE_URL` and setting [any other environment variables](/docs/api/config) to match your setup. + +```yaml +services: + electric: + image: electricsql/electric:latest + environment: + DATABASE_URL: "postgresql://..." + ports: + - 80:3000 + restart: always +``` + +Now launch on the remote server, with output that should look something like this: + +```console +$ docker compose up +[+] Running 8/8 + ✔ electric 7 layers [⣿⣿⣿⣿⣿⣿⣿] 0B/0B Pulled 8.2s + ✔ efc2b5ad9eec Pull complete 3.4s + ✔ 2cb0d575dcef Pull complete 4.5s + ✔ c1b251d76665 Pull complete 4.6s + ✔ c82981779fd9 Pull complete 4.7s + ✔ 65b429e477c5 Pull complete 4.8s + ✔ 1fd7ee9efb04 Pull complete 6.0s + ✔ 87053f06541e Pull complete 6.1s +[+] Running 2/2 + ✔ Network electric-sync-droplet_default Created 0.2s + ✔ Container electric-sync-droplet-electric-1 Created 0.2s +Attaching to electric-sync-droplet-electric-1 +electric-sync-droplet-electric-1 | =INFO REPORT==== 23-Oct-2024::13:16:01.777082 === +electric-sync-droplet-electric-1 | Loading 140 CA(s) from otp store +electric-sync-droplet-electric-1 | 13:16:01.832 [info] Running Electric.Plug.Router with Bandit 1.5.5 at 0.0.0.0:3000 (http) +electric-sync-droplet-electric-1 | 13:16:01.935 [info] Acquiring lock from postgres with name electric_slot_default +electric-sync-droplet-electric-1 | 13:16:01.937 [info] Lock acquired from postgres with name electric_slot_default +electric-sync-droplet-electric-1 | 13:16:02.006 [info] Postgres server version = 160004, system identifier = 
7428958789530034185, timeline_id = 1 +electric-sync-droplet-electric-1 | 13:16:02.145 [info] No previous timeline detected. +electric-sync-droplet-electric-1 | 13:16:02.146 [info] Connected to Postgres and timeline +electric-sync-droplet-electric-1 | 13:16:02.147 [info] Starting shape replication pipeline +electric-sync-droplet-electric-1 | 13:16:02.150 [info] Starting replication from postgres +``` + +You can hit the health check endpoint to verify that everything is running OK: + +```console +$ curl http://YOUR_IP_ADDRESS/v1/health +{"status":"active"} +``` + +### Deploy your app + +You can deploy [your client app to Digital Ocean using App Platform](https://www.digitalocean.com/community/tutorials/how-to-deploy-a-static-website-to-the-cloud-with-digitalocean-app-platform). \ No newline at end of file diff --git a/website/docs/integrations/expo.md b/website/docs/integrations/expo.md new file mode 100644 index 0000000000..13d3d1c148 --- /dev/null +++ b/website/docs/integrations/expo.md @@ -0,0 +1,67 @@ +--- +outline: deep +title: Expo - Integrations +image: /img/integrations/electric-expo.jpg +--- + + + +# Expo + +Expo is a platform that helps you deploy React Native applications. + +## Electric and Expo + +Expo applications are developed in Javacript (or Typescript) using [React Native](https://reactnative.dev). + +You can use the Electric [Typescript client](/docs/api/clients/typescript) in your Expo applications. This allows you to sync data from Electric into mobile apps. + +> [!Tip] Need context? +> See the [Deployment guide](/docs/guides/deployment) for more details. + +## Example + +Follow the [Expo Quickstart](https://docs.expo.dev/get-started/create-a-project/) to create an Expo app. Replace the generated `./app/(tabs)/index.tsx` with the following: + +```tsx +import { Text } from 'react-native' +import { useShape } from '@electric-sql/react' + +// Edit to match your setup. 
+const ELECTRIC_URL = 'https://my-electric-sync-service.example.com' + +export default function HomeScreen() { + const { isLoading, data } = useShape({ + url: `${ELECTRIC_URL}/v1/shape`, + table: 'items' + }) + + if (isLoading) { + return null + } + + return ( + { JSON.stringify(data, null, 4) } + ) +} +``` + +Install `@electric-sql/react` (if necessary using `--force` to work around a React dependency version mismatch): + +```shell +npm install '@electric-sql/react' --force +``` + +Run, e.g. in the browser: + +```shell +npm run web +``` + +If there's data in the `items` table of your Postgres, you should see it syncing into your app. + +## PGlite + +[PGlite](https://pglite.dev) doesn't *yet* work in React Native. + +We have an [open issue tracking support for it](https://github.com/electric-sql/pglite/issues/87). When it does, we hope to work with the Expo team to get an official `expo-pglite` package published. \ No newline at end of file diff --git a/website/docs/integrations/fly.md b/website/docs/integrations/fly.md new file mode 100644 index 0000000000..49e9dbbc83 --- /dev/null +++ b/website/docs/integrations/fly.md @@ -0,0 +1,71 @@ +--- +outline: deep +title: Fly.io - Integrations +image: /img/integrations/electric-fly.jpg +--- + + + +# Fly.io + +[Fly.io](https://fly.io) is a public cloud built for developers who ship. + +## Electric and Fly + +You *can* use Fly to deploy any or all components of the Electric stack: + +- [deploy a Postgres database](#deploy-postgres) +- [an Electric sync service](#deploy-electric) +- [your client application](#deploy-your-app) + +Fly's sweet spot is deploying Elixir applications, like the Electric sync service and/or [Phoenix aplications](./phoenix). + +> [!Tip] Need context? +> See the [Deployment guide](/docs/guides/deployment) for more details. + +### Deploy Postgres + +Fly isn't really a managed Postgres host. 
They do offer [database hosting](https://fly.io/docs/database-storage-guides/#managed-database-services) but they prefer to offload it to other providers, such as [Supabase](./supabase). + +### Deploy Electric + +Copy the following config into a file called `fly.toml`, replacing the app name and `DATABASE_URL`: + +```toml +app = "YOUR_UNIQUE_APP_NAME" + +[build] + image = "electricsql/electric:latest" + +[env] + DATABASE_URL = "postgresql://..." + DATABASE_USE_IPV6 = true + +[http_service] + internal_port = 3000 + force_https = true + + [[http_service.checks]] + interval = "10s" + timeout = "2s" + grace_period = "20s" + method = "GET" + path = "/v1/health" +``` + +Using the [`flyctl` client](https://fly.io/docs/flyctl/install/), in the same directory as `fly.toml`, run: + +```shell +flyctl launch --copy-config --ha=false +``` + +Hit the health check endpoint to verify that everything is running OK: + +```console +$ curl https://YOUR_UNIQUE_APP_NAME.fly.dev/v1/health +{"status":"active"} +``` + +### Deploy your app + +You can run most kinds of apps on Fly, including [static sites](https://fly.io/docs/languages-and-frameworks/static/). \ No newline at end of file diff --git a/website/docs/integrations/gcp.md b/website/docs/integrations/gcp.md new file mode 100644 index 0000000000..47d5b70ffb --- /dev/null +++ b/website/docs/integrations/gcp.md @@ -0,0 +1,68 @@ +--- +outline: deep +title: Google Cloud Platform (GCP) - Integrations +image: /img/integrations/electric-gcp.jpg +--- + + + +# Google Cloud Platform (GCP) + +GCP is a cloud infrastructure platform. + +## Electric and GCP + +You can use GCP to deploy any or all components of the Electric stack: + +- [deploy a Postgres database](#deploy-postgres) +- [an Electric sync service](#deploy-electric) +- [your client application](#deploy-your-app) + +If you already run Postgres in GCP, then it's a great idea to also deploy Electric within the same network. + +> [!Tip] Need context? 
+> See the [Deployment guide](/docs/guides/deployment) for more details. + +### Deploy Postgres + +GCP provides Postgres hosting via [Cloud SQL](https://cloud.google.com/sql/docs/postgres/) or [AlloyDB](https://cloud.google.com/alloydb). Electric works with either. You need to configure them to enable logical replication and connect with the right user. + +#### Cloud SQL + +The default `wal_level` is `replica`. Change it to `logical` by [setting the `cloudsql.logical_decoding` flag to `on`](https://cloud.google.com/sql/docs/postgres/replication/configure-logical-replication#configure-your-postgresql-instance). + +> [!Tip] Customise your instance on setup +> You can set flags in the "Flags" panel of the "Customise your instance" section of the [create database page](https://console.cloud.google.com/sql/instances/create;engine=PostgreSQL) in the console, when setting up your database. + +Be careful to connect using the "Outgoing IP address", not the "Public IP address". You will also need to create a new database user with `REPLICATION`. Log in using the default `postgres` user and then run something like this, changing the username and database name as necessary: + +```sql +CREATE ROLE electric WITH REPLICATION LOGIN PASSWORD '...'; +GRANT ALL PRIVILEGES ON DATABASE "postgres" to electric; +``` + +You can then connect to Postgres from Electric as that user, which you can verify using e.g.: + +```shell +docker run -it -e DATABASE_URL=postgresql://electric:YOUR_PASSWORD@YOUR_OUTGOING_IP/postgres electricsql/electric:latest +``` + +#### AlloyDB + +For AlloyDB, the flag to enable logical replication is called `alloydb.logical_decoding`. + +### Deploy Electric + +GCP provides a [wide range of container hosting](https://cloud.google.com/containers). We recommend using [Containers on Compute Engine](https://cloud.google.com/compute/docs/containers/deploying-containers) or [Google Kubernetes Engine (GKE)](https://cloud.google.com/kubernetes-engine). 
+ +For example, you can deploy Electric on a [Container-Optimized OS](https://cloud.google.com/container-optimized-os/docs) with a [Persistent Disk](https://cloud.google.com/compute/docs/disks/#pdspecs) for storing Shape logs. + +> [!Warning] Don't use Cloud Run +> We **don't recommend** that you use [Cloud Run](https://cloud.google.com/run) to deploy the Electric sync service because Cloud Run uses an in-memory filesystem and does not provide persistent file storage for Shape logs. + +> [!Warning] IPv6 support +> If you're connecting to Postgres over IPv6 (for example, if you're [connecting to Supabase Postgres](./supabase#troubleshooting-ipv6)) then you may need to be on a Premium Network Tier to configure IPv6 for your VPC. See [this article](https://cloud.google.com/vpc/docs/subnets#ipv6-ranges) for more details. + +### Deploy your app + +GCP provides a range of [website hosting options](https://cloud.google.com/solutions/web-hosting?hl=en). For example you can deploy a static app to [Google Storage](https://cloud.google.com/storage/docs/hosting-static-website) with [Cloud Build](https://cloud.google.com/build/docs/overview). diff --git a/website/docs/integrations/livestore.md b/website/docs/integrations/livestore.md new file mode 100644 index 0000000000..b3632411c2 --- /dev/null +++ b/website/docs/integrations/livestore.md @@ -0,0 +1,21 @@ +--- +outline: deep +title: LiveStore - Integrations +image: /img/integrations/electric-livestore.jpg +--- + + + +# LiveStore + +[LiveStore](https://github.com/livestorejs) is a reactive SQLite-based state management library focused on high-performance client-side reactivity, originally based on [Riffle](https://riffle.systems). + +LiveStore is under active development by [Johannes Schickling](https://x.com/schickling). + +## Electric and Livestore + +ElectricSQL are sponsoring LiveStore development and building a sync integration. This will be released when LiveStore is released (as public open-source code). 
+ +## Early access + +If you're interested in using LiveStore now, [get in touch](/about/contact) or contact Johannes directly. \ No newline at end of file diff --git a/website/docs/integrations/mobx.md b/website/docs/integrations/mobx.md new file mode 100644 index 0000000000..effc84dd44 --- /dev/null +++ b/website/docs/integrations/mobx.md @@ -0,0 +1,23 @@ +--- +outline: deep +title: MobX - Integrations +image: /img/integrations/electric-mobx.jpg +--- + + + + + +# MobX + +[MobX](https://mobx.js.org) is a framework for simple, scalable, client-side state management. + +## Electric and MobX + +Electric can be integrated with MobX by syncing data into a [Shape](/docs/api/clients/typescript#shape) and then [making the shape observable](https://mobx.js.org/observable-state.html). + + + an example or library integrating Electric and MobX. + \ No newline at end of file diff --git a/website/docs/integrations/neon.md b/website/docs/integrations/neon.md new file mode 100644 index 0000000000..9b31adf0fd --- /dev/null +++ b/website/docs/integrations/neon.md @@ -0,0 +1,48 @@ +--- +outline: deep +title: Neon - Integrations +image: /img/integrations/electric-neon.jpg +--- + + + +# Neon + +[Neon](https://neon.tech) is a serverless Postgres hosting platform. + +## Electric and Neon + +You can use Electric with Neon's [serverless Postgres hosting](https://neon.tech/docs/introduction/serverless). + +> [!Tip] Need context? +> See the [Deployment guide](/docs/guides/deployment) for more details. + +### Deploy Postgres + +[Sign up to Neon](https://neon.tech/docs/get-started-with-neon/signing-up) and go through the steps to create a database. + +On the project page, go to `Settings -> Logical Replication` and click "Enable". + +> [!Tip] Neon and logical replication +> See the [Neon guide on logical replication](https://neon.tech/docs/guides/logical-replication-neon) for information about how logical replication works with the rest of the Neon feature set. 
+ +### Connect Electric + +Go to the Dashboard page and copy the database connection string. + +Make sure you **don't** check "Pooled connection". You want the direct connection string in order to use logical replication. + +You can then run Electric with this connection string as the `DATABASE_URL`, e.g.: + +```shell +docker run -it \ + -e "DATABASE_URL=YOUR_NEON_CONNECTION_STRING" \ + electricsql/electric:latest +``` + +> [!Tip] Need somewhere to host Electric? +> If you need somewhere to deploy Electric then [Neon works well](https://neon.tech/docs/guides/render) with [Render](./render#deploy-electric). + +## PGlite + +Electric and Neon have also collaborated to develop [PGlite](/product/pglite), which was started as a project by Neon's CTO, [Stas Kelvich](https://github.com/kelvich). diff --git a/website/docs/integrations/netlify.md b/website/docs/integrations/netlify.md new file mode 100644 index 0000000000..b03c87051b --- /dev/null +++ b/website/docs/integrations/netlify.md @@ -0,0 +1,51 @@ +--- +outline: deep +title: Netlify - Integrations +image: /img/integrations/electric-netlify.jpg +--- + + + + +# Netlify + +[Netlify](https://www.netlify.com/) is an [application deployment platform](https://www.netlify.com/platform/). + +## Electric and Netlify + +Netlify is a great choice for deploying client-side web apps that use Electric. + +> [!Tip] Need context? +> See the [Deployment guide](/docs/guides/deployment) for more details. + +### Deploy your app + +[Create your app](https://docs.netlify.com/welcome/add-new-site/), connect it to Netlify and [deploy via `git push`](https://docs.netlify.com/site-deploys/create-deploys/#deploy-with-git). + +### Connect to Electric + +> [!Warning] You need Electric (and Postgres) running somewhere else +> The easiest way is to use the [Electric Cloud](/product/cloud). Or see the [Deployment guide](/docs/guides/deployment). 
+ +Copy the URL to your Electric instance and use it when [syncing data](/docs/api/clients/typescript#shape) into your app. E.g.: by [setting an environment variable](https://docs.netlify.com/environment-variables/get-started/#site-environment-variables) and using it in your code: + +```tsx +const ELECTRIC_URL = process.env.ELECTRIC_URL + +const stream = new ShapeStream({ + url: `${ELECTRIC_URL}/v1/shape`, + table: 'items' +}) +``` + +See the [Client docs](/docs/api/clients/typescript) for more information. + +## Example + +### Deploy example app + +Deploy our [standalone-basic-example](https://github.com/electric-sql/standalone-basic-example) app using the form below: + + \ No newline at end of file diff --git a/website/docs/integrations/next.md b/website/docs/integrations/next.md new file mode 100644 index 0000000000..6dc5275cb0 --- /dev/null +++ b/website/docs/integrations/next.md @@ -0,0 +1,37 @@ +--- +outline: deep +title: Next.js - Integrations +image: /img/integrations/electric-next.jpg +--- + + + + + +# Next.js + +[Next.js](https://mobx.js.org) is a full-stack React framework. + +## Electric and Next.js + +Next.js is based on React. Electric [works with React](./react). You can integrate Electric into your Next.js application like any other npm / React library. + +### SSR + +Next.js supports SSR. We are currently [experimenting with patterns](https://github.com/electric-sql/electric/pull/1596) to use Electric with SSR in a way that supports server rendering *and* client-side components seamlessly moving into realtime sync. + +### Examples + +[ElectroDrizzle](https://github.com/LeonAlvarez/ElectroDrizzle) is an example Next.js application using ElectricSQL and PGLite. + + + + + to improving our Next.js documentation, patterns and framework integrations. 
+ diff --git a/website/docs/integrations/phoenix.md b/website/docs/integrations/phoenix.md new file mode 100644 index 0000000000..f5b3b6f22e --- /dev/null +++ b/website/docs/integrations/phoenix.md @@ -0,0 +1,83 @@ +--- +outline: deep +title: Phoenix - Integrations +image: /img/integrations/electric-phoenix.jpg +--- + + + + + +# Phoenix + +[Phoenix](https://www.phoenixframework.org) is a full-stack web development framework for [Elixir](https://elixir-lang.org). + +## Electric and Phoenix + +Electric is [developed in Elixir](/product/sync#how-does-it-work) and provides [an Elixir client](/docs/api/clients/elixir). We've leveraged this to develop a batteries-included Phoenix integration for: + +- [front-end sync](#front-end-sync): into a front-end client from a Postgres-backed Phoenix application +- [LiveView sync](#liveview-sync): into Phoenix LiveView from Postgres in realtime via [Phoenix.Streams](/docs/integrations/phoenix#liveview-sync) + +`Electric.Phoenix` is published on Hex as [hex.pm/packages/electric_phoenix](https://hex.pm/packages/electric_phoenix). + +### Inspiration + +It was inspired by [`josevalim/sync`](https://github.com/josevalim/sync). You can read José's [original design document](https://github.com/josevalim/sync/blob/main/DESIGN.md). + +## How to use + +### Front-end sync + +Phoenix is a general framework that provides a number of different methods to get data from the server to the client. These include exposing [REST APIs](https://hexdocs.pm/phoenix/routing.html#resources) and using [Absinthe](https://hexdocs.pm/absinthe/overview.html) to expose a GraphQL endpoint. + +`Electric.Phoenix` provides an alternative method: exposing [Shapes](/docs/guides/shapes) that sync data directly from Postgres into the client. With this, shapes are exposed and configured in your Phoenix Router. 
For example, here we expose a predefined shape of all visible todos, deriving the shape definition from an Ecto query using your existing data model: + +```elixir +defmodule MyAppWeb.Router do + use Phoenix.Router + alias MyApp.Todos.Todo + + scope "/shapes" do + pipe_through :browser + + get "/todos", Electric.Phoenix.Gateway.Plug, + shape: Electric.Client.shape!(Todo, where: "visible = true") + end +end +``` + +Because the shape is defined in your Router, it can use Plug middleware for authorisation. See [Parameter-based shapes](https://hexdocs.pm/electric_phoenix/0.1.0-dev-2/Electric.Phoenix.Gateway.Plug.html#module-parameter-based-shapes) for more details. + +### LiveView sync + +[Phoenix LiveView](https://hexdocs.pm/phoenix_live_view) allows you to develop interactive web applications in Elixir/Phoenix, often without writing any front-end code. + +LiveView provides a primitive, called [Phoenix.Streams](https://fly.io/phoenix-files/phoenix-dev-blog-streams) that allows you to stream data into a LiveView. `Electric.Phoenix` provides a wrapper around this to automatically stream a [Shape](/docs/guides/shapes) into a LiveView. + +The key primitive is a [`live_stream/4`](https://hexdocs.pm/electric_phoenix/Electric.Phoenix.html#live_stream/4) function that wraps [`Phoenix.LiveView.stream/4`](https://hexdocs.pm/phoenix_live_view/Phoenix.LiveView.html#stream/4) to provide a live updating collection of items. + +```elixir +def mount(_params, _session, socket) do + socket = + Electric.Phoenix.live_stream( + socket, + :visible_todos, + from(t in Todo, where: t.visible == true) + ) + + {:ok, socket} +end +``` + +This makes your LiveView applications real-time. In fact, it allows you to build interactive, real-time multi-user applications straight out of your existing Ecto schema, without writing any JavaScript at all 🤯 + +### More details + +For more details and full documentation see [hexdocs.pm/electric_phoenix](https://hexdocs.pm/electric_phoenix). 
+ + + an equivalent integration for other server-side frameworks, such as Rails, Laravel, Django, etc. + \ No newline at end of file diff --git a/website/docs/integrations/react.md b/website/docs/integrations/react.md new file mode 100644 index 0000000000..379ca27961 --- /dev/null +++ b/website/docs/integrations/react.md @@ -0,0 +1,117 @@ +--- +outline: deep +title: React - Integrations +image: /img/integrations/electric-react.jpg +--- + + + +# React + +React is a popular library for building declarative, component-based UI. + +## Electric and React + +Electric has first-class support for React. We maintain a [react-hooks](https://github.com/electric-sql/electric/tree/main/packages/react-hooks) package that provides a number of [React Hooks](https://react.dev/reference/react/hooks) to bind Shape data to your components. + +## How to use + +### Install + +The package is published on NPM as [`@electric-sql/react`](https://www.npmjs.com/package/@electric-sql/react). Install using e.g.: + +```shell +npm i @electricsql/react +``` + +### `useShape` + +[`useShape`](https://github.com/electric-sql/electric/blob/main/packages/react-hooks/src/react-hooks.tsx#L131) binds a materialised [Shape](/docs/api/clients/typescript#shape) to a state variable. For example: + +```tsx +import { useShape } from '@electric-sql/react' + +const MyComponent = () => { + const { isLoading, data } = useShape<{title: string}>({ + url: `http://localhost:3000/v1/shape`, + table: 'items' + }) + + if (isLoading) { + return
<div>Loading ...</div>
+  }
+
+  return (
+    <div>
+      {data.map(item => <div>{item.title}</div>)}
+    </div>
+ ) +} +``` + +`useShape` takes the same options as [ShapeStream](/docs/api/clients/typescript#options). The return value is a `UseShapeResult`: + +```tsx +export interface UseShapeResult = Row> { + /** + * The array of rows that make up the materialised Shape. + * @type {T[]} + */ + data: T[] + + /** + * The Shape instance used by this useShape + * @type {Shape} + */ + shape: Shape + + /** True during initial fetch. False afterwise. */ + isLoading: boolean + + /** Unix time at which we last synced. Undefined when `isLoading` is true. */ + lastSyncedAt?: number + + /** Unix time at which we last synced. Undefined when `isLoading` is true. */ + isError: boolean + error: Shape[`error`] + +} +``` + +### `preloadShape` + +[`preloadShape`](https://github.com/electric-sql/electric/blob/main/packages/react-hooks/src/react-hooks.tsx#L17) is useful to call in route loading functions or elsewhere when you want to ensure Shape data is loaded before rendering a route or component. + +```tsx +export const clientLoader = async () => { + return await preloadShape({ + url: `http://localhost:3000/v1/shape`, + table: 'items' + }) +} +``` + +It takes the same options as [ShapeStream](/docs/api/clients/typescript#options). + +### `getShapeStream` + +[`getShapeStream`](https://github.com/electric-sql/electric/blob/main/packages/react-hooks/src/react-hooks.tsx#L30) get-or-creates a `ShapeStream` off the global cache. + +```tsx +const itemsStream = getShapeStream({ + url: `http://localhost:3000/v1/shape`, + table: 'items' +}) +``` + +This allows you to avoid consuming multiple streams for the same shape log. + +### `getShape` + +[`getShape`](https://github.com/electric-sql/electric/blob/main/packages/react-hooks/src/react-hooks.tsx#L49) get-or-creates a `Shape` off the global cache. + +```tsx +const itemsShape = getShape(stream) +``` + +This allows you to avoid materialising multiple shapes for the same stream. 
diff --git a/website/docs/integrations/redis.md b/website/docs/integrations/redis.md new file mode 100644 index 0000000000..e2b037001e --- /dev/null +++ b/website/docs/integrations/redis.md @@ -0,0 +1,35 @@ +--- +outline: deep +title: Redis - Integrations +image: /img/integrations/electric-redis.jpg +--- + + + + + +# Redis + +Redis is an in-memory "data structure server", often used as a cache. + +## Electric and Redis + +Many applications use [Redis](https://redis.io/docs/latest/develop/use/client-side-caching/) as a local cache. With Electric, you can define a [Shape](/docs/guides/shapes) and sync it into a [Redis data structure](https://redis.io/docs/latest/develop/data-types/hashes/). + +### Example + +The shape stream comes through as a [log](/docs/api/http#shape-log) of insert, update and delete messages. Apply these to the Redis hash and the cache automatically stays up-to-date: + +<<< @../../examples/redis-sync/src/index.ts + +See the [`redis-sync` example](https://github.com/electric-sql/electric/tree/main/examples/redis-sync) for more details. + + + a library that wraps up the + redis-sync + example into an + @electric-sql/redis + integration library. + diff --git a/website/docs/integrations/render.md b/website/docs/integrations/render.md new file mode 100644 index 0000000000..d4700a5544 --- /dev/null +++ b/website/docs/integrations/render.md @@ -0,0 +1,89 @@ +--- +outline: deep +title: Render - Integrations +image: /img/integrations/electric-render.jpg +--- + + + +# Render + +[Render](https://render.com) is a cloud infrastructure and web hosting platform. + +## Electric and Render + +You can use Render to deploy [an Electric sync service](#deploy-electric) and [your client application](#deploy-your-app). + +> [!Info] Postgres on Render and logical replication +> Render does provide [managed Postgres hosting](https://docs.render.com/postgresql). 
However, this [doesn't yet](https://feedback.render.com/features/p/allow-for-postgres-logical-replication) support logical replication, so you can't currently use Electric with it. +> +> If you need Postgres hosting to use with Render, [Neon](./neon) and [Supabase](./supabase) both work great. + +> [!Tip] Need context? +> See the [Deployment guide](/docs/guides/deployment) for more details. + +### Deploy Electric + +Deploy Electric as a [Web Service](https://docs.render.com/web-services) using their [deploy from a container registry](https://docs.render.com/web-services#deploy-from-a-container-registry) option. + +In the Render dashboard, create a new Web Service, select Existing Image and paste `electricsql/electric` as the image URL. Then on the next screen set a `DATABASE_URL` and [any other config](/docs/api/config) as environment variables. + +You can also optionally enter `/v1/health` as the path for a health check. + +Under "Advanced" make sure you add a Persistent Disk and set the Mount path to e.g.: `/var/electric`. Then also set the [`ELECTRIC_STORAGE_DIR` environment variable](/docs/api/config#storage-dir) to the same mount path, e.g.: `ELECTRIC_STORAGE_DIR=/var/electric`. + +### Deploy your app + +You can deploy your app on Render as a [Static Site](https://docs.render.com/static-sites). For example, you can deploy our [standalone-basic-example](https://github.com/electric-sql/standalone-basic-example) by: + +- selecting "Public GitHub Repository" and pasting `https://github.com/electric-sql/standalone-basic-example` as the value +- setting the publish directory to `dist` +- setting a `VITE_ELECTRIC_URL` environment variable to the URL of your Electric web service, such as `https://YOUR_WEB_SERVICE_NAME.onrender.com` + +Then make sure that your Postgres database has an `items` table with an `id` column and insert some data into it. 
+ +## Example + +Render supports [Blueprints](https://docs.render.com/infrastructure-as-code) to deploy infrastructure as code. The following example shows how to deploy Electric and an example web app that connects to it. + +> [!Warning] Requires an existing Postgres running somewhere else +> The Blueprint above requires a `DATABASE_URL` to an existing Postgres database hosted somewhere else. +> +> Also, as per [the example above](#deploy-your-app), the example app it deploys assumes you have an `items` table in your database. + +### `render.yaml` Blueprint + +Clone [github.com/electric-sql/render-blueprint](https://github.com/electric-sql/render-blueprint) or copy the following config into a `render.yaml` file: + +```yaml +services: + - type: web + runtime: image + name: electric + image: + url: electricsql/electric:latest + disk: + name: storage + mountPath: /var/electric + sizeGB: 20 + envVars: + - key: DATABASE_URL + sync: false + - key: ELECTRIC_STORAGE_DIR + value: "/var/electric" + - type: web + runtime: static + name: app + buildCommand: VITE_ELECTRIC_URL="https://${ELECTRIC_HOST}.onrender.com" npm run build + staticPublishPath: ./dist + envVars: + - key: ELECTRIC_HOST + fromService: + name: electric + type: web + property: host +``` + +You can then follow [the instructions here](https://docs.render.com/infrastructure-as-code#setup) to deploy the Blueprint on Render. + +In short, you push the `render.yaml` to a repo, open the [Render Dashboard](https://dashboard.render.com/), click "New > Blueprint", connect the repo and enter your `DATABASE_URL` when prompted. 
\ No newline at end of file diff --git a/website/docs/integrations/supabase.md b/website/docs/integrations/supabase.md new file mode 100644 index 0000000000..82b79752ef --- /dev/null +++ b/website/docs/integrations/supabase.md @@ -0,0 +1,130 @@ +--- +outline: deep +title: Supabase - Integrations +image: /img/integrations/electric-supabase.jpg +--- + + + +# Supabase + +[Supabase](https://supabase.com) is a Postgres hosting and backend-as-a-service platform for building web, mobile and AI applications. + +## Electric and Supabase + +You can use Electric on Supabase's [hosted Postgres](#deploy-postgres). + +You can also use Electric to [sync data into Supabase Edge Functions](#sync-into-edge-function). + +> [!Tip] Need context? +> See the [Deployment guide](/docs/guides/deployment) for more details. + +### Deploy Postgres + +[Supabase Postgres databases](https://supabase.com/docs/guides/database/overview) come with logical replication enabled and the necessary permissions for Electric to work. + +Create a database on [Supabase.com](https://supabase.com). Click the "Connect" button in the top right to get the connection string. + +Make sure you untick the "Display connection pooler" option to get the direct access URL, because the pooled URL does not support logical replication. Note that this direct access URL only works with IPv6, which means you will need to [configure Electric to connect over IPv6](#troubleshooting-ipv6). + +### Connect Electric + +Configure Electric to connect to the direct access `DATABASE_URL` you copied above. Set [`DATABASE_USE_IPV6`](/docs/api/config#database-use-ipv6) to `true`, e.g.: + +```shell +docker run -it \ + -e "DATABASE_URL=postgresql://postgres:[YOUR_PASSWORD]@db.[YOUR_PROJECT_ID].supabase.co:5432/postgres" \ + -e "DATABASE_USE_IPV6=true" \ + -p 3000:3000 \ + electricsql/electric:latest +``` + +#### Troubleshooting IPv6 + +The network where Electric is running must support IPv6. 
If you're running Electric on your own computer, check if you have IPv6 support by opening [test-ipv6.com](https://test-ipv6.com). If you see "No IPv6 address detected" on that page, consider `ssh`ing into another machine or using a VPN service that works with IPv6 networks. + +When running Electric in a Docker container, there's an additional hurdle in that Docker does not enable IPv6 out-of-the-box. Follow the [official guide](https://docs.docker.com/config/daemon/ipv6/#use-ipv6-for-the-default-bridge-network) to configure your Docker daemon for IPv6. + +If you're subscribed to the Pro or Team plan on Supabase Platform, you can side-step those hurdles by purchasing the [IPv4 add-on](https://supabase.com/docs/guides/platform/ipv4-address#enabling-the-add-on) to make your database host available at an IPv4 address. + +> [!Tip] Need somewhere to host Electric? +> If you need to deploy Electric, then [Supabase works great](https://supabase.com/blog/postgres-on-fly-by-supabase) with [Fly.io](./fly#deploy-electric). + + +### Sync into Edge Function + +You can also use Electric to sync data into a Supabase [Edge Function](https://supabase.com/docs/guides/functions). 
+ +Install the [Supabase CLI](https://supabase.com/docs/guides/local-development/cli/getting-started) and follow the steps in [this Quickstart](https://supabase.com/docs/guides/functions/quickstart) to initialise a new project and create an edge function, e.g.: + +```shell +supabase init +supabase functions new hello-electric +``` + +Start Supabase and serve the functions locally: + +```shell +supabase start +supabase functions serve +``` + +Run `tail` to see the `curl` command at the bottom of the generated `supabase/functions/hello-electric/index.ts` file: + +```shell +tail supabase/functions/hello-electric/index.ts +``` + +Copy the `curl` command (with the real value for `[YOUR_ANON_KEY]`) and run it once against the default function implementation: + +```console +$ curl -i --location --request POST 'http://127.0.0.1:54321/functions/v1/hello-electric' \ + --header 'Authorization: Bearer [YOUR_ANON_KEY]' \ + --header 'Content-Type: application/json' \ + --data '{"name":"Functions"}' +... + +{"message":"Hello Functions!"} +``` + +Now, replace the contents of `supabase/functions/hello-electric/index.ts` with the following, replacing `[YOUR_ELECTRIC_URL]` with the URL of an Electric service, running against a Postgres database with an `items` table. (This can be `http://localhost:3000` if you're running the local docker command we [used above](#connect-electric) when connecting Electric to Supabase Postgres). + +```ts +import { Shape, ShapeStream } from 'npm:@electric-sql/client' + +Deno.serve(async (req) => { + const stream = new ShapeStream({ + url: '[YOUR_ELECTRIC_URL]/v1/shape', + table: 'items' + }) + const shape = new Shape(stream) + const items = [...await shape.value] + + return new Response( + JSON.stringify(items), + { headers: { "Content-Type": "application/json" } }, + ) +}) +``` + +Save it, wait a second and then run the same `curl` command you just ran before to make a request to the edge function. 
You should see the data from your `items` table in the HTTP response, e.g.: + +```console +$ curl -i --location --request POST 'http://127.0.0.1:54321/functions/v1/hello-electric' \ + --header 'Authorization: Bearer [YOUR_ANON_KEY]' \ + --header 'Content-Type: application/json' \ + --data '{"name":"Functions"}' +... + +[["\"public\".\"items\"/\"69ad0c7c-7a84-48e8-84fc-d92e5bd5e2f4\"", ...] +``` + +## PGlite + +Electric and Supabase are also collaborating to develop [PGlite](/product/pglite), which Supabase sponsor, contribute to and have developed [database.build](https://database.build) on. + +
+
+ +
+
diff --git a/website/docs/integrations/tanstack.md b/website/docs/integrations/tanstack.md new file mode 100644 index 0000000000..aa01212a8f --- /dev/null +++ b/website/docs/integrations/tanstack.md @@ -0,0 +1,62 @@ +--- +outline: deep +title: TanStack - Integrations +image: /img/integrations/electric-tanstack.jpg +--- + + + + + +# TanStack + +[TanStack](https://tanstack.com/) is a set of utilities for building web applications. + +[TanStack Query](https://tanstack.com/query/latest) is a data-fetching and state management library. + +## Electric and TanStack + +Electric works very well together with TanStack Query, where Electric provides the read-path sync and TanStack provides a [local write-path with optimistic state](https://tanstack.com/query/latest/docs/framework/react/guides/optimistic-updates#via-the-cache). + +
+ + + Illustration of an Electric - TanStack integration + +
+ + + Green shows read-path sync via Electric. + Red shows write-path via TanStack. + + +
+
+ +In this configuration, Electric and TanStack can provide a fully offline-capable system with active-active replication of both reads and writes. + +### Example + +The example below shows a simple todo application that uses Electric for read-path sync and TanStack for local optimistic writes. + +Electric is used to sync a shape. TanStack is used to apply mutations and maintain optimistic state. When a mutation is confirmed, it cleares the optimistic state. When the component renders, it merges the optimistic state into the shape data. + +<<< @../../examples/tanstack-example/src/Example.tsx + +See the [`tanstack-example`](https://github.com/electric-sql/electric/tree/main/examples/tanstack-example) for the full source code. + + + a library based on the + tanstack-example + that integrates Electric and TanStack into a higher level interface. + diff --git a/website/electric-api.yaml b/website/electric-api.yaml index 8326b97c34..4eadd274ca 100644 --- a/website/electric-api.yaml +++ b/website/electric-api.yaml @@ -163,6 +163,9 @@ paths: When set to `full` the entire row will be sent for updates and deletes. + + Note that insert operations always include the full row, + in either mode. # Headers - name: If-None-Match in: header diff --git a/website/product/cloud/sign-up.md b/website/product/cloud/sign-up.md index 4ee5ecc599..2acdef0c58 100644 --- a/website/product/cloud/sign-up.md +++ b/website/product/cloud/sign-up.md @@ -26,14 +26,39 @@ If you're interested in early-access to the Electric Cloud, you can sign up to t ## Next steps -In the meantime, you may like to [#say-hello on Discord](https://discord.electric-sql.com). +In the meantime, you may like to [#say-hello on Discord](https://discord.electric-sql.com) and check out our guides on installing locally and self-hosting in the meantime.