From d939b682258c4f4fa4bbb25e5cc238ac660c5977 Mon Sep 17 00:00:00 2001 From: Kyle Mathews Date: Fri, 4 Oct 2024 11:12:11 -0600 Subject: [PATCH 01/11] Update table path param to be a query param --- .changeset/hot-lions-smell.md | 6 ++ .changeset/tender-pens-cheer.md | 5 ++ .gitignore | 2 + README.md | 5 +- examples/auth/app/page.tsx | 12 ++- .../app/shape-proxy/{[...table] => }/route.ts | 8 +- examples/bash-client/bash-client.bash | 4 +- examples/basic-example/src/Example.tsx | 3 +- examples/linearlite/.env.prod | 1 + .../linearlite/src/pages/Issue/Comments.tsx | 3 +- examples/linearlite/src/shapes.ts | 3 +- examples/nextjs-example/app/page.tsx | 7 +- .../app/shape-proxy/{[...table] => }/route.ts | 8 +- examples/redis-sync/src/index.ts | 3 +- examples/remix-basic/app/routes/_index.tsx | 3 +- .../{shape-proxy.$table.ts => shape-proxy.ts} | 5 +- examples/tanstack-example/src/Example.tsx | 13 +-- examples/todo-app/src/routes/index.tsx | 3 +- integration-tests/tests/crash-recovery.lux | 4 +- ...suming-replication-at-consistent-point.lux | 4 +- packages/react-hooks/README.md | 3 +- .../react-hooks/test/react-hooks.test-d.ts | 3 + .../react-hooks/test/react-hooks.test.tsx | 39 ++++++--- .../react-hooks/test/support/test-context.ts | 4 +- .../lib/electric/plug/delete_shape_plug.ex | 2 +- .../lib/electric/plug/label_process_plug.ex | 12 +-- .../sync-service/lib/electric/plug/router.ex | 8 +- .../lib/electric/plug/serve_shape_plug.ex | 14 +-- .../sync-service/lib/electric/plug/utils.ex | 31 +++++++ .../sync-service/lib/electric/shapes/shape.ex | 6 +- .../electric/plug/delete_shape_plug_test.exs | 10 +-- .../test/electric/plug/router_test.exs | 85 ++++++++++--------- .../electric/plug/serve_shape_plug_test.exs | 48 ++++++----- .../test/electric/plug/utils_test.exs | 48 +++++++++++ .../test/electric/shapes/shape_test.exs | 4 +- packages/typescript-client/README.md | 6 +- packages/typescript-client/src/client.ts | 18 +++- packages/typescript-client/src/constants.ts | 1 + packages/typescript-client/src/shape.ts | 2 +- packages/typescript-client/test/cache.test.ts | 25 +++--- .../typescript-client/test/client.test-d.ts | 4 + .../typescript-client/test/client.test.ts | 30 ++++--- packages/typescript-client/test/fetch.test.ts | 24 +++--- .../test/integration.test.ts | 77 ++++++++++------- .../typescript-client/test/stream.test.ts | 3 +- .../test/support/test-context.ts | 2 +- website/docs/api/clients/typescript.md | 9 +- website/docs/api/http.md | 6 +- website/docs/api/integrations/react.md | 3 +- website/docs/guides/auth.md | 6 +- website/docs/guides/shapes.md | 15 ++-- website/docs/quickstart.md | 13 +-- website/electric-api.yaml | 20 ++--- website/src/partials/home-cta.md | 5 +- website/src/partials/sync-into-pglite.tsx | 9 +- website/use-cases/state-transfer.md | 3 +- 56 files changed, 438 insertions(+), 262 deletions(-) create mode 100644 .changeset/hot-lions-smell.md create mode 100644 .changeset/tender-pens-cheer.md rename examples/auth/app/shape-proxy/{[...table] => }/route.ts (88%) create mode 100644 examples/linearlite/.env.prod rename examples/nextjs-example/app/shape-proxy/{[...table] => }/route.ts (80%) rename examples/remix-basic/app/routes/{shape-proxy.$table.ts => shape-proxy.ts} (83%) diff --git a/.changeset/hot-lions-smell.md b/.changeset/hot-lions-smell.md new file mode 100644 index 0000000000..750e5e2b19 --- /dev/null +++ b/.changeset/hot-lions-smell.md @@ -0,0 +1,6 @@ +--- +"@electric-sql/client": minor +"@electric-sql/react": minor +--- + +All `Shape` interfaces (`ShapeStream`, 
`Shape`, `useShape`) now require `table` as an additional configuration parameter, and the shape API endpoint url only needs to point to `/v1/shape`. diff --git a/.changeset/tender-pens-cheer.md b/.changeset/tender-pens-cheer.md new file mode 100644 index 0000000000..9a6e883e9f --- /dev/null +++ b/.changeset/tender-pens-cheer.md @@ -0,0 +1,5 @@ +--- +"@core/sync-service": minor +--- + +[BREAKING] All shape API endpoints now accept `table` as a query parameter rather than a path parameter, so `/v1/shape/foo?offset=-1` now becomes `/v1/shape?table=foo&offset=-1`. diff --git a/.gitignore b/.gitignore index ea28d94d74..95c3401388 100644 --- a/.gitignore +++ b/.gitignore @@ -19,3 +19,5 @@ shape-data.json test-dbs tsconfig.tsbuildinfo wal +shapes +.sst diff --git a/README.md b/README.md index b113b27fee..da78a56347 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,7 @@ docker compose -f .support/docker-compose.yml up You can then use the [HTTP API](https://electric-sql.com/docs/api/http) to sync data from your Postgres. For example, to start syncing the whole `foo` table: ```sh -curl -i 'http://localhost:3000/v1/shape/foo?offset=-1' +curl -i 'http://localhost:3000/v1/shape?table=foo&offset=-1' ``` Or use one of the clients or integrations, such as the [`useShape`](https://electric-sql.com/docs/api/integrations/react) React hook: @@ -69,7 +69,8 @@ import { useShape } from '@electric-sql/react' function Component() { const { data } = useShape({ - url: `http://localhost:3000/v1/shape/foo`, + url: `http://localhost:3000/v1/shape`, + table: `foo`, where: `title LIKE 'foo%'`, }) diff --git a/examples/auth/app/page.tsx b/examples/auth/app/page.tsx index 49c4c49291..8a9a9f4aff 100644 --- a/examples/auth/app/page.tsx +++ b/examples/auth/app/page.tsx @@ -20,18 +20,16 @@ const usersShape = (): ShapeStreamOptions => { const queryParams = new URLSearchParams(window.location.search) const org_id = queryParams.get(`org_id`) return { - url: new URL( - `/shape-proxy/users?org_id=${org_id}`, - window.location.origin - ).href, + url: new URL(`/shape-proxy?org_id=${org_id}`, window.location.origin).href, + table: `users`, headers: { Authorization: org_id || ``, - }, + } } } else { return { - url: new URL(`https://not-sure-how-this-works.com/shape-proxy/items`) - .href, + url: new URL(`https://not-sure-how-this-works.com/shape-proxy`).href, + table: `items`, } } } diff --git a/examples/auth/app/shape-proxy/[...table]/route.ts b/examples/auth/app/shape-proxy/route.ts similarity index 88% rename from examples/auth/app/shape-proxy/[...table]/route.ts rename to examples/auth/app/shape-proxy/route.ts index 73bb4e2dfd..14fd6fdd1d 100644 --- a/examples/auth/app/shape-proxy/[...table]/route.ts +++ b/examples/auth/app/shape-proxy/route.ts @@ -1,12 +1,8 @@ -export async function GET( - request: Request, - { params }: { params: { table: string } } -) { +export async function GET(request: Request) { const url = new URL(request.url) - const { table } = params // Constuct the upstream URL - const originUrl = new URL(`http://localhost:3000/v1/shape/${table}`) + const originUrl = new URL(`http://localhost:3000/v1/shape`) url.searchParams.forEach((value, key) => { originUrl.searchParams.set(key, value) }) diff --git a/examples/bash-client/bash-client.bash b/examples/bash-client/bash-client.bash index e62af39fda..0a7045bfdc 100755 --- a/examples/bash-client/bash-client.bash +++ b/examples/bash-client/bash-client.bash @@ -1,7 +1,7 @@ #!/bin/bash # URL to download the JSON file from (without the output parameter) 
-BASE_URL="http://localhost:3000/v1/shape/todos" +BASE_URL="http://localhost:3000/v1/shape?table=todos" # Directory to store individual JSON files OFFSET_DIR="./json_files" @@ -78,7 +78,7 @@ process_json() { # Main loop to poll for updates every second while true; do - url="$BASE_URL?offset=$LATEST_OFFSET" + url="$BASE_URL&offset=$LATEST_OFFSET" echo $url LATEST_OFFSET=$(process_json "$url" "shape-data.json") diff --git a/examples/basic-example/src/Example.tsx b/examples/basic-example/src/Example.tsx index 4902444793..2f92992338 100644 --- a/examples/basic-example/src/Example.tsx +++ b/examples/basic-example/src/Example.tsx @@ -7,7 +7,8 @@ const baseUrl = import.meta.env.ELECTRIC_URL ?? `http://localhost:3000` export const Example = () => { const { data: items } = useShape({ - url: `${baseUrl}/v1/shape/items`, + url: `${baseUrl}/v1/shape`, + table: `items` }) /* diff --git a/examples/linearlite/.env.prod b/examples/linearlite/.env.prod new file mode 100644 index 0000000000..ae8b7f377f --- /dev/null +++ b/examples/linearlite/.env.prod @@ -0,0 +1 @@ +DATABASE_URL=postgresql://neondb_owner:nM9OBhJAr6wv@ep-curly-truth-a43bi79a.us-east-1.aws.neon.tech/neondb?sslmode=require diff --git a/examples/linearlite/src/pages/Issue/Comments.tsx b/examples/linearlite/src/pages/Issue/Comments.tsx index 79175620c7..a7eea79dcf 100644 --- a/examples/linearlite/src/pages/Issue/Comments.tsx +++ b/examples/linearlite/src/pages/Issue/Comments.tsx @@ -17,7 +17,8 @@ export interface CommentsProps { function Comments(commentProps: CommentsProps) { const [newCommentBody, setNewCommentBody] = useState(``) const allComments = useShape({ - url: `${baseUrl}/v1/shape/comment`, + url: `${baseUrl}/v1/shape`, + table: `comment`, })! as Comment[] const comments = allComments.data.filter( diff --git a/examples/linearlite/src/shapes.ts b/examples/linearlite/src/shapes.ts index fc4dabe149..7c11f22005 100644 --- a/examples/linearlite/src/shapes.ts +++ b/examples/linearlite/src/shapes.ts @@ -1,5 +1,6 @@ import { baseUrl } from './electric' export const issueShape = { - url: `${baseUrl}/v1/shape/issue`, + url: `${baseUrl}/v1/shape`, + table: `issue`, } diff --git a/examples/nextjs-example/app/page.tsx b/examples/nextjs-example/app/page.tsx index 51fe2e230a..239ebe2a7d 100644 --- a/examples/nextjs-example/app/page.tsx +++ b/examples/nextjs-example/app/page.tsx @@ -9,12 +9,13 @@ import { matchStream } from "./match-stream" const itemShape = () => { if (typeof window !== `undefined`) { return { - url: new URL(`/shape-proxy/items`, window?.location.origin).href, + url: new URL(`/shape-proxy`, window?.location.origin).href, + table: `items`, } } else { return { - url: new URL(`https://not-sure-how-this-works.com/shape-proxy/items`) - .href, + url: new URL(`https://not-sure-how-this-works.com/shape-proxy`).href, + table: `items`, } } } diff --git a/examples/nextjs-example/app/shape-proxy/[...table]/route.ts b/examples/nextjs-example/app/shape-proxy/route.ts similarity index 80% rename from examples/nextjs-example/app/shape-proxy/[...table]/route.ts rename to examples/nextjs-example/app/shape-proxy/route.ts index 54e3562d55..f542070d5b 100644 --- a/examples/nextjs-example/app/shape-proxy/[...table]/route.ts +++ b/examples/nextjs-example/app/shape-proxy/route.ts @@ -1,10 +1,6 @@ -export async function GET( - request: Request, - { params }: { params: { table: string } } -) { +export async function GET(request: Request) { const url = new URL(request.url) - const { table } = params - const originUrl = new 
URL(`http://localhost:3000/v1/shape/${table}`) + const originUrl = new URL(`http://localhost:3000/v1/shape`) url.searchParams.forEach((value, key) => { originUrl.searchParams.set(key, value) }) diff --git a/examples/redis-sync/src/index.ts b/examples/redis-sync/src/index.ts index a306eefef9..178e54d6b7 100644 --- a/examples/redis-sync/src/index.ts +++ b/examples/redis-sync/src/index.ts @@ -33,7 +33,8 @@ client.connect().then(async () => { const updateKeyScriptSha1 = await client.SCRIPT_LOAD(script) const itemsStream = new ShapeStream({ - url: `http://localhost:3000/v1/shape/items`, + url: `http://localhost:3000/v1/shape`, + table: `items`, }) itemsStream.subscribe(async (messages: Message[]) => { // Begin a Redis transaction diff --git a/examples/remix-basic/app/routes/_index.tsx b/examples/remix-basic/app/routes/_index.tsx index 80aaaf57bb..d2d708055a 100644 --- a/examples/remix-basic/app/routes/_index.tsx +++ b/examples/remix-basic/app/routes/_index.tsx @@ -7,7 +7,8 @@ import { matchStream } from "../match-stream" const itemShape = () => { return { - url: new URL(`/shape-proxy/items`, window.location.origin).href, + url: new URL(`/shape-proxy`, window.location.origin).href, + table: `items`, } } diff --git a/examples/remix-basic/app/routes/shape-proxy.$table.ts b/examples/remix-basic/app/routes/shape-proxy.ts similarity index 83% rename from examples/remix-basic/app/routes/shape-proxy.$table.ts rename to examples/remix-basic/app/routes/shape-proxy.ts index dd5d0526a7..aeea7f448c 100644 --- a/examples/remix-basic/app/routes/shape-proxy.$table.ts +++ b/examples/remix-basic/app/routes/shape-proxy.ts @@ -1,9 +1,8 @@ import type { LoaderFunctionArgs } from "@remix-run/node" -export async function loader({ params, request }: LoaderFunctionArgs) { +export async function loader({ request }: LoaderFunctionArgs) { const url = new URL(request.url) - const { table } = params - const originUrl = new URL(`http://localhost:3000/v1/shape/${table}`) + const originUrl = new URL(`http://localhost:3000/v1/shape`) url.searchParams.forEach((value, key) => { originUrl.searchParams.set(key, value) }) diff --git a/examples/tanstack-example/src/Example.tsx b/examples/tanstack-example/src/Example.tsx index 3ae8f9156d..a5a790138a 100644 --- a/examples/tanstack-example/src/Example.tsx +++ b/examples/tanstack-example/src/Example.tsx @@ -14,7 +14,8 @@ const baseUrl = import.meta.env.ELECTRIC_URL ?? `http://localhost:3000` const baseApiUrl = `http://localhost:3001` const itemShape = () => ({ - url: new URL(`/v1/shape/items`, baseUrl).href, + url: new URL(`/v1/shape`, baseUrl).href, + table: `items` }) async function createItem(newId: string) { @@ -43,11 +44,11 @@ async function clearItems(numItems: number) { const findUpdatePromise = numItems > 0 ? 
matchStream({ - stream: itemsStream, - operations: [`delete`], - // First delete will match - matchFn: () => true, - }) + stream: itemsStream, + operations: [`delete`], + // First delete will match + matchFn: () => true, + }) : Promise.resolve() // Delete all items diff --git a/examples/todo-app/src/routes/index.tsx b/examples/todo-app/src/routes/index.tsx index 9c90fbac04..7acae6ad62 100644 --- a/examples/todo-app/src/routes/index.tsx +++ b/examples/todo-app/src/routes/index.tsx @@ -19,7 +19,8 @@ type ToDo = { export default function Index() { const { data: todos } = useShape({ - url: `http://localhost:3000/v1/shape/todos`, + url: `http://localhost:3000/v1/shape`, + table: `todos`, }) todos.sort((a, b) => a.created_at - b.created_at) console.log({ todos }) diff --git a/integration-tests/tests/crash-recovery.lux b/integration-tests/tests/crash-recovery.lux index 3349ded84f..6eae172add 100644 --- a/integration-tests/tests/crash-recovery.lux +++ b/integration-tests/tests/crash-recovery.lux @@ -40,7 +40,7 @@ # Initialize a shape and collect the offset [shell client] # strip ANSI codes from response for easier matching - !curl -v -X GET http://localhost:3000/v1/shape/items?offset=-1 + !curl -v -X GET "http://localhost:3000/v1/shape?table=items&offset=-1" ?electric-shape-id: ([\d-]+) [local shape_id=$1] ?electric-chunk-last-offset: ([\w\d_]+) @@ -58,7 +58,7 @@ # Client should be able to continue same shape [shell client] - !curl -v -X GET "http://localhost:3000/v1/shape/items?offset=$last_offset&shape_id=$shape_id" + !curl -v -X GET "http://localhost:3000/v1/shape?table=items&offset=$last_offset&shape_id=$shape_id" ??HTTP/1.1 200 OK [cleanup] diff --git a/integration-tests/tests/resuming-replication-at-consistent-point.lux b/integration-tests/tests/resuming-replication-at-consistent-point.lux index f6e6b8256c..f42069373d 100644 --- a/integration-tests/tests/resuming-replication-at-consistent-point.lux +++ b/integration-tests/tests/resuming-replication-at-consistent-point.lux @@ -29,10 +29,10 @@ ## Initialize a couple of shapes so that Electric starts processing transactions from Postgres [shell client] - !curl -i http://localhost:3000/v1/shape/roots?offset=-1 + !curl -i "http://localhost:3000/v1/shape?table=roots&offset=-1" ??200 OK - !curl -i http://localhost:3000/v1/shape/leaves?offset=-1 + !curl -i "http://localhost:3000/v1/shape?table=leaves&offset=-1" ??200 OK ## Commit enough new transactions for shape storage to hit the simulated failure. 
diff --git a/packages/react-hooks/README.md b/packages/react-hooks/README.md index 29b1ec56c3..56a3fe941f 100644 --- a/packages/react-hooks/README.md +++ b/packages/react-hooks/README.md @@ -21,7 +21,8 @@ import { useShape } from "@electric-sql/react" export default function MyComponent () { const { isLoading, data } = useShape({ - url: "http://my-api.com/shape/foo", + url: "http://my-api.com/shape", + table: `foo`, }) if (isLoading) { diff --git a/packages/react-hooks/test/react-hooks.test-d.ts b/packages/react-hooks/test/react-hooks.test-d.ts index 6f7c934db8..c5a5ab5ec3 100644 --- a/packages/react-hooks/test/react-hooks.test-d.ts +++ b/packages/react-hooks/test/react-hooks.test-d.ts @@ -5,6 +5,7 @@ import { Row } from 'packages/typescript-client/dist' describe(`useShape`, () => { it(`should infer correct return type when no selector is provided`, () => { const shape = useShape({ + table: ``, url: ``, }) @@ -20,6 +21,7 @@ describe(`useShape`, () => { it(`should infer correct return type when a selector is provided`, () => { const shape = useShape({ + table: ``, url: ``, selector: (_value: UseShapeResult) => { return { @@ -36,6 +38,7 @@ describe(`useShape`, () => { it(`should raise a type error if type argument does not equal inferred return type`, () => { const shape = useShape({ + table: ``, url: ``, // @ts-expect-error - should have type mismatch, because doesn't match the declared `Number` type selector: (_value: UseShapeResult) => { diff --git a/packages/react-hooks/test/react-hooks.test.tsx b/packages/react-hooks/test/react-hooks.test.tsx index 35c5a3ce7d..057e8c6a21 100644 --- a/packages/react-hooks/test/react-hooks.test.tsx +++ b/packages/react-hooks/test/react-hooks.test.tsx @@ -12,12 +12,14 @@ describe(`sortedOptionsHash`, () => { `should create the same hash from options sorted in different ways`, () => { const hash1 = sortedOptionsHash({ - url: `http://whatever/foo`, + url: `http://whatever`, + table: `foo`, offset: `-1`, }) const hash2 = sortedOptionsHash({ offset: `-1`, - url: `http://whatever/foo`, + table: `foo`, + url: `http://whatever`, }) expect(hash1).toEqual(hash2) } @@ -28,7 +30,8 @@ describe(`useShape`, () => { it(`should sync an empty shape`, async ({ aborter, issuesTableUrl }) => { const { result } = renderHook(() => useShape({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, subscribe: false, }) @@ -49,7 +52,8 @@ describe(`useShape`, () => { const { result } = renderHook(() => useShape({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter?.signal, subscribe: false, }) @@ -94,7 +98,8 @@ describe(`useShape`, () => { it(`should expose isLoading status`, async ({ issuesTableUrl }) => { const { result } = renderHook(() => useShape({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, fetchClient: async (input, init) => { await sleep(10) return fetch(input, init) @@ -112,7 +117,8 @@ describe(`useShape`, () => { }) => { const { result } = renderHook(() => useShape({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, fetchClient: async (input, init) => { await sleep(50) return fetch(input, init) @@ -139,13 +145,15 @@ describe(`useShape`, () => { const { result } = renderHook(() => useShape({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, 
subscribe: true, }) ) await waitFor(() => expect(result.current.data).not.toEqual([])) + await sleep(100) // TODO: remove later, just testing if this improves flakes // Add an item. const [id2] = await insertIssues({ title: `other row` }) @@ -170,7 +178,8 @@ describe(`useShape`, () => { const { result, rerender } = renderHook((options) => useShape(options), { initialProps: { - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, where: `id = '${id}'`, signal: aborter.signal, subscribe: true, @@ -182,7 +191,8 @@ describe(`useShape`, () => { ) rerender({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, where: `id = '${id2}'`, signal: aborter.signal, subscribe: true, @@ -203,7 +213,8 @@ describe(`useShape`, () => { const { result } = renderHook(() => useShape({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, subscribe: true, selector: (result) => { @@ -249,7 +260,8 @@ describe(`useShape`, () => { const { result, rerender } = renderHook( ({ selector }) => useShape({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, subscribe: true, selector: selector, @@ -277,7 +289,8 @@ describe(`useShape`, () => { const { result, unmount } = renderHook(() => useShape({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, subscribe: true, }) @@ -292,7 +305,7 @@ describe(`useShape`, () => { // And wait until it's definitely seen await waitFor(async () => { const res = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1` + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1` ) const body = (await res.json()) as Message[] expect(body).toMatchObject([{}, { value: { id: newId } }]) diff --git a/packages/react-hooks/test/support/test-context.ts b/packages/react-hooks/test/support/test-context.ts index a3f3c44d2c..df22ce4d0d 100644 --- a/packages/react-hooks/test/support/test-context.ts +++ b/packages/react-hooks/test/support/test-context.ts @@ -38,7 +38,7 @@ export const testWithDbClient = test.extend<{ use(async (table: string, shapeId?: string) => { const baseUrl = inject(`baseUrl`) const resp = await fetch( - `${baseUrl}/v1/shape/${table}${shapeId ? `?shape_id=${shapeId}` : ``}`, + `${baseUrl}/v1/shape?table=${table}${shapeId ? 
`&shape_id=${shapeId}` : ``}`, { method: `DELETE`, } @@ -47,7 +47,7 @@ export const testWithDbClient = test.extend<{ console.error( await FetchError.fromResponse( resp, - `DELETE ${baseUrl}/v1/shape/${table}` + `DELETE ${baseUrl}/v1/shape?table=${table}` ) ) throw new Error(`Could not delete shape ${table} with ID ${shapeId}`) diff --git a/packages/sync-service/lib/electric/plug/delete_shape_plug.ex b/packages/sync-service/lib/electric/plug/delete_shape_plug.ex index f8ecfee487..761450995f 100644 --- a/packages/sync-service/lib/electric/plug/delete_shape_plug.ex +++ b/packages/sync-service/lib/electric/plug/delete_shape_plug.ex @@ -29,7 +29,7 @@ defmodule Electric.Plug.DeleteShapePlug do defp validate_query_params(%Plug.Conn{} = conn, _) do all_params = Map.merge(conn.query_params, conn.path_params) - |> Map.take(["root_table", "shape_id"]) + |> Map.take(["table", "shape_id"]) |> Map.put("offset", "-1") case Params.validate(all_params, inspector: conn.assigns.config[:inspector]) do diff --git a/packages/sync-service/lib/electric/plug/label_process_plug.ex b/packages/sync-service/lib/electric/plug/label_process_plug.ex index 4e43046a44..badc92c576 100644 --- a/packages/sync-service/lib/electric/plug/label_process_plug.ex +++ b/packages/sync-service/lib/electric/plug/label_process_plug.ex @@ -26,19 +26,19 @@ defmodule Electric.Plug.LabelProcessPlug do iex> process_label(%{ ...> method: "GET", - ...> request_path: "/v1/shape/users", - ...> query_string: "offset=-1", + ...> request_path: "/v1/shape", + ...> query_string: "table=users&offset=-1", ...> assigns: %{plug_request_id: "F-jPUudNHxbD8lIAABQG"} ...> }) - "Request F-jPUudNHxbD8lIAABQG - GET /v1/shape/users?offset=-1" + "Request F-jPUudNHxbD8lIAABQG - GET /v1/shape?table=users&offset=-1" iex> process_label(%{ ...> method: "GET", - ...> request_path: "/v1/shape/users", - ...> query_string: "", + ...> request_path: "/v1/shape", + ...> query_string: "table=users", ...> assigns: %{plug_request_id: "F-jPUudNHxbD8lIAABQG"} ...> }) - "Request F-jPUudNHxbD8lIAABQG - GET /v1/shape/users" + "Request F-jPUudNHxbD8lIAABQG - GET /v1/shape?table=users" """ def process_label(conn) do "Request #{conn.assigns.plug_request_id} - #{conn.method} #{conn.request_path}#{query_suffix(conn)}" diff --git a/packages/sync-service/lib/electric/plug/router.ex b/packages/sync-service/lib/electric/plug/router.ex index 426632d1a5..bd6ce5debb 100644 --- a/packages/sync-service/lib/electric/plug/router.ex +++ b/packages/sync-service/lib/electric/plug/router.ex @@ -15,9 +15,9 @@ defmodule Electric.Plug.Router do match "/", via: [:get, :head], do: send_resp(conn, 200, "") - get "/v1/shape/:root_table", to: Electric.Plug.ServeShapePlug - delete "/v1/shape/:root_table", to: Electric.Plug.DeleteShapePlug - match "/v1/shape/:root_table", via: :options, to: Electric.Plug.OptionsShapePlug + get "/v1/shape", to: Electric.Plug.ServeShapePlug + delete "/v1/shape", to: Electric.Plug.DeleteShapePlug + match "/v1/shape", via: :options, to: Electric.Plug.OptionsShapePlug get "/v1/health", to: Electric.Plug.HealthCheckPlug @@ -29,7 +29,7 @@ defmodule Electric.Plug.Router do def server_header(conn, version), do: conn |> Plug.Conn.put_resp_header("server", "ElectricSQL/#{version}") - def put_cors_headers(%Plug.Conn{path_info: ["v1", "shape", _ | _]} = conn, _opts), + def put_cors_headers(%Plug.Conn{path_info: ["v1", "shape" | _]} = conn, _opts), do: CORSHeaderPlug.call(conn, %{methods: ["GET", "HEAD", "DELETE", "OPTIONS"]}) def put_cors_headers(%Plug.Conn{path_info: ["v1", "admin", _ | _]} 
= conn, _opts), diff --git a/packages/sync-service/lib/electric/plug/serve_shape_plug.ex b/packages/sync-service/lib/electric/plug/serve_shape_plug.ex index 58ee9395d5..5c9e2610a0 100644 --- a/packages/sync-service/lib/electric/plug/serve_shape_plug.ex +++ b/packages/sync-service/lib/electric/plug/serve_shape_plug.ex @@ -63,7 +63,7 @@ defmodule Electric.Plug.ServeShapePlug do @primary_key false embedded_schema do - field(:root_table, :string) + field(:table, :string) field(:offset, :string) field(:shape_id, :string) field(:live, :boolean, default: false) @@ -77,7 +77,7 @@ defmodule Electric.Plug.ServeShapePlug do |> cast(params, __schema__(:fields) -- [:shape_definition], message: fn _, _ -> "must be %{type}" end ) - |> validate_required([:root_table, :offset]) + |> validate_required([:table, :offset]) |> cast_offset() |> cast_columns() |> validate_shape_id_with_offset() @@ -152,7 +152,7 @@ defmodule Electric.Plug.ServeShapePlug do end def cast_root_table(%Ecto.Changeset{} = changeset, opts) do - table = fetch_change!(changeset, :root_table) + table = fetch_change!(changeset, :table) where = fetch_field!(changeset, :where) columns = get_change(changeset, :columns, nil) @@ -267,7 +267,7 @@ defmodule Electric.Plug.ServeShapePlug do end defp handle_shape_info( - %Conn{assigns: %{shape_id: shape_id, config: config}} = conn, + %Conn{assigns: %{shape_id: shape_id, table: table, config: config}} = conn, {active_shape_id, _} ) do if Shapes.has_shape?(config, shape_id) do @@ -279,7 +279,7 @@ defmodule Electric.Plug.ServeShapePlug do else # The requested shape_id is not found, returns 409 along with a location redirect for clients to # re-request the shape from scratch with the new shape id which acts as a consistent cache buster - # e.g. GET /v1/shape/{root_table}?shape_id={new_shape_id}&offset=-1 + # e.g. GET /v1/shape?table={root_table}&shape_id={new_shape_id}&offset=-1 # TODO: discuss returning a 307 redirect rather than a 409, the client # will have to detect this and throw out old data @@ -287,7 +287,7 @@ defmodule Electric.Plug.ServeShapePlug do |> put_resp_header("electric-shape-id", active_shape_id) |> put_resp_header( "location", - "#{conn.request_path}?shape_id=#{active_shape_id}&offset=-1" + "#{conn.request_path}?table=#{table}&shape_id=#{active_shape_id}&offset=-1" ) |> send_resp(409, @must_refetch) |> halt() @@ -609,7 +609,7 @@ defmodule Electric.Plug.ServeShapePlug do |> Map.merge(%{ "shape.id" => shape_id, "shape.where" => assigns[:where], - "shape.root_table" => assigns[:root_table], + "shape.root_table" => assigns[:table], "shape.definition" => assigns[:shape_definition], "shape_req.is_live" => assigns[:live], "shape_req.offset" => assigns[:offset], diff --git a/packages/sync-service/lib/electric/plug/utils.ex b/packages/sync-service/lib/electric/plug/utils.ex index bb401eaabd..80890ba843 100644 --- a/packages/sync-service/lib/electric/plug/utils.ex +++ b/packages/sync-service/lib/electric/plug/utils.ex @@ -47,6 +47,37 @@ defmodule Electric.Plug.Utils do end) end + @doc """ + Calculate the next interval that should be used for long polling based on the + current time and previous interval used. 
+ """ + @oct9th2024 DateTime.from_naive!(~N[2024-10-09 00:00:00], "Etc/UTC") + @spec seconds_since_oct9th_2024_next_interval(integer(), binary() | nil) :: integer() + def seconds_since_oct9th_2024_next_interval(long_poll_timeout_ms, prev_interval \\ nil) do + case div(long_poll_timeout_ms, 1000) do + 0 -> + 0 + + long_poll_timeout_sec -> + now = DateTime.utc_now() + + diff_in_seconds = DateTime.diff(now, @oct9th2024, :second) + next_interval = ceil(diff_in_seconds / long_poll_timeout_sec) * long_poll_timeout_sec + + # randomize the interval if previous one is the same + next_interval = + if prev_interval && "#{next_interval}" == prev_interval do + # Generate a random integer between 0 and 99999 + random_integer = :rand.uniform(100_000) + next_interval + random_integer + else + next_interval + end + + next_interval + end + end + alias OpenTelemetry.SemConv, as: SC def common_open_telemetry_attrs(%Plug.Conn{assigns: assigns} = conn) do diff --git a/packages/sync-service/lib/electric/shapes/shape.ex b/packages/sync-service/lib/electric/shapes/shape.ex index 6bcdc513d3..084d080cc0 100644 --- a/packages/sync-service/lib/electric/shapes/shape.ex +++ b/packages/sync-service/lib/electric/shapes/shape.ex @@ -132,7 +132,7 @@ defmodule Electric.Shapes.Shape do defp load_column_info(table, inspector) do case Inspector.load_column_info(table, inspector) do :table_not_found -> - {:error, {:root_table, ["table not found"]}} + {:error, {:table, ["table not found"]}} {:ok, column_info} -> # %{["column_name"] => :type} @@ -154,13 +154,13 @@ defmodule Electric.Shapes.Shape do case Regex.run(~r/.+ relation "(?.+)" does not exist/, err, capture: :all_names) do [table_name] -> {:error, - {:root_table, + {:table, [ ~s|Table "#{table_name}" does not exist. If the table name contains capitals or special characters you must quote it.| ]}} _ -> - {:error, {:root_table, [err]}} + {:error, {:table, [err]}} end end end diff --git a/packages/sync-service/test/electric/plug/delete_shape_plug_test.exs b/packages/sync-service/test/electric/plug/delete_shape_plug_test.exs index 441b47b120..d850ca6eb9 100644 --- a/packages/sync-service/test/electric/plug/delete_shape_plug_test.exs +++ b/packages/sync-service/test/electric/plug/delete_shape_plug_test.exs @@ -80,7 +80,7 @@ defmodule Electric.Plug.DeleteShapePlugTest do test "returns 404 if shape deletion is not allowed", ctx do conn = ctx - |> conn("DELETE", "?root_table=.invalid_shape", false) + |> conn("DELETE", "?table=.invalid_shape", false) |> DeleteShapePlug.call([]) assert conn.status == 404 @@ -93,13 +93,13 @@ defmodule Electric.Plug.DeleteShapePlugTest do test "returns 400 for invalid params", ctx do conn = ctx - |> conn("DELETE", "?root_table=.invalid_shape") + |> conn("DELETE", "?table=.invalid_shape") |> DeleteShapePlug.call([]) assert conn.status == 400 assert Jason.decode!(conn.resp_body) == %{ - "root_table" => [ + "table" => [ "Invalid zero-length delimited identifier" ] } @@ -122,7 +122,7 @@ defmodule Electric.Plug.DeleteShapePlugTest do conn = ctx - |> conn(:delete, "?root_table=public.users") + |> conn(:delete, "?table=public.users") |> DeleteShapePlug.call([]) assert conn.status == 202 @@ -134,7 +134,7 @@ defmodule Electric.Plug.DeleteShapePlugTest do conn = ctx - |> conn(:delete, "?root_table=public.users&shape_id=#{@test_shape_id}") + |> conn(:delete, "?table=public.users&shape_id=#{@test_shape_id}") |> DeleteShapePlug.call([]) assert conn.status == 202 diff --git a/packages/sync-service/test/electric/plug/router_test.exs 
b/packages/sync-service/test/electric/plug/router_test.exs index 7b5dd3dbca..cc44d45185 100644 --- a/packages/sync-service/test/electric/plug/router_test.exs +++ b/packages/sync-service/test/electric/plug/router_test.exs @@ -72,7 +72,7 @@ defmodule Electric.Plug.RouterTest do ] test "GET returns a snapshot of initial data", %{opts: opts} do conn = - conn("GET", "/v1/shape/items?offset=-1") + conn("GET", "/v1/shape?table=items&offset=-1") |> Router.call(opts) assert %{status: 200} = conn @@ -92,13 +92,13 @@ defmodule Electric.Plug.RouterTest do test "GET returns an error when table is not found", %{opts: opts} do conn = - conn("GET", "/v1/shape/nonexistent?offset=-1") + conn("GET", "/v1/shape?table=nonexistent&offset=-1") |> Router.call(opts) assert %{status: 400} = conn assert %{ - "root_table" => [ + "table" => [ ~s|Table "nonexistent" does not exist. If the table name contains capitals or special characters you must quote it.| ] } = Jason.decode!(conn.resp_body) @@ -110,7 +110,7 @@ defmodule Electric.Plug.RouterTest do ] test "GET returns values in the snapshot and the rest of the log in the same format (as strings)", %{opts: opts, db_conn: db_conn} do - conn = conn("GET", "/v1/shape/items?offset=-1") |> Router.call(opts) + conn = conn("GET", "/v1/shape?table=items&offset=-1") |> Router.call(opts) assert [%{"value" => %{"num" => "1"}}] = Jason.decode!(conn.resp_body) Postgrex.query!( @@ -122,7 +122,8 @@ defmodule Electric.Plug.RouterTest do shape_id = get_resp_shape_id(conn) conn = - conn("GET", "/v1/shape/items?shape_id=#{shape_id}&offset=0_0&live") |> Router.call(opts) + conn("GET", "/v1/shape?table=items&shape_id=#{shape_id}&offset=0_0&live") + |> Router.call(opts) assert [%{"value" => %{"num" => "2"}}, _] = Jason.decode!(conn.resp_body) end @@ -133,7 +134,7 @@ defmodule Electric.Plug.RouterTest do test "DELETE forces the shape ID to be different on reconnect and new snapshot to be created", %{opts: opts, db_conn: db_conn} do conn = - conn("GET", "/v1/shape/items?offset=-1") + conn("GET", "/v1/shape?table=items&offset=-1") |> Router.call(opts) assert %{status: 200} = conn @@ -143,14 +144,14 @@ defmodule Electric.Plug.RouterTest do Jason.decode!(conn.resp_body) assert %{status: 202} = - conn("DELETE", "/v1/shape/items?shape_id=#{shape1_id}") + conn("DELETE", "/v1/shape?table=items&shape_id=#{shape1_id}") |> Router.call(opts) Postgrex.query!(db_conn, "DELETE FROM items", []) Postgrex.query!(db_conn, "INSERT INTO items VALUES (gen_random_uuid(), 'test value 2')", []) conn = - conn("GET", "/v1/shape/items?offset=-1") + conn("GET", "/v1/shape?table=items&offset=-1") |> Router.call(opts) assert %{status: 200} = conn @@ -171,7 +172,7 @@ defmodule Electric.Plug.RouterTest do } do # Request a snapshot conn = - conn("GET", "/v1/shape/foo?offset=-1") + conn("GET", "/v1/shape?table=foo&offset=-1") |> Router.call(opts) assert %{status: 200} = conn @@ -200,7 +201,7 @@ defmodule Electric.Plug.RouterTest do task = Task.async(fn -> - conn("GET", "/v1/shape/foo?offset=#{@first_offset}&shape_id=#{shape_id}&live") + conn("GET", "/v1/shape?table=foo&offset=#{@first_offset}&shape_id=#{shape_id}&live") |> Router.call(opts) end) @@ -243,7 +244,7 @@ defmodule Electric.Plug.RouterTest do "INSERT INTO wide_table VALUES (1, 'test value 1', 'test value 1', 'test value 1')" ] test "GET received only a diff when receiving updates", %{opts: opts, db_conn: db_conn} do - conn = conn("GET", "/v1/shape/wide_table?offset=-1") |> Router.call(opts) + conn = conn("GET", "/v1/shape?table=wide_table&offset=-1") |> 
Router.call(opts) assert %{status: 200} = conn shape_id = get_resp_shape_id(conn) @@ -256,7 +257,7 @@ defmodule Electric.Plug.RouterTest do task = Task.async(fn -> - conn("GET", "/v1/shape/wide_table?offset=0_0&shape_id=#{shape_id}&live") + conn("GET", "/v1/shape?table=wide_table&offset=0_0&shape_id=#{shape_id}&live") |> Router.call(opts) end) @@ -277,7 +278,7 @@ defmodule Electric.Plug.RouterTest do opts: opts, db_conn: db_conn } do - conn = conn("GET", "/v1/shape/wide_table?offset=-1") |> Router.call(opts) + conn = conn("GET", "/v1/shape?table=wide_table&offset=-1") |> Router.call(opts) assert %{status: 200} = conn shape_id = get_resp_shape_id(conn) @@ -290,7 +291,7 @@ defmodule Electric.Plug.RouterTest do task = Task.async(fn -> - conn("GET", "/v1/shape/wide_table?offset=0_0&shape_id=#{shape_id}&live") + conn("GET", "/v1/shape?table=wide_table&offset=0_0&shape_id=#{shape_id}&live") |> Router.call(opts) end) @@ -340,7 +341,7 @@ defmodule Electric.Plug.RouterTest do ] test "GET works correctly when table has no PK", %{opts: opts, db_conn: db_conn} do - conn = conn("GET", "/v1/shape/test_table?offset=-1") |> Router.call(opts) + conn = conn("GET", "/v1/shape?table=test_table&offset=-1") |> Router.call(opts) assert %{status: 200} = conn shape_id = get_resp_shape_id(conn) @@ -349,7 +350,7 @@ defmodule Electric.Plug.RouterTest do task = Task.async(fn -> - conn("GET", "/v1/shape/test_table?offset=0_0&shape_id=#{shape_id}&live") + conn("GET", "/v1/shape?table=test_table&offset=0_0&shape_id=#{shape_id}&live") |> Router.call(opts) end) @@ -390,7 +391,9 @@ defmodule Electric.Plug.RouterTest do "INSERT INTO wide_table VALUES (1, 'test value 1', 'test value 1', 'test value 1')" ] test "GET receives only specified columns out of wide table", %{opts: opts, db_conn: db_conn} do - conn = conn("GET", "/v1/shape/wide_table?offset=-1&columns=id,value1") |> Router.call(opts) + conn = + conn("GET", "/v1/shape?table=wide_table&offset=-1&columns=id,value1") |> Router.call(opts) + assert %{status: 200} = conn shape_id = get_resp_shape_id(conn) @@ -408,7 +411,7 @@ defmodule Electric.Plug.RouterTest do Task.async(fn -> conn( "GET", - "/v1/shape/wide_table?offset=#{next_offset}&columns=id,value1&shape_id=#{shape_id}&live" + "/v1/shape?table=wide_table&offset=#{next_offset}&columns=id,value1&shape_id=#{shape_id}&live" ) |> Router.call(opts) |> then(fn conn -> @@ -437,7 +440,7 @@ defmodule Electric.Plug.RouterTest do where = "value ILIKE 'yes%'" conn = - conn("GET", "/v1/shape/items", %{offset: "-1", where: where}) + conn("GET", "/v1/shape?table=items", %{offset: "-1", where: where}) |> Router.call(opts) assert %{status: 200} = conn @@ -447,7 +450,7 @@ defmodule Electric.Plug.RouterTest do task = Task.async(fn -> - conn("GET", "/v1/shape/items", %{ + conn("GET", "/v1/shape?table=items", %{ offset: "0_0", shape_id: shape_id, where: where, @@ -469,7 +472,7 @@ defmodule Electric.Plug.RouterTest do assert %{status: 200} = conn = - conn("GET", "/v1/shape/items", %{ + conn("GET", "/v1/shape?table=items", %{ offset: new_offset, shape_id: shape_id, where: where @@ -489,7 +492,7 @@ defmodule Electric.Plug.RouterTest do # Verify that a single row is in-shape initially. 
conn = - conn("GET", "/v1/shape/serial_ids", %{offset: "-1", where: where}) + conn("GET", "/v1/shape?table=serial_ids", %{offset: "-1", where: where}) |> Router.call(opts) assert %{status: 200} = conn @@ -508,7 +511,7 @@ defmodule Electric.Plug.RouterTest do task = Task.async(fn -> - conn("GET", "/v1/shape/serial_ids", %{ + conn("GET", "/v1/shape?table=serial_ids", %{ offset: "0_0", shape_id: shape_id, where: where, @@ -538,7 +541,7 @@ defmodule Electric.Plug.RouterTest do # DELETE operations, respectively. task = Task.async(fn -> - conn("GET", "/v1/shape/serial_ids", %{ + conn("GET", "/v1/shape?table=serial_ids", %{ offset: new_offset, shape_id: shape_id, where: where, @@ -597,7 +600,7 @@ defmodule Electric.Plug.RouterTest do # Verify that a two rows are in-shape initially. conn = - conn("GET", "/v1/shape/serial_ids", %{offset: "-1", where: where}) + conn("GET", "/v1/shape?table=serial_ids", %{offset: "-1", where: where}) |> Router.call(opts) assert %{status: 200} = conn @@ -622,7 +625,7 @@ defmodule Electric.Plug.RouterTest do # Simulate a move-in and a move-out by changing the PK of some rows. task = Task.async(fn -> - conn("GET", "/v1/shape/serial_ids", %{ + conn("GET", "/v1/shape?table=serial_ids", %{ offset: "0_0", shape_id: shape_id, where: where, @@ -684,7 +687,7 @@ defmodule Electric.Plug.RouterTest do second_val = String.duplicate("b", round(threshold * 0.7)) third_val = String.duplicate("c", round(threshold * 0.4)) - conn = conn("GET", "/v1/shape/large_rows_table?offset=-1") |> Router.call(opts) + conn = conn("GET", "/v1/shape?table=large_rows_table&offset=-1") |> Router.call(opts) assert %{status: 200} = conn [shape_id] = Plug.Conn.get_resp_header(conn, "electric-shape-id") [next_offset] = Plug.Conn.get_resp_header(conn, "electric-chunk-last-offset") @@ -696,7 +699,7 @@ defmodule Electric.Plug.RouterTest do Task.async(fn -> conn( "GET", - "/v1/shape/large_rows_table?offset=#{next_offset}&shape_id=#{shape_id}&live" + "/v1/shape?table=large_rows_table&offset=#{next_offset}&shape_id=#{shape_id}&live" ) |> Router.call(opts) end) @@ -710,7 +713,7 @@ defmodule Electric.Plug.RouterTest do assert %{status: 200} = Task.await(task) conn = - conn("GET", "/v1/shape/large_rows_table?offset=#{next_offset}&shape_id=#{shape_id}") + conn("GET", "/v1/shape?table=large_rows_table&offset=#{next_offset}&shape_id=#{shape_id}") |> Router.call(opts) assert %{status: 200} = conn @@ -731,7 +734,7 @@ defmodule Electric.Plug.RouterTest do [next_offset] = Plug.Conn.get_resp_header(conn, "electric-chunk-last-offset") conn = - conn("GET", "/v1/shape/large_rows_table?offset=#{next_offset}&shape_id=#{shape_id}") + conn("GET", "/v1/shape?table=large_rows_table&offset=#{next_offset}&shape_id=#{shape_id}") |> Router.call(opts) assert %{status: 200} = conn @@ -756,7 +759,7 @@ defmodule Electric.Plug.RouterTest do # Initial shape request # forces the shape to be created conn = - conn("GET", "/v1/shape/items", %{offset: "-1", where: where}) + conn("GET", "/v1/shape?table=items", %{offset: "-1", where: where}) |> Router.call(opts) assert %{status: 200} = conn @@ -767,7 +770,7 @@ defmodule Electric.Plug.RouterTest do # Make the next request but forget to include the where clause conn = - conn("GET", "/v1/shape/items", %{offset: next_offset, shape_id: shape_id}) + conn("GET", "/v1/shape?table=items", %{offset: next_offset, shape_id: shape_id}) |> Router.call(opts) assert %{status: 400} = conn @@ -785,7 +788,7 @@ defmodule Electric.Plug.RouterTest do } do # Make the next request but forget to include the where clause 
conn = - conn("GET", "/v1/shape/items", %{offset: "0_0", shape_id: "nonexistent"}) + conn("GET", "/v1/shape?table=items", %{offset: "0_0", shape_id: "nonexistent"}) |> Router.call(opts) assert %{status: 409} = conn @@ -793,7 +796,7 @@ defmodule Electric.Plug.RouterTest do new_shape_id = get_resp_header(conn, "electric-shape-id") assert get_resp_header(conn, "location") == - "/v1/shape/items?shape_id=#{new_shape_id}&offset=-1" + "/v1/shape?table=items&shape_id=#{new_shape_id}&offset=-1" end test "GET receives 409 when shape ID is not found but there is another shape matching the definition", @@ -805,7 +808,7 @@ defmodule Electric.Plug.RouterTest do # Initial shape request # forces the shape to be created conn = - conn("GET", "/v1/shape/items", %{offset: "-1", where: where}) + conn("GET", "/v1/shape?table=items", %{offset: "-1", where: where}) |> Router.call(opts) assert %{status: 200} = conn @@ -815,7 +818,11 @@ defmodule Electric.Plug.RouterTest do # Request the same shape definition but with invalid shape_id conn = - conn("GET", "/v1/shape/items", %{offset: "0_0", shape_id: "nonexistent", where: where}) + conn("GET", "/v1/shape?table=items", %{ + offset: "0_0", + shape_id: "nonexistent", + where: where + }) |> Router.call(opts) assert %{status: 409} = conn @@ -827,7 +834,7 @@ defmodule Electric.Plug.RouterTest do ] test "HEAD receives all headers", %{opts: opts} do conn_res = - conn("GET", "/v1/shape/items?offset=-1") + conn("GET", "/v1/shape?table=items&offset=-1") |> Router.call(opts) assert %{status: 200} = conn_res @@ -838,7 +845,7 @@ defmodule Electric.Plug.RouterTest do |> Enum.filter(&(Kernel.elem(&1, 0) != "x-request-id")) conn = - conn("HEAD", "/v1/shape/items?offset=-1") + conn("HEAD", "/v1/shape?table=items&offset=-1") |> Router.call(opts) assert %{status: 200} = conn @@ -853,7 +860,7 @@ defmodule Electric.Plug.RouterTest do test "OPTIONS receives supported methods", %{opts: opts} do conn = - conn("OPTIONS", "/v1/shape/items") + conn("OPTIONS", "/v1/shape?table=items") |> Router.call(opts) assert %{status: 204} = conn diff --git a/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs b/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs index 819ab2fffc..62d1c59505 100644 --- a/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs +++ b/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs @@ -154,14 +154,14 @@ defmodule Electric.Plug.ServeShapePlugTest do test "returns 400 for invalid params", ctx do conn = ctx - |> conn(:get, %{"root_table" => ".invalid_shape"}, "?offset=invalid") + |> conn(:get, %{"table" => ".invalid_shape"}, "?offset=invalid") |> ServeShapePlug.call([]) assert conn.status == 400 assert Jason.decode!(conn.resp_body) == %{ "offset" => ["has invalid format"], - "root_table" => [ + "table" => [ "Invalid zero-length delimited identifier" ] } @@ -172,20 +172,20 @@ defmodule Electric.Plug.ServeShapePlugTest do # but will fail to find the table conn = ctx - |> conn(:get, %{"root_table" => "_val1d_schëmaΦ$.Φtàble"}, "?offset=-1") + |> conn(:get, %{"table" => "_val1d_schëmaΦ$.Φtàble"}, "?offset=-1") |> ServeShapePlug.call([]) assert conn.status == 400 assert Jason.decode!(conn.resp_body) == %{ - "root_table" => ["table not found"] + "table" => ["table not found"] } end test "returns 400 for missing shape_id when offset != -1", ctx do conn = ctx - |> conn(:get, %{"root_table" => "public.users"}, "?offset=#{LogOffset.first()}") + |> conn(:get, %{"table" => "public.users"}, "?offset=#{LogOffset.first()}") |> 
ServeShapePlug.call([]) assert conn.status == 400 @@ -200,7 +200,7 @@ defmodule Electric.Plug.ServeShapePlugTest do ctx |> conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{LogOffset.before_all()}&live=true" ) |> ServeShapePlug.call([]) @@ -246,7 +246,7 @@ defmodule Electric.Plug.ServeShapePlugTest do conn = ctx - |> conn(:get, %{"root_table" => "public.users"}, "?offset=-1") + |> conn(:get, %{"table" => "public.users"}, "?offset=-1") |> ServeShapePlug.call([]) assert conn.status == 200 @@ -297,7 +297,7 @@ defmodule Electric.Plug.ServeShapePlugTest do ctx |> Map.put(:max_age, max_age) |> Map.put(:stale_age, stale_age) - |> conn(:get, %{"root_table" => "public.users"}, "?offset=-1") + |> conn(:get, %{"table" => "public.users"}, "?offset=-1") |> ServeShapePlug.call([]) assert conn.status == 200 @@ -331,7 +331,7 @@ defmodule Electric.Plug.ServeShapePlugTest do conn = ctx - |> conn(:get, %{"root_table" => "public.users"}, "?offset=-1") + |> conn(:get, %{"table" => "public.users"}, "?offset=-1") |> ServeShapePlug.call([]) assert Plug.Conn.get_resp_header(conn, "electric-schema") == [ @@ -365,7 +365,7 @@ defmodule Electric.Plug.ServeShapePlugTest do ctx |> conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{@start_offset_50}&shape_id=#{@test_shape_id}" ) |> ServeShapePlug.call([]) @@ -418,7 +418,7 @@ defmodule Electric.Plug.ServeShapePlugTest do ctx |> conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{@start_offset_50}&shape_id=#{@test_shape_id}" ) |> put_req_header( @@ -460,7 +460,7 @@ defmodule Electric.Plug.ServeShapePlugTest do ctx |> conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{@test_offset}&shape_id=#{@test_shape_id}&live=true" ) |> ServeShapePlug.call([]) @@ -518,7 +518,7 @@ defmodule Electric.Plug.ServeShapePlugTest do ctx |> conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{@test_offset}&shape_id=#{@test_shape_id}&live=true" ) |> ServeShapePlug.call([]) @@ -565,7 +565,7 @@ defmodule Electric.Plug.ServeShapePlugTest do |> Map.put(:long_poll_timeout, 100) |> conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{@test_offset}&shape_id=#{@test_shape_id}&live=true" ) |> ServeShapePlug.call([]) @@ -596,7 +596,7 @@ defmodule Electric.Plug.ServeShapePlugTest do ctx |> conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{"50_12"}&shape_id=foo" ) |> ServeShapePlug.call([]) @@ -605,7 +605,10 @@ defmodule Electric.Plug.ServeShapePlugTest do assert Jason.decode!(conn.resp_body) == [%{"headers" => %{"control" => "must-refetch"}}] assert get_resp_header(conn, "electric-shape-id") == [@test_shape_id] - assert get_resp_header(conn, "location") == ["/?shape_id=#{@test_shape_id}&offset=-1"] + + assert get_resp_header(conn, "location") == [ + "/?table=public.users&shape_id=#{@test_shape_id}&offset=-1" + ] end test "creates a new shape when shape ID does not exist and sends a 409 redirecting to the newly created shape", @@ -626,7 +629,7 @@ defmodule Electric.Plug.ServeShapePlugTest do ctx |> conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{"50_12"}&shape_id=#{@test_shape_id}" ) |> ServeShapePlug.call([]) @@ -635,7 +638,10 @@ defmodule Electric.Plug.ServeShapePlugTest do assert Jason.decode!(conn.resp_body) == [%{"headers" => %{"control" => "must-refetch"}}] assert 
get_resp_header(conn, "electric-shape-id") == [new_shape_id] - assert get_resp_header(conn, "location") == ["/?shape_id=#{new_shape_id}&offset=-1"] + + assert get_resp_header(conn, "location") == [ + "/?table=public.users&shape_id=#{new_shape_id}&offset=-1" + ] end test "sends 400 when shape ID does not match shape definition", @@ -651,7 +657,7 @@ defmodule Electric.Plug.ServeShapePlugTest do ctx |> conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{"50_12"}&shape_id=#{@test_shape_id}" ) |> ServeShapePlug.call([]) @@ -668,7 +674,7 @@ defmodule Electric.Plug.ServeShapePlugTest do test "sends 400 when omitting primary key columns in selection", ctx do conn = ctx - |> conn(:get, %{"root_table" => "public.users", "columns" => "value"}, "?offset=-1") + |> conn(:get, %{"table" => "public.users", "columns" => "value"}, "?offset=-1") |> ServeShapePlug.call([]) assert conn.status == 400 @@ -681,7 +687,7 @@ defmodule Electric.Plug.ServeShapePlugTest do test "sends 400 when selecting invalid columns", ctx do conn = ctx - |> conn(:get, %{"root_table" => "public.users", "columns" => "id,invalid"}, "?offset=-1") + |> conn(:get, %{"table" => "public.users", "columns" => "id,invalid"}, "?offset=-1") |> ServeShapePlug.call([]) assert conn.status == 400 diff --git a/packages/sync-service/test/electric/plug/utils_test.exs b/packages/sync-service/test/electric/plug/utils_test.exs index e79d347485..5ca1d67d7b 100644 --- a/packages/sync-service/test/electric/plug/utils_test.exs +++ b/packages/sync-service/test/electric/plug/utils_test.exs @@ -2,4 +2,52 @@ defmodule Electric.Plug.UtilsTest do alias Electric.Plug.Utils use ExUnit.Case, async: true doctest Utils, import: true + + describe "seconds_since_oct9th_2024_next_interval/2" do + test "returns expected interval" do + long_poll_timeout_ms = 20000 + long_poll_timeout_sec = div(long_poll_timeout_ms, 1000) + # Calculate the expected next interval + now = DateTime.utc_now() + oct9th2024 = DateTime.from_naive!(~N[2024-10-09 00:00:00], "Etc/UTC") + diff_in_seconds = DateTime.diff(now, oct9th2024, :second) + expected_interval = ceil(diff_in_seconds / long_poll_timeout_sec) * long_poll_timeout_sec + + # Assert that the function returns the expected value + assert Utils.seconds_since_oct9th_2024_next_interval(long_poll_timeout_ms) == + expected_interval + end + + test "returns expected inteval with different timeout" do + long_poll_timeout_ms = 30000 + long_poll_timeout_sec = div(long_poll_timeout_ms, 1000) + + # Calculate the expected next interval + now = DateTime.utc_now() + oct9th2024 = DateTime.from_naive!(~N[2024-10-09 00:00:00], "Etc/UTC") + diff_in_seconds = DateTime.diff(now, oct9th2024, :second) + expected_interval = ceil(diff_in_seconds / long_poll_timeout_sec) * long_poll_timeout_sec + + # Assert that the function returns the expected value + assert Utils.seconds_since_oct9th_2024_next_interval(long_poll_timeout_ms) == + expected_interval + end + + test "returns expected interval with different timeout and cursor collision" do + long_poll_timeout_ms = 30000 + long_poll_timeout_sec = div(long_poll_timeout_ms, 1000) + + # Calculate the expected next interval + now = DateTime.utc_now() + oct9th2024 = DateTime.from_naive!(~N[2024-10-09 00:00:00], "Etc/UTC") + diff_in_seconds = DateTime.diff(now, oct9th2024, :second) + expected_interval = ceil(diff_in_seconds / long_poll_timeout_sec) * long_poll_timeout_sec + + # Assert that the function returns a DIFFERENT value due to collision + assert 
Utils.seconds_since_oct9th_2024_next_interval( + long_poll_timeout_ms, + "#{expected_interval}" + ) != expected_interval + end + end end diff --git a/packages/sync-service/test/electric/shapes/shape_test.exs b/packages/sync-service/test/electric/shapes/shape_test.exs index 6773e095db..422c23f800 100644 --- a/packages/sync-service/test/electric/shapes/shape_test.exs +++ b/packages/sync-service/test/electric/shapes/shape_test.exs @@ -285,14 +285,14 @@ defmodule Electric.Shapes.ShapeTest do end test "errors on empty table name", %{inspector: inspector} do - {:error, {:root_table, ["Invalid zero-length delimited identifier"]}} = + {:error, {:table, ["Invalid zero-length delimited identifier"]}} = Shape.new("", inspector: inspector) end test "errors when the table doesn't exist", %{inspector: inspector} do {:error, { - :root_table, + :table, [ ~S|Table "nonexistent" does not exist. If the table name contains capitals or special characters you must quote it.| ] diff --git a/packages/typescript-client/README.md b/packages/typescript-client/README.md index 8b3e452edf..81b4d1253a 100644 --- a/packages/typescript-client/README.md +++ b/packages/typescript-client/README.md @@ -50,7 +50,8 @@ import { ShapeStream } from '@electric-sql/client' // Passes subscribers rows as they're inserted, updated, or deleted const stream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/foo`, + url: `${BASE_URL}/v1/shape`, + table: `foo`, }) stream.subscribe(messages => { @@ -66,7 +67,8 @@ stream.subscribe(messages => { import { ShapeStream, Shape } from '@electric-sql/client' const stream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/foo`, + url: `${BASE_URL}/v1/shape`, + table: `foo`, }) const shape = new Shape(stream) diff --git a/packages/typescript-client/src/client.ts b/packages/typescript-client/src/client.ts index b5df0d0866..db4e0cfc25 100644 --- a/packages/typescript-client/src/client.ts +++ b/packages/typescript-client/src/client.ts @@ -27,6 +27,7 @@ import { SHAPE_SCHEMA_HEADER, WHERE_QUERY_PARAM, DATABASE_ID_QUERY_PARAM, + TABLE_QUERY_PARAM, } from './constants' /** @@ -34,8 +35,8 @@ import { */ export interface ShapeStreamOptions { /** - * The full URL to where the Shape is hosted. This can either be the Electric server - * directly or a proxy. E.g. for a local Electric instance, you might set `http://localhost:3000/v1/shape/foo` + * The full URL to where the Shape is served. This can either be the Electric server + * directly or a proxy. E.g. for a local Electric instance, you might set `http://localhost:3000/v1/shape` */ url: string @@ -45,6 +46,11 @@ export interface ShapeStreamOptions { */ databaseId?: string + /** + * The root table for the shape. + */ + table: string + /** * The where clauses for the shape. */ @@ -211,7 +217,7 @@ export class ShapeStream = Row> async start() { this.#isUpToDate = false - const { url, where, columns, signal } = this.options + const { url, table, where, columns, signal } = this.options try { while ( @@ -219,6 +225,7 @@ export class ShapeStream = Row> this.options.subscribe ) { const fetchUrl = new URL(url) + fetchUrl.searchParams.set(TABLE_QUERY_PARAM, table) if (where) fetchUrl.searchParams.set(WHERE_QUERY_PARAM, where) if (columns && columns.length > 0) fetchUrl.searchParams.set(COLUMNS_QUERY_PARAM, columns.join(`,`)) @@ -425,7 +432,10 @@ export class ShapeStream = Row> function validateOptions(options: Partial>): void { if (!options.url) { - throw new Error(`Invalid shape option. It must provide the url`) + throw new Error(`Invalid shape options. 
It must provide the url`) + } + if (!options.table) { + throw new Error(`Invalid shape options. It must provide the table`) } if (options.signal && !(options.signal instanceof AbortSignal)) { throw new Error( diff --git a/packages/typescript-client/src/constants.ts b/packages/typescript-client/src/constants.ts index ab465a9d73..45a0d86f33 100644 --- a/packages/typescript-client/src/constants.ts +++ b/packages/typescript-client/src/constants.ts @@ -9,4 +9,5 @@ export const DATABASE_ID_QUERY_PARAM = `database_id` export const OFFSET_QUERY_PARAM = `offset` export const WHERE_QUERY_PARAM = `where` export const COLUMNS_QUERY_PARAM = `columns` +export const TABLE_QUERY_PARAM = `table` export const LIVE_QUERY_PARAM = `live` diff --git a/packages/typescript-client/src/shape.ts b/packages/typescript-client/src/shape.ts index 0965aa4811..6ea1529f3c 100644 --- a/packages/typescript-client/src/shape.ts +++ b/packages/typescript-client/src/shape.ts @@ -21,7 +21,7 @@ export type ShapeChangedCallback = Row> = (data: { * @param {ShapeStream} - the underlying shape stream * @example * ``` - * const shapeStream = new ShapeStream<{ foo: number }>(url: 'http://localhost:3000/v1/shape/foo'}) + * const shapeStream = new ShapeStream<{ foo: number }>(url: `http://localhost:3000/v1/shape`, table: `foo`}) * const shape = new Shape(shapeStream) * ``` * diff --git a/packages/typescript-client/test/cache.test.ts b/packages/typescript-client/test/cache.test.ts index 49d348805a..9db79c2f1a 100644 --- a/packages/typescript-client/test/cache.test.ts +++ b/packages/typescript-client/test/cache.test.ts @@ -72,7 +72,7 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => { }) => { // First request get initial request const initialRes = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=-1`, + `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) @@ -82,13 +82,14 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => { // add some data and follow with live request await insertIssues({ title: `foo` }) const searchParams = new URLSearchParams({ + table: issuesTableUrl, offset: initialRes.headers.get(`electric-chunk-last-offset`)!, shape_id: initialRes.headers.get(`electric-shape-id`)!, live: `true`, }) const liveRes = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?${searchParams.toString()}`, + `${proxyCacheBaseUrl}/v1/shape?${searchParams.toString()}`, {} ) expect(liveRes.status).toBe(200) @@ -96,7 +97,7 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => { // Second request gets a cached response const cachedRes = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?${searchParams.toString()}`, + `${proxyCacheBaseUrl}/v1/shape?${searchParams.toString()}`, {} ) expect(cachedRes.status).toBe(200) @@ -110,7 +111,7 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => { }) => { // First request gets non-cached response const originalRes = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=-1`, + `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) @@ -119,7 +120,7 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => { // Second request gets cached response const cachedRes = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=-1`, + `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) expect(cachedRes.status).toBe(200) @@ -132,12 +133,12 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => { issuesTableUrl, }) => { const originalRes = await fetch( - 
`${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=-1`, + `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) const lastOffset = originalRes.headers.get(CHUNK_LAST_OFFSET_HEADER) const shapeId = originalRes.headers.get(SHAPE_ID_HEADER) - const urlToTest = `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=${lastOffset}&shape_id=${shapeId}` + const urlToTest = `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=${lastOffset}&shape_id=${shapeId}` // Make a first request such that response is cached const originalUpToDateRes = await fetch(urlToTest, {}) @@ -166,12 +167,12 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => { issuesTableUrl, }) => { const originalRes = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=-1`, + `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) const lastOffset = originalRes.headers.get(CHUNK_LAST_OFFSET_HEADER) const shapeId = originalRes.headers.get(SHAPE_ID_HEADER) - const urlToTest = `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=${lastOffset}&shape_id=${shapeId}` + const urlToTest = `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=${lastOffset}&shape_id=${shapeId}` // Make a first request such that response is cached const originalUpToDateRes = await fetch(urlToTest, {}) @@ -209,7 +210,7 @@ describe(`HTTP Initial Data Caching`, { timeout: 30000 }, () => { // Make a client that fetches a shape // which forces the shape data to be cached const client1Res = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=-1`, + `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) expect(client1Res.status).toBe(200) @@ -222,7 +223,7 @@ describe(`HTTP Initial Data Caching`, { timeout: 30000 }, () => { // Make a 2nd client that fetches the shape // check that it is served from cached data const client2Res = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=-1`, + `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) expect(client2Res.status).toBe(200) @@ -245,7 +246,7 @@ describe(`HTTP Initial Data Caching`, { timeout: 30000 }, () => { // should tell you to go back to initial sync // because the shape is out of scope const liveRes = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=${latestOffset}&shape_id=${originalShapeId}&live`, + `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=${latestOffset}&shape_id=${originalShapeId}&live`, {} ) expect(liveRes.status).toBe(409) diff --git a/packages/typescript-client/test/client.test-d.ts b/packages/typescript-client/test/client.test-d.ts index 61318ec552..0b7f9e80ad 100644 --- a/packages/typescript-client/test/client.test-d.ts +++ b/packages/typescript-client/test/client.test-d.ts @@ -19,6 +19,7 @@ describe(`client`, () => { describe(`ShapeStream`, () => { it(`should infer generic row return type when no type is provided`, () => { const shapeStream = new ShapeStream({ + table: ``, url: ``, }) @@ -30,6 +31,7 @@ describe(`client`, () => { it(`should infer correct return type when provided`, () => { const shapeStream = new ShapeStream({ + table: ``, url: ``, parser: { timestampz: (date: string) => { @@ -50,6 +52,7 @@ describe(`client`, () => { describe(`Shape`, () => { it(`should infer generic row return type when no type is provided`, async () => { const shapeStream = new ShapeStream({ + table: ``, url: ``, }) const shape = new Shape(shapeStream) @@ -67,6 +70,7 @@ describe(`client`, () => { it(`should infer correct 
return type when provided`, async () => { const shapeStream = new ShapeStream({ + table: ``, url: ``, parser: { timestampz: (date: string) => { diff --git a/packages/typescript-client/test/client.test.ts b/packages/typescript-client/test/client.test.ts index e67a8505b9..e16dbb97c4 100644 --- a/packages/typescript-client/test/client.test.ts +++ b/packages/typescript-client/test/client.test.ts @@ -10,7 +10,8 @@ describe(`Shape`, () => { it(`should sync an empty shape`, async ({ issuesTableUrl }) => { const start = Date.now() const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, }) const shape = new Shape(shapeStream) @@ -30,7 +31,8 @@ describe(`Shape`, () => { const start = Date.now() const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, }) const shape = new Shape(shapeStream) @@ -64,7 +66,8 @@ describe(`Shape`, () => { const start = Date.now() const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, }) const shape = new Shape(shapeStream) @@ -156,7 +159,8 @@ describe(`Shape`, () => { } const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, fetchClient: fetchWrapper, }) @@ -192,7 +196,8 @@ describe(`Shape`, () => { const start = Date.now() const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, }) const shape = new Shape(shapeStream) @@ -226,7 +231,8 @@ describe(`Shape`, () => { it(`should support unsubscribe`, async ({ issuesTableUrl }) => { const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, }) const shape = new Shape(shapeStream) @@ -242,7 +248,8 @@ describe(`Shape`, () => { it(`should expose connection status`, async ({ issuesTableUrl }) => { const aborter = new AbortController() const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, }) @@ -266,7 +273,8 @@ describe(`Shape`, () => { }) => { let fetchShouldFail = false const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, fetchClient: async (_input, _init) => { if (fetchShouldFail) throw new FetchError( @@ -301,7 +309,8 @@ describe(`Shape`, () => { issuesTableUrl, }) => { const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, subscribe: false, }) @@ -314,7 +323,8 @@ describe(`Shape`, () => { it(`should expose isLoading status`, async ({ issuesTableUrl }) => { const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, fetchClient: async (input, init) => { await sleep(20) return fetch(input, init) diff --git a/packages/typescript-client/test/fetch.test.ts b/packages/typescript-client/test/fetch.test.ts index ddbd0f6c64..ffbdf14050 100644 --- a/packages/typescript-client/test/fetch.test.ts +++ b/packages/typescript-client/test/fetch.test.ts @@ -173,7 +173,7 @@ 
describe(`createFetchWithBackoff`, () => { }) describe(`createFetchWithChunkBuffer`, () => { - const baseUrl = `https://example.com/v1/shape/foo` + const baseUrl = `https://example.com/v1/shape?table=foo` let mockFetch: Mock const responseHeaders = (headers: Record) => { return new Headers(headers) @@ -218,7 +218,7 @@ describe(`createFetchWithChunkBuffer`, () => { expect(result).toBe(initialResponse) // Check if the next chunk was prefetched - const nextUrl = `${baseUrl}?shape_id=123&offset=456` + const nextUrl = `${baseUrl}&shape_id=123&offset=456` expect(mockFetch).toHaveBeenCalledWith(nextUrl, expect.anything()) }) @@ -250,23 +250,23 @@ describe(`createFetchWithChunkBuffer`, () => { expect(mockFetch).toHaveBeenCalledTimes(1 + maxPrefetchNum) expect(mockFetch).toHaveBeenNthCalledWith( 2, - `${baseUrl}?shape_id=123&offset=0`, + `${baseUrl}&shape_id=123&offset=0`, expect.anything() ) expect(mockFetch).toHaveBeenNthCalledWith( 3, - `${baseUrl}?shape_id=123&offset=1`, + `${baseUrl}&shape_id=123&offset=1`, expect.anything() ) // Second request consumes one of the prefetched responses and // next one fires up - await fetchWrapper(`${baseUrl}?shape_id=123&offset=0`) + await fetchWrapper(`${baseUrl}&shape_id=123&offset=0`) await sleep() expect(mockFetch).toHaveBeenCalledTimes(1 + maxPrefetchNum + 1) expect(mockFetch).toHaveBeenNthCalledWith( 4, - `${baseUrl}?shape_id=123&offset=2`, + `${baseUrl}&shape_id=123&offset=2`, expect.anything() ) }) @@ -297,7 +297,7 @@ describe(`createFetchWithChunkBuffer`, () => { expect(result).toBe(initialResponse) // fetch the next chunk as well - const nextUrl = `${baseUrl}?shape_id=123&offset=456` + const nextUrl = `${baseUrl}&shape_id=123&offset=456` const nextResult = await fetchWrapper(nextUrl) expect(nextResult).toBe(nextResponse) @@ -339,7 +339,7 @@ describe(`createFetchWithChunkBuffer`, () => { expect(result).toBe(initialResponse) // Prefetch should have been attempted but failed - const nextUrl = `${baseUrl}?shape_id=123&offset=456` + const nextUrl = `${baseUrl}&shape_id=123&offset=456` expect(mockFetch).toHaveBeenCalledWith(nextUrl, expect.anything()) // One for the main request, one for the prefetch @@ -370,7 +370,7 @@ describe(`createFetchWithChunkBuffer`, () => { expect(mockFetch).toHaveBeenCalledTimes(2) // requesting a different path should clear the prefetches - const altUrl = `${baseUrl}/bar` + const altUrl = `${baseUrl}_alt` await fetchWrapper(altUrl) await sleep() @@ -381,7 +381,7 @@ describe(`createFetchWithChunkBuffer`, () => { expect(mockFetch).toHaveBeenNthCalledWith(1, baseUrl) expect(mockFetch).toHaveBeenNthCalledWith( 2, - `${baseUrl}?shape_id=123&offset=0`, + `${baseUrl}&shape_id=123&offset=0`, expect.anything() ) @@ -389,12 +389,12 @@ describe(`createFetchWithChunkBuffer`, () => { expect(mockFetch).toHaveBeenNthCalledWith(3, altUrl) expect(mockFetch).toHaveBeenNthCalledWith( 4, - `${altUrl}?shape_id=123&offset=2`, + `${altUrl}&shape_id=123&offset=2`, expect.anything() ) expect(mockFetch).toHaveBeenNthCalledWith( 5, - `${altUrl}?shape_id=123&offset=3`, + `${altUrl}&shape_id=123&offset=3`, expect.anything() ) }) diff --git a/packages/typescript-client/test/integration.test.ts b/packages/typescript-client/test/integration.test.ts index 19e40910f9..572ad2b836 100644 --- a/packages/typescript-client/test/integration.test.ts +++ b/packages/typescript-client/test/integration.test.ts @@ -31,7 +31,8 @@ describe(`HTTP Sync`, () => { // Get initial data const shapeData = new Map() const issueStream = new ShapeStream({ - url: 
`${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, subscribe: false, signal: aborter.signal, }) @@ -68,7 +69,8 @@ describe(`HTTP Sync`, () => { // Get initial data const shapeData = new Map() const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, fetchClient: fetchWrapper, }) @@ -119,7 +121,7 @@ describe(`HTTP Sync`, () => { issuesTableUrl, }) => { const res = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1`, + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) const shapeId = res.headers.get(`electric-shape-id`) @@ -130,7 +132,7 @@ describe(`HTTP Sync`, () => { issuesTableUrl, }) => { const res = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1`, + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) const lastOffset = res.headers.get(`electric-chunk-last-offset`) @@ -149,7 +151,8 @@ describe(`HTTP Sync`, () => { // Get initial data const shapeData = new Map() const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, }) @@ -224,7 +227,8 @@ describe(`HTTP Sync`, () => { // Now fetch the data from the HTTP endpoint const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${tableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: tableUrl, signal: aborter.signal, }) const client = new Shape(issueStream) @@ -299,7 +303,9 @@ describe(`HTTP Sync`, () => { ) await vi.waitFor(async () => { - const res = await fetch(`${BASE_URL}/v1/shape/${tableUrl}?offset=-1`) + const res = await fetch( + `${BASE_URL}/v1/shape?table=${tableUrl}&offset=-1` + ) const body = (await res.json()) as Message[] expect(body.length).greaterThan(1) }) @@ -351,7 +357,8 @@ describe(`HTTP Sync`, () => { const shapeData = new Map() const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, }) let secondRowId = `` @@ -398,7 +405,8 @@ describe(`HTTP Sync`, () => { const shapeData = new Map() const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, fetchClient: fetchWrapper, }) @@ -440,14 +448,16 @@ describe(`HTTP Sync`, () => { const shapeData1 = new Map() const aborter1 = new AbortController() const issueStream1 = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter1.signal, }) const shapeData2 = new Map() const aborter2 = new AbortController() const issueStream2 = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter2.signal, }) @@ -487,7 +497,8 @@ describe(`HTTP Sync`, () => { let lastOffset: Offset = `-1` const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, subscribe: false, }) @@ -508,7 +519,7 @@ describe(`HTTP Sync`, () => { // And wait until it's definitely seen await vi.waitFor(async () => { const res = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1` + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1` ) const body = (await res.json()) as Message[] expect(body).toHaveLength(12) @@ 
-517,7 +528,8 @@ describe(`HTTP Sync`, () => { let catchupOpsCount = 0 const newAborter = new AbortController() const newIssueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, subscribe: false, signal: newAborter.signal, offset: lastOffset, @@ -540,7 +552,7 @@ describe(`HTTP Sync`, () => { insertIssues, }) => { const res = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1`, + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) const cacheHeaders = res.headers.get(`cache-control`) @@ -566,7 +578,7 @@ describe(`HTTP Sync`, () => { await sleep(40) const res2 = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1`, + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) const etag2Header = res2.headers.get(`etag`) @@ -576,7 +588,7 @@ describe(`HTTP Sync`, () => { it(`should revalidate etags`, async ({ issuesTableUrl, insertIssues }) => { // Start the shape - await fetch(`${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1`, {}) + await fetch(`${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1`, {}) // Fill it up in separate transactions for (const i of [1, 2, 3, 4, 5, 6, 7, 8, 9]) { await insertIssues({ title: `foo${i}` }) @@ -585,7 +597,7 @@ describe(`HTTP Sync`, () => { await sleep(100) const res = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1`, + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) const messages = (await res.json()) as Message[] @@ -598,7 +610,7 @@ describe(`HTTP Sync`, () => { assert(etag !== null, `Response should have etag header`) const etagValidation = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1`, + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1`, { headers: { 'If-None-Match': etag }, } @@ -609,7 +621,7 @@ describe(`HTTP Sync`, () => { // Get etag for catchup const catchupEtagRes = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=${midOffset}&shape_id=${shapeId}`, + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=${midOffset}&shape_id=${shapeId}`, {} ) const catchupEtag = catchupEtagRes.headers.get(`etag`) @@ -618,7 +630,7 @@ describe(`HTTP Sync`, () => { // Catch-up offsets should also use the same etag as they're // also working through the end of the current log. 
const catchupEtagValidation = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=${midOffset}&shape_id=${shapeId}`, + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=${midOffset}&shape_id=${shapeId}`, { headers: { 'If-None-Match': catchupEtag }, } @@ -644,7 +656,8 @@ describe(`HTTP Sync`, () => { // Get initial data const shapeData = new Map() const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, where: `title LIKE 'foo%'`, subscribe: true, signal: aborter.signal, @@ -680,7 +693,8 @@ describe(`HTTP Sync`, () => { // Get initial data const shapeData = new Map() const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${tableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: tableUrl, columns: [`txt`, `i2`, `i4`], signal: aborter.signal, }) @@ -724,7 +738,8 @@ describe(`HTTP Sync`, () => { // Get initial data let lastOffset: Offset = `-1` const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, subscribe: true, signal: aborter.signal, }) @@ -756,7 +771,7 @@ describe(`HTTP Sync`, () => { // And wait until it's definitely seen await vi.waitFor(async () => { const res = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1` + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1` ) const body = (await res.json()) as Message[] expect(body.length).greaterThan(2) @@ -775,7 +790,8 @@ describe(`HTTP Sync`, () => { const newAborter = new AbortController() const newIssueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, subscribe: false, signal: newAborter.signal, offset: lastOffset, @@ -811,7 +827,8 @@ describe(`HTTP Sync`, () => { aborter, }) => { const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, subscribe: true, signal: aborter.signal, }) @@ -821,7 +838,8 @@ describe(`HTTP Sync`, () => { }) const invalidIssueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, subscribe: true, shapeId: issueStream.shapeId, where: `1=1`, @@ -875,7 +893,8 @@ describe(`HTTP Sync`, () => { } const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, subscribe: true, signal: aborter.signal, fetchClient: fetchWrapper, diff --git a/packages/typescript-client/test/stream.test.ts b/packages/typescript-client/test/stream.test.ts index 9c1ab77d43..e816e41b83 100644 --- a/packages/typescript-client/test/stream.test.ts +++ b/packages/typescript-client/test/stream.test.ts @@ -2,7 +2,7 @@ import { afterEach, beforeEach, describe, expect, it } from 'vitest' import { ShapeStream } from '../src' describe(`ShapeStream`, () => { - const shapeUrl = `https://example.com/v1/shape/foo` + const shapeUrl = `https://example.com/v1/shape` let aborter: AbortController beforeEach(() => { @@ -25,6 +25,7 @@ describe(`ShapeStream`, () => { const aborter = new AbortController() new ShapeStream({ url: shapeUrl, + table: `foo`, signal: aborter.signal, fetchClient: fetchWrapper, headers: { diff --git a/packages/typescript-client/test/support/test-context.ts b/packages/typescript-client/test/support/test-context.ts index 4736ab50ab..146fc74e65 100644 --- a/packages/typescript-client/test/support/test-context.ts +++ 
b/packages/typescript-client/test/support/test-context.ts @@ -47,7 +47,7 @@ export const testWithDbClient = test.extend<{ } = {} ) => { const baseUrl = inject(`baseUrl`) - const url = new URL(`${baseUrl}/v1/shape/${table}`) + const url = new URL(`${baseUrl}/v1/shape?table=${table}`) if (!options.databaseId) { options.databaseId = inject(`databaseId`) diff --git a/website/docs/api/clients/typescript.md b/website/docs/api/clients/typescript.md index f5d2841571..4576bd9b41 100644 --- a/website/docs/api/clients/typescript.md +++ b/website/docs/api/clients/typescript.md @@ -27,7 +27,8 @@ import { ShapeStream } from '@electric-sql/client' // Passes subscribers rows as they're inserted, updated, or deleted const stream = new ShapeStream({ - url: `http://localhost:3000/v1/shape/foo`, + url: `http://localhost:3000/v1/shape`, + table: `foo`, }) stream.subscribe(messages => { @@ -53,7 +54,8 @@ For example, we can extend the [default parser](https://github.com/electric-sql/ ```ts const stream = new ShapeStream({ - url: `http://localhost:3000/v1/shape/foo`, + url: `http://localhost:3000/v1/shape`, + table: `foo`, parser: { bool: (value: string) => value === `true` ? 1 : 0 } @@ -66,7 +68,8 @@ const stream = new ShapeStream({ import { ShapeStream, Shape } from '@electric-sql/client' const stream = new ShapeStream({ - url: `http://localhost:3000/v1/shape/foo`, + url: `http://localhost:3000/v1/shape`, + table: `foo`, }) const shape = new Shape(stream) diff --git a/website/docs/api/http.md b/website/docs/api/http.md index 1baf13ce99..70647a85a9 100644 --- a/website/docs/api/http.md +++ b/website/docs/api/http.md @@ -23,14 +23,14 @@ The rest of this page will describe the features of the API. ## Syncing shapes The API allows you to sync [Shapes](/docs/guides/shapes) of data out of Postgres using the - GET /v1/shape endpoint. The pattern is as follows. First you make an initial sync request to get the current data for the Shape, such as: ```sh -curl -i 'http://localhost:3000/v1/shape/foo?offset=-1' +curl -i 'http://localhost:3000/v1/shape?table=foo&offset=-1' ``` Then you switch into a live mode to use long-polling to receive real-time updates. We'll go over these steps in more detail below. First a note on the data that the endpoint returns. @@ -87,7 +87,7 @@ Note that the other control message is `must-refetch` which indicates that the c Once a client is up-to-date, it can switch to live mode to receive real-time updates, by making requests with `live=true`, an `offset` and a `shape_id`, e.g.: ```sh -curl -i 'http://localhost:3000/v1/shape/foo?live=true&offset=0_0&shape_id=3833821-1721812114261' +curl -i 'http://localhost:3000/v1/shape?table=foo&live=true&offset=0_0&shape_id=3833821-1721812114261' ``` The `live` parameter puts the server into live mode, where it will hold open the connection, waiting for new data arrive. This allows you to implement a long-polling strategy to consume real-time updates. 
diff --git a/website/docs/api/integrations/react.md b/website/docs/api/integrations/react.md index 77f44b6f20..ced62680da 100644 --- a/website/docs/api/integrations/react.md +++ b/website/docs/api/integrations/react.md @@ -12,7 +12,8 @@ import { useShape } from "@electric-sql/react" export default function MyComponent() { const { isLoading, lastSyncedAt, data } = useShape<{ title: string}>({ - url: `http://localhost:3000/v1/shape/foo`, + url: `http://localhost:3000/v1/shape`, + table: `foo`, }) if (isLoading) { diff --git a/website/docs/guides/auth.md b/website/docs/guides/auth.md index d25bc90aa8..b065ce6f62 100644 --- a/website/docs/guides/auth.md +++ b/website/docs/guides/auth.md @@ -69,15 +69,13 @@ Then for the `/api/shapes/users` route: ```tsx export async function GET( request: Request, - { params }: { params: { table: string } } ) { const url = new URL(request.url) - const { table } = params // Construct the upstream URL - const originUrl = new URL(`http://localhost:3000/v1/shape/${table}`) + const originUrl = new URL(`http://localhost:3000/v1/shape`) - // Copy over the shape_id & offset query params that the + // Copy over the table, shape_id, and offset query params that the // Electric client adds so we return the right part of the Shape log. url.searchParams.forEach((value, key) => { if ([`shape_id`, `offset`].includes(key)) { diff --git a/website/docs/guides/shapes.md b/website/docs/guides/shapes.md index f13e373f48..b2b389e975 100644 --- a/website/docs/guides/shapes.md +++ b/website/docs/guides/shapes.md @@ -38,13 +38,13 @@ A client can choose to sync one shape, or lots of shapes. Many clients can sync Shapes are defined by: -- a `root_table`, such as `projects` +- a `table`, such as `projects` - a `where` clause, used to filter the rows in that table, such as `status='active'` > [!IMPORTANT] Limitations > Shapes are currently single table, whole row only. You can sync all the rows in a table, or a subset of the rows in that table. You can't yet [select columns](#whole-rows) or sync an [include tree](#single-table) without filtering or joining in the client. -### `root_table` +### `table` This is the root table of the shape. It must match a table in your Postgres database. @@ -52,7 +52,7 @@ The value can be just a tablename like `projects`, or can be a qualified tablena ### `where` clause -Optional where clause to filter rows in the `root_table`. +Optional where clause to filter rows in the `table`. This must be a valid [PostgreSQL WHERE clause](https://www.postgresql.org/docs/current/queries-table-expressions.html#QUERIES-WHERE) using SQL syntax, e.g.: @@ -91,18 +91,18 @@ client. In the client, shapes can be held as objects in memory, for example usin ### HTTP You can sync shapes manually using the - GET /v1/shape endpoint. First make an initial sync request to get the current data for the Shape, such as: ```sh -curl -i 'http://localhost:3000/v1/shape/foo?offset=-1' +curl -i 'http://localhost:3000/v1/shape?table=foo&offset=-1' ``` Then switch into a live mode to use long-polling to receive real-time updates: ```sh -curl -i 'http://localhost:3000/v1/shape/foo?live=true&offset=...&shape_id=...' +curl -i 'http://localhost:3000/v1/shape?table=foo&live=true&offset=...&shape_id=...' ``` These requests both return an array of [Shape Log](/docs/api/http#shape-log) entries. You can process these manually, or use a higher-level client. 
@@ -123,7 +123,8 @@ Instantiate a `ShapeStream` and materialise into a `Shape`: import { ShapeStream, Shape } from '@electric-sql/client' const stream = new ShapeStream({ - url: `http://localhost:3000/v1/shape/foo`, + url: `http://localhost:3000/v1/shape`, + table: `foo`, }) const shape = new Shape(stream) diff --git a/website/docs/quickstart.md b/website/docs/quickstart.md index a4b713a1dc..b114baf2a5 100644 --- a/website/docs/quickstart.md +++ b/website/docs/quickstart.md @@ -36,13 +36,13 @@ First let's try the low-level [HTTP API](/docs/api/http). In a new terminal, use `curl` to request a [Shape](/docs/guides/shapes) containing all rows in the `foo` table: ```sh -curl -i 'http://localhost:3000/v1/shape/foo?offset=-1' +curl -i 'http://localhost:3000/v1/shape?table=foo&offset=-1' ``` ::: info A bit of explanation about the URL structure. -- `/v1/shape/` is a standard prefix with the API version and the shape sync endpoint path -- `foo` is the name of the [`root_table`](/docs/guides/shapes#root-table) of the shape (and is required); if you wanted to sync data from the `items` table, you would change the path to `/v1/shape/items` +- `/v1/shape` is a standard prefix with the API version and the shape sync endpoint path +- `foo` is the name of the [`table`](/docs/guides/shapes#table) of the shape (and is required); if you wanted to sync data from the `items` table, you would change the path to `/v1/shape?table=items` - `offset=-1` means we're asking for the *entire* Shape as we don't have any of the data cached locally yet. If we had previously fetched the shape and wanted to see if there were any updates, we'd set the offset to the last offset we'd already seen. ::: @@ -60,7 +60,7 @@ access-control-expose-headers: * access-control-allow-methods: GET, POST, OPTIONS content-type: application/json; charset=utf-8 -{"root_table":["table not found"]} +{"table":["table not found"]} ``` So it didn't work! Which makes sense... as it's an empty database without any tables or data. Let's fix that. @@ -99,7 +99,7 @@ INSERT INTO foo (name, value) VALUES Exit your Postgres client (e.g.: with `psql` enter `\q`) and try the `curl` request again: ```sh -curl -i 'http://localhost:3000/v1/shape/foo?offset=-1' +curl -i 'http://localhost:3000/v1/shape?table=foo&offset=-1' ``` Success! You should see the data you just put into Postgres in the shape response: @@ -158,7 +158,8 @@ import { useShape } from '@electric-sql/react' function Component() { const { data } = useShape({ - url: `http://localhost:3000/v1/shape/foo`, + url: `http://localhost:3000/v1/shape`, + table: `foo` }) return ( diff --git a/website/electric-api.yaml b/website/electric-api.yaml index 90a7fa88da..bb22caf97d 100644 --- a/website/electric-api.yaml +++ b/website/electric-api.yaml @@ -17,19 +17,19 @@ servers: description: Local server paths: - /v1/shape/{root_table}: + /v1/shape: get: summary: Get Shape description: |- Load the initial data for a shape and poll for real-time updates. - Define your shape using the `root_table` and `where` parameters. + Define your shape using the `table` and `where` parameters. Use `offset` to fetch data from a specific position in the shape log and the `live` parameter to consume real-time updates. parameters: - # Path parameters - - name: root_table - in: path + # Query parameters + - name: table + in: query schema: type: string examples: @@ -117,7 +117,7 @@ paths: schema: type: string description: |- - Optional where clause to filter rows in the `root_table`. 
+ Optional where clause to filter rows in the `table`. This should be a valid PostgreSQL WHERE clause using SQL syntax. examples: @@ -133,7 +133,7 @@ paths: schema: type: string description: |- - Optional list of columns to include in the rows from the `root_table`. + Optional list of columns to include in the rows from the `table`. They should always include the primary key columns, and should be formed as a comma separated list of column names exactly as they are in the database schema. @@ -354,9 +354,9 @@ paths: **NOTE** Delete shape only works if Electric is configured to `allow_shape_deletion`. parameters: - # Path parameters - - name: root_table - in: path + # Query parameters + - name: table + in: query schema: type: string examples: diff --git a/website/src/partials/home-cta.md b/website/src/partials/home-cta.md index 645647a4c9..4d56a643d9 100644 --- a/website/src/partials/home-cta.md +++ b/website/src/partials/home-cta.md @@ -24,7 +24,8 @@ import { useShape } from '@electric-sql/react' const Component = () => { const { data } = useShape({ - url: `${BASE_URL}/v1/shape/items` + url: `${BASE_URL}/v1/shape`, + table: `items` }) return ( @@ -87,4 +88,4 @@ to syncing into a local embedded theme="alt" /> - \ No newline at end of file + diff --git a/website/src/partials/sync-into-pglite.tsx b/website/src/partials/sync-into-pglite.tsx index 6e94428978..f81f9b309e 100644 --- a/website/src/partials/sync-into-pglite.tsx +++ b/website/src/partials/sync-into-pglite.tsx @@ -21,7 +21,8 @@ await pg.exec(` // Establish a persistent shape subscription await pg.electric.syncShapeToTable({ - url: `${BASE_URL}/v1/shape/items`, + // TODO update this when the sync plugin is updated. + url: `${BASE_URL}/v1/shape`, table: 'items', primaryKey: ['id'], }) @@ -34,6 +35,6 @@ const Component = () => { ) return ( -
{ JSON.stringify(items) }
-  )
-}
\ No newline at end of file
+    
{JSON.stringify(items)}
+      )
+}
diff --git a/website/use-cases/state-transfer.md b/website/use-cases/state-transfer.md
index afa83c9a30..59c84ac04b 100644
--- a/website/use-cases/state-transfer.md
+++ b/website/use-cases/state-transfer.md
@@ -88,7 +88,8 @@ import { useShape } from '@electric-sql/react'
 
 const Component = () => {
   const { data } = useShape({
-    url: `${BASE_URL}/v1/shape/items`
+    url: `${BASE_URL}/v1/shape`,
+    table: `items`
   })
 
   return (

From 57ac982428cfc3286f4ad864085a13fc48b3141d Mon Sep 17 00:00:00 2001
From: James Arthur 
Date: Tue, 29 Oct 2024 16:02:00 +0100
Subject: [PATCH 02/11] chore: Refactor shape_id -> shape_handle for a clearer
 name

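This renames the client-facing shape ID to a shape "handle": the `shape_id`
query parameter becomes `handle`, the `electric-shape-id` response header
becomes `electric-handle`, and the internal shape_id variables, callbacks
and tests are renamed to shape_handle to match.

As a rough sketch of the client-visible change (the offset and handle values
below are illustrative, and the requests assume a local Electric instance on
port 3000 as used elsewhere in this series):

```sh
# Before this patch: resuming a shape log passed the shape_id query param
curl -i 'http://localhost:3000/v1/shape?table=items&offset=0_0&shape_id=3833821-1721812114261'

# After this patch: the same value is passed as handle, and the server
# returns it in the electric-handle header instead of electric-shape-id
curl -i 'http://localhost:3000/v1/shape?table=items&offset=0_0&handle=3833821-1721812114261'
```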
---
 .gitignore                                    |   2 +-
 integration-tests/tests/crash-recovery.lux    |   6 +-
 .../react-hooks/test/support/test-context.ts  |  12 +-
 packages/sync-service/CHANGELOG.md            |   2 +-
 .../lib/electric/plug/delete_shape_plug.ex    |  14 +-
 .../lib/electric/plug/serve_shape_plug.ex     | 100 +++---
 .../sync-service/lib/electric/shape_cache.ex  | 131 ++++---
 .../shape_cache/crashing_file_storage.ex      |   2 +-
 .../lib/electric/shape_cache/file_storage.ex  |  30 +-
 .../electric/shape_cache/in_memory_storage.ex |  24 +-
 .../lib/electric/shape_cache/shape_status.ex  | 133 +++----
 .../lib/electric/shape_cache/storage.ex       |  16 +-
 packages/sync-service/lib/electric/shapes.ex  |  66 ++--
 .../lib/electric/shapes/consumer.ex           |  91 ++---
 .../electric/shapes/consumer/snapshotter.ex   |  52 ++-
 .../electric/shapes/consumer/supervisor.ex    |  20 +-
 .../electric/shapes/consumer_supervisor.ex    |  10 +-
 .../electric/plug/delete_shape_plug_test.exs  |  12 +-
 .../test/electric/plug/router_test.exs        |  85 ++---
 .../electric/plug/serve_shape_plug_test.exs   | 127 ++++---
 .../shape_cache/shape_status_test.exs         |  99 ++---
 .../storage_implementations_test.exs          |   6 +-
 .../electric/shape_cache/storage_test.exs     |  20 +-
 .../test/electric/shape_cache_test.exs        | 340 +++++++++---------
 .../test/electric/shapes/consumer_test.exs    | 218 +++++------
 .../sync-service/test/support/test_storage.ex |  60 ++--
 packages/typescript-client/CHANGELOG.md       |   2 +-
 packages/typescript-client/src/client.ts      |  44 +--
 packages/typescript-client/src/constants.ts   |   4 +-
 packages/typescript-client/src/fetch.ts       |  12 +-
 packages/typescript-client/test/cache.test.ts |  53 +--
 .../typescript-client/test/client.test.ts     |   2 +-
 packages/typescript-client/test/fetch.test.ts |  36 +-
 .../test/integration.test.ts                  |  36 +-
 .../test/support/test-context.ts              |  18 +-
 website/docs/api/http.md                      |   4 +-
 website/docs/guides/auth.md                   |   6 +-
 website/docs/guides/shapes.md                 |   2 +-
 website/docs/quickstart.md                    |   2 +-
 website/electric-api.yaml                     |  32 +-
 40 files changed, 1002 insertions(+), 929 deletions(-)

diff --git a/.gitignore b/.gitignore
index 95c3401388..7f5514041c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -19,5 +19,5 @@ shape-data.json
 test-dbs
 tsconfig.tsbuildinfo
 wal
-shapes
+/shapes
 .sst
diff --git a/integration-tests/tests/crash-recovery.lux b/integration-tests/tests/crash-recovery.lux
index 6eae172add..5b9ec66107 100644
--- a/integration-tests/tests/crash-recovery.lux
+++ b/integration-tests/tests/crash-recovery.lux
@@ -41,8 +41,8 @@
 [shell client]
   # strip ANSI codes from response for easier matching
   !curl -v -X GET "http://localhost:3000/v1/shape?table=items&offset=-1"
-  ?electric-shape-id: ([\d-]+)
-  [local shape_id=$1]
+  ?electric-handle: ([\d-]+)
+  [local shape_handle=$1]
   ?electric-chunk-last-offset: ([\w\d_]+)
   [local last_offset=$1]
 
@@ -58,7 +58,7 @@
 
 # Client should be able to continue same shape
 [shell client]
-  !curl -v -X GET "http://localhost:3000/v1/shape?table=items&offset=$last_offset&shape_id=$shape_id"
+  !curl -v -X GET "http://localhost:3000/v1/shape?table=items&handle=$shape_handle&offset=$last_offset"
   ??HTTP/1.1 200 OK
 
 [cleanup]
diff --git a/packages/react-hooks/test/support/test-context.ts b/packages/react-hooks/test/support/test-context.ts
index df22ce4d0d..24dcbe6f51 100644
--- a/packages/react-hooks/test/support/test-context.ts
+++ b/packages/react-hooks/test/support/test-context.ts
@@ -10,8 +10,8 @@ export type GeneratedIssueRow = { id?: string; title: string }
 export type UpdateIssueFn = (row: IssueRow) => Promise>
 export type DeleteIssueFn = (row: IssueRow) => Promise>
 export type InsertIssuesFn = (...rows: GeneratedIssueRow[]) => Promise
-export type ClearIssuesShapeFn = (shapeId?: string) => Promise
-export type ClearShapeFn = (table: string, shapeId?: string) => Promise
+export type ClearIssuesShapeFn = (shapeHandle?: string) => Promise
+export type ClearShapeFn = (table: string, shapeHandle?: string) => Promise
 
 export const testWithDbClient = test.extend<{
   dbClient: Client
@@ -35,10 +35,10 @@ export const testWithDbClient = test.extend<{
   baseUrl: async ({}, use) => use(inject(`baseUrl`)),
   pgSchema: async ({}, use) => use(inject(`testPgSchema`)),
   clearShape: async ({}, use) => {
-    use(async (table: string, shapeId?: string) => {
+    use(async (table: string, shapeHandle?: string) => {
       const baseUrl = inject(`baseUrl`)
       const resp = await fetch(
-        `${baseUrl}/v1/shape?table=${table}${shapeId ? `&shape_id=${shapeId}` : ``}`,
+        `${baseUrl}/v1/shape?table=${table}${shapeHandle ? `&handle=${shapeHandle}` : ``}`,
         {
           method: `DELETE`,
         }
@@ -50,7 +50,7 @@ export const testWithDbClient = test.extend<{
             `DELETE ${baseUrl}/v1/shape?table=${table}`
           )
         )
-        throw new Error(`Could not delete shape ${table} with ID ${shapeId}`)
+        throw new Error(`Could not delete shape ${table} with handle ${shapeHandle}`)
       }
     })
   },
@@ -113,6 +113,6 @@ export const testWithIssuesTable = testWithDbClient.extend<{
     }),
 
   clearIssuesShape: async ({ clearShape, issuesTableUrl }, use) => {
-    use((shapeId?: string) => clearShape(issuesTableUrl, shapeId))
+    use((shapeHandle?: string) => clearShape(issuesTableUrl, shapeHandle))
   },
 })
diff --git a/packages/sync-service/CHANGELOG.md b/packages/sync-service/CHANGELOG.md
index b2122ae924..38dc0189c6 100644
--- a/packages/sync-service/CHANGELOG.md
+++ b/packages/sync-service/CHANGELOG.md
@@ -124,7 +124,7 @@
 
 ### Patch Changes
 
-- e3a07b7: Return 400 if shape ID does not match shape definition. Also handle 400 status codes on the client.
+- e3a07b7: Return 400 if shape handle does not match shape definition. Also handle 400 status codes on the client.
 - 5c684bd: Add shape filters to Postgres publication to reduce processing load on Electric.
 
 ## 0.4.3
diff --git a/packages/sync-service/lib/electric/plug/delete_shape_plug.ex b/packages/sync-service/lib/electric/plug/delete_shape_plug.ex
index 761450995f..b24cb857ac 100644
--- a/packages/sync-service/lib/electric/plug/delete_shape_plug.ex
+++ b/packages/sync-service/lib/electric/plug/delete_shape_plug.ex
@@ -29,7 +29,7 @@ defmodule Electric.Plug.DeleteShapePlug do
   defp validate_query_params(%Plug.Conn{} = conn, _) do
     all_params =
       Map.merge(conn.query_params, conn.path_params)
-      |> Map.take(["table", "shape_id"])
+      |> Map.take(["table", "handle"])
       |> Map.put("offset", "-1")
 
     case Params.validate(all_params, inspector: conn.assigns.config[:inspector]) do
@@ -44,16 +44,16 @@ defmodule Electric.Plug.DeleteShapePlug do
   end
 
   defp truncate_or_delete_shape(%Plug.Conn{} = conn, _) do
-    if conn.assigns.shape_id !== nil do
-      with :ok <- Shapes.clean_shape(conn.assigns.shape_id, conn.assigns.config) do
+    if conn.assigns.shape_handle !== nil do
+      with :ok <- Shapes.clean_shape(conn.assigns.shape_handle, conn.assigns.config) do
         send_resp(conn, 202, "")
       end
     else
-      # FIXME: This has a race condition where we accidentally create a snapshot & shape id, but clean
+      # FIXME: This has a race condition where we accidentally create a snapshot & shape handle, but clean
       #        it before snapshot is actually made.
-      with {shape_id, _} <-
-             Shapes.get_or_create_shape_id(conn.assigns.config, conn.assigns.shape_definition),
-           :ok <- Shapes.clean_shape(shape_id, conn.assigns.config) do
+      with {shape_handle, _} <-
+             Shapes.get_or_create_shape_handle(conn.assigns.config, conn.assigns.shape_definition),
+           :ok <- Shapes.clean_shape(shape_handle, conn.assigns.config) do
         send_resp(conn, 202, "")
       end
     end
diff --git a/packages/sync-service/lib/electric/plug/serve_shape_plug.ex b/packages/sync-service/lib/electric/plug/serve_shape_plug.ex
index 5c9e2610a0..13f3dcfde4 100644
--- a/packages/sync-service/lib/electric/plug/serve_shape_plug.ex
+++ b/packages/sync-service/lib/electric/plug/serve_shape_plug.ex
@@ -65,7 +65,7 @@ defmodule Electric.Plug.ServeShapePlug do
     embedded_schema do
       field(:table, :string)
       field(:offset, :string)
-      field(:shape_id, :string)
+      field(:shape_handle, :string)
       field(:live, :boolean, default: false)
       field(:where, :string)
       field(:columns, :string)
@@ -80,7 +80,7 @@ defmodule Electric.Plug.ServeShapePlug do
       |> validate_required([:table, :offset])
       |> cast_offset()
       |> cast_columns()
-      |> validate_shape_id_with_offset()
+      |> validate_shape_handle_with_offset()
       |> validate_live_with_offset()
       |> cast_root_table(opts)
       |> apply_action(:validate)
@@ -127,15 +127,16 @@ defmodule Electric.Plug.ServeShapePlug do
       end
     end
 
-    def validate_shape_id_with_offset(%Ecto.Changeset{valid?: false} = changeset), do: changeset
+    def validate_shape_handle_with_offset(%Ecto.Changeset{valid?: false} = changeset),
+      do: changeset
 
-    def validate_shape_id_with_offset(%Ecto.Changeset{} = changeset) do
+    def validate_shape_handle_with_offset(%Ecto.Changeset{} = changeset) do
       offset = fetch_change!(changeset, :offset)
 
       if offset == LogOffset.before_all() do
         changeset
       else
-        validate_required(changeset, [:shape_id], message: "can't be blank when offset != -1")
+        validate_required(changeset, [:shape_handle], message: "can't be blank when offset != -1")
       end
     end
 
@@ -214,80 +215,81 @@ defmodule Electric.Plug.ServeShapePlug do
 
   defp load_shape_info(%Conn{} = conn, _) do
     OpenTelemetry.with_span("shape_get.plug.load_shape_info", [], fn ->
-      shape_info = get_or_create_shape_id(conn.assigns)
+      shape_info = get_or_create_shape_handle(conn.assigns)
       handle_shape_info(conn, shape_info)
     end)
   end
 
-  # No shape_id is provided so we can get the existing one for this shape
+  # No shape_handle is provided so we can get the existing one for this shape
   # or create a new shape if it does not yet exist
-  defp get_or_create_shape_id(%{shape_definition: shape, config: config, shape_id: nil}) do
-    Shapes.get_or_create_shape_id(config, shape)
+  defp get_or_create_shape_handle(%{shape_definition: shape, config: config, shape_handle: nil}) do
+    Shapes.get_or_create_shape_handle(config, shape)
   end
 
-  # A shape ID is provided so we need to return the shape that matches the shape ID and the shape definition
-  defp get_or_create_shape_id(%{shape_definition: shape, config: config}) do
+  # A shape handle is provided so we need to return the shape that matches the shape handle and the shape definition
+  defp get_or_create_shape_handle(%{shape_definition: shape, config: config}) do
     Shapes.get_shape(config, shape)
   end
 
   defp handle_shape_info(
-         %Conn{assigns: %{shape_definition: shape, config: config, shape_id: shape_id}} = conn,
+         %Conn{assigns: %{shape_definition: shape, config: config, shape_handle: shape_handle}} =
+           conn,
          nil
        ) do
     # There is no shape that matches the shape definition (because shape info is `nil`)
-    if shape_id != nil && Shapes.has_shape?(config, shape_id) do
-      # but there is a shape that matches the shape ID
-      # thus the shape ID does not match the shape definition
+    if shape_handle != nil && Shapes.has_shape?(config, shape_handle) do
+      # but there is a shape that matches the shape handle
+      # thus the shape handle does not match the shape definition
       # and we return a 400 bad request status code
       conn
       |> send_resp(400, @shape_definition_mismatch)
       |> halt()
     else
-      # The shape ID does not exist or no longer exists
+      # The shape handle does not exist or no longer exists
       # e.g. it may have been deleted.
       # Hence, create a new shape for this shape definition
       # and return a 409 with a redirect to the newly created shape.
       # (will be done by the recursive `handle_shape_info` call)
-      shape_info = Shapes.get_or_create_shape_id(config, shape)
+      shape_info = Shapes.get_or_create_shape_handle(config, shape)
       handle_shape_info(conn, shape_info)
     end
   end
 
   defp handle_shape_info(
-         %Conn{assigns: %{shape_id: shape_id}} = conn,
-         {active_shape_id, last_offset}
+         %Conn{assigns: %{shape_handle: shape_handle}} = conn,
+         {active_shape_handle, last_offset}
        )
-       when is_nil(shape_id) or shape_id == active_shape_id do
+       when is_nil(shape_handle) or shape_handle == active_shape_handle do
     # We found a shape that matches the shape definition
-    # and the shape has the same ID as the shape ID provided by the user
+    # and the shape has the same ID as the shape handle provided by the user
     conn
-    |> assign(:active_shape_id, active_shape_id)
+    |> assign(:active_shape_handle, active_shape_handle)
     |> assign(:last_offset, last_offset)
-    |> put_resp_header("electric-shape-id", active_shape_id)
+    |> put_resp_header("electric-handle", active_shape_handle)
   end
 
   defp handle_shape_info(
-         %Conn{assigns: %{shape_id: shape_id, table: table, config: config}} = conn,
-         {active_shape_id, _}
+         %Conn{assigns: %{config: config, handle: shape_handle, table: table}} = conn,
+         {active_shape_handle, _}
        ) do
-    if Shapes.has_shape?(config, shape_id) do
+    if Shapes.has_shape?(config, shape_handle) do
       # The shape with the provided ID exists but does not match the shape definition
       # otherwise we would have found it and it would have matched the previous function clause
       conn
       |> send_resp(400, @shape_definition_mismatch)
       |> halt()
     else
-      # The requested shape_id is not found, returns 409 along with a location redirect for clients to
+      # The requested shape_handle is not found, returns 409 along with a location redirect for clients to
       # re-request the shape from scratch with the new shape id which acts as a consistent cache buster
-      # e.g. GET /v1/shape?table={root_table}&shape_id={new_shape_id}&offset=-1
+      # e.g. GET /v1/shape?table={root_table}&handle={new_shape_handle}&offset=-1
 
       # TODO: discuss returning a 307 redirect rather than a 409, the client
       # will have to detect this and throw out old data
       conn
-      |> put_resp_header("electric-shape-id", active_shape_id)
+      |> put_resp_header("electric-handle", active_shape_handle)
       |> put_resp_header(
         "location",
-        "#{conn.request_path}?table=#{table}&shape_id=#{active_shape_id}&offset=-1"
+        "#{conn.request_path}?table=#{table}&handle=#{active_shape_handle}&offset=-1"
       )
       |> send_resp(409, @must_refetch)
       |> halt()
@@ -313,10 +315,10 @@ defmodule Electric.Plug.ServeShapePlug do
   # If chunk offsets are available, use those instead of the latest available offset
   # to optimize for cache hits and response sizes
   defp determine_log_chunk_offset(%Conn{assigns: assigns} = conn, _) do
-    %{config: config, active_shape_id: shape_id, offset: offset, tenant_id: tenant_id} = assigns
+    %{config: config, active_shape_handle: shape_handle, offset: offset, tenant_id: tenant_id} = assigns
 
     chunk_end_offset =
-      Shapes.get_chunk_end_log_offset(config, shape_id, offset, tenant_id) || assigns.last_offset
+      Shapes.get_chunk_end_log_offset(config, shape_handle, offset, tenant_id) || assigns.last_offset
 
     conn
     |> assign(:chunk_end_offset, chunk_end_offset)
@@ -352,14 +354,14 @@ defmodule Electric.Plug.ServeShapePlug do
   defp generate_etag(%Conn{} = conn, _) do
     %{
       offset: offset,
-      active_shape_id: active_shape_id,
+      active_shape_handle: active_shape_handle,
       chunk_end_offset: chunk_end_offset
     } = conn.assigns
 
     conn
     |> assign(
       :etag,
-      "#{active_shape_id}:#{offset}:#{chunk_end_offset}"
+      "#{active_shape_handle}:#{offset}:#{chunk_end_offset}"
     )
   end
 
@@ -431,16 +433,16 @@ defmodule Electric.Plug.ServeShapePlug do
          %Conn{
            assigns: %{
              chunk_end_offset: chunk_end_offset,
-             active_shape_id: shape_id,
+             active_shape_handle: shape_handle,
              tenant_id: tenant_id,
              up_to_date: maybe_up_to_date
            }
          } = conn
        ) do
-    case Shapes.get_snapshot(conn.assigns.config, shape_id, tenant_id) do
+    case Shapes.get_snapshot(conn.assigns.config, shape_handle, tenant_id) do
       {:ok, {offset, snapshot}} ->
         log =
-          Shapes.get_log_stream(conn.assigns.config, shape_id, tenant_id,
+          Shapes.get_log_stream(conn.assigns.config, shape_handle, tenant_id,
             since: offset,
             up_to: chunk_end_offset
           )
@@ -475,14 +477,14 @@ defmodule Electric.Plug.ServeShapePlug do
            assigns: %{
              offset: offset,
              chunk_end_offset: chunk_end_offset,
-             active_shape_id: shape_id,
+             active_shape_handle: shape_handle,
              tenant_id: tenant_id,
              up_to_date: maybe_up_to_date
            }
          } = conn
        ) do
     log =
-      Shapes.get_log_stream(conn.assigns.config, shape_id, tenant_id,
+      Shapes.get_log_stream(conn.assigns.config, shape_handle, tenant_id,
         since: offset,
         up_to: chunk_end_offset
       )
@@ -490,7 +492,7 @@ defmodule Electric.Plug.ServeShapePlug do
     if Enum.take(log, 1) == [] and conn.assigns.live do
       conn
       |> assign(:ot_is_immediate_response, false)
-      |> hold_until_change(shape_id)
+      |> hold_until_change(shape_handle)
     else
       [log, maybe_up_to_date]
       |> Stream.concat()
@@ -545,15 +547,15 @@ defmodule Electric.Plug.ServeShapePlug do
   defp listen_for_new_changes(%Conn{assigns: assigns} = conn, _) do
     # Only start listening when we know there is a possibility that nothing is going to be returned
     if LogOffset.compare(assigns.offset, assigns.last_offset) != :lt do
-      shape_id = assigns.shape_id
+      shape_handle = assigns.shape_handle
 
       ref = make_ref()
       registry = conn.assigns.config[:registry]
       tenant = conn.assigns.tenant_id
-      Registry.register(registry, {tenant, shape_id}, ref)
+      Registry.register(registry, {tenant, shape_handle}, ref)
 
       Logger.debug(
-        "[Tenant #{tenant}]: Client #{inspect(self())} is registered for changes to #{shape_id}"
+        "[Tenant #{tenant}]: Client #{inspect(self())} is registered for changes to #{shape_handle}"
       )
 
       assign(conn, :new_changes_ref, ref)
@@ -562,9 +564,9 @@ defmodule Electric.Plug.ServeShapePlug do
     end
   end
 
-  def hold_until_change(conn, shape_id) do
+  def hold_until_change(conn, shape_handle) do
     long_poll_timeout = conn.assigns.config[:long_poll_timeout]
-    Logger.debug("Client #{inspect(self())} is waiting for changes to #{shape_id}")
+    Logger.debug("Client #{inspect(self())} is waiting for changes to #{shape_handle}")
     ref = conn.assigns.new_changes_ref
 
     receive do
@@ -579,7 +581,7 @@ defmodule Electric.Plug.ServeShapePlug do
         |> serve_shape_log()
 
       {^ref, :shape_rotation} ->
-        # We may want to notify the client better that the shape ID had changed, but just closing the response
+        # We may want to notify the client better that the shape handle had changed, but just closing the response
         # and letting the client handle it on reconnection is good enough.
         conn
         |> assign(:ot_is_shape_rotated, true)
@@ -596,9 +598,9 @@ defmodule Electric.Plug.ServeShapePlug do
   end
 
   defp open_telemetry_attrs(%Conn{assigns: assigns} = conn) do
-    shape_id =
+    shape_handle =
       if is_struct(conn.query_params, Plug.Conn.Unfetched) do
-        assigns[:active_shape_id] || assigns[:shape_id]
+        assigns[:active_shape_handle] || assigns[:shape_handle]
       else
         conn.query_params["shape_id"] || assigns[:active_shape_id] || assigns[:shape_id]
       end
@@ -607,7 +609,7 @@ defmodule Electric.Plug.ServeShapePlug do
 
     Electric.Plug.Utils.common_open_telemetry_attrs(conn)
     |> Map.merge(%{
-      "shape.id" => shape_id,
+      "shape.handle" => shape_handle,
       "shape.where" => assigns[:where],
       "shape.root_table" => assigns[:table],
       "shape.definition" => assigns[:shape_definition],
diff --git a/packages/sync-service/lib/electric/shape_cache.ex b/packages/sync-service/lib/electric/shape_cache.ex
index 6edc944a7e..35d42277dc 100644
--- a/packages/sync-service/lib/electric/shape_cache.ex
+++ b/packages/sync-service/lib/electric/shape_cache.ex
@@ -5,23 +5,23 @@ defmodule Electric.ShapeCacheBehaviour do
   alias Electric.Shapes.Shape
   alias Electric.Replication.LogOffset
 
-  @type shape_id :: String.t()
+  @type shape_handle :: String.t()
   @type shape_def :: Shape.t()
   @type xmin :: non_neg_integer()
 
   @doc "Update a shape's status with a new log offset"
-  @callback update_shape_latest_offset(shape_id(), LogOffset.t(), keyword()) :: :ok
+  @callback update_shape_latest_offset(shape_handle(), LogOffset.t(), keyword()) :: :ok
 
   @callback get_shape(shape_def(), opts :: keyword()) ::
-              {shape_id(), current_snapshot_offset :: LogOffset.t()}
-  @callback get_or_create_shape_id(shape_def(), opts :: keyword()) ::
-              {shape_id(), current_snapshot_offset :: LogOffset.t()}
-  @callback list_shapes(Electric.ShapeCache.ShapeStatus.t()) :: [{shape_id(), Shape.t()}]
-  @callback await_snapshot_start(shape_id(), opts :: keyword()) :: :started | {:error, term()}
-  @callback handle_truncate(shape_id(), keyword()) :: :ok
-  @callback clean_shape(shape_id(), keyword()) :: :ok
+              {shape_handle(), current_snapshot_offset :: LogOffset.t()}
+  @callback get_or_create_shape_handle(shape_def(), opts :: keyword()) ::
+              {shape_handle(), current_snapshot_offset :: LogOffset.t()}
+  @callback list_shapes(Electric.ShapeCache.ShapeStatus.t()) :: [{shape_handle(), Shape.t()}]
+  @callback await_snapshot_start(shape_handle(), opts :: keyword()) :: :started | {:error, term()}
+  @callback handle_truncate(shape_handle(), keyword()) :: :ok
+  @callback clean_shape(shape_handle(), keyword()) :: :ok
   @callback clean_all_shapes(GenServer.name()) :: :ok
-  @callback has_shape?(shape_id(), keyword()) :: boolean()
+  @callback has_shape?(shape_handle(), keyword()) :: boolean()
 end
 
 defmodule Electric.ShapeCache do
@@ -36,7 +36,7 @@ defmodule Electric.ShapeCache do
 
   @behaviour Electric.ShapeCacheBehaviour
 
-  @type shape_id :: Electric.ShapeCacheBehaviour.shape_id()
+  @type shape_handle :: Electric.ShapeCacheBehaviour.shape_handle()
 
   @name_schema_tuple {:tuple, [:atom, :atom, :any]}
   @genserver_name_schema {:or, [:atom, @name_schema_tuple]}
@@ -110,43 +110,46 @@ defmodule Electric.ShapeCache do
   end
 
   @impl Electric.ShapeCacheBehaviour
-  def get_or_create_shape_id(shape, opts \\ []) do
-    # Get or create the shape ID and fire a snapshot if necessary
+  def get_or_create_shape_handle(shape, opts \\ []) do
+    # Get or create the shape handle and fire a snapshot if necessary
     if shape_state = get_shape(shape, opts) do
       shape_state
     else
       server = Access.get(opts, :server, name(opts))
-      GenServer.call(server, {:create_or_wait_shape_id, shape})
+      GenStage.call(server, {:create_or_wait_shape_handle, shape})
     end
   end
 
   @impl Electric.ShapeCacheBehaviour
-  @spec update_shape_latest_offset(shape_id(), LogOffset.t(), opts :: keyword()) ::
+  @spec update_shape_latest_offset(shape_handle(), LogOffset.t(), opts :: keyword()) ::
           :ok | {:error, term()}
-  def update_shape_latest_offset(shape_id, latest_offset, opts) do
+  def update_shape_latest_offset(shape_handle, latest_offset, opts) do
     meta_table = get_shape_meta_table(opts)
     shape_status = Access.get(opts, :shape_status, ShapeStatus)
 
-    if shape_status.set_latest_offset(meta_table, shape_id, latest_offset) do
+    if shape_status.set_latest_offset(meta_table, shape_handle, latest_offset) do
       :ok
     else
-      Logger.warning("Tried to update latest offset for shape #{shape_id} which doesn't exist")
+      Logger.warning(
+        "Tried to update latest offset for shape #{shape_handle} which doesn't exist"
+      )
+
       :error
     end
   end
 
   @impl Electric.ShapeCacheBehaviour
-  @spec list_shapes(Electric.ShapeCache.ShapeStatus.t()) :: [{shape_id(), Shape.t()}]
+  @spec list_shapes(Electric.ShapeCache.ShapeStatus.t()) :: [{shape_handle(), Shape.t()}]
   def list_shapes(opts) do
     shape_status = Access.get(opts, :shape_status, ShapeStatus)
     shape_status.list_shapes(opts)
   end
 
   @impl Electric.ShapeCacheBehaviour
-  @spec clean_shape(shape_id(), keyword()) :: :ok
-  def clean_shape(shape_id, opts) do
+  @spec clean_shape(shape_handle(), keyword()) :: :ok
+  def clean_shape(shape_handle, opts) do
     server = Access.get(opts, :server, name(opts))
-    GenServer.call(server, {:clean, shape_id})
+    GenStage.call(server, {:clean, shape_handle})
   end
 
   @impl Electric.ShapeCacheBehaviour
@@ -157,43 +160,43 @@ defmodule Electric.ShapeCache do
   end
 
   @impl Electric.ShapeCacheBehaviour
-  @spec handle_truncate(shape_id(), keyword()) :: :ok
-  def handle_truncate(shape_id, opts \\ []) do
+  @spec handle_truncate(shape_handle(), keyword()) :: :ok
+  def handle_truncate(shape_handle, opts \\ []) do
     server = Access.get(opts, :server, name(opts))
-    GenServer.call(server, {:truncate, shape_id})
+    GenStage.call(server, {:truncate, shape_handle})
   end
 
   @impl Electric.ShapeCacheBehaviour
-  @spec await_snapshot_start(shape_id(), keyword()) :: :started | {:error, term()}
-  def await_snapshot_start(shape_id, opts \\ []) when is_binary(shape_id) do
+  @spec await_snapshot_start(shape_handle(), keyword()) :: :started | {:error, term()}
+  def await_snapshot_start(shape_handle, opts \\ []) when is_binary(shape_handle) do
     table = get_shape_meta_table(opts)
     shape_status = Access.get(opts, :shape_status, ShapeStatus)
     electric_instance_id = Access.fetch!(opts, :electric_instance_id)
     tenant_id = Access.fetch!(opts, :tenant_id)
 
     cond do
-      shape_status.snapshot_started?(table, shape_id) ->
+      shape_status.snapshot_started?(table, shape_handle) ->
         :started
 
-      !shape_status.get_existing_shape(table, shape_id) ->
+      !shape_status.get_existing_shape(table, shape_handle) ->
         {:error, :unknown}
 
       true ->
-        server = Electric.Shapes.Consumer.name(electric_instance_id, tenant_id, shape_id)
+        server = Electric.Shapes.Consumer.name(electric_instance_id, tenant_id, shape_handle)
         GenServer.call(server, :await_snapshot_start)
     end
   end
 
   @impl Electric.ShapeCacheBehaviour
-  def has_shape?(shape_id, opts \\ []) do
+  def has_shape?(shape_handle, opts \\ []) do
     table = get_shape_meta_table(opts)
     shape_status = Access.get(opts, :shape_status, ShapeStatus)
 
-    if shape_status.get_existing_shape(table, shape_id) do
+    if shape_status.get_existing_shape(table, shape_handle) do
       true
     else
       server = Access.get(opts, :server, name(opts))
-      GenServer.call(server, {:wait_shape_id, shape_id})
+      GenStage.call(server, {:wait_shape_handle, shape_handle})
     end
   end
 
@@ -251,38 +254,48 @@ defmodule Electric.ShapeCache do
   end
 
   @impl GenServer
-  def handle_call({:create_or_wait_shape_id, shape}, _from, %{shape_status: shape_status} = state) do
-    {{shape_id, latest_offset}, state} =
+  def handle_call(
+        {:create_or_wait_shape_handle, shape},
+        _from,
+        %{shape_status: shape_status} = state
+      ) do
+    {{shape_handle, latest_offset}, state} =
       if shape_state = shape_status.get_existing_shape(state.shape_status_state, shape) do
         {shape_state, state}
       else
-        {:ok, shape_id} = shape_status.add_shape(state.shape_status_state, shape)
+        {:ok, shape_handle} = shape_status.add_shape(state.shape_status_state, shape)
 
-        {:ok, _pid, _snapshot_xmin, latest_offset} = start_shape(shape_id, shape, state)
-        {{shape_id, latest_offset}, state}
+        {:ok, _pid, _snapshot_xmin, latest_offset} = start_shape(shape_handle, shape, state)
+        {{shape_handle, latest_offset}, state}
       end
 
-    Logger.debug("Returning shape id #{shape_id} for shape #{inspect(shape)}")
-
-    {:reply, {shape_id, latest_offset}, state}
+    Logger.debug("Returning shape id #{shape_handle} for shape #{inspect(shape)}")
+    {:reply, {shape_handle, latest_offset}, state}
   end
 
-  def handle_call({:wait_shape_id, shape_id}, _from, %{shape_status: shape_status} = state) do
-    {:reply, !is_nil(shape_status.get_existing_shape(state.shape_status_state, shape_id)), state}
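+  # Replies with a boolean indicating whether the given shape handle is already registered.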
+  def handle_call(
+        {:wait_shape_handle, shape_handle},
+        _from,
+        %{shape_status: shape_status} = state
+      ) do
+    {:reply, !is_nil(shape_status.get_existing_shape(state.shape_status_state, shape_handle)),
+     state}
   end
 
-  def handle_call({:truncate, shape_id}, _from, state) do
-    with :ok <- clean_up_shape(state, shape_id) do
-      Logger.info("Truncating and rotating shape id, previous shape id #{shape_id} cleaned up")
+  def handle_call({:truncate, shape_handle}, _from, state) do
+    with :ok <- clean_up_shape(state, shape_handle) do
+      Logger.info(
+        "Truncating and rotating shape handle, previous shape handle #{shape_handle} cleaned up"
+      )
     end
 
     {:reply, :ok, state}
   end
 
-  def handle_call({:clean, shape_id}, _from, state) do
+  def handle_call({:clean, shape_handle}, _from, state) do
-    # ignore errors when cleaning up non-existant shape id
+    # ignore errors when cleaning up a non-existent shape handle
-    with :ok <- clean_up_shape(state, shape_id) do
-      Logger.info("Cleaning up shape #{shape_id}")
+    with :ok <- clean_up_shape(state, shape_handle) do
+      Logger.info("Cleaning up shape #{shape_handle}")
     end
 
     {:reply, :ok, state}
@@ -299,42 +312,42 @@ defmodule Electric.ShapeCache do
     {:reply, table, state}
   end
 
-  defp clean_up_shape(state, shape_id) do
+  defp clean_up_shape(state, shape_handle) do
     Electric.Shapes.ConsumerSupervisor.stop_shape_consumer(
       state.consumer_supervisor,
       state.electric_instance_id,
       state.tenant_id,
-      shape_id
+      shape_handle
     )
 
     :ok
   end
 
   defp clean_up_all_shapes(state) do
-    shape_ids =
+    shape_handles =
       state.shape_status_state |> state.shape_status.list_shapes() |> Enum.map(&elem(&1, 0))
 
-    for shape_id <- shape_ids do
-      clean_up_shape(state, shape_id)
+    for shape_handle <- shape_handles do
+      clean_up_shape(state, shape_handle)
     end
   end
 
   defp recover_shapes(state) do
     state.shape_status_state
     |> state.shape_status.list_shapes()
-    |> Enum.each(fn {shape_id, shape} ->
-      {:ok, _pid, _snapshot_xmin, _latest_offset} = start_shape(shape_id, shape, state)
+    |> Enum.each(fn {shape_handle, shape} ->
+      {:ok, _pid, _snapshot_xmin, _latest_offset} = start_shape(shape_handle, shape, state)
     end)
   end
 
-  defp start_shape(shape_id, shape, state) do
+  defp start_shape(shape_handle, shape, state) do
     with {:ok, pid} <-
            Electric.Shapes.ConsumerSupervisor.start_shape_consumer(
              state.consumer_supervisor,
              electric_instance_id: state.electric_instance_id,
              inspector: state.inspector,
              tenant_id: state.tenant_id,
-             shape_id: shape_id,
+             shape_handle: shape_handle,
              shape: shape,
              shape_status: {state.shape_status, state.shape_status_state},
              storage: state.storage,
@@ -354,7 +367,7 @@ defmodule Electric.ShapeCache do
              prepare_tables_fn: state.prepare_tables_fn,
              create_snapshot_fn: state.create_snapshot_fn
            ) do
-      consumer = Shapes.Consumer.name(state.electric_instance_id, state.tenant_id, shape_id)
+      consumer = Shapes.Consumer.name(state.electric_instance_id, state.tenant_id, shape_handle)
 
       {:ok, snapshot_xmin, latest_offset} = Shapes.Consumer.initial_state(consumer)
 
diff --git a/packages/sync-service/lib/electric/shape_cache/crashing_file_storage.ex b/packages/sync-service/lib/electric/shape_cache/crashing_file_storage.ex
index ff9107fbd1..61ad1364ea 100644
--- a/packages/sync-service/lib/electric/shape_cache/crashing_file_storage.ex
+++ b/packages/sync-service/lib/electric/shape_cache/crashing_file_storage.ex
@@ -9,7 +9,7 @@ defmodule Electric.ShapeCache.CrashingFileStorage do
 
   @num_calls_until_crash_key :num_calls_until_crash
 
-  defdelegate for_shape(shape_id, tenant_id, opts), to: FileStorage
+  defdelegate for_shape(shape_handle, tenant_id, opts), to: FileStorage
   defdelegate start_link(opts), to: FileStorage
   defdelegate set_shape_definition(shape, opts), to: FileStorage
   defdelegate get_all_stored_shapes(opts), to: FileStorage
diff --git a/packages/sync-service/lib/electric/shape_cache/file_storage.ex b/packages/sync-service/lib/electric/shape_cache/file_storage.ex
index 52357fd614..462d38a082 100644
--- a/packages/sync-service/lib/electric/shape_cache/file_storage.ex
+++ b/packages/sync-service/lib/electric/shape_cache/file_storage.ex
@@ -20,7 +20,7 @@ defmodule Electric.ShapeCache.FileStorage do
 
   defstruct [
     :base_path,
-    :shape_id,
+    :shape_handle,
     :db,
     :cubdb_dir,
     :shape_definition_dir,
@@ -41,30 +41,30 @@ defmodule Electric.ShapeCache.FileStorage do
   end
 
   @impl Electric.ShapeCache.Storage
-  def for_shape(shape_id, _tenant_id, %FS{shape_id: shape_id} = opts) do
+  def for_shape(shape_handle, _tenant_id, %FS{shape_handle: shape_handle} = opts) do
     opts
   end
 
   def for_shape(
-        shape_id,
+        shape_handle,
         tenant_id,
         %{base_path: base_path, electric_instance_id: electric_instance_id} = opts
       ) do
     %FS{
       base_path: base_path,
-      shape_id: shape_id,
-      db: name(electric_instance_id, tenant_id, shape_id),
-      cubdb_dir: Path.join([base_path, tenant_id, shape_id, "cubdb"]),
-      snapshot_dir: Path.join([base_path, tenant_id, shape_id, "snapshots"]),
-      shape_definition_dir: Path.join([base_path, tenant_id, shape_id]),
+      shape_handle: shape_handle,
+      db: name(electric_instance_id, tenant_id, shape_handle),
+      cubdb_dir: Path.join([base_path, tenant_id, shape_handle, "cubdb"]),
+      snapshot_dir: Path.join([base_path, tenant_id, shape_handle, "snapshots"]),
+      shape_definition_dir: Path.join([base_path, tenant_id, shape_handle]),
       electric_instance_id: electric_instance_id,
       tenant_id: tenant_id,
       extra_opts: Map.get(opts, :extra_opts, %{})
     }
   end
 
-  defp name(electric_instance_id, tenant_id, shape_id) do
-    Electric.Application.process_name(electric_instance_id, tenant_id, __MODULE__, shape_id)
+  defp name(electric_instance_id, tenant_id, shape_handle) do
+    Electric.Application.process_name(electric_instance_id, tenant_id, __MODULE__, shape_handle)
   end
 
   def child_spec(%FS{} = opts) do
@@ -128,17 +128,17 @@ defmodule Electric.ShapeCache.FileStorage do
     shapes_dir = Path.join([opts.base_path, opts.tenant_id])
 
     case File.ls(shapes_dir) do
-      {:ok, shape_ids} ->
-        Enum.reduce(shape_ids, %{}, fn shape_id, acc ->
+      {:ok, shape_handles} ->
+        Enum.reduce(shape_handles, %{}, fn shape_handle, acc ->
           shape_def_path =
             shape_definition_path(%{
-              shape_definition_dir: Path.join([opts.base_path, opts.tenant_id, shape_id])
+              shape_definition_dir: Path.join([opts.base_path, opts.tenant_id, shape_handle])
             })
 
           with {:ok, shape_def_encoded} <- File.read(shape_def_path),
                {:ok, shape_def_json} <- Jason.decode(shape_def_encoded),
                shape = Electric.Shapes.Shape.from_json_safe!(shape_def_json) do
-            Map.put(acc, shape_id, shape)
+            Map.put(acc, shape_handle, shape)
           else
             # if the shape definition file cannot be read/decoded, just ignore it
             {:error, _reason} -> acc
@@ -201,7 +201,7 @@ defmodule Electric.ShapeCache.FileStorage do
   def make_new_snapshot!(data_stream, %FS{} = opts) do
     OpenTelemetry.with_span(
       "storage.make_new_snapshot",
-      [storage_impl: "mixed_disk", "shape.id": opts.shape_id],
+      [storage_impl: "mixed_disk", "shape.handle": opts.shape_handle],
       fn ->
         data_stream
         |> Stream.map(&[&1, ?\n])
diff --git a/packages/sync-service/lib/electric/shape_cache/in_memory_storage.ex b/packages/sync-service/lib/electric/shape_cache/in_memory_storage.ex
index 491ae51f54..5ce4a485b7 100644
--- a/packages/sync-service/lib/electric/shape_cache/in_memory_storage.ex
+++ b/packages/sync-service/lib/electric/shape_cache/in_memory_storage.ex
@@ -18,7 +18,7 @@ defmodule Electric.ShapeCache.InMemoryStorage do
     :snapshot_table,
     :log_table,
     :chunk_checkpoint_table,
-    :shape_id,
+    :shape_handle,
     :electric_instance_id,
     :tenant_id
   ]
@@ -36,26 +36,26 @@ defmodule Electric.ShapeCache.InMemoryStorage do
     }
   end
 
-  def name(electric_instance_id, tenant_id, shape_id) when is_binary(shape_id) do
-    Electric.Application.process_name(electric_instance_id, tenant_id, __MODULE__, shape_id)
+  def name(electric_instance_id, tenant_id, shape_handle) when is_binary(shape_handle) do
+    Electric.Application.process_name(electric_instance_id, tenant_id, __MODULE__, shape_handle)
   end
 
   @impl Electric.ShapeCache.Storage
-  def for_shape(shape_id, _tenant_id, %{shape_id: shape_id} = opts) do
+  def for_shape(shape_handle, _tenant_id, %{shape_handle: shape_handle} = opts) do
     opts
   end
 
-  def for_shape(shape_id, tenant_id, %{
+  def for_shape(shape_handle, tenant_id, %{
         table_base_name: table_base_name,
         electric_instance_id: electric_instance_id
       }) do
-    snapshot_table_name = :"#{table_base_name}.#{tenant_id}.Snapshot_#{shape_id}"
-    log_table_name = :"#{table_base_name}.#{tenant_id}.Log_#{shape_id}"
-    chunk_checkpoint_table_name = :"#{table_base_name}.#{tenant_id}.ChunkCheckpoint_#{shape_id}"
+    snapshot_table_name = :"#{table_base_name}.#{tenant_id}.Snapshot_#{shape_handle}"
+    log_table_name = :"#{table_base_name}.#{tenant_id}.Log_#{shape_handle}"
+    chunk_checkpoint_table_name = :"#{table_base_name}.#{tenant_id}.ChunkCheckpoint_#{shape_handle}"
 
     %__MODULE__{
       table_base_name: table_base_name,
-      shape_id: shape_id,
+      shape_handle: shape_handle,
       snapshot_table: snapshot_table_name,
       log_table: log_table_name,
       chunk_checkpoint_table: chunk_checkpoint_table_name,
@@ -66,7 +66,7 @@ defmodule Electric.ShapeCache.InMemoryStorage do
 
   @impl Electric.ShapeCache.Storage
   def start_link(%MS{} = opts) do
-    if is_nil(opts.shape_id), do: raise("cannot start an un-attached storage instance")
+    if is_nil(opts.shape_handle), do: raise("cannot start an un-attached storage instance")
     if is_nil(opts.electric_instance_id), do: raise("electric_instance_id cannot be nil")
     if is_nil(opts.tenant_id), do: raise("tenant_id cannot be nil")
 
@@ -78,7 +78,7 @@ defmodule Electric.ShapeCache.InMemoryStorage do
           chunk_checkpoint_table: storage_table(opts.chunk_checkpoint_table)
         }
       end,
-      name: name(opts.electric_instance_id, opts.tenant_id, opts.shape_id)
+      name: name(opts.electric_instance_id, opts.tenant_id, opts.shape_handle)
     )
   end
 
@@ -203,7 +203,7 @@ defmodule Electric.ShapeCache.InMemoryStorage do
   def make_new_snapshot!(data_stream, %MS{} = opts) do
     OpenTelemetry.with_span(
       "storage.make_new_snapshot",
-      [storage_impl: "in_memory", "shape.id": opts.shape_id],
+      [storage_impl: "in_memory", "shape.handle": opts.shape_handle],
       fn ->
         table = opts.snapshot_table
 
diff --git a/packages/sync-service/lib/electric/shape_cache/shape_status.ex b/packages/sync-service/lib/electric/shape_cache/shape_status.ex
index 35b30d5d04..47498e0a16 100644
--- a/packages/sync-service/lib/electric/shape_cache/shape_status.ex
+++ b/packages/sync-service/lib/electric/shape_cache/shape_status.ex
@@ -6,22 +6,22 @@ defmodule Electric.ShapeCache.ShapeStatusBehaviour do
   alias Electric.ShapeCache.ShapeStatus
   alias Electric.Replication.LogOffset
 
-  @type shape_id() :: Electric.ShapeCacheBehaviour.shape_id()
+  @type shape_handle() :: Electric.ShapeCacheBehaviour.shape_handle()
   @type xmin() :: Electric.ShapeCacheBehaviour.xmin()
 
   @callback initialise(ShapeStatus.options()) :: {:ok, ShapeStatus.t()} | {:error, term()}
-  @callback list_shapes(ShapeStatus.t()) :: [{shape_id(), Shape.t()}]
-  @callback get_existing_shape(ShapeStatus.t(), Shape.t() | shape_id()) ::
-              {shape_id(), LogOffset.t()} | nil
+  @callback list_shapes(ShapeStatus.t()) :: [{shape_handle(), Shape.t()}]
+  @callback get_existing_shape(ShapeStatus.t(), Shape.t() | shape_handle()) ::
+              {shape_handle(), LogOffset.t()} | nil
   @callback add_shape(ShapeStatus.t(), Shape.t()) ::
-              {:ok, shape_id()} | {:error, term()}
-  @callback initialise_shape(ShapeStatus.t(), shape_id(), xmin(), LogOffset.t()) ::
+              {:ok, shape_handle()} | {:error, term()}
+  @callback initialise_shape(ShapeStatus.t(), shape_handle(), xmin(), LogOffset.t()) ::
               :ok
-  @callback set_snapshot_xmin(ShapeStatus.t(), shape_id(), xmin()) :: :ok
-  @callback set_latest_offset(ShapeStatus.t(), shape_id(), LogOffset.t()) :: :ok
-  @callback mark_snapshot_started(ShapeStatus.t(), shape_id()) :: :ok
-  @callback snapshot_started?(ShapeStatus.t(), shape_id()) :: boolean()
-  @callback remove_shape(ShapeStatus.t(), shape_id()) ::
+  @callback set_snapshot_xmin(ShapeStatus.t(), shape_handle(), xmin()) :: :ok
+  @callback set_latest_offset(ShapeStatus.t(), shape_handle(), LogOffset.t()) :: :ok
+  @callback mark_snapshot_started(ShapeStatus.t(), shape_handle()) :: :ok
+  @callback snapshot_started?(ShapeStatus.t(), shape_handle()) :: boolean()
+  @callback remove_shape(ShapeStatus.t(), shape_handle()) ::
               {:ok, Shape.t()} | {:error, term()}
 end
 
@@ -33,7 +33,7 @@ defmodule Electric.ShapeCache.ShapeStatus do
   the in-memory cache.
 
   The shape cache then loads this and starts processes (storage and consumer)
-  for each `{shape_id, %Shape{}}` pair. These then use their attached storage
+  for each `{shape_handle, %Shape{}}` pair. These then use their attached storage
   to recover the status information for the shape (snapshot xmin and latest
   offset).
 
@@ -53,7 +53,7 @@ defmodule Electric.ShapeCache.ShapeStatus do
 
   defstruct [:root, :shape_meta_table, :storage]
 
-  @type shape_id() :: Electric.ShapeCacheBehaviour.shape_id()
+  @type shape_handle() :: Electric.ShapeCacheBehaviour.shape_handle()
   @type xmin() :: Electric.ShapeCacheBehaviour.xmin()
   @type table() :: atom() | reference()
   @type t() :: %__MODULE__{
@@ -89,9 +89,9 @@ defmodule Electric.ShapeCache.ShapeStatus do
     end
   end
 
-  @spec add_shape(t(), Shape.t()) :: {:ok, shape_id()} | {:error, term()}
+  @spec add_shape(t(), Shape.t()) :: {:ok, shape_handle()} | {:error, term()}
   def add_shape(state, shape) do
-    {hash, shape_id} = Shape.generate_id(shape)
+    {hash, shape_handle} = Shape.generate_id(shape)
     # fresh snapshots always start with a zero offset - only once they
     # are folded into the log do we have non-zero offsets
     offset = LogOffset.first()
@@ -100,15 +100,15 @@ defmodule Electric.ShapeCache.ShapeStatus do
       :ets.insert_new(
         state.shape_meta_table,
         [
-          {{@shape_hash_lookup, hash}, shape_id},
-          {{@shape_meta_data, shape_id}, shape, nil, offset}
+          {{@shape_hash_lookup, hash}, shape_handle},
+          {{@shape_meta_data, shape_handle}, shape, nil, offset}
         ]
       )
 
-    {:ok, shape_id}
+    {:ok, shape_handle}
   end
 
-  @spec list_shapes(t()) :: [{shape_id(), Shape.t()}]
+  @spec list_shapes(t()) :: [{shape_handle(), Shape.t()}]
   def list_shapes(state) do
     :ets.select(state.shape_meta_table, [
       {
@@ -119,21 +119,21 @@ defmodule Electric.ShapeCache.ShapeStatus do
     ])
   end
 
-  @spec remove_shape(t(), shape_id()) :: {:ok, t()} | {:error, term()}
-  def remove_shape(state, shape_id) do
+  @spec remove_shape(t(), shape_handle()) :: {:ok, Shape.t()} | {:error, term()}
+  def remove_shape(state, shape_handle) do
     try do
       shape =
         :ets.lookup_element(
           state.shape_meta_table,
-          {@shape_meta_data, shape_id},
+          {@shape_meta_data, shape_handle},
           @shape_meta_shape_pos
         )
 
       :ets.select_delete(
         state.shape_meta_table,
         [
-          {{{@shape_meta_data, shape_id}, :_, :_, :_}, [], [true]},
-          {{{@shape_hash_lookup, :_}, shape_id}, [], [true]}
+          {{{@shape_meta_data, shape_handle}, :_, :_, :_}, [], [true]},
+          {{{@shape_hash_lookup, :_}, shape_handle}, [], [true]}
         ]
       )
 
@@ -144,16 +144,17 @@ defmodule Electric.ShapeCache.ShapeStatus do
       # keys, so we're doing our best to just delete everything without
       # crashing
       ArgumentError ->
-        {:error, "No shape matching #{inspect(shape_id)}"}
+        {:error, "No shape matching #{inspect(shape_handle)}"}
     end
   end
 
-  @spec get_existing_shape(t(), shape_id() | Shape.t()) :: nil | {shape_id(), LogOffset.t()}
+  @spec get_existing_shape(t(), shape_handle() | Shape.t()) ::
+          nil | {shape_handle(), LogOffset.t()}
   def get_existing_shape(%__MODULE__{shape_meta_table: table}, shape_or_id) do
     get_existing_shape(table, shape_or_id)
   end
 
-  @spec get_existing_shape(table(), Shape.t()) :: nil | {shape_id(), LogOffset.t()}
+  @spec get_existing_shape(table(), Shape.t()) :: nil | {shape_handle(), LogOffset.t()}
   def get_existing_shape(meta_table, %Shape{} = shape) do
     hash = Shape.hash(shape)
 
@@ -161,22 +162,22 @@ defmodule Electric.ShapeCache.ShapeStatus do
       [] ->
         nil
 
-      [shape_id] ->
-        {shape_id, latest_offset!(meta_table, shape_id)}
+      [shape_handle] ->
+        {shape_handle, latest_offset!(meta_table, shape_handle)}
     end
   end
 
-  @spec get_existing_shape(table(), shape_id()) :: nil | {shape_id(), LogOffset.t()}
-  def get_existing_shape(meta_table, shape_id) when is_binary(shape_id) do
-    case :ets.lookup(meta_table, {@shape_meta_data, shape_id}) do
+  @spec get_existing_shape(table(), shape_handle()) :: nil | {shape_handle(), LogOffset.t()}
+  def get_existing_shape(meta_table, shape_handle) when is_binary(shape_handle) do
+    case :ets.lookup(meta_table, {@shape_meta_data, shape_handle}) do
       [] -> nil
-      [{_, _shape, _xmin, offset}] -> {shape_id, offset}
+      [{_, _shape, _xmin, offset}] -> {shape_handle, offset}
     end
   end
 
-  @spec initialise_shape(t(), shape_id(), xmin(), LogOffset.t()) :: :ok
-  def initialise_shape(state, shape_id, snapshot_xmin, latest_offset) do
-    :ets.update_element(state.shape_meta_table, {@shape_meta_data, shape_id}, [
+  @spec initialise_shape(t(), shape_handle(), xmin(), LogOffset.t()) :: :ok
+  def initialise_shape(state, shape_handle, snapshot_xmin, latest_offset) do
+    :ets.update_element(state.shape_meta_table, {@shape_meta_data, shape_handle}, [
       {@shape_meta_xmin_pos, snapshot_xmin},
       {@shape_meta_latest_offset_pos, latest_offset}
     ])
@@ -184,75 +185,79 @@ defmodule Electric.ShapeCache.ShapeStatus do
     :ok
   end
 
-  def set_snapshot_xmin(state, shape_id, snapshot_xmin) do
-    :ets.update_element(state.shape_meta_table, {@shape_meta_data, shape_id}, [
+  def set_snapshot_xmin(state, shape_handle, snapshot_xmin) do
+    :ets.update_element(state.shape_meta_table, {@shape_meta_data, shape_handle}, [
       {@shape_meta_xmin_pos, snapshot_xmin}
     ])
   end
 
-  def set_latest_offset(%__MODULE__{shape_meta_table: table} = _state, shape_id, latest_offset) do
-    set_latest_offset(table, shape_id, latest_offset)
+  def set_latest_offset(
+        %__MODULE__{shape_meta_table: table} = _state,
+        shape_handle,
+        latest_offset
+      ) do
+    set_latest_offset(table, shape_handle, latest_offset)
   end
 
-  def set_latest_offset(meta_table, shape_id, latest_offset) do
-    :ets.update_element(meta_table, {@shape_meta_data, shape_id}, [
+  def set_latest_offset(meta_table, shape_handle, latest_offset) do
+    :ets.update_element(meta_table, {@shape_meta_data, shape_handle}, [
       {@shape_meta_latest_offset_pos, latest_offset}
     ])
   end
 
-  def latest_offset!(%__MODULE__{shape_meta_table: table} = _state, shape_id) do
-    latest_offset(table, shape_id)
+  def latest_offset!(%__MODULE__{shape_meta_table: table} = _state, shape_handle) do
+    latest_offset(table, shape_handle)
   end
 
-  def latest_offset!(meta_table, shape_id) do
+  def latest_offset!(meta_table, shape_handle) do
     :ets.lookup_element(
       meta_table,
-      {@shape_meta_data, shape_id},
+      {@shape_meta_data, shape_handle},
       @shape_meta_latest_offset_pos
     )
   end
 
-  def latest_offset(%__MODULE__{shape_meta_table: table} = _state, shape_id) do
-    latest_offset(table, shape_id)
+  def latest_offset(%__MODULE__{shape_meta_table: table} = _state, shape_handle) do
+    latest_offset(table, shape_handle)
   end
 
-  def latest_offset(meta_table, shape_id) do
+  def latest_offset(meta_table, shape_handle) do
     turn_raise_into_error(fn ->
       :ets.lookup_element(
         meta_table,
-        {@shape_meta_data, shape_id},
+        {@shape_meta_data, shape_handle},
         @shape_meta_latest_offset_pos
       )
     end)
   end
 
-  def snapshot_xmin(%__MODULE__{shape_meta_table: table} = _state, shape_id) do
-    snapshot_xmin(table, shape_id)
+  def snapshot_xmin(%__MODULE__{shape_meta_table: table} = _state, shape_handle) do
+    snapshot_xmin(table, shape_handle)
   end
 
-  def snapshot_xmin(meta_table, shape_id) when is_reference(meta_table) or is_atom(meta_table) do
+  def snapshot_xmin(meta_table, shape_handle) when is_reference(meta_table) or is_atom(meta_table) do
     turn_raise_into_error(fn ->
       :ets.lookup_element(
         meta_table,
-        {@shape_meta_data, shape_id},
+        {@shape_meta_data, shape_handle},
         @shape_meta_xmin_pos
       )
     end)
   end
 
-  def snapshot_started?(%__MODULE__{shape_meta_table: table} = _state, shape_id) do
-    snapshot_started?(table, shape_id)
+  def snapshot_started?(%__MODULE__{shape_meta_table: table} = _state, shape_handle) do
+    snapshot_started?(table, shape_handle)
   end
 
-  def snapshot_started?(meta_table, shape_id) do
-    case :ets.lookup(meta_table, {@snapshot_started, shape_id}) do
+  def snapshot_started?(meta_table, shape_handle) do
+    case :ets.lookup(meta_table, {@snapshot_started, shape_handle}) do
       [] -> false
-      [{{@snapshot_started, ^shape_id}, true}] -> true
+      [{{@snapshot_started, ^shape_handle}, true}] -> true
     end
   end
 
-  def mark_snapshot_started(%__MODULE__{shape_meta_table: table} = _state, shape_id) do
-    :ets.insert(table, {{@snapshot_started, shape_id}, true})
+  def mark_snapshot_started(%__MODULE__{shape_meta_table: table} = _state, shape_handle) do
+    :ets.insert(table, {{@snapshot_started, shape_handle}, true})
     :ok
   end
 
@@ -261,12 +266,12 @@ defmodule Electric.ShapeCache.ShapeStatus do
       :ets.insert(
         state.shape_meta_table,
         Enum.concat([
-          Enum.flat_map(shapes, fn {shape_id, shape} ->
+          Enum.flat_map(shapes, fn {shape_handle, shape} ->
             hash = Shape.hash(shape)
 
             [
-              {{@shape_hash_lookup, hash}, shape_id},
-              {{@shape_meta_data, shape_id}, shape, nil, LogOffset.first()}
+              {{@shape_hash_lookup, hash}, shape_handle},
+              {{@shape_meta_data, shape_handle}, shape, nil, LogOffset.first()}
             ]
           end)
         ])
diff --git a/packages/sync-service/lib/electric/shape_cache/storage.ex b/packages/sync-service/lib/electric/shape_cache/storage.ex
index 81e29fa1b6..97cef8e77f 100644
--- a/packages/sync-service/lib/electric/shape_cache/storage.ex
+++ b/packages/sync-service/lib/electric/shape_cache/storage.ex
@@ -6,7 +6,7 @@ defmodule Electric.ShapeCache.Storage do
   alias Electric.Replication.LogOffset
 
   @type tenant_id :: String.t()
-  @type shape_id :: Electric.ShapeCacheBehaviour.shape_id()
+  @type shape_handle :: Electric.ShapeCacheBehaviour.shape_handle()
   @type xmin :: Electric.ShapeCacheBehaviour.xmin()
   @type offset :: LogOffset.t()
 
@@ -26,7 +26,7 @@ defmodule Electric.ShapeCache.Storage do
   @callback shared_opts(Keyword.t()) :: compiled_opts()
 
   @doc "Initialise shape-specific opts from the shared, global, configuration"
-  @callback for_shape(shape_id(), tenant_id(), compiled_opts()) :: shape_opts()
+  @callback for_shape(shape_handle(), tenant_id(), compiled_opts()) :: shape_opts()
 
   @doc "Start any processes required to run the storage backend"
   @callback start_link(shape_opts()) :: GenServer.on_start()
@@ -39,7 +39,7 @@ defmodule Electric.ShapeCache.Storage do
 
   @doc "Retrieve all stored shapes"
   @callback get_all_stored_shapes(compiled_opts()) ::
-              {:ok, %{shape_id() => Shape.t()}} | {:error, term()}
+              {:ok, %{shape_handle() => Shape.t()}} | {:error, term()}
 
   @doc """
   Get the current xmin and offset for the shape storage.
@@ -50,14 +50,14 @@ defmodule Electric.ShapeCache.Storage do
 
   @callback set_snapshot_xmin(xmin(), shape_opts()) :: :ok
 
-  @doc "Check if snapshot for a given shape id already exists"
+  @doc "Check if snapshot for a given shape handle already exists"
   @callback snapshot_started?(shape_opts()) :: boolean()
 
   @doc "Get the full snapshot for a given shape, also returning the offset this snapshot includes"
   @callback get_snapshot(shape_opts()) :: {offset :: LogOffset.t(), log()}
 
   @doc """
-  Make a new snapshot for a shape ID based on the meta information about the table and a stream of plain string rows
+  Make a new snapshot for a shape handle based on the meta information about the table and a stream of plain string rows
 
   Should raise an error if making the snapshot had failed for any reason.
   """
@@ -92,7 +92,7 @@ defmodule Electric.ShapeCache.Storage do
   """
   @callback get_chunk_end_log_offset(LogOffset.t(), shape_opts()) :: LogOffset.t() | nil
 
-  @doc "Clean up snapshots/logs for a shape id"
+  @doc "Clean up snapshots/logs for a shape handle"
   @callback cleanup!(shape_opts()) :: :ok
 
   @behaviour __MODULE__
@@ -114,8 +114,8 @@ defmodule Electric.ShapeCache.Storage do
   end
 
   @impl __MODULE__
-  def for_shape(shape_id, tenant_id, {mod, opts}) do
-    {mod, mod.for_shape(shape_id, tenant_id, opts)}
+  def for_shape(shape_handle, tenant_id, {mod, opts}) do
+    {mod, mod.for_shape(shape_handle, tenant_id, opts)}
   end
 
   @impl __MODULE__
diff --git a/packages/sync-service/lib/electric/shapes.ex b/packages/sync-service/lib/electric/shapes.ex
index 7aedb3a949..2753d7a50f 100644
--- a/packages/sync-service/lib/electric/shapes.ex
+++ b/packages/sync-service/lib/electric/shapes.ex
@@ -5,44 +5,44 @@ defmodule Electric.Shapes do
   alias Electric.Shapes.Shape
   require Logger
 
-  @type shape_id :: Electric.ShapeCacheBehaviour.shape_id()
+  @type shape_handle :: Electric.ShapeCacheBehaviour.shape_handle()
 
   @doc """
-  Get snapshot for the shape ID
+  Get snapshot for the shape handle
   """
-  def get_snapshot(config, shape_id, tenant_id) do
+  def get_snapshot(config, shape_handle, tenant_id) do
     {shape_cache, opts} = Access.get(config, :shape_cache, {ShapeCache, []})
-    storage = shape_storage(config, shape_id, tenant_id)
+    storage = shape_storage(config, shape_handle, tenant_id)
 
-    if shape_cache.has_shape?(shape_id, opts) do
-      with :started <- shape_cache.await_snapshot_start(shape_id, opts) do
+    if shape_cache.has_shape?(shape_handle, opts) do
+      with :started <- shape_cache.await_snapshot_start(shape_handle, opts) do
         {:ok, Storage.get_snapshot(storage)}
       end
     else
-      {:error, "invalid shape_id #{inspect(shape_id)}"}
+      {:error, "invalid shape_handle #{inspect(shape_handle)}"}
     end
   end
 
   @doc """
   Get stream of the log since a given offset
   """
-  def get_log_stream(config, shape_id, tenant_id, opts) do
+  def get_log_stream(config, shape_handle, tenant_id, opts) do
     {shape_cache, shape_cache_opts} = Access.get(config, :shape_cache, {ShapeCache, []})
     offset = Access.get(opts, :since, LogOffset.before_all())
     max_offset = Access.get(opts, :up_to, LogOffset.last())
-    storage = shape_storage(config, shape_id, tenant_id)
+    storage = shape_storage(config, shape_handle, tenant_id)
 
-    if shape_cache.has_shape?(shape_id, shape_cache_opts) do
+    if shape_cache.has_shape?(shape_handle, shape_cache_opts) do
       Storage.get_log_stream(offset, max_offset, storage)
     else
-      raise "Unknown shape: #{shape_id}"
+      raise "Unknown shape: #{shape_handle}"
     end
   end
 
   @doc """
   Get the shape that corresponds to this shape definition and return it along with the latest offset of the shape
   """
-  @spec get_shape(keyword(), Shape.t()) :: {shape_id(), LogOffset.t()}
+  @spec get_shape(keyword(), Shape.t()) :: {shape_handle(), LogOffset.t()}
   def get_shape(config, shape_def) do
     {shape_cache, opts} = Access.get(config, :shape_cache, {ShapeCache, []})
 
@@ -50,13 +50,13 @@ defmodule Electric.Shapes do
   end
 
   @doc """
-  Get or create a shape ID and return it along with the latest offset of the shape
+  Get or create a shape handle and return it along with the latest offset of the shape
   """
-  @spec get_or_create_shape_id(keyword(), Shape.t()) :: {shape_id(), LogOffset.t()}
-  def get_or_create_shape_id(config, shape_def) do
+  @spec get_or_create_shape_handle(keyword(), Shape.t()) :: {shape_handle(), LogOffset.t()}
+  def get_or_create_shape_handle(config, shape_def) do
     {shape_cache, opts} = Access.get(config, :shape_cache, {ShapeCache, []})
 
-    shape_cache.get_or_create_shape_id(shape_def, opts)
+    shape_cache.get_or_create_shape_handle(shape_def, opts)
   end
 
   @doc """
@@ -64,45 +64,45 @@ defmodule Electric.Shapes do
 
   If `nil` is returned, chunk is not complete and the shape's latest offset should be used
   """
-  @spec get_chunk_end_log_offset(keyword(), shape_id(), LogOffset.t(), String.t()) ::
+  @spec get_chunk_end_log_offset(keyword(), shape_handle(), LogOffset.t(), String.t()) ::
           LogOffset.t() | nil
-  def get_chunk_end_log_offset(config, shape_id, offset, tenant_id) do
-    storage = shape_storage(config, shape_id, tenant_id)
+  def get_chunk_end_log_offset(config, shape_handle, offset, tenant_id) do
+    storage = shape_storage(config, shape_handle, tenant_id)
     Storage.get_chunk_end_log_offset(offset, storage)
   end
 
   @doc """
-  Check whether the log has an entry for a given shape ID
+  Check whether the log has an entry for a given shape handle
   """
-  @spec has_shape?(keyword(), shape_id()) :: boolean()
-  def has_shape?(config, shape_id) do
+  @spec has_shape?(keyword(), shape_handle()) :: boolean()
+  def has_shape?(config, shape_handle) do
     {shape_cache, opts} = Access.get(config, :shape_cache, {ShapeCache, []})
 
-    shape_cache.has_shape?(shape_id, opts)
+    shape_cache.has_shape?(shape_handle, opts)
   end
 
   @doc """
-  Clean up all data (meta data and shape log + snapshot) associated with the given shape ID
+  Clean up all data (meta data and shape log + snapshot) associated with the given shape handle
   """
-  @spec clean_shape(shape_id(), keyword()) :: :ok
-  def clean_shape(shape_id, opts \\ []) do
+  @spec clean_shape(shape_handle(), keyword()) :: :ok
+  def clean_shape(shape_handle, opts \\ []) do
     {shape_cache, opts} = Access.get(opts, :shape_cache, {ShapeCache, []})
-    shape_cache.clean_shape(shape_id, opts)
+    shape_cache.clean_shape(shape_handle, opts)
     :ok
   end
 
-  @spec clean_shapes([shape_id()], keyword()) :: :ok
-  def clean_shapes(shape_ids, opts \\ []) do
+  @spec clean_shapes([shape_handle()], keyword()) :: :ok
+  def clean_shapes(shape_handles, opts \\ []) do
     {shape_cache, opts} = Access.get(opts, :shape_cache, {ShapeCache, []})
 
-    for shape_id <- shape_ids do
-      shape_cache.clean_shape(shape_id, opts)
+    for shape_handle <- shape_handles do
+      shape_cache.clean_shape(shape_handle, opts)
     end
 
     :ok
   end
 
-  defp shape_storage(config, shape_id, tenant_id) do
-    Storage.for_shape(shape_id, tenant_id, Access.fetch!(config, :storage))
+  defp shape_storage(config, shape_handle, tenant_id) do
+    Storage.for_shape(shape_handle, tenant_id, Access.fetch!(config, :storage))
   end
 end
diff --git a/packages/sync-service/lib/electric/shapes/consumer.ex b/packages/sync-service/lib/electric/shapes/consumer.ex
index 398b416730..1d3e67b30c 100644
--- a/packages/sync-service/lib/electric/shapes/consumer.ex
+++ b/packages/sync-service/lib/electric/shapes/consumer.ex
@@ -17,14 +17,14 @@ defmodule Electric.Shapes.Consumer do
   @initial_log_state %{current_chunk_byte_size: 0}
 
   def name(
-        %{electric_instance_id: electric_instance_id, tenant_id: tenant_id, shape_id: shape_id} =
+        %{electric_instance_id: electric_instance_id, tenant_id: tenant_id, shape_handle: shape_handle} =
           _config
       ) do
-    name(electric_instance_id, tenant_id, shape_id)
+    name(electric_instance_id, tenant_id, shape_handle)
   end
 
-  def name(electric_instance_id, tenant_id, shape_id) when is_binary(shape_id) do
-    Electric.Application.process_name(electric_instance_id, tenant_id, __MODULE__, shape_id)
+  def name(electric_instance_id, tenant_id, shape_handle) when is_binary(shape_handle) do
+    Electric.Application.process_name(electric_instance_id, tenant_id, __MODULE__, shape_handle)
   end
 
   def initial_state(consumer) do
@@ -33,17 +33,17 @@ defmodule Electric.Shapes.Consumer do
 
   @doc false
   # use in tests to avoid race conditions. registers `pid` to be notified
-  # when the `shape_id` consumer has processed every transaction.
+  # when the `shape_handle` consumer has processed every transaction.
   # Transactions that we skip because of xmin logic do not generate
   # a notification
-  @spec monitor(atom(), String.t(), ShapeCache.shape_id(), pid()) :: reference()
-  def monitor(electric_instance_id, tenant_id, shape_id, pid \\ self()) do
-    GenStage.call(name(electric_instance_id, tenant_id, shape_id), {:monitor, pid})
+  @spec monitor(atom(), String.t(), ShapeCache.shape_handle(), pid()) :: reference()
+  def monitor(electric_instance_id, tenant_id, shape_handle, pid \\ self()) do
+    GenStage.call(name(electric_instance_id, tenant_id, shape_handle), {:monitor, pid})
   end
 
-  @spec whereis(atom(), String.t(), ShapeCache.shape_id()) :: pid() | nil
-  def whereis(electric_instance_id, tenant_id, shape_id) do
-    GenServer.whereis(name(electric_instance_id, tenant_id, shape_id))
+  @spec whereis(atom(), String.t(), ShapeCache.shape_handle()) :: pid() | nil
+  def whereis(electric_instance_id, tenant_id, shape_handle) do
+    GenServer.whereis(name(electric_instance_id, tenant_id, shape_handle))
   end
 
   def start_link(config) when is_map(config) do
@@ -54,7 +54,7 @@ defmodule Electric.Shapes.Consumer do
     %{log_producer: producer, storage: storage, shape_status: {shape_status, shape_status_state}} =
       config
 
-    Logger.metadata(shape_id: config.shape_id)
+    Logger.metadata(shape_handle: config.shape_handle)
 
     Process.flag(:trap_exit, true)
 
@@ -68,7 +68,7 @@ defmodule Electric.Shapes.Consumer do
     :ok =
       shape_status.initialise_shape(
         shape_status_state,
-        config.shape_id,
+        config.shape_handle,
         snapshot_xmin,
         latest_offset
       )
@@ -123,32 +123,38 @@ defmodule Electric.Shapes.Consumer do
   end
 
   def handle_call(:await_snapshot_start, from, %{awaiting_snapshot_start: waiters} = state) do
-    Logger.debug("Starting a wait on the snapshot #{state.shape_id} for #{inspect(from)}}")
+    Logger.debug("Starting a wait on the snapshot #{state.shape_handle} for #{inspect(from)}}")
 
     {:noreply, [], %{state | awaiting_snapshot_start: [from | waiters]}}
   end
 
-  def handle_cast({:snapshot_xmin_known, shape_id, xmin}, %{shape_id: shape_id} = state) do
+  def handle_cast(
+        {:snapshot_xmin_known, shape_handle, xmin},
+        %{shape_handle: shape_handle} = state
+      ) do
     Logger.debug("Snapshot xmin known shape_id: #{shape_id} xmin: #{xmin}")
     state = set_snapshot_xmin(xmin, state)
     handle_txns(state.buffer, %{state | buffer: []})
   end
 
-  def handle_cast({:snapshot_started, shape_id}, %{shape_id: shape_id} = state) do
+  def handle_cast({:snapshot_started, shape_handle}, %{shape_handle: shape_handle} = state) do
     Logger.debug("Snapshot started shape_id: #{shape_id}")
     state = set_snapshot_started(state)
     {:noreply, [], state}
   end
 
-  def handle_cast({:snapshot_failed, shape_id, error, stacktrace}, %{shape_id: shape_id} = state) do
+  def handle_cast(
+        {:snapshot_failed, shape_handle, error, stacktrace},
+        %{shape_handle: shape_handle} = state
+      ) do
     if match?(%DBConnection.ConnectionError{reason: :queue_timeout}, error),
       do:
         Logger.warning(
-          "Snapshot creation failed for #{shape_id} because of a connection pool queue timeout"
+          "Snapshot creation failed for #{shape_handle} because of a connection pool queue timeout"
         ),
       else:
         Logger.error(
-          "Snapshot creation failed for #{shape_id} because of:\n#{Exception.format(:error, error, stacktrace)}"
+          "Snapshot creation failed for #{shape_handle} because of:\n#{Exception.format(:error, error, stacktrace)}"
         )
 
     state = reply_to_snapshot_waiters({:error, error}, state)
@@ -156,7 +162,7 @@ defmodule Electric.Shapes.Consumer do
     {:stop, :normal, state}
   end
 
-  def handle_cast({:snapshot_exists, shape_id}, %{shape_id: shape_id} = state) do
+  def handle_cast({:snapshot_exists, shape_handle}, %{shape_handle: shape_handle} = state) do
     state = set_snapshot_xmin(state.snapshot_xmin, state)
     state = set_snapshot_started(state)
     {:noreply, [], state}
@@ -172,7 +178,7 @@ defmodule Electric.Shapes.Consumer do
     %{shape: %{root_table: root_table}, inspector: {inspector, inspector_opts}} = state
 
     Logger.info(
-      "Schema for the table #{Utils.inspect_relation(root_table)} changed - terminating shape #{state.shape_id}"
+      "Schema for the table #{Utils.inspect_relation(root_table)} changed - terminating shape #{state.shape_handle}"
     )
 
     # We clean up the relation info from ETS as it has changed and we want
@@ -192,7 +198,7 @@ defmodule Electric.Shapes.Consumer do
   # Buffer incoming transactions until we know our xmin
   def handle_events([%Transaction{xid: xid}] = txns, _from, %{snapshot_xmin: nil} = state) do
     Logger.debug(fn ->
-      "Consumer for #{state.shape_id} buffering 1 transaction with xid #{xid}"
+      "Consumer for #{state.shape_handle} buffering 1 transaction with xid #{xid}"
     end)
 
     {:noreply, [], %{state | buffer: state.buffer ++ txns}}
@@ -222,7 +228,8 @@ defmodule Electric.Shapes.Consumer do
 
   defp handle_txn(%Transaction{} = txn, state) do
     ot_attrs =
-      [xid: txn.xid, num_changes: length(txn.changes)] ++ shape_attrs(state.shape_id, state.shape)
+      [xid: txn.xid, num_changes: length(txn.changes)] ++
+        shape_attrs(state.shape_handle, state.shape)
 
     OpenTelemetry.with_span("shape_write.consumer.handle_txn", ot_attrs, fn ->
       do_handle_txn(txn, state)
@@ -232,7 +239,7 @@ defmodule Electric.Shapes.Consumer do
   defp do_handle_txn(%Transaction{} = txn, state) do
     %{
       shape: shape,
-      shape_id: shape_id,
+      shape_handle: shape_handle,
       tenant_id: tenant_id,
       log_state: log_state,
       chunk_bytes_threshold: chunk_bytes_threshold,
@@ -251,12 +258,12 @@ defmodule Electric.Shapes.Consumer do
       Enum.any?(relevant_changes, &is_struct(&1, Changes.TruncatedRelation)) ->
         # TODO: This is a very naive way to handle truncations: if ANY relevant truncates are
         #       present in the transaction, we're considering the whole transaction empty, and
-        #       just rotate the shape id. "Correct" way to handle truncates is to be designed.
+        #       just rotate the shape handle. "Correct" way to handle truncates is to be designed.
         Logger.warning(
-          "Truncate operation encountered while processing txn #{txn.xid} for #{shape_id}"
+          "Truncate operation encountered while processing txn #{txn.xid} for #{shape_handle}"
         )
 
-        :ok = shape_cache.handle_truncate(shape_id, shape_cache_opts)
+        :ok = shape_cache.handle_truncate(shape_handle, shape_cache_opts)
 
         :ok = ShapeCache.Storage.cleanup!(storage)
 
@@ -270,9 +277,9 @@ defmodule Electric.Shapes.Consumer do
         #       Right now we'll just fail everything
         :ok = ShapeCache.Storage.append_to_log!(log_entries, storage)
 
-        shape_cache.update_shape_latest_offset(shape_id, last_log_offset, shape_cache_opts)
+        shape_cache.update_shape_latest_offset(shape_handle, last_log_offset, shape_cache_opts)
 
-        notify_listeners(registry, :new_changes, tenant_id, shape_id, last_log_offset)
+        notify_listeners(registry, :new_changes, tenant_id, shape_handle, last_log_offset)
 
         {:cont, notify(txn, %{state | log_state: new_log_state})}
 
@@ -285,10 +292,10 @@ defmodule Electric.Shapes.Consumer do
     end
   end
 
-  defp notify_listeners(registry, :new_changes, tenant_id, shape_id, latest_log_offset) do
-    Registry.dispatch(registry, {tenant_id, shape_id}, fn registered ->
+  defp notify_listeners(registry, :new_changes, tenant_id, shape_handle, latest_log_offset) do
+    Registry.dispatch(registry, {tenant_id, shape_handle}, fn registered ->
       Logger.debug(fn ->
-        "[Tenant #{tenant_id}]: Notifying ~#{length(registered)} clients about new changes to #{shape_id}"
+        "[Tenant #{tenant_id}]: Notifying ~#{length(registered)} clients about new changes to #{shape_handle}"
       end)
 
       for {pid, ref} <- registered,
@@ -301,13 +308,13 @@ defmodule Electric.Shapes.Consumer do
     set_snapshot_xmin(xmin, %{state | snapshot_xmin: xmin})
   end
 
-  defp set_snapshot_xmin(xmin, %{snapshot_xmin: xmin, shape_id: shape_id} = state) do
+  defp set_snapshot_xmin(xmin, %{snapshot_xmin: xmin, shape_handle: shape_handle} = state) do
     %{shape_status: {shape_status, shape_status_state}} = state
 
-    unless shape_status.set_snapshot_xmin(shape_status_state, shape_id, xmin),
+    unless shape_status.set_snapshot_xmin(shape_status_state, shape_handle, xmin),
       do:
         Logger.warning(
-          "Got snapshot information for a #{shape_id}, that shape id is no longer valid. Ignoring."
+          "Got snapshot information for a #{shape_handle}, that shape id is no longer valid. Ignoring."
         )
 
     state
@@ -318,15 +325,15 @@ defmodule Electric.Shapes.Consumer do
     set_snapshot_started(%{state | snapshot_started: true})
   end
 
-  defp set_snapshot_started(%{shape_id: shape_id} = state) do
+  defp set_snapshot_started(%{shape_handle: shape_handle} = state) do
     %{shape_status: {shape_status, shape_status_state}} = state
-    :ok = shape_status.mark_snapshot_started(shape_status_state, shape_id)
+    :ok = shape_status.mark_snapshot_started(shape_status_state, shape_handle)
     reply_to_snapshot_waiters(:started, state)
   end
 
   defp cleanup(state) do
     %{shape_status: {shape_status, shape_status_state}} = state
-    shape_status.remove_shape(shape_status_state, state.shape_id)
+    shape_status.remove_shape(shape_status_state, state.shape_handle)
     ShapeCache.Storage.cleanup!(state.storage)
     state
   end
@@ -385,7 +392,11 @@ defmodule Electric.Shapes.Consumer do
     {log_items, new_log_state}
   end
 
-  defp shape_attrs(shape_id, shape) do
-    ["shape.id": shape_id, "shape.root_table": shape.root_table, "shape.where": shape.where]
+  defp shape_attrs(shape_handle, shape) do
+    [
+      "shape.handle": shape_handle,
+      "shape.root_table": shape.root_table,
+      "shape.where": shape.where
+    ]
   end
 end
diff --git a/packages/sync-service/lib/electric/shapes/consumer/snapshotter.ex b/packages/sync-service/lib/electric/shapes/consumer/snapshotter.ex
index a9a68c147d..f844914dd9 100644
--- a/packages/sync-service/lib/electric/shapes/consumer/snapshotter.ex
+++ b/packages/sync-service/lib/electric/shapes/consumer/snapshotter.ex
@@ -10,12 +10,12 @@ defmodule Electric.Shapes.Consumer.Snapshotter do
 
   require Logger
 
-  def name(%{electric_instance_id: electric_instance_id, tenant_id: tenant_id, shape_id: shape_id}) do
-    name(electric_instance_id, tenant_id, shape_id)
+  def name(%{electric_instance_id: electric_instance_id, tenant_id: tenant_id, shape_handle: shape_handle}) do
+    name(electric_instance_id, tenant_id, shape_handle)
   end
 
-  def name(electric_instance_id, tenant_id, shape_id) when is_binary(shape_id) do
-    Electric.Application.process_name(electric_instance_id, tenant_id, __MODULE__, shape_id)
+  def name(electric_instance_id, tenant_id, shape_handle) when is_binary(shape_handle) do
+    Electric.Application.process_name(electric_instance_id, tenant_id, __MODULE__, shape_handle)
   end
 
   def start_link(config) do
@@ -28,13 +28,14 @@ defmodule Electric.Shapes.Consumer.Snapshotter do
 
   def handle_continue(:start_snapshot, state) do
     %{
-      shape_id: shape_id,
+      shape_handle: shape_handle,
       shape: shape,
       electric_instance_id: electric_instance_id,
       tenant_id: tenant_id
-    } = state
+    } =
+      state
 
-    case Shapes.Consumer.whereis(electric_instance_id, tenant_id, shape_id) do
+    case Shapes.Consumer.whereis(electric_instance_id, tenant_id, shape_handle) do
       consumer when is_pid(consumer) ->
         if not Storage.snapshot_started?(state.storage) do
           %{
@@ -49,7 +50,7 @@ defmodule Electric.Shapes.Consumer.Snapshotter do
 
           OpenTelemetry.with_span(
             "shape_snapshot.create_snapshot_task",
-            shape_attrs(shape_id, shape),
+            shape_attrs(shape_handle, shape),
             fn ->
               try do
                 # Grab the same connection from the pool for both operations to
@@ -59,7 +60,7 @@ defmodule Electric.Shapes.Consumer.Snapshotter do
                   fn pool_conn ->
                     OpenTelemetry.with_span(
                       "shape_snapshot.prepare_tables",
-                      shape_attrs(shape_id, shape),
+                      shape_attrs(shape_handle, shape),
                       fn ->
                         Utils.apply_fn_or_mfa(prepare_tables_fn_or_mfa, [
                           pool_conn,
@@ -68,12 +69,15 @@ defmodule Electric.Shapes.Consumer.Snapshotter do
                       end
                     )
 
-                    apply(create_snapshot_fn, [consumer, shape_id, shape, pool_conn, storage])
+                    apply(create_snapshot_fn, [consumer, shape_handle, shape, pool_conn, storage])
                   end
                 ])
               rescue
                 error ->
-                  GenServer.cast(consumer, {:snapshot_failed, shape_id, error, __STACKTRACE__})
+                  GenServer.cast(
+                    consumer,
+                    {:snapshot_failed, shape_handle, error, __STACKTRACE__}
+                  )
               end
             end
           )
@@ -84,7 +88,7 @@ defmodule Electric.Shapes.Consumer.Snapshotter do
           # storage does some clean up on start, e.g. in the case of a format
           # upgrade, we only know the actual on-disk state of the shape data
           # once things are running.
-          GenServer.cast(consumer, {:snapshot_exists, shape_id})
+          GenServer.cast(consumer, {:snapshot_exists, shape_handle})
         end
 
         {:stop, :normal, state}
@@ -106,8 +110,8 @@ defmodule Electric.Shapes.Consumer.Snapshotter do
   end
 
   @doc false
-  def query_in_readonly_txn(parent, shape_id, shape, db_pool, storage) do
-    shape_attrs = shape_attrs(shape_id, shape)
+  def query_in_readonly_txn(parent, shape_handle, shape, db_pool, storage) do
+    shape_attrs = shape_attrs(shape_handle, shape)
 
     Postgrex.transaction(
       db_pool,
@@ -133,7 +137,7 @@ defmodule Electric.Shapes.Consumer.Snapshotter do
                 []
               )
 
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, xmin})
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, xmin})
 
             # Enforce display settings *before* querying initial data to maintain consistent
             # formatting between snapshot and live log entries.
@@ -147,7 +151,7 @@ defmodule Electric.Shapes.Consumer.Snapshotter do
 
             stream = Querying.stream_initial_data(conn, shape)
 
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
 
             # could pass the shape and then make_new_snapshot! can pass it to row_to_snapshot_item
             # that way it has the relation, but it is still missing the pk_cols
@@ -167,7 +171,19 @@ defmodule Electric.Shapes.Consumer.Snapshotter do
     )
   end
 
-  defp shape_attrs(shape_id, shape) do
-    ["shape.id": shape_id, "shape.root_table": shape.root_table, "shape.where": shape.where]
+  defp query_span!(conn, span_name, span_attrs, query, params) do
+    OpenTelemetry.with_span(
+      span_name,
+      span_attrs,
+      fn -> Postgrex.query!(conn, query, params) end
+    )
+  end
+
+  defp shape_attrs(shape_handle, shape) do
+    [
+      "shape.handle": shape_handle,
+      "shape.root_table": shape.root_table,
+      "shape.where": shape.where
+    ]
   end
 end
diff --git a/packages/sync-service/lib/electric/shapes/consumer/supervisor.ex b/packages/sync-service/lib/electric/shapes/consumer/supervisor.ex
index 8826f0c73a..ac93b551ce 100644
--- a/packages/sync-service/lib/electric/shapes/consumer/supervisor.ex
+++ b/packages/sync-service/lib/electric/shapes/consumer/supervisor.ex
@@ -7,7 +7,7 @@ defmodule Electric.Shapes.Consumer.Supervisor do
   @genserver_name_schema {:or, [:atom, @name_schema_tuple]}
   # TODO: unify these with ShapeCache
   @schema NimbleOptions.new!(
-            shape_id: [type: :string, required: true],
+            shape_handle: [type: :string, required: true],
             shape: [type: {:struct, Electric.Shapes.Shape}, required: true],
             electric_instance_id: [type: :atom, required: true],
             inspector: [type: :mod_arg, required: true],
@@ -27,12 +27,12 @@ defmodule Electric.Shapes.Consumer.Supervisor do
             ]
           )
 
-  def name(electric_instance_id, tenant_id, shape_id) when is_binary(shape_id) do
-    Electric.Application.process_name(electric_instance_id, tenant_id, __MODULE__, shape_id)
+  def name(electric_instance_id, tenant_id, shape_handle) when is_binary(shape_handle) do
+    Electric.Application.process_name(electric_instance_id, tenant_id, __MODULE__, shape_handle)
   end
 
-  def name(%{electric_instance_id: electric_instance_id, tenant_id: tenant_id, shape_id: shape_id}) do
-    name(electric_instance_id, tenant_id, shape_id)
+  def name(%{electric_instance_id: electric_instance_id, tenant_id: tenant_id, shape_handle: shape_handle}) do
+    name(electric_instance_id, tenant_id, shape_handle)
   end
 
   def start_link(opts) do
@@ -45,22 +45,22 @@ defmodule Electric.Shapes.Consumer.Supervisor do
   def clean_and_stop(%{
         electric_instance_id: electric_instance_id,
         tenant_id: tenant_id,
-        shape_id: shape_id
+        shape_handle: shape_handle
       }) do
     # if consumer is present, terminate it gracefully, otherwise terminate supervisor
-    consumer = Electric.Shapes.Consumer.name(electric_instance_id, tenant_id, shape_id)
+    consumer = Electric.Shapes.Consumer.name(electric_instance_id, tenant_id, shape_handle)
 
     case GenServer.whereis(consumer) do
-      nil -> Supervisor.stop(name(electric_instance_id, tenant_id, shape_id))
+      nil -> Supervisor.stop(name(electric_instance_id, tenant_id, shape_handle))
       consumer_pid when is_pid(consumer_pid) -> GenServer.call(consumer_pid, :clean_and_stop)
     end
   end
 
   def init(config) when is_map(config) do
-    %{shape_id: shape_id, tenant_id: tenant_id, storage: {_, _} = storage} =
+    %{shape_handle: shape_handle, tenant_id: tenant_id, storage: {_, _} = storage} =
       config
 
-    shape_storage = Electric.ShapeCache.Storage.for_shape(shape_id, tenant_id, storage)
+    shape_storage = Electric.ShapeCache.Storage.for_shape(shape_handle, tenant_id, storage)
 
     shape_config = %{config | storage: shape_storage}
 
diff --git a/packages/sync-service/lib/electric/shapes/consumer_supervisor.ex b/packages/sync-service/lib/electric/shapes/consumer_supervisor.ex
index cba5bf6689..5d4f7dae34 100644
--- a/packages/sync-service/lib/electric/shapes/consumer_supervisor.ex
+++ b/packages/sync-service/lib/electric/shapes/consumer_supervisor.ex
@@ -24,21 +24,21 @@ defmodule Electric.Shapes.ConsumerSupervisor do
   end
 
   def start_shape_consumer(name, config) do
-    Logger.debug(fn -> "Starting consumer for #{Access.fetch!(config, :shape_id)}" end)
+    Logger.debug(fn -> "Starting consumer for #{Access.fetch!(config, :shape_handle)}" end)
 
     DynamicSupervisor.start_child(name, {Consumer.Supervisor, config})
   end
 
-  def stop_shape_consumer(_name, electric_instance_id, tenant_id, shape_id) do
-    case GenServer.whereis(Consumer.Supervisor.name(electric_instance_id, tenant_id, shape_id)) do
+  def stop_shape_consumer(_name, electric_instance_id, tenant_id, shape_handle) do
+    case GenServer.whereis(Consumer.Supervisor.name(electric_instance_id, tenant_id, shape_handle)) do
       nil ->
-        {:error, "no consumer for shape id #{inspect(shape_id)}"}
+        {:error, "no consumer for shape handle #{inspect(shape_handle)}"}
 
       pid when is_pid(pid) ->
         Consumer.Supervisor.clean_and_stop(%{
           electric_instance_id: electric_instance_id,
           tenant_id: tenant_id,
-          shape_id: shape_id
+          shape_handle: shape_handle
         })
 
         :ok
diff --git a/packages/sync-service/test/electric/plug/delete_shape_plug_test.exs b/packages/sync-service/test/electric/plug/delete_shape_plug_test.exs
index d850ca6eb9..849c4f02a2 100644
--- a/packages/sync-service/test/electric/plug/delete_shape_plug_test.exs
+++ b/packages/sync-service/test/electric/plug/delete_shape_plug_test.exs
@@ -27,7 +27,7 @@ defmodule Electric.Plug.DeleteShapePlugTest do
       }
     }
   }
-  @test_shape_id "test-shape-id"
+  @test_shape_handle "test-shape-handle"
   @test_pg_id "12345"
 
   def load_column_info({"public", "users"}, _),
@@ -117,8 +117,8 @@ defmodule Electric.Plug.DeleteShapePlugTest do
 
     test "should clean shape based on shape definition", ctx do
       Mock.ShapeCache
-      |> expect(:get_or_create_shape_id, fn @test_shape, _opts -> {@test_shape_id, 0} end)
-      |> expect(:clean_shape, fn @test_shape_id, _ -> :ok end)
+      |> expect(:get_or_create_shape_handle, fn @test_shape, _opts -> {@test_shape_handle, 0} end)
+      |> expect(:clean_shape, fn @test_shape_handle, _ -> :ok end)
 
       conn =
         ctx
@@ -128,13 +128,13 @@ defmodule Electric.Plug.DeleteShapePlugTest do
       assert conn.status == 202
     end
 
-    test "should clean shape based on shape_id", ctx do
+    test "should clean shape based on shape_handle", ctx do
       Mock.ShapeCache
-      |> expect(:clean_shape, fn @test_shape_id, _ -> :ok end)
+      |> expect(:clean_shape, fn @test_shape_handle, _ -> :ok end)
 
       conn =
         ctx
-        |> conn(:delete, "?table=public.users&shape_id=#{@test_shape_id}")
+        |> conn(:delete, "?table=public.users&handle=#{@test_shape_handle}")
         |> DeleteShapePlug.call([])
 
       assert conn.status == 202
diff --git a/packages/sync-service/test/electric/plug/router_test.exs b/packages/sync-service/test/electric/plug/router_test.exs
index cc44d45185..1cbe31f9b5 100644
--- a/packages/sync-service/test/electric/plug/router_test.exs
+++ b/packages/sync-service/test/electric/plug/router_test.exs
@@ -119,10 +119,10 @@ defmodule Electric.Plug.RouterTest do
         []
       )
 
-      shape_id = get_resp_shape_id(conn)
+      shape_handle = get_resp_shape_handle(conn)
 
       conn =
-        conn("GET", "/v1/shape?table=items&shape_id=#{shape_id}&offset=0_0&live")
+        conn("GET", "/v1/shape?table=items&handle=#{shape_handle}&offset=0_0&live")
         |> Router.call(opts)
 
       assert [%{"value" => %{"num" => "2"}}, _] = Jason.decode!(conn.resp_body)
@@ -131,20 +131,20 @@ defmodule Electric.Plug.RouterTest do
     @tag with_sql: [
            "INSERT INTO items VALUES (gen_random_uuid(), 'test value 1')"
          ]
-    test "DELETE forces the shape ID to be different on reconnect and new snapshot to be created",
+    test "DELETE forces the shape handle to be different on reconnect and new snapshot to be created",
          %{opts: opts, db_conn: db_conn} do
       conn =
         conn("GET", "/v1/shape?table=items&offset=-1")
         |> Router.call(opts)
 
       assert %{status: 200} = conn
-      shape1_id = get_resp_shape_id(conn)
+      shape1_handle = get_resp_shape_handle(conn)
 
       assert [%{"value" => %{"value" => "test value 1"}}] =
                Jason.decode!(conn.resp_body)
 
       assert %{status: 202} =
-               conn("DELETE", "/v1/shape?table=items&shape_id=#{shape1_id}")
+               conn("DELETE", "/v1/shape/items?shape_handle=#{shape1_handle}")
                |> Router.call(opts)
 
       Postgrex.query!(db_conn, "DELETE FROM items", [])
@@ -155,8 +155,8 @@ defmodule Electric.Plug.RouterTest do
         |> Router.call(opts)
 
       assert %{status: 200} = conn
-      shape2_id = get_resp_shape_id(conn)
-      assert shape1_id != shape2_id
+      shape2_handle = get_resp_shape_handle(conn)
+      assert shape1_handle != shape2_handle
 
       assert [%{"value" => %{"value" => "test value 2"}}] =
                Jason.decode!(conn.resp_body)
@@ -176,7 +176,7 @@ defmodule Electric.Plug.RouterTest do
         |> Router.call(opts)
 
       assert %{status: 200} = conn
-      shape_id = get_resp_shape_id(conn)
+      shape_handle = get_resp_shape_handle(conn)
 
       key =
         Changes.build_key({"public", "foo"}, %{"first" => "a", "second" => "b", "third" => "c"}, [
@@ -201,7 +201,7 @@ defmodule Electric.Plug.RouterTest do
 
       task =
         Task.async(fn ->
-          conn("GET", "/v1/shape?table=foo&offset=#{@first_offset}&shape_id=#{shape_id}&live")
+          conn("GET", "/v1/shape?table=foo&offset=#{@first_offset}&handle=#{shape_handle}&live")
           |> Router.call(opts)
         end)
 
@@ -246,7 +246,7 @@ defmodule Electric.Plug.RouterTest do
     test "GET received only a diff when receiving updates", %{opts: opts, db_conn: db_conn} do
       conn = conn("GET", "/v1/shape?table=wide_table&offset=-1") |> Router.call(opts)
       assert %{status: 200} = conn
-      shape_id = get_resp_shape_id(conn)
+      shape_handle = get_resp_shape_handle(conn)
 
       assert [
                %{
@@ -257,7 +257,7 @@ defmodule Electric.Plug.RouterTest do
 
       task =
         Task.async(fn ->
-          conn("GET", "/v1/shape?table=wide_table&offset=0_0&shape_id=#{shape_id}&live")
+          conn("GET", "/v1/shape?table=wide_table&offset=0_0&handle=#{shape_handle}&live")
           |> Router.call(opts)
         end)
 
@@ -280,7 +280,7 @@ defmodule Electric.Plug.RouterTest do
     } do
       conn = conn("GET", "/v1/shape?table=wide_table&offset=-1") |> Router.call(opts)
       assert %{status: 200} = conn
-      shape_id = get_resp_shape_id(conn)
+      shape_handle = get_resp_shape_handle(conn)
 
       assert [
                %{
@@ -291,7 +291,7 @@ defmodule Electric.Plug.RouterTest do
 
       task =
         Task.async(fn ->
-          conn("GET", "/v1/shape?table=wide_table&offset=0_0&shape_id=#{shape_id}&live")
+          conn("GET", "/v1/shape?table=wide_table&offset=0_0&handle=#{shape_handle}&live")
           |> Router.call(opts)
         end)
 
@@ -343,14 +343,14 @@ defmodule Electric.Plug.RouterTest do
          %{opts: opts, db_conn: db_conn} do
       conn = conn("GET", "/v1/shape?table=test_table&offset=-1") |> Router.call(opts)
       assert %{status: 200} = conn
-      shape_id = get_resp_shape_id(conn)
+      shape_handle = get_resp_shape_handle(conn)
 
       assert [%{"value" => %{"col1" => "test1", "col2" => "test2"}, "key" => key}] =
                Jason.decode!(conn.resp_body)
 
       task =
         Task.async(fn ->
-          conn("GET", "/v1/shape?table=test_table&offset=0_0&shape_id=#{shape_id}&live")
+          conn("GET", "/v1/shape?table=test_table&offset=0_0&handle=#{shape_handle}&live")
           |> Router.call(opts)
         end)
 
@@ -395,7 +395,7 @@ defmodule Electric.Plug.RouterTest do
         conn("GET", "/v1/shape?table=wide_table&offset=-1&columns=id,value1") |> Router.call(opts)
 
       assert %{status: 200} = conn
-      shape_id = get_resp_shape_id(conn)
+      shape_handle = get_resp_shape_handle(conn)
 
       assert [
                %{
@@ -411,7 +411,7 @@ defmodule Electric.Plug.RouterTest do
         Task.async(fn ->
           conn(
             "GET",
-            "/v1/shape?table=wide_table&offset=#{next_offset}&columns=id,value1&shape_id=#{shape_id}&live"
+            "/v1/shape?table=wide_table&offset=#{next_offset}&columns=id,value1&handle=#{shape_handle}&live"
           )
           |> Router.call(opts)
           |> then(fn conn ->
@@ -444,7 +444,7 @@ defmodule Electric.Plug.RouterTest do
         |> Router.call(opts)
 
       assert %{status: 200} = conn
-      shape_id = get_resp_shape_id(conn)
+      shape_handle = get_resp_shape_handle(conn)
 
       assert [] = Jason.decode!(conn.resp_body)
 
@@ -452,7 +452,7 @@ defmodule Electric.Plug.RouterTest do
         Task.async(fn ->
           conn("GET", "/v1/shape?table=items", %{
             offset: "0_0",
-            shape_id: shape_id,
+            shape_handle: shape_handle,
             where: where,
             live: true
           })
@@ -474,7 +474,7 @@ defmodule Electric.Plug.RouterTest do
                conn =
                conn("GET", "/v1/shape?table=items", %{
                  offset: new_offset,
-                 shape_id: shape_id,
+                 shape_handle: shape_handle,
                  where: where
                })
                |> Router.call(opts)
@@ -496,7 +496,8 @@ defmodule Electric.Plug.RouterTest do
         |> Router.call(opts)
 
       assert %{status: 200} = conn
-      shape_id = get_resp_shape_id(conn)
+
+      shape_handle = get_resp_shape_handle(conn)
       assert [op] = Jason.decode!(conn.resp_body)
 
       assert op == %{
@@ -513,7 +514,7 @@ defmodule Electric.Plug.RouterTest do
         Task.async(fn ->
           conn("GET", "/v1/shape?table=serial_ids", %{
             offset: "0_0",
-            shape_id: shape_id,
+            shape_handle: shape_handle,
             where: where,
             live: true
           })
@@ -543,7 +544,7 @@ defmodule Electric.Plug.RouterTest do
         Task.async(fn ->
           conn("GET", "/v1/shape?table=serial_ids", %{
             offset: new_offset,
-            shape_id: shape_id,
+            shape_handle: shape_handle,
             where: where,
             live: true
           })
@@ -604,7 +605,7 @@ defmodule Electric.Plug.RouterTest do
         |> Router.call(opts)
 
       assert %{status: 200} = conn
-      shape_id = get_resp_shape_id(conn)
+      shape_handle = get_resp_shape_handle(conn)
       assert [op1, op2] = Jason.decode!(conn.resp_body)
 
       assert [op1, op2] == [
@@ -627,7 +628,7 @@ defmodule Electric.Plug.RouterTest do
         Task.async(fn ->
           conn("GET", "/v1/shape?table=serial_ids", %{
             offset: "0_0",
-            shape_id: shape_id,
+            shape_handle: shape_handle,
             where: where,
             live: true
           })
@@ -689,7 +690,7 @@ defmodule Electric.Plug.RouterTest do
 
       conn = conn("GET", "/v1/shape?table=large_rows_table&offset=-1") |> Router.call(opts)
       assert %{status: 200} = conn
-      [shape_id] = Plug.Conn.get_resp_header(conn, "electric-shape-id")
+      [shape_handle] = Plug.Conn.get_resp_header(conn, "electric-handle")
       [next_offset] = Plug.Conn.get_resp_header(conn, "electric-chunk-last-offset")
 
       assert [] = Jason.decode!(conn.resp_body)
@@ -699,7 +700,7 @@ defmodule Electric.Plug.RouterTest do
         Task.async(fn ->
           conn(
             "GET",
-            "/v1/shape?table=large_rows_table&offset=#{next_offset}&shape_id=#{shape_id}&live"
+            "/v1/shape?table=large_rows_table&offset=#{next_offset}&handle=#{shape_handle}&live"
           )
           |> Router.call(opts)
         end)
@@ -713,7 +714,7 @@ defmodule Electric.Plug.RouterTest do
       assert %{status: 200} = Task.await(task)
 
       conn =
-        conn("GET", "/v1/shape?table=large_rows_table&offset=#{next_offset}&shape_id=#{shape_id}")
+        conn("GET", "/v1/shape?table=large_rows_table&offset=#{next_offset}&handle=#{shape_handle}")
         |> Router.call(opts)
 
       assert %{status: 200} = conn
@@ -734,7 +735,7 @@ defmodule Electric.Plug.RouterTest do
       [next_offset] = Plug.Conn.get_resp_header(conn, "electric-chunk-last-offset")
 
       conn =
-        conn("GET", "/v1/shape?table=large_rows_table&offset=#{next_offset}&shape_id=#{shape_id}")
+        conn("GET", "/v1/shape?table=large_rows_table&offset=#{next_offset}&handle=#{shape_handle}")
         |> Router.call(opts)
 
       assert %{status: 200} = conn
@@ -751,7 +752,7 @@ defmodule Electric.Plug.RouterTest do
              ] = Jason.decode!(conn.resp_body)
     end
 
-    test "GET receives 400 when shape ID does not match shape definition", %{
+    test "GET receives 400 when shape handle does not match shape definition", %{
       opts: opts
     } do
       where = "value ILIKE 'yes%'"
@@ -765,12 +766,12 @@ defmodule Electric.Plug.RouterTest do
       assert %{status: 200} = conn
       assert conn.resp_body != ""
 
-      shape_id = get_resp_shape_id(conn)
+      shape_handle = get_resp_shape_handle(conn)
       [next_offset] = Plug.Conn.get_resp_header(conn, "electric-chunk-last-offset")
 
       # Make the next request but forget to include the where clause
       conn =
-        conn("GET", "/v1/shape?table=items", %{offset: next_offset, shape_id: shape_id})
+        conn("GET", "/v1/shape?table=items", %{offset: next_offset, handle: shape_handle})
         |> Router.call(opts)
 
       assert %{status: 400} = conn
@@ -782,24 +783,24 @@ defmodule Electric.Plug.RouterTest do
                })
     end
 
-    test "GET receives 409 to a newly created shape when shape ID is not found and no shape matches the shape definition",
+    test "GET receives 409 to a newly created shape when shape handle is not found and no shape matches the shape definition",
          %{
            opts: opts
          } do
       # Make the next request but forget to include the where clause
       conn =
-        conn("GET", "/v1/shape?table=items", %{offset: "0_0", shape_id: "nonexistent"})
+        conn("GET", "/v1/shape?table=items", %{offset: "0_0", handle: "nonexistent"})
         |> Router.call(opts)
 
       assert %{status: 409} = conn
       assert conn.resp_body == Jason.encode!([%{headers: %{control: "must-refetch"}}])
-      new_shape_id = get_resp_header(conn, "electric-shape-id")
+      new_shape_handle = get_resp_header(conn, "electric-handle")
 
       assert get_resp_header(conn, "location") ==
-               "/v1/shape?table=items&shape_id=#{new_shape_id}&offset=-1"
+               "/v1/shape?table=items&handle=#{new_shape_handle}&offset=-1"
     end
 
-    test "GET receives 409 when shape ID is not found but there is another shape matching the definition",
+    test "GET receives 409 when shape handle is not found but there is another shape matching the definition",
          %{
            opts: opts
          } do
@@ -814,19 +815,19 @@ defmodule Electric.Plug.RouterTest do
       assert %{status: 200} = conn
       assert conn.resp_body != ""
 
-      shape_id = get_resp_shape_id(conn)
+      shape_handle = get_resp_shape_handle(conn)
 
-      # Request the same shape definition but with invalid shape_id
+      # Request the same shape definition but with invalid shape_handle
       conn =
         conn("GET", "/v1/shape?table=items", %{
           offset: "0_0",
-          shape_id: "nonexistent",
+          handle: "nonexistent",
           where: where
         })
         |> Router.call(opts)
 
       assert %{status: 409} = conn
-      [^shape_id] = Plug.Conn.get_resp_header(conn, "electric-shape-id")
+      [^shape_handle] = Plug.Conn.get_resp_header(conn, "electric-handle")
     end
 
     @tag with_sql: [
@@ -897,7 +898,7 @@ defmodule Electric.Plug.RouterTest do
     end
   end
 
-  defp get_resp_shape_id(conn), do: get_resp_header(conn, "electric-shape-id")
+  defp get_resp_shape_handle(conn), do: get_resp_header(conn, "electric-handle")
   defp get_resp_last_offset(conn), do: get_resp_header(conn, "electric-chunk-last-offset")
 
   defp get_resp_header(conn, header) do
diff --git a/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs b/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs
index 62d1c59505..c0aba08582 100644
--- a/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs
+++ b/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs
@@ -33,7 +33,7 @@ defmodule Electric.Plug.ServeShapePlugTest do
       }
     }
   }
-  @test_shape_id "test-shape-id"
+  @test_shape_handle "test-shape-handle"
   @test_opts %{foo: "bar"}
   @before_all_offset LogOffset.before_all()
   @first_offset LogOffset.first()
@@ -182,7 +182,7 @@ defmodule Electric.Plug.ServeShapePlugTest do
              }
     end
 
-    test "returns 400 for missing shape_id when offset != -1", ctx do
+    test "returns 400 for missing shape_handle when offset != -1", ctx do
       conn =
         ctx
         |> conn(:get, %{"table" => "public.users"}, "?offset=#{LogOffset.first()}")
@@ -191,7 +191,7 @@ defmodule Electric.Plug.ServeShapePlugTest do
       assert conn.status == 400
 
       assert Jason.decode!(conn.resp_body) == %{
-               "shape_id" => ["can't be blank when offset != -1"]
+               "shape_handle" => ["can't be blank when offset != -1"]
              }
     end
 
@@ -224,16 +224,16 @@ defmodule Electric.Plug.ServeShapePlugTest do
 
     test "returns snapshot when offset is -1", %{tenant_id: tenant_id} = ctx do
       Mock.ShapeCache
-      |> expect(:get_or_create_shape_id, fn @test_shape, _opts ->
-        {@test_shape_id, @test_offset}
+      |> expect(:get_or_create_shape_handle, fn @test_shape, _opts ->
+        {@test_shape_handle, @test_offset}
       end)
-      |> stub(:has_shape?, fn @test_shape_id, _opts -> true end)
-      |> expect(:await_snapshot_start, fn @test_shape_id, _ -> :started end)
+      |> stub(:has_shape?, fn @test_shape_handle, _opts -> true end)
+      |> expect(:await_snapshot_start, fn @test_shape_handle, _ -> :started end)
 
       next_offset = LogOffset.increment(@first_offset)
 
       Mock.Storage
-      |> stub(:for_shape, fn @test_shape_id, ^tenant_id, _opts -> @test_opts end)
+      |> stub(:for_shape, fn @test_shape_handle, ^tenant_id, _opts -> @test_opts end)
       |> expect(:get_chunk_end_log_offset, fn @before_all_offset, _ ->
         next_offset
       end)
@@ -262,24 +262,24 @@ defmodule Electric.Plug.ServeShapePlugTest do
              ]
 
       assert Plug.Conn.get_resp_header(conn, "etag") == [
-               "#{@test_shape_id}:-1:#{next_offset}"
+               "#{@test_shape_handle}:-1:#{next_offset}"
              ]
 
-      assert Plug.Conn.get_resp_header(conn, "electric-shape-id") == [@test_shape_id]
+      assert Plug.Conn.get_resp_header(conn, "electric-handle") == [@test_shape_handle]
     end
 
     test "snapshot has correct cache control headers", %{tenant_id: tenant_id} = ctx do
       Mock.ShapeCache
-      |> expect(:get_or_create_shape_id, fn @test_shape, _opts ->
-        {@test_shape_id, @test_offset}
+      |> expect(:get_or_create_shape_handle, fn @test_shape, _opts ->
+        {@test_shape_handle, @test_offset}
       end)
-      |> stub(:has_shape?, fn @test_shape_id, _opts -> true end)
-      |> expect(:await_snapshot_start, fn @test_shape_id, _ -> :started end)
+      |> stub(:has_shape?, fn @test_shape_handle, _opts -> true end)
+      |> expect(:await_snapshot_start, fn @test_shape_handle, _ -> :started end)
 
       next_offset = LogOffset.increment(@first_offset)
 
       Mock.Storage
-      |> stub(:for_shape, fn @test_shape_id, ^tenant_id, _opts -> @test_opts end)
+      |> stub(:for_shape, fn @test_shape_handle, ^tenant_id, _opts -> @test_opts end)
       |> expect(:get_chunk_end_log_offset, fn @before_all_offset, _ ->
         next_offset
       end)
@@ -309,16 +309,16 @@ defmodule Electric.Plug.ServeShapePlugTest do
 
     test "response has correct schema header", %{tenant_id: tenant_id} = ctx do
       Mock.ShapeCache
-      |> expect(:get_or_create_shape_id, fn @test_shape, _opts ->
-        {@test_shape_id, @test_offset}
+      |> expect(:get_or_create_shape_handle, fn @test_shape, _opts ->
+        {@test_shape_handle, @test_offset}
       end)
-      |> stub(:has_shape?, fn @test_shape_id, _opts -> true end)
-      |> expect(:await_snapshot_start, fn @test_shape_id, _ -> :started end)
+      |> stub(:has_shape?, fn @test_shape_handle, _opts -> true end)
+      |> expect(:await_snapshot_start, fn @test_shape_handle, _ -> :started end)
 
       next_offset = LogOffset.increment(@first_offset)
 
       Mock.Storage
-      |> stub(:for_shape, fn @test_shape_id, ^tenant_id, _opts -> @test_opts end)
+      |> stub(:for_shape, fn @test_shape_handle, ^tenant_id, _opts -> @test_opts end)
       |> expect(:get_chunk_end_log_offset, fn @before_all_offset, _ ->
         next_offset
       end)
@@ -342,15 +342,15 @@ defmodule Electric.Plug.ServeShapePlugTest do
     test "returns log when offset is >= 0", %{tenant_id: tenant_id} = ctx do
       Mock.ShapeCache
       |> expect(:get_shape, fn @test_shape, _opts ->
-        {@test_shape_id, @test_offset}
+        {@test_shape_handle, @test_offset}
       end)
-      |> stub(:has_shape?, fn @test_shape_id, _opts -> true end)
+      |> stub(:has_shape?, fn @test_shape_handle, _opts -> true end)
 
       next_offset = LogOffset.increment(@start_offset_50)
       next_next_offset = LogOffset.increment(next_offset)
 
       Mock.Storage
-      |> stub(:for_shape, fn @test_shape_id, ^tenant_id, _opts -> @test_opts end)
+      |> stub(:for_shape, fn @test_shape_handle, ^tenant_id, _opts -> @test_opts end)
       |> expect(:get_chunk_end_log_offset, fn @start_offset_50, _ ->
         next_next_offset
       end)
@@ -366,7 +366,7 @@ defmodule Electric.Plug.ServeShapePlugTest do
         |> conn(
           :get,
           %{"table" => "public.users"},
-          "?offset=#{@start_offset_50}&shape_id=#{@test_shape_id}"
+          "?offset=#{@start_offset_50}&handle=#{@test_shape_handle}"
         )
         |> ServeShapePlug.call([])
 
@@ -388,10 +388,10 @@ defmodule Electric.Plug.ServeShapePlugTest do
              ]
 
       assert Plug.Conn.get_resp_header(conn, "etag") == [
-               "#{@test_shape_id}:#{@start_offset_50}:#{next_next_offset}"
+               "#{@test_shape_handle}:#{@start_offset_50}:#{next_next_offset}"
              ]
 
-      assert Plug.Conn.get_resp_header(conn, "electric-shape-id") == [@test_shape_id]
+      assert Plug.Conn.get_resp_header(conn, "electric-handle") == [@test_shape_handle]
 
       assert Plug.Conn.get_resp_header(conn, "electric-chunk-last-offset") == [
                "#{next_next_offset}"
@@ -404,12 +404,12 @@ defmodule Electric.Plug.ServeShapePlugTest do
          %{tenant_id: tenant_id} = ctx do
       Mock.ShapeCache
       |> expect(:get_shape, fn @test_shape, _opts ->
-        {@test_shape_id, @test_offset}
+        {@test_shape_handle, @test_offset}
       end)
-      |> stub(:has_shape?, fn @test_shape_id, _opts -> true end)
+      |> stub(:has_shape?, fn @test_shape_handle, _opts -> true end)
 
       Mock.Storage
-      |> stub(:for_shape, fn @test_shape_id, ^tenant_id, _opts -> @test_opts end)
+      |> stub(:for_shape, fn @test_shape_handle, ^tenant_id, _opts -> @test_opts end)
       |> expect(:get_chunk_end_log_offset, fn @start_offset_50, _ ->
         @test_offset
       end)
@@ -419,11 +419,11 @@ defmodule Electric.Plug.ServeShapePlugTest do
         |> conn(
           :get,
           %{"table" => "public.users"},
-          "?offset=#{@start_offset_50}&shape_id=#{@test_shape_id}"
+          "?offset=#{@start_offset_50}&handle=#{@test_shape_handle}"
         )
         |> put_req_header(
           "if-none-match",
-          ~s("#{@test_shape_id}:#{@start_offset_50}:#{@test_offset}")
+          ~s("#{@test_shape_handle}:#{@start_offset_50}:#{@test_offset}")
         )
         |> ServeShapePlug.call([])
 
@@ -434,16 +434,16 @@ defmodule Electric.Plug.ServeShapePlugTest do
     test "handles live updates", %{tenant_id: tenant_id} = ctx do
       Mock.ShapeCache
       |> expect(:get_shape, fn @test_shape, _opts ->
-        {@test_shape_id, @test_offset}
+        {@test_shape_handle, @test_offset}
       end)
-      |> stub(:has_shape?, fn @test_shape_id, _opts -> true end)
+      |> stub(:has_shape?, fn @test_shape_handle, _opts -> true end)
 
       test_pid = self()
       next_offset = LogOffset.increment(@test_offset)
       next_offset_str = "#{next_offset}"
 
       Mock.Storage
-      |> stub(:for_shape, fn @test_shape_id, ^tenant_id, _opts -> @test_opts end)
+      |> stub(:for_shape, fn @test_shape_handle, ^tenant_id, _opts -> @test_opts end)
       |> expect(:get_chunk_end_log_offset, fn @test_offset, _ ->
         nil
       end)
@@ -461,7 +461,7 @@ defmodule Electric.Plug.ServeShapePlugTest do
           |> conn(
             :get,
             %{"table" => "public.users"},
-            "?offset=#{@test_offset}&shape_id=#{@test_shape_id}&live=true"
+            "?offset=#{@test_offset}&handle=#{@test_shape_handle}&live=true"
           )
           |> ServeShapePlug.call([])
         end)
@@ -471,7 +471,7 @@ defmodule Electric.Plug.ServeShapePlugTest do
       Process.sleep(50)
 
       # Simulate new changes arriving
-      Registry.dispatch(@registry, {ctx.tenant_id, @test_shape_id}, fn [{pid, ref}] ->
+      Registry.dispatch(@registry, {ctx.tenant_id, @test_shape_handle}, fn [{pid, ref}] ->
         send(pid, {ref, :new_changes, next_offset})
       end)
 
@@ -497,14 +497,14 @@ defmodule Electric.Plug.ServeShapePlugTest do
     test "handles shape rotation", %{tenant_id: tenant_id} = ctx do
       Mock.ShapeCache
       |> expect(:get_shape, fn @test_shape, _opts ->
-        {@test_shape_id, @test_offset}
+        {@test_shape_handle, @test_offset}
       end)
-      |> stub(:has_shape?, fn @test_shape_id, _opts -> true end)
+      |> stub(:has_shape?, fn @test_shape_handle, _opts -> true end)
 
       test_pid = self()
 
       Mock.Storage
-      |> stub(:for_shape, fn @test_shape_id, ^tenant_id, _opts -> @test_opts end)
+      |> stub(:for_shape, fn @test_shape_handle, ^tenant_id, _opts -> @test_opts end)
       |> expect(:get_chunk_end_log_offset, fn @test_offset, _ ->
         nil
       end)
@@ -519,7 +519,7 @@ defmodule Electric.Plug.ServeShapePlugTest do
           |> conn(
             :get,
             %{"table" => "public.users"},
-            "?offset=#{@test_offset}&shape_id=#{@test_shape_id}&live=true"
+            "?offset=#{@test_offset}&handle=#{@test_shape_handle}&live=true"
           )
           |> ServeShapePlug.call([])
         end)
@@ -529,7 +529,7 @@ defmodule Electric.Plug.ServeShapePlugTest do
       Process.sleep(50)
 
       # Simulate shape rotation
-      Registry.dispatch(@registry, {ctx.tenant_id, @test_shape_id}, fn [{pid, ref}] ->
+      Registry.dispatch(@registry, {ctx.tenant_id, @test_shape_handle}, fn [{pid, ref}] ->
         send(pid, {ref, :shape_rotation})
       end)
 
@@ -547,12 +547,12 @@ defmodule Electric.Plug.ServeShapePlugTest do
          %{tenant_id: tenant_id} = ctx do
       Mock.ShapeCache
       |> expect(:get_shape, fn @test_shape, _opts ->
-        {@test_shape_id, @test_offset}
+        {@test_shape_handle, @test_offset}
       end)
-      |> stub(:has_shape?, fn @test_shape_id, _opts -> true end)
+      |> stub(:has_shape?, fn @test_shape_handle, _opts -> true end)
 
       Mock.Storage
-      |> stub(:for_shape, fn @test_shape_id, ^tenant_id, _opts -> @test_opts end)
+      |> stub(:for_shape, fn @test_shape_handle, ^tenant_id, _opts -> @test_opts end)
       |> expect(:get_chunk_end_log_offset, fn @test_offset, _ ->
         nil
       end)
@@ -566,7 +566,7 @@ defmodule Electric.Plug.ServeShapePlugTest do
         |> conn(
           :get,
           %{"table" => "public.users"},
-          "?offset=#{@test_offset}&shape_id=#{@test_shape_id}&live=true"
+          "?offset=#{@test_offset}&handle=#{@test_shape_handle}&live=true"
         )
         |> ServeShapePlug.call([])
 
@@ -581,11 +581,11 @@ defmodule Electric.Plug.ServeShapePlugTest do
       assert Plug.Conn.get_resp_header(conn, "electric-chunk-up-to-date") == [""]
     end
 
-    test "sends 409 with a redirect to existing shape when requested shape ID does not exist",
+    test "sends 409 with a redirect to existing shape when requested shape handle does not exist",
          %{tenant_id: tenant_id} = ctx do
       Mock.ShapeCache
       |> expect(:get_shape, fn @test_shape, _opts ->
-        {@test_shape_id, @test_offset}
+        {@test_shape_handle, @test_offset}
       end)
       |> stub(:has_shape?, fn "foo", _opts -> false end)
 
@@ -597,68 +597,67 @@ defmodule Electric.Plug.ServeShapePlugTest do
         |> conn(
           :get,
           %{"table" => "public.users"},
-          "?offset=#{"50_12"}&shape_id=foo"
+          "?offset=#{"50_12"}&handle=foo"
         )
         |> ServeShapePlug.call([])
 
       assert conn.status == 409
 
       assert Jason.decode!(conn.resp_body) == [%{"headers" => %{"control" => "must-refetch"}}]
-      assert get_resp_header(conn, "electric-shape-id") == [@test_shape_id]
+      assert get_resp_header(conn, "electric-handle") == [@test_shape_handle]
 
       assert get_resp_header(conn, "location") == [
-               "/?table=public.users&shape_id=#{@test_shape_id}&offset=-1"
+               "/?table=public.users&handle=#{@test_shape_handle}&offset=-1"
              ]
     end
 
-    test "creates a new shape when shape ID does not exist and sends a 409 redirecting to the newly created shape",
+    test "creates a new shape when shape handle does not exist and sends a 409 redirecting to the newly created shape",
          %{tenant_id: tenant_id} = ctx do
-      new_shape_id = "new-shape-id"
+      new_shape_handle = "new-shape-handle"
 
       Mock.ShapeCache
       |> expect(:get_shape, fn @test_shape, _opts -> nil end)
-      |> stub(:has_shape?, fn @test_shape_id, _opts -> false end)
-      |> expect(:get_or_create_shape_id, fn @test_shape, _opts ->
-        {new_shape_id, @test_offset}
+      |> stub(:has_shape?, fn @test_shape_handle, _opts -> false end)
+      |> expect(:get_or_create_shape_handle, fn @test_shape, _opts ->
+        {new_shape_handle, @test_offset}
       end)
 
       Mock.Storage
-      |> stub(:for_shape, fn new_shape_id, ^tenant_id, opts -> {new_shape_id, opts} end)
+      |> stub(:for_shape, fn new_shape_handle, ^tenant_id, opts -> {new_shape_handle, opts} end)
 
       conn =
         ctx
         |> conn(
           :get,
           %{"table" => "public.users"},
-          "?offset=#{"50_12"}&shape_id=#{@test_shape_id}"
+          "?offset=#{"50_12"}&handle=#{@test_shape_handle}"
         )
         |> ServeShapePlug.call([])
 
       assert conn.status == 409
 
       assert Jason.decode!(conn.resp_body) == [%{"headers" => %{"control" => "must-refetch"}}]
-      assert get_resp_header(conn, "electric-shape-id") == [new_shape_id]
-
+      assert get_resp_header(conn, "electric-handle") == [new_shape_handle]
       assert get_resp_header(conn, "location") == [
-               "/?table=public.users&shape_id=#{new_shape_id}&offset=-1"
+               "/?table=public.users&handle=#{new_shape_handle}&offset=-1"
              ]
     end
 
-    test "sends 400 when shape ID does not match shape definition",
+    test "sends 400 when shape handle does not match shape definition",
          %{tenant_id: tenant_id} = ctx do
       Mock.ShapeCache
       |> expect(:get_shape, fn @test_shape, _opts -> nil end)
-      |> stub(:has_shape?, fn @test_shape_id, _opts -> true end)
+      |> stub(:has_shape?, fn @test_shape_handle, _opts -> true end)
 
       Mock.Storage
-      |> stub(:for_shape, fn @test_shape_id, ^tenant_id, opts -> {@test_shape_id, opts} end)
+      |> stub(:for_shape, fn @test_shape_handle, ^tenant_id, opts -> {@test_shape_handle, opts} end)
 
       conn =
         ctx
         |> conn(
           :get,
           %{"table" => "public.users"},
-          "?offset=#{"50_12"}&shape_id=#{@test_shape_id}"
+          "?offset=#{"50_12"}&handle=#{@test_shape_handle}"
         )
         |> ServeShapePlug.call([])
 
diff --git a/packages/sync-service/test/electric/shape_cache/shape_status_test.exs b/packages/sync-service/test/electric/shape_cache/shape_status_test.exs
index 546e77bdcd..1b72bc36c1 100644
--- a/packages/sync-service/test/electric/shape_cache/shape_status_test.exs
+++ b/packages/sync-service/test/electric/shape_cache/shape_status_test.exs
@@ -46,13 +46,13 @@ defmodule Electric.ShapeCache.ShapeStatusTest do
 
     shapes = Keyword.get(opts, :shapes, [])
 
-    shape_ids =
+    shape_handles =
       for shape <- shapes do
-        {:ok, shape_id} = ShapeStatus.add_shape(state, shape)
-        shape_id
+        {:ok, shape_handle} = ShapeStatus.add_shape(state, shape)
+        shape_handle
       end
 
-    {:ok, state, shape_ids}
+    {:ok, state, shape_handles}
   end
 
   test "starts empty", ctx do
@@ -63,39 +63,40 @@ defmodule Electric.ShapeCache.ShapeStatusTest do
   test "can recover shapes from storage", ctx do
     {:ok, state, []} = new_state(ctx)
     shape = shape!()
-    assert {:ok, shape_id} = ShapeStatus.add_shape(state, shape)
+    assert {:ok, shape_handle} = ShapeStatus.add_shape(state, shape)
+    assert [{^shape_handle, ^shape}] = ShapeStatus.list_shapes(state)
 
     {:ok, state, []} =
       new_state(ctx,
         stored_shapes: %{
-          shape_id => shape
+          shape_handle => shape
         }
       )
 
-    assert [{^shape_id, ^shape}] = ShapeStatus.list_shapes(state)
+    assert [{^shape_handle, ^shape}] = ShapeStatus.list_shapes(state)
   end
 
   test "can add shapes", ctx do
     {:ok, state, []} = new_state(ctx)
     shape = shape!()
-    assert {:ok, shape_id} = ShapeStatus.add_shape(state, shape)
-    assert [{^shape_id, ^shape}] = ShapeStatus.list_shapes(state)
+    assert {:ok, shape_handle} = ShapeStatus.add_shape(state, shape)
+    assert [{^shape_handle, ^shape}] = ShapeStatus.list_shapes(state)
   end
 
   test "can delete shape instances", ctx do
     {:ok, state, []} = new_state(ctx)
     shape_1 = shape!()
-    assert {:ok, shape_id_1} = ShapeStatus.add_shape(state, shape_1)
+    assert {:ok, shape_handle_1} = ShapeStatus.add_shape(state, shape_1)
 
     shape_2 = shape2!()
 
-    assert {:ok, shape_id_2} = ShapeStatus.add_shape(state, shape_2)
+    assert {:ok, shape_handle_2} = ShapeStatus.add_shape(state, shape_2)
 
-    assert Enum.sort_by([{shape_id_1, shape_1}, {shape_id_2, shape_2}], &elem(&1, 0)) ==
+    assert Enum.sort_by([{shape_handle_1, shape_1}, {shape_handle_2, shape_2}], &elem(&1, 0)) ==
              ShapeStatus.list_shapes(state) |> Enum.sort_by(&elem(&1, 0))
 
-    assert {:ok, ^shape_1} = ShapeStatus.remove_shape(state, shape_id_1)
-    assert [{^shape_id_2, ^shape_2}] = ShapeStatus.list_shapes(state)
+    assert {:ok, ^shape_1} = ShapeStatus.remove_shape(state, shape_handle_1)
+    assert [{^shape_handle_2, ^shape_2}] = ShapeStatus.list_shapes(state)
   end
 
   test "get_existing_shape/2 with %Shape{}", ctx do
@@ -104,94 +105,94 @@ defmodule Electric.ShapeCache.ShapeStatusTest do
 
     refute ShapeStatus.get_existing_shape(state, shape)
 
-    assert {:ok, shape_id} = ShapeStatus.add_shape(state, shape)
-    assert {^shape_id, _} = ShapeStatus.get_existing_shape(state, shape)
+    assert {:ok, shape_handle} = ShapeStatus.add_shape(state, shape)
+    assert {^shape_handle, _} = ShapeStatus.get_existing_shape(state, shape)
 
-    assert {:ok, ^shape} = ShapeStatus.remove_shape(state, shape_id)
+    assert {:ok, ^shape} = ShapeStatus.remove_shape(state, shape_handle)
     refute ShapeStatus.get_existing_shape(state, shape)
   end
 
-  test "get_existing_shape/2 with shape_id", ctx do
+  test "get_existing_shape/2 with shape_handle", ctx do
     shape = shape!()
-    {:ok, state, [shape_id]} = new_state(ctx, shapes: [shape])
+    {:ok, state, [shape_handle]} = new_state(ctx, shapes: [shape])
 
     refute ShapeStatus.get_existing_shape(state, "1234")
 
-    assert {^shape_id, _} = ShapeStatus.get_existing_shape(state, shape)
-    assert {^shape_id, _} = ShapeStatus.get_existing_shape(state, shape_id)
+    assert {^shape_handle, _} = ShapeStatus.get_existing_shape(state, shape)
+    assert {^shape_handle, _} = ShapeStatus.get_existing_shape(state, shape_handle)
 
-    assert {:ok, ^shape} = ShapeStatus.remove_shape(state, shape_id)
+    assert {:ok, ^shape} = ShapeStatus.remove_shape(state, shape_handle)
     refute ShapeStatus.get_existing_shape(state, shape)
-    refute ShapeStatus.get_existing_shape(state, shape_id)
+    refute ShapeStatus.get_existing_shape(state, shape_handle)
   end
 
   test "get_existing_shape/2 public api", ctx do
     shape = shape!()
     table = table_name()
 
-    {:ok, state, [shape_id]} = new_state(ctx, table: table, shapes: [shape])
+    {:ok, state, [shape_handle]} = new_state(ctx, table: table, shapes: [shape])
 
     refute ShapeStatus.get_existing_shape(table, "1234")
 
-    assert {^shape_id, _} = ShapeStatus.get_existing_shape(table, shape)
-    assert {^shape_id, _} = ShapeStatus.get_existing_shape(table, shape_id)
+    assert {^shape_handle, _} = ShapeStatus.get_existing_shape(table, shape)
+    assert {^shape_handle, _} = ShapeStatus.get_existing_shape(table, shape_handle)
 
-    assert {:ok, ^shape} = ShapeStatus.remove_shape(state, shape_id)
+    assert {:ok, ^shape} = ShapeStatus.remove_shape(state, shape_handle)
     refute ShapeStatus.get_existing_shape(table, shape)
-    refute ShapeStatus.get_existing_shape(table, shape_id)
+    refute ShapeStatus.get_existing_shape(table, shape_handle)
   end
 
   test "latest_offset", ctx do
-    {:ok, state, [shape_id]} = new_state(ctx, shapes: [shape!()])
+    {:ok, state, [shape_handle]} = new_state(ctx, shapes: [shape!()])
     assert :error = ShapeStatus.latest_offset(state, "sdfsodf")
-    assert ShapeStatus.latest_offset(state, shape_id) == {:ok, LogOffset.first()}
+    assert ShapeStatus.latest_offset(state, shape_handle) == {:ok, LogOffset.first()}
     offset = LogOffset.new(100, 3)
-    assert ShapeStatus.set_latest_offset(state, shape_id, offset)
+    assert ShapeStatus.set_latest_offset(state, shape_handle, offset)
     refute ShapeStatus.set_latest_offset(state, "not my shape", offset)
-    assert ShapeStatus.latest_offset(state, shape_id) == {:ok, offset}
+    assert ShapeStatus.latest_offset(state, shape_handle) == {:ok, offset}
   end
 
   test "latest_offset public api", ctx do
     table_name = table_name()
-    {:ok, _state, [shape_id]} = new_state(ctx, table: table_name, shapes: [shape!()])
+    {:ok, _state, [shape_handle]} = new_state(ctx, table: table_name, shapes: [shape!()])
     assert :error = ShapeStatus.latest_offset(table_name, "sdfsodf")
-    assert ShapeStatus.latest_offset(table_name, shape_id) == {:ok, LogOffset.first()}
+    assert ShapeStatus.latest_offset(table_name, shape_handle) == {:ok, LogOffset.first()}
     offset = LogOffset.new(100, 3)
     refute ShapeStatus.set_latest_offset(table_name, "not my shape", offset)
-    assert ShapeStatus.set_latest_offset(table_name, shape_id, offset)
-    assert ShapeStatus.latest_offset(table_name, shape_id) == {:ok, offset}
+    assert ShapeStatus.set_latest_offset(table_name, shape_handle, offset)
+    assert ShapeStatus.latest_offset(table_name, shape_handle) == {:ok, offset}
   end
 
   test "initialise_shape/4", ctx do
-    {:ok, state, [shape_id]} = new_state(ctx, shapes: [shape!()])
+    {:ok, state, [shape_handle]} = new_state(ctx, shapes: [shape!()])
     offset = LogOffset.new(100, 3)
-    assert :ok = ShapeStatus.initialise_shape(state, shape_id, 1234, offset)
-    assert ShapeStatus.latest_offset(state, shape_id) == {:ok, offset}
-    assert ShapeStatus.snapshot_xmin(state, shape_id) == {:ok, 1234}
+    assert :ok = ShapeStatus.initialise_shape(state, shape_handle, 1234, offset)
+    assert ShapeStatus.latest_offset(state, shape_handle) == {:ok, offset}
+    assert ShapeStatus.snapshot_xmin(state, shape_handle) == {:ok, 1234}
   end
 
   test "snapshot_xmin/2", ctx do
-    {:ok, state, [shape_id]} = new_state(ctx, shapes: [shape!()])
+    {:ok, state, [shape_handle]} = new_state(ctx, shapes: [shape!()])
 
     refute ShapeStatus.set_snapshot_xmin(state, "sdfsodf", 1234)
 
     assert :error = ShapeStatus.snapshot_xmin(state, "sdfsodf")
-    assert {:ok, nil} == ShapeStatus.snapshot_xmin(state, shape_id)
-    assert ShapeStatus.set_snapshot_xmin(state, shape_id, 1234)
-    assert {:ok, 1234} == ShapeStatus.snapshot_xmin(state, shape_id)
+    assert {:ok, nil} == ShapeStatus.snapshot_xmin(state, shape_handle)
+    assert ShapeStatus.set_snapshot_xmin(state, shape_handle, 1234)
+    assert {:ok, 1234} == ShapeStatus.snapshot_xmin(state, shape_handle)
   end
 
   test "snapshot_started?/2", ctx do
-    {:ok, state, [shape_id]} = new_state(ctx, shapes: [shape!()])
+    {:ok, state, [shape_handle]} = new_state(ctx, shapes: [shape!()])
 
     refute ShapeStatus.snapshot_started?(state, "sdfsodf")
     refute ShapeStatus.snapshot_started?(state.shape_meta_table, "sdfsodf")
-    refute ShapeStatus.snapshot_started?(state, shape_id)
+    refute ShapeStatus.snapshot_started?(state, shape_handle)
 
-    ShapeStatus.mark_snapshot_started(state, shape_id)
+    ShapeStatus.mark_snapshot_started(state, shape_handle)
 
-    assert ShapeStatus.snapshot_started?(state, shape_id)
-    assert ShapeStatus.snapshot_started?(state.shape_meta_table, shape_id)
+    assert ShapeStatus.snapshot_started?(state, shape_handle)
+    assert ShapeStatus.snapshot_started?(state.shape_meta_table, shape_handle)
   end
 
   def load_column_info({"public", "other_table"}, _),
diff --git a/packages/sync-service/test/electric/shape_cache/storage_implementations_test.exs b/packages/sync-service/test/electric/shape_cache/storage_implementations_test.exs
index 5e1d44b1a6..70ef7ecd02 100644
--- a/packages/sync-service/test/electric/shape_cache/storage_implementations_test.exs
+++ b/packages/sync-service/test/electric/shape_cache/storage_implementations_test.exs
@@ -13,7 +13,7 @@ defmodule Electric.ShapeCache.StorageImplimentationsTest do
 
   @moduletag :tmp_dir
 
-  @shape_id "the-shape-id"
+  @shape_handle "the-shape-handle"
   @shape %Shape{
     root_table: {"public", "items"},
     root_table_id: 1,
@@ -527,7 +527,7 @@ defmodule Electric.ShapeCache.StorageImplimentationsTest do
         storage.initialise(opts)
         storage.set_shape_definition(@shape, opts)
 
-        assert {:ok, %{@shape_id => @shape}} =
+        assert {:ok, %{@shape_handle => @shape}} =
                  Electric.ShapeCache.Storage.get_all_stored_shapes({storage, opts})
       end
     end
@@ -535,7 +535,7 @@ defmodule Electric.ShapeCache.StorageImplimentationsTest do
 
   defp start_storage(%{module: module} = context) do
     opts = module |> opts(context) |> module.shared_opts()
-    shape_opts = module.for_shape(@shape_id, @tenant_id, opts)
+    shape_opts = module.for_shape(@shape_handle, @tenant_id, opts)
     {:ok, _} = module.start_link(shape_opts)
     {:ok, %{module: module, opts: shape_opts}}
   end
diff --git a/packages/sync-service/test/electric/shape_cache/storage_test.exs b/packages/sync-service/test/electric/shape_cache/storage_test.exs
index acd5084b0e..ae42b64f20 100644
--- a/packages/sync-service/test/electric/shape_cache/storage_test.exs
+++ b/packages/sync-service/test/electric/shape_cache/storage_test.exs
@@ -11,18 +11,18 @@ defmodule Electric.ShapeCache.StorageTest do
 
   test "should pass through the calls to the storage module" do
     storage = {Mock.Storage, :opts}
-    shape_id = "test"
+    shape_handle = "test"
     tenant_id = "test_tenant"
 
     Mock.Storage
-    |> Mox.stub(:for_shape, fn ^shape_id, ^tenant_id, :opts -> {shape_id, :opts} end)
-    |> Mox.expect(:make_new_snapshot!, fn _, {^shape_id, :opts} -> :ok end)
-    |> Mox.expect(:snapshot_started?, fn {^shape_id, :opts} -> true end)
-    |> Mox.expect(:get_snapshot, fn {^shape_id, :opts} -> {1, []} end)
-    |> Mox.expect(:append_to_log!, fn _, {^shape_id, :opts} -> :ok end)
-    |> Mox.expect(:get_log_stream, fn _, _, {^shape_id, :opts} -> [] end)
+    |> Mox.stub(:for_shape, fn ^shape_handle, ^tenant_id, :opts -> {shape_handle, :opts} end)
+    |> Mox.expect(:make_new_snapshot!, fn _, {^shape_handle, :opts} -> :ok end)
+    |> Mox.expect(:snapshot_started?, fn {^shape_handle, :opts} -> true end)
+    |> Mox.expect(:get_snapshot, fn {^shape_handle, :opts} -> {1, []} end)
+    |> Mox.expect(:append_to_log!, fn _, {^shape_handle, :opts} -> :ok end)
+    |> Mox.expect(:get_log_stream, fn _, _, {^shape_handle, :opts} -> [] end)
 
-    shape_storage = Storage.for_shape(shape_id, tenant_id, storage)
+    shape_storage = Storage.for_shape(shape_handle, tenant_id, storage)
 
     Storage.make_new_snapshot!([], shape_storage)
     Storage.snapshot_started?(shape_storage)
@@ -37,8 +37,8 @@ defmodule Electric.ShapeCache.StorageTest do
     tenant_id = "test_tenant"
 
     Mock.Storage
-    |> Mox.stub(:for_shape, fn shape_id, _, :opts -> {shape_id, :opts} end)
-    |> Mox.expect(:get_log_stream, fn _, _, {_shape_id, :opts} -> [] end)
+    |> Mox.stub(:for_shape, fn shape_handle, _, :opts -> {shape_handle, :opts} end)
+    |> Mox.expect(:get_log_stream, fn _, _, {_shape_handle, :opts} -> [] end)
 
     l1 = LogOffset.new(26_877_408, 10)
     l2 = LogOffset.new(26_877_648, 0)
diff --git a/packages/sync-service/test/electric/shape_cache_test.exs b/packages/sync-service/test/electric/shape_cache_test.exs
index ed3422370a..860a337f34 100644
--- a/packages/sync-service/test/electric/shape_cache_test.exs
+++ b/packages/sync-service/test/electric/shape_cache_test.exs
@@ -62,7 +62,7 @@ defmodule Electric.ShapeCacheTest do
     %{inspector: @stub_inspector, run_with_conn_fn: fn _, cb -> cb.(:connection) end}
   end
 
-  describe "get_or_create_shape_id/2" do
+  describe "get_or_create_shape_handle/2" do
     setup [
       :with_electric_instance_id,
       :with_tenant_id,
@@ -81,19 +81,19 @@ defmodule Electric.ShapeCacheTest do
       )
     end
 
-    test "creates a new shape_id", %{shape_cache_opts: opts} do
-      {shape_id, @zero_offset} = ShapeCache.get_or_create_shape_id(@shape, opts)
-      assert is_binary(shape_id)
+    test "creates a new shape_handle", %{shape_cache_opts: opts} do
+      {shape_handle, @zero_offset} = ShapeCache.get_or_create_shape_handle(@shape, opts)
+      assert is_binary(shape_handle)
     end
 
-    test "returns existing shape_id", %{shape_cache_opts: opts} do
-      {shape_id1, @zero_offset} = ShapeCache.get_or_create_shape_id(@shape, opts)
-      {shape_id2, @zero_offset} = ShapeCache.get_or_create_shape_id(@shape, opts)
-      assert shape_id1 == shape_id2
+    test "returns existing shape_handle", %{shape_cache_opts: opts} do
+      {shape_handle1, @zero_offset} = ShapeCache.get_or_create_shape_handle(@shape, opts)
+      {shape_handle2, @zero_offset} = ShapeCache.get_or_create_shape_handle(@shape, opts)
+      assert shape_handle1 == shape_handle2
     end
   end
 
-  describe "get_or_create_shape_id/2 shape initialization" do
+  describe "get_or_create_shape_handle/2 shape initialization" do
     setup [
       :with_electric_instance_id,
       :with_tenant_id,
@@ -108,18 +108,18 @@ defmodule Electric.ShapeCacheTest do
         with_shape_cache(Map.merge(ctx, %{pool: nil, inspector: @stub_inspector}),
           run_with_conn_fn: &run_with_conn_noop/2,
           prepare_tables_fn: @prepare_tables_noop,
-          create_snapshot_fn: fn parent, shape_id, _shape, _, storage ->
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, 10})
+          create_snapshot_fn: fn parent, shape_handle, _shape, _, storage ->
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, 10})
             Storage.make_new_snapshot!([["test"]], storage)
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
           end
         )
 
-      {shape_id, offset} = ShapeCache.get_or_create_shape_id(@shape, opts)
+      {shape_handle, offset} = ShapeCache.get_or_create_shape_handle(@shape, opts)
       assert offset == @zero_offset
-      assert :started = ShapeCache.await_snapshot_start(shape_id, opts)
+      assert :started = ShapeCache.await_snapshot_start(shape_handle, opts)
       Process.sleep(100)
-      shape_storage = Storage.for_shape(shape_id, ctx.tenant_id, storage)
+      shape_storage = Storage.for_shape(shape_handle, ctx.tenant_id, storage)
       assert Storage.snapshot_started?(shape_storage)
     end
 
@@ -132,20 +132,21 @@ defmodule Electric.ShapeCacheTest do
           prepare_tables_fn: fn nil, [{{"public", "items"}, nil}] ->
             send(test_pid, {:called, :prepare_tables_fn})
           end,
-          create_snapshot_fn: fn parent, shape_id, _shape, _, storage ->
+          create_snapshot_fn: fn parent, shape_handle, _shape, _, storage ->
             send(test_pid, {:called, :create_snapshot_fn})
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, 10})
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, 10})
             Storage.make_new_snapshot!([["test"]], storage)
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
           end
         )
 
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
 
-      # subsequent calls return the same shape_id
-      for _ <- 1..10, do: assert({^shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts))
+      # subsequent calls return the same shape_handle
+      for _ <- 1..10,
+          do: assert({^shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts))
 
-      assert :started = ShapeCache.await_snapshot_start(shape_id, opts)
+      assert :started = ShapeCache.await_snapshot_start(shape_handle, opts)
 
       assert_received {:called, :prepare_tables_fn}
       assert_received {:called, :create_snapshot_fn}
@@ -159,11 +160,11 @@ defmodule Electric.ShapeCacheTest do
         with_shape_cache(Map.merge(ctx, %{pool: nil, inspector: @stub_inspector}),
           run_with_conn_fn: &run_with_conn_noop/2,
           prepare_tables_fn: @prepare_tables_noop,
-          create_snapshot_fn: fn parent, shape_id, _shape, _, storage ->
+          create_snapshot_fn: fn parent, shape_handle, _shape, _, storage ->
             send(test_pid, {:called, :create_snapshot_fn})
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, 10})
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, 10})
             Storage.make_new_snapshot!([["test"]], storage)
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
           end
         )
 
@@ -174,34 +175,34 @@ defmodule Electric.ShapeCacheTest do
 
       create_call_1 =
         Task.async(fn ->
-          {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
-          shape_id
+          {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
+          shape_handle
         end)
 
       create_call_2 =
         Task.async(fn ->
-          {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
-          shape_id
+          {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
+          shape_handle
         end)
 
-      # resume the genserver and assert both queued tasks return the same shape_id
+      # resume the genserver and assert both queued tasks return the same shape_handle
       :sys.resume(link_pid)
-      shape_id = Task.await(create_call_1)
-      assert shape_id == Task.await(create_call_2)
+      shape_handle = Task.await(create_call_1)
+      assert shape_handle == Task.await(create_call_2)
 
-      assert :started = ShapeCache.await_snapshot_start(shape_id, opts)
+      assert :started = ShapeCache.await_snapshot_start(shape_handle, opts)
 
-      # any queued calls should still return the existing shape_id
+      # any queued calls should still return the existing shape_handle
       # after the snapshot has been created (simulated by directly
       # calling GenServer)
-      assert {^shape_id, _} =
-               GenServer.call(link_pid, {:create_or_wait_shape_id, @shape})
+      assert {^shape_handle, _} =
+               GenServer.call(link_pid, {:create_or_wait_shape_handle, @shape})
 
       assert_received {:called, :create_snapshot_fn}
     end
   end
 
-  describe "get_or_create_shape_id/2 against real db" do
+  describe "get_or_create_shape_handle/2 against real db" do
     setup [
       :with_electric_instance_id,
       :with_tenant_id,
@@ -232,9 +233,9 @@ defmodule Electric.ShapeCacheTest do
       shape_cache_opts: opts,
       tenant_id: tenant_id
     } do
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
-      assert :started = ShapeCache.await_snapshot_start(shape_id, opts)
-      storage = Storage.for_shape(shape_id, tenant_id, storage)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
+      assert :started = ShapeCache.await_snapshot_start(shape_handle, opts)
+      storage = Storage.for_shape(shape_handle, tenant_id, storage)
       assert {@zero_offset, stream} = Storage.get_snapshot(storage)
 
       assert [%{"value" => %{"value" => "test1"}}, %{"value" => %{"value" => "test2"}}] =
@@ -296,9 +297,9 @@ defmodule Electric.ShapeCacheTest do
         ]
       )
 
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(shape, opts)
-      assert :started = ShapeCache.await_snapshot_start(shape_id, opts)
-      storage = Storage.for_shape(shape_id, tenant_id, storage)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(shape, opts)
+      assert :started = ShapeCache.await_snapshot_start(shape_handle, opts)
+      storage = Storage.for_shape(shape_handle, tenant_id, storage)
       assert {@zero_offset, stream} = Storage.get_snapshot(storage)
 
       assert [
@@ -322,18 +323,20 @@ defmodule Electric.ShapeCacheTest do
       tenant_id: tenant_id,
       storage: storage
     } do
-      {shape_id, initial_offset} = ShapeCache.get_or_create_shape_id(@shape, opts)
-      assert :started = ShapeCache.await_snapshot_start(shape_id, opts)
+      {shape_handle, initial_offset} = ShapeCache.get_or_create_shape_handle(@shape, opts)
+      assert :started = ShapeCache.await_snapshot_start(shape_handle, opts)
 
-      assert {^shape_id, offset_after_snapshot} =
-               ShapeCache.get_or_create_shape_id(@shape, opts)
+      assert {^shape_handle, offset_after_snapshot} =
+               ShapeCache.get_or_create_shape_handle(@shape, opts)
 
       expected_offset_after_log_entry =
         LogOffset.new(Electric.Postgres.Lsn.from_integer(1000), 0)
 
-      :ok = ShapeCache.update_shape_latest_offset(shape_id, expected_offset_after_log_entry, opts)
+      :ok =
+        ShapeCache.update_shape_latest_offset(shape_handle, expected_offset_after_log_entry, opts)
 
-      assert {^shape_id, offset_after_log_entry} = ShapeCache.get_or_create_shape_id(@shape, opts)
+      assert {^shape_handle, offset_after_log_entry} =
+               ShapeCache.get_or_create_shape_handle(@shape, opts)
 
       assert initial_offset == @zero_offset
       assert initial_offset == offset_after_snapshot
@@ -341,35 +344,35 @@ defmodule Electric.ShapeCacheTest do
       assert offset_after_log_entry == expected_offset_after_log_entry
 
       # Stop snapshot process gracefully to prevent errors being logged in the test
-      storage = Storage.for_shape(shape_id, tenant_id, storage)
+      storage = Storage.for_shape(shape_handle, tenant_id, storage)
       {_, stream} = Storage.get_snapshot(storage)
       Stream.run(stream)
     end
 
-    test "errors if appending to untracked shape_id", %{shape_cache_opts: opts} do
-      shape_id = "foo"
+    test "errors if appending to untracked shape_handle", %{shape_cache_opts: opts} do
+      shape_handle = "foo"
       log_offset = LogOffset.new(1000, 0)
 
       {:error, log} =
-        with_log(fn -> ShapeCache.update_shape_latest_offset(shape_id, log_offset, opts) end)
+        with_log(fn -> ShapeCache.update_shape_latest_offset(shape_handle, log_offset, opts) end)
 
-      assert log =~ "Tried to update latest offset for shape #{shape_id} which doesn't exist"
+      assert log =~ "Tried to update latest offset for shape #{shape_handle} which doesn't exist"
     end
 
     test "correctly propagates the error", %{shape_cache_opts: opts} do
       shape = %Shape{root_table: {"public", "nonexistent"}, root_table_id: 2}
 
-      {shape_id, log} =
+      {shape_handle, log} =
         with_log(fn ->
-          {shape_id, _} = ShapeCache.get_or_create_shape_id(shape, opts)
+          {shape_handle, _} = ShapeCache.get_or_create_shape_handle(shape, opts)
 
           assert {:error, %Postgrex.Error{postgres: %{code: :undefined_table}}} =
-                   ShapeCache.await_snapshot_start(shape_id, opts)
+                   ShapeCache.await_snapshot_start(shape_handle, opts)
 
-          shape_id
+          shape_handle
         end)
 
-      log =~ "Snapshot creation failed for #{shape_id}"
+      log =~ "Snapshot creation failed for #{shape_handle}"
 
       log =~
         ~S|** (Postgrex.Error) ERROR 42P01 (undefined_table) relation "public.nonexistent" does not exist|
@@ -403,18 +406,18 @@ defmodule Electric.ShapeCacheTest do
         with_shape_cache(Map.merge(ctx, %{pool: nil, inspector: @stub_inspector}),
           run_with_conn_fn: &run_with_conn_noop/2,
           prepare_tables_fn: @prepare_tables_noop,
-          create_snapshot_fn: fn parent, shape_id, _shape, _, storage ->
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, 10})
+          create_snapshot_fn: fn parent, shape_handle, _shape, _, storage ->
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, 10})
             Storage.make_new_snapshot!([["test"]], storage)
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
           end
         )
 
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
-      assert :started = ShapeCache.await_snapshot_start(shape_id, opts)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
+      assert :started = ShapeCache.await_snapshot_start(shape_handle, opts)
       meta_table = Access.fetch!(opts, :shape_meta_table)
-      assert [{^shape_id, @shape}] = ShapeCache.list_shapes(%{shape_meta_table: meta_table})
-      assert {:ok, 10} = ShapeStatus.snapshot_xmin(meta_table, shape_id)
+      assert [{^shape_handle, @shape}] = ShapeCache.list_shapes(%{shape_meta_table: meta_table})
+      assert {:ok, 10} = ShapeStatus.snapshot_xmin(meta_table, shape_handle)
     end
 
     test "lists the shape even if we don't know xmin", ctx do
@@ -424,28 +427,28 @@ defmodule Electric.ShapeCacheTest do
         with_shape_cache(Map.merge(ctx, %{pool: nil, inspector: @stub_inspector}),
           run_with_conn_fn: &run_with_conn_noop/2,
           prepare_tables_fn: @prepare_tables_noop,
-          create_snapshot_fn: fn parent, shape_id, _shape, _, storage ->
+          create_snapshot_fn: fn parent, shape_handle, _shape, _, storage ->
             ref = make_ref()
             send(test_pid, {:waiting_point, ref, self()})
             receive(do: ({:continue, ^ref} -> :ok))
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, 10})
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, 10})
             Storage.make_new_snapshot!([["test"]], storage)
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
           end
         )
 
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
 
       # Wait until we get to the waiting point in the snapshot
       assert_receive {:waiting_point, ref, pid}
 
       meta_table = Access.fetch!(opts, :shape_meta_table)
-      assert [{^shape_id, @shape}] = ShapeCache.list_shapes(%{shape_meta_table: meta_table})
+      assert [{^shape_handle, @shape}] = ShapeCache.list_shapes(%{shape_meta_table: meta_table})
 
       send(pid, {:continue, ref})
 
-      assert :started = ShapeCache.await_snapshot_start(shape_id, opts)
-      assert [{^shape_id, @shape}] = ShapeCache.list_shapes(%{shape_meta_table: meta_table})
+      assert :started = ShapeCache.await_snapshot_start(shape_handle, opts)
+      assert [{^shape_handle, @shape}] = ShapeCache.list_shapes(%{shape_meta_table: meta_table})
     end
   end
 
@@ -459,20 +462,20 @@ defmodule Electric.ShapeCacheTest do
       :with_shape_log_collector
     ]
 
-    test "returns true for known shape id", ctx do
+    test "returns true for known shape handle", ctx do
       %{shape_cache_opts: opts} =
         with_shape_cache(Map.merge(ctx, %{pool: nil, inspector: @stub_inspector}),
           run_with_conn_fn: &run_with_conn_noop/2,
           prepare_tables_fn: @prepare_tables_noop,
-          create_snapshot_fn: fn parent, shape_id, _, _, _ ->
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, 100})
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+          create_snapshot_fn: fn parent, shape_handle, _, _, _ ->
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, 100})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
           end
         )
 
       refute ShapeCache.has_shape?("some-random-id", opts)
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
-      assert ShapeCache.has_shape?(shape_id, opts)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
+      assert ShapeCache.has_shape?(shape_handle, opts)
     end
 
     test "works with slow snapshot generation", ctx do
@@ -480,15 +483,15 @@ defmodule Electric.ShapeCacheTest do
         with_shape_cache(Map.merge(ctx, %{pool: nil, inspector: @stub_inspector}),
           run_with_conn_fn: &run_with_conn_noop/2,
           prepare_tables_fn: @prepare_tables_noop,
-          create_snapshot_fn: fn parent, shape_id, _, _, _ ->
+          create_snapshot_fn: fn parent, shape_handle, _, _, _ ->
             Process.sleep(100)
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, 100})
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, 100})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
           end
         )
 
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
-      assert ShapeCache.has_shape?(shape_id, opts)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
+      assert ShapeCache.has_shape?(shape_handle, opts)
     end
   end
 
@@ -507,34 +510,34 @@ defmodule Electric.ShapeCacheTest do
         with_shape_cache(Map.merge(ctx, %{pool: nil, inspector: @stub_inspector}),
           run_with_conn_fn: &run_with_conn_noop/2,
           prepare_tables_fn: @prepare_tables_noop,
-          create_snapshot_fn: fn parent, shape_id, _, _, _ ->
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, 100})
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+          create_snapshot_fn: fn parent, shape_handle, _, _, _ ->
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, 100})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
           end
         )
 
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
 
-      assert ShapeCache.await_snapshot_start(shape_id, opts) == :started
+      assert ShapeCache.await_snapshot_start(shape_handle, opts) == :started
     end
 
-    test "returns an error if waiting is for an unknown shape id", ctx do
-      shape_id = "orphaned_id"
+    test "returns an error if waiting is for an unknown shape handle", ctx do
+      shape_handle = "orphaned_handle"
 
-      storage = Storage.for_shape(shape_id, ctx.tenant_id, ctx.storage)
+      storage = Storage.for_shape(shape_handle, ctx.tenant_id, ctx.storage)
 
       %{shape_cache_opts: opts} =
         with_shape_cache(Map.merge(ctx, %{pool: nil, inspector: @stub_inspector}),
           run_with_conn_fn: &run_with_conn_noop/2,
           prepare_tables_fn: @prepare_tables_noop,
-          create_snapshot_fn: fn parent, shape_id, _shape, _, storage ->
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, 10})
+          create_snapshot_fn: fn parent, shape_handle, _shape, _, storage ->
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, 10})
             Storage.make_new_snapshot!([["test"]], storage)
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
           end
         )
 
-      assert {:error, :unknown} = ShapeCache.await_snapshot_start(shape_id, opts)
+      assert {:error, :unknown} = ShapeCache.await_snapshot_start(shape_handle, opts)
 
       refute Storage.snapshot_started?(storage)
     end
@@ -546,28 +549,28 @@ defmodule Electric.ShapeCacheTest do
         with_shape_cache(Map.merge(ctx, %{pool: nil, inspector: @stub_inspector}),
           run_with_conn_fn: &run_with_conn_noop/2,
           prepare_tables_fn: @prepare_tables_noop,
-          create_snapshot_fn: fn parent, shape_id, _shape, _, storage ->
+          create_snapshot_fn: fn parent, shape_handle, _shape, _, storage ->
             ref = make_ref()
             send(test_pid, {:waiting_point, ref, self()})
             receive(do: ({:continue, ^ref} -> :ok))
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, 10})
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, 10})
 
             # Sometimes only some tasks subscribe before reaching this point, and then hang
             # if we don't actually have a snapshot. This is kind of part of the test, because
             # `await_snapshot_start/3` should always resolve to `:started` in concurrent situations
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
             Storage.make_new_snapshot!([[1], [2]], storage)
           end
         )
 
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
 
-      storage = Storage.for_shape(shape_id, ctx.tenant_id, ctx.storage)
+      storage = Storage.for_shape(shape_handle, ctx.tenant_id, ctx.storage)
 
       tasks =
         for _id <- 1..10 do
           Task.async(fn ->
-            assert :started = ShapeCache.await_snapshot_start(shape_id, opts)
+            assert :started = ShapeCache.await_snapshot_start(shape_handle, opts)
             {_, stream} = Storage.get_snapshot(storage)
             assert Enum.count(stream) == 2
           end)
@@ -595,22 +598,22 @@ defmodule Electric.ShapeCacheTest do
         with_shape_cache(Map.merge(ctx, %{pool: nil, inspector: @stub_inspector}),
           run_with_conn_fn: &run_with_conn_noop/2,
           prepare_tables_fn: @prepare_tables_noop,
-          create_snapshot_fn: fn parent, shape_id, _shape, _, storage ->
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, 10})
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+          create_snapshot_fn: fn parent, shape_handle, _shape, _, storage ->
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, 10})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
 
             Storage.make_new_snapshot!(stream_from_database, storage)
           end
         )
 
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
 
-      storage = Storage.for_shape(shape_id, ctx.tenant_id, ctx.storage)
+      storage = Storage.for_shape(shape_handle, ctx.tenant_id, ctx.storage)
 
       tasks =
         for _ <- 1..10 do
           Task.async(fn ->
-            :started = ShapeCache.await_snapshot_start(shape_id, opts)
+            :started = ShapeCache.await_snapshot_start(shape_handle, opts)
             {_, stream} = Storage.get_snapshot(storage)
 
             assert_raise RuntimeError, fn -> Stream.run(stream) end
@@ -627,21 +630,21 @@ defmodule Electric.ShapeCacheTest do
         with_shape_cache(Map.merge(ctx, %{pool: nil, inspector: @stub_inspector}),
           run_with_conn_fn: &run_with_conn_noop/2,
           prepare_tables_fn: @prepare_tables_noop,
-          create_snapshot_fn: fn parent, shape_id, _shape, _, _storage ->
+          create_snapshot_fn: fn parent, shape_handle, _shape, _, _storage ->
             ref = make_ref()
             send(test_pid, {:waiting_point, ref, self()})
             receive(do: ({:continue, ^ref} -> :ok))
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, 10})
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, 10})
 
             GenServer.cast(
               parent,
-              {:snapshot_failed, shape_id, %RuntimeError{message: "expected error"}, []}
+              {:snapshot_failed, shape_handle, %RuntimeError{message: "expected error"}, []}
             )
           end
         )
 
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
-      task = Task.async(fn -> ShapeCache.await_snapshot_start(shape_id, opts) end)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
+      task = Task.async(fn -> ShapeCache.await_snapshot_start(shape_handle, opts) end)
 
       log =
         capture_log(fn ->
@@ -652,7 +655,7 @@ defmodule Electric.ShapeCacheTest do
                    Task.await(task)
         end)
 
-      assert log =~ "Snapshot creation failed for #{shape_id}"
+      assert log =~ "Snapshot creation failed for #{shape_handle}"
     end
   end
 
@@ -666,23 +669,23 @@ defmodule Electric.ShapeCacheTest do
       :with_shape_log_collector
     ]
 
-    test "cleans up shape data and rotates the shape id", ctx do
+    test "cleans up shape data and rotates the shape handle", ctx do
       %{shape_cache_opts: opts} =
         with_shape_cache(Map.merge(ctx, %{pool: nil, inspector: @stub_inspector}),
           run_with_conn_fn: &run_with_conn_noop/2,
           prepare_tables_fn: @prepare_tables_noop,
-          create_snapshot_fn: fn parent, shape_id, _shape, _, storage ->
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, 10})
+          create_snapshot_fn: fn parent, shape_handle, _shape, _, storage ->
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, 10})
             Storage.make_new_snapshot!([["test"]], storage)
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
           end
         )
 
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
       Process.sleep(50)
-      assert :started = ShapeCache.await_snapshot_start(shape_id, opts)
+      assert :started = ShapeCache.await_snapshot_start(shape_handle, opts)
 
-      storage = Storage.for_shape(shape_id, ctx.tenant_id, ctx.storage)
+      storage = Storage.for_shape(shape_handle, ctx.tenant_id, ctx.storage)
 
       Storage.append_to_log!(
         changes_to_log_items([
@@ -699,11 +702,11 @@ defmodule Electric.ShapeCacheTest do
       assert Enum.count(Storage.get_log_stream(@zero_offset, storage)) == 1
 
       ref =
-        Shapes.Consumer.whereis(ctx.electric_instance_id, ctx.tenant_id, shape_id)
+        Shapes.Consumer.whereis(ctx.electric_instance_id, ctx.tenant_id, shape_handle)
         |> Process.monitor()
 
-      log = capture_log(fn -> ShapeCache.handle_truncate(shape_id, opts) end)
-      assert log =~ "Truncating and rotating shape id"
+      log = capture_log(fn -> ShapeCache.handle_truncate(shape_handle, opts) end)
+      assert log =~ "Truncating and rotating shape handle"
 
       assert_receive {:DOWN, ^ref, :process, _pid, _}
       # Wait a bit for the async cleanup to complete
@@ -722,23 +725,23 @@ defmodule Electric.ShapeCacheTest do
       :with_shape_log_collector
     ]
 
-    test "cleans up shape data and rotates the shape id", ctx do
+    test "cleans up shape data and rotates the shape handle", ctx do
       %{shape_cache_opts: opts} =
         with_shape_cache(Map.merge(ctx, %{pool: nil, inspector: @stub_inspector}),
           run_with_conn_fn: &run_with_conn_noop/2,
           prepare_tables_fn: @prepare_tables_noop,
-          create_snapshot_fn: fn parent, shape_id, _shape, _, storage ->
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, 10})
+          create_snapshot_fn: fn parent, shape_handle, _shape, _, storage ->
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, 10})
             Storage.make_new_snapshot!([["test"]], storage)
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
           end
         )
 
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
       Process.sleep(50)
-      assert :started = ShapeCache.await_snapshot_start(shape_id, opts)
+      assert :started = ShapeCache.await_snapshot_start(shape_handle, opts)
 
-      storage = Storage.for_shape(shape_id, ctx.tenant_id, ctx.storage)
+      storage = Storage.for_shape(shape_handle, ctx.tenant_id, ctx.storage)
 
       Storage.append_to_log!(
         changes_to_log_items([
@@ -758,11 +761,11 @@ defmodule Electric.ShapeCacheTest do
 
       ref =
         Process.monitor(
-          module.name(ctx.electric_instance_id, ctx.tenant_id, shape_id)
+          module.name(ctx.electric_instance_id, ctx.tenant_id, shape_handle)
           |> GenServer.whereis()
         )
 
-      log = capture_log(fn -> :ok = ShapeCache.clean_shape(shape_id, opts) end)
+      log = capture_log(fn -> :ok = ShapeCache.clean_shape(shape_handle, opts) end)
       assert log =~ "Cleaning up shape"
 
       assert_receive {:DOWN, ^ref, :process, _pid, _reason}
@@ -775,25 +781,25 @@ defmodule Electric.ShapeCacheTest do
                    ~r"Snapshot no longer available",
                    fn -> Storage.get_snapshot(storage) end
 
-      {shape_id2, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
-      assert shape_id != shape_id2
+      {shape_handle2, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
+      assert shape_handle != shape_handle2
     end
 
     test "cleans up shape swallows error if no shape to clean up", ctx do
-      shape_id = "foo"
+      shape_handle = "foo"
 
       %{shape_cache_opts: opts} =
         with_shape_cache(Map.merge(ctx, %{pool: nil, inspector: @stub_inspector}),
           run_with_conn_fn: &run_with_conn_noop/2,
           prepare_tables_fn: @prepare_tables_noop,
-          create_snapshot_fn: fn parent, shape_id, _shape, _, storage ->
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, 10})
+          create_snapshot_fn: fn parent, shape_handle, _shape, _, storage ->
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, 10})
             Storage.make_new_snapshot!([["test"]], storage)
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
           end
         )
 
-      {:ok, _} = with_log(fn -> ShapeCache.clean_shape(shape_id, opts) end)
+      {:ok, _} = with_log(fn -> ShapeCache.clean_shape(shape_handle, opts) end)
     end
   end
 
@@ -827,43 +833,43 @@ defmodule Electric.ShapeCacheTest do
         with_shape_cache(Map.put(ctx, :inspector, @stub_inspector),
           run_with_conn_fn: &run_with_conn_noop/2,
           prepare_tables_fn: @prepare_tables_noop,
-          create_snapshot_fn: fn parent, shape_id, _shape, _, storage ->
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, @snapshot_xmin})
+          create_snapshot_fn: fn parent, shape_handle, _shape, _, storage ->
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, @snapshot_xmin})
             Storage.make_new_snapshot!([["test"]], storage)
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
           end
         )
     )
 
-    test "restores shape_ids", %{shape_cache_opts: opts} = context do
-      {shape_id1, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
-      :started = ShapeCache.await_snapshot_start(shape_id1, opts)
+    test "restores shape_handles", %{shape_cache_opts: opts} = context do
+      {shape_handle1, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
+      :started = ShapeCache.await_snapshot_start(shape_handle1, opts)
       restart_shape_cache(context)
-      {shape_id2, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
-      assert shape_id1 == shape_id2
+      {shape_handle2, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
+      assert shape_handle1 == shape_handle2
     end
 
     test "restores snapshot xmins", %{shape_cache_opts: opts} = context do
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
-      :started = ShapeCache.await_snapshot_start(shape_id, opts)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
+      :started = ShapeCache.await_snapshot_start(shape_handle, opts)
       meta_table = Keyword.fetch!(opts, :shape_meta_table)
-      [{^shape_id, @shape}] = ShapeCache.list_shapes(%{shape_meta_table: meta_table})
-      {:ok, @snapshot_xmin} = ShapeStatus.snapshot_xmin(meta_table, shape_id)
+      [{^shape_handle, @shape}] = ShapeCache.list_shapes(%{shape_meta_table: meta_table})
+      {:ok, @snapshot_xmin} = ShapeStatus.snapshot_xmin(meta_table, shape_handle)
 
       %{shape_cache_opts: opts} = restart_shape_cache(context)
-      :started = ShapeCache.await_snapshot_start(shape_id, opts)
+      :started = ShapeCache.await_snapshot_start(shape_handle, opts)
 
       meta_table = Keyword.fetch!(opts, :shape_meta_table)
-      assert [{^shape_id, @shape}] = ShapeCache.list_shapes(%{shape_meta_table: meta_table})
-      {:ok, @snapshot_xmin} = ShapeStatus.snapshot_xmin(meta_table, shape_id)
+      assert [{^shape_handle, @shape}] = ShapeCache.list_shapes(%{shape_meta_table: meta_table})
+      {:ok, @snapshot_xmin} = ShapeStatus.snapshot_xmin(meta_table, shape_handle)
     end
 
     test "restores latest offset", %{shape_cache_opts: opts} = context do
       offset = @change_offset
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape, opts)
-      :started = ShapeCache.await_snapshot_start(shape_id, opts)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape, opts)
+      :started = ShapeCache.await_snapshot_start(shape_handle, opts)
 
-      ref = Shapes.Consumer.monitor(context.electric_instance_id, context.tenant_id, shape_id)
+      ref = Shapes.Consumer.monitor(context.electric_instance_id, context.tenant_id, shape_handle)
 
       ShapeLogCollector.store_transaction(
         %Changes.Transaction{
@@ -878,7 +884,7 @@ defmodule Electric.ShapeCacheTest do
 
       assert_receive {Shapes.Consumer, ^ref, @xid}
 
-      {^shape_id, ^offset} = ShapeCache.get_or_create_shape_id(@shape, opts)
+      {^shape_handle, ^offset} = ShapeCache.get_or_create_shape_handle(@shape, opts)
 
       # without this sleep, this test becomes unreliable. I think maybe due to
       # delays in actually writing the data to cubdb/fsyncing the tx. I've
@@ -888,22 +894,22 @@ defmodule Electric.ShapeCacheTest do
 
       restart_shape_cache(context)
 
-      :started = ShapeCache.await_snapshot_start(shape_id, opts)
-      assert {^shape_id, ^offset} = ShapeCache.get_or_create_shape_id(@shape, opts)
+      :started = ShapeCache.await_snapshot_start(shape_handle, opts)
+      assert {^shape_handle, ^offset} = ShapeCache.get_or_create_shape_handle(@shape, opts)
     end
 
     defp restart_shape_cache(context) do
       stop_shape_cache(context)
-      # Wait 1 millisecond to ensure shape IDs are not generated the same
+      # Wait 1 millisecond to ensure shape handles are not generated the same
       Process.sleep(1)
       with_cub_db_storage(context)
 
       with_shape_cache(Map.put(context, :inspector, @stub_inspector),
         prepare_tables_fn: @prepare_tables_noop,
-        create_snapshot_fn: fn parent, shape_id, _shape, _, storage ->
-          GenServer.cast(parent, {:snapshot_xmin_known, shape_id, @snapshot_xmin})
+        create_snapshot_fn: fn parent, shape_handle, _shape, _, storage ->
+          GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, @snapshot_xmin})
           Storage.make_new_snapshot!([["test"]], storage)
-          GenServer.cast(parent, {:snapshot_started, shape_id})
+          GenServer.cast(parent, {:snapshot_started, shape_handle})
         end
       )
     end
@@ -912,8 +918,8 @@ defmodule Electric.ShapeCacheTest do
       %{shape_cache: {shape_cache, shape_cache_opts}} = ctx
 
       consumers =
-        for {shape_id, _} <- shape_cache.list_shapes(Map.new(shape_cache_opts)) do
-          pid = Shapes.Consumer.whereis(ctx.electric_instance_id, ctx.tenant_id, shape_id)
+        for {shape_handle, _} <- shape_cache.list_shapes(Map.new(shape_cache_opts)) do
+          pid = Shapes.Consumer.whereis(ctx.electric_instance_id, ctx.tenant_id, shape_handle)
           {pid, Process.monitor(pid)}
         end
 
diff --git a/packages/sync-service/test/electric/shapes/consumer_test.exs b/packages/sync-service/test/electric/shapes/consumer_test.exs
index b7959c9552..d7b1c7aea6 100644
--- a/packages/sync-service/test/electric/shapes/consumer_test.exs
+++ b/packages/sync-service/test/electric/shapes/consumer_test.exs
@@ -20,22 +20,22 @@ defmodule Electric.Shapes.ConsumerTest do
 
   import Mox
 
-  @shape_id1 "#{__MODULE__}-shape1"
+  @shape_handle1 "#{__MODULE__}-shape1"
   @shape1 Shape.new!("public.test_table",
             inspector: StubInspector.new([%{name: "id", type: "int8", pk_position: 0}])
           )
 
-  @shape_id2 "#{__MODULE__}-shape2"
+  @shape_handle2 "#{__MODULE__}-shape2"
   @shape2 Shape.new!("public.other_table",
             inspector: StubInspector.new([%{name: "id", type: "int8", pk_position: 0}])
           )
 
   @shape_position %{
-    @shape_id1 => %{
+    @shape_handle1 => %{
       latest_offset: LogOffset.new(Lsn.from_string("0/10"), 0),
       snapshot_xmin: 100
     },
-    @shape_id2 => %{
+    @shape_handle2 => %{
       latest_offset: LogOffset.new(Lsn.from_string("0/50"), 0),
       snapshot_xmin: 120
     }
@@ -56,22 +56,22 @@ defmodule Electric.Shapes.ConsumerTest do
   setup :set_mox_from_context
   setup :verify_on_exit!
 
-  defp shape_status(shape_id, ctx) do
-    get_in(ctx, [:shape_position, shape_id]) || raise "invalid shape_id #{shape_id}"
+  defp shape_status(shape_handle, ctx) do
+    get_in(ctx, [:shape_position, shape_handle]) || raise "invalid shape_handle #{shape_handle}"
   end
 
-  defp log_offset(shape_id, ctx) do
-    get_in(ctx, [:shape_position, shape_id, :latest_offset]) ||
-      raise "invalid shape_id #{shape_id}"
+  defp log_offset(shape_handle, ctx) do
+    get_in(ctx, [:shape_position, shape_handle, :latest_offset]) ||
+      raise "invalid shape_handle #{shape_handle}"
   end
 
-  defp snapshot_xmin(shape_id, ctx) do
-    get_in(ctx, [:shape_position, shape_id, :snapshot_xmin]) ||
-      raise "invalid shape_id #{shape_id}"
+  defp snapshot_xmin(shape_handle, ctx) do
+    get_in(ctx, [:shape_position, shape_handle, :snapshot_xmin]) ||
+      raise "invalid shape_handle #{shape_handle}"
   end
 
-  defp lsn(shape_id, ctx) do
-    %{tx_offset: offset} = log_offset(shape_id, ctx)
+  defp lsn(shape_handle, ctx) do
+    %{tx_offset: offset} = log_offset(shape_handle, ctx)
     Lsn.from_integer(offset)
   end
 
@@ -83,7 +83,7 @@ defmodule Electric.Shapes.ConsumerTest do
     setup [:with_in_memory_storage]
 
     setup(ctx) do
-      shapes = Map.get(ctx, :shapes, %{@shape_id1 => @shape1, @shape_id2 => @shape2})
+      shapes = Map.get(ctx, :shapes, %{@shape_handle1 => @shape1, @shape_handle2 => @shape2})
       shape_position = Map.get(ctx, :shape_position, @shape_position)
       [shape_position: shape_position, shapes: shapes]
     end
@@ -92,16 +92,16 @@ defmodule Electric.Shapes.ConsumerTest do
       registry_name = Module.concat(__MODULE__, Registry)
       start_link_supervised!({Registry, keys: :duplicate, name: registry_name})
 
-      %{latest_offset: _offset1, snapshot_xmin: xmin1} = shape_status(@shape_id1, ctx)
-      %{latest_offset: _offset2, snapshot_xmin: xmin2} = shape_status(@shape_id2, ctx)
+      %{latest_offset: _offset1, snapshot_xmin: xmin1} = shape_status(@shape_handle1, ctx)
+      %{latest_offset: _offset2, snapshot_xmin: xmin2} = shape_status(@shape_handle2, ctx)
 
       storage =
         Support.TestStorage.wrap(ctx.storage, %{
-          @shape_id1 => [
+          @shape_handle1 => [
             {:mark_snapshot_as_started, []},
             {:set_snapshot_xmin, [xmin1]}
           ],
-          @shape_id2 => [
+          @shape_handle2 => [
             {:mark_snapshot_as_started, []},
             {:set_snapshot_xmin, [xmin2]}
           ]
@@ -119,24 +119,24 @@ defmodule Electric.Shapes.ConsumerTest do
         )
 
       consumers =
-        for {shape_id, shape} <- ctx.shapes do
+        for {shape_handle, shape} <- ctx.shapes do
           Mock.ShapeStatus
-          |> expect(:initialise_shape, 1, fn _, ^shape_id, _, _ -> :ok end)
-          |> expect(:set_snapshot_xmin, 1, fn _, ^shape_id, _ -> :ok end)
-          |> expect(:mark_snapshot_started, 1, fn _, ^shape_id -> :ok end)
+          |> expect(:initialise_shape, 1, fn _, ^shape_handle, _, _ -> :ok end)
+          |> expect(:set_snapshot_xmin, 1, fn _, ^shape_handle, _ -> :ok end)
+          |> expect(:mark_snapshot_started, 1, fn _, ^shape_handle -> :ok end)
           |> allow(self(), fn ->
-            Shapes.Consumer.whereis(ctx.electric_instance_id, ctx.tenant_id, shape_id)
+            Shapes.Consumer.whereis(ctx.electric_instance_id, ctx.tenant_id, shape_handle)
           end)
 
           Mock.ShapeCache
           |> allow(self(), fn ->
-            Shapes.Consumer.whereis(ctx.electric_instance_id, ctx.tenant_id, shape_id)
+            Shapes.Consumer.whereis(ctx.electric_instance_id, ctx.tenant_id, shape_handle)
           end)
 
           {:ok, consumer} =
             start_supervised(
               {Shapes.Consumer.Supervisor,
-               shape_id: shape_id,
+               shape_handle: shape_handle,
                shape: shape,
                electric_instance_id: ctx.electric_instance_id,
                inspector: {Mock.Inspector, []},
@@ -156,10 +156,10 @@ defmodule Electric.Shapes.ConsumerTest do
                  Electric.ShapeCache.LogChunker.default_chunk_size_threshold(),
                run_with_conn_fn: &run_with_conn_noop/2,
                prepare_tables_fn: &prepare_tables_fn/2},
-              id: {Shapes.Consumer.Supervisor, shape_id}
+              id: {Shapes.Consumer.Supervisor, shape_handle}
             )
 
-          assert_receive {Support.TestStorage, :set_shape_definition, ^shape_id, ^shape}
+          assert_receive {Support.TestStorage, :set_shape_definition, ^shape_handle, ^shape}
 
           consumer
         end
@@ -173,18 +173,18 @@ defmodule Electric.Shapes.ConsumerTest do
 
     test "appends to log when xid >= xmin", ctx do
       xid = 150
-      xmin = snapshot_xmin(@shape_id1, ctx)
-      last_log_offset = log_offset(@shape_id1, ctx)
-      lsn = lsn(@shape_id1, ctx)
+      xmin = snapshot_xmin(@shape_handle1, ctx)
+      last_log_offset = log_offset(@shape_handle1, ctx)
+      lsn = lsn(@shape_handle1, ctx)
 
       Mock.ShapeCache
-      |> expect(:update_shape_latest_offset, 2, fn @shape_id1, ^last_log_offset, _ -> :ok end)
-      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id1))
+      |> expect(:update_shape_latest_offset, 2, fn @shape_handle1, ^last_log_offset, _ -> :ok end)
+      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1))
 
       ref = make_ref()
 
       tenant_id = Access.fetch!(ctx, :tenant_id)
-      Registry.register(ctx.registry, {tenant_id, @shape_id1}, ref)
+      Registry.register(ctx.registry, {tenant_id, @shape_handle1}, ref)
 
       txn =
         %Transaction{xid: xmin, lsn: lsn, last_log_offset: last_log_offset}
@@ -196,37 +196,37 @@ defmodule Electric.Shapes.ConsumerTest do
 
       assert :ok = ShapeLogCollector.store_transaction(txn, ctx.producer)
       assert_receive {^ref, :new_changes, ^last_log_offset}, 1000
-      assert_receive {Support.TestStorage, :append_to_log!, @shape_id1, _}
-      refute_receive {Support.TestStorage, :append_to_log!, @shape_id2, _}
+      assert_receive {Support.TestStorage, :append_to_log!, @shape_handle1, _}
+      refute_receive {Support.TestStorage, :append_to_log!, @shape_handle2, _}
 
       txn2 = %{txn | xid: xid}
 
       assert :ok = ShapeLogCollector.store_transaction(txn2, ctx.producer)
       assert_receive {^ref, :new_changes, ^last_log_offset}, 1000
-      assert_receive {Support.TestStorage, :append_to_log!, @shape_id1, _}
-      refute_receive {Support.TestStorage, :append_to_log!, @shape_id2, _}
+      assert_receive {Support.TestStorage, :append_to_log!, @shape_handle1, _}
+      refute_receive {Support.TestStorage, :append_to_log!, @shape_handle2, _}
     end
 
     test "correctly writes only relevant changes to multiple shape logs", ctx do
-      last_log_offset = log_offset(@shape_id1, ctx)
-      lsn = lsn(@shape_id1, ctx)
+      last_log_offset = log_offset(@shape_handle1, ctx)
+      lsn = lsn(@shape_handle1, ctx)
 
       xid = 150
 
       Mock.ShapeCache
       |> expect(:update_shape_latest_offset, 2, fn
-        @shape_id1, ^last_log_offset, _ -> :ok
-        @shape_id2, ^last_log_offset, _ -> :ok
+        @shape_handle1, ^last_log_offset, _ -> :ok
+        @shape_handle2, ^last_log_offset, _ -> :ok
       end)
-      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id1))
-      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id2))
+      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1))
+      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2))
 
       ref1 = make_ref()
       ref2 = make_ref()
 
       tenant_id = Access.fetch!(ctx, :tenant_id)
-      Registry.register(ctx.registry, {tenant_id, @shape_id1}, ref1)
-      Registry.register(ctx.registry, {tenant_id, @shape_id2}, ref2)
+      Registry.register(ctx.registry, {tenant_id, @shape_handle1}, ref1)
+      Registry.register(ctx.registry, {tenant_id, @shape_handle2}, ref2)
 
       txn =
         %Transaction{xid: xid, lsn: lsn, last_log_offset: last_log_offset}
@@ -251,21 +251,21 @@ defmodule Electric.Shapes.ConsumerTest do
       assert_receive {^ref1, :new_changes, ^last_log_offset}, 1000
       assert_receive {^ref2, :new_changes, ^last_log_offset}, 1000
 
-      assert_receive {Support.TestStorage, :append_to_log!, @shape_id1,
+      assert_receive {Support.TestStorage, :append_to_log!, @shape_handle1,
                       [{_offset, serialized_record}]}
 
       assert %{"value" => %{"id" => "1"}} = Jason.decode!(serialized_record)
 
-      assert_receive {Support.TestStorage, :append_to_log!, @shape_id2,
+      assert_receive {Support.TestStorage, :append_to_log!, @shape_handle2,
                       [{_offset, serialized_record}]}
 
       assert %{"value" => %{"id" => "2"}} = Jason.decode!(serialized_record)
     end
 
     @tag shapes: %{
-           @shape_id1 =>
+           @shape_handle1 =>
              Shape.new!("public.test_table", where: "id != 1", inspector: {Mock.Inspector, []}),
-           @shape_id2 =>
+           @shape_handle2 =>
              Shape.new!("public.test_table", where: "id = 1", inspector: {Mock.Inspector, []})
          }
     test "doesn't append to log when change is irrelevant for active shapes", ctx do
@@ -273,12 +273,12 @@ defmodule Electric.Shapes.ConsumerTest do
       lsn = Lsn.from_string("0/10")
       last_log_offset = LogOffset.new(lsn, 0)
 
-      ref1 = Shapes.Consumer.monitor(ctx.electric_instance_id, ctx.tenant_id, @shape_id1)
-      ref2 = Shapes.Consumer.monitor(ctx.electric_instance_id, ctx.tenant_id, @shape_id2)
+      ref1 = Shapes.Consumer.monitor(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1)
+      ref2 = Shapes.Consumer.monitor(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2)
 
       Mock.ShapeCache
-      |> expect(:update_shape_latest_offset, fn @shape_id2, _offset, _ -> :ok end)
-      |> allow(self(), Shapes.Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id2))
+      |> expect(:update_shape_latest_offset, fn @shape_handle2, _offset, _ -> :ok end)
+      |> allow(self(), Shapes.Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2))
 
       txn =
         %Transaction{xid: xid, lsn: lsn, last_log_offset: last_log_offset}
@@ -290,8 +290,8 @@ defmodule Electric.Shapes.ConsumerTest do
 
       assert :ok = ShapeLogCollector.store_transaction(txn, ctx.producer)
 
-      assert_receive {Support.TestStorage, :append_to_log!, @shape_id2, _}
-      refute_receive {Support.TestStorage, :append_to_log!, @shape_id1, _}
+      assert_receive {Support.TestStorage, :append_to_log!, @shape_handle2, _}
+      refute_receive {Support.TestStorage, :append_to_log!, @shape_handle1, _}
 
       refute_receive {Shapes.Consumer, ^ref1, 150}
       assert_receive {Shapes.Consumer, ^ref2, 150}
@@ -303,8 +303,8 @@ defmodule Electric.Shapes.ConsumerTest do
       last_log_offset = LogOffset.new(lsn, 0)
 
       Mock.ShapeCache
-      |> expect(:handle_truncate, fn @shape_id1, _ -> :ok end)
-      |> allow(self(), Shapes.Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id1))
+      |> expect(:handle_truncate, fn @shape_handle1, _ -> :ok end)
+      |> allow(self(), Shapes.Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1))
 
       txn =
         %Transaction{xid: xid, lsn: lsn, last_log_offset: last_log_offset}
@@ -312,20 +312,20 @@ defmodule Electric.Shapes.ConsumerTest do
           relation: {"public", "test_table"}
         })
 
-      assert_consumer_shutdown(ctx.electric_instance_id, ctx.tenant_id, @shape_id1, fn ->
+      assert_consumer_shutdown(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1, fn ->
         assert :ok = ShapeLogCollector.store_transaction(txn, ctx.producer)
       end)
 
-      assert_receive {Support.TestStorage, :cleanup!, @shape_id1}
-      refute_receive {Support.TestStorage, :cleanup!, @shape_id2}
+      assert_receive {Support.TestStorage, :cleanup!, @shape_handle1}
+      refute_receive {Support.TestStorage, :cleanup!, @shape_handle2}
     end
 
-    defp assert_consumer_shutdown(electric_instance_id, tenant_id, shape_id, fun) do
+    defp assert_consumer_shutdown(electric_instance_id, tenant_id, shape_handle, fun) do
       monitors =
         for name <- [
-              Shapes.Consumer.Supervisor.name(electric_instance_id, tenant_id, shape_id),
-              Shapes.Consumer.name(electric_instance_id, tenant_id, shape_id),
-              Shapes.Consumer.Snapshotter.name(electric_instance_id, tenant_id, shape_id)
+              Shapes.Consumer.Supervisor.name(electric_instance_id, tenant_id, shape_handle),
+              Shapes.Consumer.name(electric_instance_id, tenant_id, shape_handle),
+              Shapes.Consumer.Snapshotter.name(electric_instance_id, tenant_id, shape_handle)
             ],
             pid = GenServer.whereis(name) do
           ref = Process.monitor(pid)
@@ -341,7 +341,7 @@ defmodule Electric.Shapes.ConsumerTest do
     end
 
     @tag shapes: %{
-           @shape_id1 =>
+           @shape_handle1 =>
              Shape.new!("test_table",
                where: "id LIKE 'test'",
                inspector: StubInspector.new([%{pk_position: 0, name: "id"}])
@@ -353,8 +353,8 @@ defmodule Electric.Shapes.ConsumerTest do
       last_log_offset = LogOffset.new(lsn, 0)
 
       Mock.ShapeCache
-      |> expect(:handle_truncate, fn @shape_id1, _ -> :ok end)
-      |> allow(self(), Shapes.Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id1))
+      |> expect(:handle_truncate, fn @shape_handle1, _ -> :ok end)
+      |> allow(self(), Shapes.Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1))
 
       txn =
         %Transaction{xid: xid, lsn: lsn, last_log_offset: last_log_offset}
@@ -362,13 +362,13 @@ defmodule Electric.Shapes.ConsumerTest do
           relation: {"public", "test_table"}
         })
 
-      assert_consumer_shutdown(ctx.electric_instance_id, ctx.tenant_id, @shape_id1, fn ->
+      assert_consumer_shutdown(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1, fn ->
         assert :ok = ShapeLogCollector.store_transaction(txn, ctx.producer)
       end)
 
-      refute_receive {Support.TestStorage, :append_to_log!, @shape_id1, _}
-      assert_receive {Support.TestStorage, :cleanup!, @shape_id1}
-      refute_receive {Support.TestStorage, :cleanup!, @shape_id2}
+      refute_receive {Support.TestStorage, :append_to_log!, @shape_handle1, _}
+      assert_receive {Support.TestStorage, :cleanup!, @shape_handle1}
+      refute_receive {Support.TestStorage, :cleanup!, @shape_handle2}
     end
 
     test "notifies listeners of new changes", ctx do
@@ -377,12 +377,12 @@ defmodule Electric.Shapes.ConsumerTest do
       last_log_offset = LogOffset.new(lsn, 0)
 
       Mock.ShapeCache
-      |> expect(:update_shape_latest_offset, fn @shape_id1, ^last_log_offset, _ -> :ok end)
-      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id1))
+      |> expect(:update_shape_latest_offset, fn @shape_handle1, ^last_log_offset, _ -> :ok end)
+      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1))
 
       ref = make_ref()
       tenant_id = Access.fetch!(ctx, :tenant_id)
-      Registry.register(ctx.registry, {tenant_id, @shape_id1}, ref)
+      Registry.register(ctx.registry, {tenant_id, @shape_handle1}, ref)
 
       txn =
         %Transaction{xid: xid, lsn: lsn, last_log_offset: last_log_offset}
@@ -393,7 +393,7 @@ defmodule Electric.Shapes.ConsumerTest do
         })
 
       assert :ok = ShapeLogCollector.store_transaction(txn, ctx.producer)
-      assert_receive {Support.TestStorage, :append_to_log!, @shape_id1, _}
+      assert_receive {Support.TestStorage, :append_to_log!, @shape_handle1, _}
       assert_receive {^ref, :new_changes, ^last_log_offset}, 1000
     end
 
@@ -408,19 +408,23 @@ defmodule Electric.Shapes.ConsumerTest do
 
       ref1 =
         Process.monitor(
-          GenServer.whereis(Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id1))
+          GenServer.whereis(
+            Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1)
+          )
         )
 
       ref2 =
         Process.monitor(
-          GenServer.whereis(Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id2))
+          GenServer.whereis(
+            Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2)
+          )
         )
 
       Mock.ShapeStatus
       |> expect(:remove_shape, 0, fn _, _ -> :ok end)
-      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id1))
+      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1))
       |> expect(:remove_shape, 0, fn _, _ -> :ok end)
-      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id2))
+      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2))
 
       assert :ok = ShapeLogCollector.handle_relation_msg(rel, ctx.producer)
 
@@ -440,26 +444,30 @@ defmodule Electric.Shapes.ConsumerTest do
 
       ref1 =
         Process.monitor(
-          GenServer.whereis(Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id1))
+          GenServer.whereis(
+            Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1)
+          )
         )
 
       ref2 =
         Process.monitor(
-          GenServer.whereis(Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id2))
+          GenServer.whereis(
+            Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2)
+          )
         )
 
       # also cleans up inspector cache and shape status cache
       Mock.Inspector
       |> expect(:clean, 1, fn _, _ -> true end)
-      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id1))
+      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1))
       |> expect(:clean, 0, fn _, _ -> true end)
-      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id2))
+      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2))
 
       Mock.ShapeStatus
       |> expect(:remove_shape, 1, fn _, _ -> :ok end)
-      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id1))
+      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1))
       |> expect(:remove_shape, 0, fn _, _ -> :ok end)
-      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id2))
+      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2))
 
       assert :ok = ShapeLogCollector.handle_relation_msg(rel, ctx.producer)
 
@@ -482,26 +490,30 @@ defmodule Electric.Shapes.ConsumerTest do
 
       ref1 =
         Process.monitor(
-          GenServer.whereis(Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id1))
+          GenServer.whereis(
+            Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1)
+          )
         )
 
       ref2 =
         Process.monitor(
-          GenServer.whereis(Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id2))
+          GenServer.whereis(
+            Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2)
+          )
         )
 
       # also cleans up inspector cache and shape status cache
       Mock.Inspector
       |> expect(:clean, 1, fn _, _ -> true end)
-      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id1))
+      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1))
       |> expect(:clean, 0, fn _, _ -> true end)
-      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id2))
+      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2))
 
       Mock.ShapeStatus
       |> expect(:remove_shape, 1, fn _, _ -> :ok end)
-      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id1))
+      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1))
       |> expect(:remove_shape, 0, fn _, _ -> :ok end)
-      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_id2))
+      |> allow(self(), Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2))
 
       assert :ok = ShapeLogCollector.handle_relation_msg(rel, ctx.producer)
 
@@ -534,11 +546,11 @@ defmodule Electric.Shapes.ConsumerTest do
           log_producer: ctx.shape_log_collector,
           run_with_conn_fn: &run_with_conn_noop/2,
           prepare_tables_fn: fn _, _ -> :ok end,
-          create_snapshot_fn: fn parent, shape_id, _shape, _, storage ->
+          create_snapshot_fn: fn parent, shape_handle, _shape, _, storage ->
             if is_integer(snapshot_delay), do: Process.sleep(snapshot_delay)
-            GenServer.cast(parent, {:snapshot_xmin_known, shape_id, 10})
+            GenServer.cast(parent, {:snapshot_xmin_known, shape_handle, 10})
             Storage.make_new_snapshot!([["test"]], storage)
-            GenServer.cast(parent, {:snapshot_started, shape_id})
+            GenServer.cast(parent, {:snapshot_started, shape_handle})
           end
         )
 
@@ -554,17 +566,17 @@ defmodule Electric.Shapes.ConsumerTest do
         shape_cache_opts: shape_cache_opts
       } = ctx
 
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape1, shape_cache_opts)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape1, shape_cache_opts)
 
       :started =
         ShapeCache.await_snapshot_start(
-          shape_id,
+          shape_handle,
           shape_cache_opts
         )
 
       lsn = Lsn.from_integer(10)
 
-      ref = Shapes.Consumer.monitor(ctx.electric_instance_id, ctx.tenant_id, shape_id)
+      ref = Shapes.Consumer.monitor(ctx.electric_instance_id, ctx.tenant_id, shape_handle)
 
       txn =
         %Transaction{xid: 11, lsn: lsn, last_log_offset: LogOffset.new(lsn, 2)}
@@ -583,7 +595,7 @@ defmodule Electric.Shapes.ConsumerTest do
 
       assert_receive {Shapes.Consumer, ^ref, 11}
 
-      shape_storage = Storage.for_shape(shape_id, ctx.tenant_id, storage)
+      shape_storage = Storage.for_shape(shape_handle, ctx.tenant_id, storage)
 
       assert [op1, op2] =
                Storage.get_log_stream(LogOffset.before_all(), shape_storage)
@@ -606,12 +618,12 @@ defmodule Electric.Shapes.ConsumerTest do
         shape_cache_opts: shape_cache_opts
       } = ctx
 
-      {shape_id, _} = ShapeCache.get_or_create_shape_id(@shape1, shape_cache_opts)
+      {shape_handle, _} = ShapeCache.get_or_create_shape_handle(@shape1, shape_cache_opts)
 
       lsn1 = Lsn.from_integer(9)
       lsn2 = Lsn.from_integer(10)
 
-      ref = Shapes.Consumer.monitor(ctx.electric_instance_id, ctx.tenant_id, shape_id)
+      ref = Shapes.Consumer.monitor(ctx.electric_instance_id, ctx.tenant_id, shape_handle)
 
       txn1 =
         %Transaction{xid: 9, lsn: lsn1, last_log_offset: LogOffset.new(lsn1, 2)}
@@ -642,11 +654,11 @@ defmodule Electric.Shapes.ConsumerTest do
       assert :ok = ShapeLogCollector.store_transaction(txn1, ctx.producer)
       assert :ok = ShapeLogCollector.store_transaction(txn2, ctx.producer)
 
-      :started = ShapeCache.await_snapshot_start(shape_id, shape_cache_opts)
+      :started = ShapeCache.await_snapshot_start(shape_handle, shape_cache_opts)
 
       assert_receive {Shapes.Consumer, ^ref, 10}
 
-      shape_storage = Storage.for_shape(shape_id, ctx.tenant_id, storage)
+      shape_storage = Storage.for_shape(shape_handle, ctx.tenant_id, storage)
 
       assert [_op1, _op2] =
                Storage.get_log_stream(LogOffset.before_all(), shape_storage)
diff --git a/packages/sync-service/test/support/test_storage.ex b/packages/sync-service/test/support/test_storage.ex
index af09010312..66453dfd42 100644
--- a/packages/sync-service/test/support/test_storage.ex
+++ b/packages/sync-service/test/support/test_storage.ex
@@ -5,7 +5,7 @@ defmodule Support.TestStorage do
 
   This is useful when mocking the storage doesn't work for some reason.
 
-  You can initialise the backing storage for a given shape id by passing a list
+  You can initialise the backing storage for a given shape handle by passing a list
   of `{function_name :: atom(), args :: []}` calls to make against it after the
   `initialise/1` call.
 
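(A minimal sketch of how the consumer tests above exercise this hook via `Support.TestStorage.wrap/2`; the handle name and xmin value here are illustrative, and `storage` is assumed to be an existing storage configuration.)

```elixir
# Wrap the real storage and queue per-handle calls that TestStorage
# replays against the backing storage after initialise/1 runs.
storage =
  Support.TestStorage.wrap(storage, %{
    "my-shape-handle" => [
      {:mark_snapshot_as_started, []},
      {:set_snapshot_xmin, [100]}
    ]
  })
```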
@@ -38,20 +38,20 @@ defmodule Support.TestStorage do
   end
 
   @impl Electric.ShapeCache.Storage
-  def for_shape(shape_id, tenant_id, {parent, init, storage}) do
-    send(parent, {__MODULE__, :for_shape, shape_id, tenant_id})
-    shape_init = Map.get(init, shape_id, [])
-    {parent, shape_id, shape_init, Storage.for_shape(shape_id, tenant_id, storage)}
+  def for_shape(shape_handle, tenant_id, {parent, init, storage}) do
+    send(parent, {__MODULE__, :for_shape, shape_handle, tenant_id})
+    shape_init = Map.get(init, shape_handle, [])
+    {parent, shape_handle, shape_init, Storage.for_shape(shape_handle, tenant_id, storage)}
   end
 
   @impl Electric.ShapeCache.Storage
-  def start_link({_parent, _shape_id, _shape_init, storage}) do
+  def start_link({_parent, _shape_handle, _shape_init, storage}) do
     Storage.start_link(storage)
   end
 
   @impl Electric.ShapeCache.Storage
-  def initialise({parent, shape_id, init, storage}) do
-    send(parent, {__MODULE__, :initialise, shape_id})
+  def initialise({parent, shape_handle, init, storage}) do
+    send(parent, {__MODULE__, :initialise, shape_handle})
 
     {module, opts} = storage
 
@@ -65,8 +65,8 @@ defmodule Support.TestStorage do
   end
 
   @impl Electric.ShapeCache.Storage
-  def set_shape_definition(shape, {parent, shape_id, _, storage}) do
-    send(parent, {__MODULE__, :set_shape_definition, shape_id, shape})
+  def set_shape_definition(shape, {parent, shape_handle, _, storage}) do
+    send(parent, {__MODULE__, :set_shape_definition, shape_handle, shape})
     Storage.set_shape_definition(shape, storage)
   end
 
@@ -77,62 +77,62 @@ defmodule Support.TestStorage do
   end
 
   @impl Electric.ShapeCache.Storage
-  def get_current_position({parent, shape_id, _, storage}) do
-    send(parent, {__MODULE__, :get_current_position, shape_id})
+  def get_current_position({parent, shape_handle, _, storage}) do
+    send(parent, {__MODULE__, :get_current_position, shape_handle})
     Storage.get_current_position(storage)
   end
 
   @impl Electric.ShapeCache.Storage
-  def set_snapshot_xmin(xmin, {parent, shape_id, _, storage}) do
-    send(parent, {__MODULE__, :set_snapshot_xmin, shape_id, xmin})
+  def set_snapshot_xmin(xmin, {parent, shape_handle, _, storage}) do
+    send(parent, {__MODULE__, :set_snapshot_xmin, shape_handle, xmin})
     Storage.set_snapshot_xmin(xmin, storage)
   end
 
   @impl Electric.ShapeCache.Storage
-  def snapshot_started?({parent, shape_id, _, storage}) do
-    send(parent, {__MODULE__, :snapshot_started?, shape_id})
+  def snapshot_started?({parent, shape_handle, _, storage}) do
+    send(parent, {__MODULE__, :snapshot_started?, shape_handle})
     Storage.snapshot_started?(storage)
   end
 
   @impl Electric.ShapeCache.Storage
-  def get_snapshot({parent, shape_id, _, storage}) do
-    send(parent, {__MODULE__, :get_snapshot, shape_id})
+  def get_snapshot({parent, shape_handle, _, storage}) do
+    send(parent, {__MODULE__, :get_snapshot, shape_handle})
     Storage.get_snapshot(storage)
   end
 
   @impl Electric.ShapeCache.Storage
-  def get_log_stream(offset, max_offset, {parent, shape_id, _, storage}) do
-    send(parent, {__MODULE__, :get_log_stream, shape_id, offset, max_offset})
+  def get_log_stream(offset, max_offset, {parent, shape_handle, _, storage}) do
+    send(parent, {__MODULE__, :get_log_stream, shape_handle, offset, max_offset})
     Storage.get_log_stream(offset, max_offset, storage)
   end
 
   @impl Electric.ShapeCache.Storage
-  def get_chunk_end_log_offset(offset, {parent, shape_id, _, storage}) do
-    send(parent, {__MODULE__, :get_chunk_end_log_offset, shape_id, offset})
+  def get_chunk_end_log_offset(offset, {parent, shape_handle, _, storage}) do
+    send(parent, {__MODULE__, :get_chunk_end_log_offset, shape_handle, offset})
     Storage.get_chunk_end_log_offset(offset, storage)
   end
 
   @impl Electric.ShapeCache.Storage
-  def make_new_snapshot!(data_stream, {parent, shape_id, _, storage}) do
-    send(parent, {__MODULE__, :make_new_snapshot!, shape_id, data_stream})
+  def make_new_snapshot!(data_stream, {parent, shape_handle, _, storage}) do
+    send(parent, {__MODULE__, :make_new_snapshot!, shape_handle, data_stream})
     Storage.make_new_snapshot!(data_stream, storage)
   end
 
   @impl Electric.ShapeCache.Storage
-  def mark_snapshot_as_started({parent, shape_id, _, storage}) do
-    send(parent, {__MODULE__, :mark_snapshot_as_started, shape_id})
+  def mark_snapshot_as_started({parent, shape_handle, _, storage}) do
+    send(parent, {__MODULE__, :mark_snapshot_as_started, shape_handle})
     Storage.mark_snapshot_as_started(storage)
   end
 
   @impl Electric.ShapeCache.Storage
-  def append_to_log!(log_items, {parent, shape_id, _, storage}) do
-    send(parent, {__MODULE__, :append_to_log!, shape_id, log_items})
+  def append_to_log!(log_items, {parent, shape_handle, _, storage}) do
+    send(parent, {__MODULE__, :append_to_log!, shape_handle, log_items})
     Storage.append_to_log!(log_items, storage)
   end
 
   @impl Electric.ShapeCache.Storage
-  def cleanup!({parent, shape_id, _, storage}) do
-    send(parent, {__MODULE__, :cleanup!, shape_id})
+  def cleanup!({parent, shape_handle, _, storage}) do
+    send(parent, {__MODULE__, :cleanup!, shape_handle})
     Storage.cleanup!(storage)
   end
 end
diff --git a/packages/typescript-client/CHANGELOG.md b/packages/typescript-client/CHANGELOG.md
index f58cbd6274..4e8b47cd39 100644
--- a/packages/typescript-client/CHANGELOG.md
+++ b/packages/typescript-client/CHANGELOG.md
@@ -164,7 +164,7 @@
 
 ### Patch Changes
 
-- 958cc0c: Respect 409 errors by restarting the stream with the new `shape_id`.
+- 958cc0c: Respect 409 errors by restarting the stream with the new `shape_handle`.
 
 ## 0.0.3
 
diff --git a/packages/typescript-client/src/client.ts b/packages/typescript-client/src/client.ts
index db4e0cfc25..9d523d16f3 100644
--- a/packages/typescript-client/src/client.ts
+++ b/packages/typescript-client/src/client.ts
@@ -22,8 +22,8 @@ import {
   COLUMNS_QUERY_PARAM,
   LIVE_QUERY_PARAM,
   OFFSET_QUERY_PARAM,
-  SHAPE_ID_HEADER,
-  SHAPE_ID_QUERY_PARAM,
+  SHAPE_HANDLE_HEADER,
+  SHAPE_HANDLE_QUERY_PARAM,
   SHAPE_SCHEMA_HEADER,
   WHERE_QUERY_PARAM,
   DATABASE_ID_QUERY_PARAM,
@@ -67,7 +67,7 @@ export interface ShapeStreamOptions {
    * will handle this automatically. A common scenario where you might pass an offset
    * is if you're maintaining a local cache of the log. If you've gone offline
    * and are re-starting a ShapeStream to catch-up to the latest state of the Shape,
-   * you'd pass in the last offset and shapeId you'd seen from the Electric server
+   * you'd pass in the last offset and shapeHandle you'd seen from the Electric server
    * so it knows at what point in the shape to catch you up from.
    */
   offset?: Offset
@@ -75,7 +75,7 @@ export interface ShapeStreamOptions {
    * Similar to `offset`, this isn't typically used unless you're maintaining
    * a cache of the shape log.
    */
-  shapeId?: string
+  shapeHandle?: string
   backoffOptions?: BackoffOptions
 
   /**
@@ -112,7 +112,7 @@ export interface ShapeStreamInterface = Row> {
   isConnected(): boolean
 
   isUpToDate: boolean
-  shapeId?: string
+  shapeHandle?: string
 }
 
 /**
@@ -171,7 +171,7 @@ export class ShapeStream = Row>
   #lastSyncedAt?: number // unix time
   #isUpToDate: boolean = false
   #connected: boolean = false
-  #shapeId?: string
+  #shapeHandle?: string
   #databaseId?: string
   #schema?: Schema
   #error?: unknown
@@ -181,7 +181,7 @@ export class ShapeStream = Row>
     this.options = { subscribe: true, ...options }
     this.#lastOffset = this.options.offset ?? `-1`
     this.#liveCacheBuster = ``
-    this.#shapeId = this.options.shapeId
+    this.#shapeHandle = this.options.shapeHandle
     this.#databaseId = this.options.databaseId
     this.#messageParser = new MessageParser(options.parser)
 
@@ -202,8 +202,8 @@ export class ShapeStream = Row>
     this.start()
   }
 
-  get shapeId() {
-    return this.#shapeId
+  get shapeHandle() {
+    return this.#shapeHandle
   }
 
   get isUpToDate() {
@@ -239,9 +239,9 @@ export class ShapeStream = Row>
           )
         }
 
-        if (this.#shapeId) {
+        if (this.#shapeHandle) {
           // This should probably be a header for better cache breaking?
-          fetchUrl.searchParams.set(SHAPE_ID_QUERY_PARAM, this.#shapeId!)
+          fetchUrl.searchParams.set(SHAPE_HANDLE_QUERY_PARAM, this.#shapeHandle!)
         }
 
         if (this.#databaseId) {
@@ -260,9 +260,9 @@ export class ShapeStream = Row>
           if (!(e instanceof FetchError)) throw e // should never happen
           if (e.status == 409) {
             // Upon receiving a 409, we should start from scratch
-            // with the newly provided shape ID
-            const newShapeId = e.headers[SHAPE_ID_HEADER]
-            this.#reset(newShapeId)
+            // with the newly provided shape handle
+            const newShapeHandle = e.headers[SHAPE_HANDLE_HEADER]
+            this.#reset(newShapeHandle)
             await this.#publish(e.json as Message[])
             continue
           } else if (e.status >= 400 && e.status < 500) {
@@ -277,9 +277,9 @@ export class ShapeStream = Row>
         }
 
         const { headers, status } = response
-        const shapeId = headers.get(SHAPE_ID_HEADER)
-        if (shapeId) {
-          this.#shapeId = shapeId
+        const shapeHandle = headers.get(SHAPE_HANDLE_HEADER)
+        if (shapeHandle) {
+          this.#shapeHandle = shapeHandle
         }
 
         const lastOffset = headers.get(CHUNK_LAST_OFFSET_HEADER)
@@ -418,12 +418,12 @@ export class ShapeStream = Row>
 
   /**
    * Resets the state of the stream, optionally with a provided
-   * shape ID
+   * shape handle
    */
-  #reset(shapeId?: string) {
+  #reset(shapeHandle?: string) {
     this.#lastOffset = `-1`
     this.#liveCacheBuster = ``
-    this.#shapeId = shapeId
+    this.#shapeHandle = shapeHandle
     this.#isUpToDate = false
     this.#connected = false
     this.#schema = undefined
@@ -446,10 +446,10 @@ function validateOptions(options: Partial>): void {
   if (
     options.offset !== undefined &&
     options.offset !== `-1` &&
-    !options.shapeId
+    !options.shapeHandle
   ) {
     throw new Error(
-      `shapeId is required if this isn't an initial fetch (i.e. offset > -1)`
+      `shapeHandle is required if this isn't an initial fetch (i.e. offset > -1)`
     )
   }
   return
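
For context on the renamed options above, here is a minimal resume sketch, assuming the `@electric-sql/client` API as modified in this patch; the offset and handle values are illustrative placeholders only:

```ts
import { ShapeStream } from '@electric-sql/client'

// Resume a previously synced shape log. The offset and shapeHandle values
// below stand in for values persisted from an earlier session
// (e.g. read back from local storage); they are not real identifiers.
const stream = new ShapeStream({
  url: `http://localhost:3000/v1/shape`,
  table: `foo`,
  offset: `0_0`, // last offset seen from the server
  shapeHandle: `3833821-1721812114261`, // handle returned alongside it
})

stream.subscribe((messages) => {
  // Apply the catch-up messages to local state.
  console.log(messages)
})
```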
diff --git a/packages/typescript-client/src/constants.ts b/packages/typescript-client/src/constants.ts
index 45a0d86f33..6c5fb8833c 100644
--- a/packages/typescript-client/src/constants.ts
+++ b/packages/typescript-client/src/constants.ts
@@ -1,10 +1,10 @@
-export const SHAPE_ID_HEADER = `electric-shape-id`
+export const SHAPE_HANDLE_HEADER = `electric-handle`
 export const LIVE_CACHE_BUSTER_HEADER = `electric-next-cursor`
 export const LIVE_CACHE_BUSTER_QUERY_PARAM = `cursor`
 export const CHUNK_LAST_OFFSET_HEADER = `electric-chunk-last-offset`
 export const CHUNK_UP_TO_DATE_HEADER = `electric-chunk-up-to-date`
 export const SHAPE_SCHEMA_HEADER = `electric-schema`
-export const SHAPE_ID_QUERY_PARAM = `shape_id`
+export const SHAPE_HANDLE_QUERY_PARAM = `shape_handle`
 export const DATABASE_ID_QUERY_PARAM = `database_id`
 export const OFFSET_QUERY_PARAM = `offset`
 export const WHERE_QUERY_PARAM = `where`
diff --git a/packages/typescript-client/src/fetch.ts b/packages/typescript-client/src/fetch.ts
index 1286898176..df226c0257 100644
--- a/packages/typescript-client/src/fetch.ts
+++ b/packages/typescript-client/src/fetch.ts
@@ -3,8 +3,8 @@ import {
   CHUNK_UP_TO_DATE_HEADER,
   LIVE_QUERY_PARAM,
   OFFSET_QUERY_PARAM,
-  SHAPE_ID_HEADER,
-  SHAPE_ID_QUERY_PARAM,
+  SHAPE_HANDLE_HEADER,
+  SHAPE_HANDLE_QUERY_PARAM,
 } from './constants'
 import { FetchError, FetchBackoffAbortError } from './error'
 
@@ -245,13 +245,13 @@ class PrefetchQueue {
  * Generate the next chunk's URL if the url and response are valid
  */
 function getNextChunkUrl(url: string, res: Response): string | void {
-  const shapeId = res.headers.get(SHAPE_ID_HEADER)
+  const shapeHandle = res.headers.get(SHAPE_HANDLE_HEADER)
   const lastOffset = res.headers.get(CHUNK_LAST_OFFSET_HEADER)
   const isUpToDate = res.headers.has(CHUNK_UP_TO_DATE_HEADER)
 
-  // only prefetch if shape ID and offset for next chunk are available, and
+  // only prefetch if shape handle and offset for next chunk are available, and
   // response is not already up-to-date
-  if (!shapeId || !lastOffset || isUpToDate) return
+  if (!shapeHandle || !lastOffset || isUpToDate) return
 
   const nextUrl = new URL(url)
 
@@ -259,7 +259,7 @@ function getNextChunkUrl(url: string, res: Response): string | void {
   // potentially miss more recent data
   if (nextUrl.searchParams.has(LIVE_QUERY_PARAM)) return
 
-  nextUrl.searchParams.set(SHAPE_ID_QUERY_PARAM, shapeId)
+  nextUrl.searchParams.set(SHAPE_HANDLE_QUERY_PARAM, shapeHandle)
   nextUrl.searchParams.set(OFFSET_QUERY_PARAM, lastOffset)
   return nextUrl.toString()
 }
diff --git a/packages/typescript-client/test/cache.test.ts b/packages/typescript-client/test/cache.test.ts
index 9db79c2f1a..d26e3538d7 100644
--- a/packages/typescript-client/test/cache.test.ts
+++ b/packages/typescript-client/test/cache.test.ts
@@ -3,7 +3,7 @@ import { describe, expect, assert, inject } from 'vitest'
 import { exec } from 'child_process'
 import { setTimeout as sleep } from 'node:timers/promises'
 import { testWithIssuesTable } from './support/test-context'
-import { CHUNK_LAST_OFFSET_HEADER, SHAPE_ID_HEADER } from '../src/constants'
+import { CHUNK_LAST_OFFSET_HEADER, SHAPE_HANDLE_HEADER } from '../src/constants'
 
 // FIXME: pull from environment?
 const maxAge = 1 // seconds
@@ -84,7 +84,7 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => {
     const searchParams = new URLSearchParams({
       table: issuesTableUrl,
       offset: initialRes.headers.get(`electric-chunk-last-offset`)!,
-      shape_id: initialRes.headers.get(`electric-shape-id`)!,
+      shape_handle: initialRes.headers.get(`electric-handle`)!,
       live: `true`,
     })
 
@@ -137,8 +137,8 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => {
       {}
     )
     const lastOffset = originalRes.headers.get(CHUNK_LAST_OFFSET_HEADER)
-    const shapeId = originalRes.headers.get(SHAPE_ID_HEADER)
-    const urlToTest = `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=${lastOffset}&shape_id=${shapeId}`
+    const shapeHandle = originalRes.headers.get(SHAPE_HANDLE_HEADER)
+    const urlToTest = `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=${lastOffset}&handle=${shapeHandle}`
 
     // Make a first request such that response is cached
     const originalUpToDateRes = await fetch(urlToTest, {})
@@ -171,8 +171,13 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => {
       {}
     )
     const lastOffset = originalRes.headers.get(CHUNK_LAST_OFFSET_HEADER)
-    const shapeId = originalRes.headers.get(SHAPE_ID_HEADER)
-    const urlToTest = `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=${lastOffset}&shape_id=${shapeId}`
+<<<<<<< HEAD
+    const shapeHandle = originalRes.headers.get(SHAPE_HANDLE_HEADER)
+    const urlToTest = `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=${lastOffset}&handle=${shapeHandle}`
+=======
+    const shapeId = originalRes.headers.get(SHAPE_HANDLE_HEADER)
+    const urlToTest = `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=${lastOffset}&shape_handle=${shapeId}`
+>>>>>>> 667e3032 (Fix more references to shape id)
 
     // Make a first request such that response is cached
     const originalUpToDateRes = await fetch(urlToTest, {})
@@ -214,9 +219,9 @@ describe(`HTTP Initial Data Caching`, { timeout: 30000 }, () => {
       {}
     )
     expect(client1Res.status).toBe(200)
-    const originalShapeId =
-      client1Res.headers.get(`electric-shape-id`) ?? undefined
-    assert(originalShapeId, `Should have shape ID`)
+    const originalShapeHandle =
+      client1Res.headers.get(`electric-handle`) ?? undefined
+    assert(originalShapeHandle, `Should have shape handle`)
     expect(getCacheStatus(client1Res)).toBe(CacheStatus.MISS)
     //const messages = client1Res.status === 204 ? [] : await client1Res.json()
 
@@ -227,12 +232,12 @@ describe(`HTTP Initial Data Caching`, { timeout: 30000 }, () => {
       {}
     )
     expect(client2Res.status).toBe(200)
-    const shapeId2 = client2Res.headers.get(`electric-shape-id`) ?? undefined
+    const shapeHandle2 = client2Res.headers.get(`electric-handle`) ?? undefined
 
     expect(
-      originalShapeId,
-      `Shape ID changed but expected it to stay the same`
-    ).toBe(shapeId2)
+      originalShapeHandle,
+      `Shape handle changed but expected it to stay the same`
+    ).toBe(shapeHandle2)
 
     expect(getCacheStatus(client2Res)).toBe(CacheStatus.HIT)
 
@@ -240,13 +245,13 @@ describe(`HTTP Initial Data Caching`, { timeout: 30000 }, () => {
     assert(latestOffset, `latestOffset should be defined`)
 
     // Now GC the shape
-    await clearIssuesShape(originalShapeId)
+    await clearIssuesShape(originalShapeHandle)
 
     // Now try to go live
     // should tell you to go back to initial sync
     // because the shape is out of scope
     const liveRes = await fetch(
-      `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=${latestOffset}&shape_id=${originalShapeId}&live`,
+      `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=${latestOffset}&handle=${originalShapeHandle}&live`,
       {}
     )
     expect(liveRes.status).toBe(409)
@@ -260,22 +265,22 @@ describe(`HTTP Initial Data Caching`, { timeout: 30000 }, () => {
 
     expect(newCacheIgnoredSyncRes.status).toBe(200)
     expect(getCacheStatus(newCacheIgnoredSyncRes)).toBe(CacheStatus.MISS)
-    const cacheBustedShapeId =
-      newCacheIgnoredSyncRes.headers.get(`electric-shape-id`)
-    assert(cacheBustedShapeId)
-    expect(cacheBustedShapeId).not.toBe(originalShapeId)
+    const cacheBustedShapeHandle =
+      newCacheIgnoredSyncRes.headers.get(`electric-handle`)
+    assert(cacheBustedShapeHandle)
+    expect(cacheBustedShapeHandle).not.toBe(originalShapeHandle)
 
-    // Then try do that and check that we get new shape id
+    // Then try do that and check that we get new shape handle
     const newInitialSyncRes = await fetch(
       `${proxyCacheBaseUrl}${redirectLocation}`,
       {}
     )
-    const cachedShapeId =
-      newInitialSyncRes.headers.get(`electric-shape-id`) ?? undefined
+    const cachedShapeHandle =
+      newInitialSyncRes.headers.get(`electric-handle`) ?? undefined
     expect(newInitialSyncRes.status).toBe(200)
     expect(getCacheStatus(newInitialSyncRes)).toBe(CacheStatus.HIT)
-    expect(cachedShapeId, `Got old shape id that is out of scope`).not.toBe(
-      originalShapeId
+    expect(cachedShapeHandle, `Got old shape handle that is out of scope`).not.toBe(
+      originalShapeHandle
     )
   })
 })
diff --git a/packages/typescript-client/test/client.test.ts b/packages/typescript-client/test/client.test.ts
index e16dbb97c4..6f19f0a1d1 100644
--- a/packages/typescript-client/test/client.test.ts
+++ b/packages/typescript-client/test/client.test.ts
@@ -148,7 +148,7 @@ describe(`Shape`, () => {
         await deleteIssue({ id: id1, title: `foo1` })
         await insertIssues({ id: id2, title: `foo2` })
         await sleep(100)
-        await clearIssuesShape(shapeStream.shapeId)
+        await clearIssuesShape(shapeStream.shapeHandle)
 
         rotationTime = Date.now()
       }
diff --git a/packages/typescript-client/test/fetch.test.ts b/packages/typescript-client/test/fetch.test.ts
index ffbdf14050..d9557f4e0a 100644
--- a/packages/typescript-client/test/fetch.test.ts
+++ b/packages/typescript-client/test/fetch.test.ts
@@ -6,7 +6,7 @@ import {
   BackoffDefaults,
   createFetchWithChunkBuffer,
 } from '../src/fetch'
-import { CHUNK_LAST_OFFSET_HEADER, SHAPE_ID_HEADER } from '../src/constants'
+import { CHUNK_LAST_OFFSET_HEADER, SHAPE_HANDLE_HEADER } from '../src/constants'
 
 describe(`createFetchWithBackoff`, () => {
   const initialDelay = 10
@@ -202,7 +202,7 @@ describe(`createFetchWithChunkBuffer`, () => {
     const initialResponse = new Response(`initial chunk`, {
       status: 200,
       headers: responseHeaders({
-        [SHAPE_ID_HEADER]: `123`,
+        [SHAPE_HANDLE_HEADER]: `123`,
         [CHUNK_LAST_OFFSET_HEADER]: `456`,
       }),
     })
@@ -218,7 +218,7 @@ describe(`createFetchWithChunkBuffer`, () => {
     expect(result).toBe(initialResponse)
 
     // Check if the next chunk was prefetched
-    const nextUrl = `${baseUrl}&shape_id=123&offset=456`
+    const nextUrl = `${baseUrl}&handle=123&offset=456`
     expect(mockFetch).toHaveBeenCalledWith(nextUrl, expect.anything())
   })
 
@@ -235,7 +235,7 @@ describe(`createFetchWithChunkBuffer`, () => {
         new Response(`next chunk`, {
           status: 200,
           headers: responseHeaders({
-            [SHAPE_ID_HEADER]: `123`,
+            [SHAPE_HANDLE_HEADER]: `123`,
             [CHUNK_LAST_OFFSET_HEADER]: `${idx}`,
           }),
         })
@@ -250,23 +250,23 @@ describe(`createFetchWithChunkBuffer`, () => {
     expect(mockFetch).toHaveBeenCalledTimes(1 + maxPrefetchNum)
     expect(mockFetch).toHaveBeenNthCalledWith(
       2,
-      `${baseUrl}&shape_id=123&offset=0`,
+      `${baseUrl}&handle=123&offset=0`,
       expect.anything()
     )
     expect(mockFetch).toHaveBeenNthCalledWith(
       3,
-      `${baseUrl}&shape_id=123&offset=1`,
+      `${baseUrl}&handle=123&offset=1`,
       expect.anything()
     )
 
     // Second request consumes one of the prefetched responses and
     // next one fires up
-    await fetchWrapper(`${baseUrl}&shape_id=123&offset=0`)
+    await fetchWrapper(`${baseUrl}&handle=123&offset=0`)
     await sleep()
     expect(mockFetch).toHaveBeenCalledTimes(1 + maxPrefetchNum + 1)
     expect(mockFetch).toHaveBeenNthCalledWith(
       4,
-      `${baseUrl}&shape_id=123&offset=2`,
+      `${baseUrl}&handle=123&offset=2`,
       expect.anything()
     )
   })
@@ -276,7 +276,7 @@ describe(`createFetchWithChunkBuffer`, () => {
     const initialResponse = new Response(`initial chunk`, {
       status: 200,
       headers: responseHeaders({
-        [SHAPE_ID_HEADER]: `123`,
+        [SHAPE_HANDLE_HEADER]: `123`,
         [CHUNK_LAST_OFFSET_HEADER]: `456`,
       }),
     })
@@ -284,7 +284,7 @@ describe(`createFetchWithChunkBuffer`, () => {
     const nextResponse = new Response(`next chunk`, {
       status: 200,
       headers: responseHeaders({
-        [SHAPE_ID_HEADER]: `123`,
+        [SHAPE_HANDLE_HEADER]: `123`,
         [CHUNK_LAST_OFFSET_HEADER]: `456`,
       }),
     })
@@ -297,7 +297,7 @@ describe(`createFetchWithChunkBuffer`, () => {
     expect(result).toBe(initialResponse)
 
     // fetch the next chunk as well
-    const nextUrl = `${baseUrl}&shape_id=123&offset=456`
+    const nextUrl = `${baseUrl}&handle=123&offset=456`
     const nextResult = await fetchWrapper(nextUrl)
     expect(nextResult).toBe(nextResponse)
 
@@ -327,7 +327,7 @@ describe(`createFetchWithChunkBuffer`, () => {
     const initialResponse = new Response(`initial chunk`, {
       status: 200,
       headers: responseHeaders({
-        [SHAPE_ID_HEADER]: `123`,
+        [SHAPE_HANDLE_HEADER]: `123`,
         [CHUNK_LAST_OFFSET_HEADER]: `456`,
       }),
     })
@@ -339,7 +339,7 @@ describe(`createFetchWithChunkBuffer`, () => {
     expect(result).toBe(initialResponse)
 
     // Prefetch should have been attempted but failed
-    const nextUrl = `${baseUrl}&shape_id=123&offset=456`
+    const nextUrl = `${baseUrl}&handle=123&offset=456`
     expect(mockFetch).toHaveBeenCalledWith(nextUrl, expect.anything())
 
     // One for the main request, one for the prefetch
@@ -357,7 +357,7 @@ describe(`createFetchWithChunkBuffer`, () => {
         return new Response(`chunk`, {
           status: 200,
           headers: responseHeaders({
-            [SHAPE_ID_HEADER]: `123`,
+            [SHAPE_HANDLE_HEADER]: `123`,
             [CHUNK_LAST_OFFSET_HEADER]: `${idx}`,
           }),
         })
@@ -381,7 +381,7 @@ describe(`createFetchWithChunkBuffer`, () => {
     expect(mockFetch).toHaveBeenNthCalledWith(1, baseUrl)
     expect(mockFetch).toHaveBeenNthCalledWith(
       2,
-      `${baseUrl}&shape_id=123&offset=0`,
+      `${baseUrl}&handle=123&offset=0`,
       expect.anything()
     )
 
@@ -389,12 +389,12 @@ describe(`createFetchWithChunkBuffer`, () => {
     expect(mockFetch).toHaveBeenNthCalledWith(3, altUrl)
     expect(mockFetch).toHaveBeenNthCalledWith(
       4,
-      `${altUrl}&shape_id=123&offset=2`,
+      `${altUrl}&handle=123&offset=2`,
       expect.anything()
     )
     expect(mockFetch).toHaveBeenNthCalledWith(
       5,
-      `${altUrl}&shape_id=123&offset=3`,
+      `${altUrl}&handle=123&offset=3`,
       expect.anything()
     )
   })
@@ -410,7 +410,7 @@ describe(`createFetchWithChunkBuffer`, () => {
         return new Response(`chunk`, {
           status: 200,
           headers: responseHeaders({
-            [SHAPE_ID_HEADER]: `123`,
+            [SHAPE_HANDLE_HEADER]: `123`,
             [CHUNK_LAST_OFFSET_HEADER]: `${idx}`,
           }),
         })
diff --git a/packages/typescript-client/test/integration.test.ts b/packages/typescript-client/test/integration.test.ts
index 572ad2b836..1cc61dc882 100644
--- a/packages/typescript-client/test/integration.test.ts
+++ b/packages/typescript-client/test/integration.test.ts
@@ -117,15 +117,15 @@ describe(`HTTP Sync`, () => {
     expect(values).toHaveLength(0)
   })
 
-  it(`returns a header with the server shape id`, async ({
+  it(`returns a header with the server shape handle`, async ({
     issuesTableUrl,
   }) => {
     const res = await fetch(
       `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1`,
       {}
     )
-    const shapeId = res.headers.get(`electric-shape-id`)
-    expect(shapeId).to.exist
+    const shapeHandle = res.headers.get(`electric-handle`)
+    expect(shapeHandle).to.exist
   })
 
   it(`returns a header with the chunk's last offset`, async ({
@@ -533,7 +533,7 @@ describe(`HTTP Sync`, () => {
       subscribe: false,
       signal: newAborter.signal,
       offset: lastOffset,
-      shapeId: issueStream.shapeId,
+      shapeHandle: issueStream.shapeHandle,
     })
 
     await h.forEachMessage(newIssueStream, newAborter, (res, msg, nth) => {
@@ -605,7 +605,7 @@ describe(`HTTP Sync`, () => {
     const midMessage = messages.slice(-6)[0]
     assert(`offset` in midMessage)
     const midOffset = midMessage.offset
-    const shapeId = res.headers.get(`electric-shape-id`)
+    const shapeHandle = res.headers.get(`electric-handle`)
     const etag = res.headers.get(`etag`)
     assert(etag !== null, `Response should have etag header`)
 
@@ -621,7 +621,7 @@ describe(`HTTP Sync`, () => {
 
     // Get etag for catchup
     const catchupEtagRes = await fetch(
-      `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=${midOffset}&shape_id=${shapeId}`,
+      `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=${midOffset}&handle=${shapeHandle}`,
       {}
     )
     const catchupEtag = catchupEtagRes.headers.get(`etag`)
@@ -630,7 +630,7 @@ describe(`HTTP Sync`, () => {
     // Catch-up offsets should also use the same etag as they're
     // also working through the end of the current log.
     const catchupEtagValidation = await fetch(
-      `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=${midOffset}&shape_id=${shapeId}`,
+      `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=${midOffset}&handle=${shapeHandle}`,
       {
         headers: { 'If-None-Match': catchupEtag },
       }
@@ -675,7 +675,7 @@ describe(`HTTP Sync`, () => {
       }
     })
 
-    await clearShape(issuesTableUrl, { shapeId: issueStream.shapeId! })
+    await clearShape(issuesTableUrl, { shapeHandle: issueStream.shapeHandle! })
 
     expect(shapeData).toEqual(
       new Map([[`${issuesTableKey}/"${id1}"`, { id: id1, title: `foo1` }]])
@@ -795,7 +795,7 @@ describe(`HTTP Sync`, () => {
       subscribe: false,
       signal: newAborter.signal,
       offset: lastOffset,
-      shapeId: issueStream.shapeId,
+      shapeHandle: issueStream.shapeHandle,
       fetchClient: fetchWrapper,
     })
 
@@ -841,7 +841,7 @@ describe(`HTTP Sync`, () => {
       url: `${BASE_URL}/v1/shape`,
       table: issuesTableUrl,
       subscribe: true,
-      shapeId: issueStream.shapeId,
+      shapeHandle: issueStream.shapeHandle,
       where: `1=1`,
     })
 
@@ -879,7 +879,7 @@ describe(`HTTP Sync`, () => {
       // that the existing shape is deleted and some more data is inserted
       if (numRequests === 2) {
         await insertIssues({ id: secondRowId, title: `foo2` })
-        await clearIssuesShape(issueStream.shapeId)
+        await clearIssuesShape(issueStream.shapeHandle)
       }
 
       numRequests++
@@ -902,7 +902,7 @@ describe(`HTTP Sync`, () => {
 
     expect.assertions(12)
 
-    let originalShapeId: string | undefined
+    let originalShapeHandle: string | undefined
     let upToDateReachedCount = 0
     await h.forEachMessage(issueStream, aborter, async (res, msg, nth) => {
       // shapeData.set(msg.key, msg.value)
@@ -936,8 +936,8 @@ describe(`HTTP Sync`, () => {
             title: `foo1`,
             priority: 10,
           })
-          expect(issueStream.shapeId).to.exist
-          originalShapeId = issueStream.shapeId
+          expect(issueStream.shapeHandle).to.exist
+          originalShapeHandle = issueStream.shapeHandle
           break
         case 1:
         case 2:
@@ -946,21 +946,21 @@ describe(`HTTP Sync`, () => {
 
           if (msg.value.id == rowId) {
             // message is the initial row again as it is a new shape
-            // with different shape id
+            // with different shape handle
             expect(msg.value).toEqual({
               id: rowId,
               title: `foo1`,
               priority: 10,
             })
-            expect(issueStream.shapeId).not.toBe(originalShapeId)
+            expect(issueStream.shapeHandle).not.toBe(originalShapeHandle)
           } else {
-            // should get the second row as well with the new shape ID
+            // should get the second row as well with the new shape handle
             expect(msg.value).toEqual({
               id: secondRowId,
               title: `foo2`,
               priority: 10,
             })
-            expect(issueStream.shapeId).not.toBe(originalShapeId)
+            expect(issueStream.shapeHandle).not.toBe(originalShapeHandle)
           }
           break
         default:
diff --git a/packages/typescript-client/test/support/test-context.ts b/packages/typescript-client/test/support/test-context.ts
index 146fc74e65..d4824a7612 100644
--- a/packages/typescript-client/test/support/test-context.ts
+++ b/packages/typescript-client/test/support/test-context.ts
@@ -4,16 +4,17 @@ import { Client, QueryResult } from 'pg'
 import { inject, test } from 'vitest'
 import { makePgClient } from './test-helpers'
 import { FetchError } from '../../src/error'
+import { DATABASE_ID_QUERY_PARAM, SHAPE_HANDLE_QUERY_PARAM } from '../../src/constants'
 
 export type IssueRow = { id: string; title: string; priority?: string }
 export type GeneratedIssueRow = { id?: string; title: string }
 export type UpdateIssueFn = (row: IssueRow) => Promise>
 export type DeleteIssueFn = (row: IssueRow) => Promise>
 export type InsertIssuesFn = (...rows: GeneratedIssueRow[]) => Promise
-export type ClearIssuesShapeFn = (shapeId?: string) => Promise
+export type ClearIssuesShapeFn = (shapeHandle?: string) => Promise
 export type ClearShapeFn = (
   table: string,
-  options?: { shapeId?: string; databaseId?: string }
+  options?: { shapeHandle?: string; databaseId?: string }
 ) => Promise
 
 export const testWithDbClient = test.extend<{
@@ -43,7 +44,7 @@ export const testWithDbClient = test.extend<{
         table: string,
         options: {
           databaseId?: string
-          shapeId?: string
+          shapeHandle?: string
         } = {}
       ) => {
         const baseUrl = inject(`baseUrl`)
@@ -53,10 +54,10 @@ export const testWithDbClient = test.extend<{
           options.databaseId = inject(`databaseId`)
         }
 
-        url.searchParams.set(`database_id`, options.databaseId)
+        url.searchParams.set(DATABASE_ID_QUERY_PARAM, options.databaseId)
 
-        if (options.shapeId) {
-          url.searchParams.set(`shape_id`, options.shapeId)
+        if (options.shapeHandle) {
+          url.searchParams.set(SHAPE_HANDLE_QUERY_PARAM, options.shapeHandle)
         }
 
         const resp = await fetch(url.toString(), { method: `DELETE` })
@@ -65,7 +66,7 @@ export const testWithDbClient = test.extend<{
             await FetchError.fromResponse(resp, `DELETE ${url.toString()}`)
           )
           throw new Error(
-            `Could not delete shape ${table} with ID ${options.shapeId}`
+            `Could not delete shape ${table} with ID ${options.shapeHandle}`
           )
         }
       }
@@ -152,7 +153,7 @@ export const testWithIssuesTable = testWithDbClient.extend<{
     }),
 
   clearIssuesShape: async ({ clearShape, issuesTableUrl }, use) => {
-    use((shapeId?: string) => clearShape(issuesTableUrl, { shapeId }))
+    use((shapeHandle?: string) => clearShape(issuesTableUrl, { shapeHandle }))
   },
 })
 
@@ -222,6 +223,7 @@ export const testWithMultiTenantIssuesTable = testWithDbClients.extend<{
     }),
 })
 
+
 export const testWithMultitypeTable = testWithDbClient.extend<{
   tableSql: string
   tableUrl: string
diff --git a/website/docs/api/http.md b/website/docs/api/http.md
index 70647a85a9..5126edc321 100644
--- a/website/docs/api/http.md
+++ b/website/docs/api/http.md
@@ -84,10 +84,10 @@ Note that the other control message is `must-refetch` which indicates that the c
 
 ### Live mode
 
-Once a client is up-to-date, it can switch to live mode to receive real-time updates, by making requests with `live=true`, an `offset` and a `shape_id`, e.g.:
+Once a client is up-to-date, it can switch to live mode to receive real-time updates, by making requests with `live=true`, an `offset` and a shape `handle`, e.g.:
 
 ```sh
-curl -i 'http://localhost:3000/v1/shape?table=foo&live=true&offset=0_0&shape_id=3833821-1721812114261'
+curl -i 'http://localhost:3000/v1/shape?table=foo&live=true&handle=3833821-1721812114261&offset=0_0'
 ```
 
 The `live` parameter puts the server into live mode, where it will hold open the connection, waiting for new data to arrive. This allows you to implement a long-polling strategy to consume real-time updates.
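
For illustration, a long-polling loop over this endpoint might look like the following sketch. The query parameter names follow the curl example above; the response header used to advance the offset is assumed from the API changes elsewhere in this patch series, and backoff and 409 handling are omitted:

```ts
// Hedged sketch of a live-mode long-polling loop. Assumes an Electric
// service on localhost:3000 syncing a `foo` table.
async function liveLoop(handle: string, initialOffset: string): Promise<void> {
  let offset = initialOffset
  for (;;) {
    const url = new URL(`http://localhost:3000/v1/shape`)
    url.searchParams.set(`table`, `foo`)
    url.searchParams.set(`live`, `true`)
    url.searchParams.set(`handle`, handle)
    url.searchParams.set(`offset`, offset)

    const res = await fetch(url)
    if (res.status === 204) continue // long poll timed out; poll again

    const messages = await res.json()
    // ...apply messages to local state here...

    // Advance the offset using the last-offset response header
    // (header name assumed from the API changes in this patch series).
    offset = res.headers.get(`electric-offset`) ?? offset
  }
}
```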
diff --git a/website/docs/guides/auth.md b/website/docs/guides/auth.md
index b065ce6f62..730ee99d52 100644
--- a/website/docs/guides/auth.md
+++ b/website/docs/guides/auth.md
@@ -75,10 +75,10 @@ export async function GET(
   // Construct the upstream URL
   const originUrl = new URL(`http://localhost:3000/v1/shape`)
 
-  // Copy over the table, shape_id, and offset query params that the
-  // Electric client adds so we return the right part of the Shape log.
+  // Copy over the relevant query params that the Electric client adds
+  // so that we return the right part of the Shape log.
   url.searchParams.forEach((value, key) => {
-    if ([`shape_id`, `offset`].includes(key)) {
+    if ([`live`, `table`, `handle`, `offset`, `cursor`].includes(key)) {
       originUrl.searchParams.set(key, value)
     }
   })
diff --git a/website/docs/guides/shapes.md b/website/docs/guides/shapes.md
index b2b389e975..704f21da08 100644
--- a/website/docs/guides/shapes.md
+++ b/website/docs/guides/shapes.md
@@ -102,7 +102,7 @@ curl -i 'http://localhost:3000/v1/shape?table=foo&offset=-1'
 Then switch into a live mode to use long-polling to receive real-time updates:
 
 ```sh
-curl -i 'http://localhost:3000/v1/shape?table=foo&live=true&offset=...&shape_id=...'
+curl -i 'http://localhost:3000/v1/shape?table=foo&live=true&offset=...&handle=...'
 ```
 
 These requests both return an array of [Shape Log](/docs/api/http#shape-log) entries. You can process these manually, or use a higher-level client.
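
As a sketch of the higher-level client path mentioned above, the same shape can be consumed with the TypeScript client using the `table` option introduced in this patch series:

```ts
import { ShapeStream } from '@electric-sql/client'

// Subscribe to the `foo` shape; each callback delivers a batch of
// shape log messages (inserts, updates, deletes and control messages).
const stream = new ShapeStream({
  url: `http://localhost:3000/v1/shape`,
  table: `foo`,
})

stream.subscribe((messages) => {
  for (const message of messages) {
    // ...process each shape log entry...
  }
})
```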
diff --git a/website/docs/quickstart.md b/website/docs/quickstart.md
index b114baf2a5..f0f9dcc7bf 100644
--- a/website/docs/quickstart.md
+++ b/website/docs/quickstart.md
@@ -115,7 +115,7 @@ access-control-allow-origin: *
 access-control-expose-headers: *
 access-control-allow-methods: GET, POST, OPTIONS
 content-type: application/json; charset=utf-8
-x-electric-shape-id: 3833821-1721299734314
+electric-handle: 3833821-1721299734314
 x-electric-chunk-last-offset: 0_0
 x-electric-schema: {"id":{"type":"int4","pk_index":0},"name":{"type":"varchar","max_length":255},"value":{"type":"float8"}}
 etag: 3833821-1721299734314:-1:0_0
diff --git a/website/electric-api.yaml b/website/electric-api.yaml
index bb22caf97d..9629a85c46 100644
--- a/website/electric-api.yaml
+++ b/website/electric-api.yaml
@@ -77,7 +77,7 @@ paths:
             in the stream.
 
             Note that when `offset` is not `-1` then you must also provide
-            a `shape_id`.
+            a `shape_handle`.
         - name: live
           in: query
           schema:
@@ -102,13 +102,13 @@ paths:
           description: |-
             This is a cursor generated by the server during live requests. It helps bust caches for
             responses from previous long-polls.
-        - name: shape_id
+        - name: shape_handle
           in: query
           schema:
             type: string
           example: "3833821-1721812114261"
           description: |-
-            The shape ID returned by the initial shape request.
+            The shape handle returned by the initial shape request.
 
             This is a required parameter when this is not an initial sync request.
             I.e. when offset is not `-1`.
@@ -171,9 +171,9 @@ paths:
                 type: string
               example: "3833821-1721812114261:26800584_4:26800584_4"
               description: |-
-                Etag header specifying the shape ID and offset for efficient caching.
+                Etag header specifying the shape handle and offset for efficient caching.
 
-                In the format `{shape_id}:{start_offset}:{end_offset}`.
+                In the format `{shape_handle}:{start_offset}:{end_offset}`.
             electric-chunk-last-offset:
               schema:
                 type: string
@@ -185,14 +185,14 @@ paths:
                 you have provided. This header simplifies client development by
                 avoiding the need to parse the last offset out of the stream of
                 log entries.
-            electric-shape-id:
+            electric-handle:
               schema:
                 type: string
               example: "3833821-1721812114261"
               description: |-
-                The shape ID.
+                The shape handle.
 
-                Must be provided as the `shape_id` parameter when making
+                Must be provided as the `shape_handle` parameter when making
                 subsequent requests where `offset` is not `-1`.
             electric-schema:
               schema:
@@ -319,15 +319,15 @@ paths:
                   message:
                     type: string
                     description: Error message
-                  shape_id:
+                  shape_handle:
                     type: string
-                    description: The latest shape ID the client should sync.
+                    description: The latest shape handle the client should sync.
                   offset:
                     type: string
-                    description: The offset from where to sync the given shape_id.
+                    description: The offset from where to sync the given shape_handle.
                 example:
-                  message: "The shape associated with this shape_id and offset was not found. Resync to fetch the latest shape"
-                  shape_id: "2494_84241"
+                  message: "The shape associated with this shape_handle and offset was not found. Resync to fetch the latest shape"
+                  shape_handle: "2494_84241"
                   offset: "-1"
         "429":
           description:
@@ -379,18 +379,18 @@ paths:
           description: |-
             The ID of the database from which to delete the shape.
             This is required only if Electric manages several databases.
-        - name: shape_id
+        - name: handle
           in: query
           schema:
             type: string
           example: "3833821-1721812114261"
           description:
-            Optional, deletes the current shape if it matches the shape_id.
+            Optional, deletes the current shape if it matches the `handle` provided.
             If not provided, deletes the current shape.
       responses:
         "202":
           description: |-
-            Accepted. The shape has been deleted (or to be more precise: the shape ID
+            Accepted. The shape has been deleted (or to be more precise: the shape handle
             has been invalidated and the storage will be cleaned up eventually).
         "400":
           description: Bad request.

From b150a17fe45e48f8518491281d199a37a000aa4e Mon Sep 17 00:00:00 2001
From: James Arthur 
Date: Tue, 29 Oct 2024 15:04:16 +0100
Subject: [PATCH 03/11] api: update header names and query params.

---
 integration-tests/tests/crash-recovery.lux    |  2 +-
 .../lib/electric/plug/delete_shape_plug.ex    |  4 +-
 .../lib/electric/plug/serve_shape_plug.ex     | 30 +++++++--------
 .../lib/electric/shapes/consumer.ex           |  4 +-
 .../test/electric/plug/router_test.exs        |  8 ++--
 .../electric/plug/serve_shape_plug_test.exs   | 12 +++---
 .../electric/shape_cache/storage_test.exs     |  4 +-
 packages/typescript-client/src/constants.ts   | 16 ++++----
 packages/typescript-client/test/cache.test.ts | 11 ++----
 .../test/integration.test.ts                  |  2 +-
 website/docs/api/http.md                      |  2 +-
 website/docs/quickstart.md                    |  5 ++-
 website/electric-api.yaml                     | 37 +++++++++++++++++--
 13 files changed, 82 insertions(+), 55 deletions(-)

diff --git a/integration-tests/tests/crash-recovery.lux b/integration-tests/tests/crash-recovery.lux
index 5b9ec66107..3421842142 100644
--- a/integration-tests/tests/crash-recovery.lux
+++ b/integration-tests/tests/crash-recovery.lux
@@ -43,7 +43,7 @@
   !curl -v -X GET "http://localhost:3000/v1/shape?table=items&offset=-1"
   ?electric-handle: ([\d-]+)
   [local shape_handle=$1]
-  ?electric-chunk-last-offset: ([\w\d_]+)
+  ?electric-offset: ([\w\d_]+)
   [local last_offset=$1]
 
 ## Terminate electric
diff --git a/packages/sync-service/lib/electric/plug/delete_shape_plug.ex b/packages/sync-service/lib/electric/plug/delete_shape_plug.ex
index b24cb857ac..bffa0205d4 100644
--- a/packages/sync-service/lib/electric/plug/delete_shape_plug.ex
+++ b/packages/sync-service/lib/electric/plug/delete_shape_plug.ex
@@ -44,8 +44,8 @@ defmodule Electric.Plug.DeleteShapePlug do
   end
 
   defp truncate_or_delete_shape(%Plug.Conn{} = conn, _) do
-    if conn.assigns.shape_handle !== nil do
-      with :ok <- Shapes.clean_shape(conn.assigns.shape_handle, conn.assigns.config) do
+    if conn.assigns.handle !== nil do
+      with :ok <- Shapes.clean_shape(conn.assigns.handle, conn.assigns.config) do
         send_resp(conn, 202, "")
       end
     else
diff --git a/packages/sync-service/lib/electric/plug/serve_shape_plug.ex b/packages/sync-service/lib/electric/plug/serve_shape_plug.ex
index 13f3dcfde4..de9fd67164 100644
--- a/packages/sync-service/lib/electric/plug/serve_shape_plug.ex
+++ b/packages/sync-service/lib/electric/plug/serve_shape_plug.ex
@@ -65,7 +65,7 @@ defmodule Electric.Plug.ServeShapePlug do
     embedded_schema do
       field(:table, :string)
       field(:offset, :string)
-      field(:shape_handle, :string)
+      field(:handle, :string)
       field(:live, :boolean, default: false)
       field(:where, :string)
       field(:columns, :string)
@@ -80,7 +80,7 @@ defmodule Electric.Plug.ServeShapePlug do
       |> validate_required([:table, :offset])
       |> cast_offset()
       |> cast_columns()
-      |> validate_shape_handle_with_offset()
+      |> validate_handle_with_offset()
       |> validate_live_with_offset()
       |> cast_root_table(opts)
       |> apply_action(:validate)
@@ -127,16 +127,16 @@ defmodule Electric.Plug.ServeShapePlug do
       end
     end
 
-    def validate_shape_handle_with_offset(%Ecto.Changeset{valid?: false} = changeset),
+    def validate_handle_with_offset(%Ecto.Changeset{valid?: false} = changeset),
       do: changeset
 
-    def validate_shape_handle_with_offset(%Ecto.Changeset{} = changeset) do
+    def validate_handle_with_offset(%Ecto.Changeset{} = changeset) do
       offset = fetch_change!(changeset, :offset)
 
       if offset == LogOffset.before_all() do
         changeset
       else
-        validate_required(changeset, [:shape_handle], message: "can't be blank when offset != -1")
+        validate_required(changeset, [:handle], message: "can't be blank when offset != -1")
       end
     end
 
@@ -220,9 +220,9 @@ defmodule Electric.Plug.ServeShapePlug do
     end)
   end
 
-  # No shape_handle is provided so we can get the existing one for this shape
+  # No handle is provided so we can get the existing one for this shape
   # or create a new shape if it does not yet exist
-  defp get_or_create_shape_handle(%{shape_definition: shape, config: config, shape_handle: nil}) do
+  defp get_or_create_shape_handle(%{shape_definition: shape, config: config, handle: nil}) do
     Shapes.get_or_create_shape_handle(config, shape)
   end
 
@@ -232,7 +232,7 @@ defmodule Electric.Plug.ServeShapePlug do
   end
 
   defp handle_shape_info(
-         %Conn{assigns: %{shape_definition: shape, config: config, shape_handle: shape_handle}} =
+         %Conn{assigns: %{shape_definition: shape, config: config, handle: shape_handle}} =
            conn,
          nil
        ) do
@@ -256,7 +256,7 @@ defmodule Electric.Plug.ServeShapePlug do
   end
 
   defp handle_shape_info(
-         %Conn{assigns: %{shape_handle: shape_handle}} = conn,
+         %Conn{assigns: %{handle: shape_handle}} = conn,
          {active_shape_handle, last_offset}
        )
        when is_nil(shape_handle) or shape_handle == active_shape_handle do
@@ -322,7 +322,7 @@ defmodule Electric.Plug.ServeShapePlug do
 
     conn
     |> assign(:chunk_end_offset, chunk_end_offset)
-    |> put_resp_header("electric-chunk-last-offset", "#{chunk_end_offset}")
+    |> put_resp_header("electric-offset", "#{chunk_end_offset}")
   end
 
   defp determine_up_to_date(
@@ -343,11 +343,11 @@ defmodule Electric.Plug.ServeShapePlug do
       |> assign(:up_to_date, [])
       # header might have been added on first pass but no longer valid
       # if listening to live changes and an incomplete chunk is formed
-      |> delete_resp_header("electric-chunk-up-to-date")
+      |> delete_resp_header("electric-up-to-date")
     else
       conn
       |> assign(:up_to_date, [@up_to_date])
-      |> put_resp_header("electric-chunk-up-to-date", "")
+      |> put_resp_header("electric-up-to-date", "")
     end
   end
 
@@ -406,7 +406,7 @@ defmodule Electric.Plug.ServeShapePlug do
         "public, max-age=5, stale-while-revalidate=5"
       )
       |> put_resp_header(
-        "electric-next-cursor",
+        "electric-cursor",
         TimeUtils.seconds_since_oct9th_2024_next_interval(conn) |> Integer.to_string()
       )
 
@@ -576,7 +576,7 @@ defmodule Electric.Plug.ServeShapePlug do
         |> assign(:last_offset, latest_log_offset)
         |> assign(:chunk_end_offset, latest_log_offset)
         # update last offset header
-        |> put_resp_header("electric-chunk-last-offset", "#{latest_log_offset}")
+        |> put_resp_header("electric-offset", "#{latest_log_offset}")
         |> determine_up_to_date([])
         |> serve_shape_log()
 
@@ -602,7 +602,7 @@ defmodule Electric.Plug.ServeShapePlug do
       if is_struct(conn.query_params, Plug.Conn.Unfetched) do
         assigns[:active_shape_handle] || assigns[:shape_handle]
       else
-        conn.query_params["shape_id"] || assigns[:active_shape_id] || assigns[:shape_id]
+        conn.query_params["handle"] || assigns[:active_shape_handle] || assigns[:shape_handle]
       end
 
     maybe_up_to_date = if up_to_date = assigns[:up_to_date], do: up_to_date != []
diff --git a/packages/sync-service/lib/electric/shapes/consumer.ex b/packages/sync-service/lib/electric/shapes/consumer.ex
index 1d3e67b30c..c637c9f2e2 100644
--- a/packages/sync-service/lib/electric/shapes/consumer.ex
+++ b/packages/sync-service/lib/electric/shapes/consumer.ex
@@ -132,13 +132,13 @@ defmodule Electric.Shapes.Consumer do
         {:snapshot_xmin_known, shape_handle, xmin},
         %{shape_handle: shape_handle} = state
       ) do
-    Logger.debug("Snapshot xmin known shape_id: #{shape_id} xmin: #{xmin}")
+    Logger.debug("Snapshot xmin known shape_handle: #{shape_handle} xmin: #{xmin}")
     state = set_snapshot_xmin(xmin, state)
     handle_txns(state.buffer, %{state | buffer: []})
   end
 
   def handle_cast({:snapshot_started, shape_handle}, %{shape_handle: shape_handle} = state) do
-    Logger.debug("Snapshot started shape_id: #{shape_id}")
+    Logger.debug("Snapshot started shape_handle: #{shape_handle}")
     state = set_snapshot_started(state)
     {:noreply, [], state}
   end
diff --git a/packages/sync-service/test/electric/plug/router_test.exs b/packages/sync-service/test/electric/plug/router_test.exs
index 1cbe31f9b5..6e8e71c510 100644
--- a/packages/sync-service/test/electric/plug/router_test.exs
+++ b/packages/sync-service/test/electric/plug/router_test.exs
@@ -691,7 +691,7 @@ defmodule Electric.Plug.RouterTest do
       conn = conn("GET", "/v1/shape?table=large_rows_table&offset=-1") |> Router.call(opts)
       assert %{status: 200} = conn
       [shape_handle] = Plug.Conn.get_resp_header(conn, "electric-handle")
-      [next_offset] = Plug.Conn.get_resp_header(conn, "electric-chunk-last-offset")
+      [next_offset] = Plug.Conn.get_resp_header(conn, "electric-offset")
 
       assert [] = Jason.decode!(conn.resp_body)
 
@@ -732,7 +732,7 @@ defmodule Electric.Plug.RouterTest do
                }
              ] = Jason.decode!(conn.resp_body)
 
-      [next_offset] = Plug.Conn.get_resp_header(conn, "electric-chunk-last-offset")
+      [next_offset] = Plug.Conn.get_resp_header(conn, "electric-offset")
 
       conn =
         conn("GET", "/v1/shape?table=large_rows_table&offset=#{next_offset}&handle=#{shape_handle}")
@@ -767,7 +767,7 @@ defmodule Electric.Plug.RouterTest do
       assert conn.resp_body != ""
 
       shape_handle = get_resp_shape_handle(conn)
-      [next_offset] = Plug.Conn.get_resp_header(conn, "electric-chunk-last-offset")
+      [next_offset] = Plug.Conn.get_resp_header(conn, "electric-offset")
 
       # Make the next request but forget to include the where clause
       conn =
@@ -899,7 +899,7 @@ defmodule Electric.Plug.RouterTest do
   end
 
   defp get_resp_shape_handle(conn), do: get_resp_header(conn, "electric-handle")
-  defp get_resp_last_offset(conn), do: get_resp_header(conn, "electric-chunk-last-offset")
+  defp get_resp_last_offset(conn), do: get_resp_header(conn, "electric-offset")
 
   defp get_resp_header(conn, header) do
     assert [val] = Plug.Conn.get_resp_header(conn, header)
diff --git a/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs b/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs
index c0aba08582..cc21b20794 100644
--- a/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs
+++ b/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs
@@ -393,11 +393,11 @@ defmodule Electric.Plug.ServeShapePlugTest do
 
       assert Plug.Conn.get_resp_header(conn, "electric-handle") == [@test_shape_handle]
 
-      assert Plug.Conn.get_resp_header(conn, "electric-chunk-last-offset") == [
+      assert Plug.Conn.get_resp_header(conn, "electric-offset") == [
                "#{next_next_offset}"
              ]
 
-      assert Plug.Conn.get_resp_header(conn, "electric-chunk-up-to-date") == []
+      assert Plug.Conn.get_resp_header(conn, "electric-up-to-date") == []
     end
 
     test "returns 304 Not Modified when If-None-Match matches ETag",
@@ -489,8 +489,8 @@ defmodule Electric.Plug.ServeShapePlugTest do
                "public, max-age=5, stale-while-revalidate=5"
              ]
 
-      assert Plug.Conn.get_resp_header(conn, "electric-chunk-last-offset") == [next_offset_str]
-      assert Plug.Conn.get_resp_header(conn, "electric-chunk-up-to-date") == [""]
+      assert Plug.Conn.get_resp_header(conn, "electric-offset") == [next_offset_str]
+      assert Plug.Conn.get_resp_header(conn, "electric-up-to-date") == [""]
       assert Plug.Conn.get_resp_header(conn, "electric-schema") == []
     end
 
@@ -540,7 +540,7 @@ defmodule Electric.Plug.ServeShapePlugTest do
 
       assert conn.status == 200
       assert Jason.decode!(conn.resp_body) == [%{"headers" => %{"control" => "up-to-date"}}]
-      assert Plug.Conn.get_resp_header(conn, "electric-chunk-up-to-date") == [""]
+      assert Plug.Conn.get_resp_header(conn, "electric-up-to-date") == [""]
     end
 
     test "sends an up-to-date response after a timeout if no changes are observed",
@@ -578,7 +578,7 @@ defmodule Electric.Plug.ServeShapePlugTest do
                "public, max-age=5, stale-while-revalidate=5"
              ]
 
-      assert Plug.Conn.get_resp_header(conn, "electric-chunk-up-to-date") == [""]
+      assert Plug.Conn.get_resp_header(conn, "electric-up-to-date") == [""]
     end
 
     test "sends 409 with a redirect to existing shape when requested shape handle does not exist",
diff --git a/packages/sync-service/test/electric/shape_cache/storage_test.exs b/packages/sync-service/test/electric/shape_cache/storage_test.exs
index ae42b64f20..c4c2d2907a 100644
--- a/packages/sync-service/test/electric/shape_cache/storage_test.exs
+++ b/packages/sync-service/test/electric/shape_cache/storage_test.exs
@@ -33,7 +33,7 @@ defmodule Electric.ShapeCache.StorageTest do
 
   test "get_log_stream/4 correctly guards offset ordering" do
     storage = {Mock.Storage, :opts}
-    shape_id = "test"
+    shape_handle = "test"
     tenant_id = "test_tenant"
 
     Mock.Storage
@@ -43,7 +43,7 @@ defmodule Electric.ShapeCache.StorageTest do
     l1 = LogOffset.new(26_877_408, 10)
     l2 = LogOffset.new(26_877_648, 0)
 
-    shape_storage = Storage.for_shape(shape_id, tenant_id, storage)
+    shape_storage = Storage.for_shape(shape_handle, tenant_id, storage)
 
     Storage.get_log_stream(l1, l2, shape_storage)
 
diff --git a/packages/typescript-client/src/constants.ts b/packages/typescript-client/src/constants.ts
index 6c5fb8833c..880fe89bd9 100644
--- a/packages/typescript-client/src/constants.ts
+++ b/packages/typescript-client/src/constants.ts
@@ -1,13 +1,13 @@
+export const LIVE_CACHE_BUSTER_HEADER = `electric-cursor`
 export const SHAPE_HANDLE_HEADER = `electric-handle`
-export const LIVE_CACHE_BUSTER_HEADER = `electric-next-cursor`
-export const LIVE_CACHE_BUSTER_QUERY_PARAM = `cursor`
-export const CHUNK_LAST_OFFSET_HEADER = `electric-chunk-last-offset`
-export const CHUNK_UP_TO_DATE_HEADER = `electric-chunk-up-to-date`
+export const CHUNK_LAST_OFFSET_HEADER = `electric-offset`
 export const SHAPE_SCHEMA_HEADER = `electric-schema`
-export const SHAPE_HANDLE_QUERY_PARAM = `shape_handle`
+export const CHUNK_UP_TO_DATE_HEADER = `electric-up-to-date`
 export const DATABASE_ID_QUERY_PARAM = `database_id`
-export const OFFSET_QUERY_PARAM = `offset`
-export const WHERE_QUERY_PARAM = `where`
 export const COLUMNS_QUERY_PARAM = `columns`
-export const TABLE_QUERY_PARAM = `table`
+export const LIVE_CACHE_BUSTER_QUERY_PARAM = `cursor`
+export const SHAPE_HANDLE_QUERY_PARAM = `handle`
 export const LIVE_QUERY_PARAM = `live`
+export const OFFSET_QUERY_PARAM = `offset`
+export const TABLE_QUERY_PARAM = `table`
+export const WHERE_QUERY_PARAM = `where`
\ No newline at end of file
diff --git a/packages/typescript-client/test/cache.test.ts b/packages/typescript-client/test/cache.test.ts
index d26e3538d7..10d72f094d 100644
--- a/packages/typescript-client/test/cache.test.ts
+++ b/packages/typescript-client/test/cache.test.ts
@@ -83,8 +83,8 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => {
     await insertIssues({ title: `foo` })
     const searchParams = new URLSearchParams({
       table: issuesTableUrl,
-      offset: initialRes.headers.get(`electric-chunk-last-offset`)!,
-      shape_handle: initialRes.headers.get(`electric-handle`)!,
+      handle: initialRes.headers.get(`electric-handle`)!,
+      offset: initialRes.headers.get(`electric-offset`)!,
       live: `true`,
     })
 
@@ -171,13 +171,8 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => {
       {}
     )
     const lastOffset = originalRes.headers.get(CHUNK_LAST_OFFSET_HEADER)
-<<<<<<< HEAD
     const shapeHandle = originalRes.headers.get(SHAPE_HANDLE_HEADER)
     const urlToTest = `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=${lastOffset}&handle=${shapeHandle}`
-=======
-    const shapeId = originalRes.headers.get(SHAPE_HANDLE_HEADER)
-    const urlToTest = `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=${lastOffset}&shape_handle=${shapeId}`
->>>>>>> 667e3032 (Fix more references to shape id)
 
     // Make a first request such that response is cached
     const originalUpToDateRes = await fetch(urlToTest, {})
@@ -241,7 +236,7 @@ describe(`HTTP Initial Data Caching`, { timeout: 30000 }, () => {
 
     expect(getCacheStatus(client2Res)).toBe(CacheStatus.HIT)
 
-    const latestOffset = client2Res.headers.get(`electric-chunk-last-offset`)
+    const latestOffset = client2Res.headers.get(`electric-offset`)
     assert(latestOffset, `latestOffset should be defined`)
 
     // Now GC the shape
diff --git a/packages/typescript-client/test/integration.test.ts b/packages/typescript-client/test/integration.test.ts
index 1cc61dc882..7dc8704a05 100644
--- a/packages/typescript-client/test/integration.test.ts
+++ b/packages/typescript-client/test/integration.test.ts
@@ -135,7 +135,7 @@ describe(`HTTP Sync`, () => {
       `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1`,
       {}
     )
-    const lastOffset = res.headers.get(`electric-chunk-last-offset`)
+    const lastOffset = res.headers.get(`electric-offset`)
     expect(lastOffset).to.exist
   })
 
diff --git a/website/docs/api/http.md b/website/docs/api/http.md
index 5126edc321..a7210fb563 100644
--- a/website/docs/api/http.md
+++ b/website/docs/api/http.md
@@ -62,7 +62,7 @@ When you make an initial sync request, with `offset=-1`, you're telling the serv
 
 When a shape is first requested, Electric queries Postgres for the data and populates the log by turning the query results into insert operations. This allows you to sync shapes without having to pre-define them. Electric then streams out the log data in the response.
 
-Sometimes a log can fit in a single response. Sometimes it's too big and requires multiple requests. In this case, the first request will return a batch of data and an `x-electric-chunk-last-offset` header. An HTTP client should then continue to make requests setting the `offset` parameter to the this header value. This allows the client to paginate through the shape log until it has received all the current data.
+Sometimes a log can fit in a single response. Sometimes it's too big and requires multiple requests. In this case, the first request will return a batch of data and an `electric-offset` header. An HTTP client should then continue to make requests setting the `offset` parameter to this header value. This allows the client to paginate through the shape log until it has received all the current data.
 
 ### Control messages
 
diff --git a/website/docs/quickstart.md b/website/docs/quickstart.md
index f0f9dcc7bf..2160859702 100644
--- a/website/docs/quickstart.md
+++ b/website/docs/quickstart.md
@@ -116,8 +116,9 @@ access-control-expose-headers: *
 access-control-allow-methods: GET, POST, OPTIONS
 content-type: application/json; charset=utf-8
 electric-handle: 3833821-1721299734314
-x-electric-chunk-last-offset: 0_0
-x-electric-schema: {"id":{"type":"int4","pk_index":0},"name":{"type":"varchar","max_length":255},"value":{"type":"float8"}}
+electric-offset: 0_0
+electric-schema: {"id":{"type":"int4","pk_index":0},"name":{"type":"varchar","max_length":255},"value":{"type":"float8"}}
+electric-up-to-date:
 etag: 3833821-1721299734314:-1:0_0
 
 [{"offset":"0_0","value":{"id":"1","name":"Alice","value":"3.14"},"key":"\"public\".\"foo\"/1","headers":{"operation"
diff --git a/website/electric-api.yaml b/website/electric-api.yaml
index 9629a85c46..8d8d6f5816 100644
--- a/website/electric-api.yaml
+++ b/website/electric-api.yaml
@@ -174,7 +174,17 @@ paths:
                 Etag header specifying the shape handle and offset for efficient caching.
 
                 In the format `{shape_handle}:{start_offset}:{end_offset}`.
-            electric-chunk-last-offset:
+            electric-cursor:
+              schema:
+                type: string
+              example: "1674440"
+              description: |-
+                If present, provides a cursor to use as the value of the `cursor`
+                parameter in the next `live` mode long polling request.
+
+                This works around some inconsistent request coalescing behaviour
+                with different CDNs.
+            electric-offset:
               schema:
                 type: string
               example: "26800584_4"
@@ -185,6 +195,9 @@ paths:
                 you have provided. This header simplifies client development by
                 avoiding the need to parse the last offset out of the stream of
                 log entries.
+
+                Must be used as the value of the `offset` parameter in your
+                next request.
             electric-handle:
               schema:
                 type: string
@@ -192,12 +205,12 @@ paths:
               description: |-
                 The shape handle.
 
-                Must be provided as the `shape_handle` parameter when making
+                Must be provided as the value of the `handle` parameter when making
                 subsequent requests where `offset` is not `-1`.
             electric-schema:
               schema:
                 type: string
-                example: '{"id":{"type":"int4","dimensions":0},"title":{"type":"text","dimensions":0},"status":{"type":"text","dimensions":0,"max_length":8}}'
+                example: '":0},"status":{"type":"text","dimensions":0,"max_length":8}}'
                 description: |-
                   A JSON string of an object that maps column names to the corresponding schema object.
                   The schema object contains the type of the column, the number of dimensions, and possibly additional properties.
@@ -211,6 +224,13 @@ paths:
                         `INTERVAL(4)` has an additional `"precision": 4` property,
                         `INTERVAL MINUTE TO SECOND` has an additional `"fields": "MINUTE TO SECOND"` property,
                         `BIT(5)` has an additional `"length": 5` property.
+            electric-up-to-date:
+              schema:
+                description: |-
+                  If present, this header indicates that the response ends with
+                  an `up-to-date` control message, indicating that the client has
+                  received all of the data that the server is aware of and can
+                  safely process/apply any accumulated messages.
           content:
             application/json:
               schema:
@@ -298,6 +318,17 @@ paths:
           description: >-
             No content. The `live=true` polling request timed out without
             any new content to process.
+          headers:
+            electric-cursor:
+              schema:
+                type: string
+              example: "1674440"
+              description: |-
+                Provides a cursor to use as the value of the `cursor` parameter
+                in the next long polling request.
+
+                This works around some inconsistent request coalescing behaviour
+                with different CDNs.
         "400":
           description: Bad request.
         "404":

From 02254427953210500af9023346474badb610f9ef Mon Sep 17 00:00:00 2001
From: James Arthur 
Date: Tue, 29 Oct 2024 16:47:52 +0100
Subject: [PATCH 04/11] renaming: fix and format.

---
 .../react-hooks/test/support/test-context.ts  |  9 +++++--
 .../lib/electric/plug/serve_shape_plug.ex     |  2 +-
 .../test/electric/plug/router_test.exs        | 24 ++++++++++++-------
 .../electric/plug/serve_shape_plug_test.exs   |  3 ++-
 packages/typescript-client/src/client.ts      |  5 +++-
 packages/typescript-client/src/constants.ts   |  2 +-
 packages/typescript-client/test/cache.test.ts |  7 +++---
 7 files changed, 34 insertions(+), 18 deletions(-)

diff --git a/packages/react-hooks/test/support/test-context.ts b/packages/react-hooks/test/support/test-context.ts
index 24dcbe6f51..815f47224a 100644
--- a/packages/react-hooks/test/support/test-context.ts
+++ b/packages/react-hooks/test/support/test-context.ts
@@ -11,7 +11,10 @@ export type UpdateIssueFn = (row: IssueRow) => Promise>
 export type DeleteIssueFn = (row: IssueRow) => Promise>
 export type InsertIssuesFn = (...rows: GeneratedIssueRow[]) => Promise
 export type ClearIssuesShapeFn = (shapeHandle?: string) => Promise
-export type ClearShapeFn = (table: string, shapeHandle?: string) => Promise
+export type ClearShapeFn = (
+  table: string,
+  shapeHandle?: string
+) => Promise
 
 export const testWithDbClient = test.extend<{
   dbClient: Client
@@ -50,7 +53,9 @@ export const testWithDbClient = test.extend<{
             `DELETE ${baseUrl}/v1/shape?table=${table}`
           )
         )
-        throw new Error(`Could not delete shape ${table} with handle ${shapeHandle}`)
+        throw new Error(
+          `Could not delete shape ${table} with handle ${shapeHandle}`
+        )
       }
     })
   },
diff --git a/packages/sync-service/lib/electric/plug/serve_shape_plug.ex b/packages/sync-service/lib/electric/plug/serve_shape_plug.ex
index de9fd67164..cf3ecf6df4 100644
--- a/packages/sync-service/lib/electric/plug/serve_shape_plug.ex
+++ b/packages/sync-service/lib/electric/plug/serve_shape_plug.ex
@@ -547,7 +547,7 @@ defmodule Electric.Plug.ServeShapePlug do
   defp listen_for_new_changes(%Conn{assigns: assigns} = conn, _) do
     # Only start listening when we know there is a possibility that nothing is going to be returned
     if LogOffset.compare(assigns.offset, assigns.last_offset) != :lt do
-      shape_handle = assigns.shape_handle
+      shape_handle = assigns.handle
 
       ref = make_ref()
       registry = conn.assigns.config[:registry]
diff --git a/packages/sync-service/test/electric/plug/router_test.exs b/packages/sync-service/test/electric/plug/router_test.exs
index 6e8e71c510..635aa481ed 100644
--- a/packages/sync-service/test/electric/plug/router_test.exs
+++ b/packages/sync-service/test/electric/plug/router_test.exs
@@ -144,7 +144,7 @@ defmodule Electric.Plug.RouterTest do
                Jason.decode!(conn.resp_body)
 
       assert %{status: 202} =
-               conn("DELETE", "/v1/shape/items?shape_handle=#{shape1_handle}")
+               conn("DELETE", "/v1/shape?table=items&handle=#{shape1_handle}")
                |> Router.call(opts)
 
       Postgrex.query!(db_conn, "DELETE FROM items", [])
@@ -156,7 +156,7 @@ defmodule Electric.Plug.RouterTest do
 
       assert %{status: 200} = conn
       shape2_handle = get_resp_shape_handle(conn)
-      assert shape1_id != shape2_handle
+      assert shape1_handle != shape2_handle
 
       assert [%{"value" => %{"value" => "test value 2"}}] =
                Jason.decode!(conn.resp_body)
@@ -452,7 +452,7 @@ defmodule Electric.Plug.RouterTest do
         Task.async(fn ->
           conn("GET", "/v1/shape?table=items", %{
             offset: "0_0",
-            shape_handle: shape_handle,
+            handle: shape_handle,
             where: where,
             live: true
           })
@@ -474,7 +474,7 @@ defmodule Electric.Plug.RouterTest do
                conn =
                conn("GET", "/v1/shape?table=items", %{
                  offset: new_offset,
-                 shape_handle: shape_handle,
+                 handle: shape_handle,
                  where: where
                })
                |> Router.call(opts)
@@ -514,7 +514,7 @@ defmodule Electric.Plug.RouterTest do
         Task.async(fn ->
           conn("GET", "/v1/shape?table=serial_ids", %{
             offset: "0_0",
-            shape_handle: shape_handle,
+            handle: shape_handle,
             where: where,
             live: true
           })
@@ -544,7 +544,7 @@ defmodule Electric.Plug.RouterTest do
         Task.async(fn ->
           conn("GET", "/v1/shape?table=serial_ids", %{
             offset: new_offset,
-            shape_handle: shape_handle,
+            handle: shape_handle,
             where: where,
             live: true
           })
@@ -628,7 +628,7 @@ defmodule Electric.Plug.RouterTest do
         Task.async(fn ->
           conn("GET", "/v1/shape?table=serial_ids", %{
             offset: "0_0",
-            shape_handle: shape_handle,
+            handle: shape_handle,
             where: where,
             live: true
           })
@@ -714,7 +714,10 @@ defmodule Electric.Plug.RouterTest do
       assert %{status: 200} = Task.await(task)
 
       conn =
-        conn("GET", "/v1/shape?table=large_rows_table&offset=#{next_offset}&handle=#{shape_handle}")
+        conn(
+          "GET",
+          "/v1/shape?table=large_rows_table&offset=#{next_offset}&handle=#{shape_handle}"
+        )
         |> Router.call(opts)
 
       assert %{status: 200} = conn
@@ -735,7 +738,10 @@ defmodule Electric.Plug.RouterTest do
       [next_offset] = Plug.Conn.get_resp_header(conn, "electric-offset")
 
       conn =
-        conn("GET", "/v1/shape?table=large_rows_table&offset=#{next_offset}&handle=#{shape_handle}")
+        conn(
+          "GET",
+          "/v1/shape?table=large_rows_table&offset=#{next_offset}&handle=#{shape_handle}"
+        )
         |> Router.call(opts)
 
       assert %{status: 200} = conn
diff --git a/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs b/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs
index cc21b20794..5952baaa03 100644
--- a/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs
+++ b/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs
@@ -191,7 +191,7 @@ defmodule Electric.Plug.ServeShapePlugTest do
       assert conn.status == 400
 
       assert Jason.decode!(conn.resp_body) == %{
-               "shape_handle" => ["can't be blank when offset != -1"]
+               "handle" => ["can't be blank when offset != -1"]
              }
     end
 
@@ -638,6 +638,7 @@ defmodule Electric.Plug.ServeShapePlugTest do
 
       assert Jason.decode!(conn.resp_body) == [%{"headers" => %{"control" => "must-refetch"}}]
       assert get_resp_header(conn, "electric-handle") == [new_shape_handle]
+
       assert get_resp_header(conn, "location") == [
                "/?table=public.users&handle=#{new_shape_handle}&offset=-1"
              ]
diff --git a/packages/typescript-client/src/client.ts b/packages/typescript-client/src/client.ts
index 9d523d16f3..c9cb5e8924 100644
--- a/packages/typescript-client/src/client.ts
+++ b/packages/typescript-client/src/client.ts
@@ -241,7 +241,10 @@ export class ShapeStream = Row>
 
         if (this.#shapeHandle) {
           // This should probably be a header for better cache breaking?
-          fetchUrl.searchParams.set(SHAPE_HANDLE_QUERY_PARAM, this.#shapeHandle!)
+          fetchUrl.searchParams.set(
+            SHAPE_HANDLE_QUERY_PARAM,
+            this.#shapeHandle!
+          )
         }
 
         if (this.#databaseId) {
diff --git a/packages/typescript-client/src/constants.ts b/packages/typescript-client/src/constants.ts
index 880fe89bd9..ef487670e9 100644
--- a/packages/typescript-client/src/constants.ts
+++ b/packages/typescript-client/src/constants.ts
@@ -10,4 +10,4 @@ export const SHAPE_HANDLE_QUERY_PARAM = `handle`
 export const LIVE_QUERY_PARAM = `live`
 export const OFFSET_QUERY_PARAM = `offset`
 export const TABLE_QUERY_PARAM = `table`
-export const WHERE_QUERY_PARAM = `where`
\ No newline at end of file
+export const WHERE_QUERY_PARAM = `where`
diff --git a/packages/typescript-client/test/cache.test.ts b/packages/typescript-client/test/cache.test.ts
index 10d72f094d..47d74120b2 100644
--- a/packages/typescript-client/test/cache.test.ts
+++ b/packages/typescript-client/test/cache.test.ts
@@ -274,8 +274,9 @@ describe(`HTTP Initial Data Caching`, { timeout: 30000 }, () => {
       newInitialSyncRes.headers.get(`electric-handle`) ?? undefined
     expect(newInitialSyncRes.status).toBe(200)
     expect(getCacheStatus(newInitialSyncRes)).toBe(CacheStatus.HIT)
-    expect(cachedShapeHandle, `Got old shape handle that is out of scope`).not.toBe(
-      originalShapeHandle
-    )
+    expect(
+      cachedShapeHandle,
+      `Got old shape handle that is out of scope`
+    ).not.toBe(originalShapeHandle)
   })
 })

From f6456fe53c5741d07c8ee70bce8092a2b2b7c4c2 Mon Sep 17 00:00:00 2001
From: James Arthur 
Date: Tue, 29 Oct 2024 21:16:34 +0100
Subject: [PATCH 05/11] integration-tests: fix header name in
 crash-recovery.lux

---
 integration-tests/tests/crash-recovery.lux | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/integration-tests/tests/crash-recovery.lux b/integration-tests/tests/crash-recovery.lux
index 3421842142..0222752d0f 100644
--- a/integration-tests/tests/crash-recovery.lux
+++ b/integration-tests/tests/crash-recovery.lux
@@ -42,9 +42,9 @@
   # strip ANSI codes from response for easier matching
   !curl -v -X GET "http://localhost:3000/v1/shape?table=items&offset=-1"
   ?electric-handle: ([\d-]+)
-  [local shape_handle=$1]
+  [local handle=$1]
   ?electric-offset: ([\w\d_]+)
-  [local last_offset=$1]
+  [local offset=$1]
 
 ## Terminate electric
 [shell electric]
@@ -58,7 +58,7 @@
 
 # Client should be able to continue same shape
 [shell client]
-  !curl -v -X GET "http://localhost:3000/v1/shape?table=items&handle=$shape_handle&offset=$last_offset"
+  !curl -v -X GET "http://localhost:3000/v1/shape?table=items&handle=$handle&offset=$offset"
   ??HTTP/1.1 200 OK
 
 [cleanup]

From d68f269077690999d73dd21d97760085adb815d1 Mon Sep 17 00:00:00 2001
From: msfstef 
Date: Thu, 31 Oct 2024 18:04:29 +0200
Subject: [PATCH 06/11] Fix types and tests

---
 packages/react-hooks/test/react-hooks.test.tsx | 6 ++++--
 packages/typescript-client/src/fetch.ts        | 2 +-
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/packages/react-hooks/test/react-hooks.test.tsx b/packages/react-hooks/test/react-hooks.test.tsx
index 057e8c6a21..ba07052479 100644
--- a/packages/react-hooks/test/react-hooks.test.tsx
+++ b/packages/react-hooks/test/react-hooks.test.tsx
@@ -72,7 +72,8 @@ describe(`useShape`, () => {
     const manualAborter = new AbortController()
     renderHook(() =>
       useShape({
-        url: `${BASE_URL}/v1/shape/${issuesTableUrl}`,
+        url: `${BASE_URL}/v1/shape`,
+        table: issuesTableUrl,
         signal: manualAborter.signal,
         subscribe: false,
       })
@@ -84,7 +85,8 @@ describe(`useShape`, () => {
 
     const { result } = renderHook(() =>
       useShape({
-        url: `${BASE_URL}/v1/shape/${issuesTableUrl}`,
+        url: `${BASE_URL}/v1/shape`,
+        table: issuesTableUrl,
         signal: aborter?.signal,
         subscribe: false,
       })
diff --git a/packages/typescript-client/src/fetch.ts b/packages/typescript-client/src/fetch.ts
index df226c0257..bc06752a64 100644
--- a/packages/typescript-client/src/fetch.ts
+++ b/packages/typescript-client/src/fetch.ts
@@ -271,7 +271,7 @@ function getNextChunkUrl(url: string, res: Response): string | void {
  */
 function chainAborter(
   aborter: AbortController,
-  sourceSignal?: AbortSignal
+  sourceSignal?: AbortSignal | null
 ): AbortSignal {
   if (!sourceSignal) return aborter.signal
   if (sourceSignal.aborted) aborter.abort()

From 753c8b0eadf8805b6712c6687f12c48d62f99947 Mon Sep 17 00:00:00 2001
From: James Arthur 
Date: Thu, 31 Oct 2024 17:33:21 +0100
Subject: [PATCH 07/11] examples: lint changes to auth page/tsx.

---
 examples/auth/app/page.tsx | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/examples/auth/app/page.tsx b/examples/auth/app/page.tsx
index 8a9a9f4aff..c845007fa6 100644
--- a/examples/auth/app/page.tsx
+++ b/examples/auth/app/page.tsx
@@ -20,11 +20,12 @@ const usersShape = (): ShapeStreamOptions => {
     const queryParams = new URLSearchParams(window.location.search)
     const org_id = queryParams.get(`org_id`)
     return {
-      url: new URL(`/shape-proxy?org_id=${org_id}`, window.location.origin).href,
+      url: new URL(`/shape-proxy?org_id=${org_id}`, window.location.origin)
+        .href,
       table: `users`,
       headers: {
         Authorization: org_id || ``,
-      }
+      },
     }
   } else {
     return {

From 885eda710d2f25081db615fa9edb3d7aae5aa904 Mon Sep 17 00:00:00 2001
From: Ilia Borovitinov 
Date: Tue, 5 Nov 2024 18:21:33 +0300
Subject: [PATCH 08/11] Applied formatting

---
 .../lib/electric/plug/serve_shape_plug.ex     |  6 ++-
 .../electric/shape_cache/in_memory_storage.ex |  4 +-
 .../lib/electric/shape_cache/shape_status.ex  |  3 +-
 .../lib/electric/shapes/consumer.ex           |  6 ++-
 .../electric/shapes/consumer/snapshotter.ex   |  6 ++-
 .../electric/shapes/consumer/supervisor.ex    |  6 ++-
 .../electric/shapes/consumer_supervisor.ex    |  4 +-
 .../electric/plug/serve_shape_plug_test.exs   |  4 +-
 .../test/electric/shape_cache_test.exs        |  3 --
 .../test/electric/shapes/consumer_test.exs    | 51 +++++++++++--------
 .../test/support/test-context.ts              |  6 ++-
 11 files changed, 64 insertions(+), 35 deletions(-)

diff --git a/packages/sync-service/lib/electric/plug/serve_shape_plug.ex b/packages/sync-service/lib/electric/plug/serve_shape_plug.ex
index cf3ecf6df4..de77cf5758 100644
--- a/packages/sync-service/lib/electric/plug/serve_shape_plug.ex
+++ b/packages/sync-service/lib/electric/plug/serve_shape_plug.ex
@@ -315,10 +315,12 @@ defmodule Electric.Plug.ServeShapePlug do
   # If chunk offsets are available, use those instead of the latest available offset
   # to optimize for cache hits and response sizes
   defp determine_log_chunk_offset(%Conn{assigns: assigns} = conn, _) do
-    %{config: config, active_shape_handle: shape_handle, offset: offset, tenant_id: tenant_id} = assigns
+    %{config: config, active_shape_handle: shape_handle, offset: offset, tenant_id: tenant_id} =
+      assigns
 
     chunk_end_offset =
-      Shapes.get_chunk_end_log_offset(config, shape_handle, offset, tenant_id) || assigns.last_offset
+      Shapes.get_chunk_end_log_offset(config, shape_handle, offset, tenant_id) ||
+        assigns.last_offset
 
     conn
     |> assign(:chunk_end_offset, chunk_end_offset)
diff --git a/packages/sync-service/lib/electric/shape_cache/in_memory_storage.ex b/packages/sync-service/lib/electric/shape_cache/in_memory_storage.ex
index 5ce4a485b7..ec77382841 100644
--- a/packages/sync-service/lib/electric/shape_cache/in_memory_storage.ex
+++ b/packages/sync-service/lib/electric/shape_cache/in_memory_storage.ex
@@ -51,7 +51,9 @@ defmodule Electric.ShapeCache.InMemoryStorage do
       }) do
     snapshot_table_name = :"#{table_base_name}.#{tenant_id}.Snapshot_#{shape_handle}"
     log_table_name = :"#{table_base_name}.#{tenant_id}.Log_#{shape_handle}"
-    chunk_checkpoint_table_name = :"#{table_base_name}.#{tenant_id}.ChunkCheckpoint_#{shape_handle}"
+
+    chunk_checkpoint_table_name =
+      :"#{table_base_name}.#{tenant_id}.ChunkCheckpoint_#{shape_handle}"
 
     %__MODULE__{
       table_base_name: table_base_name,
diff --git a/packages/sync-service/lib/electric/shape_cache/shape_status.ex b/packages/sync-service/lib/electric/shape_cache/shape_status.ex
index 47498e0a16..cda5e243f8 100644
--- a/packages/sync-service/lib/electric/shape_cache/shape_status.ex
+++ b/packages/sync-service/lib/electric/shape_cache/shape_status.ex
@@ -235,7 +235,8 @@ defmodule Electric.ShapeCache.ShapeStatus do
     snapshot_xmin(table, shape_handle)
   end
 
-  def snapshot_xmin(meta_table, shape_handle) when is_reference(meta_table) or is_atom(meta_table) do
+  def snapshot_xmin(meta_table, shape_handle)
+      when is_reference(meta_table) or is_atom(meta_table) do
     turn_raise_into_error(fn ->
       :ets.lookup_element(
         meta_table,
diff --git a/packages/sync-service/lib/electric/shapes/consumer.ex b/packages/sync-service/lib/electric/shapes/consumer.ex
index c637c9f2e2..64ee52e593 100644
--- a/packages/sync-service/lib/electric/shapes/consumer.ex
+++ b/packages/sync-service/lib/electric/shapes/consumer.ex
@@ -17,7 +17,11 @@ defmodule Electric.Shapes.Consumer do
   @initial_log_state %{current_chunk_byte_size: 0}
 
   def name(
-        %{electric_instance_id: electric_instance_id, tenant_id: tenant_id, shape_handle: shape_handle} =
+        %{
+          electric_instance_id: electric_instance_id,
+          tenant_id: tenant_id,
+          shape_handle: shape_handle
+        } =
           _config
       ) do
     name(electric_instance_id, tenant_id, shape_handle)
diff --git a/packages/sync-service/lib/electric/shapes/consumer/snapshotter.ex b/packages/sync-service/lib/electric/shapes/consumer/snapshotter.ex
index f844914dd9..12d7df78e7 100644
--- a/packages/sync-service/lib/electric/shapes/consumer/snapshotter.ex
+++ b/packages/sync-service/lib/electric/shapes/consumer/snapshotter.ex
@@ -10,7 +10,11 @@ defmodule Electric.Shapes.Consumer.Snapshotter do
 
   require Logger
 
-  def name(%{electric_instance_id: electric_instance_id, tenant_id: tenant_id, shape_handle: shape_handle}) do
+  def name(%{
+        electric_instance_id: electric_instance_id,
+        tenant_id: tenant_id,
+        shape_handle: shape_handle
+      }) do
     name(electric_instance_id, tenant_id, shape_handle)
   end
 
diff --git a/packages/sync-service/lib/electric/shapes/consumer/supervisor.ex b/packages/sync-service/lib/electric/shapes/consumer/supervisor.ex
index ac93b551ce..0630c653e8 100644
--- a/packages/sync-service/lib/electric/shapes/consumer/supervisor.ex
+++ b/packages/sync-service/lib/electric/shapes/consumer/supervisor.ex
@@ -31,7 +31,11 @@ defmodule Electric.Shapes.Consumer.Supervisor do
     Electric.Application.process_name(electric_instance_id, tenant_id, __MODULE__, shape_handle)
   end
 
-  def name(%{electric_instance_id: electric_instance_id, tenant_id: tenant_id, shape_handle: shape_handle}) do
+  def name(%{
+        electric_instance_id: electric_instance_id,
+        tenant_id: tenant_id,
+        shape_handle: shape_handle
+      }) do
     name(electric_instance_id, tenant_id, shape_handle)
   end
 
diff --git a/packages/sync-service/lib/electric/shapes/consumer_supervisor.ex b/packages/sync-service/lib/electric/shapes/consumer_supervisor.ex
index 5d4f7dae34..6537aaaa05 100644
--- a/packages/sync-service/lib/electric/shapes/consumer_supervisor.ex
+++ b/packages/sync-service/lib/electric/shapes/consumer_supervisor.ex
@@ -30,7 +30,9 @@ defmodule Electric.Shapes.ConsumerSupervisor do
   end
 
   def stop_shape_consumer(_name, electric_instance_id, tenant_id, shape_handle) do
-    case GenServer.whereis(Consumer.Supervisor.name(electric_instance_id, tenant_id, shape_handle)) do
+    case GenServer.whereis(
+           Consumer.Supervisor.name(electric_instance_id, tenant_id, shape_handle)
+         ) do
       nil ->
         {:error, "no consumer for shape handle #{inspect(shape_handle)}"}
 
diff --git a/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs b/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs
index 5952baaa03..68af2fc29a 100644
--- a/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs
+++ b/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs
@@ -651,7 +651,9 @@ defmodule Electric.Plug.ServeShapePlugTest do
       |> stub(:has_shape?, fn @test_shape_handle, _opts -> true end)
 
       Mock.Storage
-      |> stub(:for_shape, fn @test_shape_handle, ^tenant_id, opts -> {@test_shape_handle, opts} end)
+      |> stub(:for_shape, fn @test_shape_handle, ^tenant_id, opts ->
+        {@test_shape_handle, opts}
+      end)
 
       conn =
         ctx
diff --git a/packages/sync-service/test/electric/shape_cache_test.exs b/packages/sync-service/test/electric/shape_cache_test.exs
index 860a337f34..6522bad750 100644
--- a/packages/sync-service/test/electric/shape_cache_test.exs
+++ b/packages/sync-service/test/electric/shape_cache_test.exs
@@ -761,11 +761,8 @@ defmodule Electric.ShapeCacheTest do
 
       ref =
         Process.monitor(
-
           module.name(ctx.electric_instance_id, ctx.tenant_id, shape_handle)
-
           |> GenServer.whereis()
-
         )
 
       log = capture_log(fn -> :ok = ShapeCache.clean_shape(shape_handle, opts) end)
diff --git a/packages/sync-service/test/electric/shapes/consumer_test.exs b/packages/sync-service/test/electric/shapes/consumer_test.exs
index d7b1c7aea6..b9e3e998e1 100644
--- a/packages/sync-service/test/electric/shapes/consumer_test.exs
+++ b/packages/sync-service/test/electric/shapes/consumer_test.exs
@@ -278,7 +278,10 @@ defmodule Electric.Shapes.ConsumerTest do
 
       Mock.ShapeCache
       |> expect(:update_shape_latest_offset, fn @shape_handle2, _offset, _ -> :ok end)
-      |> allow(self(), Shapes.Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2))
+      |> allow(
+        self(),
+        Shapes.Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2)
+      )
 
       txn =
         %Transaction{xid: xid, lsn: lsn, last_log_offset: last_log_offset}
@@ -304,7 +307,10 @@ defmodule Electric.Shapes.ConsumerTest do
 
       Mock.ShapeCache
       |> expect(:handle_truncate, fn @shape_handle1, _ -> :ok end)
-      |> allow(self(), Shapes.Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1))
+      |> allow(
+        self(),
+        Shapes.Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1)
+      )
 
       txn =
         %Transaction{xid: xid, lsn: lsn, last_log_offset: last_log_offset}
@@ -354,7 +360,10 @@ defmodule Electric.Shapes.ConsumerTest do
 
       Mock.ShapeCache
       |> expect(:handle_truncate, fn @shape_handle1, _ -> :ok end)
-      |> allow(self(), Shapes.Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1))
+      |> allow(
+        self(),
+        Shapes.Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1)
+      )
 
       txn =
         %Transaction{xid: xid, lsn: lsn, last_log_offset: last_log_offset}
@@ -408,16 +417,16 @@ defmodule Electric.Shapes.ConsumerTest do
 
       ref1 =
         Process.monitor(
-
-          GenServer.whereis(Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1))
-
+          GenServer.whereis(
+            Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1)
+          )
         )
 
       ref2 =
         Process.monitor(
-
-          GenServer.whereis(Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2))
-
+          GenServer.whereis(
+            Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2)
+          )
         )
 
       Mock.ShapeStatus
@@ -444,16 +453,16 @@ defmodule Electric.Shapes.ConsumerTest do
 
       ref1 =
         Process.monitor(
-
-          GenServer.whereis(Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1))
-
+          GenServer.whereis(
+            Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1)
+          )
         )
 
       ref2 =
         Process.monitor(
-
-          GenServer.whereis(Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2))
-
+          GenServer.whereis(
+            Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2)
+          )
         )
 
       # also cleans up inspector cache and shape status cache
@@ -490,16 +499,16 @@ defmodule Electric.Shapes.ConsumerTest do
 
       ref1 =
         Process.monitor(
-
-          GenServer.whereis(Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1))
-
+          GenServer.whereis(
+            Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle1)
+          )
         )
 
       ref2 =
         Process.monitor(
-
-          GenServer.whereis(Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2))
-
+          GenServer.whereis(
+            Consumer.name(ctx.electric_instance_id, ctx.tenant_id, @shape_handle2)
+          )
         )
 
       # also cleans up inspector cache and shape status cache
diff --git a/packages/typescript-client/test/support/test-context.ts b/packages/typescript-client/test/support/test-context.ts
index d4824a7612..39d76f152a 100644
--- a/packages/typescript-client/test/support/test-context.ts
+++ b/packages/typescript-client/test/support/test-context.ts
@@ -4,7 +4,10 @@ import { Client, QueryResult } from 'pg'
 import { inject, test } from 'vitest'
 import { makePgClient } from './test-helpers'
 import { FetchError } from '../../src/error'
-import { DATABASE_ID_QUERY_PARAM, SHAPE_HANDLE_QUERY_PARAM } from '../../src/constants'
+import {
+  DATABASE_ID_QUERY_PARAM,
+  SHAPE_HANDLE_QUERY_PARAM,
+} from '../../src/constants'
 
 export type IssueRow = { id: string; title: string; priority?: string }
 export type GeneratedIssueRow = { id?: string; title: string }
@@ -223,7 +226,6 @@ export const testWithMultiTenantIssuesTable = testWithDbClients.extend<{
     }),
 })
 
-
 export const testWithMultitypeTable = testWithDbClient.extend<{
   tableSql: string
   tableUrl: string

From 4d92d2b54a096f749aefa709186613c05d1e5d30 Mon Sep 17 00:00:00 2001
From: Ilia Borovitinov 
Date: Tue, 5 Nov 2024 18:31:13 +0300
Subject: [PATCH 09/11] Fixed a test after a rebase

---
 packages/typescript-client/test/integration.test.ts | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/packages/typescript-client/test/integration.test.ts b/packages/typescript-client/test/integration.test.ts
index 7dc8704a05..c20450e476 100644
--- a/packages/typescript-client/test/integration.test.ts
+++ b/packages/typescript-client/test/integration.test.ts
@@ -1001,7 +1001,8 @@ describe.sequential(`Multi tenancy sync`, () => {
 
     const shapeData = new Map()
     const issueStream = new ShapeStream({
-      url: `${BASE_URL}/v1/shape/${issuesTableUrl}`,
+      url: `${BASE_URL}/v1/shape`,
+      table: issuesTableUrl,
       databaseId,
       subscribe: false,
       signal: aborter.signal,
@@ -1036,7 +1037,8 @@ describe.sequential(`Multi tenancy sync`, () => {
 
       const shapeData = new Map()
       const issueStream = new ShapeStream({
-        url: `${BASE_URL}/v1/shape/${issuesTableUrl}`,
+        url: `${BASE_URL}/v1/shape`,
+        table: issuesTableUrl,
         databaseId: otherDatabaseId,
         subscribe: false,
         signal: aborter.signal,
@@ -1076,14 +1078,16 @@ describe.sequential(`Multi tenancy sync`, () => {
     }) => {
       // Set up streams for both databases
       const defaultStream = new ShapeStream({
-        url: `${BASE_URL}/v1/shape/${issuesTableUrl}`,
+        url: `${BASE_URL}/v1/shape`,
+        table: issuesTableUrl,
         databaseId,
         subscribe: true,
         signal: aborter.signal,
       })
 
       const otherStream = new ShapeStream({
-        url: `${BASE_URL}/v1/shape/${issuesTableUrl}`,
+        url: `${BASE_URL}/v1/shape`,
+        table: issuesTableUrl,
         databaseId: otherDatabaseId,
         subscribe: true,
         signal: otherAborter.signal,

From f1dbb84d2f22184d5af93ca3231a3f26ee13fc92 Mon Sep 17 00:00:00 2001
From: Ilia Borovitinov 
Date: Tue, 5 Nov 2024 18:36:16 +0300
Subject: [PATCH 10/11] rebase blip

---
 .../lib/electric/shapes/consumer/snapshotter.ex           | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/packages/sync-service/lib/electric/shapes/consumer/snapshotter.ex b/packages/sync-service/lib/electric/shapes/consumer/snapshotter.ex
index 12d7df78e7..e6cec6ba50 100644
--- a/packages/sync-service/lib/electric/shapes/consumer/snapshotter.ex
+++ b/packages/sync-service/lib/electric/shapes/consumer/snapshotter.ex
@@ -175,14 +175,6 @@ defmodule Electric.Shapes.Consumer.Snapshotter do
     )
   end
 
-  defp query_span!(conn, span_name, span_attrs, query, params) do
-    OpenTelemetry.with_span(
-      span_name,
-      span_attrs,
-      fn -> Postgrex.query!(conn, query, params) end
-    )
-  end
-
   defp shape_attrs(shape_handle, shape) do
     [
       "shape.handle": shape_handle,

From 817bf7a311769f66ae282c6160f80502dfe36985 Mon Sep 17 00:00:00 2001
From: Ilia Borovitinov 
Date: Tue, 5 Nov 2024 18:43:09 +0300
Subject: [PATCH 11/11] added a changeset

---
 .changeset/silly-pants-fetch.md | 6 ++++++
 1 file changed, 6 insertions(+)
 create mode 100644 .changeset/silly-pants-fetch.md

diff --git a/.changeset/silly-pants-fetch.md b/.changeset/silly-pants-fetch.md
new file mode 100644
index 0000000000..16cff0dd32
--- /dev/null
+++ b/.changeset/silly-pants-fetch.md
@@ -0,0 +1,6 @@
+---
+"@electric-sql/client": minor
+"@core/sync-service": minor
+---
+
+[breaking] Changes the API contract for the server to use new, clearer header names and query parameter names. One highlight is the change from `shape_id` to `handle` as the URL query parameter.
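For reference, a rough before/after of the request URLs implied by this changeset; the concrete values are illustrative only, not taken from the patch.

```ts
// Illustrative only: old vs. new form of a follow-up shape request.
// Previously the table lived in the path and the shape was identified by `shape_id`:
//   GET /v1/shape/items?offset=0_0&shape_id=<handle>
// With this change the table is a query parameter and the shape is identified by `handle`:
const url = new URL(`http://localhost:3000/v1/shape`)
url.searchParams.set(`table`, `items`)
url.searchParams.set(`offset`, `0_0`)
url.searchParams.set(`handle`, `3833821-1721299734314`) // example handle value
```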