From 01c6f845206b70a37163b8e67ff769cd5ad181ce Mon Sep 17 00:00:00 2001 From: HugoCasa Date: Mon, 6 Jan 2025 12:07:09 +0100 Subject: [PATCH] feat: captures (#4807) * feat: captures * flow UI and improvements * fix: build * fix sqlx * Move Capture WIP * Add capture to webhook and websocket * Move connection status viewer in the head * change trigger section label * Add popover capture picker using melt ui * Add shortcut to triggers capture from input form * remove capture tab in input * Allways show capture * remove useless logs * Add email capture * Add kafka capture into triggers * Add edit option in capture table * use light header for arg input * Add prefilled group id * Change button label * fix logic * Open resource drawer if prototype has fields * fix default completion * Change name Prototype * Dissociate Editor Mode for triggers * Fix bug for script * Fix apply args * fix apply schema * Add capture table to script * fix apply args * Add capture button for script * Delete capture tab * Set capture on when opening triggers capture * fix connection indicator * fix minor issues * Add preprocessor logic * Use slot in log Panel for captures * handle capture refresh in script * Delete capture tab from script editor * reset kafka resource on toggle static * fix minor issue * Allow resource in kafka capture * use simple capture button in flow * Remove capture panel * Polish route trigger editor * Fix resource saving * Remove excessive padding * merge nits * add workflow_dispatch to build * add workflow_dispatch to build * better capture UI * fix sqlx * fix build * fix build * build * make initial_messages optional in line with db * fix npm check * better handle args for capture webhook and http * improve migration + http capture fixes * fix sqlx * update ee ref --------- Co-authored-by: Guilhem Co-authored-by: Ruben Fiszel Co-authored-by: Ruben Fiszel --- ...74e5ef0aaa2505facbea8c764003dfc8fffb1.json | 26 + ...254c783fc34b617c8a9a95a0eb0cda535dab5.json | 33 + 
...ffca075a0550eada87df7162c5037164ad6bf.json | 16 - ...0828357b36bbcdcc151bd97605d7d7a0489e8.json | 15 + ...591b28cbf9ea8f61d379b84ee6e14c033035d.json | 17 + ...e0c5508da110f54bc5f465f1892afdf851af0.json | 75 ++ ...067d7dee6a7108cb8a46a20f26236009481da.json | 15 + ...770e6341624e98117d21b9f01e68b4e0ce033.json | 14 + ...bd0de7e03c539ee046955543d9693551246f7.json | 14 + ...b9d2fafd03a59197477aafc915b0e34a00eb2.json | 50 ++ ...1e9905387a20bf4b137adf0a33274f820ddc2.json | 50 ++ ...451217ccb9f1bc35b1ad6e10d16bc19c41447.json | 44 ++ ...92736e7088ed69b543adbcfd3b3b7a4ece430.json | 16 + ...651d65b1135092067061e7f4050776b733b03.json | 15 + ...5ccb7d7071a80a3f5da59da8948c0106e959c.json | 16 + ...4f574024e641e73072586aca88847259e8cfc.json | 16 + ...4e44ef5a0f082bdde854900064325adc4dd77.json | 15 + ...5aeffac05016abc4dfed37d5ae2872e8da564.json | 16 + ...fbb1df4bef54003b01fa901bfa782b5f83342.json | 15 + ...e6e3ae6c5add6ca02414140adb724120a6800.json | 16 - ...677774aa237508d5610714efd2e9b8b93c7b8.json | 55 ++ ...0da43239c9a5aaea41c9aed7ed33a6219a534.json | 30 + ...f966d751a4dda554d8215eedb8f65be98e100.json | 17 + ...bceffac637548841897341672da427a9140fc.json | 26 + ...6156e26b352511f408fd6464303301383461f.json | 15 + ...f59c8cff67bb1ec3926680fb691cc3573738a.json | 16 - ...0e7bc1f0dbeca2e4f44ced05bae0ca5ca1039.json | 50 ++ ...3ab5a528b0468715238e17ffed7d75e9c0c5c.json | 25 + ...c90ac1c18b5df7aab6a143a328a6bddc6ad32.json | 25 + ...a74407b78378d66d8a089407998074059e79b.json | 33 + backend/ee-repo-ref.txt | 2 +- .../20250102145420_more_captures.down.sql | 14 + .../20250102145420_more_captures.up.sql | 65 ++ backend/windmill-api/openapi.yaml | 154 +++- backend/windmill-api/src/capture.rs | 545 ++++++++++++-- backend/windmill-api/src/flows.rs | 34 + backend/windmill-api/src/http_triggers.rs | 52 +- backend/windmill-api/src/kafka_triggers_ee.rs | 4 + backend/windmill-api/src/lib.rs | 5 +- backend/windmill-api/src/scripts.rs | 34 + .../windmill-api/src/websocket_triggers.rs | 693 
++++++++++++------ backend/windmill-api/src/workspaces_extra.rs | 11 + frontend/package-lock.json | 98 ++- frontend/package.json | 2 + frontend/src/lib/components/ArgInput.svelte | 2 + frontend/src/lib/components/Dev.svelte | 18 +- .../src/lib/components/FieldHeader.svelte | 10 +- .../src/lib/components/FlowBuilder.svelte | 16 +- .../lib/components/FlowPreviewContent.svelte | 13 - frontend/src/lib/components/Label.svelte | 21 +- .../src/lib/components/ResourceEditor.svelte | 5 + .../components/ResourceEditorDrawer.svelte | 26 +- .../src/lib/components/ResourcePicker.svelte | 13 +- frontend/src/lib/components/SchemaForm.svelte | 2 + .../src/lib/components/ScriptBuilder.svelte | 74 +- .../src/lib/components/ScriptEditor.svelte | 116 +-- frontend/src/lib/components/Section.svelte | 71 +- frontend/src/lib/components/Subsection.svelte | 55 ++ .../common/alert/ConnectionIndicator.svelte | 36 + .../common/button/AnimatedButton.svelte | 2 +- .../toggleButton-v2/ToggleButton.svelte | 2 +- .../components/details/ClipboardPanel.svelte | 8 +- .../details/CopyableCodeBlock.svelte | 27 + .../details/DetailPageLayout.svelte | 4 +- .../details/EmailTriggerConfigSection.svelte | 135 ++++ .../details/EmailTriggerPanel.svelte | 111 +-- .../lib/components/flows/FlowEditor.svelte | 2 +- .../flows/content/CapturePayload.svelte | 125 ---- .../flows/content/FlowEditorPanel.svelte | 32 +- .../components/flows/content/FlowInput.svelte | 15 +- .../flows/content/FlowPathViewer.svelte | 4 +- .../lib/components/flows/flowStateUtils.ts | 40 +- .../flows/map/FlowModuleSchemaMap.svelte | 38 +- .../renderers/triggers/TriggersWrapper.svelte | 2 +- .../components/meltComponents/Popover.svelte | 50 ++ .../components/scriptEditor/LogPanel.svelte | 340 ++++----- frontend/src/lib/components/triggers.ts | 42 +- .../components/triggers/CaptureButton.svelte | 81 ++ .../components/triggers/CaptureIcon.svelte | 17 + .../components/triggers/CaptureSection.svelte | 91 +++ 
.../components/triggers/CaptureTable.svelte | 245 +++++++ .../components/triggers/CaptureWrapper.svelte | 255 +++++++ .../triggers/KafkaTriggerEditor.svelte | 8 +- .../triggers/KafkaTriggerEditorInner.svelte | 153 ++-- .../KafkaTriggersConfigSection.svelte | 222 ++++++ .../triggers/KafkaTriggersPanel.svelte | 111 +-- .../components/triggers/RouteEditor.svelte | 8 +- .../triggers/RouteEditorConfigSection.svelte | 172 +++++ .../triggers/RouteEditorInner.svelte | 194 ++--- .../components/triggers/RoutesPanel.svelte | 124 ++-- .../components/triggers/TriggersEditor.svelte | 210 +++--- .../triggers/TriggersEditorSection.svelte | 118 +++ .../triggers/TriggersWrapper.svelte | 62 ++ .../triggers/WebhooksConfigSection.svelte | 434 +++++++++++ .../components/triggers/WebhooksPanel.svelte | 404 +--------- .../WebsocketEditorConfigSection.svelte | 184 +++++ .../triggers/WebsocketTriggerEditor.svelte | 8 +- .../WebsocketTriggerEditorInner.svelte | 162 +--- .../triggers/WebsocketTriggersPanel.svelte | 143 ++-- frontend/src/lib/script_helpers.ts | 10 +- frontend/src/lib/utils.ts | 4 +- frontend/src/routes/flows/dev/+page.svelte | 21 +- frontend/svelte.config.js | 9 +- frontend/tsconfig.json | 2 +- 104 files changed, 5437 insertions(+), 1971 deletions(-) create mode 100644 backend/.sqlx/query-031d0d70b0aff52feaad487bddb74e5ef0aaa2505facbea8c764003dfc8fffb1.json create mode 100644 backend/.sqlx/query-07da723ce5c9ee2d7c236e8eabe254c783fc34b617c8a9a95a0eb0cda535dab5.json delete mode 100644 backend/.sqlx/query-0a9a191273c735c41d56ea46a39ffca075a0550eada87df7162c5037164ad6bf.json create mode 100644 backend/.sqlx/query-0dbd664c906ee3c65856520c22f0828357b36bbcdcc151bd97605d7d7a0489e8.json create mode 100644 backend/.sqlx/query-203fa78d423ec5a8c5ff6166aed591b28cbf9ea8f61d379b84ee6e14c033035d.json create mode 100644 backend/.sqlx/query-2f440ab6083764b49e309c1f8dde0c5508da110f54bc5f465f1892afdf851af0.json create mode 100644 
backend/.sqlx/query-3b6ddfe4df620d5e34a01dbbc95067d7dee6a7108cb8a46a20f26236009481da.json create mode 100644 backend/.sqlx/query-3c9fc4d8579767f3ce7c3633fca770e6341624e98117d21b9f01e68b4e0ce033.json create mode 100644 backend/.sqlx/query-41e557e1b63b13c9fcc195901c0bd0de7e03c539ee046955543d9693551246f7.json create mode 100644 backend/.sqlx/query-438b8bbf49781fc362eecbdc99cb9d2fafd03a59197477aafc915b0e34a00eb2.json create mode 100644 backend/.sqlx/query-4f1bb3713bc52fb8cd4088947de1e9905387a20bf4b137adf0a33274f820ddc2.json create mode 100644 backend/.sqlx/query-71d51bbc35da7b9930e3ea3a634451217ccb9f1bc35b1ad6e10d16bc19c41447.json create mode 100644 backend/.sqlx/query-78b182167ba19d1dc103577bd3492736e7088ed69b543adbcfd3b3b7a4ece430.json create mode 100644 backend/.sqlx/query-87e0bee8b78facc62d4c225dc51651d65b1135092067061e7f4050776b733b03.json create mode 100644 backend/.sqlx/query-90ad280f6744937878b134142825ccb7d7071a80a3f5da59da8948c0106e959c.json create mode 100644 backend/.sqlx/query-90e74c28b417e73924ed337f6634f574024e641e73072586aca88847259e8cfc.json create mode 100644 backend/.sqlx/query-97942578df746c8c8103b403cfc4e44ef5a0f082bdde854900064325adc4dd77.json create mode 100644 backend/.sqlx/query-988b594f70ff7886985a0f90a095aeffac05016abc4dfed37d5ae2872e8da564.json create mode 100644 backend/.sqlx/query-a17b1b8d4f58c58c253e63ec4c2fbb1df4bef54003b01fa901bfa782b5f83342.json delete mode 100644 backend/.sqlx/query-b9468b9e16f55db11b33d8e9793e6e3ae6c5add6ca02414140adb724120a6800.json create mode 100644 backend/.sqlx/query-c223f8b7fa4ef1aa06e1ba2a56d677774aa237508d5610714efd2e9b8b93c7b8.json create mode 100644 backend/.sqlx/query-c5270ee815689e42b65df507b850da43239c9a5aaea41c9aed7ed33a6219a534.json create mode 100644 backend/.sqlx/query-d08f34000c3d96ccd0f44ca8520f966d751a4dda554d8215eedb8f65be98e100.json create mode 100644 backend/.sqlx/query-d9a6f75e4c4a1f61e55b313cc09bceffac637548841897341672da427a9140fc.json create mode 100644 
backend/.sqlx/query-de0735f0f5b59ebb72fd876f7f46156e26b352511f408fd6464303301383461f.json delete mode 100644 backend/.sqlx/query-e02b99525cb1f8737acfec86809f59c8cff67bb1ec3926680fb691cc3573738a.json create mode 100644 backend/.sqlx/query-e23e110e1f0438d21534fc4323e0e7bc1f0dbeca2e4f44ced05bae0ca5ca1039.json create mode 100644 backend/.sqlx/query-e86295e181a82823ffce8234d413ab5a528b0468715238e17ffed7d75e9c0c5c.json create mode 100644 backend/.sqlx/query-ee9adcbf82d3f62088a38ff65e8c90ac1c18b5df7aab6a143a328a6bddc6ad32.json create mode 100644 backend/.sqlx/query-ef299490c4674c4c76e18d84620a74407b78378d66d8a089407998074059e79b.json create mode 100644 backend/migrations/20250102145420_more_captures.down.sql create mode 100644 backend/migrations/20250102145420_more_captures.up.sql create mode 100644 frontend/src/lib/components/Subsection.svelte create mode 100644 frontend/src/lib/components/common/alert/ConnectionIndicator.svelte create mode 100644 frontend/src/lib/components/details/CopyableCodeBlock.svelte create mode 100644 frontend/src/lib/components/details/EmailTriggerConfigSection.svelte delete mode 100644 frontend/src/lib/components/flows/content/CapturePayload.svelte create mode 100644 frontend/src/lib/components/meltComponents/Popover.svelte create mode 100644 frontend/src/lib/components/triggers/CaptureButton.svelte create mode 100644 frontend/src/lib/components/triggers/CaptureIcon.svelte create mode 100644 frontend/src/lib/components/triggers/CaptureSection.svelte create mode 100644 frontend/src/lib/components/triggers/CaptureTable.svelte create mode 100644 frontend/src/lib/components/triggers/CaptureWrapper.svelte create mode 100644 frontend/src/lib/components/triggers/KafkaTriggersConfigSection.svelte create mode 100644 frontend/src/lib/components/triggers/RouteEditorConfigSection.svelte create mode 100644 frontend/src/lib/components/triggers/TriggersEditorSection.svelte create mode 100644 frontend/src/lib/components/triggers/TriggersWrapper.svelte create 
mode 100644 frontend/src/lib/components/triggers/WebhooksConfigSection.svelte create mode 100644 frontend/src/lib/components/triggers/WebsocketEditorConfigSection.svelte diff --git a/backend/.sqlx/query-031d0d70b0aff52feaad487bddb74e5ef0aaa2505facbea8c764003dfc8fffb1.json b/backend/.sqlx/query-031d0d70b0aff52feaad487bddb74e5ef0aaa2505facbea8c764003dfc8fffb1.json new file mode 100644 index 0000000000000..8fce6ae729d8d --- /dev/null +++ b/backend/.sqlx/query-031d0d70b0aff52feaad487bddb74e5ef0aaa2505facbea8c764003dfc8fffb1.json @@ -0,0 +1,26 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE capture_config SET last_server_ping = now(), error = $1 WHERE workspace_id = $2 AND path = $3 AND is_flow = $4 AND trigger_kind = 'websocket' AND server_id = $5 AND last_client_ping > NOW() - INTERVAL '10 seconds' RETURNING 1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "?column?", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Text", + "Text", + "Text", + "Bool", + "Text" + ] + }, + "nullable": [ + null + ] + }, + "hash": "031d0d70b0aff52feaad487bddb74e5ef0aaa2505facbea8c764003dfc8fffb1" +} diff --git a/backend/.sqlx/query-07da723ce5c9ee2d7c236e8eabe254c783fc34b617c8a9a95a0eb0cda535dab5.json b/backend/.sqlx/query-07da723ce5c9ee2d7c236e8eabe254c783fc34b617c8a9a95a0eb0cda535dab5.json new file mode 100644 index 0000000000000..a86748af87145 --- /dev/null +++ b/backend/.sqlx/query-07da723ce5c9ee2d7c236e8eabe254c783fc34b617c8a9a95a0eb0cda535dab5.json @@ -0,0 +1,33 @@ +{ + "db_name": "PostgreSQL", + "query": "INSERT INTO capture (workspace_id, path, is_flow, trigger_kind, payload, trigger_extra, created_by)\n VALUES ($1, $2, $3, $4, $5, $6, $7)", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Varchar", + "Varchar", + "Bool", + { + "Custom": { + "name": "trigger_kind", + "kind": { + "Enum": [ + "webhook", + "http", + "websocket", + "kafka", + "email" + ] + } + } + }, + "Jsonb", + "Jsonb", + "Varchar" + ] + }, + "nullable": [] + }, + 
"hash": "07da723ce5c9ee2d7c236e8eabe254c783fc34b617c8a9a95a0eb0cda535dab5" +} diff --git a/backend/.sqlx/query-0a9a191273c735c41d56ea46a39ffca075a0550eada87df7162c5037164ad6bf.json b/backend/.sqlx/query-0a9a191273c735c41d56ea46a39ffca075a0550eada87df7162c5037164ad6bf.json deleted file mode 100644 index de6e87bffd8ff..0000000000000 --- a/backend/.sqlx/query-0a9a191273c735c41d56ea46a39ffca075a0550eada87df7162c5037164ad6bf.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO capture\n (workspace_id, path, created_by)\n VALUES ($1, $2, $3)\n ON CONFLICT (workspace_id, path)\n DO UPDATE SET created_at = now()\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Varchar", - "Varchar", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "0a9a191273c735c41d56ea46a39ffca075a0550eada87df7162c5037164ad6bf" -} diff --git a/backend/.sqlx/query-0dbd664c906ee3c65856520c22f0828357b36bbcdcc151bd97605d7d7a0489e8.json b/backend/.sqlx/query-0dbd664c906ee3c65856520c22f0828357b36bbcdcc151bd97605d7d7a0489e8.json new file mode 100644 index 0000000000000..4b5bd41569870 --- /dev/null +++ b/backend/.sqlx/query-0dbd664c906ee3c65856520c22f0828357b36bbcdcc151bd97605d7d7a0489e8.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM capture_config WHERE path = $1 AND workspace_id = $2 AND is_flow IS TRUE", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Text" + ] + }, + "nullable": [] + }, + "hash": "0dbd664c906ee3c65856520c22f0828357b36bbcdcc151bd97605d7d7a0489e8" +} diff --git a/backend/.sqlx/query-203fa78d423ec5a8c5ff6166aed591b28cbf9ea8f61d379b84ee6e14c033035d.json b/backend/.sqlx/query-203fa78d423ec5a8c5ff6166aed591b28cbf9ea8f61d379b84ee6e14c033035d.json new file mode 100644 index 0000000000000..65b1b24efb7d8 --- /dev/null +++ b/backend/.sqlx/query-203fa78d423ec5a8c5ff6166aed591b28cbf9ea8f61d379b84ee6e14c033035d.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE 
capture_config SET error = $1, server_id = NULL, last_server_ping = NULL WHERE workspace_id = $2 AND path = $3 AND is_flow = $4 AND trigger_kind = 'kafka'", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Text", + "Text", + "Bool" + ] + }, + "nullable": [] + }, + "hash": "203fa78d423ec5a8c5ff6166aed591b28cbf9ea8f61d379b84ee6e14c033035d" +} diff --git a/backend/.sqlx/query-2f440ab6083764b49e309c1f8dde0c5508da110f54bc5f465f1892afdf851af0.json b/backend/.sqlx/query-2f440ab6083764b49e309c1f8dde0c5508da110f54bc5f465f1892afdf851af0.json new file mode 100644 index 0000000000000..7abe77803700e --- /dev/null +++ b/backend/.sqlx/query-2f440ab6083764b49e309c1f8dde0c5508da110f54bc5f465f1892afdf851af0.json @@ -0,0 +1,75 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT id, created_at, trigger_kind as \"trigger_kind: _\", payload as \"payload: _\", trigger_extra as \"trigger_extra: _\"\n FROM capture\n WHERE workspace_id = $1\n AND path = $2 AND is_flow = $3\n AND ($4::trigger_kind IS NULL OR trigger_kind = $4)\n ORDER BY created_at DESC", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 2, + "name": "trigger_kind: _", + "type_info": { + "Custom": { + "name": "trigger_kind", + "kind": { + "Enum": [ + "webhook", + "http", + "websocket", + "kafka", + "email" + ] + } + } + } + }, + { + "ordinal": 3, + "name": "payload: _", + "type_info": "Jsonb" + }, + { + "ordinal": 4, + "name": "trigger_extra: _", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Text", + "Text", + "Bool", + { + "Custom": { + "name": "trigger_kind", + "kind": { + "Enum": [ + "webhook", + "http", + "websocket", + "kafka", + "email" + ] + } + } + } + ] + }, + "nullable": [ + false, + false, + false, + false, + true + ] + }, + "hash": "2f440ab6083764b49e309c1f8dde0c5508da110f54bc5f465f1892afdf851af0" +} diff --git 
a/backend/.sqlx/query-3b6ddfe4df620d5e34a01dbbc95067d7dee6a7108cb8a46a20f26236009481da.json b/backend/.sqlx/query-3b6ddfe4df620d5e34a01dbbc95067d7dee6a7108cb8a46a20f26236009481da.json new file mode 100644 index 0000000000000..d0174168a9c8a --- /dev/null +++ b/backend/.sqlx/query-3b6ddfe4df620d5e34a01dbbc95067d7dee6a7108cb8a46a20f26236009481da.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM capture_config WHERE path = $1 AND workspace_id = $2 AND is_flow IS FALSE", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Text" + ] + }, + "nullable": [] + }, + "hash": "3b6ddfe4df620d5e34a01dbbc95067d7dee6a7108cb8a46a20f26236009481da" +} diff --git a/backend/.sqlx/query-3c9fc4d8579767f3ce7c3633fca770e6341624e98117d21b9f01e68b4e0ce033.json b/backend/.sqlx/query-3c9fc4d8579767f3ce7c3633fca770e6341624e98117d21b9f01e68b4e0ce033.json new file mode 100644 index 0000000000000..7b0d8aa4b0353 --- /dev/null +++ b/backend/.sqlx/query-3c9fc4d8579767f3ce7c3633fca770e6341624e98117d21b9f01e68b4e0ce033.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM capture_config WHERE workspace_id = $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [] + }, + "hash": "3c9fc4d8579767f3ce7c3633fca770e6341624e98117d21b9f01e68b4e0ce033" +} diff --git a/backend/.sqlx/query-41e557e1b63b13c9fcc195901c0bd0de7e03c539ee046955543d9693551246f7.json b/backend/.sqlx/query-41e557e1b63b13c9fcc195901c0bd0de7e03c539ee046955543d9693551246f7.json new file mode 100644 index 0000000000000..0df0167c42d2b --- /dev/null +++ b/backend/.sqlx/query-41e557e1b63b13c9fcc195901c0bd0de7e03c539ee046955543d9693551246f7.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM capture WHERE id = $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "41e557e1b63b13c9fcc195901c0bd0de7e03c539ee046955543d9693551246f7" +} diff --git 
a/backend/.sqlx/query-438b8bbf49781fc362eecbdc99cb9d2fafd03a59197477aafc915b0e34a00eb2.json b/backend/.sqlx/query-438b8bbf49781fc362eecbdc99cb9d2fafd03a59197477aafc915b0e34a00eb2.json new file mode 100644 index 0000000000000..a13521c31cfec --- /dev/null +++ b/backend/.sqlx/query-438b8bbf49781fc362eecbdc99cb9d2fafd03a59197477aafc915b0e34a00eb2.json @@ -0,0 +1,50 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT path, is_flow, workspace_id, trigger_config as \"trigger_config!: _\", owner, email FROM capture_config WHERE trigger_kind = 'kafka' AND last_client_ping > NOW() - INTERVAL '10 seconds' AND trigger_config IS NOT NULL AND (server_id IS NULL OR last_server_ping IS NULL OR last_server_ping < now() - interval '15 seconds')", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "path", + "type_info": "Varchar" + }, + { + "ordinal": 1, + "name": "is_flow", + "type_info": "Bool" + }, + { + "ordinal": 2, + "name": "workspace_id", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "trigger_config!: _", + "type_info": "Jsonb" + }, + { + "ordinal": 4, + "name": "owner", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "email", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false, + true, + false, + false + ] + }, + "hash": "438b8bbf49781fc362eecbdc99cb9d2fafd03a59197477aafc915b0e34a00eb2" +} diff --git a/backend/.sqlx/query-4f1bb3713bc52fb8cd4088947de1e9905387a20bf4b137adf0a33274f820ddc2.json b/backend/.sqlx/query-4f1bb3713bc52fb8cd4088947de1e9905387a20bf4b137adf0a33274f820ddc2.json new file mode 100644 index 0000000000000..7ab68b14b6cc1 --- /dev/null +++ b/backend/.sqlx/query-4f1bb3713bc52fb8cd4088947de1e9905387a20bf4b137adf0a33274f820ddc2.json @@ -0,0 +1,50 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT path, is_flow, workspace_id, trigger_config as \"trigger_config!: _\", owner, email FROM capture_config WHERE trigger_kind = 'websocket' AND last_client_ping > NOW() - 
INTERVAL '10 seconds' AND trigger_config IS NOT NULL AND (server_id IS NULL OR last_server_ping IS NULL OR last_server_ping < now() - interval '15 seconds')", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "path", + "type_info": "Varchar" + }, + { + "ordinal": 1, + "name": "is_flow", + "type_info": "Bool" + }, + { + "ordinal": 2, + "name": "workspace_id", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "trigger_config!: _", + "type_info": "Jsonb" + }, + { + "ordinal": 4, + "name": "owner", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "email", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false, + true, + false, + false + ] + }, + "hash": "4f1bb3713bc52fb8cd4088947de1e9905387a20bf4b137adf0a33274f820ddc2" +} diff --git a/backend/.sqlx/query-71d51bbc35da7b9930e3ea3a634451217ccb9f1bc35b1ad6e10d16bc19c41447.json b/backend/.sqlx/query-71d51bbc35da7b9930e3ea3a634451217ccb9f1bc35b1ad6e10d16bc19c41447.json new file mode 100644 index 0000000000000..1f23430419f27 --- /dev/null +++ b/backend/.sqlx/query-71d51bbc35da7b9930e3ea3a634451217ccb9f1bc35b1ad6e10d16bc19c41447.json @@ -0,0 +1,44 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT owner, email\n FROM capture_config\n WHERE workspace_id = $1 AND path = $2 AND is_flow = $3 AND trigger_kind = $4 AND last_client_ping > NOW() - INTERVAL '10 seconds'", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "owner", + "type_info": "Varchar" + }, + { + "ordinal": 1, + "name": "email", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [ + "Text", + "Text", + "Bool", + { + "Custom": { + "name": "trigger_kind", + "kind": { + "Enum": [ + "webhook", + "http", + "websocket", + "kafka", + "email" + ] + } + } + } + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "71d51bbc35da7b9930e3ea3a634451217ccb9f1bc35b1ad6e10d16bc19c41447" +} diff --git 
a/backend/.sqlx/query-78b182167ba19d1dc103577bd3492736e7088ed69b543adbcfd3b3b7a4ece430.json b/backend/.sqlx/query-78b182167ba19d1dc103577bd3492736e7088ed69b543adbcfd3b3b7a4ece430.json new file mode 100644 index 0000000000000..874bbb4ea98d4 --- /dev/null +++ b/backend/.sqlx/query-78b182167ba19d1dc103577bd3492736e7088ed69b543adbcfd3b3b7a4ece430.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE capture_config SET path = $1 WHERE path = $2 AND workspace_id = $3 AND is_flow IS FALSE", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Varchar", + "Text", + "Text" + ] + }, + "nullable": [] + }, + "hash": "78b182167ba19d1dc103577bd3492736e7088ed69b543adbcfd3b3b7a4ece430" +} diff --git a/backend/.sqlx/query-87e0bee8b78facc62d4c225dc51651d65b1135092067061e7f4050776b733b03.json b/backend/.sqlx/query-87e0bee8b78facc62d4c225dc51651d65b1135092067061e7f4050776b733b03.json new file mode 100644 index 0000000000000..65792f3a2d5e6 --- /dev/null +++ b/backend/.sqlx/query-87e0bee8b78facc62d4c225dc51651d65b1135092067061e7f4050776b733b03.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM capture WHERE path = $1 AND workspace_id = $2 AND is_flow IS TRUE", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Text" + ] + }, + "nullable": [] + }, + "hash": "87e0bee8b78facc62d4c225dc51651d65b1135092067061e7f4050776b733b03" +} diff --git a/backend/.sqlx/query-90ad280f6744937878b134142825ccb7d7071a80a3f5da59da8948c0106e959c.json b/backend/.sqlx/query-90ad280f6744937878b134142825ccb7d7071a80a3f5da59da8948c0106e959c.json new file mode 100644 index 0000000000000..782ba4002504f --- /dev/null +++ b/backend/.sqlx/query-90ad280f6744937878b134142825ccb7d7071a80a3f5da59da8948c0106e959c.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE capture_config SET path = $1 WHERE path = $2 AND workspace_id = $3 AND is_flow IS TRUE", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Varchar", + 
"Text", + "Text" + ] + }, + "nullable": [] + }, + "hash": "90ad280f6744937878b134142825ccb7d7071a80a3f5da59da8948c0106e959c" +} diff --git a/backend/.sqlx/query-90e74c28b417e73924ed337f6634f574024e641e73072586aca88847259e8cfc.json b/backend/.sqlx/query-90e74c28b417e73924ed337f6634f574024e641e73072586aca88847259e8cfc.json new file mode 100644 index 0000000000000..22fe84b45268d --- /dev/null +++ b/backend/.sqlx/query-90e74c28b417e73924ed337f6634f574024e641e73072586aca88847259e8cfc.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE capture SET path = $1 WHERE path = $2 AND workspace_id = $3 AND is_flow IS FALSE", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Varchar", + "Text", + "Text" + ] + }, + "nullable": [] + }, + "hash": "90e74c28b417e73924ed337f6634f574024e641e73072586aca88847259e8cfc" +} diff --git a/backend/.sqlx/query-97942578df746c8c8103b403cfc4e44ef5a0f082bdde854900064325adc4dd77.json b/backend/.sqlx/query-97942578df746c8c8103b403cfc4e44ef5a0f082bdde854900064325adc4dd77.json new file mode 100644 index 0000000000000..2014d9e6e495b --- /dev/null +++ b/backend/.sqlx/query-97942578df746c8c8103b403cfc4e44ef5a0f082bdde854900064325adc4dd77.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM capture\n WHERE workspace_id = $1\n AND created_at <=\n (\n SELECT created_at\n FROM capture\n WHERE workspace_id = $1\n ORDER BY created_at DESC\n OFFSET $2\n LIMIT 1\n )", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "97942578df746c8c8103b403cfc4e44ef5a0f082bdde854900064325adc4dd77" +} diff --git a/backend/.sqlx/query-988b594f70ff7886985a0f90a095aeffac05016abc4dfed37d5ae2872e8da564.json b/backend/.sqlx/query-988b594f70ff7886985a0f90a095aeffac05016abc4dfed37d5ae2872e8da564.json new file mode 100644 index 0000000000000..9dd307094df0d --- /dev/null +++ 
b/backend/.sqlx/query-988b594f70ff7886985a0f90a095aeffac05016abc4dfed37d5ae2872e8da564.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE capture SET path = $1 WHERE path = $2 AND workspace_id = $3 AND is_flow IS TRUE", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Varchar", + "Text", + "Text" + ] + }, + "nullable": [] + }, + "hash": "988b594f70ff7886985a0f90a095aeffac05016abc4dfed37d5ae2872e8da564" +} diff --git a/backend/.sqlx/query-a17b1b8d4f58c58c253e63ec4c2fbb1df4bef54003b01fa901bfa782b5f83342.json b/backend/.sqlx/query-a17b1b8d4f58c58c253e63ec4c2fbb1df4bef54003b01fa901bfa782b5f83342.json new file mode 100644 index 0000000000000..a4d3cdb8290bf --- /dev/null +++ b/backend/.sqlx/query-a17b1b8d4f58c58c253e63ec4c2fbb1df4bef54003b01fa901bfa782b5f83342.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE capture_config SET workspace_id = $1 WHERE workspace_id = $2", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Varchar", + "Text" + ] + }, + "nullable": [] + }, + "hash": "a17b1b8d4f58c58c253e63ec4c2fbb1df4bef54003b01fa901bfa782b5f83342" +} diff --git a/backend/.sqlx/query-b9468b9e16f55db11b33d8e9793e6e3ae6c5add6ca02414140adb724120a6800.json b/backend/.sqlx/query-b9468b9e16f55db11b33d8e9793e6e3ae6c5add6ca02414140adb724120a6800.json deleted file mode 100644 index 8f9f3dfc89c7d..0000000000000 --- a/backend/.sqlx/query-b9468b9e16f55db11b33d8e9793e6e3ae6c5add6ca02414140adb724120a6800.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE capture\n SET payload = $3\n WHERE workspace_id = $1\n AND path = $2\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Text", - "Jsonb" - ] - }, - "nullable": [] - }, - "hash": "b9468b9e16f55db11b33d8e9793e6e3ae6c5add6ca02414140adb724120a6800" -} diff --git a/backend/.sqlx/query-c223f8b7fa4ef1aa06e1ba2a56d677774aa237508d5610714efd2e9b8b93c7b8.json 
b/backend/.sqlx/query-c223f8b7fa4ef1aa06e1ba2a56d677774aa237508d5610714efd2e9b8b93c7b8.json new file mode 100644 index 0000000000000..211e8fa8abd89 --- /dev/null +++ b/backend/.sqlx/query-c223f8b7fa4ef1aa06e1ba2a56d677774aa237508d5610714efd2e9b8b93c7b8.json @@ -0,0 +1,55 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT trigger_config as \"trigger_config: _\", trigger_kind as \"trigger_kind: _\", error, last_server_ping\n FROM capture_config\n WHERE workspace_id = $1 AND path = $2 AND is_flow = $3", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "trigger_config: _", + "type_info": "Jsonb" + }, + { + "ordinal": 1, + "name": "trigger_kind: _", + "type_info": { + "Custom": { + "name": "trigger_kind", + "kind": { + "Enum": [ + "webhook", + "http", + "websocket", + "kafka", + "email" + ] + } + } + } + }, + { + "ordinal": 2, + "name": "error", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "last_server_ping", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text", + "Text", + "Bool" + ] + }, + "nullable": [ + true, + false, + true, + true + ] + }, + "hash": "c223f8b7fa4ef1aa06e1ba2a56d677774aa237508d5610714efd2e9b8b93c7b8" +} diff --git a/backend/.sqlx/query-c5270ee815689e42b65df507b850da43239c9a5aaea41c9aed7ed33a6219a534.json b/backend/.sqlx/query-c5270ee815689e42b65df507b850da43239c9a5aaea41c9aed7ed33a6219a534.json new file mode 100644 index 0000000000000..52bb869ca651b --- /dev/null +++ b/backend/.sqlx/query-c5270ee815689e42b65df507b850da43239c9a5aaea41c9aed7ed33a6219a534.json @@ -0,0 +1,30 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE capture_config SET last_client_ping = now() WHERE workspace_id = $1 AND path = $2 AND is_flow = $3 AND trigger_kind = $4", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Text", + "Bool", + { + "Custom": { + "name": "trigger_kind", + "kind": { + "Enum": [ + "webhook", + "http", + "websocket", + "kafka", + "email" + ] + } + } + } + ] + }, + "nullable": [] + 
}, + "hash": "c5270ee815689e42b65df507b850da43239c9a5aaea41c9aed7ed33a6219a534" +} diff --git a/backend/.sqlx/query-d08f34000c3d96ccd0f44ca8520f966d751a4dda554d8215eedb8f65be98e100.json b/backend/.sqlx/query-d08f34000c3d96ccd0f44ca8520f966d751a4dda554d8215eedb8f65be98e100.json new file mode 100644 index 0000000000000..431ffc04a24ba --- /dev/null +++ b/backend/.sqlx/query-d08f34000c3d96ccd0f44ca8520f966d751a4dda554d8215eedb8f65be98e100.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE capture_config SET error = $1, server_id = NULL, last_server_ping = NULL WHERE workspace_id = $2 AND path = $3 AND is_flow = $4 AND trigger_kind = 'websocket'", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Text", + "Text", + "Bool" + ] + }, + "nullable": [] + }, + "hash": "d08f34000c3d96ccd0f44ca8520f966d751a4dda554d8215eedb8f65be98e100" +} diff --git a/backend/.sqlx/query-d9a6f75e4c4a1f61e55b313cc09bceffac637548841897341672da427a9140fc.json b/backend/.sqlx/query-d9a6f75e4c4a1f61e55b313cc09bceffac637548841897341672da427a9140fc.json new file mode 100644 index 0000000000000..3ccfcc95820d0 --- /dev/null +++ b/backend/.sqlx/query-d9a6f75e4c4a1f61e55b313cc09bceffac637548841897341672da427a9140fc.json @@ -0,0 +1,26 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE capture_config SET last_server_ping = now(), error = $1 WHERE workspace_id = $2 AND path = $3 AND is_flow = $4 AND trigger_kind = 'kafka' AND server_id = $5 AND last_client_ping > NOW() - INTERVAL '10 seconds' RETURNING 1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "?column?", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Text", + "Text", + "Text", + "Bool", + "Text" + ] + }, + "nullable": [ + null + ] + }, + "hash": "d9a6f75e4c4a1f61e55b313cc09bceffac637548841897341672da427a9140fc" +} diff --git a/backend/.sqlx/query-de0735f0f5b59ebb72fd876f7f46156e26b352511f408fd6464303301383461f.json 
b/backend/.sqlx/query-de0735f0f5b59ebb72fd876f7f46156e26b352511f408fd6464303301383461f.json new file mode 100644 index 0000000000000..b9c8f42f29a1f --- /dev/null +++ b/backend/.sqlx/query-de0735f0f5b59ebb72fd876f7f46156e26b352511f408fd6464303301383461f.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM capture WHERE path = $1 AND workspace_id = $2 AND is_flow IS FALSE", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Text" + ] + }, + "nullable": [] + }, + "hash": "de0735f0f5b59ebb72fd876f7f46156e26b352511f408fd6464303301383461f" +} diff --git a/backend/.sqlx/query-e02b99525cb1f8737acfec86809f59c8cff67bb1ec3926680fb691cc3573738a.json b/backend/.sqlx/query-e02b99525cb1f8737acfec86809f59c8cff67bb1ec3926680fb691cc3573738a.json deleted file mode 100644 index d600d8069a8ba..0000000000000 --- a/backend/.sqlx/query-e02b99525cb1f8737acfec86809f59c8cff67bb1ec3926680fb691cc3573738a.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n DELETE FROM capture\n WHERE workspace_id = $1\n AND created_by = $2\n AND created_at <=\n ( SELECT created_at\n FROM capture\n WHERE workspace_id = $1\n AND created_by = $2\n ORDER BY created_at DESC\n OFFSET $3\n LIMIT 1 )\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Text", - "Int8" - ] - }, - "nullable": [] - }, - "hash": "e02b99525cb1f8737acfec86809f59c8cff67bb1ec3926680fb691cc3573738a" -} diff --git a/backend/.sqlx/query-e23e110e1f0438d21534fc4323e0e7bc1f0dbeca2e4f44ced05bae0ca5ca1039.json b/backend/.sqlx/query-e23e110e1f0438d21534fc4323e0e7bc1f0dbeca2e4f44ced05bae0ca5ca1039.json new file mode 100644 index 0000000000000..d1c5a4b4b919e --- /dev/null +++ b/backend/.sqlx/query-e23e110e1f0438d21534fc4323e0e7bc1f0dbeca2e4f44ced05bae0ca5ca1039.json @@ -0,0 +1,50 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT trigger_config as \"trigger_config: _\", owner, email\n FROM capture_config\n WHERE workspace_id = $1 AND path = $2 AND 
is_flow = $3 AND trigger_kind = $4 AND last_client_ping > NOW() - INTERVAL '10 seconds'", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "trigger_config: _", + "type_info": "Jsonb" + }, + { + "ordinal": 1, + "name": "owner", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "email", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [ + "Text", + "Text", + "Bool", + { + "Custom": { + "name": "trigger_kind", + "kind": { + "Enum": [ + "webhook", + "http", + "websocket", + "kafka", + "email" + ] + } + } + } + ] + }, + "nullable": [ + true, + false, + false + ] + }, + "hash": "e23e110e1f0438d21534fc4323e0e7bc1f0dbeca2e4f44ced05bae0ca5ca1039" +} diff --git a/backend/.sqlx/query-e86295e181a82823ffce8234d413ab5a528b0468715238e17ffed7d75e9c0c5c.json b/backend/.sqlx/query-e86295e181a82823ffce8234d413ab5a528b0468715238e17ffed7d75e9c0c5c.json new file mode 100644 index 0000000000000..42f6b47554523 --- /dev/null +++ b/backend/.sqlx/query-e86295e181a82823ffce8234d413ab5a528b0468715238e17ffed7d75e9c0c5c.json @@ -0,0 +1,25 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE capture_config SET server_id = $1, last_server_ping = now() WHERE last_client_ping > NOW() - INTERVAL '10 seconds' AND workspace_id = $2 AND path = $3 AND is_flow = $4 AND trigger_kind = 'websocket' AND (server_id IS NULL OR last_server_ping IS NULL OR last_server_ping < now() - interval '15 seconds') RETURNING true", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "?column?", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Varchar", + "Text", + "Text", + "Bool" + ] + }, + "nullable": [ + null + ] + }, + "hash": "e86295e181a82823ffce8234d413ab5a528b0468715238e17ffed7d75e9c0c5c" +} diff --git a/backend/.sqlx/query-ee9adcbf82d3f62088a38ff65e8c90ac1c18b5df7aab6a143a328a6bddc6ad32.json b/backend/.sqlx/query-ee9adcbf82d3f62088a38ff65e8c90ac1c18b5df7aab6a143a328a6bddc6ad32.json new file mode 100644 index 0000000000000..1c1179b188463 --- /dev/null +++ 
b/backend/.sqlx/query-ee9adcbf82d3f62088a38ff65e8c90ac1c18b5df7aab6a143a328a6bddc6ad32.json @@ -0,0 +1,25 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE capture_config SET server_id = $1, last_server_ping = now() WHERE last_client_ping > NOW() - INTERVAL '10 seconds' AND workspace_id = $2 AND path = $3 AND is_flow = $4 AND trigger_kind = 'kafka' AND (server_id IS NULL OR last_server_ping IS NULL OR last_server_ping < now() - interval '15 seconds') RETURNING true", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "?column?", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Varchar", + "Text", + "Text", + "Bool" + ] + }, + "nullable": [ + null + ] + }, + "hash": "ee9adcbf82d3f62088a38ff65e8c90ac1c18b5df7aab6a143a328a6bddc6ad32" +} diff --git a/backend/.sqlx/query-ef299490c4674c4c76e18d84620a74407b78378d66d8a089407998074059e79b.json b/backend/.sqlx/query-ef299490c4674c4c76e18d84620a74407b78378d66d8a089407998074059e79b.json new file mode 100644 index 0000000000000..a3301cfb9a7c9 --- /dev/null +++ b/backend/.sqlx/query-ef299490c4674c4c76e18d84620a74407b78378d66d8a089407998074059e79b.json @@ -0,0 +1,33 @@ +{ + "db_name": "PostgreSQL", + "query": "INSERT INTO capture_config\n (workspace_id, path, is_flow, trigger_kind, trigger_config, owner, email)\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n ON CONFLICT (workspace_id, path, is_flow, trigger_kind)\n DO UPDATE SET trigger_config = $5, owner = $6, email = $7, server_id = NULL, last_server_ping = NULL, error = NULL", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Varchar", + "Varchar", + "Bool", + { + "Custom": { + "name": "trigger_kind", + "kind": { + "Enum": [ + "webhook", + "http", + "websocket", + "kafka", + "email" + ] + } + } + }, + "Jsonb", + "Varchar", + "Varchar" + ] + }, + "nullable": [] + }, + "hash": "ef299490c4674c4c76e18d84620a74407b78378d66d8a089407998074059e79b" +} diff --git a/backend/ee-repo-ref.txt b/backend/ee-repo-ref.txt index d1a043fc40574..252d770494ff0 
100644 --- a/backend/ee-repo-ref.txt +++ b/backend/ee-repo-ref.txt @@ -1 +1 @@ -a1094bec38924de76936392b1148829b02628174 \ No newline at end of file +e9bb21fc9d651e4302f9c3b7b3e8f35e00679e4b \ No newline at end of file diff --git a/backend/migrations/20250102145420_more_captures.down.sql b/backend/migrations/20250102145420_more_captures.down.sql new file mode 100644 index 0000000000000..0a83206aa6747 --- /dev/null +++ b/backend/migrations/20250102145420_more_captures.down.sql @@ -0,0 +1,14 @@ +-- Add down migration script here +DROP TABLE capture_config; +DELETE FROM capture; +DROP POLICY see_from_allowed_runnables ON capture; +ALTER TABLE capture DROP CONSTRAINT capture_pkey; +ALTER TABLE capture DROP COLUMN is_flow, DROP COLUMN trigger_kind, DROP COLUMN trigger_extra, DROP COLUMN id; +ALTER TABLE capture ADD CONSTRAINT capture_pkey PRIMARY KEY (workspace_id, path); +DROP TYPE TRIGGER_KIND; + +CREATE POLICY see_folder_extra_perms_user ON capture FOR ALL TO windmill_user +USING (SPLIT_PART(capture.path, '/', 1) = 'f' AND SPLIT_PART(capture.path, '/', 2) = any(regexp_split_to_array(current_setting('session.folders_read'), ',')::text[])) +WITH CHECK (SPLIT_PART(capture.path, '/', 1) = 'f' AND SPLIT_PART(capture.path, '/', 2) = any(regexp_split_to_array(current_setting('session.folders_write'), ',')::text[])); +CREATE POLICY see_member ON public.capture TO windmill_user USING (((split_part((path)::text, '/'::text, 1) = 'g'::text) AND (split_part((path)::text, '/'::text, 2) = ANY (regexp_split_to_array(current_setting('session.groups'::text), ','::text))))); +CREATE POLICY see_own ON public.capture TO windmill_user USING (((split_part((path)::text, '/'::text, 1) = 'u'::text) AND (split_part((path)::text, '/'::text, 2) = current_setting('session.user'::text)))); diff --git a/backend/migrations/20250102145420_more_captures.up.sql b/backend/migrations/20250102145420_more_captures.up.sql new file mode 100644 index 0000000000000..21a15cec57ee9 --- /dev/null +++ 
b/backend/migrations/20250102145420_more_captures.up.sql @@ -0,0 +1,65 @@ +-- Add up migration script here +CREATE TYPE TRIGGER_KIND AS ENUM ('webhook', 'http', 'websocket', 'kafka', 'email'); +ALTER TABLE capture ADD COLUMN is_flow BOOLEAN NOT NULL DEFAULT TRUE, ADD COLUMN trigger_kind TRIGGER_KIND NOT NULL DEFAULT 'webhook', ADD COLUMN trigger_extra JSONB; +ALTER TABLE capture ALTER COLUMN is_flow DROP DEFAULT, ALTER COLUMN trigger_kind DROP DEFAULT; +ALTER TABLE capture DROP CONSTRAINT capture_pkey; +ALTER TABLE capture ADD COLUMN id BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY; + +DROP POLICY see_own ON capture; +DROP POLICY see_member ON capture; +DROP POLICY see_folder_extra_perms_user ON capture; + +CREATE POLICY see_from_allowed_runnables ON capture FOR ALL TO windmill_user +USING ( + (capture.is_flow AND EXISTS ( + SELECT 1 + FROM flow + WHERE flow.workspace_id = capture.workspace_id + AND flow.path = capture.path + )) + OR (NOT capture.is_flow AND EXISTS ( + SELECT 1 + FROM script + WHERE script.workspace_id = capture.workspace_id + AND script.path = capture.path + )) +); + + +CREATE TABLE capture_config ( + workspace_id VARCHAR(50) NOT NULL, + path VARCHAR(255) NOT NULL, + is_flow BOOLEAN NOT NULL, + trigger_kind TRIGGER_KIND NOT NULL, + trigger_config JSONB NULL, + owner VARCHAR(50) NOT NULL, + email VARCHAR(255) NOT NULL, + server_id VARCHAR(50) NULL, + last_client_ping TIMESTAMPTZ NULL, + last_server_ping TIMESTAMPTZ NULL, + error TEXT NULL, + PRIMARY KEY (workspace_id, path, is_flow, trigger_kind), + FOREIGN KEY (workspace_id) REFERENCES workspace(id) ON DELETE CASCADE +); + +ALTER TABLE capture_config ENABLE ROW LEVEL SECURITY; + +CREATE POLICY see_from_allowed_runnables ON capture_config FOR ALL TO windmill_user +USING ( + (capture_config.is_flow AND EXISTS ( + SELECT 1 + FROM flow + WHERE flow.workspace_id = capture_config.workspace_id + AND flow.path = capture_config.path + )) + OR (NOT capture_config.is_flow AND EXISTS ( + SELECT 1 + FROM 
script + WHERE script.workspace_id = capture_config.workspace_id + AND script.path = capture_config.path + )) +); + + +GRANT ALL ON capture_config TO windmill_user; +GRANT ALL ON capture_config TO windmill_admin; diff --git a/backend/windmill-api/openapi.yaml b/backend/windmill-api/openapi.yaml index d1ea635ded569..31318d96506ce 100644 --- a/backend/windmill-api/openapi.yaml +++ b/backend/windmill-api/openapi.yaml @@ -8971,47 +8971,119 @@ paths: schema: type: string - /w/{workspace}/capture_u/{path}: + /w/{workspace}/capture/set_config: post: - summary: update flow preview capture - operationId: updateCapture + summary: set capture config + operationId: setCaptureConfig tags: - capture parameters: - $ref: "#/components/parameters/WorkspaceId" + requestBody: + description: capture config + required: true + content: + application/json: + schema: + type: object + properties: + trigger_kind: + $ref: "#/components/schemas/CaptureTriggerKind" + path: + type: string + is_flow: + type: boolean + trigger_config: + type: object + required: + - trigger_kind + - path + - is_flow + responses: + "200": + description: capture config set + + + /w/{workspace}/capture/ping_config/{trigger_kind}/{runnable_kind}/{path}: + post: + summary: ping capture config + operationId: pingCaptureConfig + tags: + - capture + parameters: + - $ref: "#/components/parameters/WorkspaceId" + - name: trigger_kind + in: path + required: true + schema: + $ref: "#/components/schemas/CaptureTriggerKind" + - $ref: "#/components/parameters/RunnableKind" - $ref: "#/components/parameters/Path" responses: - "204": - description: flow preview captured + "200": + description: capture config pinged - /w/{workspace}/capture/{path}: - put: - summary: create flow preview capture - operationId: createCapture + /w/{workspace}/capture/get_configs/{runnable_kind}/{path}: + get: + summary: get capture configs for a script or flow + operationId: getCaptureConfigs tags: - capture parameters: - $ref: 
"#/components/parameters/WorkspaceId" + - $ref: "#/components/parameters/RunnableKind" - $ref: "#/components/parameters/Path" responses: - "201": - description: flow preview capture created + "200": + description: capture configs for a script or flow + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/CaptureConfig" + + /w/{workspace}/capture/list/{runnable_kind}/{path}: get: - summary: get flow preview capture - operationId: getCapture + summary: list captures for a script or flow + operationId: listCaptures tags: - capture parameters: - $ref: "#/components/parameters/WorkspaceId" + - $ref: "#/components/parameters/RunnableKind" - $ref: "#/components/parameters/Path" + - name: trigger_kind + in: query + schema: + $ref: "#/components/schemas/CaptureTriggerKind" responses: "200": - description: captured flow preview + description: list of captures for a script or flow content: application/json: - schema: {} - "404": - description: capture does not exist for this flow + schema: + type: array + items: + $ref: "#/components/schemas/Capture" + + + /w/{workspace}/capture/{id}: + delete: + summary: delete a capture + operationId: deleteCapture + tags: + - capture + parameters: + - $ref: "#/components/parameters/WorkspaceId" + - name: id + in: path + required: true + schema: + type: integer + responses: + "200": + description: capture deleted /w/{workspace}/favorites/star: post: @@ -10668,6 +10740,13 @@ components: required: true schema: type: string + RunnableKind: + name: runnable_kind + in: path + required: true + schema: + type: string + enum: [script, flow] schemas: $ref: "../../openflow.openapi.yaml#/components/schemas" @@ -12351,8 +12430,6 @@ components: - workspace_id - enabled - filters - - initial_messages - - url_runnable_args NewWebsocketTrigger: type: object @@ -12391,8 +12468,6 @@ components: - url - is_flow - filters - - initial_messages - - url_runnable_args EditWebsocketTrigger: type: object @@ -12429,8 +12504,6 @@ 
components: - url - is_flow - filters - - initial_messages - - url_runnable_args WebsocketTriggerInitialMessage: anyOf: @@ -13525,3 +13598,38 @@ components: type: string nullable: true description: Workspace id if the alert is in the scope of a workspace + + CaptureTriggerKind: + type: string + enum: [webhook, http, websocket, kafka, email] + + Capture: + type: object + properties: + trigger_kind: + $ref: "#/components/schemas/CaptureTriggerKind" + payload: {} + trigger_extra: {} + id: + type: integer + created_at: + type: string + format: date-time + required: + - trigger_kind + - payload + - id + - created_at + CaptureConfig: + type: object + properties: + trigger_config: {} + trigger_kind: + $ref: "#/components/schemas/CaptureTriggerKind" + error: + type: string + last_server_ping: + type: string + format: date-time + required: + - trigger_kind diff --git a/backend/windmill-api/src/capture.rs b/backend/windmill-api/src/capture.rs index 7043a5c58aafd..742c2b407041d 100644 --- a/backend/windmill-api/src/capture.rs +++ b/backend/windmill-api/src/capture.rs @@ -7,138 +7,537 @@ */ use axum::{ - extract::{Extension, Path}, - routing::{get, post, put}, - Router, + extract::{Extension, Path, Query}, + routing::{delete, get, head, post}, + Json, Router, }; +#[cfg(feature = "http_trigger")] +use http::HeaderMap; use hyper::StatusCode; -use sqlx::types::Json; +#[cfg(feature = "http_trigger")] +use serde::de::DeserializeOwned; +use serde::{Deserialize, Serialize}; +use serde_json::value::RawValue; +use sqlx::types::Json as SqlxJson; +#[cfg(feature = "http_trigger")] +use std::collections::HashMap; +use std::fmt; +#[cfg(feature = "http_trigger")] +use windmill_common::error::Error; use windmill_common::{ db::UserDB, error::{JsonResult, Result}, utils::{not_found_if_none, StripPath}, + worker::{to_raw_value, CLOUD_HOSTED}, }; -use windmill_queue::PushArgs; +use windmill_queue::{PushArgs, PushArgsOwned}; +#[cfg(feature = "http_trigger")] +use 
crate::http_triggers::{build_http_trigger_extra, HttpMethod}; +#[cfg(all(feature = "enterprise", feature = "kafka"))] +use crate::kafka_triggers_ee::KafkaResourceSecurity; use crate::{ args::WebhookArgs, db::{ApiAuthed, DB}, + users::fetch_api_authed, }; -const KEEP_LAST: i64 = 8; +const KEEP_LAST: i64 = 20; pub fn workspaced_service() -> Router { Router::new() - .route("/*path", put(new_payload)) - .route("/*path", get(get_payload)) + .route("/set_config", post(set_config)) + .route( + "/ping_config/:trigger_kind/:runnable_kind/*path", + post(ping_config), + ) + .route("/get_configs/:runnable_kind/*path", get(get_configs)) + .route("/list/:runnable_kind/*path", get(list_captures)) + .route("/:id", delete(delete_capture)) } -pub fn global_service() -> Router { - Router::new().route("/*path", post(update_payload)) +pub fn workspaced_unauthed_service() -> Router { + let router = Router::new().route( + "/webhook/:runnable_kind/*path", + head(|| async {}).post(webhook_payload), + ); + + #[cfg(feature = "http_trigger")] + { + router.route("/http/:runnable_kind/:path/*route_path", { + head(|| async {}).fallback(http_payload) + }) + } + + #[cfg(not(feature = "http_trigger"))] + { + router + } +} + +#[derive(sqlx::Type, Serialize, Deserialize)] +#[sqlx(type_name = "TRIGGER_KIND", rename_all = "lowercase")] +#[serde(rename_all = "lowercase")] +pub enum TriggerKind { + Webhook, + Http, + Websocket, + Kafka, + Email, +} + +impl fmt::Display for TriggerKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let s = match self { + TriggerKind::Webhook => "webhook", + TriggerKind::Http => "http", + TriggerKind::Websocket => "websocket", + TriggerKind::Kafka => "kafka", + TriggerKind::Email => "email", + }; + write!(f, "{}", s) + } +} + +#[cfg(feature = "http_trigger")] +#[derive(Serialize, Deserialize)] +struct HttpTriggerConfig { + route_path: String, + http_method: HttpMethod, +} + +#[cfg(all(feature = "enterprise", feature = "kafka"))] +#[derive(Serialize, 
Deserialize)] +#[serde(untagged)] +pub enum KafkaTriggerConfigConnection { + Resource { kafka_resource_path: String }, + Static { brokers: Vec, security: KafkaResourceSecurity }, +} + +#[cfg(all(feature = "enterprise", feature = "kafka"))] +#[derive(Serialize, Deserialize)] +pub struct KafkaTriggerConfig { + #[serde(flatten)] + pub connection: KafkaTriggerConfigConnection, + pub topics: Vec, + pub group_id: String, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct WebsocketTriggerConfig { + pub url: String, + // have to use Value because RawValue is not supported inside untagged + pub url_runnable_args: Option, +} + +#[derive(Serialize, Deserialize)] +#[serde(untagged)] +enum TriggerConfig { + #[cfg(feature = "http_trigger")] + Http(HttpTriggerConfig), + Websocket(WebsocketTriggerConfig), + #[cfg(all(feature = "enterprise", feature = "kafka"))] + Kafka(KafkaTriggerConfig), +} + +#[derive(Serialize, Deserialize)] +struct NewCaptureConfig { + trigger_kind: TriggerKind, + path: String, + is_flow: bool, + trigger_config: Option, +} + +#[derive(Serialize, Deserialize)] +struct CaptureConfig { + trigger_config: Option>>, + trigger_kind: TriggerKind, + error: Option, + last_server_ping: Option>, } -pub async fn new_payload( +async fn get_configs( authed: ApiAuthed, Extension(user_db): Extension, - Path((w_id, path)): Path<(String, StripPath)>, -) -> Result { + Path((w_id, runnable_kind, path)): Path<(String, RunnableKind, StripPath)>, +) -> JsonResult> { let mut tx = user_db.begin(&authed).await?; - sqlx::query!( - " - INSERT INTO capture - (workspace_id, path, created_by) - VALUES ($1, $2, $3) - ON CONFLICT (workspace_id, path) - DO UPDATE SET created_at = now() - ", + let configs = sqlx::query_as!( + CaptureConfig, + r#"SELECT trigger_config as "trigger_config: _", trigger_kind as "trigger_kind: _", error, last_server_ping + FROM capture_config + WHERE workspace_id = $1 AND path = $2 AND is_flow = $3"#, &w_id, &path.to_path(), - &authed.username, + 
matches!(runnable_kind, RunnableKind::Flow), ) - .execute(&mut *tx) + .fetch_all(&mut *tx) .await?; - /* Retain only KEEP_LAST most recent captures by this user in this workspace. */ + tx.commit().await?; + + Ok(Json(configs)) +} + +async fn set_config( + authed: ApiAuthed, + Extension(user_db): Extension, + Path(w_id): Path, + Json(nc): Json, +) -> Result<()> { + let mut tx = user_db.begin(&authed).await?; + sqlx::query!( - " - DELETE FROM capture - WHERE workspace_id = $1 - AND created_by = $2 - AND created_at <= - ( SELECT created_at - FROM capture - WHERE workspace_id = $1 - AND created_by = $2 - ORDER BY created_at DESC - OFFSET $3 - LIMIT 1 ) - ", + "INSERT INTO capture_config + (workspace_id, path, is_flow, trigger_kind, trigger_config, owner, email) + VALUES ($1, $2, $3, $4, $5, $6, $7) + ON CONFLICT (workspace_id, path, is_flow, trigger_kind) + DO UPDATE SET trigger_config = $5, owner = $6, email = $7, server_id = NULL, last_server_ping = NULL, error = NULL", &w_id, + &nc.path, + nc.is_flow, + nc.trigger_kind as TriggerKind, + nc.trigger_config.map(|x| SqlxJson(to_raw_value(&x))) as Option>>, &authed.username, - KEEP_LAST, + &authed.email, ) .execute(&mut *tx) .await?; tx.commit().await?; - Ok(StatusCode::CREATED) + Ok(()) } -pub async fn update_payload( - Extension(db): Extension, - Path((w_id, path)): Path<(String, StripPath)>, - args: WebhookArgs, -) -> Result { - let args = args.args; - - let mut tx = db.begin().await?; - +async fn ping_config( + authed: ApiAuthed, + Extension(user_db): Extension, + Path((w_id, trigger_kind, runnable_kind, path)): Path<( + String, + TriggerKind, + RunnableKind, + StripPath, + )>, +) -> Result<()> { + let mut tx = user_db.begin(&authed).await?; sqlx::query!( - " - UPDATE capture - SET payload = $3 - WHERE workspace_id = $1 - AND path = $2 - ", + "UPDATE capture_config SET last_client_ping = now() WHERE workspace_id = $1 AND path = $2 AND is_flow = $3 AND trigger_kind = $4", &w_id, &path.to_path(), - Json(PushArgs { 
args: &args.args, extra: args.extra }) as Json, + matches!(runnable_kind, RunnableKind::Flow), + trigger_kind as TriggerKind, ) .execute(&mut *tx) .await?; - tx.commit().await?; + Ok(()) +} - Ok(StatusCode::NO_CONTENT) +#[derive(Serialize, Deserialize)] +struct Capture { + id: i64, + created_at: chrono::DateTime, + trigger_kind: TriggerKind, + payload: SqlxJson>, + trigger_extra: Option>>, } -#[derive(sqlx::FromRow)] -struct Payload { - payload: sqlx::types::Json>, +#[derive(Deserialize)] +#[serde(rename_all = "lowercase")] +enum RunnableKind { + Script, + Flow, } -pub async fn get_payload( + +#[derive(Deserialize)] +struct ListCapturesQuery { + trigger_kind: Option, +} + +async fn list_captures( authed: ApiAuthed, Extension(user_db): Extension, - Path((w_id, path)): Path<(String, StripPath)>, -) -> JsonResult> { + Path((w_id, runnable_kind, path)): Path<(String, RunnableKind, StripPath)>, + Query(query): Query, +) -> JsonResult> { let mut tx = user_db.begin(&authed).await?; - let payload = sqlx::query_as::<_, Payload>( - " - SELECT payload - FROM capture + let captures = sqlx::query_as!( + Capture, + r#"SELECT id, created_at, trigger_kind as "trigger_kind: _", payload as "payload: _", trigger_extra as "trigger_extra: _" + FROM capture WHERE workspace_id = $1 - AND path = $2 - ", + AND path = $2 AND is_flow = $3 + AND ($4::trigger_kind IS NULL OR trigger_kind = $4) + ORDER BY created_at DESC"#, + &w_id, + &path.to_path(), + matches!(runnable_kind, RunnableKind::Flow), + query.trigger_kind as Option, ) - .bind(&w_id) - .bind(&path.to_path()) - .fetch_optional(&mut *tx) + .fetch_all(&mut *tx) .await?; tx.commit().await?; - not_found_if_none(payload.map(|x| x.payload.0), "capture", path.to_path()).map(axum::Json) + Ok(Json(captures)) +} + +async fn delete_capture( + authed: ApiAuthed, + Extension(user_db): Extension, + Path((_, id)): Path<(String, i64)>, +) -> Result<()> { + let mut tx = user_db.begin(&authed).await?; + sqlx::query!("DELETE FROM capture WHERE id = 
$1", id) + .execute(&mut *tx) + .await?; + tx.commit().await?; + Ok(()) +} + +#[derive(Serialize, Deserialize)] +struct ActiveCaptureOwner { + owner: String, + email: String, +} + +pub async fn get_active_capture_owner_and_email( + db: &DB, + w_id: &str, + path: &str, + is_flow: bool, + kind: &TriggerKind, +) -> Result<(String, String)> { + let capture_config = sqlx::query_as!( + ActiveCaptureOwner, + "SELECT owner, email + FROM capture_config + WHERE workspace_id = $1 AND path = $2 AND is_flow = $3 AND trigger_kind = $4 AND last_client_ping > NOW() - INTERVAL '10 seconds'", + &w_id, + &path, + is_flow, + kind as &TriggerKind, + ) + .fetch_optional(db) + .await?; + + let capture_config = not_found_if_none( + capture_config, + &format!("capture config for {} trigger", kind), + path, + )?; + + Ok((capture_config.owner, capture_config.email)) +} + +#[cfg(feature = "http_trigger")] +async fn get_capture_trigger_config_and_owner( + db: &DB, + w_id: &str, + path: &str, + is_flow: bool, + kind: &TriggerKind, +) -> Result<(T, String, String)> { + #[derive(Deserialize)] + struct CaptureTriggerConfigAndOwner { + trigger_config: Option>>, + owner: String, + email: String, + } + + let capture_config = sqlx::query_as!( + CaptureTriggerConfigAndOwner, + r#"SELECT trigger_config as "trigger_config: _", owner, email + FROM capture_config + WHERE workspace_id = $1 AND path = $2 AND is_flow = $3 AND trigger_kind = $4 AND last_client_ping > NOW() - INTERVAL '10 seconds'"#, + &w_id, + &path, + is_flow, + kind as &TriggerKind, + ) + .fetch_optional(db) + .await?; + + let capture_config = not_found_if_none( + capture_config, + &format!("capture config for {} trigger", kind), + path, + )?; + + let trigger_config = not_found_if_none( + capture_config.trigger_config, + &format!("capture {} trigger config", kind), + path, + )?; + + Ok(( + serde_json::from_str(trigger_config.get()).map_err(|e| { + Error::InternalErr(format!( + "error parsing capture config for {} trigger: {}", + kind, e + )) 
+ })?, + capture_config.owner, + capture_config.email, + )) +} + +async fn clear_captures_history(db: &DB, w_id: &str) -> Result<()> { + if *CLOUD_HOSTED { + /* Retain only KEEP_LAST most recent captures in this workspace. */ + sqlx::query!( + "DELETE FROM capture + WHERE workspace_id = $1 + AND created_at <= + ( + SELECT created_at + FROM capture + WHERE workspace_id = $1 + ORDER BY created_at DESC + OFFSET $2 + LIMIT 1 + )", + &w_id, + KEEP_LAST, + ) + .execute(db) + .await?; + } + Ok(()) +} + +pub async fn insert_capture_payload( + db: &DB, + w_id: &str, + path: &str, + is_flow: bool, + trigger_kind: &TriggerKind, + payload: PushArgsOwned, + trigger_extra: Option>, + owner: &str, +) -> Result<()> { + sqlx::query!( + "INSERT INTO capture (workspace_id, path, is_flow, trigger_kind, payload, trigger_extra, created_by) + VALUES ($1, $2, $3, $4, $5, $6, $7)", + &w_id, + path, + is_flow, + trigger_kind as &TriggerKind, + SqlxJson(to_raw_value(&PushArgs { + args: &payload.args, + extra: payload.extra + })) as SqlxJson>, + trigger_extra.map(SqlxJson) as Option>>, + owner, + ) + .execute(db) + .await?; + + clear_captures_history(db, &w_id).await?; + + Ok(()) +} + +async fn webhook_payload( + Extension(db): Extension, + Path((w_id, runnable_kind, path)): Path<(String, RunnableKind, StripPath)>, + args: WebhookArgs, +) -> Result { + let (owner, email) = get_active_capture_owner_and_email( + &db, + &w_id, + &path.to_path(), + matches!(runnable_kind, RunnableKind::Flow), + &TriggerKind::Webhook, + ) + .await?; + + let authed = fetch_api_authed(owner.clone(), email, &w_id, &db, None).await?; + let args = args.to_push_args_owned(&authed, &db, &w_id).await?; + + insert_capture_payload( + &db, + &w_id, + &path.to_path(), + matches!(runnable_kind, RunnableKind::Flow), + &TriggerKind::Webhook, + args, + Some(to_raw_value(&serde_json::json!({ + "wm_trigger": { + "kind": "webhook", + } + }))), + &owner, + ) + .await?; + + Ok(StatusCode::NO_CONTENT) +} + +#[cfg(feature = 
"http_trigger")] +async fn http_payload( + Extension(db): Extension, + Path((w_id, kind, path, route_path)): Path<(String, RunnableKind, String, StripPath)>, + Query(query): Query>, + method: http::Method, + headers: HeaderMap, + args: WebhookArgs, +) -> Result { + let route_path = route_path.to_path(); + let path = path.replace(".", "/"); + + let (http_trigger_config, owner, email): (HttpTriggerConfig, _, _) = + get_capture_trigger_config_and_owner( + &db, + &w_id, + &path, + matches!(kind, RunnableKind::Flow), + &TriggerKind::Http, + ) + .await?; + + let authed = fetch_api_authed(owner.clone(), email, &w_id, &db, None).await?; + let args = args.to_push_args_owned(&authed, &db, &w_id).await?; + + let mut router = matchit::Router::new(); + router.insert(&http_trigger_config.route_path, ()).ok(); + let match_ = router.at(route_path).ok(); + + let match_ = not_found_if_none(match_, "capture http trigger", &route_path)?; + + let matchit::Match { params, .. } = match_; + + let params: HashMap = params + .iter() + .map(|(k, v)| (k.to_string(), v.to_string())) + .collect(); + + let extra: HashMap> = HashMap::from_iter(vec![( + "wm_trigger".to_string(), + build_http_trigger_extra( + &http_trigger_config.route_path, + route_path, + &method, + ¶ms, + &query, + &headers, + ) + .await, + )]); + + insert_capture_payload( + &db, + &w_id, + &path, + matches!(kind, RunnableKind::Flow), + &TriggerKind::Http, + args, + Some(to_raw_value(&extra)), + &owner, + ) + .await?; + + Ok(StatusCode::NO_CONTENT) } diff --git a/backend/windmill-api/src/flows.rs b/backend/windmill-api/src/flows.rs index df0562ee47f02..b37dbd9d9a021 100644 --- a/backend/windmill-api/src/flows.rs +++ b/backend/windmill-api/src/flows.rs @@ -740,6 +740,24 @@ async fn update_flow( "Error updating flow due to deleting old flow: {e:#}" )) })?; + + sqlx::query!( + "UPDATE capture_config SET path = $1 WHERE path = $2 AND workspace_id = $3 AND is_flow IS TRUE", + nf.path, + flow_path, + w_id + ) + .execute(&mut *tx) + 
.await?; + + sqlx::query!( + "UPDATE capture SET path = $1 WHERE path = $2 AND workspace_id = $3 AND is_flow IS TRUE", + nf.path, + flow_path, + w_id + ) + .execute(&mut *tx) + .await?; } let version = sqlx::query_scalar!( @@ -1123,6 +1141,22 @@ async fn delete_flow_by_path( .execute(&mut *tx) .await?; + sqlx::query!( + "DELETE FROM capture_config WHERE path = $1 AND workspace_id = $2 AND is_flow IS TRUE", + path, + &w_id + ) + .execute(&mut *tx) + .await?; + + sqlx::query!( + "DELETE FROM capture WHERE path = $1 AND workspace_id = $2 AND is_flow IS TRUE", + path, + &w_id + ) + .execute(&mut *tx) + .await?; + audit_log( &mut *tx, &authed, diff --git a/backend/windmill-api/src/http_triggers.rs b/backend/windmill-api/src/http_triggers.rs index d66a6b01711e3..85bbf64abda07 100644 --- a/backend/windmill-api/src/http_triggers.rs +++ b/backend/windmill-api/src/http_triggers.rs @@ -77,7 +77,7 @@ pub fn workspaced_service() -> Router { #[derive(Serialize, Deserialize, sqlx::Type)] #[sqlx(type_name = "HTTP_METHOD", rename_all = "lowercase")] #[serde(rename_all = "lowercase")] -enum HttpMethod { +pub enum HttpMethod { Get, Post, Put, @@ -523,6 +523,32 @@ async fn get_http_route_trigger( Ok((trigger, route_path.0, params, authed)) } +pub async fn build_http_trigger_extra( + route_path: &str, + called_path: &str, + method: &http::Method, + params: &HashMap, + query: &HashMap, + headers: &HeaderMap, +) -> Box { + let headers = headers + .iter() + .map(|(k, v)| (k.to_string(), v.to_str().unwrap_or("").to_string())) + .collect::>(); + + to_raw_value(&serde_json::json!({ + "kind": "http", + "http": { + "route": route_path, + "path": called_path, + "method": method.to_string().to_lowercase(), + "params": params, + "query": query, + "headers": headers + }, + })) +} + async fn route_job( Extension(db): Extension, Extension(user_db): Extension, @@ -639,24 +665,18 @@ async fn route_job( } } - let headers = headers - .iter() - .map(|(k, v)| (k.to_string(), 
v.to_str().unwrap_or("").to_string())) - .collect::>(); let extra = args.extra.get_or_insert_with(HashMap::new); extra.insert( "wm_trigger".to_string(), - to_raw_value(&serde_json::json!({ - "kind": "http", - "http": { - "route": trigger.route_path, - "path": called_path, - "method": method.to_string().to_lowercase(), - "params": params, - "query": query, - "headers": headers - }, - })), + build_http_trigger_extra( + &trigger.route_path, + &called_path, + &method, + ¶ms, + &query, + &headers, + ) + .await, ); let http_method = http::Method::from(trigger.http_method); diff --git a/backend/windmill-api/src/kafka_triggers_ee.rs b/backend/windmill-api/src/kafka_triggers_ee.rs index 3a03ebe864ed5..e3de5e6042c9c 100644 --- a/backend/windmill-api/src/kafka_triggers_ee.rs +++ b/backend/windmill-api/src/kafka_triggers_ee.rs @@ -1,5 +1,9 @@ use crate::db::DB; use axum::Router; +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize)] +pub struct KafkaResourceSecurity {} pub fn workspaced_service() -> Router { Router::new() diff --git a/backend/windmill-api/src/lib.rs b/backend/windmill-api/src/lib.rs index 3cb499af79dd4..969c796c4c785 100644 --- a/backend/windmill-api/src/lib.rs +++ b/backend/windmill-api/src/lib.rs @@ -250,9 +250,6 @@ pub async fn run_server( } } - // #[cfg(feature = "kafka")] - // start_listening().await; - let job_helpers_service = { #[cfg(feature = "parquet")] { @@ -424,7 +421,7 @@ pub async fn run_server( ) .nest( "/w/:workspace_id/capture_u", - capture::global_service().layer(cors.clone()), + capture::workspaced_unauthed_service().layer(cors.clone()), ) .nest( "/auth", diff --git a/backend/windmill-api/src/scripts.rs b/backend/windmill-api/src/scripts.rs index 69342120a5d75..15d95a031b3cd 100644 --- a/backend/windmill-api/src/scripts.rs +++ b/backend/windmill-api/src/scripts.rs @@ -662,6 +662,24 @@ async fn create_script_internal<'c>( .execute(&mut *tx) .await?; + sqlx::query!( + "UPDATE capture_config SET path = $1 WHERE path = $2 AND 
workspace_id = $3 AND is_flow IS FALSE", + ns.path, + p_path, + w_id + ) + .execute(&mut *tx) + .await?; + + sqlx::query!( + "UPDATE capture SET path = $1 WHERE path = $2 AND workspace_id = $3 AND is_flow IS FALSE", + ns.path, + p_path, + w_id + ) + .execute(&mut *tx) + .await?; + let mut schedulables = sqlx::query_as::<_, Schedule>( "UPDATE schedule SET script_path = $1 WHERE script_path = $2 AND path != $2 AND workspace_id = $3 AND is_flow IS false RETURNING *") .bind(&ns.path) @@ -1471,6 +1489,22 @@ async fn delete_script_by_path( .execute(&db) .await?; + sqlx::query!( + "DELETE FROM capture_config WHERE path = $1 AND workspace_id = $2 AND is_flow IS FALSE", + path, + w_id + ) + .execute(&db) + .await?; + + sqlx::query!( + "DELETE FROM capture WHERE path = $1 AND workspace_id = $2 AND is_flow IS FALSE", + path, + w_id + ) + .execute(&db) + .await?; + audit_log( &mut *tx, &authed, diff --git a/backend/windmill-api/src/websocket_triggers.rs b/backend/windmill-api/src/websocket_triggers.rs index 9c944f5cb3ec9..6fbdff49caa6a 100644 --- a/backend/windmill-api/src/websocket_triggers.rs +++ b/backend/windmill-api/src/websocket_triggers.rs @@ -15,6 +15,7 @@ use serde::{ use serde_json::{value::RawValue, Value}; use sql_builder::{bind::Bind, SqlBuilder}; use sqlx::prelude::FromRow; +use sqlx::types::Json as SqlxJson; use std::{collections::HashMap, fmt}; use tokio::net::TcpStream; use tokio_tungstenite::{connect_async, tungstenite::Message, MaybeTlsStream, WebSocketStream}; @@ -30,11 +31,14 @@ use windmill_common::{ use windmill_queue::PushArgsOwned; use crate::{ + capture::{insert_capture_payload, TriggerKind, WebsocketTriggerConfig}, db::{ApiAuthed, DB}, jobs::{run_flow_by_path_inner, run_script_by_path_inner, RunJobQuery}, users::fetch_api_authed, }; +use std::borrow::Cow; + pub fn workspaced_service() -> Router { Router::new() .route("/create", post(create_websocket_trigger)) @@ -54,19 +58,19 @@ struct NewWebsocketTrigger { is_flow: bool, enabled: Option, filters: 
Vec>, - initial_messages: Vec>, - url_runnable_args: Box, + initial_messages: Option>>, + url_runnable_args: Option>, } #[derive(Deserialize)] -struct JsonFilter { +pub struct JsonFilter { key: String, value: serde_json::Value, } #[derive(Deserialize)] #[serde(untagged)] -enum Filter { +pub enum Filter { JsonFilter(JsonFilter), } @@ -93,9 +97,9 @@ pub struct WebsocketTrigger { extra_perms: serde_json::Value, error: Option, enabled: bool, - filters: Vec>>, - initial_messages: Vec>>, - url_runnable_args: sqlx::types::Json>, + filters: Vec>>, + initial_messages: Option>>>, + url_runnable_args: Option>>, } #[derive(Deserialize)] @@ -105,8 +109,8 @@ struct EditWebsocketTrigger { script_path: String, is_flow: bool, filters: Vec>, - initial_messages: Vec>, - url_runnable_args: Box, + initial_messages: Option>>, + url_runnable_args: Option>, } #[derive(Deserialize)] @@ -190,11 +194,12 @@ async fn create_websocket_trigger( let mut tx = user_db.begin(&authed).await?; - let filters = ct.filters.into_iter().map(sqlx::types::Json).collect_vec(); + let filters = ct.filters.into_iter().map(SqlxJson).collect_vec(); let initial_messages = ct .initial_messages + .unwrap_or_default() .into_iter() - .map(sqlx::types::Json) + .map(SqlxJson) .collect_vec(); sqlx::query_as::<_, WebsocketTrigger>( "INSERT INTO websocket_trigger (workspace_id, path, url, script_path, is_flow, enabled, filters, initial_messages, url_runnable_args, edited_by, email, edited_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, now()) RETURNING *", @@ -207,7 +212,7 @@ async fn create_websocket_trigger( .bind(ct.enabled.unwrap_or(true)) .bind(filters.as_slice()) .bind(initial_messages.as_slice()) - .bind(sqlx::types::Json(ct.url_runnable_args)) + .bind(ct.url_runnable_args.map(SqlxJson)) .bind(&authed.username) .bind(&authed.email) .fetch_one(&mut *tx).await?; @@ -237,11 +242,12 @@ async fn update_websocket_trigger( let path = path.to_path(); let mut tx = user_db.begin(&authed).await?; - let filters = 
ct.filters.into_iter().map(sqlx::types::Json).collect_vec(); + let filters = ct.filters.into_iter().map(SqlxJson).collect_vec(); let initial_messages = ct .initial_messages + .unwrap_or_default() .into_iter() - .map(sqlx::types::Json) + .map(SqlxJson) .collect_vec(); // important to update server_id, last_server_ping and error to NULL to stop current websocket listener @@ -252,9 +258,9 @@ async fn update_websocket_trigger( ct.script_path, ct.path, ct.is_flow, - filters.as_slice() as &[sqlx::types::Json>], - initial_messages.as_slice() as &[sqlx::types::Json>], - sqlx::types::Json(ct.url_runnable_args) as sqlx::types::Json>, + filters.as_slice() as &[SqlxJson>], + initial_messages.as_slice() as &[SqlxJson>], + ct.url_runnable_args.map(SqlxJson) as Option>>, &authed.username, &authed.email, w_id, @@ -386,18 +392,36 @@ async fn listen_to_unlistened_websockets( Ok(mut triggers) => { triggers.shuffle(&mut rand::thread_rng()); for trigger in triggers { - maybe_listen_to_websocket(trigger, db.clone(), killpill_rx.resubscribe()).await; + trigger.maybe_listen_to_websocket(db.clone(), killpill_rx.resubscribe()).await; } } Err(err) => { tracing::error!("Error fetching websocket triggers: {:?}", err); } }; + + match sqlx::query_as!( + CaptureConfigForWebsocket, + r#"SELECT path, is_flow, workspace_id, trigger_config as "trigger_config!: _", owner, email FROM capture_config WHERE trigger_kind = 'websocket' AND last_client_ping > NOW() - INTERVAL '10 seconds' AND trigger_config IS NOT NULL AND (server_id IS NULL OR last_server_ping IS NULL OR last_server_ping < now() - interval '15 seconds')"# + ) + .fetch_all(db) + .await + { + Ok(mut captures) => { + captures.shuffle(&mut rand::thread_rng()); + for capture in captures { + capture.maybe_listen_to_websocket(db.clone(), killpill_rx.resubscribe()).await; + } + } + Err(err) => { + tracing::error!("Error fetching capture websocket triggers: {:?}", err); + } + } } pub async fn start_websockets(db: DB, mut killpill_rx: 
tokio::sync::broadcast::Receiver<()>) -> () { tokio::spawn(async move { - listen_to_unlistened_websockets(&db, &&killpill_rx).await; + listen_to_unlistened_websockets(&db, &killpill_rx).await; loop { tokio::select! { biased; @@ -405,37 +429,13 @@ pub async fn start_websockets(db: DB, mut killpill_rx: tokio::sync::broadcast::R return; } _ = tokio::time::sleep(tokio::time::Duration::from_secs(15)) => { - listen_to_unlistened_websockets(&db, &&killpill_rx).await; + listen_to_unlistened_websockets(&db, &killpill_rx).await; } } } }); } -async fn maybe_listen_to_websocket( - ws_trigger: WebsocketTrigger, - db: DB, - killpill_rx: tokio::sync::broadcast::Receiver<()>, -) -> () { - match sqlx::query_scalar!( - "UPDATE websocket_trigger SET server_id = $1, last_server_ping = now() WHERE enabled IS TRUE AND workspace_id = $2 AND path = $3 AND (server_id IS NULL OR last_server_ping IS NULL OR last_server_ping < now() - interval '15 seconds') RETURNING true", - *INSTANCE_NAME, - ws_trigger.workspace_id, - ws_trigger.path, - ).fetch_optional(&db).await { - Ok(has_lock) => { - if has_lock.flatten().unwrap_or(false) { - tokio::spawn(listen_to_websocket(ws_trigger, db, killpill_rx)); - } else { - tracing::info!("Websocket {} already being listened to", ws_trigger.url); - } - }, - Err(err) => { - tracing::error!("Error acquiring lock for websocket {}: {:?}", ws_trigger.path, err); - } - }; -} - struct SupersetVisitor<'a> { key: &'a str, value_to_check: &'a Value, @@ -505,32 +505,29 @@ where async fn wait_runnable_result( path: String, is_flow: bool, - args: &Box, - ws_trigger: &WebsocketTrigger, - username_override: String, + args: Option<&Box>, + authed: ApiAuthed, db: &DB, + workspace_id: &str, + trigger_path: &str, ) -> error::Result { let user_db = UserDB::new(db.clone()); - let authed = fetch_api_authed( - ws_trigger.edited_by.clone(), - ws_trigger.email.clone(), - &ws_trigger.workspace_id, - &db, - Some(username_override), - ) - .await?; - let args = 
serde_json::from_str::>>>(args.get()) - .map_err(|e| error::Error::BadRequest(format!("invalid json: {}", e)))? - .unwrap_or_else(HashMap::new); + let args = if let Some(args) = args { + serde_json::from_str::>>>(args.get()) + .map_err(|e| error::Error::BadRequest(format!("invalid json: {}", e)))? + .unwrap_or_else(HashMap::new) + } else { + HashMap::new() + }; - let label_prefix = Some(format!("ws-{}-", ws_trigger.path)); + let label_prefix = Some(format!("ws-{}-", trigger_path)); let (_, job_id) = if is_flow { run_flow_by_path_inner( authed, db.clone(), user_db, - ws_trigger.workspace_id.clone(), + workspace_id.to_string(), StripPath(path.clone()), RunJobQuery::default(), PushArgsOwned { args, extra: None }, @@ -542,7 +539,7 @@ async fn wait_runnable_result( authed, db.clone(), user_db, - ws_trigger.workspace_id.clone(), + workspace_id.to_string(), StripPath(path.clone()), RunJobQuery::default(), PushArgsOwned { args, extra: None }, @@ -563,7 +560,7 @@ async fn wait_runnable_result( #[derive(sqlx::FromRow)] struct RawResult { - result: Option>>, + result: Option>>, success: bool, } @@ -571,7 +568,7 @@ async fn wait_runnable_result( "SELECT result, success FROM completed_job WHERE id = $1 AND workspace_id = $2", ) .bind(Uuid::parse_str(&job_id).unwrap()) - .bind(&ws_trigger.workspace_id) + .bind(workspace_id) .fetch_optional(db) .await; @@ -601,92 +598,34 @@ async fn wait_runnable_result( } } -async fn send_initial_messages( - ws_trigger: &WebsocketTrigger, - mut writer: SplitSink>, Message>, - db: &DB, -) -> error::Result<()> { - let initial_messages: Vec = ws_trigger - .initial_messages - .iter() - .filter_map(|m| serde_json::from_str(m.get()).ok()) - .collect_vec(); - - for start_message in initial_messages { - match start_message { - InitialMessage::RawMessage(msg) => { - let msg = if msg.starts_with("\"") && msg.ends_with("\"") { - msg[1..msg.len() - 1].to_string() - } else { - msg - }; - tracing::info!( - "Sending raw message initial message to websocket {}: 
{}", - ws_trigger.url, - msg - ); - writer - .send(tokio_tungstenite::tungstenite::Message::Text(msg)) - .await - .map_err(to_anyhow) - .with_context(|| "failed to send raw message")?; - } - InitialMessage::RunnableResult { path, is_flow, args } => { - tracing::info!( - "Running runnable {path} (is_flow: {is_flow}) for initial message to websocket {}", - ws_trigger.url, - ); - - let result = wait_runnable_result( - path.clone(), - is_flow, - &args, - ws_trigger, - "init".to_string(), - db, - ) - .await?; - - tracing::info!( - "Sending runnable {path} (is_flow: {is_flow}) result to websocket {}", - ws_trigger.url - ); - - let result = if result.starts_with("\"") && result.ends_with("\"") { - result[1..result.len() - 1].to_string() - } else { - result - }; - - writer - .send(tokio_tungstenite::tungstenite::Message::Text(result)) - .await - .map_err(to_anyhow) - .with_context(|| { - format!("Failed to send runnable {path} (is_flow: {is_flow}) result") - })?; - } +async fn loop_ping(db: &DB, ws: &WebsocketEnum, error: Option<&str>) -> () { + loop { + if let None = ws.update_ping(db, error).await { + return; } + tokio::time::sleep(tokio::time::Duration::from_secs(5)).await; } - - Ok(()) } async fn get_url_from_runnable( path: &str, is_flow: bool, - ws_trigger: &WebsocketTrigger, db: &DB, + authed: ApiAuthed, + args: Option<&Box>, + workspace_id: &str, + trigger_path: &str, ) -> error::Result { tracing::info!("Running runnable {path} (is_flow: {is_flow}) to get websocket URL",); let result = wait_runnable_result( path.to_string(), is_flow, - &ws_trigger.url_runnable_args.0, - ws_trigger, - "url".to_string(), + args, + authed, db, + workspace_id, + trigger_path, ) .await?; @@ -697,78 +636,394 @@ async fn get_url_from_runnable( } } -async fn update_ping(db: &DB, ws_trigger: &WebsocketTrigger, error: Option<&str>) -> Option<()> { - match sqlx::query_scalar!( +impl WebsocketTrigger { + async fn maybe_listen_to_websocket( + self, + db: DB, + killpill_rx: 
tokio::sync::broadcast::Receiver<()>, + ) -> () { + match sqlx::query_scalar!( + "UPDATE websocket_trigger SET server_id = $1, last_server_ping = now() WHERE enabled IS TRUE AND workspace_id = $2 AND path = $3 AND (server_id IS NULL OR last_server_ping IS NULL OR last_server_ping < now() - interval '15 seconds') RETURNING true", + *INSTANCE_NAME, + self.workspace_id, + self.path, + ).fetch_optional(&db).await { + Ok(has_lock) => { + if has_lock.flatten().unwrap_or(false) { + tokio::spawn(listen_to_websocket(WebsocketEnum::Trigger(self), db, killpill_rx)); + } else { + tracing::info!("Websocket {} already being listened to", self.url); + } + }, + Err(err) => { + tracing::error!("Error acquiring lock for websocket {}: {:?}", self.path, err); + } + }; + } + + async fn update_ping(&self, db: &DB, error: Option<&str>) -> Option<()> { + match sqlx::query_scalar!( "UPDATE websocket_trigger SET last_server_ping = now(), error = $1 WHERE workspace_id = $2 AND path = $3 AND server_id = $4 AND enabled IS TRUE RETURNING 1", error, - ws_trigger.workspace_id, - ws_trigger.path, + self.workspace_id, + self.path, *INSTANCE_NAME ).fetch_optional(db).await { Ok(updated) => { if updated.flatten().is_none() { - tracing::info!("Websocket {} changed, disabled, or deleted, stopping...", ws_trigger.url); + tracing::info!("Websocket {} changed, disabled, or deleted, stopping...", self.url); return None; } }, - Err(err) => { - tracing::warn!("Error updating ping of websocket {}: {:?}", ws_trigger.url, err); + Err(err) => { + tracing::warn!("Error updating ping of websocket {}: {:?}", self.url, err); + } + }; + + Some(()) + } + + async fn disable_with_error(&self, db: &DB, error: String) -> () { + match sqlx::query!( + "UPDATE websocket_trigger SET enabled = FALSE, error = $1, server_id = NULL, last_server_ping = NULL WHERE workspace_id = $2 AND path = $3", + error, + self.workspace_id, + self.path, + ) + .execute(db).await { + Ok(_) => { + report_critical_error(format!("Disabling websocket 
{} because of error: {}", self.url, error), db.clone(), Some(&self.workspace_id), None).await; + }, + Err(disable_err) => { + report_critical_error( + format!("Could not disable websocket {} with err {}, disabling because of error {}", self.path, disable_err, error), + db.clone(), + Some(&self.workspace_id), + None, + ).await; + } } - }; + } - Some(()) -} + async fn get_url_from_runnable( + &self, + path: &str, + is_flow: bool, + db: &DB, + ) -> error::Result { + get_url_from_runnable( + &path, + is_flow, + db, + self.fetch_authed(db, Some("url".to_string())).await?, + self.url_runnable_args.as_ref().map(|r| &r.0), + &self.workspace_id, + &self.path, + ) + .await + } -async fn loop_ping(db: &DB, ws_trigger: &WebsocketTrigger, error: Option<&str>) -> () { - loop { - if let None = update_ping(db, ws_trigger, error).await { - return; + async fn send_initial_messages( + &self, + mut writer: SplitSink>, Message>, + db: &DB, + ) -> error::Result<()> { + let initial_messages: Vec = self + .initial_messages + .as_deref() + .unwrap_or_default() + .iter() + .filter_map(|m| serde_json::from_str(m.get()).ok()) + .collect_vec(); + + for start_message in initial_messages { + match start_message { + InitialMessage::RawMessage(msg) => { + let msg = if msg.starts_with("\"") && msg.ends_with("\"") { + msg[1..msg.len() - 1].to_string() + } else { + msg + }; + tracing::info!( + "Sending raw message initial message to websocket {}: {}", + self.url, + msg + ); + writer + .send(tokio_tungstenite::tungstenite::Message::Text(msg)) + .await + .map_err(to_anyhow) + .with_context(|| "failed to send raw message")?; + } + InitialMessage::RunnableResult { path, is_flow, args } => { + tracing::info!( + "Running runnable {path} (is_flow: {is_flow}) for initial message to websocket {}", + self.url, + ); + + let result = wait_runnable_result( + path.clone(), + is_flow, + Some(&args), + self.fetch_authed(db, Some("init".to_string())).await?, + db, + &self.workspace_id, + &self.path, + ) + .await?; + 
+ tracing::info!( + "Sending runnable {path} (is_flow: {is_flow}) result to websocket {}", + self.url + ); + + let result = if result.starts_with("\"") && result.ends_with("\"") { + result[1..result.len() - 1].to_string() + } else { + result + }; + + writer + .send(tokio_tungstenite::tungstenite::Message::Text(result)) + .await + .map_err(to_anyhow) + .with_context(|| { + format!("Failed to send runnable {path} (is_flow: {is_flow}) result") + })?; + } + } } - tokio::time::sleep(tokio::time::Duration::from_secs(5)).await; + + Ok(()) } -} -async fn disable_with_error(db: &DB, ws_trigger: &WebsocketTrigger, error: String) { - match sqlx::query!( - "UPDATE websocket_trigger SET enabled = FALSE, error = $1, server_id = NULL, last_server_ping = NULL WHERE workspace_id = $2 AND path = $3", - error, - ws_trigger.workspace_id, - ws_trigger.path, - ) - .execute(db).await { - Ok(_) => { - report_critical_error(format!("Disabling websocket {} because of error: {}", ws_trigger.url, error), db.clone(), Some(&ws_trigger.workspace_id), None).await; - }, - Err(disable_err) => { + async fn handle(&self, db: &DB, args: PushArgsOwned) -> () { + if let Err(err) = run_job(db, self, args).await { report_critical_error( - format!("Could not disable websocket {} with err {}, disabling because of error {}", ws_trigger.path, disable_err, error), + format!( + "Failed to trigger job from websocket {}: {:?}", + self.url, err + ), db.clone(), - Some(&ws_trigger.workspace_id), + Some(&self.workspace_id), None, - ).await; + ) + .await; + }; + } + + async fn fetch_authed( + &self, + db: &DB, + username_override: Option, + ) -> error::Result { + fetch_api_authed( + self.edited_by.clone(), + self.email.clone(), + &self.workspace_id, + db, + username_override, + ) + .await + } +} + +#[derive(Deserialize)] +struct CaptureConfigForWebsocket { + trigger_config: SqlxJson, + path: String, + is_flow: bool, + workspace_id: String, + owner: String, + email: String, +} + +impl CaptureConfigForWebsocket { + 
async fn maybe_listen_to_websocket( + self, + db: DB, + killpill_rx: tokio::sync::broadcast::Receiver<()>, + ) -> () { + match sqlx::query_scalar!( + "UPDATE capture_config SET server_id = $1, last_server_ping = now() WHERE last_client_ping > NOW() - INTERVAL '10 seconds' AND workspace_id = $2 AND path = $3 AND is_flow = $4 AND trigger_kind = 'websocket' AND (server_id IS NULL OR last_server_ping IS NULL OR last_server_ping < now() - interval '15 seconds') RETURNING true", + *INSTANCE_NAME, + self.workspace_id, + self.path, + self.is_flow, + ).fetch_optional(&db).await { + Ok(has_lock) => { + if has_lock.flatten().unwrap_or(false) { + tokio::spawn(listen_to_websocket(WebsocketEnum::Capture(self), db, killpill_rx)); + } else { + tracing::info!("Websocket {} already being listened to", self.trigger_config.url); + } + }, + Err(err) => { + tracing::error!("Error acquiring lock for capture websocket {}: {:?}", self.path, err); + } + }; + } + + async fn update_ping(&self, db: &DB, error: Option<&str>) -> Option<()> { + match sqlx::query_scalar!( + "UPDATE capture_config SET last_server_ping = now(), error = $1 WHERE workspace_id = $2 AND path = $3 AND is_flow = $4 AND trigger_kind = 'websocket' AND server_id = $5 AND last_client_ping > NOW() - INTERVAL '10 seconds' RETURNING 1", + error, + self.workspace_id, + self.path, + self.is_flow, + *INSTANCE_NAME + ).fetch_optional(db).await { + Ok(updated) => { + if updated.flatten().is_none() { + tracing::info!("Websocket capture {} changed, disabled, or deleted, stopping...", self.trigger_config.url); + return None; + } + }, + Err(err) => { + tracing::warn!("Error updating ping of capture websocket {}: {:?}", self.trigger_config.url, err); + } + }; + + Some(()) + } + + async fn handle(&self, db: &DB, args: PushArgsOwned) -> () { + if let Err(err) = insert_capture_payload( + db, + &self.workspace_id, + &self.path, + self.is_flow, + &TriggerKind::Websocket, + PushArgsOwned { args: args.args, extra: None }, + 
args.extra.as_ref().map(to_raw_value), + &self.owner, + ) + .await + { + tracing::error!("Error inserting capture payload: {:?}", err); + } + } + + async fn get_url_from_runnable( + &self, + path: &str, + is_flow: bool, + db: &DB, + ) -> error::Result { + let url_runnable_args = self + .trigger_config + .url_runnable_args + .as_ref() + .map(to_raw_value); + get_url_from_runnable( + &path, + is_flow, + db, + self.fetch_authed(db, Some("url".to_string())).await?, + url_runnable_args.as_ref(), + &self.workspace_id, + &self.get_trigger_path(), + ) + .await + } + + async fn fetch_authed( + &self, + db: &DB, + username_override: Option, + ) -> error::Result { + fetch_api_authed( + self.owner.clone(), + self.email.clone(), + &self.workspace_id, + db, + username_override, + ) + .await + } + + async fn disable_with_error(&self, db: &DB, error: String) -> () { + if let Err(err) = sqlx::query!( + "UPDATE capture_config SET error = $1, server_id = NULL, last_server_ping = NULL WHERE workspace_id = $2 AND path = $3 AND is_flow = $4 AND trigger_kind = 'websocket'", + error, + self.workspace_id, + self.path, + self.is_flow, + ) + .execute(db).await { + tracing::error!("Could not disable websocket capture {} ({}) with err {}, disabling because of error {}", self.path, self.workspace_id, err, error); + } + } + + fn get_trigger_path(&self) -> String { + format!( + "{}-{}", + if self.is_flow { "flow" } else { "script" }, + self.path + ) + } +} + +enum WebsocketEnum { + Trigger(WebsocketTrigger), + Capture(CaptureConfigForWebsocket), +} + +impl WebsocketEnum { + async fn update_ping(&self, db: &DB, error: Option<&str>) -> Option<()> { + match self { + WebsocketEnum::Trigger(ws) => ws.update_ping(db, error).await, + WebsocketEnum::Capture(capture) => capture.update_ping(db, error).await, + } + } + + async fn get_url_from_runnable( + &self, + path: &str, + is_flow: bool, + db: &DB, + ) -> error::Result { + match self { + WebsocketEnum::Trigger(ws) => ws.get_url_from_runnable(path, 
is_flow, db).await, + WebsocketEnum::Capture(capture) => { + capture.get_url_from_runnable(path, is_flow, db).await + } + } + } + + async fn disable_with_error(&self, db: &DB, error: String) -> () { + match self { + WebsocketEnum::Trigger(ws) => ws.disable_with_error(db, error).await, + WebsocketEnum::Capture(capture) => capture.disable_with_error(db, error).await, } } } async fn listen_to_websocket( - ws_trigger: WebsocketTrigger, + ws: WebsocketEnum, db: DB, mut killpill_rx: tokio::sync::broadcast::Receiver<()>, ) -> () { - if let None = update_ping(&db, &ws_trigger, Some("Connecting...")).await { + if let None = ws.update_ping(&db, Some("Connecting")).await { return; } - let url = ws_trigger.url.as_str(); + let url = match &ws { + WebsocketEnum::Trigger(ws_trigger) => &ws_trigger.url, + WebsocketEnum::Capture(capture) => &capture.trigger_config.url, + }; - let filters: Vec = ws_trigger - .filters - .iter() - .filter_map(|m| serde_json::from_str(m.get()).ok()) - .collect_vec(); + let filters: Vec = match &ws { + WebsocketEnum::Trigger(ws_trigger) => ws_trigger + .filters + .iter() + .filter_map(|m| serde_json::from_str(m.get()).ok()) + .collect_vec(), + WebsocketEnum::Capture(_) => vec![], + }; loop { - let connect_url = if url.starts_with("$") { + let connect_url: Cow = if url.starts_with("$") { if url.starts_with("$flow:") || url.starts_with("$script:") { let path = url.splitn(2, ':').nth(1).unwrap(); tokio::select! { @@ -776,18 +1031,17 @@ async fn listen_to_websocket( _ = killpill_rx.recv() => { return; }, - _ = loop_ping(&db, &ws_trigger, Some( + _ = loop_ping(&db, &ws, Some( "Waiting on runnable to return websocket URL..." 
)) => { return; }, - url_result = get_url_from_runnable(path, url.starts_with("$flow:"), &ws_trigger, &db) => match url_result { - Ok(url) => url, + + + url_result = ws.get_url_from_runnable(path, url.starts_with("$flow:"), &db) => match url_result { + Ok(url) => Cow::Owned(url), Err(err) => { - disable_with_error( - &db, - &ws_trigger, - format!( + ws.disable_with_error(&db, format!( "Error getting websocket URL from runnable after 5 tries: {:?}", err ), @@ -798,16 +1052,12 @@ async fn listen_to_websocket( }, } } else { - disable_with_error( - &db, - &ws_trigger, - format!("Invalid websocket runnable path: {}", url), - ) - .await; + ws.disable_with_error(&db, format!("Invalid websocket runnable path: {}", url)) + .await; return; } } else { - url.to_string() + Cow::Borrowed(url) }; tokio::select! { @@ -815,14 +1065,14 @@ async fn listen_to_websocket( _ = killpill_rx.recv() => { return; }, - _ = loop_ping(&db, &ws_trigger, Some("Connecting...")) => { + _ = loop_ping(&db, &ws, Some("Connecting...")) => { return; }, - connection = connect_async(connect_url) => { + connection = connect_async(connect_url.as_ref()) => { match connection { Ok((ws_stream, _)) => { tracing::info!("Listening to websocket {}", url); - if let None = update_ping(&db, &ws_trigger, None).await { + if let None = ws.update_ping(&db, None).await { return; } let (writer, mut reader) = ws_stream.split(); @@ -834,12 +1084,19 @@ async fn listen_to_websocket( return; } _ = async { - if let Err(err) = send_initial_messages(&ws_trigger, writer, &db).await { - disable_with_error(&db, &ws_trigger, format!("Error sending initial messages: {:?}", err)).await; - } else { - tracing::debug!("Initial messages sent successfully to websocket {}", url); - // if initial messages sent successfully, wait forever - futures::future::pending::<()>().await; + match &ws { + WebsocketEnum::Trigger(ws_trigger) => { + if let Err(err) = ws_trigger.send_initial_messages(writer, &db).await { + ws_trigger.disable_with_error(&db, 
format!("Error sending initial messages: {:?}", err)).await; + } else { + tracing::debug!("Initial messages sent successfully to websocket {}", url); + // if initial messages sent successfully, wait forever + futures::future::pending::<()>().await; + } + }, + WebsocketEnum::Capture(_) => { + futures::future::pending::<()>().await; + } } } => { // was disabled => exit @@ -852,7 +1109,7 @@ async fn listen_to_websocket( msg = reader.next() => { if let Some(msg) = msg { if last_ping.elapsed() > tokio::time::Duration::from_secs(5) { - if let None = update_ping(&db, &ws_trigger, None).await { + if let None = ws.update_ping(&db, None).await { return; } last_ping = tokio::time::Instant::now(); @@ -883,9 +1140,22 @@ async fn listen_to_websocket( } } if should_handle { - if let Err(err) = run_job(&db, &ws_trigger, text).await { - report_critical_error(format!("Failed to trigger job from websocket {}: {:?}", ws_trigger.url, err), db.clone(), Some(&ws_trigger.workspace_id), None).await; - }; + + let args = HashMap::from([("msg".to_string(), to_raw_value(&text))]); + let extra = Some(HashMap::from([( + "wm_trigger".to_string(), + to_raw_value(&serde_json::json!({"kind": "websocket", "websocket": { "url": url }})), + )])); + + let args = PushArgsOwned { args, extra }; + match &ws { + WebsocketEnum::Trigger(ws_trigger) => { + ws_trigger.handle(&db, args).await; + }, + WebsocketEnum::Capture(capture) => { + capture.handle(&db, args).await; + } + } } }, a @ _ => { @@ -899,9 +1169,7 @@ async fn listen_to_websocket( } } else { tracing::error!("Websocket {} closed", url); - if let None = - update_ping(&db, &ws_trigger, Some("Websocket closed")).await - { + if let None = ws.update_ping(&db, Some("Websocket closed")).await { return; } tokio::time::sleep(tokio::time::Duration::from_secs(5)).await; @@ -910,7 +1178,7 @@ async fn listen_to_websocket( }, _ = tokio::time::sleep(tokio::time::Duration::from_secs(5)) => { tracing::debug!("Sending ping to websocket {}", url); - if let None = 
update_ping(&db, &ws_trigger, None).await { + if let None = ws.update_ping(&db, None).await { return; } last_ping = tokio::time::Instant::now(); @@ -924,9 +1192,7 @@ async fn listen_to_websocket( } Err(err) => { tracing::error!("Error connecting to websocket {}: {:?}", url, err); - if let None = - update_ping(&db, &ws_trigger, Some(err.to_string().as_str())).await - { + if let None = ws.update_ping(&db, Some(err.to_string().as_str())).await { return; } tokio::time::sleep(tokio::time::Duration::from_secs(5)).await; @@ -937,16 +1203,7 @@ async fn listen_to_websocket( } } -async fn run_job(db: &DB, trigger: &WebsocketTrigger, msg: String) -> anyhow::Result<()> { - let args = PushArgsOwned { - args: HashMap::from([("msg".to_string(), to_raw_value(&msg))]), - extra: Some(HashMap::from([( - "wm_trigger".to_string(), - to_raw_value( - &serde_json::json!({"kind": "websocket", "websocket": { "url": trigger.url }}), - ), - )])), - }; +async fn run_job(db: &DB, trigger: &WebsocketTrigger, args: PushArgsOwned) -> anyhow::Result<()> { let label_prefix = Some(format!("ws-{}-", trigger.path)); let authed = fetch_api_authed( diff --git a/backend/windmill-api/src/workspaces_extra.rs b/backend/windmill-api/src/workspaces_extra.rs index 8fba5d0be58da..77a11b75c16c0 100644 --- a/backend/windmill-api/src/workspaces_extra.rs +++ b/backend/windmill-api/src/workspaces_extra.rs @@ -103,6 +103,14 @@ pub(crate) async fn change_workspace_id( .execute(&mut *tx) .await?; + sqlx::query!( + "UPDATE capture_config SET workspace_id = $1 WHERE workspace_id = $2", + &rw.new_id, + &old_id + ) + .execute(&mut *tx) + .await?; + sqlx::query!( "UPDATE completed_job SET workspace_id = $1 WHERE workspace_id = $2", &rw.new_id, @@ -396,6 +404,9 @@ pub(crate) async fn delete_workspace( sqlx::query!("DELETE FROM capture WHERE workspace_id = $1", &w_id) .execute(&mut *tx) .await?; + sqlx::query!("DELETE FROM capture_config WHERE workspace_id = $1", &w_id) + .execute(&mut *tx) + .await?; sqlx::query!("DELETE FROM 
draft WHERE workspace_id = $1", &w_id) .execute(&mut *tx) .await?; diff --git a/frontend/package-lock.json b/frontend/package-lock.json index e09106eca7730..97958257eb679 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -86,6 +86,8 @@ "devDependencies": { "@floating-ui/core": "^1.3.1", "@hey-api/openapi-ts": "^0.43.0", + "@melt-ui/pp": "^0.3.2", + "@melt-ui/svelte": "^0.86.2", "@playwright/test": "^1.34.3", "@rgossiaux/svelte-headlessui": "^2.0.0", "@sveltejs/adapter-static": "^3.0.6", @@ -3560,6 +3562,16 @@ "integrity": "sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw==", "dev": true }, + "node_modules/@internationalized/date": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@internationalized/date/-/date-3.6.0.tgz", + "integrity": "sha512-+z6ti+CcJnRlLHok/emGEsWQhe7kfSmEW+/6qCzvKY67YPh7YOBfvc7+/+NXq+zJlbArg30tYpqLjNgcAYv2YQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@swc/helpers": "^0.5.0" + } + }, "node_modules/@jridgewell/gen-mapping": { "version": "0.3.3", "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz", @@ -3672,6 +3684,58 @@ "resolved": "https://registry.npmjs.org/@mapbox/unitbezier/-/unitbezier-0.0.0.tgz", "integrity": "sha512-HPnRdYO0WjFjRTSwO3frz1wKaU649OBFPX3Zo/2WZvuRi6zMiRGui8SnPQiQABgqCf8YikDe5t3HViTVw1WUzA==" }, + "node_modules/@melt-ui/pp": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@melt-ui/pp/-/pp-0.3.2.tgz", + "integrity": "sha512-xKkPvaIAFinklLXcQOpwZ8YSpqAFxykjWf8Y/fSJQwsixV/0rcFs07hJ49hJjPy5vItvw5Qa0uOjzFUbXzBypQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "estree-walker": "^3.0.3", + "magic-string": "^0.30.5" + }, + "peerDependencies": { + "@melt-ui/svelte": ">= 0.29.0", + "svelte": "^3.55.0 || ^4.0.0 || ^5.0.0-next.1" + } + }, + "node_modules/@melt-ui/svelte": { + "version": "0.86.2", + "resolved": 
"https://registry.npmjs.org/@melt-ui/svelte/-/svelte-0.86.2.tgz", + "integrity": "sha512-wRVN603oIt1aXvx2QRmKqVDJgTScSvr/WJLLokkD8c4QzHgn6pfpPtUKmhV6Dvkk+OY89OG/1Irkd6ouA50Ztw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@floating-ui/core": "^1.3.1", + "@floating-ui/dom": "^1.4.5", + "@internationalized/date": "^3.5.0", + "dequal": "^2.0.3", + "focus-trap": "^7.5.2", + "nanoid": "^5.0.4" + }, + "peerDependencies": { + "svelte": "^3.0.0 || ^4.0.0 || ^5.0.0-next.118" + } + }, + "node_modules/@melt-ui/svelte/node_modules/nanoid": { + "version": "5.0.9", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.9.tgz", + "integrity": "sha512-Aooyr6MXU6HpvvWXKoVoXwKMs/KyVakWwg7xQfv5/S/RIgJMy0Ifa45H9qqYy7pTCszrHzP21Uk4PZq2HpEM8Q==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.js" + }, + "engines": { + "node": "^18 || >=20" + } + }, "node_modules/@mistralai/mistralai": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/@mistralai/mistralai/-/mistralai-1.3.0.tgz", @@ -4323,6 +4387,16 @@ "vite": "^5.0.0" } }, + "node_modules/@swc/helpers": { + "version": "0.5.15", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.15.tgz", + "integrity": "sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.8.0" + } + }, "node_modules/@tailwindcss/forms": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/@tailwindcss/forms/-/forms-0.5.7.tgz", @@ -7302,6 +7376,16 @@ "integrity": "sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==", "dev": true }, + "node_modules/focus-trap": { + "version": "7.6.2", + "resolved": "https://registry.npmjs.org/focus-trap/-/focus-trap-7.6.2.tgz", + "integrity": 
"sha512-9FhUxK1hVju2+AiQIDJ5Dd//9R2n2RAfJ0qfhF4IHGHgcoEUTMpbTeG/zbEuwaiYXfuAH6XE0/aCyxDdRM+W5w==", + "dev": true, + "license": "MIT", + "dependencies": { + "tabbable": "^6.2.0" + } + }, "node_modules/follow-redirects": { "version": "1.15.6", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", @@ -12839,6 +12923,13 @@ "node": ">= 10" } }, + "node_modules/tabbable": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/tabbable/-/tabbable-6.2.0.tgz", + "integrity": "sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew==", + "dev": true, + "license": "MIT" + }, "node_modules/table": { "version": "6.8.2", "resolved": "https://registry.npmjs.org/table/-/table-6.8.2.tgz", @@ -13123,9 +13214,10 @@ "dev": true }, "node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" }, "node_modules/tsutils": { "version": "3.21.0", diff --git a/frontend/package.json b/frontend/package.json index 3ad978dcdc370..f8bfaac307904 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -19,6 +19,8 @@ "devDependencies": { "@floating-ui/core": "^1.3.1", "@hey-api/openapi-ts": "^0.43.0", + "@melt-ui/pp": "^0.3.2", + "@melt-ui/svelte": "^0.86.2", "@playwright/test": "^1.34.3", "@rgossiaux/svelte-headlessui": "^2.0.0", "@sveltejs/adapter-static": "^3.0.6", diff --git a/frontend/src/lib/components/ArgInput.svelte b/frontend/src/lib/components/ArgInput.svelte index 8d75d5648bdbd..1084fc16c67c9 100644 --- a/frontend/src/lib/components/ArgInput.svelte +++ b/frontend/src/lib/components/ArgInput.svelte @@ -95,6 +95,7 @@ 
| { type: 'hash'; hash: string } | undefined = undefined export let otherArgs: Record = {} + export let lightHeader = false let oneOfSelected: string | undefined = undefined async function updateOneOfSelected(oneOf: SchemaProperty[] | undefined) { @@ -344,6 +345,7 @@ {contentEncoding} {format} {simpleTooltip} + {lightHeader} /> {/if} diff --git a/frontend/src/lib/components/Dev.svelte b/frontend/src/lib/components/Dev.svelte index 02967efaeb9ca..9a86f8680b10f 100644 --- a/frontend/src/lib/components/Dev.svelte +++ b/frontend/src/lib/components/Dev.svelte @@ -488,7 +488,9 @@ primarySchedule: primaryScheduleStore, selectedTrigger: selectedTriggerStore, triggersCount: triggersCount, - simplifiedPoll: writable(false) + simplifiedPoll: writable(false), + defaultValues: writable(undefined), + captureOn: writable(undefined) }) setContext('FlowEditorContext', { selectedId: selectedIdStore, @@ -715,7 +717,19 @@ {#key reload} - + { + if (ev.detail.kind === 'preprocessor') { + $testStepStore['preprocessor'] = ev.detail.args ?? {} + $selectedIdStore = 'preprocessor' + } else { + $previewArgsStore = ev.detail.args ?? {} + flowPreviewButtons?.openPreview() + } + }} + /> {/key} diff --git a/frontend/src/lib/components/FieldHeader.svelte b/frontend/src/lib/components/FieldHeader.svelte index 0dea7e0a14f89..2bbbacf1989c5 100644 --- a/frontend/src/lib/components/FieldHeader.svelte +++ b/frontend/src/lib/components/FieldHeader.svelte @@ -14,10 +14,18 @@ export let labelClass: string = '' export let prettify = false export let simpleTooltip: string | undefined = undefined + export let lightHeader = false
- + {#if prettify} {label.replace(/_/g, ' ').split(' ').map(capitalize).join(' ')} {:else} diff --git a/frontend/src/lib/components/FlowBuilder.svelte b/frontend/src/lib/components/FlowBuilder.svelte index 156954d8e121f..97c0bce3504a9 100644 --- a/frontend/src/lib/components/FlowBuilder.svelte +++ b/frontend/src/lib/components/FlowBuilder.svelte @@ -536,7 +536,9 @@ selectedTrigger: selectedTriggerStore, primarySchedule: primaryScheduleStore, triggersCount, - simplifiedPoll + simplifiedPoll, + defaultValues: writable(undefined), + captureOn: writable(undefined) }) async function loadTriggers() { @@ -1204,6 +1206,7 @@ let deploymentMsg = '' let msgInput: HTMLInputElement | undefined = undefined + let flowPreviewButtons: FlowPreviewButtons @@ -1415,7 +1418,7 @@ {abortController} /> {/if} - + {/if} - -
diff --git a/frontend/src/lib/components/Label.svelte b/frontend/src/lib/components/Label.svelte index 3d53070c6c8c1..dec8f69aae636 100644 --- a/frontend/src/lib/components/Label.svelte +++ b/frontend/src/lib/components/Label.svelte @@ -1,14 +1,25 @@ -
+
-
- {label} - -
+ {#if !headless} +
+ {label} + {#if required} + + {/if} + +
+ {/if}
diff --git a/frontend/src/lib/components/ResourceEditor.svelte b/frontend/src/lib/components/ResourceEditor.svelte index e6bd21e40b578..d87268aaf8229 100644 --- a/frontend/src/lib/components/ResourceEditor.svelte +++ b/frontend/src/lib/components/ResourceEditor.svelte @@ -24,6 +24,11 @@ export let newResource: boolean = false export let hidePath: boolean = false export let watchChanges: boolean = false + export let defaultValues: Record | undefined = undefined + + $: if (defaultValues && Object.keys(defaultValues).length > 0) { + args = defaultValues + } let isValid = true let jsonError = '' diff --git a/frontend/src/lib/components/ResourceEditorDrawer.svelte b/frontend/src/lib/components/ResourceEditorDrawer.svelte index 114e2f32f6609..c5ee96daa1b22 100644 --- a/frontend/src/lib/components/ResourceEditorDrawer.svelte +++ b/frontend/src/lib/components/ResourceEditorDrawer.svelte @@ -8,8 +8,10 @@ let drawer: Drawer let canSave = true let resource_type: string | undefined = undefined + let defaultValues: Record | undefined = undefined - let resourceEditor: { editResource: () => void } | undefined = undefined + let resourceEditor: { editResource: () => void; createResource: () => void } | undefined = + undefined let path: string | undefined = undefined @@ -21,16 +23,27 @@ drawer.openDrawer?.() } - export async function initNew(resourceType: string): Promise { + export async function initNew( + resourceType: string, + nDefaultValues?: Record + ): Promise { newResource = true path = undefined resource_type = resourceType + defaultValues = nDefaultValues drawer.openDrawer?.() } + + let mode: 'edit' | 'new' = newResource ? 'new' : 'edit' + + $: path ? (mode = 'edit') : (mode = 'new') - + {#await import('./ResourceEditor.svelte')} {:then Module} @@ -38,6 +51,7 @@ {newResource} {path} {resource_type} + {defaultValues} on:refresh bind:this={resourceEditor} bind:canSave @@ -47,7 +61,11 @@
{ saveDraft() }} + on:openTriggers={openTriggers} + on:applyArgs={applyArgs} + on:addPreprocessor={addPreprocessor} bind:editor bind:this={scriptEditor} bind:schema={script.schema} @@ -1409,6 +1470,9 @@ kind={script.kind} {template} tag={script.tag} + bind:args + bind:hasPreprocessor + bind:captureTable />
{:else} diff --git a/frontend/src/lib/components/ScriptEditor.svelte b/frontend/src/lib/components/ScriptEditor.svelte index 3ec1b0ac81189..a61402dd0d8e4 100644 --- a/frontend/src/lib/components/ScriptEditor.svelte +++ b/frontend/src/lib/components/ScriptEditor.svelte @@ -28,6 +28,8 @@ import Tabs from './common/tabs/Tabs.svelte' import Tab from './common/tabs/Tab.svelte' import { slide } from 'svelte/transition' + import CaptureTable from '$lib/components/triggers/CaptureTable.svelte' + import CaptureButton from './triggers/CaptureButton.svelte' // Exported export let schema: Schema | any = emptySchema() @@ -49,6 +51,10 @@ export let saveToWorkspace = false export let watchChanges = false export let customUi: ScriptEditorWhitelabelCustomUi = {} + export let args: Record = initialArgs + export let selectedTab: 'main' | 'preprocessor' = 'main' + export let hasPreprocessor = false + export let captureTable: CaptureTable | undefined = undefined let jobProgressReset: () => void @@ -70,9 +76,6 @@ let testJobLoader: TestJobLoader - // Test args input - let args: Record = initialArgs - let isValid: boolean = true let scriptProgress = undefined @@ -126,8 +129,6 @@ }) } - let hasPreprocessor = false - export async function inferSchema(code: string, nlang?: SupportedLanguage, resetArgs = false) { let nschema = schema ?? emptySchema() @@ -141,6 +142,10 @@ hasPreprocessor = (selectedTab === 'preprocessor' ? !result?.no_main_func : result?.has_preprocessor) ?? false + if (!hasPreprocessor && selectedTab === 'preprocessor') { + selectedTab = 'main' + } + validCode = true if (resetArgs) { args = {} @@ -238,11 +243,12 @@ url.search = '' return `${url}?collab=1` + (edit ? '' : `&path=${path}`) } - let selectedTab: 'main' | 'preprocessor' = 'main' + $: showTabs = hasPreprocessor $: !hasPreprocessor && (selectedTab = 'main') - $: selectedTab && inferSchema(code) + + let argsRender = 0 Main - Preprocessor + {#if hasPreprocessor} +
+ Preprocessor +
+ {/if}
{/if} +
{#if testIsLoading} {:else} - +
+ + +
{/if}
- + {#key argsRender} + + {/key}
- + {#if scriptProgress} @@ -448,6 +465,23 @@ compact={true} /> {/if} + +
+ +
+
diff --git a/frontend/src/lib/components/Section.svelte b/frontend/src/lib/components/Section.svelte index cea644eb48e51..1889fc708a8b1 100644 --- a/frontend/src/lib/components/Section.svelte +++ b/frontend/src/lib/components/Section.svelte @@ -10,46 +10,51 @@ export let small: boolean = false export let collapsable: boolean = false - let collapsed: boolean = true + export let collapsed: boolean = true + export let headless: boolean = false
-
-

- {#if collapsable} - + {:else} {label} - - {:else} - {label} - {/if} + {/if} - - {#if tooltip} - {tooltip} - {/if} - {#if eeOnly} - {#if !$enterpriseLicense} -
- - EE only Enterprise Edition only feature -
+ + {#if tooltip} + {tooltip} {/if} + {#if eeOnly} + {#if !$enterpriseLicense} +
+ + EE only Enterprise Edition only feature +
+ {/if} + {/if} +

+ + {#if collapsable && collapsed} + {/if} - - - -
+
+ {/if}
diff --git a/frontend/src/lib/components/Subsection.svelte b/frontend/src/lib/components/Subsection.svelte new file mode 100644 index 0000000000000..a3dd13bde11f0 --- /dev/null +++ b/frontend/src/lib/components/Subsection.svelte @@ -0,0 +1,55 @@ + + +
+ {#if !headless} +
+

+ {#if collapsable} + + {:else} + {label} + {/if} + + + {#if tooltip} + {tooltip} + {/if} + {#if eeOnly} + {#if !$enterpriseLicense} +
+ + EE only Enterprise Edition only feature +
+ {/if} + {/if} +

+ + {#if collapsable && collapsed} + + {/if} +
+ {/if} +
+ +
+
diff --git a/frontend/src/lib/components/common/alert/ConnectionIndicator.svelte b/frontend/src/lib/components/common/alert/ConnectionIndicator.svelte new file mode 100644 index 0000000000000..d378d232c8339 --- /dev/null +++ b/frontend/src/lib/components/common/alert/ConnectionIndicator.svelte @@ -0,0 +1,36 @@ + + + + +{#if connectionInfo} +
+ {#if connectionInfo.connected} + + + + + +
{connectionInfo.message ?? ''}
+
+ {:else} + + + + + + +
{connectionInfo.message ?? ''}
+
+ {/if} +
+{/if} diff --git a/frontend/src/lib/components/common/button/AnimatedButton.svelte b/frontend/src/lib/components/common/button/AnimatedButton.svelte index 438b43dc0ab64..7396b5314bdca 100644 --- a/frontend/src/lib/components/common/button/AnimatedButton.svelte +++ b/frontend/src/lib/components/common/button/AnimatedButton.svelte @@ -46,7 +46,7 @@ background: inherit; z-index: 1; overflow: hidden; - padding: var(--margin-width, 2px); + padding: var(--margin-width, 1px); } /* Circular gradient */ diff --git a/frontend/src/lib/components/common/toggleButton-v2/ToggleButton.svelte b/frontend/src/lib/components/common/toggleButton-v2/ToggleButton.svelte index e5bc62aa889fb..7f903debd8d86 100644 --- a/frontend/src/lib/components/common/toggleButton-v2/ToggleButton.svelte +++ b/frontend/src/lib/components/common/toggleButton-v2/ToggleButton.svelte @@ -48,7 +48,7 @@ {#if icon} diff --git a/frontend/src/lib/components/details/ClipboardPanel.svelte b/frontend/src/lib/components/details/ClipboardPanel.svelte index 53d442fbb6c7d..c0a230f1f772f 100644 --- a/frontend/src/lib/components/details/ClipboardPanel.svelte +++ b/frontend/src/lib/components/details/ClipboardPanel.svelte @@ -6,6 +6,7 @@ export let content: string export let title: string | undefined = undefined export let size: 'sm' | 'md' = 'sm' + export let disabled = false {#if title !== undefined} @@ -15,8 +16,13 @@
{ + if (disabled) { + return + } e.preventDefault() copyToClipboard(content) }} diff --git a/frontend/src/lib/components/details/CopyableCodeBlock.svelte b/frontend/src/lib/components/details/CopyableCodeBlock.svelte new file mode 100644 index 0000000000000..1a98f81412f1a --- /dev/null +++ b/frontend/src/lib/components/details/CopyableCodeBlock.svelte @@ -0,0 +1,27 @@ + + + + +
{ + if (disabled) { + return + } + e.preventDefault() + copyToClipboard(code) + }} +> + + +
diff --git a/frontend/src/lib/components/details/DetailPageLayout.svelte b/frontend/src/lib/components/details/DetailPageLayout.svelte index b9ba53128e0dd..d951f757411c4 100644 --- a/frontend/src/lib/components/details/DetailPageLayout.svelte +++ b/frontend/src/lib/components/details/DetailPageLayout.svelte @@ -28,7 +28,9 @@ selectedTrigger: selectedTriggerStore, primarySchedule: primaryScheduleStore, triggersCount, - simplifiedPoll + simplifiedPoll, + defaultValues: writable(undefined), + captureOn: writable(undefined) }) diff --git a/frontend/src/lib/components/details/EmailTriggerConfigSection.svelte b/frontend/src/lib/components/details/EmailTriggerConfigSection.svelte new file mode 100644 index 0000000000000..7c01cf4959973 --- /dev/null +++ b/frontend/src/lib/components/details/EmailTriggerConfigSection.svelte @@ -0,0 +1,135 @@ + + +
+ {#if showCapture && captureInfo} + + + + {/if} +
+ {#if SCRIPT_VIEW_SHOW_CREATE_TOKEN_BUTTON} + + {/if} + + {#if !isFlow} +
+
+
Call method
+ + + + +
+
+ {/if} + + {#key requestType} + {#key token} + + {/key} + {/key} + + + To trigger the job by email, send an email to the address above. The job will receive two + arguments: `raw_email` containing the raw email as string, and `parsed_email` containing the + parsed email as an object. + +
+
diff --git a/frontend/src/lib/components/details/EmailTriggerPanel.svelte b/frontend/src/lib/components/details/EmailTriggerPanel.svelte index defcdf62df660..3867564c56252 100644 --- a/frontend/src/lib/components/details/EmailTriggerPanel.svelte +++ b/frontend/src/lib/components/details/EmailTriggerPanel.svelte @@ -1,69 +1,40 @@ @@ -84,60 +55,24 @@ Email triggers execute scripts and flows when emails are sent to specific addresses. Each trigger has its own unique email address that can be used to invoke the script or flow. - {#if loading} {:else} {#if emailDomain} - {#if SCRIPT_VIEW_SHOW_CREATE_TOKEN_BUTTON} - - {/if} - - {#if !isFlow} -
-
-
Call method
- - - - -
-
- {/if} - - {#key requestType} - {#key token} - - {/key} - {/key} - - To trigger the job by email, send an email to the address above. The job will receive two - arguments: `raw_email` containing the raw email as string, and `parsed_email` containing the - parsed email as an object. - + {:else}
diff --git a/frontend/src/lib/components/flows/FlowEditor.svelte b/frontend/src/lib/components/flows/FlowEditor.svelte index 842c5df04d9d3..e6c205f33cbda 100644 --- a/frontend/src/lib/components/flows/FlowEditor.svelte +++ b/frontend/src/lib/components/flows/FlowEditor.svelte @@ -72,7 +72,7 @@
{:else} - + {/if} diff --git a/frontend/src/lib/components/flows/content/CapturePayload.svelte b/frontend/src/lib/components/flows/content/CapturePayload.svelte deleted file mode 100644 index 65fa03002fc13..0000000000000 --- a/frontend/src/lib/components/flows/content/CapturePayload.svelte +++ /dev/null @@ -1,125 +0,0 @@ - - - { - startCapturePoint() - interval = setInterval(() => { - getCaptureInput() - }, 1000) - }} - on:close={() => interval && clearInterval(interval)} -> - - Send a payload at: -

CURL example

- -
-
{`curl -X POST ${hostname}/api/w/${$workspaceStore}/capture_u/${$pathStore} \\
-   -H 'Content-Type: application/json' \\
-   -d '{"foo": 42}'`}
-
-
- Listening for new requests - - - - -
-
- -
- - - - -

Derived schema

-
- -
-

Test args

- -
-
diff --git a/frontend/src/lib/components/flows/content/FlowEditorPanel.svelte b/frontend/src/lib/components/flows/content/FlowEditorPanel.svelte index deb9c6dd1749c..d432c43d24822 100644 --- a/frontend/src/lib/components/flows/content/FlowEditorPanel.svelte +++ b/frontend/src/lib/components/flows/content/FlowEditorPanel.svelte @@ -12,6 +12,8 @@ import { initFlowStepWarnings } from '../utils' import { dfs } from '../dfs' import FlowPreprocessorModule from './FlowPreprocessorModule.svelte' + import type { TriggerContext } from '$lib/components/triggers' + import { insertNewPreprocessorModule } from '../flowStateUtils' export let noEditor = false export let enableAi = false @@ -21,6 +23,7 @@ const { selectedId, flowStore, flowStateStore, flowInputsStore, pathStore, initialPath } = getContext('FlowEditorContext') + const { selectedTrigger, defaultValues, captureOn } = getContext('TriggerContext') function checkDup(modules: FlowModule[]): string | undefined { let seenModules: string[] = [] for (const m of modules) { @@ -60,7 +63,24 @@ {#if $selectedId?.startsWith('settings')} {:else if $selectedId === 'Input'} - + { + $selectedId = 'triggers' + selectedTrigger.set(ev.detail.kind) + defaultValues.set(ev.detail.config) + captureOn.set(true) + }} + on:applyArgs + on:updateSchema={(e) => { + const { schema, redirect } = e.detail + $flowStore.schema = schema + if (redirect) { + $selectedId = 'Input' + } + }} + /> {:else if $selectedId === 'Result'}

The result of the flow will be the result of the last node.

{:else if $selectedId === 'constants'} @@ -71,12 +91,22 @@ {:else if $selectedId === 'triggers'} { + await insertNewPreprocessorModule(flowStore, flowStateStore, { + language: 'bun', + subkind: 'preprocessor' + }) + $selectedId = 'preprocessor' + }} currentPath={$pathStore} {initialPath} schema={$flowStore.schema} {noEditor} newItem={newFlow} isFlow={true} + hasPreprocessor={!!$flowStore.value.preprocessor_module} + canHavePreprocessor={true} /> {:else if $selectedId.startsWith('subflow:')}
('FlowEditorContext') - let capturePayload: CapturePayload let inputLibraryDrawer: Drawer let jsonPayload: Drawer let pendingJson: string @@ -41,21 +39,10 @@ const yOffset = 191 - - {#if !disabled}
-
Copy input's schema from
- +
Copy input's schema from
+ +{#if open} +
+ {#if displayArrow} +
+ {/if} + + {#if closeButton} + + {/if} +
+{/if} + + diff --git a/frontend/src/lib/components/scriptEditor/LogPanel.svelte b/frontend/src/lib/components/scriptEditor/LogPanel.svelte index 8b7a59c44d59c..2c1c6ba18c3b0 100644 --- a/frontend/src/lib/components/scriptEditor/LogPanel.svelte +++ b/frontend/src/lib/components/scriptEditor/LogPanel.svelte @@ -37,6 +37,7 @@ export let diffEditor: DiffEditor | undefined = undefined export let args: Record | undefined = undefined export let workspace: string | undefined = undefined + export let showCaptures: boolean = false type DrawerContent = { mode: 'json' | Preview['language'] | 'plain' @@ -87,170 +88,181 @@ - - Logs & Result - History +
+ + Logs & Result + History + {#if showCaptures} + Captures + {/if} - - - {#if selectedTab === 'logs'} - - - {#if previewJob?.is_flow_step == false && previewJob?.flow_status && !(typeof previewJob.flow_status == 'object' && '_metadata' in previewJob.flow_status)} - - - - {/if} - - - - - - {#if previewJob != undefined && 'result' in previewJob} -
-
- - - {#if lang && editor && diffEditor && args && previewJob?.result && typeof previewJob?.result == 'object' && `error` in previewJob?.result && previewJob?.result.error} - + +
+ {#if selectedTab === 'logs'} + + + {#if previewJob?.is_flow_step == false && previewJob?.flow_status && !(typeof previewJob.flow_status == 'object' && '_metadata' in previewJob.flow_status)} + + + + {/if} + + + + + + {#if previewJob != undefined && 'result' in previewJob} +
+
+ + + {#if lang && editor && diffEditor && args && previewJob?.result && typeof previewJob?.result == 'object' && `error` in previewJob?.result && previewJob?.result.error} + + {/if} + + +
+
+ {:else} +
+ + {#if previewIsLoading} + + {:else} + Test to see the result here {/if} - - -
-
- {:else} -
- - {#if previewIsLoading} - - {:else} - Test to see the result here - {/if} - - - The result renderer in Windmill supports rich display rendering, allowing you to - customize the display format of your results. - -
- {/if} - - - - {/if} - {#if selectedTab === 'history'} -
- - - - Id - Created at - Success - Result - Code - Logs - - - - {#each pastPreviews as { id, created_at, success }} - - - {id.substring(30)} - - {displayDate(created_at)} - - {#if success} - - {:else} - - {/if} - - - - - - - - - - - - {/each} - - + + + The result renderer in Windmill supports rich display rendering, allowing you + to customize the display format of your results. + +
+ {/if} + + + + {/if} + {#if selectedTab === 'history'} +
+ + + + Id + Created at + Success + Result + Code + Logs + + + + {#each pastPreviews as { id, created_at, success }} + + + {id.substring(30)} + + {displayDate(created_at)} + + {#if success} + + {:else} + + {/if} + + + + + + + + + + + + {/each} + + +
+ {/if} + {#if selectedTab === 'captures'} + + {/if}
- {/if} - - + + +
diff --git a/frontend/src/lib/components/triggers.ts b/frontend/src/lib/components/triggers.ts index efc8c8a901395..e585c7d989621 100644 --- a/frontend/src/lib/components/triggers.ts +++ b/frontend/src/lib/components/triggers.ts @@ -1,4 +1,4 @@ -import type { TriggersCount } from '$lib/gen' +import type { CaptureTriggerKind, TriggersCount } from '$lib/gen' import type { Writable } from 'svelte/store' export type ScheduleTrigger = { @@ -10,19 +10,12 @@ export type ScheduleTrigger = { } export type TriggerContext = { - selectedTrigger: Writable< - | 'webhooks' - | 'emails' - | 'schedules' - | 'cli' - | 'routes' - | 'websockets' - | 'scheduledPoll' - | 'kafka' - > + selectedTrigger: Writable primarySchedule: Writable triggersCount: Writable simplifiedPoll: Writable + defaultValues: Writable | undefined> + captureOn: Writable } export function setScheduledPollSchedule( @@ -45,3 +38,30 @@ export function setScheduledPollSchedule( } }) } + +export type TriggerKind = + | 'webhooks' + | 'emails' + | 'schedules' + | 'cli' + | 'routes' + | 'websockets' + | 'scheduledPoll' + | 'kafka' + +export function captureTriggerKindToTriggerKind(kind: CaptureTriggerKind): TriggerKind { + switch (kind) { + case 'webhook': + return 'webhooks' + case 'email': + return 'emails' + case 'http': + return 'routes' + case 'websocket': + return 'websockets' + case 'kafka': + return 'kafka' + default: + throw new Error(`Unknown CaptureTriggerKind: ${kind}`) + } +} diff --git a/frontend/src/lib/components/triggers/CaptureButton.svelte b/frontend/src/lib/components/triggers/CaptureButton.svelte new file mode 100644 index 0000000000000..153c2a99923ab --- /dev/null +++ b/frontend/src/lib/components/triggers/CaptureButton.svelte @@ -0,0 +1,81 @@ + + + + + + + +
+ + + + + +
+
+
diff --git a/frontend/src/lib/components/triggers/CaptureIcon.svelte b/frontend/src/lib/components/triggers/CaptureIcon.svelte new file mode 100644 index 0000000000000..a53a61e078133 --- /dev/null +++ b/frontend/src/lib/components/triggers/CaptureIcon.svelte @@ -0,0 +1,17 @@ + + +
+ +
+ + + +
+
diff --git a/frontend/src/lib/components/triggers/CaptureSection.svelte b/frontend/src/lib/components/triggers/CaptureSection.svelte new file mode 100644 index 0000000000000..81f4b894e75ae --- /dev/null +++ b/frontend/src/lib/components/triggers/CaptureSection.svelte @@ -0,0 +1,91 @@ + + + + +
+
+
+
+ + + + + {#if captureInfo.active} + + {:else} + + Start capturing to test your runnables with real data. Once active, all incoming + payloads will be captured and displayed below, allowing you to test your runnables + effectively. + + {/if} +
+ + {#if disabled} +
+ Enter a valid configuration to start capturing. +
+ {/if} +
+ + {#if $$slots.default} +
+ +
+ {/if} + + +
+
diff --git a/frontend/src/lib/components/triggers/CaptureTable.svelte b/frontend/src/lib/components/triggers/CaptureTable.svelte new file mode 100644 index 0000000000000..a46885d818ccf --- /dev/null +++ b/frontend/src/lib/components/triggers/CaptureTable.svelte @@ -0,0 +1,245 @@ + + +{#if captures.length > 0 || !hideCapturesWhenEmpty} +
+ {/each} + {/if} +
+ +{/if} diff --git a/frontend/src/lib/components/triggers/CaptureWrapper.svelte b/frontend/src/lib/components/triggers/CaptureWrapper.svelte new file mode 100644 index 0000000000000..03625869a9f31 --- /dev/null +++ b/frontend/src/lib/components/triggers/CaptureWrapper.svelte @@ -0,0 +1,255 @@ + + +
+ {#if cloudDisabled} + + {capitalize(captureType)} triggers are disabled in the multi-tenant cloud. + + {:else if captureType === 'websocket'} + { + handleCapture() + }} + /> + {:else if captureType === 'webhook'} + { + handleCapture() + }} + /> + {:else if captureType === 'http'} + { + handleCapture() + }} + /> + {:else if captureType === 'email'} + { + handleCapture() + }} + /> + {:else if captureType === 'kafka'} + { + handleCapture() + }} + /> + {/if} +
diff --git a/frontend/src/lib/components/triggers/KafkaTriggerEditor.svelte b/frontend/src/lib/components/triggers/KafkaTriggerEditor.svelte index 33083d7ec62b5..4044225223307 100644 --- a/frontend/src/lib/components/triggers/KafkaTriggerEditor.svelte +++ b/frontend/src/lib/components/triggers/KafkaTriggerEditor.svelte @@ -9,10 +9,14 @@ drawer?.openEdit(ePath, isFlow) } - export async function openNew(is_flow: boolean, initial_script_path?: string) { + export async function openNew( + is_flow: boolean, + initial_script_path?: string, + defaultValues?: Record + ) { open = true await tick() - drawer?.openNew(is_flow, initial_script_path) + drawer?.openNew(is_flow, initial_script_path, defaultValues) } let drawer: KafkaTriggerEditorInner diff --git a/frontend/src/lib/components/triggers/KafkaTriggerEditorInner.svelte b/frontend/src/lib/components/triggers/KafkaTriggerEditorInner.svelte index c934cb0e1e8f5..f4ed0968ed592 100644 --- a/frontend/src/lib/components/triggers/KafkaTriggerEditorInner.svelte +++ b/frontend/src/lib/components/triggers/KafkaTriggerEditorInner.svelte @@ -10,11 +10,10 @@ import { canWrite, emptyString, sendUserToast } from '$lib/utils' import { createEventDispatcher } from 'svelte' import Section from '$lib/components/Section.svelte' - import { Loader2, Save, X, Plus } from 'lucide-svelte' + import { Loader2, Save } from 'lucide-svelte' import Label from '$lib/components/Label.svelte' import Toggle from '../Toggle.svelte' - import { fade } from 'svelte/transition' - import ResourcePicker from '../ResourcePicker.svelte' + import KafkaTriggersConfigSection from './KafkaTriggersConfigSection.svelte' let drawer: Drawer let is_flow: boolean = false @@ -26,14 +25,12 @@ let fixedScriptPath = '' let path: string = '' let pathError = '' - let kafka_resource_path = '' - let group_id = '' - let topics: string[] = [''] - let dirtyGroupId = false let enabled = false let dirtyPath = false let can_write = true let drawerLoading = true + let defaultValues: Record 
| undefined = undefined + let args: Record = {} const dispatch = createEventDispatcher() @@ -47,7 +44,6 @@ itemKind = isFlow ? 'flow' : 'script' edit = true dirtyPath = false - dirtyGroupId = false await loadTrigger() } catch (err) { sendUserToast(`Could not load kafka trigger: ${err}`, true) @@ -56,23 +52,27 @@ } } - export async function openNew(nis_flow: boolean, fixedScriptPath_?: string) { + export async function openNew( + nis_flow: boolean, + fixedScriptPath_?: string, + nDefaultValues?: Record + ) { drawerLoading = true try { drawer?.openDrawer() is_flow = nis_flow edit = false itemKind = nis_flow ? 'flow' : 'script' - kafka_resource_path = '' - group_id = '' - topics = [''] - dirtyGroupId = false + args.kafka_resource_path = nDefaultValues?.kafka_resource_path ?? '' + args.group_id = nDefaultValues?.group_id ?? '' + args.topics = nDefaultValues?.topics ?? [''] initialScriptPath = '' fixedScriptPath = fixedScriptPath_ ?? '' script_path = fixedScriptPath path = '' initialPath = '' dirtyPath = false + defaultValues = nDefaultValues } finally { drawerLoading = false } @@ -88,9 +88,9 @@ is_flow = s.is_flow path = s.path - kafka_resource_path = s.kafka_resource_path - group_id = s.group_id - topics = s.topics + args.kafka_resource_path = s.kafka_resource_path + args.group_id = s.group_id + args.topics = s.topics enabled = s.enabled can_write = canWrite(s.path, s.extra_perms, $userStore) @@ -105,9 +105,9 @@ path, script_path, is_flow, - kafka_resource_path, - group_id, - topics + kafka_resource_path: args.kafka_resource_path, + group_id: args.group_id, + topics: args.topics } }) sendUserToast(`Kafka trigger ${path} updated`) @@ -119,9 +119,9 @@ script_path, is_flow, enabled: true, - kafka_resource_path, - group_id, - topics + kafka_resource_path: args.kafka_resource_path, + group_id: args.group_id, + topics: args.topics } }) sendUserToast(`Kafka trigger ${path} created`) @@ -133,12 +133,21 @@ drawer.closeDrawer() } - $: topicsError = topics.some((b) => 
/[^[a-zA-Z0-9-_.]/.test(b)) ? 'Invalid topics' : '' - $: groupIdError = /[^a-zA-Z0-9-_.]/.test(group_id) ? 'Invalid group ID' : '' + function useDefaultValues() { + if (args.kafka_resource_path && args.kafka_resource_path != '') { + return false + } + if (!defaultValues) { + return false + } + return ( + defaultValues.brokers && + defaultValues.brokers.length > 0 && + defaultValues.brokers.some((broker: string) => broker.trim() !== '') + ) + } - $: !dirtyGroupId && - path && - (group_id = `windmill_consumer-${$workspaceStore}-${path.replaceAll('/', '__')}`) + let isValid = false @@ -172,15 +181,7 @@ {#if can_write}
-
-
-
-
- Resource - -
- -
- - - -
-
+

diff --git a/frontend/src/lib/components/triggers/KafkaTriggersConfigSection.svelte b/frontend/src/lib/components/triggers/KafkaTriggersConfigSection.svelte new file mode 100644 index 0000000000000..9e9320a8f99bf --- /dev/null +++ b/frontend/src/lib/components/triggers/KafkaTriggersConfigSection.svelte @@ -0,0 +1,222 @@ + + +

+ {#if showCapture && captureInfo} + + {/if} +
+
+
+ + + {#if !staticInputDisabled} + { + if (ev.detail === 'static') { + delete args.kafka_resource_path + args.brokers = [''] + args.security = { + label: 'PLAINTEXT' + } + } else { + delete args.brokers + delete args.security + } + }} + > + + + + {/if} + + + {#if selected === 'resource'} + + {:else} + + {/if} + +
+ +
+ + + +
+
+
+
diff --git a/frontend/src/lib/components/triggers/KafkaTriggersPanel.svelte b/frontend/src/lib/components/triggers/KafkaTriggersPanel.svelte index fa277f4428559..718415be03fff 100644 --- a/frontend/src/lib/components/triggers/KafkaTriggersPanel.svelte +++ b/frontend/src/lib/components/triggers/KafkaTriggersPanel.svelte @@ -1,26 +1,41 @@ -
- - Kafka triggers execute scripts and flows in response to messages published to Kafka topics. - +{#if !$enterpriseLicense} + + Kafka triggers are an enterprise only feature. + +{:else if isCloudHosted()} + + Kafka triggers are disabled in the multi-tenant cloud. + +{:else} +
+ + Kafka triggers execute scripts and flows in response to messages published to Kafka topics. + + { + saveTrigger(path, e.detail.config) + }} + on:applyArgs + on:addPreprocessor + cloudDisabled={false} + triggerType="kafka" + {isFlow} + {data} + {path} + {isEditor} + {canHavePreprocessor} + {hasPreprocessor} + {newItem} + /> - {#if !$enterpriseLicense} - - Kafka triggers are an enterprise only feature. - - {:else if isCloudHosted()} - - Kafka triggers are disabled in the multi-tenant cloud. - - {:else} -
- {#if newItem} - - Deploy the {isFlow ? 'flow' : 'script'} to add kafka triggers. - - {:else} - - {#if kafkaTriggers} + {#if !newItem} + {#if kafkaTriggers} +
{#if kafkaTriggers.length == 0} -
No kafka triggers
+
No kafka triggers
{:else}
{#each kafkaTriggers as kafkaTrigger (kafkaTrigger.path)} @@ -104,10 +131,10 @@ {/each}
{/if} - {:else} - - {/if} +
+ {:else} + {/if} -
- {/if} -
+ {/if} +
+{/if} diff --git a/frontend/src/lib/components/triggers/RouteEditor.svelte b/frontend/src/lib/components/triggers/RouteEditor.svelte index 0b670423efbbe..ce093aa976710 100644 --- a/frontend/src/lib/components/triggers/RouteEditor.svelte +++ b/frontend/src/lib/components/triggers/RouteEditor.svelte @@ -9,10 +9,14 @@ drawer?.openEdit(ePath, isFlow) } - export async function openNew(is_flow: boolean, initial_script_path?: string) { + export async function openNew( + is_flow: boolean, + initial_script_path?: string, + defaultValues?: Record + ) { open = true await tick() - drawer?.openNew(is_flow, initial_script_path) + drawer?.openNew(is_flow, initial_script_path, defaultValues) } export async function getTriggers() { diff --git a/frontend/src/lib/components/triggers/RouteEditorConfigSection.svelte b/frontend/src/lib/components/triggers/RouteEditorConfigSection.svelte new file mode 100644 index 0000000000000..1ebc2da408879 --- /dev/null +++ b/frontend/src/lib/components/triggers/RouteEditorConfigSection.svelte @@ -0,0 +1,172 @@ + + +
+ {#if showCapture && captureInfo} + + + + + + {/if} +
+ {#if !($userStore?.is_admin || $userStore?.is_super_admin)} + + Route endpoints can only be edited by workspace admins + +
+ {/if} +
+ + + + + + + + + +
+
+ + Full endpoint + + { + currentTarget.select() + }} + /> +
+ +
{dirtyRoutePath ? routeError : ''}
+
+
+
+
diff --git a/frontend/src/lib/components/triggers/RouteEditorInner.svelte b/frontend/src/lib/components/triggers/RouteEditorInner.svelte index a25939e7bef7f..14f7332bd35eb 100644 --- a/frontend/src/lib/components/triggers/RouteEditorInner.svelte +++ b/frontend/src/lib/components/triggers/RouteEditorInner.svelte @@ -1,5 +1,5 @@ {#if static_asset_config} @@ -220,7 +189,7 @@
-
- {#if !($userStore?.is_admin || $userStore?.is_super_admin)} - - Route endpoints can only be edited by workspace admins - -
- {/if} -
- - - - - - - - - -
-
- - Full endpoint - - { - currentTarget.select() - }} - /> -
- -
{dirtyRoutePath ? routeError : ''}
-
-
-
+
-
+
-
Request type
- - - - -
-
-
Authentication
- - - - +
+
diff --git a/frontend/src/lib/components/triggers/RoutesPanel.svelte b/frontend/src/lib/components/triggers/RoutesPanel.svelte index b09a0347a4683..4bb8f97d4dcad 100644 --- a/frontend/src/lib/components/triggers/RoutesPanel.svelte +++ b/frontend/src/lib/components/triggers/RoutesPanel.svelte @@ -1,25 +1,38 @@ - - -
- {#if !$simplifiedPoll} - - Webhooks - Schedules - Routes - Websockets - Kafka - Email - {#if isFlow} - Scheduled Poll - {/if} - - - {#if $selectedTrigger === 'webhooks'} -
- -
- {/if} - {#if $selectedTrigger === 'emails'} -
- -
- {/if} + const dispatch = createEventDispatcher() - {#if $selectedTrigger === 'routes'} -
- -
- {/if} - - {#if $selectedTrigger === 'websockets'} -
- -
- {/if} - - {#if $selectedTrigger === 'kafka'} -
- -
- {/if} + onDestroy(() => { + dispatch('exitTriggers') + }) + - {#if $selectedTrigger === 'schedules'} -
- -
- {/if} + + {#if !$simplifiedPoll} + + Webhooks + Schedules + HTTP + Websockets + Kafka + Email + {#if isFlow} + Scheduled Poll + {/if} - {#if $selectedTrigger === 'scheduledPoll'} -
- -
- {/if} -
-
- {:else} -
- -
- {/if} -
+ + {#if $selectedTrigger === 'webhooks'} +
+ +
+ {:else if $selectedTrigger === 'emails'} +
+ +
+ {:else if $selectedTrigger === 'routes'} +
+ +
+ {:else if $selectedTrigger === 'websockets'} +
+ +
+ {:else if $selectedTrigger === 'kafka'} +
+ +
+ {:else if $selectedTrigger === 'schedules'} +
+ +
+ {:else if $selectedTrigger === 'scheduledPoll'} +
+ +
+ {/if} +
+ + {:else} +
+ +
+ {/if}
diff --git a/frontend/src/lib/components/triggers/TriggersEditorSection.svelte b/frontend/src/lib/components/triggers/TriggersEditorSection.svelte new file mode 100644 index 0000000000000..6f7820c40b224 --- /dev/null +++ b/frontend/src/lib/components/triggers/TriggersEditorSection.svelte @@ -0,0 +1,118 @@ + + +
+ +
+ {#if isEditor} + + {/if} + + {#if !noSave} + {@const disabled = newItem || cloudDisabled} + + + + {#if disabled} + {#if newItem} + Deploy the runnable to enable trigger creation + {:else if cloudDisabled} + {capitalize(triggerType)} triggers are disabled in the multi-tenant cloud + {/if} + {:else} + Create new {captureTypeLabels[triggerType].toLowerCase()} + {/if} + + + {/if} +
+
+ + {#if isEditor} + { + const { schema, redirect } = e.detail + $flowStore.schema = schema + if (redirect) { + $selectedId = 'Input' + } + }} + on:saveTrigger + bind:args + {data} + showCapture={$captureOn} + /> + {:else} + + {/if} +
diff --git a/frontend/src/lib/components/triggers/TriggersWrapper.svelte b/frontend/src/lib/components/triggers/TriggersWrapper.svelte new file mode 100644 index 0000000000000..1883099ee6e32 --- /dev/null +++ b/frontend/src/lib/components/triggers/TriggersWrapper.svelte @@ -0,0 +1,62 @@ + + +
+ {#if cloudDisabled} + + {capitalize(triggerType)} triggers are disabled in the multi-tenant cloud. + + {:else if triggerType === 'websocket'} + + {:else if triggerType === 'webhook'} + + {:else if triggerType === 'http'} + + {:else if triggerType === 'email'} + + {:else if triggerType === 'kafka'} + + {/if} +
diff --git a/frontend/src/lib/components/triggers/WebhooksConfigSection.svelte b/frontend/src/lib/components/triggers/WebhooksConfigSection.svelte new file mode 100644 index 0000000000000..da67569709b7a --- /dev/null +++ b/frontend/src/lib/components/triggers/WebhooksConfigSection.svelte @@ -0,0 +1,434 @@ + + + { + token = e.detail + triggerTokens?.listTokens() + }} + newTokenWorkspace={$workspaceStore} + newTokenLabel={`webhook-${$userStore?.username ?? 'superadmin'}-${generateRandomString(4)}`} + {scopes} +/> + +
+ {#if showCapture && captureInfo} + + + + + + {/if} + +
+ {#if SCRIPT_VIEW_SHOW_CREATE_TOKEN_BUTTON} + + {/if} + +
+
+
Request type
+ + + + +
+
+
Call method
+ + + {#if !isFlow} + + {/if} + + + +
+
+
Token configuration
+ + + + +
+
+ + +
+ + REST + {#if SCRIPT_VIEW_SHOW_EXAMPLE_CURL} + Curl + {/if} + Fetch + + + {#key token} + +
+ + + {#if requestType !== 'get_path'} + + {/if} + {#key requestType} + {#key tokenType} + + {/key} + {/key} +
+
+ +
+ {#key args} + {#key requestType} + {#key webhookType} + {#key tokenType} +
{ + e.preventDefault() + copyToClipboard(curlCode()) + }} + > + + +
+ {/key} + {/key} + {/key} + {/key} +
+
+ + {#key args} + {#key requestType} + {#key webhookType} + {#key tokenType} + {#key token} +
{ + e.preventDefault() + copyToClipboard(fetchCode()) + }} + > + + +
+ {/key}{/key}{/key}{/key} + {/key} +
+ {/key} +
+
+
+ +
+
diff --git a/frontend/src/lib/components/triggers/WebhooksPanel.svelte b/frontend/src/lib/components/triggers/WebhooksPanel.svelte index 3746ab67cac86..a853f0b629090 100644 --- a/frontend/src/lib/components/triggers/WebhooksPanel.svelte +++ b/frontend/src/lib/components/triggers/WebhooksPanel.svelte @@ -1,27 +1,8 @@ @@ -233,173 +30,26 @@ done` Webhooks trigger scripts or flows via HTTP requests. Each webhook can be configured to run synchronously or asynchronously. You can secure webhooks using tokens with specific permissions. - - { - token = e.detail - triggerTokens?.listTokens() - }} - newTokenWorkspace={$workspaceStore} - newTokenLabel={`webhook-${$userStore?.username ?? 'superadmin'}-${generateRandomString(4)}`} - {scopes} - /> - {#if SCRIPT_VIEW_SHOW_CREATE_TOKEN_BUTTON} - - {/if} - -
-
-
Request type
- - - - -
-
-
Call method
- - - {#if !isFlow} - - {/if} - - - -
-
-
Token configuration
- - - - -
-
- - - - REST - {#if SCRIPT_VIEW_SHOW_EXAMPLE_CURL} - Curl - {/if} - Fetch - - - {#key token} - -
- - - {#if requestType !== 'get_path'} - - {/if} - {#key requestType} - {#key tokenType} - - {/key} - {/key} -
-
- -
- {#key args} - {#key requestType} - {#key webhookType} - {#key tokenType} -
{ - e.preventDefault() - copyToClipboard(curlCode()) - }} - > - - -
- {/key} - {/key} - {/key} - {/key} -
-
- - {#key args} - {#key requestType} - {#key webhookType} - {#key tokenType} - {#key token} -
{ - e.preventDefault() - copyToClipboard(fetchCode()) - }} - > - - -
- {/key}{/key}{/key}{/key} - {/key} -
- {/key} -
-
- -
- - {#if newItem} -
The webhooks are only valid for a given path and will only trigger the deployed version of the {isFlow ? 'flow' : 'script'}. {/if} + +
diff --git a/frontend/src/lib/components/triggers/WebsocketEditorConfigSection.svelte b/frontend/src/lib/components/triggers/WebsocketEditorConfigSection.svelte new file mode 100644 index 0000000000000..bba6bf7c9345a --- /dev/null +++ b/frontend/src/lib/components/triggers/WebsocketEditorConfigSection.svelte @@ -0,0 +1,184 @@ + + +
+ {#if showCapture && captureInfo} + + {/if} +
+
+ { + url = ev.detail === 'runnable' ? '$script:' : '' + url_runnable_args = {} + }} + > + + + +
+ {#if url?.startsWith('$')} +
+
+
+
+ Runnable + +
+
+ { + dirtyUrl = true + const { path, itemKind } = ev.detail + url = `$${itemKind}:${path ?? ''}` + }} + /> +
+ {dirtyUrl ? urlError : ''} +
+
+
+ + {#if url.split(':')[1]?.length > 0} + {#if urlRunnableSchema} +

Arguments

+ {#await import('$lib/components/SchemaForm.svelte')} + + {:then Module} + {#key urlRunnableSchema} + + {/key} + {/await} + {#if urlRunnableSchema.properties && Object.keys(urlRunnableSchema.properties).length === 0} +
This runnable takes no arguments
+ {/if} + {:else} + + {/if} + {/if} + {:else} +
+ +
+ {/if} +
+
diff --git a/frontend/src/lib/components/triggers/WebsocketTriggerEditor.svelte b/frontend/src/lib/components/triggers/WebsocketTriggerEditor.svelte index d92c468b8259f..65c0f7f1b3e14 100644 --- a/frontend/src/lib/components/triggers/WebsocketTriggerEditor.svelte +++ b/frontend/src/lib/components/triggers/WebsocketTriggerEditor.svelte @@ -9,10 +9,14 @@ drawer?.openEdit(ePath, isFlow) } - export async function openNew(is_flow: boolean, initial_script_path?: string) { + export async function openNew( + is_flow: boolean, + initial_script_path?: string, + defaultValues?: Record + ) { open = true await tick() - drawer?.openNew(is_flow, initial_script_path) + drawer?.openNew(is_flow, initial_script_path, defaultValues) } let drawer: WebsocketTriggerEditorInner diff --git a/frontend/src/lib/components/triggers/WebsocketTriggerEditorInner.svelte b/frontend/src/lib/components/triggers/WebsocketTriggerEditorInner.svelte index d8001197abf14..b6e1825b828f9 100644 --- a/frontend/src/lib/components/triggers/WebsocketTriggerEditorInner.svelte +++ b/frontend/src/lib/components/triggers/WebsocketTriggerEditorInner.svelte @@ -24,8 +24,7 @@ import { fade } from 'svelte/transition' import JsonEditor from '../apps/editor/settingsPanel/inputEditor/JsonEditor.svelte' import type { Schema } from '$lib/common' - import ToggleButtonGroup from '../common/toggleButton-v2/ToggleButtonGroup.svelte' - import ToggleButton from '../common/toggleButton-v2/ToggleButton.svelte' + import WebsocketEditorConfigSection from './WebsocketEditorConfigSection.svelte' let drawer: Drawer let is_flow: boolean = false @@ -38,7 +37,6 @@ let path: string = '' let pathError = '' let url = '' - let urlError = '' let dirtyUrl = false let enabled = false let filters: { @@ -46,7 +44,7 @@ value: any }[] = [] let initial_messages: WebsocketTriggerInitialMessage[] = [] - let url_runnable_args: Record = {} + let url_runnable_args: Record | undefined = {} let dirtyPath = false let can_write = true let drawerLoading = true 
@@ -72,14 +70,18 @@ } } - export async function openNew(nis_flow: boolean, fixedScriptPath_?: string) { + export async function openNew( + nis_flow: boolean, + fixedScriptPath_?: string, + defaultValues?: Record + ) { drawerLoading = true try { drawer?.openDrawer() is_flow = nis_flow edit = false itemKind = nis_flow ? 'flow' : 'script' - url = '' + url = defaultValues?.url ?? '' dirtyUrl = false initialScriptPath = '' fixedScriptPath = fixedScriptPath_ ?? '' @@ -88,7 +90,7 @@ initialPath = '' filters = [] initial_messages = [] - url_runnable_args = {} + url_runnable_args = defaultValues?.url_runnable_args ?? {} dirtyPath = false } finally { drawerLoading = false @@ -108,7 +110,7 @@ url = s.url enabled = s.enabled filters = s.filters - initial_messages = s.initial_messages + initial_messages = s.initial_messages ?? [] url_runnable_args = s.url_runnable_args can_write = canWrite(s.path, s.extra_perms, $userStore) @@ -147,35 +149,6 @@ .filter((v): v is { path: string; is_flow: boolean; args: ScriptArgs } => !!v) $: loadInitialMessageRunnableSchemas(initialMessageRunnables) - let urlRunnableSchema: Schema | undefined = emptySchema() - async function loadUrlRunnableSchema(url: string) { - if (url.startsWith('$')) { - const path = url.split(':')[1] - if (path && path.length > 0) { - try { - let scriptOrFlow: Script | Flow = url.startsWith('$flow:') - ? await FlowService.getFlowByPath({ - workspace: $workspaceStore!, - path: url.split(':')[1] - }) - : await ScriptService.getScriptByPath({ - workspace: $workspaceStore!, - path: url.split(':')[1] - }) - urlRunnableSchema = scriptOrFlow.schema as Schema - } catch (err) { - sendUserToast( - `Could not query runnable schema for ${url.startsWith('$flow:') ? 
'flow' : 'script'} ${ - url.split(':')[1] - }: ${err}`, - true - ) - } - } - } - } - $: loadUrlRunnableSchema(url) - $: invalidInitialMessages = initial_messages.some((v) => { if ('runnable_result' in v) { return !v.runnable_result.path @@ -222,25 +195,7 @@ drawer.closeDrawer() } - let validateTimeout: NodeJS.Timeout | undefined = undefined - function validateUrl(url: string) { - urlError = '' - if (validateTimeout) { - clearTimeout(validateTimeout) - } - validateTimeout = setTimeout(() => { - console.log('validating ' + url) - if (url.startsWith('$')) { - if (/^(\$script|\$flow):[^\s]+$/.test(url) === false) { - urlError = 'Invalid runnable path' - } - } else if (/^(ws:|wss:)\/\/[^\s]+$/.test(url) === false) { - urlError = 'Invalid websocket URL' - } - validateTimeout = undefined - }, 500) - } - $: validateUrl(url) + let isValid = false @@ -277,7 +232,7 @@
-
-
- { - url = ev.detail === 'runnable' ? '$script:' : '' - url_runnable_args = {} - }} - > - - - -
- {#if url.startsWith('$')} -
-
-
-
- Runnable - -
-
- { - dirtyUrl = true - const { path, itemKind } = ev.detail - url = `$${itemKind}:${path ?? ''}` - }} - /> -
- {dirtyUrl ? urlError : ''} -
-
-
- - {#if url.split(':')[1]?.length > 0} - {#if urlRunnableSchema} -

Arguments

- {#await import('$lib/components/SchemaForm.svelte')} - - {:then Module} - - {/await} - {#if urlRunnableSchema.properties && Object.keys(urlRunnableSchema.properties).length === 0} -
This runnable takes no arguments
- {/if} - {:else} - - {/if} - {/if} - {:else} -
- -
- {/if} -
+

diff --git a/frontend/src/lib/components/triggers/WebsocketTriggersPanel.svelte b/frontend/src/lib/components/triggers/WebsocketTriggersPanel.svelte index 3443fba21bb72..85c28c3bb1c79 100644 --- a/frontend/src/lib/components/triggers/WebsocketTriggersPanel.svelte +++ b/frontend/src/lib/components/triggers/WebsocketTriggersPanel.svelte @@ -1,26 +1,41 @@ @@ -252,7 +256,7 @@

- +
@@ -269,7 +273,18 @@ {/if} - + { + if (ev.detail.kind === 'preprocessor') { + $testStepStore['preprocessor'] = ev.detail.args ?? {} + $selectedIdStore = 'preprocessor' + } else { + $previewArgsStore = ev.detail.args ?? {} + flowPreviewButtons?.openPreview() + } + }} + /> diff --git a/frontend/svelte.config.js b/frontend/svelte.config.js index 81961f82b9cdb..12eae9d139352 100644 --- a/frontend/svelte.config.js +++ b/frontend/svelte.config.js @@ -1,15 +1,18 @@ import preprocess from 'svelte-preprocess' import adapter from '@sveltejs/adapter-static' +import { preprocessMeltUI, sequence } from '@melt-ui/pp' /** @type {import('@sveltejs/kit').Config} */ const config = { // Consult https://github.com/sveltejs/svelte-preprocess // for more information about preprocessors - preprocess: [ + preprocess: sequence([ preprocess({ postcss: true - }) - ], + }), + preprocessMeltUI() + ]), + kit: { adapter: process.env.CLOUDFLARE || process.env.NOCATCHALL diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json index c6e52bf449525..85bb4245adfb3 100644 --- a/frontend/tsconfig.json +++ b/frontend/tsconfig.json @@ -31,6 +31,6 @@ "strict": true, "types": [] }, - "include": ["src/**/*.js", "src/**/*.ts", "src/**/*.d.ts", "src/**/*.svelte"], + "include": ["src/**/*.js", "src/**/*.ts", "src/**/*.d.ts", "src/**/*.svelte", "src/lib/components/triggers/TriggersEditorSection.svelte"], "extends": "./.svelte-kit/tsconfig.json" }