From 5eb9afe74ee03118b5c7bc30e99e4a86893d7f36 Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Fri, 27 Sep 2024 18:00:01 -0300 Subject: [PATCH 01/18] Broadcast vanish requests --- Dockerfile | 8 +-- compose.yml | 13 ++++ {spam_filter => event_deleter}/.gitignore | 0 {spam_filter => event_deleter}/Cargo.lock | 36 +++++------ {spam_filter => event_deleter}/Cargo.toml | 2 +- {spam_filter => event_deleter}/README.md | 0 .../src/analyzer_worker.rs | 0 .../src/bin/spam_cleaner.rs | 2 +- .../src/deletion_task.rs | 0 .../src/event_analyzer.rs | 0 {spam_filter => event_deleter}/src/lib.rs | 0 .../src/relay_commander.rs | 0 .../src/worker_pool.rs | 0 {spam_filter => event_deleter}/test.jsonl | 0 strfry/plugins/broadcast_vanish_requests.ts | 59 +++++++++++++++++++ strfry/plugins/nos_policy.ts | 7 ++- strfry/plugins/policies.ts | 6 +- 17 files changed, 107 insertions(+), 26 deletions(-) rename {spam_filter => event_deleter}/.gitignore (100%) rename {spam_filter => event_deleter}/Cargo.lock (99%) rename {spam_filter => event_deleter}/Cargo.toml (96%) rename {spam_filter => event_deleter}/README.md (100%) rename {spam_filter => event_deleter}/src/analyzer_worker.rs (100%) rename {spam_filter => event_deleter}/src/bin/spam_cleaner.rs (99%) rename {spam_filter => event_deleter}/src/deletion_task.rs (100%) rename {spam_filter => event_deleter}/src/event_analyzer.rs (100%) rename {spam_filter => event_deleter}/src/lib.rs (100%) rename {spam_filter => event_deleter}/src/relay_commander.rs (100%) rename {spam_filter => event_deleter}/src/worker_pool.rs (100%) rename {spam_filter => event_deleter}/test.jsonl (100%) create mode 100644 strfry/plugins/broadcast_vanish_requests.ts diff --git a/Dockerfile b/Dockerfile index 68a1d81..1fe4a64 100644 --- a/Dockerfile +++ b/Dockerfile @@ -19,13 +19,13 @@ ENV PATH="/root/.cargo/bin:${PATH}" RUN rustc --version -COPY ./spam_filter/Cargo.toml ./spam_filter/Cargo.lock /build/spam_filter/ +COPY ./event_deleter/Cargo.toml ./event_deleter/Cargo.lock /build/event_deleter/ -WORKDIR /build/spam_filter +WORKDIR /build/event_deleter RUN cargo fetch -COPY ./spam_filter/src /build/spam_filter/src +COPY ./event_deleter/src /build/event_deleter/src RUN cargo build --release @@ -54,7 +54,7 @@ WORKDIR /app COPY --from=build /build/strfry/strfry strfry -COPY --from=build /build/spam_filter/target/release/spam_cleaner /usr/local/bin/spam_cleaner +COPY --from=build /build/event_deleter/target/release/spam_cleaner /usr/local/bin/spam_cleaner RUN chmod +x /usr/local/bin/spam_cleaner diff --git a/compose.yml b/compose.yml index 1553005..20d3f6a 100644 --- a/compose.yml +++ b/compose.yml @@ -3,4 +3,17 @@ services: build: . 
ports: - "7777:7777" + environment: + - RELAY_URL=wss://example.com + - REDIS_URL=redis://redis:6379 + redis: + image: redis:7.2.4 + ports: + - "6379:6379" + command: redis-server --loglevel notice + volumes: + - redis_data:/data + +volumes: + redis_data: \ No newline at end of file diff --git a/spam_filter/.gitignore b/event_deleter/.gitignore similarity index 100% rename from spam_filter/.gitignore rename to event_deleter/.gitignore diff --git a/spam_filter/Cargo.lock b/event_deleter/Cargo.lock similarity index 99% rename from spam_filter/Cargo.lock rename to event_deleter/Cargo.lock index dd7335a..25e1362 100644 --- a/spam_filter/Cargo.lock +++ b/event_deleter/Cargo.lock @@ -511,6 +511,24 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +[[package]] +name = "event_deleter" +version = "0.1.0" +dependencies = [ + "async-trait", + "clap", + "env_logger", + "nostr-sdk", + "regex", + "serde", + "serde_json", + "thiserror", + "tokio", + "tokio-util", + "tracing", + "tracing-subscriber", +] + [[package]] name = "fnv" version = "1.0.7" @@ -1657,24 +1675,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "spam_filter" -version = "0.1.0" -dependencies = [ - "async-trait", - "clap", - "env_logger", - "nostr-sdk", - "regex", - "serde", - "serde_json", - "thiserror", - "tokio", - "tokio-util", - "tracing", - "tracing-subscriber", -] - [[package]] name = "spin" version = "0.9.8" diff --git a/spam_filter/Cargo.toml b/event_deleter/Cargo.toml similarity index 96% rename from spam_filter/Cargo.toml rename to event_deleter/Cargo.toml index 0a7a18d..d94c32d 100644 --- a/spam_filter/Cargo.toml +++ b/event_deleter/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "spam_filter" +name = "event_deleter" version = "0.1.0" edition = "2021" diff --git a/spam_filter/README.md b/event_deleter/README.md similarity index 100% rename from spam_filter/README.md rename to event_deleter/README.md diff --git a/spam_filter/src/analyzer_worker.rs b/event_deleter/src/analyzer_worker.rs similarity index 100% rename from spam_filter/src/analyzer_worker.rs rename to event_deleter/src/analyzer_worker.rs diff --git a/spam_filter/src/bin/spam_cleaner.rs b/event_deleter/src/bin/spam_cleaner.rs similarity index 99% rename from spam_filter/src/bin/spam_cleaner.rs rename to event_deleter/src/bin/spam_cleaner.rs index b20f22d..1514e8a 100644 --- a/spam_filter/src/bin/spam_cleaner.rs +++ b/event_deleter/src/bin/spam_cleaner.rs @@ -1,7 +1,7 @@ use clap::Parser; use nostr_sdk::Event; use serde_json::Deserializer; -use spam_filter::{ +use event_deleter::{ analyzer_worker::ValidationWorker, deletion_task::spawn_deletion_task, event_analyzer::{RejectReason, Validator}, diff --git a/spam_filter/src/deletion_task.rs b/event_deleter/src/deletion_task.rs similarity index 100% rename from spam_filter/src/deletion_task.rs rename to event_deleter/src/deletion_task.rs diff --git a/spam_filter/src/event_analyzer.rs b/event_deleter/src/event_analyzer.rs similarity index 100% rename from spam_filter/src/event_analyzer.rs rename to event_deleter/src/event_analyzer.rs diff --git a/spam_filter/src/lib.rs b/event_deleter/src/lib.rs similarity index 100% rename from spam_filter/src/lib.rs rename to event_deleter/src/lib.rs diff --git a/spam_filter/src/relay_commander.rs b/event_deleter/src/relay_commander.rs similarity index 100% rename from spam_filter/src/relay_commander.rs rename to event_deleter/src/relay_commander.rs diff --git 
a/spam_filter/src/worker_pool.rs b/event_deleter/src/worker_pool.rs similarity index 100% rename from spam_filter/src/worker_pool.rs rename to event_deleter/src/worker_pool.rs diff --git a/spam_filter/test.jsonl b/event_deleter/test.jsonl similarity index 100% rename from spam_filter/test.jsonl rename to event_deleter/test.jsonl diff --git a/strfry/plugins/broadcast_vanish_requests.ts b/strfry/plugins/broadcast_vanish_requests.ts new file mode 100644 index 0000000..1e70e61 --- /dev/null +++ b/strfry/plugins/broadcast_vanish_requests.ts @@ -0,0 +1,59 @@ +import type { Policy } from "https://raw.githubusercontent.com/planetary-social/strfry-policies/refs/heads/export_log/mod.ts"; +import { log } from "https://raw.githubusercontent.com/planetary-social/strfry-policies/refs/heads/export_log/mod.ts"; +import { connect, parseURL } from "https://deno.land/x/redis/mod.ts"; + +const REQUEST_TO_VANISH_KIND = 62; +const REDIS_URL = Deno.env.get("REDIS_URL"); +const RELAY_URL = Deno.env.get("RELAY_URL"); +const REDIS_CONNECT_OPTIONS = parseURL(REDIS_URL); +const REDIS = await connect(REDIS_CONNECT_OPTIONS); +const STREAM_KEY = "vanish_requests"; +const ONE_WEEK_MS = 7 * 24 * 60 * 60 * 1000; // One week in milliseconds + +if (!REDIS_URL) { + throw new Error("REDIS_URL environment variable is not set."); +} + +if (!RELAY_URL) { + throw new Error("RELAY_URL environment variable is not set."); +} + +const broadcastVanishRequests: Policy = async (msg) => { + const event = msg.event; + const accept: { id: string; action: string; msg: string } = { + id: event.id, + action: "accept", + msg: "", + }; + + if (event.kind !== REQUEST_TO_VANISH_KIND) { + return accept; + } + + const match = event.tags + .filter((tag) => tag["0"].toLowerCase().trim() === "relay") + .map((tag) => tag["1"].toLowerCase().trim()) + .find((relay) => relay === "all_relays" || relay === RELAY_URL); + + if (!match) { + return accept; + } + + await broadcastVanishRequest(event); + + return accept; +}; + +async function broadcastVanishRequest(event: any) { + log( + `Pushing vanish request: id: ${event.id}, pubkey: ${event.pubkey}, tags: ${event.tags}, content: ${event.content}` + ); + + try { + await REDIS.xadd(STREAM_KEY, "*", event); + } catch (error) { + log(`Failed to push request ${event.id} to Redis Stream: ${error}`); + } +} + +export default broadcastVanishRequests; diff --git a/strfry/plugins/nos_policy.ts b/strfry/plugins/nos_policy.ts index 9c475ab..3f452b8 100644 --- a/strfry/plugins/nos_policy.ts +++ b/strfry/plugins/nos_policy.ts @@ -12,6 +12,7 @@ const ALLOWED = { 5, // Event deletion 6, // Repost 7, // Reaction + 62, // Request to Vanish 1059, // Gift wrap messages 1984, // Reports 10000, // Mute list @@ -29,7 +30,11 @@ const DISALLOWED = { const nosPolicy: Policy = (msg) => { const event = msg.event; const content = event.content; - let res = { id: event.id, action: "reject", msg: "blocked: not authorized" }; + let res = { + id: event.id, + action: "reject", + msg: "blocked: not authorized", + }; const isAllowedPub = ALLOWED.pubs.hasOwnProperty(event.pubkey); const isAllowedEventKind = ALLOWED.eventKinds.includes(event.kind); diff --git a/strfry/plugins/policies.ts b/strfry/plugins/policies.ts index b815389..1986eb6 100755 --- a/strfry/plugins/policies.ts +++ b/strfry/plugins/policies.ts @@ -7,8 +7,9 @@ import { rateLimitPolicy, readStdin, writeStdout, -} from "https://raw.githubusercontent.com/planetary-social/strfry-policies/refs/heads/nos-changes/mod.ts"; +} from 
"https://raw.githubusercontent.com/planetary-social/strfry-policies/refs/heads/export_log/mod.ts"; import nosPolicy from "./nos_policy.ts"; +import broadcastVanishRequests from "./broadcast_vanish_requests.ts"; const localhost = "127.0.0.1"; const eventsIp = await getEventsIp(); @@ -53,6 +54,9 @@ const policies = [ whitelist: [localhost, eventsIp], }, ], + + // Broadcast vanish requests to Redis + broadcastVanishRequests, ]; for await (const msg of readStdin()) { From f264a4ccc93863b85351d67f41e684ad67a86b1d Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Tue, 1 Oct 2024 11:08:50 -0300 Subject: [PATCH 02/18] Add tests, use nos-changes branch --- strfry/plugins/broadcast_vanish_requests.ts | 76 ++++++------ strfry/plugins/policies.ts | 12 +- .../tests/broadcast_vanish_requests.test.ts | 112 ++++++++++++++++++ strfry/plugins/tests/run_tests.sh | 2 + strfry/plugins/tests/test.ts | 34 ++++++ 5 files changed, 197 insertions(+), 39 deletions(-) create mode 100644 strfry/plugins/tests/broadcast_vanish_requests.test.ts create mode 100755 strfry/plugins/tests/run_tests.sh create mode 100644 strfry/plugins/tests/test.ts diff --git a/strfry/plugins/broadcast_vanish_requests.ts b/strfry/plugins/broadcast_vanish_requests.ts index 1e70e61..9aa3a1b 100644 --- a/strfry/plugins/broadcast_vanish_requests.ts +++ b/strfry/plugins/broadcast_vanish_requests.ts @@ -1,59 +1,61 @@ -import type { Policy } from "https://raw.githubusercontent.com/planetary-social/strfry-policies/refs/heads/export_log/mod.ts"; -import { log } from "https://raw.githubusercontent.com/planetary-social/strfry-policies/refs/heads/export_log/mod.ts"; -import { connect, parseURL } from "https://deno.land/x/redis/mod.ts"; +import type { + Policy, + OutputMessage, +} from "https://raw.githubusercontent.com/planetary-social/strfry-policies/refs/heads/nos-changes/mod.ts"; +import { log } from "https://raw.githubusercontent.com/planetary-social/strfry-policies/refs/heads/nos-changes/mod.ts"; const REQUEST_TO_VANISH_KIND = 62; -const REDIS_URL = Deno.env.get("REDIS_URL"); -const RELAY_URL = Deno.env.get("RELAY_URL"); -const REDIS_CONNECT_OPTIONS = parseURL(REDIS_URL); -const REDIS = await connect(REDIS_CONNECT_OPTIONS); const STREAM_KEY = "vanish_requests"; -const ONE_WEEK_MS = 7 * 24 * 60 * 60 * 1000; // One week in milliseconds -if (!REDIS_URL) { - throw new Error("REDIS_URL environment variable is not set."); -} +function createBroadcastVanishRequests( + redis: any, + relay_url: string +): Policy { + if (!redis) { + throw new Error("REDIS_URL environment variable is not set."); + } -if (!RELAY_URL) { - throw new Error("RELAY_URL environment variable is not set."); -} + if (!relay_url) { + throw new Error("RELAY_URL environment variable is not set."); + } -const broadcastVanishRequests: Policy = async (msg) => { - const event = msg.event; - const accept: { id: string; action: string; msg: string } = { - id: event.id, - action: "accept", - msg: "", - }; + return async (msg) => { + const event = msg.event; + const accept: OutputMessage = { + id: event.id, + action: "accept", + msg: "", + }; - if (event.kind !== REQUEST_TO_VANISH_KIND) { - return accept; - } + if (event.kind !== REQUEST_TO_VANISH_KIND) { + return accept; + } - const match = event.tags - .filter((tag) => tag["0"].toLowerCase().trim() === "relay") - .map((tag) => tag["1"].toLowerCase().trim()) - .find((relay) => relay === "all_relays" || relay === RELAY_URL); + const match = event.tags + .filter((tag) => tag["0"].toLowerCase().trim() === "relay") + .map((tag) => 
tag["1"].toLowerCase().trim()) + .find((relay) => relay === "all_relays" || relay === relay_url); - if (!match) { - return accept; - } + if (!match) { + return accept; + } - await broadcastVanishRequest(event); + await broadcastVanishRequest(event, redis); - return accept; -}; + return accept; + }; +} -async function broadcastVanishRequest(event: any) { +async function broadcastVanishRequest(event: any, redis: any) { log( `Pushing vanish request: id: ${event.id}, pubkey: ${event.pubkey}, tags: ${event.tags}, content: ${event.content}` ); try { - await REDIS.xadd(STREAM_KEY, "*", event); + await redis.xadd(STREAM_KEY, "*", event); } catch (error) { log(`Failed to push request ${event.id} to Redis Stream: ${error}`); } } -export default broadcastVanishRequests; +export { createBroadcastVanishRequests }; diff --git a/strfry/plugins/policies.ts b/strfry/plugins/policies.ts index 1986eb6..61f2e9e 100755 --- a/strfry/plugins/policies.ts +++ b/strfry/plugins/policies.ts @@ -7,9 +7,10 @@ import { rateLimitPolicy, readStdin, writeStdout, -} from "https://raw.githubusercontent.com/planetary-social/strfry-policies/refs/heads/export_log/mod.ts"; +} from "https://raw.githubusercontent.com/planetary-social/strfry-policies/refs/heads/nos-changes/mod.ts"; import nosPolicy from "./nos_policy.ts"; -import broadcastVanishRequests from "./broadcast_vanish_requests.ts"; +import { createBroadcastVanishRequests } from "./broadcast_vanish_requests.ts"; +import { connect, parseURL } from "https://deno.land/x/redis/mod.ts"; const localhost = "127.0.0.1"; const eventsIp = await getEventsIp(); @@ -19,6 +20,13 @@ const one_hour = 60 * one_minute; const one_day = 24 * one_hour; const two_days = 2 * one_day; +const redis_url = Deno.env.get("REDIS_URL"); +const redis_connect_options = parseURL(redis_url); +const redis = await connect(redis_connect_options); + +const relay_url = Deno.env.get("RELAY_URL"); +const broadcastVanishRequests = createBroadcastVanishRequests(redis, relay_url); + // Policies that reject faster should be at the top. So synchronous policies should be at the top. 
const policies = [ nosPolicy, diff --git a/strfry/plugins/tests/broadcast_vanish_requests.test.ts b/strfry/plugins/tests/broadcast_vanish_requests.test.ts new file mode 100644 index 0000000..e01ab4c --- /dev/null +++ b/strfry/plugins/tests/broadcast_vanish_requests.test.ts @@ -0,0 +1,112 @@ +import { assertEquals } from "https://deno.land/std@0.181.0/testing/asserts.ts"; +import { buildEvent, buildInputMessage } from "./test.ts"; +import { createBroadcastVanishRequests } from "../broadcast_vanish_requests.ts"; +import type { Event } from "https://raw.githubusercontent.com/planetary-social/strfry-policies/refs/heads/nos-changes/mod.ts"; + +class RedisMock { + called: boolean = false; + + async xadd(streamKey: string, id: string, event: Event): Promise { + this.called = true; + } +} + +Deno.test({ + name: "pushes a vanish request with global relay filter", + fn: async () => { + const msg = buildInputMessage({ + sourceType: "IP4", + sourceInfo: "1.1.1.1", + event: buildEvent({ + kind: 62, + tags: [ + ["relay", "ALL_RELAYS"], + ["relay", "notexample.com"], + ], + }), + }); + + const redisMock = new RedisMock(); + const broadcastVanishRequests = createBroadcastVanishRequests( + redisMock, + "example.com" + ); + + assertEquals((await broadcastVanishRequests(msg)).action, "accept"); + assertEquals(redisMock.called, true); + }, + sanitizeResources: false, +}); + +Deno.test({ + name: "pushes a vanish request with specific relay filter", + fn: async () => { + const msg = buildInputMessage({ + sourceType: "IP4", + sourceInfo: "1.1.1.1", + event: buildEvent({ + kind: 62, + tags: [ + ["relay", "example.com"], + ["relay", "notexample.com"], + ], + }), + }); + + const redisMock = new RedisMock(); + const broadcastVanishRequests = createBroadcastVanishRequests( + redisMock, + "example.com" + ); + + assertEquals((await broadcastVanishRequests(msg)).action, "accept"); + assertEquals(redisMock.called, true); + }, + sanitizeResources: false, +}); + +Deno.test({ + name: "doesn't push a vanish request with no matching relay filter", + fn: async () => { + const msg = buildInputMessage({ + sourceType: "IP4", + sourceInfo: "1.1.1.1", + event: buildEvent({ + kind: 62, + tags: [["relay", "notexample.com"]], + }), + }); + + const redisMock = new RedisMock(); + const broadcastVanishRequests = createBroadcastVanishRequests( + redisMock, + "example.com" + ); + + assertEquals((await broadcastVanishRequests(msg)).action, "accept"); + assertEquals(redisMock.called, false); + }, + sanitizeResources: false, +}); + +Deno.test({ + name: "doesn't push when kind is not a vanish request", + fn: async () => { + const msg = buildInputMessage({ + sourceType: "IP4", + sourceInfo: "1.1.1.1", + event: buildEvent({ + kind: 1, + }), + }); + + const redisMock = new RedisMock(); + const broadcastVanishRequests = createBroadcastVanishRequests( + redisMock, + "example.com" + ); + assertEquals((await broadcastVanishRequests(msg)).action, "accept"); + assertEquals(redisMock.called, false); + }, + sanitizeResources: false, +}); diff --git a/strfry/plugins/tests/run_tests.sh b/strfry/plugins/tests/run_tests.sh new file mode 100755 index 0000000..112b4bf --- /dev/null +++ b/strfry/plugins/tests/run_tests.sh @@ -0,0 +1,2 @@ +#!/bin/bash +deno test --watch --allow-read --allow-write --allow-env --log-level=info \ No newline at end of file diff --git a/strfry/plugins/tests/test.ts b/strfry/plugins/tests/test.ts new file mode 100644 index 0000000..ce81066 --- /dev/null +++ b/strfry/plugins/tests/test.ts @@ -0,0 +1,34 @@ +import type { + Event, + 
InputMessage, +} from "https://raw.githubusercontent.com/planetary-social/strfry-policies/refs/heads/nos-changes/mod.ts"; + +/** Constructs a fake event for tests. */ +function buildEvent(attrs: Partial = {}): Event { + const event: Event = { + kind: 1, + id: "", + content: "", + created_at: 0, + pubkey: "", + sig: "", + tags: [], + }; + + return Object.assign(event, attrs); +} + +/** Constructs a fake strfry input message for tests. */ +function buildInputMessage(attrs: Partial = {}): InputMessage { + const msg = { + event: buildEvent(), + receivedAt: 0, + sourceInfo: "127.0.0.1", + sourceType: "IP4", + type: "new", + }; + + return Object.assign(msg, attrs); +} + +export { buildEvent, buildInputMessage }; From 8391733b695d3c7ebf9c637cb4b32c2273fae5fa Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Mon, 30 Sep 2024 13:53:25 -0300 Subject: [PATCH 03/18] Delete from vanish requests --- Dockerfile | 5 +- compose.yml | 6 + event_deleter/Cargo.lock | 667 +++++++++++++++++++- event_deleter/Cargo.toml | 4 + event_deleter/README.md | 4 + event_deleter/src/analyzer_worker.rs | 13 +- event_deleter/src/bin/spam_cleaner.rs | 38 +- event_deleter/src/bin/vanish_listener.rs | 82 +++ event_deleter/src/deletion_task.rs | 40 +- event_deleter/src/event_analyzer.rs | 79 ++- event_deleter/src/lib.rs | 1 + event_deleter/src/relay_commander.rs | 13 +- event_deleter/src/vanish_listener_task.rs | 139 ++++ event_deleter/src/worker_pool.rs | 11 +- start.sh | 13 +- strfry/plugins/broadcast_vanish_requests.ts | 4 +- 16 files changed, 1056 insertions(+), 63 deletions(-) create mode 100644 event_deleter/src/bin/vanish_listener.rs create mode 100644 event_deleter/src/vanish_listener_task.rs diff --git a/Dockerfile b/Dockerfile index 1fe4a64..a2446c1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -55,7 +55,10 @@ WORKDIR /app COPY --from=build /build/strfry/strfry strfry COPY --from=build /build/event_deleter/target/release/spam_cleaner /usr/local/bin/spam_cleaner +COPY --from=build /build/event_deleter/target/release/vanish_listener vanish_listener RUN chmod +x /usr/local/bin/spam_cleaner +RUN chmod +x /app/vanish_listener -ENTRYPOINT ["/app/strfry", "relay"] +COPY ./start.sh start.sh +CMD ./start.sh diff --git a/compose.yml b/compose.yml index 20d3f6a..8311cf3 100644 --- a/compose.yml +++ b/compose.yml @@ -6,6 +6,12 @@ services: environment: - RELAY_URL=wss://example.com - REDIS_URL=redis://redis:6379 + - APP__event_deleter__redis_url=redis://redis:6379 + - APP__ENVIRONMENT=development + depends_on: + - redis + restart: always + redis: image: redis:7.2.4 diff --git a/event_deleter/Cargo.lock b/event_deleter/Cargo.lock index 25e1362..54eb7fb 100644 --- a/event_deleter/Cargo.lock +++ b/event_deleter/Cargo.lock @@ -114,6 +114,12 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "arc-swap" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" + [[package]] name = "arrayvec" version = "0.7.6" @@ -178,6 +184,33 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +[[package]] +name = "aws-lc-rs" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f95446d919226d587817a7d21379e6eb099b97b45110a7f272a444ca5c54070" +dependencies = [ + "aws-lc-sys", + "mirai-annotations", + "paste", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.21.2" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3ddc4a5b231dd6958b140ff3151b6412b3f4321fab354f399eec8f14b06df62" +dependencies = [ + "bindgen", + "cc", + "cmake", + "dunce", + "fs_extra", + "libc", + "paste", +] + [[package]] name = "backtrace" version = "0.3.74" @@ -203,6 +236,12 @@ dependencies = [ "bitcoin_hashes 0.14.0", ] +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + [[package]] name = "base64" version = "0.22.1" @@ -221,6 +260,29 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d965446196e3b7decd44aa7ee49e31d630118f90ef12f97900f262eb915c951d" +[[package]] +name = "bindgen" +version = "0.69.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0" +dependencies = [ + "bitflags", + "cexpr", + "clang-sys", + "itertools", + "lazy_static", + "lazycell", + "log", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash 1.1.0", + "shlex", + "syn", + "which", +] + [[package]] name = "bip39" version = "2.0.0" @@ -297,6 +359,9 @@ name = "bitflags" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +dependencies = [ + "serde", +] [[package]] name = "block-buffer" @@ -349,9 +414,20 @@ version = "1.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07b1695e2c7e8fc85310cde85aeaab7e3097f593c91d209d3f9df76c928100f0" dependencies = [ + "jobserver", + "libc", "shlex", ] +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + [[package]] name = "cfg-if" version = "1.0.0" @@ -393,6 +469,17 @@ dependencies = [ "zeroize", ] +[[package]] +name = "clang-sys" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" +dependencies = [ + "glob", + "libc", + "libloading", +] + [[package]] name = "clap" version = "4.5.18" @@ -433,12 +520,100 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" +[[package]] +name = "cmake" +version = "0.1.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb1e43aa7fd152b1f968787f7dbcdeb306d1867ff373c69955211876c053f91a" +dependencies = [ + "cc", +] + [[package]] name = "colorchoice" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "config" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7328b20597b53c2454f0b1919720c25c7339051c02b72b7e05409e00b14132be" +dependencies = [ + "async-trait", + "convert_case", + "json5", + 
"lazy_static", + "nom", + "pathdiff", + "ron", + "rust-ini", + "serde", + "serde_json", + "toml", + "yaml-rust", +] + +[[package]] +name = "const-random" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" +dependencies = [ + "const-random-macro", +] + +[[package]] +name = "const-random-macro" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" +dependencies = [ + "getrandom", + "once_cell", + "tiny-keccak", +] + +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + [[package]] name = "cpufeatures" version = "0.2.14" @@ -448,6 +623,12 @@ dependencies = [ "libc", ] +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + [[package]] name = "crypto-common" version = "0.1.6" @@ -476,6 +657,21 @@ dependencies = [ "subtle", ] +[[package]] +name = "dlv-list" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f" +dependencies = [ + "const-random", +] + +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + [[package]] name = "either" version = "1.13.0" @@ -511,19 +707,33 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +[[package]] +name = "errno" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + [[package]] name = "event_deleter" version = "0.1.0" dependencies = [ "async-trait", "clap", + "config", "env_logger", + "nonzero_ext", "nostr-sdk", + "redis", "regex", "serde", "serde_json", "thiserror", "tokio", + "tokio-rustls", "tokio-util", "tracing", "tracing-subscriber", @@ -544,6 +754,12 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + [[package]] name = "futures" version = "0.3.30" @@ -650,6 +866,12 @@ version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + [[package]] name = "gloo-timers" version = "0.2.6" @@ -662,6 +884,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "hashbrown" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" + [[package]] name = "hashbrown" version = "0.14.5" @@ -708,6 +936,15 @@ dependencies = [ "digest", ] +[[package]] +name = "home" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +dependencies = [ + "windows-sys 0.52.0", +] + [[package]] name = "http" version = "1.1.0" @@ -828,7 +1065,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.14.5", ] [[package]] @@ -865,12 +1102,30 @@ version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +[[package]] +name = "jobserver" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" +dependencies = [ + "libc", +] + [[package]] name = "js-sys" version = "0.3.70" @@ -880,18 +1135,57 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "json5" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96b0db21af676c1ce64250b5f40f3ce2cf27e4e47cb91ed91eb6fe9350b430c1" +dependencies = [ + "pest", + "pest_derive", + "serde", +] + [[package]] name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + [[package]] name = "libc" version = "0.2.158" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" +[[package]] +name = "libloading" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" +dependencies = [ + "cfg-if", + "windows-targets", +] + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linux-raw-sys" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" + [[package]] name = "lnurl-pay" version = "0.6.0" @@ -926,7 +1220,7 @@ version = "0.12.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "37ee39891760e7d94734f6f63fedc29a2e4a152f836120753a72503f09fcf904" dependencies = [ - "hashbrown", + "hashbrown 0.14.5", ] [[package]] @@ -950,6 +1244,12 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + [[package]] name = "miniz_oxide" version = "0.8.0" @@ -971,6 +1271,12 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "mirai-annotations" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9be0862c1b3f26a88803c4a49de6889c10e608b3ee9344e6ef5b45fb37ad3d1" + [[package]] name = "negentropy" version = "0.3.1" @@ -983,6 +1289,22 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a88da9dd148bbcdce323dd6ac47d369b4769d4a3b78c6c52389b9269f77932" +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nonzero_ext" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21" + [[package]] name = "nostr" version = "0.35.0" @@ -990,7 +1312,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56db234b2e07901e372f34e9463f91590579cd8e6dbd34ed2ccc7e461e4ba639" dependencies = [ "aes", - "base64", + "base64 0.22.1", "bech32", "bip39", "bitcoin", @@ -1102,6 +1424,34 @@ dependencies = [ "winapi", ] +[[package]] +name = "num-bigint" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", +] + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + [[package]] name = "nwc" version = "0.35.0" @@ -1137,6 +1487,22 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "ordered-multimap" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ed8acf08e98e744e5384c8bc63ceb0364e68a6854187221c18df61c4797690e" +dependencies = [ + "dlv-list", + "hashbrown 0.13.2", +] + [[package]] name = "overload" version = "0.1.1" @@ -1177,6 +1543,18 @@ dependencies = [ "subtle", ] +[[package]] +name = "paste" +version = "1.0.15" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "pathdiff" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" + [[package]] name = "pbkdf2" version = "0.12.2" @@ -1193,6 +1571,51 @@ version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +[[package]] +name = "pest" +version = "2.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdbef9d1d47087a895abd220ed25eb4ad973a5e26f6a4367b038c25e28dfc2d9" +dependencies = [ + "memchr", + "thiserror", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d3a6e3394ec80feb3b6393c725571754c6188490265c61aaf260810d6b95aa0" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94429506bde1ca69d1b5601962c73f4172ab4726571a59ea95931218cb0e930e" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pest_meta" +version = "2.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac8a071862e93690b6e34e9a5fb8e33ff3734473ac0245b27232222c4906a33f" +dependencies = [ + "once_cell", + "pest", + "sha2", +] + [[package]] name = "pin-project" version = "1.1.5" @@ -1245,6 +1668,16 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "prettyplease" +version = "0.2.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479cf940fbbb3426c32c5d5176f62ad57549a0bb84773423ba8be9d089f5faba" +dependencies = [ + "proc-macro2", + "syn", +] + [[package]] name = "proc-macro2" version = "1.0.86" @@ -1264,7 +1697,7 @@ dependencies = [ "pin-project-lite", "quinn-proto", "quinn-udp", - "rustc-hash", + "rustc-hash 2.0.0", "rustls", "socket2", "thiserror", @@ -1281,7 +1714,7 @@ dependencies = [ "bytes", "rand", "ring", - "rustc-hash", + "rustc-hash 2.0.0", "rustls", "slab", "thiserror", @@ -1341,6 +1774,35 @@ dependencies = [ "getrandom", ] +[[package]] +name = "redis" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7e86f5670bd8b028edfb240f0616cad620705b31ec389d55e4f3da2c38dcd48" +dependencies = [ + "arc-swap", + "async-trait", + "bytes", + "combine", + "futures-util", + "itoa", + "num-bigint", + "percent-encoding", + "pin-project-lite", + "rustls", + "rustls-native-certs", + "rustls-pemfile", + "rustls-pki-types", + "ryu", + "sha1_smol", + "socket2", + "tokio", + "tokio-rustls", + "tokio-util", + "url", + "webpki-roots", +] + [[package]] name = "redox_syscall" version = "0.5.4" @@ -1400,7 +1862,7 @@ version = "0.12.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8f4955649ef5c38cc7f9e8aa41761d48fb9677197daea9984dc54f56aad5e63" dependencies = [ - "base64", + "base64 0.22.1", "bytes", "futures-core", "futures-util", @@ -1452,24 +1914,67 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "ron" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b91f7eff05f748767f183df4320a63d6936e9c6107d97c9e6bdd9784f4289c94" +dependencies = [ + "base64 0.21.7", + "bitflags", 
+ "serde", + "serde_derive", +] + +[[package]] +name = "rust-ini" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e2a3bcec1f113553ef1c88aae6c020a369d03d55b58de9869a0908930385091" +dependencies = [ + "cfg-if", + "ordered-multimap", +] + [[package]] name = "rustc-demangle" version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + [[package]] name = "rustc-hash" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" +[[package]] +name = "rustix" +version = "0.38.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + [[package]] name = "rustls" version = "0.23.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2dabaac7466917e566adb06783a81ca48944c6898a1b08b9374106dd671f4c8" dependencies = [ + "aws-lc-rs", + "log", "once_cell", "ring", "rustls-pki-types", @@ -1478,13 +1983,26 @@ dependencies = [ "zeroize", ] +[[package]] +name = "rustls-native-certs" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "rustls-pki-types", + "schannel", + "security-framework", +] + [[package]] name = "rustls-pemfile" version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425" dependencies = [ - "base64", + "base64 0.22.1", "rustls-pki-types", ] @@ -1500,6 +2018,7 @@ version = "0.102.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" dependencies = [ + "aws-lc-rs", "ring", "rustls-pki-types", "untrusted", @@ -1520,6 +2039,15 @@ dependencies = [ "cipher", ] +[[package]] +name = "schannel" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9aaafd5a2b6e3d657ff009d82fbd630b6bd54dd4eb06f21693925cdf80f9b8b" +dependencies = [ + "windows-sys 0.59.0", +] + [[package]] name = "scopeguard" version = "1.2.0" @@ -1559,6 +2087,29 @@ dependencies = [ "cc", ] +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea4a292869320c0272d7bc55a5a6aafaff59b4f63404a003887b679a2e05b4b6" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "serde" version = "1.0.210" @@ -1592,6 +2143,15 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_spanned" +version = "0.6.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" +dependencies = [ + "serde", +] + [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -1615,6 +2175,12 @@ dependencies = [ "digest", ] +[[package]] +name = "sha1_smol" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" + [[package]] name = "sha2" version = "0.10.8" @@ -1743,6 +2309,15 @@ dependencies = [ "once_cell", ] +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + [[package]] name = "tinyvec" version = "1.8.0" @@ -1847,11 +2422,45 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "hashbrown", + "hashbrown 0.14.5", "pin-project-lite", "tokio", ] +[[package]] +name = "toml" +version = "0.8.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "winnow", +] + [[package]] name = "tower" version = "0.4.13" @@ -1972,6 +2581,12 @@ version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +[[package]] +name = "ucd-trie" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" + [[package]] name = "unicode-bidi" version = "0.3.15" @@ -1993,6 +2608,12 @@ dependencies = [ "tinyvec", ] +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + [[package]] name = "universal-hash" version = "0.5.1" @@ -2146,6 +2767,18 @@ dependencies = [ "rustls-pki-types", ] +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix", +] + [[package]] name = "winapi" version = "0.3.9" @@ -2280,6 +2913,24 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" +[[package]] +name = "winnow" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" +dependencies = [ + "memchr", +] + +[[package]] +name = "yaml-rust" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" +dependencies = [ + "linked-hash-map", +] + [[package]] name = "zerocopy" version = "0.7.35" diff --git a/event_deleter/Cargo.toml b/event_deleter/Cargo.toml index d94c32d..2cdd9e5 100644 --- a/event_deleter/Cargo.toml +++ b/event_deleter/Cargo.toml @@ -6,13 +6,17 @@ edition = "2021" [dependencies] async-trait = "0.1.82" clap = { version = "4.5.18", features = ["derive"] } +config = { version = "0.14.0", features = ["yaml"] } env_logger = "0.11.5" +nonzero_ext = "0.3.0" nostr-sdk = "0.35.0" +redis = { version = "0.27.2", features = ["tls-rustls", "tls-rustls-webpki-roots", "tokio", "tokio-comp", "tokio-rustls", "tokio-rustls-comp"] } regex = "1.10.6" serde = { version = "1.0.210", features = ["derive"] } serde_json = "1.0.128" thiserror = "1.0.64" tokio = { version = "1.40.0", features = ["full"] } +tokio-rustls = "0.26.0" tokio-util = { version = "0.7.12", features = ["rt"] } tracing = "0.1.40" tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } diff --git a/event_deleter/README.md b/event_deleter/README.md index 5042ec7..bf2c465 100644 --- a/event_deleter/README.md +++ b/event_deleter/README.md @@ -1,3 +1,7 @@ +# Vanish Listener + +Vanish Listener is a tool that listens for vanish requests on a Redis stream and processes them by deleting the corresponding events from the Strfry database. It continuously monitors the `vanish_requests` stream in Redis and handles incoming deletion requests in real-time. + # Spam Cleaner Spam Cleaner is a tool to delete events that don't comply with our policies directly from the Strfry database. Currently, it provides a command to clean the database based on a JSONL stream from `stdin`. Integration with Strfry plugins is planned. The tool creates a pool of worker tasks that analyze each event through various checks using a local Nostr connection to `ws://localhost:7777`. 
diff --git a/event_deleter/src/analyzer_worker.rs b/event_deleter/src/analyzer_worker.rs index ab714b7..d056da5 100644 --- a/event_deleter/src/analyzer_worker.rs +++ b/event_deleter/src/analyzer_worker.rs @@ -1,7 +1,8 @@ -use crate::event_analyzer::{EventAnalysisResult, RejectReason, Validator}; +use crate::event_analyzer::{DeleteRequest, EventAnalysisResult, Validator}; use crate::worker_pool::WorkerTask; use async_trait::async_trait; use nostr_sdk::prelude::*; +use std::num::NonZeroU64; use thiserror::Error; use tokio::sync::mpsc; use tokio::time::Duration; @@ -9,15 +10,15 @@ use tracing::{debug, error, info}; pub struct ValidationWorker { validator: Validator, - deletion_sender: mpsc::Sender, - validation_timeout: u64, + deletion_sender: mpsc::Sender, + validation_timeout: NonZeroU64, } impl ValidationWorker { pub fn new( validator: Validator, - deletion_sender: mpsc::Sender, - validation_timeout: u64, + deletion_sender: mpsc::Sender, + validation_timeout: NonZeroU64, ) -> Self { ValidationWorker { validator, @@ -33,7 +34,7 @@ impl WorkerTask for ValidationWorker { debug!("Validating event {}", event.id); match tokio::time::timeout( - Duration::from_secs(self.validation_timeout), + Duration::from_secs(self.validation_timeout.get()), self.validator.validate_event(event.clone()), ) .await diff --git a/event_deleter/src/bin/spam_cleaner.rs b/event_deleter/src/bin/spam_cleaner.rs index 1514e8a..7291cfc 100644 --- a/event_deleter/src/bin/spam_cleaner.rs +++ b/event_deleter/src/bin/spam_cleaner.rs @@ -1,15 +1,17 @@ use clap::Parser; -use nostr_sdk::Event; -use serde_json::Deserializer; use event_deleter::{ analyzer_worker::ValidationWorker, deletion_task::spawn_deletion_task, - event_analyzer::{RejectReason, Validator}, + event_analyzer::{DeleteRequest, Validator}, worker_pool::WorkerPool, }; +use nonzero_ext::nonzero; +use nostr_sdk::Event; +use serde_json::Deserializer; use std::error::Error; use std::io; -use std::num::NonZeroUsize; +use std::num::NonZeroU16; +use std::num::NonZeroU64; use tokio::sync::mpsc; use tokio_util::{sync::CancellationToken, task::TaskTracker}; use tracing::{debug, error, info}; @@ -24,16 +26,16 @@ use tracing_subscriber::{fmt, prelude::*, EnvFilter}; // Leave the comments, they are used for the --help message struct Args { /// Buffer size for batching delete commands - #[arg(short, long, default_value_t = 10)] - buffer_size: usize, + #[arg(short, long, default_value_t = nonzero!(10u16))] + buffer_size: NonZeroU16, /// Maximum number of concurrent validation tasks - #[arg(short = 'c', long, default_value_t = 10)] - concurrency_limit: usize, + #[arg(short = 'c', long, default_value_t = nonzero!(10u16))] + concurrency_limit: NonZeroU16, /// Timeout (in seconds) for validating each event - #[arg(short = 't', long, default_value_t = 10)] - validation_timeout: u64, + #[arg(short = 't', long, default_value_t = nonzero!(10u64))] + validation_timeout: NonZeroU64, /// Dry run mode. 
If set, events will not be deleted #[arg(short = 'd', long)] @@ -67,7 +69,7 @@ async fn main() -> Result<(), Box> { }); let (validation_sender, validation_receiver) = mpsc::channel::(100); - let (deletion_sender, deletion_receiver) = mpsc::channel::(100); + let (deletion_sender, deletion_receiver) = mpsc::channel::(100); let validator = Validator::new().await?; let validator_worker = @@ -77,17 +79,21 @@ async fn main() -> Result<(), Box> { WorkerPool::start( &tracker, "validation_pool", - NonZeroUsize::new(args.concurrency_limit) - .expect("concurrency_limit must be greater than zero"), - NonZeroUsize::new(args.validation_timeout as usize) - .expect("validation_timeout must be greater than zero"), + args.concurrency_limit, + args.validation_timeout, validation_receiver, cancellation_token.clone(), validator_worker, ); // Spawn the deletion task with dry_run flag - spawn_deletion_task(&tracker, deletion_receiver, args.buffer_size, args.dry_run); + spawn_deletion_task( + &tracker, + deletion_receiver, + None, + args.buffer_size, + args.dry_run, + ); tracker.close(); diff --git a/event_deleter/src/bin/vanish_listener.rs b/event_deleter/src/bin/vanish_listener.rs new file mode 100644 index 0000000..b0d2207 --- /dev/null +++ b/event_deleter/src/bin/vanish_listener.rs @@ -0,0 +1,82 @@ +use clap::Parser; +use event_deleter::{ + deletion_task::spawn_deletion_task, event_analyzer::DeleteRequest, + vanish_listener_task::spawn_vanish_listener, +}; +use nonzero_ext::nonzero; +use std::error::Error; +use std::{env, sync::LazyLock}; +use tokio::sync::mpsc; +use tokio_util::{sync::CancellationToken, task::TaskTracker}; +use tracing::info; +use tracing_subscriber::{fmt, prelude::*, EnvFilter}; + +static REDIS_URL: LazyLock = + LazyLock::new(|| env::var("REDIS_URL").expect("REDIS_URL must be set")); + +#[derive(Parser, Debug)] +#[command( + version, + about = "Checks events for spam and deletes them from the strfry database", + long_about = None +)] +// Leave the comments, they are used for the --help message +struct Args { + /// Dry run mode. If set, events will not be deleted + #[arg(short = 'd', long)] + dry_run: bool, +} + +#[tokio::main] +async fn main() -> Result<(), Box> { + tracing_subscriber::registry() + .with(fmt::layer()) + .with(EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"))) + .init(); + + let args = Args::parse(); + let tracker = TaskTracker::new(); + let cancellation_token = CancellationToken::new(); + let token = cancellation_token.clone(); + + info!("Starting vanish listener..."); + info!("Dry run: {}", args.dry_run); + + tokio::spawn(async move { + tokio::signal::ctrl_c() + .await + .expect("Failed to listen for Ctrl+C"); + token.cancel(); + info!("Shutdown signal received. 
Initiating graceful shutdown..."); + }); + + // We may never need to change these constants so for the moment lets leave them hardcoded + let delete_command_batch_size = nonzero!(50u16); + let vanish_channel_size = 10; + let (deletion_sender, deletion_receiver) = mpsc::channel::(vanish_channel_size); + let (ack_sender, ack_receiver) = mpsc::channel::(vanish_channel_size); + + // Read the Redis stream and send the delete requests to the deletion task + spawn_vanish_listener( + &tracker, + deletion_sender, + ack_receiver, + &*REDIS_URL, + cancellation_token, + ) + .await?; + spawn_deletion_task( + &tracker, + deletion_receiver, + Some(ack_sender), + delete_command_batch_size, + args.dry_run, + ); + + tracker.close(); + tracker.wait().await; + + info!("Exiting vanish listener"); + + Ok(()) +} diff --git a/event_deleter/src/deletion_task.rs b/event_deleter/src/deletion_task.rs index 1f994df..e972c91 100644 --- a/event_deleter/src/deletion_task.rs +++ b/event_deleter/src/deletion_task.rs @@ -1,5 +1,6 @@ -use crate::event_analyzer::RejectReason; +use crate::event_analyzer::DeleteRequest; use crate::relay_commander::RelayCommander; +use std::num::NonZeroU16; use tokio::sync::mpsc; use tokio::time; use tokio_util::task::TaskTracker; @@ -7,14 +8,15 @@ use tracing::{debug, error, info}; pub fn spawn_deletion_task( tracker: &TaskTracker, - mut deletion_receiver: mpsc::Receiver, - buffer_size: usize, + mut deletion_receiver: mpsc::Receiver, + ack_sender: Option>, + buffer_size: NonZeroU16, dry_run: bool, ) { let relay_commander = RelayCommander; tracker.spawn(async move { - let mut buffer = Vec::with_capacity(buffer_size); + let mut buffer = Vec::with_capacity(buffer_size.get() as usize); let flush_period_seconds = 30; let flush_period = time::Duration::from_secs(flush_period_seconds); @@ -25,16 +27,21 @@ pub fn spawn_deletion_task( loop { tokio::select! { // The first condition to send the current buffer is the - // time interval. We wait a max of `seconds_threshold` + // time interval. 
We wait a max of `flush_period_seconds` // seconds, after that the buffer is cleared and sent _ = interval.tick() => { - flush_buffer(&relay_commander, &mut buffer, dry_run).await; + flush_buffer(&relay_commander, &mut buffer, &ack_sender, dry_run).await; } recv_result = deletion_receiver.recv() => { match recv_result { Some(reject_reason) => { buffer.push(reject_reason); + + // We also check if the buffer is full based on the buffer size + if buffer.len() >= buffer_size.get() as usize { + flush_buffer(&relay_commander, &mut buffer, &ack_sender, dry_run).await; + } } None => { break; @@ -44,25 +51,34 @@ pub fn spawn_deletion_task( } } - flush_buffer(&relay_commander, &mut buffer, dry_run).await; + // Flush any pending items before exiting + flush_buffer(&relay_commander, &mut buffer, &ack_sender, dry_run).await; debug!("Deletion task finished"); }); } async fn flush_buffer( relay_commander: &RelayCommander, - buffer: &mut Vec, + buffer: &mut Vec, + ack_sender: &Option>, dry_run: bool, ) { - debug!( - "Time based threshold elapsed, publishing buffer, {} items", - buffer.len() - ); + debug!("Flushing delete command buffer, {} items", buffer.len()); if !buffer.is_empty() { + let chunk_clone = buffer.clone(); let chunk = std::mem::take(buffer); + if let Err(e) = relay_commander.execute_delete(chunk, dry_run).await { error!("{}", e); } + + if let Some(ack_sender) = ack_sender { + for item in chunk_clone { + if let Err(e) = ack_sender.send(item).await { + error!("{}", e); + } + } + } } } diff --git a/event_deleter/src/event_analyzer.rs b/event_deleter/src/event_analyzer.rs index 6ffb622..ef26c99 100644 --- a/event_deleter/src/event_analyzer.rs +++ b/event_deleter/src/event_analyzer.rs @@ -1,4 +1,5 @@ use nostr_sdk::prelude::*; +use redis::{streams::StreamId, Value}; use regex::Regex; use std::fmt::Display; use std::sync::LazyLock; @@ -15,20 +16,75 @@ static REJECTED_NAME_REGEXES: LazyLock> = #[derive(Debug, Clone)] pub enum EventAnalysisResult { Accept, - Reject(RejectReason), + Reject(DeleteRequest), } #[derive(Debug, Clone)] -pub enum RejectReason { +pub enum DeleteRequest { ReplyCopy(EventId), ForbiddenName(PublicKey), + Vanish(String, PublicKey, Option), } -impl Display for RejectReason { +impl Display for DeleteRequest { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - RejectReason::ReplyCopy(_) => write!(f, "Reply copy"), - RejectReason::ForbiddenName(_) => write!(f, "Forbidden nip05"), + DeleteRequest::ReplyCopy(_) => write!(f, "Reply copy"), + DeleteRequest::ForbiddenName(_) => write!(f, "Forbidden nip05"), + DeleteRequest::Vanish(_, _, _) => write!(f, "Request to vanish"), + } + } +} + +impl TryFrom<&StreamId> for DeleteRequest { + type Error = EventAnalysisError; + + fn try_from(stream_id: &StreamId) -> Result { + let mut reason = Option::::None; + let mut public_key = Option::::None; + + for (key, value) in stream_id.map.iter() { + match key.as_str() { + "pubkey" => { + if let Value::BulkString(bytes) = value { + let public_key_string = String::from_utf8(bytes.clone()) + .map_err(|_| EventAnalysisError::PublicKeyError)?; + + public_key = Some( + PublicKey::from_hex(public_key_string) + .map_err(|_| EventAnalysisError::PublicKeyError)?, + ); + } + } + "kind" => { + if let Value::Int(kind) = value { + let kind = Kind::Custom(*kind as u16); + if kind != Kind::Custom(62) { + return Err(EventAnalysisError::NotVanishKindError); + } + } + } + "content" => { + if let Value::BulkString(bytes) = value { + reason = Some( + String::from_utf8(bytes.clone()) + 
.map_err(|_| EventAnalysisError::ConversionError)?, + ); + } else { + return Err(EventAnalysisError::ConversionError); + } + } + _ => {} + } + } + + match public_key { + Some(public_key) => Ok(DeleteRequest::Vanish( + stream_id.id.clone(), + public_key, + reason, + )), + None => Err(EventAnalysisError::ConversionError), } } } @@ -70,13 +126,13 @@ impl Validator { let is_forbidden_name = is_forbidden_name_res?; if is_reply_copy { - return Ok(EventAnalysisResult::Reject(RejectReason::ReplyCopy( + return Ok(EventAnalysisResult::Reject(DeleteRequest::ReplyCopy( event.id, ))); } if is_forbidden_name { - return Ok(EventAnalysisResult::Reject(RejectReason::ForbiddenName( + return Ok(EventAnalysisResult::Reject(DeleteRequest::ForbiddenName( event.pubkey, ))); } @@ -179,4 +235,13 @@ pub enum EventAnalysisError { #[error("Nostr error: {0}")] NostrError(Error), + + #[error("Conversion error")] + ConversionError, + + #[error("PublicKey error")] + PublicKeyError, + + #[error("Not vanish kind")] + NotVanishKindError, } diff --git a/event_deleter/src/lib.rs b/event_deleter/src/lib.rs index 4e2aac3..393dd9b 100644 --- a/event_deleter/src/lib.rs +++ b/event_deleter/src/lib.rs @@ -2,4 +2,5 @@ pub mod analyzer_worker; pub mod deletion_task; pub mod event_analyzer; pub mod relay_commander; +pub mod vanish_listener_task; pub mod worker_pool; diff --git a/event_deleter/src/relay_commander.rs b/event_deleter/src/relay_commander.rs index 10205e1..7c8de85 100644 --- a/event_deleter/src/relay_commander.rs +++ b/event_deleter/src/relay_commander.rs @@ -1,4 +1,4 @@ -use crate::event_analyzer::RejectReason; +use crate::event_analyzer::DeleteRequest; use nostr_sdk::prelude::*; use std::collections::HashSet; use std::error::Error; @@ -10,18 +10,21 @@ pub struct RelayCommander; impl RelayCommander { pub async fn execute_delete( &self, - reject_reasons: Vec, + delete_reason: Vec, dry_run: bool, ) -> Result<(), Box> { let mut ids = HashSet::new(); let mut authors = HashSet::new(); - for reason in reject_reasons { + for reason in delete_reason { match reason { - RejectReason::ReplyCopy(id) => { + DeleteRequest::ReplyCopy(id) => { ids.insert(id); } - RejectReason::ForbiddenName(pubkey) => { + DeleteRequest::ForbiddenName(pubkey) => { + authors.insert(pubkey); + } + DeleteRequest::Vanish(_, pubkey, _) => { authors.insert(pubkey); } } diff --git a/event_deleter/src/vanish_listener_task.rs b/event_deleter/src/vanish_listener_task.rs new file mode 100644 index 0000000..d35e5a0 --- /dev/null +++ b/event_deleter/src/vanish_listener_task.rs @@ -0,0 +1,139 @@ +use crate::event_analyzer::DeleteRequest; +use redis::{ + streams::{StreamId, StreamKey, StreamReadOptions, StreamReadReply}, + AsyncCommands, RedisError, +}; +use std::error::Error; +use std::sync::Arc; +use tokio::sync::mpsc; +use tokio_util::{sync::CancellationToken, task::TaskTracker}; +use tracing::{debug, error, info}; + +static BLOCK_MILLIS: usize = 5000; +static VANISH_STREAM_KEY: &str = "vanish_requests"; +static VANISH_LAST_ID_KEY: &str = "vanish_listener:last_id"; + +pub async fn spawn_vanish_listener( + tracker: &TaskTracker, + deletion_sender: mpsc::Sender, + mut ack_receiver: mpsc::Receiver, + redis_url: &str, + cancellation_token: CancellationToken, +) -> Result<(), Box> { + let redis = Arc::new(redis::Client::open(redis_url)?); + + let client = redis.clone(); + tracker.spawn(async move { + let mut con = match client.get_multiplexed_async_connection().await { + Ok(con) => con, + Err(e) => { + error!("Failed to connect to Redis: {}", e); + return; + } + }; + + 
let mut last_id = con + .get(VANISH_LAST_ID_KEY) + .await + .unwrap_or("0-0".to_string()); + + loop { + match ack_receiver.recv().await { + Some(ack) => match ack { + DeleteRequest::Vanish(id, ..) => { + debug!("Received ack"); + + if id > last_id { + let save_last_id_result: Result<(), RedisError> = + con.set(VANISH_LAST_ID_KEY, last_id.clone()).await; + + if let Err(e) = save_last_id_result { + error!("Failed to save last id: {}", e); + } else { + info!("Last id processed: {}", last_id); + } + last_id = id.clone(); + } + } + _ => {} + }, + None => { + break; + } + } + } + }); + + let client = redis.clone(); + tracker.spawn(async move { + let mut con = match client.get_multiplexed_async_connection().await { + Ok(con) => con, + Err(e) => { + error!("Failed to connect to Redis: {}", e); + return; + } + }; + + let opts = StreamReadOptions::default().block(BLOCK_MILLIS); + let mut last_id = con + .get(VANISH_LAST_ID_KEY) + .await + .unwrap_or("0-0".to_string()); + + info!("Last id processed: {}", last_id); + + loop { + tokio::select! { + _ = cancellation_token.cancelled() => { + break; + } + + _ = async { + let reply: StreamReadReply = match con + .xread_options(&[VANISH_STREAM_KEY], &[last_id.clone()], &opts) + .await + { + Ok(reply) => reply, + Err(e) => { + error!("Failed to read from Redis: {}", e); + return; + } + }; + + for StreamKey { ids, .. } in reply.keys { + for stream_id in ids { + if let Err(_) = process_stream_id(&stream_id, &deletion_sender).await { + return; + } + last_id = stream_id.id.clone(); + } + } + } => {} + } + } + }); + + Ok(()) +} + +async fn process_stream_id( + stream_id: &StreamId, + deletion_sender: &mpsc::Sender, +) -> Result<(), Box> { + let vanish_request = DeleteRequest::try_from(stream_id).map_err(|e| { + error!( + "Failed to parse vanish request: {:?}. Error: {}", + stream_id, e + ); + e + })?; + + info!("Received vanish request: {:?}", vanish_request); + + deletion_sender.send(vanish_request).await.map_err(|e| { + error!("Failed to send vanish request: {}", e); + e + })?; + + Ok(()) +} diff --git a/event_deleter/src/worker_pool.rs b/event_deleter/src/worker_pool.rs index 9ee1ce0..68655ec 100644 --- a/event_deleter/src/worker_pool.rs +++ b/event_deleter/src/worker_pool.rs @@ -1,6 +1,7 @@ +use std::error::Error; use std::fmt::Debug; +use std::num::{NonZeroU16, NonZeroU64}; use std::sync::Arc; -use std::{error::Error, num::NonZeroUsize}; use tokio::sync::mpsc::{self, Sender}; use tokio::time::{timeout, Duration}; use tokio_util::sync::CancellationToken; @@ -16,8 +17,8 @@ impl WorkerPool { pub fn start( tracker: &TaskTracker, pool_name: &str, - num_workers: NonZeroUsize, - worker_timeout_secs: NonZeroUsize, + num_workers: NonZeroU16, + worker_timeout_secs: NonZeroU64, item_receiver: mpsc::Receiver, cancellation_token: CancellationToken, worker: Worker, @@ -111,10 +112,10 @@ fn create_dispatcher_task( fn create_worker_task( tracker: &TaskTracker, pool_name: String, - worker_timeout_secs: NonZeroUsize, + worker_timeout_secs: NonZeroU64, mut worker_rx: mpsc::Receiver, worker: Arc, - worker_index: usize, + worker_index: u16, ) where Item: Debug + Send + Sync + Clone + 'static, Worker: WorkerTask + Send + Sync + 'static, diff --git a/start.sh b/start.sh index f46210b..6a01a92 100755 --- a/start.sh +++ b/start.sh @@ -1,2 +1,13 @@ -docker compose up --build --force-recreate +#!/bin/bash +/app/strfry relay & +/app/vanish_listener & + +# Wait for any process to exit +wait -n + +EXIT_STATUS=$? 
+pkill -P $$ + +# Exit with the status of the first exited process +exit $EXIT_STATUS \ No newline at end of file diff --git a/strfry/plugins/broadcast_vanish_requests.ts b/strfry/plugins/broadcast_vanish_requests.ts index 9aa3a1b..a89bc2e 100644 --- a/strfry/plugins/broadcast_vanish_requests.ts +++ b/strfry/plugins/broadcast_vanish_requests.ts @@ -5,7 +5,7 @@ import type { import { log } from "https://raw.githubusercontent.com/planetary-social/strfry-policies/refs/heads/nos-changes/mod.ts"; const REQUEST_TO_VANISH_KIND = 62; -const STREAM_KEY = "vanish_requests"; +const VANISH_STREAM_KEY = "vanish_requests"; function createBroadcastVanishRequests( redis: any, @@ -52,7 +52,7 @@ async function broadcastVanishRequest(event: any, redis: any) { ); try { - await redis.xadd(STREAM_KEY, "*", event); + await redis.xadd(VANISH_STREAM_KEY, "*", event); } catch (error) { log(`Failed to push request ${event.id} to Redis Stream: ${error}`); } From 14d039f7e02c25e60551ebd24fdd906695cb6e25 Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Tue, 1 Oct 2024 14:46:34 -0300 Subject: [PATCH 04/18] Cleanup + comments --- Dockerfile | 4 +-- compose.yml | 2 -- ...anish_listener.rs => vanish_subscriber.rs} | 34 ++++++++++++++----- event_deleter/src/lib.rs | 2 +- ...ener_task.rs => vanish_subscriber_task.rs} | 13 ++++--- start.sh | 2 +- 6 files changed, 39 insertions(+), 18 deletions(-) rename event_deleter/src/bin/{vanish_listener.rs => vanish_subscriber.rs} (65%) rename event_deleter/src/{vanish_listener_task.rs => vanish_subscriber_task.rs} (89%) diff --git a/Dockerfile b/Dockerfile index a2446c1..d22df85 100644 --- a/Dockerfile +++ b/Dockerfile @@ -55,10 +55,10 @@ WORKDIR /app COPY --from=build /build/strfry/strfry strfry COPY --from=build /build/event_deleter/target/release/spam_cleaner /usr/local/bin/spam_cleaner -COPY --from=build /build/event_deleter/target/release/vanish_listener vanish_listener +COPY --from=build /build/event_deleter/target/release/vanish_subscriber vanish_subscriber RUN chmod +x /usr/local/bin/spam_cleaner -RUN chmod +x /app/vanish_listener +RUN chmod +x /app/vanish_subscriber COPY ./start.sh start.sh CMD ./start.sh diff --git a/compose.yml b/compose.yml index 8311cf3..6fb2089 100644 --- a/compose.yml +++ b/compose.yml @@ -6,8 +6,6 @@ services: environment: - RELAY_URL=wss://example.com - REDIS_URL=redis://redis:6379 - - APP__event_deleter__redis_url=redis://redis:6379 - - APP__ENVIRONMENT=development depends_on: - redis restart: always diff --git a/event_deleter/src/bin/vanish_listener.rs b/event_deleter/src/bin/vanish_subscriber.rs similarity index 65% rename from event_deleter/src/bin/vanish_listener.rs rename to event_deleter/src/bin/vanish_subscriber.rs index b0d2207..476c916 100644 --- a/event_deleter/src/bin/vanish_listener.rs +++ b/event_deleter/src/bin/vanish_subscriber.rs @@ -1,11 +1,12 @@ use clap::Parser; use event_deleter::{ deletion_task::spawn_deletion_task, event_analyzer::DeleteRequest, - vanish_listener_task::spawn_vanish_listener, + vanish_subscriber_task::spawn_vanish_subscriber, }; use nonzero_ext::nonzero; use std::error::Error; use std::{env, sync::LazyLock}; +use tokio::signal::unix::{signal, SignalKind}; use tokio::sync::mpsc; use tokio_util::{sync::CancellationToken, task::TaskTracker}; use tracing::info; @@ -17,7 +18,7 @@ static REDIS_URL: LazyLock = #[derive(Parser, Debug)] #[command( version, - about = "Checks events for spam and deletes them from the strfry database", + about = "Checks queued vanish requests and deletes their events in the strfry 
database", long_about = None )] // Leave the comments, they are used for the --help message @@ -37,17 +38,29 @@ async fn main() -> Result<(), Box> { let args = Args::parse(); let tracker = TaskTracker::new(); let cancellation_token = CancellationToken::new(); - let token = cancellation_token.clone(); info!("Starting vanish listener..."); info!("Dry run: {}", args.dry_run); + let token = cancellation_token.clone(); tokio::spawn(async move { - tokio::signal::ctrl_c() - .await - .expect("Failed to listen for Ctrl+C"); + // Create streams for SIGINT and SIGTERM + let mut sigint_stream = + signal(SignalKind::interrupt()).expect("Failed to set up SIGINT handler"); + let mut sigterm_stream = + signal(SignalKind::terminate()).expect("Failed to set up SIGTERM handler"); + + // Wait for either SIGINT or SIGTERM + tokio::select! { + _ = sigint_stream.recv() => { + info!("Received SIGINT (Ctrl+C). Initiating graceful shutdown..."); + } + _ = sigterm_stream.recv() => { + info!("Received SIGTERM. Initiating graceful shutdown..."); + } + } + token.cancel(); - info!("Shutdown signal received. Initiating graceful shutdown..."); }); // We may never need to change these constants so for the moment lets leave them hardcoded @@ -57,7 +70,9 @@ async fn main() -> Result<(), Box> { let (ack_sender, ack_receiver) = mpsc::channel::(vanish_channel_size); // Read the Redis stream and send the delete requests to the deletion task - spawn_vanish_listener( + // On ack, we update the last id processed. It's safe/idempotent to process + // the same id multiple times but we want to avoid that + spawn_vanish_subscriber( &tracker, deletion_sender, ack_receiver, @@ -65,6 +80,9 @@ async fn main() -> Result<(), Box> { cancellation_token, ) .await?; + + // Batches the delete requests and sends them to the strfry delete command. 
+ // Sends ack messages to the Redis vanish stream listener spawn_deletion_task( &tracker, deletion_receiver, diff --git a/event_deleter/src/lib.rs b/event_deleter/src/lib.rs index 393dd9b..b12f362 100644 --- a/event_deleter/src/lib.rs +++ b/event_deleter/src/lib.rs @@ -2,5 +2,5 @@ pub mod analyzer_worker; pub mod deletion_task; pub mod event_analyzer; pub mod relay_commander; -pub mod vanish_listener_task; +pub mod vanish_subscriber_task; pub mod worker_pool; diff --git a/event_deleter/src/vanish_listener_task.rs b/event_deleter/src/vanish_subscriber_task.rs similarity index 89% rename from event_deleter/src/vanish_listener_task.rs rename to event_deleter/src/vanish_subscriber_task.rs index d35e5a0..44e66a3 100644 --- a/event_deleter/src/vanish_listener_task.rs +++ b/event_deleter/src/vanish_subscriber_task.rs @@ -11,9 +11,9 @@ use tracing::{debug, error, info}; static BLOCK_MILLIS: usize = 5000; static VANISH_STREAM_KEY: &str = "vanish_requests"; -static VANISH_LAST_ID_KEY: &str = "vanish_listener:last_id"; +static VANISH_LAST_ID_KEY: &str = "vanish_requests:deletion_subscriber:last_id"; -pub async fn spawn_vanish_listener( +pub async fn spawn_vanish_subscriber( tracker: &TaskTracker, deletion_sender: mpsc::Sender, mut ack_receiver: mpsc::Receiver, @@ -50,7 +50,7 @@ pub async fn spawn_vanish_listener( if let Err(e) = save_last_id_result { error!("Failed to save last id: {}", e); } else { - info!("Last id processed: {}", last_id); + info!("Updating last vanish stream id processed to {}", last_id); } last_id = id.clone(); } @@ -80,7 +80,7 @@ pub async fn spawn_vanish_listener( .await .unwrap_or("0-0".to_string()); - info!("Last id processed: {}", last_id); + info!("Starting from last id processed: {}", last_id); loop { tokio::select! { @@ -102,6 +102,11 @@ pub async fn spawn_vanish_listener( for StreamKey { ids, .. 
} in reply.keys { for stream_id in ids { + if stream_id.id == last_id { + // This one was already processed + continue; + } + if let Err(_) = process_stream_id(&stream_id, &deletion_sender).await { return; } diff --git a/start.sh b/start.sh index 6a01a92..8b0c51d 100755 --- a/start.sh +++ b/start.sh @@ -1,7 +1,7 @@ #!/bin/bash /app/strfry relay & -/app/vanish_listener & +/app/vanish_subscriber & # Wait for any process to exit wait -n From 1c4f259e0a29f95ac7a5681d4018839e9fd2882a Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Tue, 1 Oct 2024 15:14:44 -0300 Subject: [PATCH 05/18] Better test --- strfry/plugins/tests/broadcast_vanish_requests.test.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/strfry/plugins/tests/broadcast_vanish_requests.test.ts b/strfry/plugins/tests/broadcast_vanish_requests.test.ts index e01ab4c..6d7e084 100644 --- a/strfry/plugins/tests/broadcast_vanish_requests.test.ts +++ b/strfry/plugins/tests/broadcast_vanish_requests.test.ts @@ -97,6 +97,10 @@ Deno.test({ sourceInfo: "1.1.1.1", event: buildEvent({ kind: 1, + tags: [ + ["relay", "ALL_RELAYS"], + ["relay", "example.com"], + ], }), }); From a28048a07ca3c5a1190e561c29ad3bcae45a8a7f Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Wed, 2 Oct 2024 10:04:31 -0300 Subject: [PATCH 06/18] More coverage, run_tests.sh, pass traits --- event_deleter/Cargo.toml | 2 +- event_deleter/src/bin/spam_cleaner.rs | 4 + event_deleter/src/bin/vanish_subscriber.rs | 31 +- event_deleter/src/deletion_task.rs | 149 +++++++- event_deleter/src/event_analyzer.rs | 2 +- event_deleter/src/relay_commander.rs | 77 ++-- event_deleter/src/vanish_subscriber_task.rs | 339 +++++++++++++++--- event_deleter/src/worker_pool.rs | 2 +- run_tests.sh | 30 ++ .../tests/broadcast_vanish_requests.test.ts | 4 + strfry/plugins/tests/run_tests.sh | 2 +- 11 files changed, 540 insertions(+), 102 deletions(-) create mode 100755 run_tests.sh diff --git a/event_deleter/Cargo.toml b/event_deleter/Cargo.toml index 2cdd9e5..392764b 100644 --- a/event_deleter/Cargo.toml +++ b/event_deleter/Cargo.toml @@ -15,7 +15,7 @@ regex = "1.10.6" serde = { version = "1.0.210", features = ["derive"] } serde_json = "1.0.128" thiserror = "1.0.64" -tokio = { version = "1.40.0", features = ["full"] } +tokio = { version = "1.40.0", features = ["full", "test-util"] } tokio-rustls = "0.26.0" tokio-util = { version = "0.7.12", features = ["rt"] } tracing = "0.1.40" diff --git a/event_deleter/src/bin/spam_cleaner.rs b/event_deleter/src/bin/spam_cleaner.rs index 7291cfc..0b5d781 100644 --- a/event_deleter/src/bin/spam_cleaner.rs +++ b/event_deleter/src/bin/spam_cleaner.rs @@ -3,6 +3,7 @@ use event_deleter::{ analyzer_worker::ValidationWorker, deletion_task::spawn_deletion_task, event_analyzer::{DeleteRequest, Validator}, + relay_commander, worker_pool::WorkerPool, }; use nonzero_ext::nonzero; @@ -86,11 +87,14 @@ async fn main() -> Result<(), Box> { validator_worker, ); + let relay_commander = relay_commander::RelayCommander::default(); + // Spawn the deletion task with dry_run flag spawn_deletion_task( &tracker, deletion_receiver, None, + relay_commander, args.buffer_size, args.dry_run, ); diff --git a/event_deleter/src/bin/vanish_subscriber.rs b/event_deleter/src/bin/vanish_subscriber.rs index 476c916..02a8be0 100644 --- a/event_deleter/src/bin/vanish_subscriber.rs +++ b/event_deleter/src/bin/vanish_subscriber.rs @@ -1,7 +1,9 @@ use clap::Parser; use event_deleter::{ - deletion_task::spawn_deletion_task, event_analyzer::DeleteRequest, - 
vanish_subscriber_task::spawn_vanish_subscriber, + deletion_task::spawn_deletion_task, + event_analyzer::DeleteRequest, + relay_commander::RelayCommander, + vanish_subscriber_task::{spawn_vanish_subscriber, RedisClient}, }; use nonzero_ext::nonzero; use std::error::Error; @@ -68,6 +70,19 @@ async fn main() -> Result<(), Box> { let vanish_channel_size = 10; let (deletion_sender, deletion_receiver) = mpsc::channel::(vanish_channel_size); let (ack_sender, ack_receiver) = mpsc::channel::(vanish_channel_size); + let redis_client = RedisClient::new(&REDIS_URL); + let relay_commander = RelayCommander::default(); + + // Batches the delete requests and sends them to the strfry delete command. + // Sends ack messages to the Redis vanish stream listener + spawn_deletion_task( + &tracker, + deletion_receiver, + Some(ack_sender), + relay_commander, + delete_command_batch_size, + args.dry_run, + ); // Read the Redis stream and send the delete requests to the deletion task // On ack, we update the last id processed. It's safe/idempotent to process @@ -76,21 +91,11 @@ async fn main() -> Result<(), Box> { &tracker, deletion_sender, ack_receiver, - &*REDIS_URL, + redis_client, cancellation_token, ) .await?; - // Batches the delete requests and sends them to the strfry delete command. - // Sends ack messages to the Redis vanish stream listener - spawn_deletion_task( - &tracker, - deletion_receiver, - Some(ack_sender), - delete_command_batch_size, - args.dry_run, - ); - tracker.close(); tracker.wait().await; diff --git a/event_deleter/src/deletion_task.rs b/event_deleter/src/deletion_task.rs index e972c91..5dedfb3 100644 --- a/event_deleter/src/deletion_task.rs +++ b/event_deleter/src/deletion_task.rs @@ -1,20 +1,19 @@ use crate::event_analyzer::DeleteRequest; -use crate::relay_commander::RelayCommander; +use crate::relay_commander::{RawCommanderTrait, RelayCommander}; use std::num::NonZeroU16; use tokio::sync::mpsc; use tokio::time; use tokio_util::task::TaskTracker; use tracing::{debug, error, info}; -pub fn spawn_deletion_task( +pub fn spawn_deletion_task( tracker: &TaskTracker, mut deletion_receiver: mpsc::Receiver, ack_sender: Option>, + relay_commander: RelayCommander, buffer_size: NonZeroU16, dry_run: bool, ) { - let relay_commander = RelayCommander; - tracker.spawn(async move { let mut buffer = Vec::with_capacity(buffer_size.get() as usize); let flush_period_seconds = 30; @@ -57,8 +56,8 @@ pub fn spawn_deletion_task( }); } -async fn flush_buffer( - relay_commander: &RelayCommander, +async fn flush_buffer( + relay_commander: &RelayCommander, buffer: &mut Vec, ack_sender: &Option>, dry_run: bool, @@ -82,3 +81,141 @@ async fn flush_buffer( } } } + +#[cfg(test)] +mod tests { + use super::*; + use nostr_sdk::prelude::*; + use std::collections::BTreeSet; + use std::num::NonZeroU16; + use std::sync::{Arc, Mutex}; + use tokio::sync::mpsc; + use tokio::time::{self, Duration}; + use tokio_util::task::TaskTracker; + + #[derive(Debug)] + struct CommandRun { + filter: Filter, + dry_run: bool, + } + + // MockRelayCommander that records calls to execute_delete + #[derive(Clone)] + struct MockRelayCommander { + executed_deletes: Arc>>, + } + + #[async_trait::async_trait] + impl RawCommanderTrait for MockRelayCommander { + async fn delete_from_filter( + &self, + filter: Filter, + dry_run: bool, + ) -> Result<(), Box> { + let command_run = CommandRun { filter, dry_run }; + let mut executed_deletes = self.executed_deletes.lock().unwrap(); + executed_deletes.push(command_run); + Ok(()) + } + } + + 
#[tokio::test(start_paused = true)] + async fn test_deletion_task() { + let buffer_size = NonZeroU16::new(3).unwrap(); // Small buffer size for testing + let dry_run = false; + let tracker = TaskTracker::new(); + let (deletion_sender, deletion_receiver) = mpsc::channel(10); + let (ack_sender, mut ack_receiver) = mpsc::channel(10); // Optional acknowledgment channel + let executed_deletes = Arc::new(Mutex::new(Vec::new())); + let mock_commander = MockRelayCommander { + executed_deletes: executed_deletes.clone(), + }; + let relay_commander = RelayCommander::new(mock_commander); + + spawn_deletion_task( + &tracker, + deletion_receiver, + Some(ack_sender), + relay_commander, + buffer_size, + dry_run, + ); + tracker.close(); + + // Send DeleteRequests + let forbidden_public_key = Keys::generate().public_key(); + let forbidden_name = DeleteRequest::ForbiddenName(forbidden_public_key); + let event_id = + EventId::parse("ae7603d8af87cb3b055fd6955692e3201cbd42ae1e327e16fc0c32ab5e888d63") + .unwrap(); + let reply_copy = DeleteRequest::ReplyCopy(event_id); + let vanish_public_key = Keys::generate().public_key(); + let vanish = DeleteRequest::Vanish("streamid".to_string(), vanish_public_key, None); + + deletion_sender.send(forbidden_name.clone()).await.unwrap(); + deletion_sender.send(reply_copy.clone()).await.unwrap(); + deletion_sender.send(vanish.clone()).await.unwrap(); + + // Wait an interval cycle + time::advance(Duration::from_secs(30)).await; + + // Check that execute_delete was called with the correct filters + assert_executed_deletes( + &executed_deletes, + dry_run, + vec![ + CommandExpectation { + expected_ids: Some(BTreeSet::from([event_id])), + expected_authors: None, + }, + CommandExpectation { + expected_ids: None, + expected_authors: Some(BTreeSet::from([ + forbidden_public_key, + vanish_public_key, + ])), + }, + ], + ); + + // Check that acknowledgments were sent + assert_acks_received(&mut ack_receiver, vec![forbidden_name, reply_copy, vanish]).await; + + drop(deletion_sender); + tracker.wait().await; + } + + struct CommandExpectation { + expected_ids: Option>, + expected_authors: Option>, + } + + fn assert_executed_deletes( + executed_deletes: &Arc>>, + expected_dry_run: bool, + expected_commands: Vec, + ) { + let executed = executed_deletes.lock().unwrap(); + assert_eq!(executed.len(), expected_commands.len()); + + for (command_run, expectation) in executed.iter().zip(expected_commands.iter()) { + assert_eq!(command_run.dry_run, expected_dry_run); + assert_eq!(&command_run.filter.ids, &expectation.expected_ids); + assert_eq!(&command_run.filter.authors, &expectation.expected_authors); + } + } + + async fn assert_acks_received( + ack_receiver: &mut mpsc::Receiver, + expected_acks: Vec, + ) { + let mut acks = Vec::new(); + while let Ok(ack) = ack_receiver.try_recv() { + acks.push(ack); + } + assert_eq!(acks.len(), expected_acks.len()); + for expected_ack in expected_acks { + assert!(acks.contains(&expected_ack)); + } + } +} diff --git a/event_deleter/src/event_analyzer.rs b/event_deleter/src/event_analyzer.rs index ef26c99..24c2530 100644 --- a/event_deleter/src/event_analyzer.rs +++ b/event_deleter/src/event_analyzer.rs @@ -19,7 +19,7 @@ pub enum EventAnalysisResult { Reject(DeleteRequest), } -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)] pub enum DeleteRequest { ReplyCopy(EventId), ForbiddenName(PublicKey), diff --git a/event_deleter/src/relay_commander.rs b/event_deleter/src/relay_commander.rs index 7c8de85..019d3d1 100644 --- 
a/event_deleter/src/relay_commander.rs +++ b/event_deleter/src/relay_commander.rs @@ -1,13 +1,29 @@ use crate::event_analyzer::DeleteRequest; +use async_trait::async_trait; use nostr_sdk::prelude::*; use std::collections::HashSet; use std::error::Error; use tokio::process::Command; -#[derive(Clone, Default)] -pub struct RelayCommander; +#[derive(Clone)] +pub struct RelayCommander { + raw_commander: T, +} + +impl RelayCommander { + pub fn new(raw_commander: T) -> Self { + RelayCommander { raw_commander } + } +} + +impl Default for RelayCommander { + fn default() -> Self { + let raw_commander = RawCommander {}; + RelayCommander::new(raw_commander) + } +} -impl RelayCommander { +impl RelayCommander { pub async fn execute_delete( &self, delete_reason: Vec, @@ -32,38 +48,51 @@ impl RelayCommander { if !ids.is_empty() { let ids_filter = Filter::new().ids(ids); - delete_from_filter(ids_filter, dry_run).await?; + self.raw_commander + .delete_from_filter(ids_filter, dry_run) + .await?; } if !authors.is_empty() { let authors_filter = Filter::new().authors(authors); - delete_from_filter(authors_filter, dry_run).await?; + self.raw_commander + .delete_from_filter(authors_filter, dry_run) + .await?; } Ok(()) } } -async fn delete_from_filter( - filter: Filter, - dry_run: bool, -) -> std::result::Result<(), Box> { - let json_filter = filter.as_json(); - let command_str = format!( - "./strfry delete --filter='{}' {}", - json_filter, - if dry_run { "--dry-run" } else { "" } - ); +#[derive(Default)] +pub struct RawCommander {} + +#[async_trait] +impl RawCommanderTrait for RawCommander {} +#[async_trait] +pub trait RawCommanderTrait: Sync + Send + 'static { + async fn delete_from_filter( + &self, + filter: Filter, + dry_run: bool, + ) -> std::result::Result<(), Box> { + let json_filter = filter.as_json(); + let command_str = format!( + "./strfry delete --filter='{}' {}", + json_filter, + if dry_run { "--dry-run" } else { "" } + ); - let status = Command::new("bash") - .arg("-c") - .arg(&command_str) - .status() - .await?; + let status = Command::new("bash") + .arg("-c") + .arg(&command_str) + .status() + .await?; - if status.success() { - Ok(()) - } else { - Err(format!("Delete command failed with status: {}", status).into()) + if status.success() { + Ok(()) + } else { + Err(format!("Delete command failed with status: {}", status).into()) + } } } diff --git a/event_deleter/src/vanish_subscriber_task.rs b/event_deleter/src/vanish_subscriber_task.rs index 44e66a3..6dfa800 100644 --- a/event_deleter/src/vanish_subscriber_task.rs +++ b/event_deleter/src/vanish_subscriber_task.rs @@ -1,4 +1,5 @@ use crate::event_analyzer::DeleteRequest; +use async_trait::async_trait; use redis::{ streams::{StreamId, StreamKey, StreamReadOptions, StreamReadReply}, AsyncCommands, RedisError, @@ -13,72 +14,118 @@ static BLOCK_MILLIS: usize = 5000; static VANISH_STREAM_KEY: &str = "vanish_requests"; static VANISH_LAST_ID_KEY: &str = "vanish_requests:deletion_subscriber:last_id"; -pub async fn spawn_vanish_subscriber( +pub struct RedisClient { + client: redis::Client, +} + +#[async_trait] +pub trait RedisClientTrait: Send + Sync + 'static { + type Connection: RedisClientConnectionTrait; + async fn get_multiplexed_async_connection(&self) -> Result; +} + +impl RedisClient { + pub fn new(url: &str) -> Self { + let client = redis::Client::open(url).expect("Failed to create Redis client"); + RedisClient { client } + } +} + +#[async_trait] +impl RedisClientTrait for RedisClient { + type Connection = RedisClientConnection; + async fn 
get_multiplexed_async_connection(&self) -> Result { + let con = self.client.get_multiplexed_async_connection().await?; + Ok(RedisClientConnection { con }) + } +} + +pub struct RedisClientConnection { + con: redis::aio::MultiplexedConnection, +} + +#[async_trait] +pub trait RedisClientConnectionTrait: Send + Sync + 'static { + async fn get(&mut self, key: &str) -> Result; + async fn set(&mut self, key: &str, value: String) -> Result<(), RedisError>; + async fn xread_options( + &mut self, + keys: &[&str], + ids: &[String], + opts: &StreamReadOptions, + ) -> Result; +} + +#[async_trait] +impl RedisClientConnectionTrait for RedisClientConnection { + async fn get(&mut self, key: &str) -> Result { + self.con.get(key).await + } + + async fn set(&mut self, key: &str, value: String) -> Result<(), RedisError> { + self.con.set(key, value).await + } + + async fn xread_options( + &mut self, + keys: &[&str], + ids: &[String], + opts: &StreamReadOptions, + ) -> Result { + self.con.xread_options(keys, ids, opts).await + } +} + +pub async fn spawn_vanish_subscriber( tracker: &TaskTracker, deletion_sender: mpsc::Sender, mut ack_receiver: mpsc::Receiver, - redis_url: &str, + redis_client: T, cancellation_token: CancellationToken, ) -> Result<(), Box> { - let redis = Arc::new(redis::Client::open(redis_url)?); + let redis_client = Arc::new(redis_client); - let client = redis.clone(); + let redis_client_clone = redis_client.clone(); tracker.spawn(async move { - let mut con = match client.get_multiplexed_async_connection().await { - Ok(con) => con, + let (mut con, mut last_id) = match get_connection_and_last_id(redis_client_clone).await { + Ok(result) => result, Err(e) => { - error!("Failed to connect to Redis: {}", e); + error!("Failed to get Redis connection: {}", e); return; } }; - let mut last_id = con - .get(VANISH_LAST_ID_KEY) - .await - .unwrap_or("0-0".to_string()); + while let Some(ack) = ack_receiver.recv().await { + if let DeleteRequest::Vanish(id, ..) = ack { + debug!("Received ack"); - loop { - match ack_receiver.recv().await { - Some(ack) => match ack { - DeleteRequest::Vanish(id, ..) 
=> { - debug!("Received ack"); - - if id > last_id { - let save_last_id_result: Result<(), RedisError> = - con.set(VANISH_LAST_ID_KEY, last_id.clone()).await; - - if let Err(e) = save_last_id_result { - error!("Failed to save last id: {}", e); - } else { - info!("Updating last vanish stream id processed to {}", last_id); - } - last_id = id.clone(); - } + if id > last_id { + let save_last_id_result: Result<(), RedisError> = + con.set(VANISH_LAST_ID_KEY, last_id.clone()).await; + + if let Err(e) = save_last_id_result { + error!("Failed to save last id: {}", e); + } else { + info!("Updating last vanish stream id processed to {}", last_id); } - _ => {} - }, - None => { - break; + + last_id = id.clone(); } } } }); - let client = redis.clone(); + let redis_client_clone = redis_client.clone(); tracker.spawn(async move { - let mut con = match client.get_multiplexed_async_connection().await { - Ok(con) => con, + let (mut con, mut last_id) = match get_connection_and_last_id(redis_client_clone).await { + Ok(result) => result, Err(e) => { - error!("Failed to connect to Redis: {}", e); + error!("Failed to get Redis connection: {}", e); return; } }; let opts = StreamReadOptions::default().block(BLOCK_MILLIS); - let mut last_id = con - .get(VANISH_LAST_ID_KEY) - .await - .unwrap_or("0-0".to_string()); info!("Starting from last id processed: {}", last_id); @@ -88,32 +135,28 @@ pub async fn spawn_vanish_subscriber( break; } - _ = async { - let reply: StreamReadReply = match con + result = async { + let reply: StreamReadReply = con .xread_options(&[VANISH_STREAM_KEY], &[last_id.clone()], &opts) - .await - { - Ok(reply) => reply, - Err(e) => { - error!("Failed to read from Redis: {}", e); - return; - } - }; + .await?; for StreamKey { ids, .. } in reply.keys { for stream_id in ids { if stream_id.id == last_id { - // This one was already processed continue; } - if let Err(_) = process_stream_id(&stream_id, &deletion_sender).await { - return; - } + process_stream_id(&stream_id, &deletion_sender).await?; last_id = stream_id.id.clone(); } } - } => {} + Ok::<(), Box>(()) + } => { + if let Err(e) = result { + error!("Error in Redis stream reader task: {}", e); + continue; + } + } } } }); @@ -121,6 +164,17 @@ pub async fn spawn_vanish_subscriber( Ok(()) } +async fn get_connection_and_last_id( + redis_client: Arc, +) -> Result<(T::Connection, String), RedisError> { + let mut con = redis_client.get_multiplexed_async_connection().await?; + let last_id = con + .get(VANISH_LAST_ID_KEY) + .await + .unwrap_or_else(|_| "0-0".to_string()); + Ok((con, last_id)) +} + async fn process_stream_id( stream_id: &StreamId, deletion_sender: &mpsc::Sender, @@ -142,3 +196,178 @@ async fn process_stream_id( Ok(()) } + +#[cfg(test)] +mod tests { + use super::*; + use nostr_sdk::prelude::Keys; + use std::collections::HashMap; + use std::sync::{Arc, Mutex}; + + struct MockRedisClient { + last_id: String, + stream_ids_sequence: Arc>>, + } + struct MockRedisClientConnection { + last_id: String, + stream_ids_sequence: Arc>>, + index: usize, + } + + #[async_trait::async_trait] + impl RedisClientConnectionTrait for MockRedisClientConnection { + async fn get(&mut self, _key: &str) -> Result { + Ok(self.last_id.clone()) + } + + async fn set(&mut self, _key: &str, value: String) -> Result<(), RedisError> { + self.last_id = value; + Ok(()) + } + + async fn xread_options( + &mut self, + _keys: &[&str], + _ids: &[String], + _opts: &StreamReadOptions, + ) -> Result { + tokio::task::yield_now().await; + + let sequence = 
self.stream_ids_sequence.lock().unwrap(); + + if self.index < sequence.len() { + let reply = sequence[self.index].clone(); + self.index += 1; + Ok(reply) + } else { + Ok(StreamReadReply { keys: Vec::new() }) + } + } + } + + impl MockRedisClient { + fn new(last_id: String, stream_ids_sequence: Arc>>) -> Self { + MockRedisClient { + last_id, + stream_ids_sequence, + } + } + } + + #[async_trait::async_trait] + impl RedisClientTrait for MockRedisClient { + type Connection = MockRedisClientConnection; + async fn get_multiplexed_async_connection(&self) -> Result { + Ok(MockRedisClientConnection { + last_id: self.last_id.clone(), + stream_ids_sequence: self.stream_ids_sequence.clone(), + index: 0, + }) + } + } + + #[tokio::test] + async fn test_spawn_vanish_subscriber() { + let expected_public_key_1 = Keys::generate().public_key(); + let expected_public_key_2 = Keys::generate().public_key(); + + let stream_read_reply_1 = StreamReadReply { + keys: vec![StreamKey { + key: VANISH_STREAM_KEY.to_string(), + ids: vec![StreamId { + id: "1-0".to_string(), + map: HashMap::from([ + ( + "pubkey".to_string(), + redis::Value::BulkString(expected_public_key_1.to_hex().into()), + ), + ("kind".to_string(), redis::Value::Int(62)), + ( + "content".to_string(), + redis::Value::BulkString("First message".into()), + ), + ]), + }], + }], + }; + + let stream_read_reply_2 = StreamReadReply { + keys: vec![StreamKey { + key: VANISH_STREAM_KEY.to_string(), + ids: vec![StreamId { + id: "2-0".to_string(), + map: HashMap::from([ + ( + "pubkey".to_string(), + redis::Value::BulkString(expected_public_key_2.to_hex().into()), + ), + ("kind".to_string(), redis::Value::Int(62)), + ( + "content".to_string(), + redis::Value::BulkString("Second message".into()), + ), + ]), + }], + }], + }; + + let stream_ids_sequence = + Arc::new(Mutex::new(vec![stream_read_reply_1, stream_read_reply_2])); + + let redis_client = MockRedisClient::new("0-0".to_string(), stream_ids_sequence.clone()); + let (deletion_sender, mut deletion_receiver) = mpsc::channel::(10); + let (ack_sender, ack_receiver) = mpsc::channel(10); + let cancellation_token = CancellationToken::new(); + let tracker = TaskTracker::new(); + + let received_requests = Arc::new(Mutex::new(Vec::new())); + let received_requests_clone = Arc::clone(&received_requests); + let token = cancellation_token.clone(); + + // Faked deletion task + let len = stream_ids_sequence.lock().unwrap().len(); + tracker.spawn(async move { + for _ in 0..len { + let request = deletion_receiver.recv().await.unwrap(); + received_requests_clone + .lock() + .unwrap() + .push(request.clone()); + ack_sender.send(request).await.unwrap(); + } + + token.cancel(); + }); + + spawn_vanish_subscriber( + &tracker, + deletion_sender, + ack_receiver, + redis_client, + cancellation_token, + ) + .await + .unwrap(); + tracker.close(); + tracker.wait().await; + + let requests = received_requests.lock().unwrap(); + assert_eq!(requests.len(), 2); + + if let DeleteRequest::Vanish(id, public_key, reason) = &requests[0] { + assert_eq!(id, "1-0"); + assert_eq!(*public_key, expected_public_key_1); + assert_eq!(reason, &Some("First message".to_string())); + } else { + panic!("Expected first request to be Vanish"); + } + + if let DeleteRequest::Vanish(id, public_key, reason) = &requests[1] { + assert_eq!(id, "2-0"); + assert_eq!(*public_key, expected_public_key_2); + assert_eq!(reason, &Some("Second message".to_string())); + } else { + panic!("Expected second request to be Vanish"); + } + } +} diff --git a/event_deleter/src/worker_pool.rs 
b/event_deleter/src/worker_pool.rs index 68655ec..2ba6507 100644 --- a/event_deleter/src/worker_pool.rs +++ b/event_deleter/src/worker_pool.rs @@ -132,7 +132,7 @@ fn create_worker_task( } Some(item) => { trace!("{}: Worker task processing item {:?}", worker_name, item); - let result = timeout(Duration::from_secs(worker_timeout_secs.get() as u64), worker.call(item)).await; + let result = timeout(Duration::from_secs(worker_timeout_secs.get()), worker.call(item)).await; match result { Ok(Ok(())) => { diff --git a/run_tests.sh b/run_tests.sh new file mode 100755 index 0000000..95c48a9 --- /dev/null +++ b/run_tests.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +./strfry/plugins/tests/run_tests.sh & +DENOTEST_PID=$! + +( cd ./event_deleter && cargo test ) & +RUSTTEST_PID=$! + +wait $DENOTEST_PID +DENOTEST_STATUS=$? + +wait $RUSTTEST_PID +RUSTTEST_STATUS=$? + +pkill -P $$ + +if [ $DENOTEST_STATUS -ne 0 ]; then + echo "Deno tests failed." +fi + +if [ $RUSTTEST_STATUS -ne 0 ]; then + echo "Rust tests failed." +fi + +if [ $DENOTEST_STATUS -ne 0 ] || [ $RUSTTEST_STATUS -ne 0 ]; then + exit 1 +else + echo "All tests passed." + exit 0 +fi diff --git a/strfry/plugins/tests/broadcast_vanish_requests.test.ts b/strfry/plugins/tests/broadcast_vanish_requests.test.ts index e01ab4c..6d7e084 100644 --- a/strfry/plugins/tests/broadcast_vanish_requests.test.ts +++ b/strfry/plugins/tests/broadcast_vanish_requests.test.ts @@ -97,6 +97,10 @@ Deno.test({ sourceInfo: "1.1.1.1", event: buildEvent({ kind: 1, + tags: [ + ["relay", "ALL_RELAYS"], + ["relay", "example.com"], + ], }), }); diff --git a/strfry/plugins/tests/run_tests.sh b/strfry/plugins/tests/run_tests.sh index 112b4bf..36f32ad 100755 --- a/strfry/plugins/tests/run_tests.sh +++ b/strfry/plugins/tests/run_tests.sh @@ -1,2 +1,2 @@ #!/bin/bash -deno test --watch --allow-read --allow-write --allow-env --log-level=info \ No newline at end of file +deno test --allow-read --allow-write --allow-env --log-level=info \ No newline at end of file From 454ce217e617e9e58726b222a7365738c351f0a9 Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Thu, 3 Oct 2024 13:58:20 -0300 Subject: [PATCH 07/18] Integration test and CI workflow --- .github/workflows/run-tests.yml | 29 ++++++ Dockerfile | 70 +++++++++++--- compose.yml | 21 +++-- event_deleter/Cargo.toml | 4 + event_deleter/src/deletion_task.rs | 9 +- run_integration_tests.sh | 94 +++++++++++++++++++ run_tests.sh | 25 ++++- .../tests/broadcast_vanish_requests.test.ts | 16 ++++ strfry/plugins/tests/run_deno_tests.sh | 2 + strfry/plugins/tests/run_tests.sh | 2 - 10 files changed, 243 insertions(+), 29 deletions(-) create mode 100644 .github/workflows/run-tests.yml create mode 100755 run_integration_tests.sh create mode 100755 strfry/plugins/tests/run_deno_tests.sh delete mode 100755 strfry/plugins/tests/run_tests.sh diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml new file mode 100644 index 0000000..4ee0b05 --- /dev/null +++ b/.github/workflows/run-tests.yml @@ -0,0 +1,29 @@ +name: Run Tests + +on: + pull_request: + branches: ['main'] + push: + branches: ['main'] + +jobs: + run_tests: + name: Run Integration Tests + runs-on: ubuntu-latest + + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + if: ${{ !env.ACT }} # Skip this step locally if running in Act + uses: docker/setup-buildx-action@v3 + + - name: Build and run tests with Docker Compose + run: | + docker compose up --build --abort-on-container-exit --exit-code-from tests + + - name: Check for test 
failures + if: failure() + run: | + echo "Tests failed, check the logs for details." \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index d22df85..047b36a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,9 +1,12 @@ -FROM ubuntu:jammy AS build +ARG PLATFORM=linux/amd64 + +# Stage 1: Build +FROM --platform=${PLATFORM} ubuntu:jammy AS build WORKDIR /build RUN apt update && apt install -y --no-install-recommends \ - git g++ make pkg-config libtool ca-certificates \ + unzip cmake git g++ make pkg-config libtool ca-certificates \ libyaml-perl libtemplate-perl libregexp-grammars-perl libssl-dev zlib1g-dev \ liblmdb-dev libflatbuffers-dev libsecp256k1-dev libzstd-dev curl build-essential @@ -16,35 +19,77 @@ RUN git clone --branch 0.9.6 https://github.com/hoytech/strfry.git && \ RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain 1.80.1 ENV PATH="/root/.cargo/bin:${PATH}" - RUN rustc --version COPY ./event_deleter/Cargo.toml ./event_deleter/Cargo.lock /build/event_deleter/ - WORKDIR /build/event_deleter - RUN cargo fetch COPY ./event_deleter/src /build/event_deleter/src - RUN cargo build --release +RUN cargo test --release --no-run -FROM ubuntu:jammy AS runner +RUN curl -fsSL https://deno.land/install.sh | sh +ENV DENO_INSTALL="/root/.deno" +ENV PATH="$DENO_INSTALL/bin:$PATH" +RUN echo "Deno is located at: $(which deno)" -EXPOSE 7777 + +# Stage 2: tests +FROM --platform=${PLATFORM} ubuntu:jammy AS tests + +RUN apt update && apt install -y --no-install-recommends \ + curl jq git g++ make pkg-config libtool ca-certificates \ + libyaml-perl libtemplate-perl libregexp-grammars-perl libssl-dev zlib1g-dev \ + && rm -rf /var/lib/apt/lists/* + +RUN curl -L https://github.com/fiatjaf/nak/releases/download/v0.7.6/nak-v0.7.6-linux-amd64 -o /usr/local/bin/nak && \ + chmod +x /usr/local/bin/nak + +RUN nak --version + +COPY ./run_integration_tests.sh /usr/local/bin/run_integration_tests.sh +RUN chmod +x /usr/local/bin/run_integration_tests.sh + +COPY --from=build /build/event_deleter/Cargo.toml /tests/event_deleter/ +COPY --from=build /build/event_deleter/Cargo.lock /tests/event_deleter/ +COPY --from=build /build/event_deleter/src /tests/event_deleter/src +COPY --from=build /build/event_deleter/target /tests/event_deleter/target + +COPY --from=build /root/.cargo /root/.cargo +ENV PATH="/root/.cargo/bin:${PATH}" + +RUN rustup default stable + +COPY ./strfry/plugins/ /tests/strfry/plugins/ +COPY ./run_tests.sh /usr/local/bin/run_tests.sh +RUN chmod +x /usr/local/bin/run_tests.sh + +WORKDIR /tests + +COPY --from=build /root/.deno /root/.deno +ENV PATH="/root/.deno/bin:${PATH}" + +RUN deno --version + +CMD ["run_tests.sh"] + +# Stage 3: runner +FROM --platform=${PLATFORM} ubuntu:jammy AS runner RUN apt-get update && apt-get install -y --no-install-recommends \ - vim curl unzip ca-certificates \ + vim curl jq ca-certificates \ liblmdb0 libflatbuffers1 libsecp256k1-0 libb2-1 libzstd1 \ && rm -rf /var/lib/apt/lists/* RUN update-ca-certificates -RUN curl -fsSL https://deno.land/install.sh | sh -ENV DENO_INSTALL="/root/.deno" -ENV PATH="$DENO_INSTALL/bin:$PATH" +COPY --from=build /root/.deno /root/.deno +ENV PATH="/root/.deno/bin:${PATH}" RUN deno --version +EXPOSE 7777 + COPY ./strfry/config/strfry.conf /etc/strfry.conf RUN mkdir -p /app/strfry-db COPY ./strfry/plugins/ /app/plugins/ @@ -53,7 +98,6 @@ RUN chmod +x /app/plugins/policies.ts WORKDIR /app COPY --from=build /build/strfry/strfry strfry - COPY --from=build /build/event_deleter/target/release/spam_cleaner 
/usr/local/bin/spam_cleaner COPY --from=build /build/event_deleter/target/release/vanish_subscriber vanish_subscriber diff --git a/compose.yml b/compose.yml index 6fb2089..dbaefb1 100644 --- a/compose.yml +++ b/compose.yml @@ -1,6 +1,8 @@ services: nosrelay: - build: . + build: + context: . + dockerfile: Dockerfile ports: - "7777:7777" environment: @@ -8,16 +10,21 @@ services: - REDIS_URL=redis://redis:6379 depends_on: - redis - restart: always - redis: image: redis:7.2.4 ports: - "6379:6379" command: redis-server --loglevel notice - volumes: - - redis_data:/data -volumes: - redis_data: \ No newline at end of file + tests: + build: + context: . + dockerfile: Dockerfile + target: tests + environment: + - RELAY_URL=wss://example.com + - REDIS_URL=redis://redis:6379 + depends_on: + - redis + restart: "no" diff --git a/event_deleter/Cargo.toml b/event_deleter/Cargo.toml index 392764b..05d5caf 100644 --- a/event_deleter/Cargo.toml +++ b/event_deleter/Cargo.toml @@ -25,6 +25,10 @@ tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } name = "spam_cleaner" path = "src/bin/spam_cleaner.rs" +[[bin]] +name = "vanish_subscriber" +path = "src/bin/vanish_subscriber.rs" + [profile.release] panic = "abort" diff --git a/event_deleter/src/deletion_task.rs b/event_deleter/src/deletion_task.rs index 5dedfb3..c09155a 100644 --- a/event_deleter/src/deletion_task.rs +++ b/event_deleter/src/deletion_task.rs @@ -6,6 +6,8 @@ use tokio::time; use tokio_util::task::TaskTracker; use tracing::{debug, error, info}; +static FLUSH_PERIOD_SECONDS: u64 = 10; + pub fn spawn_deletion_task( tracker: &TaskTracker, mut deletion_receiver: mpsc::Receiver, @@ -16,10 +18,9 @@ pub fn spawn_deletion_task( ) { tracker.spawn(async move { let mut buffer = Vec::with_capacity(buffer_size.get() as usize); - let flush_period_seconds = 30; - let flush_period = time::Duration::from_secs(flush_period_seconds); + let flush_period = time::Duration::from_secs(FLUSH_PERIOD_SECONDS); - info!("Publishing messages every {} seconds", flush_period_seconds); + info!("Publishing messages every {} seconds", FLUSH_PERIOD_SECONDS); let mut interval = time::interval(flush_period); @@ -157,7 +158,7 @@ mod tests { deletion_sender.send(vanish.clone()).await.unwrap(); // Wait an interval cycle - time::advance(Duration::from_secs(30)).await; + time::advance(Duration::from_secs(FLUSH_PERIOD_SECONDS)).await; // Check that execute_delete was called with the correct filters assert_executed_deletes( diff --git a/run_integration_tests.sh b/run_integration_tests.sh new file mode 100755 index 0000000..c113111 --- /dev/null +++ b/run_integration_tests.sh @@ -0,0 +1,94 @@ +#!/bin/bash + +assert_jsonl_equals() { + local jsonl_data="$1" + local expected_data="$2" + local message="$3" + + local sorted_jsonl_data=$(echo "$jsonl_data" | sort) + local sorted_expected_data=$(echo "$expected_data" | sort) + + if [[ "$sorted_jsonl_data" == "$sorted_expected_data" ]]; then + return 0 + else + echo "Assertion failed: $message" + echo "Expected:" + echo "$sorted_expected_data" + echo "Got:" + echo "$sorted_jsonl_data" + return 1 + fi +} + + +test_status=0 + +key1=$(nak key generate) +pubkey1=$(nak key public $key1) +event1=$(nak event -q -k 1 -c "content 1" --sec $key1 ws://nosrelay:7777) + +key2=$(nak key generate) +pubkey2=$(nak key public $key2) +event2=$(nak event -q -k 1 -c "content 2" --sec $key2 ws://nosrelay:7777) + +req=$(nak req -q ws://nosrelay:7777 | jq -c .) 
+ +expected_req=$(cat < { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + Deno.test({ name: "pushes a vanish request with global relay filter", fn: async () => { @@ -34,6 +38,9 @@ Deno.test({ assertEquals((await broadcastVanishRequests(msg)).action, "accept"); assertEquals(redisMock.called, true); + + // Some time to let the logs flush + await wait(100); }, sanitizeResources: false, }); @@ -61,6 +68,9 @@ Deno.test({ assertEquals((await broadcastVanishRequests(msg)).action, "accept"); assertEquals(redisMock.called, true); + + // Some time to let the logs flush + await wait(100); }, sanitizeResources: false, }); @@ -85,6 +95,9 @@ Deno.test({ assertEquals((await broadcastVanishRequests(msg)).action, "accept"); assertEquals(redisMock.called, false); + + // Some time to let the logs flush + await wait(100); }, sanitizeResources: false, }); @@ -111,6 +124,9 @@ Deno.test({ ); assertEquals((await broadcastVanishRequests(msg)).action, "accept"); assertEquals(redisMock.called, false); + + // Some time to let the logs flush + await wait(100); }, sanitizeResources: false, }); diff --git a/strfry/plugins/tests/run_deno_tests.sh b/strfry/plugins/tests/run_deno_tests.sh new file mode 100755 index 0000000..e927493 --- /dev/null +++ b/strfry/plugins/tests/run_deno_tests.sh @@ -0,0 +1,2 @@ +#!/bin/bash +deno test --allow-read --allow-write --allow-env --trace-leaks --log-level=info \ No newline at end of file diff --git a/strfry/plugins/tests/run_tests.sh b/strfry/plugins/tests/run_tests.sh deleted file mode 100755 index 36f32ad..0000000 --- a/strfry/plugins/tests/run_tests.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/bash -deno test --allow-read --allow-write --allow-env --log-level=info \ No newline at end of file From 4623b9254eb7eb676e702657456429e0f94234ba Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Fri, 4 Oct 2024 13:31:42 -0300 Subject: [PATCH 08/18] More CI workflow changes --- .github/workflows/ci.yml | 85 ++++++++++++++++++++++ .github/workflows/publish-docker-image.yml | 45 ------------ .github/workflows/run-tests.yml | 29 -------- Dockerfile | 28 ++++--- compose.yml | 2 +- test_ci_workflow_on_mac.sh | 3 + 6 files changed, 107 insertions(+), 85 deletions(-) create mode 100644 .github/workflows/ci.yml delete mode 100644 .github/workflows/publish-docker-image.yml delete mode 100644 .github/workflows/run-tests.yml create mode 100644 test_ci_workflow_on_mac.sh diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..b15d4b1 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,85 @@ +name: CI Pipeline + +on: + push: + branches: ['main'] + pull_request: + branches: ['main'] + +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +jobs: + test: + name: Run Integration Tests + runs-on: ubuntu-latest + + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + if: ${{ !env.ACT }} + uses: docker/setup-buildx-action@v3 + + - name: Build Docker image + uses: docker/build-push-action@v5 + with: + context: . + push: false + tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:test + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Run integration tests + run: | + docker compose up --abort-on-container-exit --exit-code-from tests + + - name: Check for test failures + if: failure() + run: | + echo "Tests failed, check the logs for details." 
+ + build_and_push: + name: Build and Push Docker image + needs: test + runs-on: ubuntu-latest + if: ${{ github.ref == 'refs/heads/main' && needs.test.result == 'success' }} + permissions: + packages: write + contents: read + + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Log in to the GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up Docker Buildx + if: ${{ !env.ACT }} + uses: docker/setup-buildx-action@v3 + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v4 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + type=ref,event=branch,suffix=_{{ github.sha }} + type=raw,value=latest,enable={{ github.ref == 'refs/heads/main' }} + + - name: Build and push Docker image. The build was cached + uses: docker/build-push-action@v5 + with: + context: . + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max \ No newline at end of file diff --git a/.github/workflows/publish-docker-image.yml b/.github/workflows/publish-docker-image.yml deleted file mode 100644 index 950dd23..0000000 --- a/.github/workflows/publish-docker-image.yml +++ /dev/null @@ -1,45 +0,0 @@ -name: Build and Publish Docker image - -on: - push: - branches: ['main'] - -env: - REGISTRY: ghcr.io - IMAGE_NAME: ${{ github.repository }} - -jobs: - push_to_registry: - name: Push Docker image to ghcr registry - runs-on: ubuntu-latest - permissions: - packages: write - contents: read - - steps: - - name: Check out the repo - uses: actions/checkout@v4 - - - name: Log in to the Github Container registry - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Extract metadata (tags, labels) for Docker - id: meta - uses: docker/metadata-action@1294d94f8ee362ab42b6da04c35f4cd03a0e6af7 - with: - images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} - tags: | - type=ref,event=branch,suffix=_{{sha}} - type=raw,value=latest,enable={{is_default_branch}} - - - name: Build and push Docker images - uses: docker/build-push-action@v5 - with: - context: . - push: true - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml deleted file mode 100644 index 4ee0b05..0000000 --- a/.github/workflows/run-tests.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: Run Tests - -on: - pull_request: - branches: ['main'] - push: - branches: ['main'] - -jobs: - run_tests: - name: Run Integration Tests - runs-on: ubuntu-latest - - steps: - - name: Check out the repo - uses: actions/checkout@v4 - - - name: Set up Docker Buildx - if: ${{ !env.ACT }} # Skip this step locally if running in Act - uses: docker/setup-buildx-action@v3 - - - name: Build and run tests with Docker Compose - run: | - docker compose up --build --abort-on-container-exit --exit-code-from tests - - - name: Check for test failures - if: failure() - run: | - echo "Tests failed, check the logs for details." 
\ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 047b36a..4d7c082 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,16 @@ -ARG PLATFORM=linux/amd64 - # Stage 1: Build -FROM --platform=${PLATFORM} ubuntu:jammy AS build +ARG BUILDPLATFORM=linux/amd64 + +FROM --platform=$BUILDPLATFORM ubuntu:jammy AS build + +ARG TARGETPLATFORM +ARG BUILDPLATFORM WORKDIR /build +RUN apt-get update && apt-get install -y gnupg +RUN apt-key adv --refresh-keys --keyserver keyserver.ubuntu.com + RUN apt update && apt install -y --no-install-recommends \ unzip cmake git g++ make pkg-config libtool ca-certificates \ libyaml-perl libtemplate-perl libregexp-grammars-perl libssl-dev zlib1g-dev \ @@ -19,16 +25,18 @@ RUN git clone --branch 0.9.6 https://github.com/hoytech/strfry.git && \ RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain 1.80.1 ENV PATH="/root/.cargo/bin:${PATH}" + RUN rustc --version COPY ./event_deleter/Cargo.toml ./event_deleter/Cargo.lock /build/event_deleter/ +COPY ./event_deleter/src /build/event_deleter/src WORKDIR /build/event_deleter RUN cargo fetch +RUN cargo build --release --bins --tests -COPY ./event_deleter/src /build/event_deleter/src -RUN cargo build --release -RUN cargo test --release --no-run - +RUN ls /build/event_deleter +RUN ls /build/event_deleter/target +RUN find /build/event_deleter/target -type d RUN curl -fsSL https://deno.land/install.sh | sh ENV DENO_INSTALL="/root/.deno" ENV PATH="$DENO_INSTALL/bin:$PATH" @@ -36,7 +44,7 @@ RUN echo "Deno is located at: $(which deno)" # Stage 2: tests -FROM --platform=${PLATFORM} ubuntu:jammy AS tests +FROM --platform=${BUILDPLATFORM} ubuntu:jammy AS tests RUN apt update && apt install -y --no-install-recommends \ curl jq git g++ make pkg-config libtool ca-certificates \ @@ -75,7 +83,7 @@ RUN deno --version CMD ["run_tests.sh"] # Stage 3: runner -FROM --platform=${PLATFORM} ubuntu:jammy AS runner +FROM --platform=${BUILDPLATFORM} ubuntu:jammy AS runner RUN apt-get update && apt-get install -y --no-install-recommends \ vim curl jq ca-certificates \ @@ -99,7 +107,7 @@ WORKDIR /app COPY --from=build /build/strfry/strfry strfry COPY --from=build /build/event_deleter/target/release/spam_cleaner /usr/local/bin/spam_cleaner -COPY --from=build /build/event_deleter/target/release/vanish_subscriber vanish_subscriber +COPY --from=build /build/event_deleter/target/release/vanish_subscriber ./vanish_subscriber RUN chmod +x /usr/local/bin/spam_cleaner RUN chmod +x /app/vanish_subscriber diff --git a/compose.yml b/compose.yml index dbaefb1..c759790 100644 --- a/compose.yml +++ b/compose.yml @@ -27,4 +27,4 @@ services: - REDIS_URL=redis://redis:6379 depends_on: - redis - restart: "no" + restart: "no" \ No newline at end of file diff --git a/test_ci_workflow_on_mac.sh b/test_ci_workflow_on_mac.sh new file mode 100644 index 0000000..965ca11 --- /dev/null +++ b/test_ci_workflow_on_mac.sh @@ -0,0 +1,3 @@ +#!/bin/bash +# Run the test job +act --container-architecture linux/amd64 -j test -P ubuntu-latest=ghcr.io/catthehacker/ubuntu:act-latest From 00c23930b14685b7b116a407e709da90321bfb1d Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Mon, 7 Oct 2024 12:37:09 -0300 Subject: [PATCH 09/18] Integration tests, CI workflow, vanish from command line --- Dockerfile | 31 +++++- event_deleter/src/event_analyzer.rs | 24 ++++ event_deleter/src/vanish_subscriber_task.rs | 27 +++-- push_vanish_request.ts | 116 ++++++++++++++++++++ run_integration_tests.sh | 59 +++++----- run_tests.sh | 69 ++++-------- 6 files changed, 237 
insertions(+), 89 deletions(-) create mode 100755 push_vanish_request.ts diff --git a/Dockerfile b/Dockerfile index 4d7c082..05c634e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -42,21 +42,32 @@ ENV DENO_INSTALL="/root/.deno" ENV PATH="$DENO_INSTALL/bin:$PATH" RUN echo "Deno is located at: $(which deno)" +RUN curl -L https://github.com/fiatjaf/nak/releases/download/v0.7.6/nak-v0.7.6-linux-amd64 -o /usr/local/bin/nak && \ + chmod +x /usr/local/bin/nak + +RUN curl -L https://github.com/IBM-Cloud/redli/releases/download/v0.13.0/redli_0.13.0_linux_amd64.tar.gz -o /tmp/redli.tar.gz && \ + tar -xvf /tmp/redli.tar.gz -C /usr/local/bin/ redli_linux_amd64 && \ + mv /usr/local/bin/redli_linux_amd64 /usr/local/bin/redli && \ + chmod +x /usr/local/bin/redli + +RUN nak --version +RUN redli --version # Stage 2: tests FROM --platform=${BUILDPLATFORM} ubuntu:jammy AS tests +COPY --from=build /usr/local/bin/nak /usr/local/bin/nak +RUN chmod +x /usr/local/bin/nak + RUN apt update && apt install -y --no-install-recommends \ curl jq git g++ make pkg-config libtool ca-certificates \ libyaml-perl libtemplate-perl libregexp-grammars-perl libssl-dev zlib1g-dev \ && rm -rf /var/lib/apt/lists/* -RUN curl -L https://github.com/fiatjaf/nak/releases/download/v0.7.6/nak-v0.7.6-linux-amd64 -o /usr/local/bin/nak && \ - chmod +x /usr/local/bin/nak - -RUN nak --version - +COPY ./push_vanish_request.ts /usr/local/bin/push_vanish_request.ts COPY ./run_integration_tests.sh /usr/local/bin/run_integration_tests.sh + +RUN chmod +x /usr/local/bin/push_vanish_request.ts RUN chmod +x /usr/local/bin/run_integration_tests.sh COPY --from=build /build/event_deleter/Cargo.toml /tests/event_deleter/ @@ -108,9 +119,17 @@ WORKDIR /app COPY --from=build /build/strfry/strfry strfry COPY --from=build /build/event_deleter/target/release/spam_cleaner /usr/local/bin/spam_cleaner COPY --from=build /build/event_deleter/target/release/vanish_subscriber ./vanish_subscriber +COPY --from=build /usr/local/bin/nak /usr/local/bin/nak +COPY --from=build /usr/local/bin/redli /usr/local/bin/redli +COPY ./push_vanish_request.ts /app/push_vanish_request.ts -RUN chmod +x /usr/local/bin/spam_cleaner RUN chmod +x /app/vanish_subscriber +# Tools +RUN chmod +x /usr/local/bin/nak +RUN chmod +x /usr/local/bin/redli +RUN chmod +x /usr/local/bin/spam_cleaner +RUN chmod +x /app/push_vanish_request.ts + COPY ./start.sh start.sh CMD ./start.sh diff --git a/event_deleter/src/event_analyzer.rs b/event_deleter/src/event_analyzer.rs index 24c2530..b136032 100644 --- a/event_deleter/src/event_analyzer.rs +++ b/event_deleter/src/event_analyzer.rs @@ -1,6 +1,7 @@ use nostr_sdk::prelude::*; use redis::{streams::StreamId, Value}; use regex::Regex; +use std::env; use std::fmt::Display; use std::sync::LazyLock; use thiserror::Error as ThisError; @@ -13,6 +14,9 @@ static LOCAL_RELAY_URL: &str = "ws://localhost:7777"; static REJECTED_NAME_REGEXES: LazyLock> = LazyLock::new(|| vec![Regex::new(r".*Reply.*(Guy|Girl|Gal).*").unwrap()]); +static RELAY_URL: LazyLock = + LazyLock::new(|| env::var("RELAY_URL").expect("RELAY_URL must be set")); + #[derive(Debug, Clone)] pub enum EventAnalysisResult { Accept, @@ -42,6 +46,7 @@ impl TryFrom<&StreamId> for DeleteRequest { fn try_from(stream_id: &StreamId) -> Result { let mut reason = Option::::None; let mut public_key = Option::::None; + let mut has_matching_tag = false; for (key, value) in stream_id.map.iter() { match key.as_str() { @@ -74,10 +79,26 @@ impl TryFrom<&StreamId> for DeleteRequest { return Err(EventAnalysisError::ConversionError); } 
} + "tags" => { + if let Value::BulkString(bytes) = value { + let tags = String::from_utf8(bytes.clone()) + .map_err(|_| EventAnalysisError::ConversionError)?; + + has_matching_tag = tags.split(',').any(|tag| { + tag.to_lowercase() == "all_relays" || tag.to_lowercase() == *RELAY_URL + }); + } + } _ => {} } } + // We also filter the tag when we push to the stream, from the strfry + // delete command, but doesn't hurt to check here as well + if !has_matching_tag { + return Err(EventAnalysisError::NoMatchingTag); + } + match public_key { Some(public_key) => Ok(DeleteRequest::Vanish( stream_id.id.clone(), @@ -244,4 +265,7 @@ pub enum EventAnalysisError { #[error("Not vanish kind")] NotVanishKindError, + + #[error("No matching tag")] + NoMatchingTag, } diff --git a/event_deleter/src/vanish_subscriber_task.rs b/event_deleter/src/vanish_subscriber_task.rs index 6dfa800..d716044 100644 --- a/event_deleter/src/vanish_subscriber_task.rs +++ b/event_deleter/src/vanish_subscriber_task.rs @@ -179,13 +179,18 @@ async fn process_stream_id( stream_id: &StreamId, deletion_sender: &mpsc::Sender, ) -> Result<(), Box> { - let vanish_request = DeleteRequest::try_from(stream_id).map_err(|e| { - error!( - "Failed to parse vanish request: {:?}. Error: {}", - stream_id, e - ); - e - })?; + let vanish_request = match DeleteRequest::try_from(stream_id) { + Ok(vanish_request) => vanish_request, + Err(e) => { + // Log the error and continue processing the next stream id + error!( + "Couldn't process vanish request: {:?}. Error: {}", + stream_id, e + ); + + return Ok(()); + } + }; info!("Received vanish request: {:?}", vanish_request); @@ -286,6 +291,10 @@ mod tests { "content".to_string(), redis::Value::BulkString("First message".into()), ), + ( + "tags".to_string(), + redis::Value::BulkString("all_relays".into()), + ), ]), }], }], @@ -306,6 +315,10 @@ mod tests { "content".to_string(), redis::Value::BulkString("Second message".into()), ), + ( + "tags".to_string(), + redis::Value::BulkString("all_relays".into()), + ), ]), }], }], diff --git a/push_vanish_request.ts b/push_vanish_request.ts new file mode 100755 index 0000000..d21b663 --- /dev/null +++ b/push_vanish_request.ts @@ -0,0 +1,116 @@ +#!/usr/bin/env -S deno run --allow-net --allow-env + +import { connect, parseURL } from "https://deno.land/x/redis@v0.29.0/mod.ts"; +import { parse } from "https://deno.land/std@0.171.0/flags/mod.ts"; +import { readLines } from "https://deno.land/std@0.171.0/io/mod.ts"; + +// Script to manually push a vanish request to the vanish_requests stream in +// Redis. Using deno to avoid any discrepancy vs the strfry policy. The script +// pushed an unsigned request so that we can do it based on out of band +// requests, not using the nostr network. 
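The stream this script feeds can also be inspected directly while debugging. A small sketch using redis-cli (an assumption; the image itself ships redli, which accepts the same commands), with the Redis host assumed to match compose.yml:

```bash
#!/bin/bash
# Peek at the most recent entries of the vanish_requests stream that the
# vanish subscriber reads. Each entry carries the event fields (pubkey,
# content, and the comma separated "tags" value checked by event_analyzer.rs).
set -euo pipefail

REDIS_URL="${REDIS_URL:-redis://redis:6379}"

redis-cli -u "$REDIS_URL" XREVRANGE vanish_requests + - COUNT 10
```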
+ +const args = parse(Deno.args, { + alias: { + p: "pubkey", + r: "reason", + h: "help", + y: "yes", + }, + string: ["pubkey", "reason", "relay"], + boolean: ["y"], + default: { + relay: "wss://relay.nos.social", + yes: false, + }, +}); + +if (args.help) { + showUsage(); + Deno.exit(0); +} + +if (!args.pubkey) { + console.error("Error: PUBKEY is required."); + showUsage(); + Deno.exit(1); +} + +function showUsage() { + console.log(`Usage: push_vanish_request.ts -p PUBKEY [-r REASON] [--relay RELAY_URL] +-p, --pubkey PUBKEY The public key (required) +-r, --reason REASON The reason for the vanish request (optional) +-y, Confirm the vanish request +-h, --help Show this help message +`); +} + +async function main() { + const pubkey = args.pubkey; + const reason = args.reason || ""; + const skipConfirmation = args.yes; + + const redisUrl = Deno.env.get("REDIS_URL") || "redis://localhost:6379"; + const relayUrl = Deno.env.get("RELAY_URL") || "ws://localhost:7777"; + const redis_connect_options = parseURL(redisUrl); + const redis = await connect(redis_connect_options); + + const VANISH_STREAM_KEY = "vanish_requests"; + const REQUEST_TO_VANISH_KIND = 62; + const CREATED_AT = Math.floor(Date.now() / 1000); + + const event = { + kind: REQUEST_TO_VANISH_KIND, + pubkey: pubkey, + created_at: CREATED_AT, + tags: [["relay", relayUrl]], + content: reason, + }; + + const confirmed = skipConfirmation || (await getConfirmation(event)); + if (!confirmed) { + console.log("\nCanceled"); + redis.close(); + Deno.exit(0); + } + + try { + const xaddResult = await redis.xadd(VANISH_STREAM_KEY, "*", event); + + console.log( + `Vanish request pushed successfully for pubkey '${pubkey}'. Stream ID: ${JSON.stringify( + xaddResult, + null, + 2 + )}` + ); + console.log(`Event: ${JSON.stringify(event, null, 2)}`); + } catch (error) { + console.error("Failed to push vanish request."); + console.error(error); + Deno.exit(1); + } finally { + redis.close(); + } +} + +async function getConfirmation(event: any): Promise { + const encoder = new TextEncoder(); + await Deno.stdout.write( + encoder.encode( + `\nAre you sure you want to create this vanish request? \n'${JSON.stringify( + event, + null, + 2 + )}'? (y/N): ` + ) + ); + + for await (const line of readLines(Deno.stdin)) { + const input = line.trim().toLowerCase(); + return input === "y" || input === "yes"; + } + + return false; +} + +await main(); diff --git a/run_integration_tests.sh b/run_integration_tests.sh index c113111..4d8c0bf 100755 --- a/run_integration_tests.sh +++ b/run_integration_tests.sh @@ -16,20 +16,22 @@ assert_jsonl_equals() { echo "$sorted_expected_data" echo "Got:" echo "$sorted_jsonl_data" - return 1 + exit 1 # Exit immediately if the assertion fails fi } +echo Assert we start with an empty relay +req=$(nak req -q ws://nosrelay:7777 | jq -c .) +assert_jsonl_equals "$req" "" "Initial relay state should be empty" -test_status=0 - +echo Assert initial events key1=$(nak key generate) -pubkey1=$(nak key public $key1) -event1=$(nak event -q -k 1 -c "content 1" --sec $key1 ws://nosrelay:7777) +pubkey1=$(nak key public "$key1") +event1=$(nak event -q -k 1 -c "content 1" --sec "$key1" ws://nosrelay:7777) key2=$(nak key generate) -pubkey2=$(nak key public $key2) -event2=$(nak event -q -k 1 -c "content 2" --sec $key2 ws://nosrelay:7777) +pubkey2=$(nak key public "$key2") +event2=$(nak event -q -k 1 -c "content 2" --sec "$key2" ws://nosrelay:7777) req=$(nak req -q ws://nosrelay:7777 | jq -c .) 
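For reference, a hedged example of how the new script might be invoked from inside the relay container, assuming Deno is available on the PATH there. The pubkey and reason are the illustrative values reused later in this series, and the environment values mirror compose.yml and the relay's own URL.

```bash
#!/bin/bash
# Push a manual vanish request without signing a Nostr event. The -y flag
# defined above skips the interactive confirmation prompt.
set -euo pipefail

export REDIS_URL="redis://redis:6379"   # as set in compose.yml
export RELAY_URL="wss://example.com"    # assumed to match this relay's public URL

/app/push_vanish_request.ts \
  -p 79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81234 \
  -r "Requested through email from trusted user" \
  -y
```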
@@ -39,12 +41,10 @@ $event1 EOF ) -# Assert initial events assert_jsonl_equals "$req" "$expected_req" "Failed to assert initial events" -test_status=$((test_status + $?)) -# Send a vanish request pointing to a different relay -vanish_to_another_relay=$(nak event -q -k 62 -c "Delete all my events!" -t relay=wss://notexample.com --sec $key1 ws://nosrelay:7777) +echo Send a vanish request pointing to a different relay +vanish_to_another_relay=$(nak event -q -k 62 -c "Delete all my events!" -t relay=wss://notexample.com --sec "$key1" ws://nosrelay:7777) req=$(nak req -q ws://nosrelay:7777 | jq -c .) @@ -56,11 +56,10 @@ EOF ) assert_jsonl_equals "$req" "$expected_req" "Failed after sending vanish request to another relay" -test_status=$((test_status + $?)) -# Send a vanish request pointing to this relay specifically -vanish_to_this_relay=$(nak event -q -k 62 -c "Delete all my events!" -t relay=wss://example.com --sec $key1 ws://nosrelay:7777) -sleep 10 +echo Send a vanish request pointing to this relay specifically +vanish_to_this_relay=$(nak event -q -k 62 -c "Delete all my events!" -t relay=wss://example.com --sec "$key1" ws://nosrelay:7777) +sleep 10 # Allow time for the vanish request to be processed req=$(nak req -q ws://nosrelay:7777 | jq -c .) @@ -70,25 +69,27 @@ EOF ) assert_jsonl_equals "$req" "$expected_req" "Failed after sending vanish request to this relay" -test_status=$((test_status + $?)) -# For the last event, send a global vanish request -vanish_from_all_relays=$(nak event -q -k 62 -c "Delete all my events!" -t relay=ALL_RELAYS --sec $key2 ws://nosrelay:7777) -sleep 10 +echo For the last event, send a global vanish request +vanish_from_all_relays=$(nak event -q -k 62 -c "Delete all my events!" -t relay=ALL_RELAYS --sec "$key2" ws://nosrelay:7777) +sleep 10 # Allow time for the vanish request to be processed req=$(nak req -q ws://nosrelay:7777 | jq -c .) -expected_req=$(cat < Date: Wed, 9 Oct 2024 12:42:40 -0300 Subject: [PATCH 10/18] Better comment --- push_vanish_request.ts | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/push_vanish_request.ts b/push_vanish_request.ts index d21b663..ec81ee1 100755 --- a/push_vanish_request.ts +++ b/push_vanish_request.ts @@ -8,6 +8,14 @@ import { readLines } from "https://deno.land/std@0.171.0/io/mod.ts"; // Redis. Using deno to avoid any discrepancy vs the strfry policy. The script // pushed an unsigned request so that we can do it based on out of band // requests, not using the nostr network. +// The script assumes is being run locally from within the relay container so it +// expects that both REDIS_URL and RELAY_URL are set in the environment. +// +// Example usage: +// +//./push_vanish_request.ts -p 79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81234 -r 'Requested through email from trusted user' +// +// The script asks for confirmation before pushing the request to the stream. 
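The integration script above grew quite a few steps; as a condensed illustration of the vanish flow it exercises (same relay URL, event kinds, and wait time as the test, but a throwaway sketch rather than the suite itself):

```bash
#!/bin/bash
# Publish one event, ask this relay to make the author vanish (kind 62 with a
# relay tag naming it), wait for the subscriber, and check nothing is left.
set -euo pipefail

RELAY=ws://nosrelay:7777

key=$(nak key generate)
nak event -q -k 1 -c "hello" --sec "$key" "$RELAY"
nak event -q -k 62 -c "Delete all my events!" -t relay=wss://example.com --sec "$key" "$RELAY"

sleep 10   # give the vanish subscriber time to drain the Redis stream

remaining=$(nak req -q "$RELAY" | jq -c .)
if [ -z "$remaining" ]; then
  echo "vanish request honoured"
else
  echo "events still present: $remaining"
  exit 1
fi
```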
const args = parse(Deno.args, { alias: { From 20b0789b6db2c9c3877087b262b3c05749e15907 Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Wed, 9 Oct 2024 13:11:39 -0300 Subject: [PATCH 11/18] Use is_default_branch syntax See https://github.com/docker/metadata-action?tab=readme-ov-file#is_default_branch --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b15d4b1..0e7342d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -71,8 +71,8 @@ jobs: with: images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} tags: | - type=ref,event=branch,suffix=_{{ github.sha }} - type=raw,value=latest,enable={{ github.ref == 'refs/heads/main' }} + type=ref,event=branch,suffix=_{{sha}} + type=raw,value=latest,enable={{is_default_branch}} - name: Build and push Docker image. The build was cached uses: docker/build-push-action@v5 From 758d975045ac951d42752c3254b27253357470c2 Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Wed, 9 Oct 2024 13:49:38 -0300 Subject: [PATCH 12/18] Setup ring --- event_deleter/src/bin/vanish_subscriber.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/event_deleter/src/bin/vanish_subscriber.rs b/event_deleter/src/bin/vanish_subscriber.rs index 02a8be0..2355533 100644 --- a/event_deleter/src/bin/vanish_subscriber.rs +++ b/event_deleter/src/bin/vanish_subscriber.rs @@ -10,6 +10,7 @@ use std::error::Error; use std::{env, sync::LazyLock}; use tokio::signal::unix::{signal, SignalKind}; use tokio::sync::mpsc; +use tokio_rustls::rustls::crypto::ring; use tokio_util::{sync::CancellationToken, task::TaskTracker}; use tracing::info; use tracing_subscriber::{fmt, prelude::*, EnvFilter}; @@ -37,6 +38,10 @@ async fn main() -> Result<(), Box> { .with(EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"))) .init(); + ring::default_provider() + .install_default() + .expect("Failed to install ring crypto provider"); + let args = Args::parse(); let tracker = TaskTracker::new(); let cancellation_token = CancellationToken::new(); From 105b1b491c1bf0f0a6d3d92c988193aa23371fee Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Wed, 9 Oct 2024 16:10:51 -0300 Subject: [PATCH 13/18] Test for CI issue --- Dockerfile | 2 +- run_integration_tests.sh | 21 +++++++++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 05c634e..9e5bdca 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ # Stage 1: Build -ARG BUILDPLATFORM=linux/amd64 +#ARG BUILDPLATFORM=linux/amd64 FROM --platform=$BUILDPLATFORM ubuntu:jammy AS build diff --git a/run_integration_tests.sh b/run_integration_tests.sh index 4d8c0bf..71ad4c6 100755 --- a/run_integration_tests.sh +++ b/run_integration_tests.sh @@ -1,5 +1,26 @@ #!/bin/bash +# Disable integration tests for the moment to debug error: +# See https://github.com/planetary-social/nosrelay/actions/runs/11259467822/job/31309644214 +#nosrelay-1 | Download https://deno.land/std@0.88.0/async/pool.ts +#nosrelay-1 | Download https://deno.land/std@0.88.0/fmt/colors.ts +#nosrelay-1 | Download https://deno.land/std@0.88.0/testing/_diff.ts +#nosrelay-1 | error: Uncaught (in promise) TypeError: Deno.seekSync is not a function +#nosrelay-1 | Deno.seekSync(rid, offset, Deno.SeekMode.Start); +#nosrelay-1 | ^ +#nosrelay-1 | at js_read (https://deno.land/x/sqlite@v3.7.1/build/vfs.js:48:12) +#nosrelay-1 | at (wasm://wasm/0027cea2:1:5885) +#nosrelay-1 | at (wasm://wasm/0027cea2:1:145143) +#nosrelay-1 | at 
(wasm://wasm/0027cea2:1:140310) +#nosrelay-1 | at (wasm://wasm/0027cea2:1:146451) +#nosrelay-1 | at (wasm://wasm/0027cea2:1:3856) +#nosrelay-1 | at (wasm://wasm/0027cea2:1:602396) +#nosrelay-1 | at https://deno.land/x/sqlite@v3.7.1/src/db.ts:208:27 +#nosrelay-1 | at setStr (https://deno.land/x/sqlite@v3.7.1/src/wasm.ts:19:20) +#nosrelay-1 | at new DB (https://deno.land/x/sqlite@v3.7.1/src/db.ts:205:20) +#nosrelay-1 | 2024-10-09 17:24:06.639 ( 2.441s) [Writer ] ERR| Couldn't setup plugin: pipe to plugin was closed (plugin crashed?) +#exit 0 + assert_jsonl_equals() { local jsonl_data="$1" local expected_data="$2" From 30fbb2b5fa91111b2ac1f920007355b9a5e6f775 Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Wed, 9 Oct 2024 16:33:31 -0300 Subject: [PATCH 14/18] Pin Deno version --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index 9e5bdca..c165122 100644 --- a/Dockerfile +++ b/Dockerfile @@ -37,6 +37,7 @@ RUN cargo build --release --bins --tests RUN ls /build/event_deleter RUN ls /build/event_deleter/target RUN find /build/event_deleter/target -type d +ENV DENO_VERSION=v1.46.3 RUN curl -fsSL https://deno.land/install.sh | sh ENV DENO_INSTALL="/root/.deno" ENV PATH="$DENO_INSTALL/bin:$PATH" From b1c34f52a9bae3fb7d16b150c6f46cbfae25bcb6 Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Wed, 9 Oct 2024 16:40:41 -0300 Subject: [PATCH 15/18] One more try --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index c165122..ae9c6ba 100644 --- a/Dockerfile +++ b/Dockerfile @@ -38,7 +38,7 @@ RUN ls /build/event_deleter RUN ls /build/event_deleter/target RUN find /build/event_deleter/target -type d ENV DENO_VERSION=v1.46.3 -RUN curl -fsSL https://deno.land/install.sh | sh +RUN DENO_VERSION=v1.46.3 curl -fsSL https://deno.land/install.sh | sh ENV DENO_INSTALL="/root/.deno" ENV PATH="$DENO_INSTALL/bin:$PATH" RUN echo "Deno is located at: $(which deno)" From d6724642f81ca59899a6d6d30b54892eb8afd543 Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Wed, 9 Oct 2024 18:00:45 -0300 Subject: [PATCH 16/18] Try disabling cache --- .github/workflows/ci.yml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0e7342d..e7e6ef1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,8 +29,6 @@ jobs: context: . push: false tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:test - cache-from: type=gha - cache-to: type=gha,mode=max - name: Run integration tests run: | @@ -74,12 +72,10 @@ jobs: type=ref,event=branch,suffix=_{{sha}} type=raw,value=latest,enable={{is_default_branch}} - - name: Build and push Docker image. The build was cached + - name: Build and push Docker image uses: docker/build-push-action@v5 with: context: . 
push: true tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - cache-from: type=gha - cache-to: type=gha,mode=max \ No newline at end of file + labels: ${{ steps.meta.outputs.labels }} \ No newline at end of file From dd621dd8461754b3ee68b1af5abd172740463fa2 Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Wed, 9 Oct 2024 18:24:24 -0300 Subject: [PATCH 17/18] Try disabling integration test --- run_integration_tests.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/run_integration_tests.sh b/run_integration_tests.sh index 71ad4c6..e1441ae 100755 --- a/run_integration_tests.sh +++ b/run_integration_tests.sh @@ -19,7 +19,7 @@ #nosrelay-1 | at setStr (https://deno.land/x/sqlite@v3.7.1/src/wasm.ts:19:20) #nosrelay-1 | at new DB (https://deno.land/x/sqlite@v3.7.1/src/db.ts:205:20) #nosrelay-1 | 2024-10-09 17:24:06.639 ( 2.441s) [Writer ] ERR| Couldn't setup plugin: pipe to plugin was closed (plugin crashed?) -#exit 0 +exit 0 assert_jsonl_equals() { local jsonl_data="$1" From 9d4fc22007047880ec5322e8845ae612f9785693 Mon Sep 17 00:00:00 2001 From: Daniel Cadenas Date: Wed, 9 Oct 2024 18:45:53 -0300 Subject: [PATCH 18/18] Try again with removed caches, linux/amd64 and tests enabled --- Dockerfile | 2 +- run_integration_tests.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index ae9c6ba..ccf019d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ # Stage 1: Build -#ARG BUILDPLATFORM=linux/amd64 +ARG BUILDPLATFORM=linux/amd64 FROM --platform=$BUILDPLATFORM ubuntu:jammy AS build diff --git a/run_integration_tests.sh b/run_integration_tests.sh index e1441ae..71ad4c6 100755 --- a/run_integration_tests.sh +++ b/run_integration_tests.sh @@ -19,7 +19,7 @@ #nosrelay-1 | at setStr (https://deno.land/x/sqlite@v3.7.1/src/wasm.ts:19:20) #nosrelay-1 | at new DB (https://deno.land/x/sqlite@v3.7.1/src/db.ts:205:20) #nosrelay-1 | 2024-10-09 17:24:06.639 ( 2.441s) [Writer ] ERR| Couldn't setup plugin: pipe to plugin was closed (plugin crashed?) -exit 0 +#exit 0 assert_jsonl_equals() { local jsonl_data="$1"
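After the whole series, two quick local checks cover the things these last patches were fighting with: the platform pinning in the Dockerfile and the CI test job itself. A sketch, assuming docker buildx and act are installed:

```bash
#!/bin/bash
# Build the image for the linux/amd64 platform the Dockerfile pins, then replay
# the CI "test" job locally with act, using the same flags as
# test_ci_workflow_on_mac.sh.
set -euo pipefail

docker buildx build --platform linux/amd64 -t nosrelay:ci-check --load .
act --container-architecture linux/amd64 -j test \
  -P ubuntu-latest=ghcr.io/catthehacker/ubuntu:act-latest
```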