diff --git a/.sqlx/query-0be48b74255947b8550eb98dfba0bafd0038147973cf7c9a2c4d0bd33f4776ab.json b/.sqlx/query-0be48b74255947b8550eb98dfba0bafd0038147973cf7c9a2c4d0bd33f4776ab.json new file mode 100644 index 00000000..93e9d929 --- /dev/null +++ b/.sqlx/query-0be48b74255947b8550eb98dfba0bafd0038147973cf7c9a2c4d0bd33f4776ab.json @@ -0,0 +1,23 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT user_id\n FROM team_members\n WHERE (team_id = $1 AND role = $2)\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Text" + ] + }, + "nullable": [ + false + ] + }, + "hash": "0be48b74255947b8550eb98dfba0bafd0038147973cf7c9a2c4d0bd33f4776ab" +} diff --git a/.sqlx/query-fefb4f07a0f0c0cf74e554d120f8707d698fc8b4dbb66d2830f4ec0229bc1019.json b/.sqlx/query-26bf18543c97850a1387221a8d15d602f5ff06c9e1a9e40b2ef8016011943502.json similarity index 65% rename from .sqlx/query-fefb4f07a0f0c0cf74e554d120f8707d698fc8b4dbb66d2830f4ec0229bc1019.json rename to .sqlx/query-26bf18543c97850a1387221a8d15d602f5ff06c9e1a9e40b2ef8016011943502.json index e2b9c106..e00c8502 100644 --- a/.sqlx/query-fefb4f07a0f0c0cf74e554d120f8707d698fc8b4dbb66d2830f4ec0229bc1019.json +++ b/.sqlx/query-26bf18543c97850a1387221a8d15d602f5ff06c9e1a9e40b2ef8016011943502.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n INSERT INTO mods (\n id, team_id, title, description, body,\n published, downloads, icon_url, issues_url,\n source_url, wiki_url, status, requested_status, discord_url,\n license_url, license,\n slug, color, monetization_status\n )\n VALUES (\n $1, $2, $3, $4, $5,\n $6, $7, $8, $9,\n $10, $11, $12, $13, $14,\n $15, $16, \n LOWER($17), $18, $19\n )\n ", + "query": "\n INSERT INTO mods (\n id, team_id, title, description, body,\n published, downloads, icon_url, issues_url,\n source_url, wiki_url, status, requested_status, discord_url,\n license_url, license,\n slug, color, monetization_status,\n organization_id\n )\n VALUES (\n $1, $2, $3, $4, $5,\n $6, $7, $8, $9,\n $10, $11, $12, $13, $14,\n $15, $16, \n LOWER($17), $18, $19,\n $20\n )\n ", "describe": { "columns": [], "parameters": { @@ -23,10 +23,11 @@ "Varchar", "Text", "Int4", - "Varchar" + "Varchar", + "Int8" ] }, "nullable": [] }, - "hash": "fefb4f07a0f0c0cf74e554d120f8707d698fc8b4dbb66d2830f4ec0229bc1019" + "hash": "26bf18543c97850a1387221a8d15d602f5ff06c9e1a9e40b2ef8016011943502" } diff --git a/.sqlx/query-35984b9f53e0c54a2b34d715c926d9ea52ea89a38fd0420f351080e3e93eb4fb.json b/.sqlx/query-35984b9f53e0c54a2b34d715c926d9ea52ea89a38fd0420f351080e3e93eb4fb.json new file mode 100644 index 00000000..4ec3dcaa --- /dev/null +++ b/.sqlx/query-35984b9f53e0c54a2b34d715c926d9ea52ea89a38fd0420f351080e3e93eb4fb.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM user_follows WHERE follower_id=$1 AND target_id=$2", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "35984b9f53e0c54a2b34d715c926d9ea52ea89a38fd0420f351080e3e93eb4fb" +} diff --git a/.sqlx/query-79889e768dd4b129ed10f46a6b973208e7897cad64a4e371a6f3b76310d0479b.json b/.sqlx/query-79889e768dd4b129ed10f46a6b973208e7897cad64a4e371a6f3b76310d0479b.json new file mode 100644 index 00000000..b1468732 --- /dev/null +++ b/.sqlx/query-79889e768dd4b129ed10f46a6b973208e7897cad64a4e371a6f3b76310d0479b.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO events (\n id,\n target_id,\n target_id_type,\n 
triggerer_id,\n triggerer_id_type,\n event_type,\n metadata\n )\n SELECT * FROM UNNEST (\n $1::bigint[],\n $2::bigint[],\n $3::text[],\n $4::bigint[],\n $5::text[],\n $6::text[],\n $7::jsonb[]\n )\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8Array", + "Int8Array", + "TextArray", + "Int8Array", + "TextArray", + "TextArray", + "JsonbArray" + ] + }, + "nullable": [] + }, + "hash": "79889e768dd4b129ed10f46a6b973208e7897cad64a4e371a6f3b76310d0479b" +} diff --git a/.sqlx/query-7d4a842888bb3ed24090f9cb903a0a1fae6852ad81db66e0333bb0f373d9b9a3.json b/.sqlx/query-7d4a842888bb3ed24090f9cb903a0a1fae6852ad81db66e0333bb0f373d9b9a3.json new file mode 100644 index 00000000..e4d13f68 --- /dev/null +++ b/.sqlx/query-7d4a842888bb3ed24090f9cb903a0a1fae6852ad81db66e0333bb0f373d9b9a3.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT follower_id, target_id FROM organization_follows WHERE follower_id=$1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "follower_id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "target_id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "7d4a842888bb3ed24090f9cb903a0a1fae6852ad81db66e0333bb0f373d9b9a3" +} diff --git a/.sqlx/query-84fde217f2982fbb918157da1509bc1bcd97b78c724b6d08791d98811d966194.json b/.sqlx/query-84fde217f2982fbb918157da1509bc1bcd97b78c724b6d08791d98811d966194.json new file mode 100644 index 00000000..45dfe2e4 --- /dev/null +++ b/.sqlx/query-84fde217f2982fbb918157da1509bc1bcd97b78c724b6d08791d98811d966194.json @@ -0,0 +1,69 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT \n id,\n target_id,\n target_id_type as \"target_id_type: _\",\n triggerer_id,\n triggerer_id_type as \"triggerer_id_type: _\",\n event_type as \"event_type: _\",\n metadata,\n created\n FROM events e\n WHERE \n (target_id, target_id_type, event_type)\n = ANY(SELECT * FROM UNNEST ($1::bigint[], $2::text[], $3::text[]))\n OR\n (triggerer_id, triggerer_id_type, event_type)\n = ANY(SELECT * FROM UNNEST ($4::bigint[], $5::text[], $6::text[]))\n ORDER BY created DESC\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "target_id", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "target_id_type: _", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "triggerer_id", + "type_info": "Int8" + }, + { + "ordinal": 4, + "name": "triggerer_id_type: _", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "event_type: _", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "metadata", + "type_info": "Jsonb" + }, + { + "ordinal": 7, + "name": "created", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Int8Array", + "TextArray", + "TextArray", + "Int8Array", + "TextArray", + "TextArray" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + false, + true, + false + ] + }, + "hash": "84fde217f2982fbb918157da1509bc1bcd97b78c724b6d08791d98811d966194" +} diff --git a/.sqlx/query-9699feae1d917bc90738f3e6b46f49e4f76c06ceb1ce1ab3fb9317b1035deb65.json b/.sqlx/query-9699feae1d917bc90738f3e6b46f49e4f76c06ceb1ce1ab3fb9317b1035deb65.json new file mode 100644 index 00000000..3ca3e3af --- /dev/null +++ b/.sqlx/query-9699feae1d917bc90738f3e6b46f49e4f76c06ceb1ce1ab3fb9317b1035deb65.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT follower_id, target_id FROM user_follows WHERE follower_id=$1", + "describe": { 
+ "columns": [ + { + "ordinal": 0, + "name": "follower_id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "target_id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "9699feae1d917bc90738f3e6b46f49e4f76c06ceb1ce1ab3fb9317b1035deb65" +} diff --git a/.sqlx/query-9d73bfe2094b88fb7bcb0ef47741c612370373c6263566fc870da4a82b8ded7b.json b/.sqlx/query-9d73bfe2094b88fb7bcb0ef47741c612370373c6263566fc870da4a82b8ded7b.json new file mode 100644 index 00000000..af3befe5 --- /dev/null +++ b/.sqlx/query-9d73bfe2094b88fb7bcb0ef47741c612370373c6263566fc870da4a82b8ded7b.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM organization_follows WHERE follower_id=$1 AND target_id=$2", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "9d73bfe2094b88fb7bcb0ef47741c612370373c6263566fc870da4a82b8ded7b" +} diff --git a/.sqlx/query-c337079ccc53022395a4f3c35fa816ae3421cfa811be32ce478a405d5ef99c06.json b/.sqlx/query-c337079ccc53022395a4f3c35fa816ae3421cfa811be32ce478a405d5ef99c06.json new file mode 100644 index 00000000..69bbe2ae --- /dev/null +++ b/.sqlx/query-c337079ccc53022395a4f3c35fa816ae3421cfa811be32ce478a405d5ef99c06.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": " INSERT INTO user_follows (follower_id, target_id) VALUES ($1, $2)", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "c337079ccc53022395a4f3c35fa816ae3421cfa811be32ce478a405d5ef99c06" +} diff --git a/.sqlx/query-cbeb96926e6d958bcaf5ed28201e36c5ae2c2cb0067ff551a4ecea8a1685b87c.json b/.sqlx/query-cbeb96926e6d958bcaf5ed28201e36c5ae2c2cb0067ff551a4ecea8a1685b87c.json new file mode 100644 index 00000000..8179cbf5 --- /dev/null +++ b/.sqlx/query-cbeb96926e6d958bcaf5ed28201e36c5ae2c2cb0067ff551a4ecea8a1685b87c.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT EXISTS(SELECT 1 FROM events WHERE id=$1)", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "exists", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null + ] + }, + "hash": "cbeb96926e6d958bcaf5ed28201e36c5ae2c2cb0067ff551a4ecea8a1685b87c" +} diff --git a/.sqlx/query-e50f350a4eb67ddbbfec7597185af09e5db89675320111e8006587b2666e72a3.json b/.sqlx/query-e50f350a4eb67ddbbfec7597185af09e5db89675320111e8006587b2666e72a3.json new file mode 100644 index 00000000..c56772e1 --- /dev/null +++ b/.sqlx/query-e50f350a4eb67ddbbfec7597185af09e5db89675320111e8006587b2666e72a3.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": " INSERT INTO organization_follows (follower_id, target_id) VALUES ($1, $2)", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "e50f350a4eb67ddbbfec7597185af09e5db89675320111e8006587b2666e72a3" +} diff --git a/Cargo.lock b/Cargo.lock index 06b62adc..56847814 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -445,6 +445,12 @@ version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341" +[[package]] +name = "assert_matches" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" + [[package]] name = "async-channel" version = "1.9.0" @@ -2245,6 +2251,7 @@ 
dependencies = [ "actix-web-prom", "actix-ws", "argon2", + "assert_matches", "async-trait", "base64 0.21.5", "bitflags 2.4.1", diff --git a/Cargo.toml b/Cargo.toml index bb38733c..0f0ffddb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -109,3 +109,4 @@ derive-new = "0.5.9" [dev-dependencies] actix-http = "3.4.0" +assert_matches = "1.5.0" diff --git a/Dockerfile b/Dockerfile index c0a65d8e..f57fa864 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.68.0 as build +FROM rust:1.70.0 as build ENV PKG_CONFIG_ALLOW_CROSS=1 WORKDIR /usr/src/labrinth diff --git a/migrations/20231020223602_user_and_organization_follows.sql b/migrations/20231020223602_user_and_organization_follows.sql new file mode 100644 index 00000000..742adaab --- /dev/null +++ b/migrations/20231020223602_user_and_organization_follows.sql @@ -0,0 +1,12 @@ +CREATE TABLE user_follows( + follower_id bigint NOT NULL REFERENCES users ON DELETE CASCADE, + target_id bigint NOT NULL REFERENCES users (id) ON DELETE CASCADE, + created timestamptz DEFAULT CURRENT_TIMESTAMP NOT NULL, + PRIMARY KEY (follower_id, target_id) +); +CREATE TABLE organization_follows( + follower_id bigint NOT NULL REFERENCES users ON DELETE CASCADE, + target_id bigint NOT NULL REFERENCES organizations (id) ON DELETE CASCADE, + created timestamptz DEFAULT CURRENT_TIMESTAMP NOT NULL, + PRIMARY KEY (follower_id, target_id) +); \ No newline at end of file diff --git a/migrations/20231023212957_feeds.sql b/migrations/20231023212957_feeds.sql new file mode 100644 index 00000000..3ee02945 --- /dev/null +++ b/migrations/20231023212957_feeds.sql @@ -0,0 +1,20 @@ +CREATE TABLE events( + id bigint NOT NULL PRIMARY KEY, + target_id bigint NOT NULL, + target_id_type text NOT NULL, + triggerer_id bigint NULL, + triggerer_id_type text NULL, + event_type text NOT NULL, + metadata jsonb NULL, + created timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP +); +CREATE INDEX events_targets ON events ( + target_id, + target_id_type, + event_type +); +CREATE INDEX events_triggerers ON events ( + triggerer_id, + triggerer_id_type, + event_type +); \ No newline at end of file diff --git a/src/auth/checks.rs b/src/auth/checks.rs index 4d47e72c..9f60cf7d 100644 --- a/src/auth/checks.rs +++ b/src/auth/checks.rs @@ -79,7 +79,7 @@ pub async fn is_authorized( pub async fn filter_authorized_projects( projects: Vec, - user_option: &Option, + user_option: Option<&User>, pool: &web::Data, ) -> Result, ApiError> { let mut return_projects = Vec::new(); @@ -87,10 +87,7 @@ pub async fn filter_authorized_projects( for project in projects { if !project.inner.status.is_hidden() - || user_option - .as_ref() - .map(|x| x.role.is_mod()) - .unwrap_or(false) + || user_option.map(|x| x.role.is_mod()).unwrap_or(false) { return_projects.push(project.into()); } else if user_option.is_some() { @@ -194,7 +191,7 @@ impl ValidateAuthorized for crate::database::models::OAuthClient { pub async fn filter_authorized_versions( versions: Vec, - user_option: &Option, + user_option: Option<&User>, pool: &web::Data, ) -> Result, ApiError> { let mut return_versions = Vec::new(); diff --git a/src/database/models/creator_follows.rs b/src/database/models/creator_follows.rs new file mode 100644 index 00000000..28f9c360 --- /dev/null +++ b/src/database/models/creator_follows.rs @@ -0,0 +1,99 @@ +use itertools::Itertools; + +use super::{OrganizationId, UserId}; +use crate::database::models::DatabaseError; + +#[derive(Copy, Clone, Debug)] +pub struct UserFollow { + pub follower_id: UserId, + pub target_id: UserId, +} + 
+#[derive(Copy, Clone, Debug)] +pub struct OrganizationFollow { + pub follower_id: UserId, + pub target_id: OrganizationId, +} + +struct FollowQuery { + follower_id: i64, + target_id: i64, +} + +impl From for UserFollow { + fn from(value: FollowQuery) -> Self { + UserFollow { + follower_id: UserId(value.follower_id), + target_id: UserId(value.target_id), + } + } +} + +impl From for OrganizationFollow { + fn from(value: FollowQuery) -> Self { + OrganizationFollow { + follower_id: UserId(value.follower_id), + target_id: OrganizationId(value.target_id), + } + } +} + +macro_rules! impl_follow { + ($target_struct:ident, $table_name:tt, $target_id_type:ident, $target_id_ctor:expr) => { + impl $target_struct { + pub async fn insert( + &self, + exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>, + ) -> Result<(), DatabaseError> { + sqlx::query!( + " INSERT INTO " + $table_name + " (follower_id, target_id) VALUES ($1, $2)", + self.follower_id.0, + self.target_id.0 + ) + .execute(exec) + .await?; + + Ok(()) + } + + pub async fn get_follows_by_follower( + follower_user_id: UserId, + exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>, + ) -> Result, DatabaseError> { + let res = sqlx::query_as!( + FollowQuery, + "SELECT follower_id, target_id FROM " + $table_name + " WHERE follower_id=$1", + follower_user_id.0 + ) + .fetch_all(exec) + .await?; + + Ok(res.into_iter().map(|r| r.into()).collect_vec()) + } + + pub async fn unfollow( + follower_id: UserId, + target_id: $target_id_type, + exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>, + ) -> Result<(), DatabaseError> { + sqlx::query!( + "DELETE FROM " + $table_name + " WHERE follower_id=$1 AND target_id=$2", + follower_id.0, + target_id.0, + ) + .execute(exec) + .await?; + + Ok(()) + } + } + }; +} + +impl_follow!(UserFollow, "user_follows", UserId, UserId); +impl_follow!( + OrganizationFollow, + "organization_follows", + OrganizationId, + OrganizationId +); diff --git a/src/database/models/event_item.rs b/src/database/models/event_item.rs new file mode 100644 index 00000000..a93ffdcb --- /dev/null +++ b/src/database/models/event_item.rs @@ -0,0 +1,324 @@ +use super::{ + dynamic::{DynamicId, IdType}, + generate_event_id, DatabaseError, EventId, OrganizationId, ProjectId, UserId, VersionId, +}; +use chrono::{DateTime, Utc}; +use itertools::Itertools; +use sqlx::postgres::{PgHasArrayType, PgTypeInfo}; +use std::convert::{TryFrom, TryInto}; + +#[derive(sqlx::Type, Clone, Copy, Debug)] +#[sqlx(type_name = "text")] +#[sqlx(rename_all = "snake_case")] +pub enum EventType { + ProjectPublished, + VersionCreated, +} + +impl PgHasArrayType for EventType { + fn array_type_info() -> sqlx::postgres::PgTypeInfo { + PgTypeInfo::with_name("_text") + } +} + +#[derive(Debug)] +pub enum CreatorId { + User(UserId), + Organization(OrganizationId), +} + +#[derive(Debug)] +pub enum EventData { + ProjectPublished { + project_id: ProjectId, + creator_id: CreatorId, + }, + VersionCreated { + version_id: VersionId, + creator_id: CreatorId, + }, +} + +#[derive(Debug)] +pub struct Event { + pub id: EventId, + pub event_data: EventData, + pub time: DateTime, +} + +struct RawEvent { + pub id: EventId, + pub target_id: i64, + pub target_id_type: IdType, + pub triggerer_id: Option, + pub triggerer_id_type: Option, + pub event_type: EventType, + pub metadata: Option, + pub created: Option>, +} + +pub struct EventSelector { + pub id: DynamicId, + pub event_type: EventType, +} + +impl Event { + pub async fn new( + event_data: EventData, + transaction: &mut 
sqlx::Transaction<'_, sqlx::Postgres>, + ) -> Result { + let id = generate_event_id(transaction).await?; + Ok(Self { + id, + event_data, + time: Default::default(), + }) + } +} + +impl From for DynamicId { + fn from(value: CreatorId) -> Self { + match value { + CreatorId::User(user_id) => user_id.into(), + CreatorId::Organization(organization_id) => organization_id.into(), + } + } +} + +impl TryFrom for CreatorId { + type Error = DatabaseError; + + fn try_from(value: DynamicId) -> Result { + match value.id_type { + IdType::UserId => Ok(CreatorId::User(value.try_into()?)), + _ => Ok(CreatorId::Organization(value.try_into()?)), + } + } +} + +impl From for RawEvent { + fn from(value: Event) -> Self { + match value.event_data { + EventData::ProjectPublished { + project_id, + creator_id, + } => { + let target_id = DynamicId::from(project_id); + let triggerer_id = DynamicId::from(creator_id); + RawEvent { + id: value.id, + target_id: target_id.id, + target_id_type: target_id.id_type, + triggerer_id: Some(triggerer_id.id), + triggerer_id_type: Some(triggerer_id.id_type), + event_type: EventType::ProjectPublished, + metadata: None, + created: None, + } + } + EventData::VersionCreated { + version_id, + creator_id, + } => { + let target_id = DynamicId::from(version_id); + let triggerer_id = DynamicId::from(creator_id); + RawEvent { + id: value.id, + target_id: target_id.id, + target_id_type: target_id.id_type, + triggerer_id: Some(triggerer_id.id), + triggerer_id_type: Some(triggerer_id.id_type), + event_type: EventType::VersionCreated, + metadata: None, + created: None, + } + } + } + } +} + +impl TryFrom for Event { + type Error = DatabaseError; + + fn try_from(value: RawEvent) -> Result { + let target_id = DynamicId { + id: value.target_id, + id_type: value.target_id_type, + }; + let triggerer_id = match (value.triggerer_id, value.triggerer_id_type) { + (Some(id), Some(id_type)) => Some(DynamicId { id, id_type }), + _ => None, + }; + + let event = Event { + id : value.id, + event_data : match value.event_type { + EventType::ProjectPublished => EventData::ProjectPublished { + project_id: target_id.try_into()?, + creator_id: triggerer_id.map_or_else(|| { + Err(DatabaseError::UnexpectedNull( + "Neither triggerer_id nor triggerer_id_type should be null for project publishing".to_string(), + )) + }, |v| v.try_into())?, + }, + EventType::VersionCreated => EventData::VersionCreated { + version_id: target_id.try_into()?, + creator_id: triggerer_id.map_or_else(|| { + Err(DatabaseError::UnexpectedNull( + "Neither triggerer_id nor triggerer_id_type should be null for version creation".to_string(), + )) + }, |v| v.try_into())?, + }, + }, + time : value.created.map_or_else( + || { + Err(DatabaseError::UnexpectedNull( + "the value of created should not be null".to_string(), + )) + }, + Ok, + )?, + }; + + Ok(event) + } +} + +impl Event { + pub async fn insert( + self, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + ) -> Result<(), DatabaseError> { + Self::insert_many(vec![self], transaction).await + } + + pub async fn insert_many( + events: Vec, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + ) -> Result<(), DatabaseError> { + RawEvent::insert_many( + events.into_iter().map(|e| e.into()).collect_vec(), + transaction, + ) + .await + } + + pub async fn get_events( + target_selectors: &[EventSelector], + triggerer_selectors: &[EventSelector], + exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>, + ) -> Result, DatabaseError> { + let (target_ids, target_id_types, target_event_types) = 
+ unzip_event_selectors(target_selectors); + let (triggerer_ids, triggerer_id_types, triggerer_event_types) = + unzip_event_selectors(triggerer_selectors); + + let r = sqlx::query_as!( + RawEvent, + r#" + SELECT + id, + target_id, + target_id_type as "target_id_type: _", + triggerer_id, + triggerer_id_type as "triggerer_id_type: _", + event_type as "event_type: _", + metadata, + created + FROM events e + WHERE + (target_id, target_id_type, event_type) + = ANY(SELECT * FROM UNNEST ($1::bigint[], $2::text[], $3::text[])) + OR + (triggerer_id, triggerer_id_type, event_type) + = ANY(SELECT * FROM UNNEST ($4::bigint[], $5::text[], $6::text[])) + ORDER BY created DESC + "#, + &target_ids[..], + &target_id_types[..] as &[IdType], + &target_event_types[..] as &[EventType], + &triggerer_ids[..], + &triggerer_id_types[..] as &[IdType], + &triggerer_event_types[..] as &[EventType] + ) + .fetch_all(exec) + .await? + .into_iter() + .map(|r| r.try_into()) + .collect::, _>>()?; + + Ok(r) + } +} + +impl RawEvent { + pub async fn insert_many( + events: Vec, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + ) -> Result<(), DatabaseError> { + let ( + ids, + target_ids, + target_id_types, + triggerer_ids, + triggerer_id_types, + event_types, + metadata, + ): (Vec<_>, Vec<_>, Vec<_>, Vec<_>, Vec<_>, Vec<_>, Vec<_>) = events + .into_iter() + .map(|e| { + ( + e.id.0, + e.target_id, + e.target_id_type, + e.triggerer_id, + e.triggerer_id_type, + e.event_type, + e.metadata, + ) + }) + .multiunzip(); + sqlx::query!( + " + INSERT INTO events ( + id, + target_id, + target_id_type, + triggerer_id, + triggerer_id_type, + event_type, + metadata + ) + SELECT * FROM UNNEST ( + $1::bigint[], + $2::bigint[], + $3::text[], + $4::bigint[], + $5::text[], + $6::text[], + $7::jsonb[] + ) + ", + &ids[..], + &target_ids[..], + &target_id_types[..] as &[IdType], + &triggerer_ids[..] as &[Option], + &triggerer_id_types[..] as &[Option], + &event_types[..] as &[EventType], + &metadata[..] 
as &[Option] + ) + .execute(&mut **transaction) + .await?; + + Ok(()) + } +} + +fn unzip_event_selectors( + target_selectors: &[EventSelector], +) -> (Vec, Vec, Vec) { + target_selectors + .iter() + .map(|t| (t.id.id, t.id.id_type, t.event_type)) + .multiunzip() +} diff --git a/src/database/models/ids.rs b/src/database/models/ids.rs index bd274fb5..610f4620 100644 --- a/src/database/models/ids.rs +++ b/src/database/models/ids.rs @@ -183,6 +183,13 @@ generate_ids!( "SELECT EXISTS(SELECT 1 FROM oauth_access_tokens WHERE id=$1)", OAuthAccessTokenId ); +generate_ids!( + pub generate_event_id, + EventId, + 8, + "SELECT EXISTS(SELECT 1 FROM events WHERE id=$1)", + EventId +); #[derive(Copy, Clone, Debug, PartialEq, Eq, Type, Hash, Serialize, Deserialize)] #[sqlx(transparent)] @@ -298,6 +305,16 @@ pub struct OAuthRedirectUriId(pub i64); #[sqlx(transparent)] pub struct OAuthAccessTokenId(pub i64); +#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)] +#[sqlx(transparent)] +pub struct EventId(pub i64); + +impl From for EventId { + fn from(value: i64) -> Self { + EventId(value) + } +} + use crate::models::ids; impl From for ProjectId { @@ -420,6 +437,12 @@ impl From for ids::PatId { ids::PatId(id.0 as u64) } } +impl From for ids::FeedItemId { + fn from(value: crate::database::models::ids::EventId) -> Self { + Self(value.0 as u64) + } +} + impl From for ids::OAuthClientId { fn from(id: OAuthClientId) -> Self { ids::OAuthClientId(id.0 as u64) @@ -440,3 +463,63 @@ impl From for ids::OAuthClientAuthorizationId { ids::OAuthClientAuthorizationId(id.0 as u64) } } + +pub mod dynamic { + use super::*; + use sqlx::postgres::{PgHasArrayType, PgTypeInfo}; + + #[derive(sqlx::Type, PartialEq, Debug, Clone, Copy)] + #[sqlx(type_name = "text")] + #[sqlx(rename_all = "snake_case")] + pub enum IdType { + ProjectId, + UserId, + OrganizationId, + VersionId, + } + + impl PgHasArrayType for IdType { + fn array_type_info() -> sqlx::postgres::PgTypeInfo { + PgTypeInfo::with_name("_text") + } + } + + #[derive(Clone)] + pub struct DynamicId { + pub id: i64, + pub id_type: IdType, + } + + macro_rules! 
from_static_impl { + ($struct:ident, $variant:expr) => { + impl From<$struct> for DynamicId { + fn from(value: $struct) -> Self { + DynamicId { + id: value.0, + id_type: $variant, + } + } + } + + impl std::convert::TryFrom for $struct { + type Error = DatabaseError; + + fn try_from(value: DynamicId) -> Result { + if value.id_type == $variant { + return Ok($struct(value.id)); + } + + Err(DatabaseError::DynamicIdConversionError { + expected: $variant, + actual: value.id_type, + }) + } + } + }; + } + + from_static_impl!(ProjectId, IdType::ProjectId); + from_static_impl!(VersionId, IdType::VersionId); + from_static_impl!(UserId, IdType::UserId); + from_static_impl!(OrganizationId, IdType::OrganizationId); +} diff --git a/src/database/models/mod.rs b/src/database/models/mod.rs index eb4335cb..64c78d01 100644 --- a/src/database/models/mod.rs +++ b/src/database/models/mod.rs @@ -2,6 +2,8 @@ use thiserror::Error; pub mod categories; pub mod collection_item; +pub mod creator_follows; +pub mod event_item; pub mod flow_item; pub mod ids; pub mod image_item; @@ -22,6 +24,7 @@ pub mod user_item; pub mod version_item; pub use collection_item::Collection; +pub use event_item::Event; pub use ids::*; pub use image_item::Image; pub use oauth_client_item::OAuthClient; @@ -33,10 +36,18 @@ pub use thread_item::{Thread, ThreadMessage}; pub use user_item::User; pub use version_item::Version; +use self::dynamic::IdType; + #[derive(Error, Debug)] pub enum DatabaseError { #[error("Error while interacting with the database: {0}")] Database(#[from] sqlx::Error), + #[error( + "Error converting from a dynamic id in the database (expected {expected:#?}, was {actual:#?})" + )] + DynamicIdConversionError { expected: IdType, actual: IdType }, + #[error("Didn't expect value to be null: {0}")] + UnexpectedNull(String), #[error("Error while trying to generate random ID")] RandomId, #[error("Error while interacting with the cache: {0}")] diff --git a/src/database/models/project_item.rs b/src/database/models/project_item.rs index 61dd2464..1338dcec 100644 --- a/src/database/models/project_item.rs +++ b/src/database/models/project_item.rs @@ -285,14 +285,16 @@ impl Project { published, downloads, icon_url, issues_url, source_url, wiki_url, status, requested_status, discord_url, license_url, license, - slug, color, monetization_status + slug, color, monetization_status, + organization_id ) VALUES ( $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, - LOWER($17), $18, $19 + LOWER($17), $18, $19, + $20 ) ", self.id as ProjectId, @@ -314,6 +316,7 @@ impl Project { self.slug.as_ref(), self.color.map(|x| x as i32), self.monetization_status.as_str(), + self.organization_id.map(|x| x.0), ) .execute(&mut **transaction) .await?; @@ -331,16 +334,6 @@ impl Project { if let Some(project) = project { Project::clear_cache(id, project.inner.slug, Some(true), redis).await?; - sqlx::query!( - " - DELETE FROM mod_follows - WHERE mod_id = $1 - ", - id as ProjectId - ) - .execute(&mut **transaction) - .await?; - sqlx::query!( " DELETE FROM mods_gallery diff --git a/src/database/models/team_item.rs b/src/database/models/team_item.rs index a513aefe..df59c623 100644 --- a/src/database/models/team_item.rs +++ b/src/database/models/team_item.rs @@ -390,6 +390,32 @@ impl TeamMember { } } + pub async fn get_owner_id<'a, 'b, E>( + id: TeamId, + executor: E, + ) -> Result, super::DatabaseError> + where + E: sqlx::Executor<'a, Database = sqlx::Postgres>, + { + let result = sqlx::query!( + " + SELECT user_id + FROM team_members + WHERE 
(team_id = $1 AND role = $2) + ", + id as TeamId, + crate::models::teams::OWNER_ROLE, + ) + .fetch_optional(executor) + .await?; + + if let Some(m) = result { + Ok(Some(UserId(m.user_id))) + } else { + Ok(None) + } + } + pub async fn insert( &self, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, diff --git a/src/models/mod.rs b/src/models/mod.rs index c4ff81a2..e6f1653a 100644 --- a/src/models/mod.rs +++ b/src/models/mod.rs @@ -4,6 +4,7 @@ pub mod v3; pub use v3::analytics; pub use v3::collections; pub use v3::error; +pub use v3::feeds; pub use v3::ids; pub use v3::images; pub use v3::notifications; diff --git a/src/models/v3/feeds.rs b/src/models/v3/feeds.rs new file mode 100644 index 00000000..956bb6c5 --- /dev/null +++ b/src/models/v3/feeds.rs @@ -0,0 +1,54 @@ +use super::ids::Base62Id; +use super::ids::OrganizationId; +use super::users::UserId; +use crate::models::ids::{ProjectId, VersionId}; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; + +use crate::database::models::event_item as DBEvent; + +#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] +#[serde(from = "Base62Id")] +#[serde(into = "Base62Id")] +pub struct FeedItemId(pub u64); + +#[derive(Serialize, Deserialize, Debug)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum CreatorId { + User { id: UserId }, + Organization { id: OrganizationId }, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct FeedItem { + pub id: FeedItemId, + pub body: FeedItemBody, + pub time: DateTime, +} + +#[derive(Serialize, Deserialize, Debug)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum FeedItemBody { + ProjectPublished { + project_id: ProjectId, + creator_id: CreatorId, + project_title: String, + }, + VersionCreated { + project_id: ProjectId, + version_id: VersionId, + creator_id: CreatorId, + project_title: String, + }, +} + +impl From for CreatorId { + fn from(value: crate::database::models::event_item::CreatorId) -> Self { + match value { + DBEvent::CreatorId::User(user_id) => CreatorId::User { id: user_id.into() }, + DBEvent::CreatorId::Organization(organization_id) => CreatorId::Organization { + id: organization_id.into(), + }, + } + } +} diff --git a/src/models/v3/ids.rs b/src/models/v3/ids.rs index 8cea089f..165a5d41 100644 --- a/src/models/v3/ids.rs +++ b/src/models/v3/ids.rs @@ -1,6 +1,7 @@ use thiserror::Error; pub use super::collections::CollectionId; +pub use super::feeds::FeedItemId; pub use super::images::ImageId; pub use super::notifications::NotificationId; pub use super::oauth_clients::OAuthClientAuthorizationId; @@ -127,6 +128,7 @@ base62_id_impl!(ImageId, ImageId); base62_id_impl!(OAuthClientId, OAuthClientId); base62_id_impl!(OAuthRedirectUriId, OAuthRedirectUriId); base62_id_impl!(OAuthClientAuthorizationId, OAuthClientAuthorizationId); +base62_id_impl!(FeedItemId, FeedItemId); pub mod base62_impl { use serde::de::{self, Deserializer, Visitor}; diff --git a/src/models/v3/mod.rs b/src/models/v3/mod.rs index 7c97ad31..e0bc2c23 100644 --- a/src/models/v3/mod.rs +++ b/src/models/v3/mod.rs @@ -1,6 +1,7 @@ pub mod analytics; pub mod collections; pub mod error; +pub mod feeds; pub mod ids; pub mod images; pub mod notifications; diff --git a/src/models/v3/organizations.rs b/src/models/v3/organizations.rs index 6163ddee..b583aced 100644 --- a/src/models/v3/organizations.rs +++ b/src/models/v3/organizations.rs @@ -5,7 +5,7 @@ use super::{ use serde::{Deserialize, Serialize}; /// The ID of a team -#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)] 
+#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] #[serde(from = "Base62Id")] #[serde(into = "Base62Id")] pub struct OrganizationId(pub u64); diff --git a/src/models/v3/projects.rs b/src/models/v3/projects.rs index 63c6593b..ad0058fe 100644 --- a/src/models/v3/projects.rs +++ b/src/models/v3/projects.rs @@ -11,7 +11,7 @@ use serde::{Deserialize, Serialize}; use validator::Validate; /// The ID of a specific project, encoded as base62 for usage in the API -#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] +#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Hash)] #[serde(from = "Base62Id")] #[serde(into = "Base62Id")] pub struct ProjectId(pub u64); diff --git a/src/routes/updates.rs b/src/routes/updates.rs index f4d6d2f8..ac9d6453 100644 --- a/src/routes/updates.rs +++ b/src/routes/updates.rs @@ -73,7 +73,7 @@ pub async fn forge_updates( .into_iter() .filter(|x| x.loaders.iter().any(loaders)) .collect(), - &user_option, + user_option.as_ref(), &pool, ) .await?; diff --git a/src/routes/v2/project_creation.rs b/src/routes/v2/project_creation.rs index 6e36de5c..56f6f267 100644 --- a/src/routes/v2/project_creation.rs +++ b/src/routes/v2/project_creation.rs @@ -1,7 +1,6 @@ use crate::database::models::version_item; use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; -use crate::models; use crate::models::ids::ImageId; use crate::models::projects::{DonationLink, Loader, Project, ProjectStatus, SideType}; use crate::models::v2::projects::LegacyProject; @@ -130,7 +129,7 @@ struct ProjectCreateData { pub uploaded_images: Vec, /// The id of the organization to create the project in - pub organization_id: Option, + pub organization_id: Option, } #[post("project")] diff --git a/src/routes/v3/analytics_get.rs b/src/routes/v3/analytics_get.rs index dc31c69c..bbe7b131 100644 --- a/src/routes/v3/analytics_get.rs +++ b/src/routes/v3/analytics_get.rs @@ -580,7 +580,7 @@ async fn filter_allowed_ids( .map(|id| Ok(ProjectId(parse_base62(id)?).into())) .collect::, ApiError>>()?; let projects = project_item::Project::get_many_ids(&ids, &***pool, redis).await?; - let ids: Vec = filter_authorized_projects(projects, &Some(user.clone()), pool) + let ids: Vec = filter_authorized_projects(projects, Some(&user), pool) .await? .into_iter() .map(|x| x.id) @@ -596,7 +596,7 @@ async fn filter_allowed_ids( .map(|id| Ok(VersionId(parse_base62(id)?).into())) .collect::, ApiError>>()?; let versions = version_item::Version::get_many(&ids, &***pool, redis).await?; - let ids: Vec = filter_authorized_versions(versions, &Some(user), pool) + let ids: Vec = filter_authorized_versions(versions, Some(&user), pool) .await? 
.into_iter() .map(|x| x.id) diff --git a/src/routes/v3/mod.rs b/src/routes/v3/mod.rs index 7616d095..d1196699 100644 --- a/src/routes/v3/mod.rs +++ b/src/routes/v3/mod.rs @@ -49,7 +49,9 @@ pub fn config(cfg: &mut web::ServiceConfig) { .configure(threads::config) .configure(users::config) .configure(version_file::config) - .configure(versions::config), + .configure(versions::config) + .configure(users::config) + .configure(organizations::config), ); } diff --git a/src/routes/v3/organizations.rs b/src/routes/v3/organizations.rs index d48cd2a9..1e39738a 100644 --- a/src/routes/v3/organizations.rs +++ b/src/routes/v3/organizations.rs @@ -4,6 +4,7 @@ use std::sync::Arc; use super::ApiError; use crate::auth::{filter_authorized_projects, get_user_from_headers}; use crate::database::models::team_item::TeamMember; +use crate::database::models::DatabaseError; use crate::database::models::{generate_organization_id, team_item, Organization}; use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; @@ -17,6 +18,8 @@ use crate::util::routes::read_from_payload; use crate::util::validate::validation_errors_to_string; use crate::{database, models}; use actix_web::{web, HttpRequest, HttpResponse}; +use database::models::creator_follows::OrganizationFollow as DBOrganizationFollow; +use database::models::organization_item::Organization as DBOrganization; use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; use sqlx::PgPool; @@ -41,7 +44,9 @@ pub fn config(cfg: &mut web::ServiceConfig) { .route( "{id}/members", web::get().to(super::teams::team_members_get_organization), - ), + ) + .route("{id}/follow", web::post().to(organization_follow)) + .route("{id}/follow", web::delete().to(organization_unfollow)), ); } @@ -84,7 +89,7 @@ pub async fn organization_projects_get( let projects_data = crate::database::models::Project::get_many_ids(&project_ids, &**pool, &redis).await?; - let projects = filter_authorized_projects(projects_data, ¤t_user, &pool).await?; + let projects = filter_authorized_projects(projects_data, current_user.as_ref(), &pool).await?; Ok(HttpResponse::Ok().json(projects)) } @@ -918,3 +923,71 @@ pub async fn delete_organization_icon( Ok(HttpResponse::NoContent().body("")) } + +pub async fn organization_follow( + req: HttpRequest, + target_id: web::Path, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let (_, current_user) = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::USER_WRITE]), + ) + .await?; + + let target = DBOrganization::get(&target_id, &**pool, &redis) + .await? + .ok_or_else(|| { + ApiError::InvalidInput("The specified organization does not exist!".to_string()) + })?; + + DBOrganizationFollow { + follower_id: current_user.id.into(), + target_id: target.id, + } + .insert(&**pool) + .await + .map_err(|e| match e { + DatabaseError::Database(e) + if e.as_database_error() + .is_some_and(|e| e.is_unique_violation()) => + { + ApiError::InvalidInput("You are already following this organization!".to_string()) + } + e => e.into(), + })?; + + Ok(HttpResponse::NoContent().body("")) +} + +pub async fn organization_unfollow( + req: HttpRequest, + target_id: web::Path, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let (_, current_user) = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::USER_WRITE]), + ) + .await?; + + let target = DBOrganization::get(&target_id, &**pool, &redis) + .await? 
+ .ok_or_else(|| { + ApiError::InvalidInput("The specified organization does not exist!".to_string()) + })?; + + DBOrganizationFollow::unfollow(current_user.id.into(), target.id, &**pool).await?; + + Ok(HttpResponse::NoContent().body("")) +} diff --git a/src/routes/v3/project_creation.rs b/src/routes/v3/project_creation.rs index 584225f4..fb868231 100644 --- a/src/routes/v3/project_creation.rs +++ b/src/routes/v3/project_creation.rs @@ -12,7 +12,7 @@ use crate::models::pats::Scopes; use crate::models::projects::{ DonationLink, License, MonetizationStatus, ProjectId, ProjectStatus, VersionId, VersionStatus, }; -use crate::models::teams::ProjectPermissions; +use crate::models::teams::{OrganizationPermissions, ProjectPermissions}; use crate::models::threads::ThreadType; use crate::models::users::UserId; use crate::queue::session::AuthQueue; @@ -445,6 +445,29 @@ async fn project_create_inner( } } + // If organization_id is set, make sure the user is a member of the organization + if let Some(organization_id) = create_data.organization_id { + let organization_team_member = + models::team_item::TeamMember::get_from_user_id_organization( + organization_id.into(), + current_user.id.into(), + &mut **transaction, + ) + .await?; + + let permissions = OrganizationPermissions::get_permissions_by_role( + ¤t_user.role, + &organization_team_member, + ) + .unwrap_or_default(); + + if !permissions.contains(OrganizationPermissions::ADD_PROJECT) { + return Err(CreateError::CustomAuthenticationError( + "You do not have permission to add projects to this organization!".to_string(), + )); + } + } + // Create VersionBuilders for the versions specified in `initial_versions` versions = Vec::with_capacity(create_data.initial_versions.len()); for (i, data) in create_data.initial_versions.iter().enumerate() { diff --git a/src/routes/v3/projects.rs b/src/routes/v3/projects.rs index 6106f661..447d6a39 100644 --- a/src/routes/v3/projects.rs +++ b/src/routes/v3/projects.rs @@ -1,10 +1,11 @@ use std::sync::Arc; use crate::auth::{filter_authorized_projects, get_user_from_headers, is_authorized}; +use crate::database::models::event_item::{CreatorId, EventData}; use crate::database::models::notification_item::NotificationBuilder; use crate::database::models::project_item::{GalleryItem, ModCategory}; use crate::database::models::thread_item::ThreadMessageBuilder; -use crate::database::models::{ids as db_ids, image_item}; +use crate::database::models::{ids as db_ids, image_item, Event, TeamMember}; use crate::database::redis::RedisPool; use crate::database::{self, models as db_models}; use crate::file_hosting::FileHost; @@ -26,6 +27,7 @@ use crate::util::routes::read_from_payload; use crate::util::validate::validation_errors_to_string; use actix_web::{web, HttpRequest, HttpResponse}; use chrono::{DateTime, Utc}; +use db_ids::{OrganizationId, UserId}; use futures::TryStreamExt; use meilisearch_sdk::indexes::IndexesResults; use serde::{Deserialize, Serialize}; @@ -134,7 +136,7 @@ pub async fn projects_get( .map(|x| x.1) .ok(); - let projects = filter_authorized_projects(projects_data, &user_option, &pool).await?; + let projects = filter_authorized_projects(projects_data, user_option.as_ref(), &pool).await?; Ok(HttpResponse::Ok().json(projects)) } @@ -411,6 +413,21 @@ pub async fn project_edit( ) .execute(&mut *transaction) .await?; + + // On publish event, we send out notification using team *owner* as publishing user, at the time of mod approval + // (even though 'user' is the mod and doing the publishing) + let owner_id = + 
TeamMember::get_owner_id(project_item.inner.team_id, &mut *transaction) + .await?; + if let Some(owner_id) = owner_id { + insert_project_publish_event( + id.into(), + project_item.inner.organization_id, + owner_id, + &mut transaction, + ) + .await?; + } } if status.is_searchable() && !project_item.inner.webhook_sent { if let Ok(webhook_url) = dotenvy::var("PUBLIC_DISCORD_WEBHOOK") { @@ -2493,3 +2510,22 @@ pub async fn project_unfollow( )) } } + +async fn insert_project_publish_event( + project_id: ProjectId, + organization_id: Option, + owner_id: UserId, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, +) -> Result<(), ApiError> { + let event = Event::new( + EventData::ProjectPublished { + project_id: project_id.into(), + creator_id: organization_id + .map_or_else(|| CreatorId::User(owner_id), CreatorId::Organization), + }, + transaction, + ) + .await?; + event.insert(transaction).await?; + Ok(()) +} diff --git a/src/routes/v3/users.rs b/src/routes/v3/users.rs index 093cc1df..6480a7f6 100644 --- a/src/routes/v3/users.rs +++ b/src/routes/v3/users.rs @@ -12,19 +12,38 @@ use validator::Validate; use crate::{ auth::get_user_from_headers, + auth::{filter_authorized_projects, filter_authorized_versions}, + database::{ + self, + models::{ + event_item::{EventData, EventSelector, EventType}, + DatabaseError, + }, + }, database::{models::User, redis::RedisPool}, file_hosting::FileHost, models::{ collections::{Collection, CollectionStatus}, + feeds::{FeedItem, FeedItemBody}, ids::UserId, + ids::{ProjectId, VersionId}, notifications::Notification, pats::Scopes, projects::Project, + projects::Version, users::{Badges, Payout, PayoutStatus, RecipientStatus, Role, UserPayoutData}, }, queue::{payouts::PayoutsQueue, session::AuthQueue}, util::{routes::read_from_payload, validate::validation_errors_to_string}, }; +use itertools::Itertools; +use std::iter::FromIterator; + +use database::models as db_models; +use database::models::creator_follows::OrganizationFollow as DBOrganizationFollow; +use database::models::creator_follows::UserFollow as DBUserFollow; +use database::models::event_item::Event as DBEvent; +use database::models::user_item::User as DBUser; use super::{oauth_clients::get_user_clients, ApiError}; @@ -34,6 +53,7 @@ pub fn config(cfg: &mut web::ServiceConfig) { cfg.service( web::scope("user") + .route("feed", web::get().to(current_user_feed)) .route("{user_id}/projects", web::get().to(projects_list)) .route("{id}", web::get().to(user_get)) .route("{user_id}/collections", web::get().to(collections_list)) @@ -46,7 +66,9 @@ pub fn config(cfg: &mut web::ServiceConfig) { .route("{id}/payouts", web::get().to(user_payouts)) .route("{id}/payouts_fees", web::get().to(user_payouts_fees)) .route("{id}/payouts", web::post().to(user_payouts_request)) - .route("{id}/oauth_apps", web::get().to(get_user_clients)), + .route("{id}/oauth_apps", web::get().to(get_user_clients)) + .route("{id}/follow", web::post().to(user_follow)) + .route("{id}/follow", web::delete().to(user_unfollow)), ); } @@ -912,3 +934,251 @@ pub async fn user_payouts_request( Ok(HttpResponse::NotFound().body("")) } } + +pub async fn user_follow( + req: HttpRequest, + target_id: web::Path, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + println!("inside user_follow"); + let (_, current_user) = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::USER_WRITE]), + ) + .await?; + println!("current_user: {:?}", current_user); + + let target = DBUser::get(&target_id, 
&**pool, &redis) + .await? + .ok_or_else(|| ApiError::InvalidInput("The specified user does not exist!".to_string()))?; + + println!("target: {:?}", target); + DBUserFollow { + follower_id: current_user.id.into(), + target_id: target.id, + } + .insert(&**pool) + .await + .map_err(|e| match e { + DatabaseError::Database(e) + if e.as_database_error() + .is_some_and(|e| e.is_unique_violation()) => + { + ApiError::InvalidInput("You are already following this user!".to_string()) + } + e => e.into(), + })?; + println!("inserted"); + Ok(HttpResponse::NoContent().body("")) +} + +pub async fn user_unfollow( + req: HttpRequest, + target_id: web::Path, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let (_, current_user) = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::USER_WRITE]), + ) + .await?; + + let target = DBUser::get(&target_id, &**pool, &redis) + .await? + .ok_or_else(|| ApiError::InvalidInput("The specified user does not exist!".to_string()))?; + + DBUserFollow::unfollow(current_user.id.into(), target.id, &**pool).await?; + + Ok(HttpResponse::NoContent().body("")) +} + +#[derive(Serialize, Deserialize)] +pub struct FeedParameters { + pub limit: Option, + pub offset: Option, +} + +pub async fn current_user_feed( + req: HttpRequest, + web::Query(params): web::Query, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + println!("In current_user_feed"); + let (_, current_user) = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::NOTIFICATION_READ]), + ) + .await?; + println!("current_user: {:?}", current_user); + + let followed_users = + DBUserFollow::get_follows_by_follower(current_user.id.into(), &**pool).await?; + let followed_organizations = + DBOrganizationFollow::get_follows_by_follower(current_user.id.into(), &**pool).await?; + + println!("followed_users: {:?}", followed_users); + // Feed by default shows the following: + // - Projects created by users you follow + // - Projects created by organizations you follow + // - Versions created by users you follow + // - Versions created by organizations you follow + let event_types = [EventType::ProjectPublished, EventType::VersionCreated]; + let selectors = followed_users + .into_iter() + .flat_map(|follow| { + event_types.iter().map(move |event_type| EventSelector { + id: follow.target_id.into(), + event_type: *event_type, + }) + }) + .chain(followed_organizations.into_iter().flat_map(|follow| { + event_types.iter().map(move |event_type| EventSelector { + id: follow.target_id.into(), + event_type: *event_type, + }) + })) + .collect_vec(); + println!("selectors:"); + let events = DBEvent::get_events(&[], &selectors, &**pool) + .await? 
+ .into_iter() + .skip(params.offset.unwrap_or(0)) + .take(params.limit.unwrap_or(usize::MAX)) + .collect_vec(); + println!("events: {:?}", events); + let mut feed_items: Vec<FeedItem> = Vec::new(); + let authorized_versions = + prefetch_authorized_event_versions(&events, &pool, &redis, &current_user).await?; + let authorized_version_project_ids = authorized_versions + .values() + .map(|versions| versions.project_id) + .collect_vec(); + let authorized_projects = prefetch_authorized_event_projects( + &events, + Some(&authorized_version_project_ids), + &pool, + &redis, + &current_user, + ) + .await?; + println!("authorized projects"); + + for event in events { + let body = match event.event_data { + EventData::ProjectPublished { + project_id, + creator_id, + } => authorized_projects.get(&project_id.into()).map(|p| { + FeedItemBody::ProjectPublished { + project_id: project_id.into(), + creator_id: creator_id.into(), + project_title: p.title.clone(), + } + }), + EventData::VersionCreated { + version_id, + creator_id, + } => { + let authorized_version = authorized_versions.get(&version_id.into()); + let authorized_project = + authorized_version.and_then(|v| authorized_projects.get(&v.project_id)); + if let (Some(authorized_version), Some(authorized_project)) = + (authorized_version, authorized_project) + { + Some(FeedItemBody::VersionCreated { + project_id: authorized_project.id, + version_id: authorized_version.id, + creator_id: creator_id.into(), + project_title: authorized_project.title.clone(), + }) + } else { + None + } + } + }; + + if let Some(body) = body { + let feed_item = FeedItem { + id: event.id.into(), + body, + time: event.time, + }; + + feed_items.push(feed_item); + } + } + + Ok(HttpResponse::Ok().json(feed_items)) +} + +async fn prefetch_authorized_event_projects( + events: &[db_models::Event], + additional_ids: Option<&[ProjectId]>, + pool: &web::Data<PgPool>, + redis: &RedisPool, + current_user: &crate::models::v3::users::User, +) -> Result<HashMap<ProjectId, Project>, ApiError> { + let mut project_ids = events + .iter() + .filter_map(|e| match &e.event_data { + EventData::ProjectPublished { + project_id, + creator_id: _, + } => Some(*project_id), + EventData::VersionCreated { .. } => None, + }) + .collect_vec(); + if let Some(additional_ids) = additional_ids { + project_ids.extend( + additional_ids + .iter() + .copied() + .map(db_models::ProjectId::from), + ); + } + let projects = db_models::Project::get_many_ids(&project_ids, &***pool, redis).await?; + let authorized_projects = + filter_authorized_projects(projects, Some(current_user), pool).await?; + Ok(HashMap::<ProjectId, Project>::from_iter( + authorized_projects.into_iter().map(|p| (p.id, p)), + )) +} + +async fn prefetch_authorized_event_versions( + events: &[db_models::Event], + pool: &web::Data<PgPool>, + redis: &RedisPool, + current_user: &crate::models::v3::users::User, +) -> Result<HashMap<VersionId, Version>, ApiError> { + let version_ids = events + .iter() + .filter_map(|e| match &e.event_data { + EventData::VersionCreated { + version_id, + creator_id: _, + } => Some(*version_id), + EventData::ProjectPublished { ..
} => None, + }) + .collect_vec(); + let versions = db_models::Version::get_many(&version_ids, &***pool, redis).await?; + let authorized_versions = + filter_authorized_versions(versions, Some(current_user), pool).await?; + Ok(HashMap::::from_iter( + authorized_versions.into_iter().map(|v| (v.id, v)), + )) +} diff --git a/src/routes/v3/version_creation.rs b/src/routes/v3/version_creation.rs index 9caad08f..49b75f61 100644 --- a/src/routes/v3/version_creation.rs +++ b/src/routes/v3/version_creation.rs @@ -1,11 +1,12 @@ use super::project_creation::{CreateError, UploadedFile}; use crate::auth::get_user_from_headers; +use crate::database::models::event_item::{CreatorId, EventData}; use crate::database::models::loader_fields::{LoaderField, LoaderFieldEnumValue, VersionField}; use crate::database::models::notification_item::NotificationBuilder; use crate::database::models::version_item::{ DependencyBuilder, VersionBuilder, VersionFileBuilder, }; -use crate::database::models::{self, image_item, Organization}; +use crate::database::models::{self, image_item, Event, Organization}; use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; use crate::models::images::{Image, ImageContext, ImageId}; @@ -296,6 +297,14 @@ async fn version_create_inner( }) .collect::>(); + insert_version_create_event( + version_id, + organization.map(|o| o.id), + &user, + transaction, + ) + .await?; + version_builder = Some(VersionBuilder { version_id: version_id.into(), project_id, @@ -951,6 +960,27 @@ pub fn get_name_ext( Ok((file_name, file_extension)) } +async fn insert_version_create_event( + version_id: VersionId, + organization_id: Option, + current_user: &crate::models::users::User, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, +) -> Result<(), CreateError> { + let event = Event::new( + EventData::VersionCreated { + version_id: version_id.into(), + creator_id: organization_id.map_or_else( + || CreatorId::User(current_user.id.into()), + CreatorId::Organization, + ), + }, + transaction, + ) + .await?; + event.insert(transaction).await?; + Ok(()) +} + // Reused functionality between project_creation and version_creation // Create a list of VersionFields from the fetched data, and check that all mandatory fields are present pub fn try_create_version_fields( diff --git a/src/routes/v3/version_file.rs b/src/routes/v3/version_file.rs index 4820362f..5ec75341 100644 --- a/src/routes/v3/version_file.rs +++ b/src/routes/v3/version_file.rs @@ -211,7 +211,7 @@ pub async fn get_versions_from_hashes( let version_ids = files.iter().map(|x| x.version_id).collect::>(); let versions_data = filter_authorized_versions( database::models::Version::get_many(&version_ids, &**pool, &redis).await?, - &user_option, + user_option.as_ref(), &pool, ) .await?; @@ -259,7 +259,7 @@ pub async fn get_projects_from_hashes( let projects_data = filter_authorized_projects( database::models::Project::get_many_ids(&project_ids, &**pool, &redis).await?, - &user_option, + user_option.as_ref(), &pool, ) .await?; diff --git a/src/routes/v3/versions.rs b/src/routes/v3/versions.rs index 1e5a7f14..bde78275 100644 --- a/src/routes/v3/versions.rs +++ b/src/routes/v3/versions.rs @@ -132,7 +132,7 @@ pub async fn versions_get( .map(|x| x.1) .ok(); - let versions = filter_authorized_versions(versions_data, &user_option, &pool).await?; + let versions = filter_authorized_versions(versions_data, user_option.as_ref(), &pool).await?; Ok(HttpResponse::Ok().json(versions)) } @@ -818,7 +818,7 @@ pub async fn version_list( response.sort(); 
response.dedup_by(|a, b| a.inner.id == b.inner.id); - let response = filter_authorized_versions(response, &user_option, &pool).await?; + let response = filter_authorized_versions(response, user_option.as_ref(), &pool).await?; Ok(HttpResponse::Ok().json(response)) } else { diff --git a/src/util/actix.rs b/src/util/actix.rs index fc77e663..5d4ccae9 100644 --- a/src/util/actix.rs +++ b/src/util/actix.rs @@ -82,3 +82,13 @@ pub fn generate_multipart(data: impl IntoIterator) -> ( (boundary, Bytes::from(payload)) } + +pub trait TestRequestExtensions { + fn append_auth(self, pat: &str) -> TestRequest; +} + +impl TestRequestExtensions for TestRequest { + fn append_auth(self, pat: &str) -> TestRequest { + self.append_header((reqwest::header::AUTHORIZATION, pat)) + } +} diff --git a/tests/common/api_v2/project.rs b/tests/common/api_v2/project.rs index 10f910d2..d57ffeee 100644 --- a/tests/common/api_v2/project.rs +++ b/tests/common/api_v2/project.rs @@ -17,9 +17,19 @@ use std::collections::HashMap; use crate::common::{asserts::assert_status, database::MOD_USER_PAT}; -use super::{request_data::ImageData, ApiV2}; +use super::{ + request_data::{get_public_project_creation_data, ImageData}, + ApiV2, +}; impl ApiV2 { + pub async fn add_default_org_project(&self, org_id: &str, pat: &str) -> LegacyProject { + let project_create_data = + get_public_project_creation_data("thisisaslug", None, Some(org_id)); + let (project, _) = self.add_public_project(project_create_data, pat).await; + project + } + pub async fn add_public_project( &self, creation_data: ProjectCreationRequestData, diff --git a/tests/common/api_v2/request_data.rs b/tests/common/api_v2/request_data.rs index a7a84c30..0691e6d5 100644 --- a/tests/common/api_v2/request_data.rs +++ b/tests/common/api_v2/request_data.rs @@ -28,8 +28,10 @@ pub struct ImageData { pub fn get_public_project_creation_data( slug: &str, version_jar: Option, + organization_id: Option<&str>, ) -> ProjectCreationRequestData { - let json_data = get_public_project_creation_data_json(slug, version_jar.as_ref()); + let json_data = + get_public_project_creation_data_json(slug, version_jar.as_ref(), organization_id); let multipart_data = get_public_creation_data_multipart(&json_data, version_jar.as_ref()); ProjectCreationRequestData { slug: slug.to_string(), @@ -72,6 +74,7 @@ pub fn get_public_version_creation_data_json( pub fn get_public_project_creation_data_json( slug: &str, version_jar: Option<&TestFile>, + organization_id: Option<&str>, ) -> serde_json::Value { let initial_versions = if let Some(jar) = version_jar { json!([get_public_version_creation_data_json("1.2.3", jar)]) @@ -93,6 +96,7 @@ pub fn get_public_project_creation_data_json( "is_draft": is_draft, "categories": [], "license_id": "MIT", + "organization_id": organization_id, } ) } diff --git a/tests/common/api_v3/mod.rs b/tests/common/api_v3/mod.rs index 9ed4bce2..bdc7e721 100644 --- a/tests/common/api_v3/mod.rs +++ b/tests/common/api_v3/mod.rs @@ -11,6 +11,7 @@ pub mod project; pub mod request_data; pub mod tags; pub mod team; +pub mod user; pub mod version; #[derive(Clone)] diff --git a/tests/common/api_v3/organization.rs b/tests/common/api_v3/organization.rs index 4a17cd2f..689b2648 100644 --- a/tests/common/api_v3/organization.rs +++ b/tests/common/api_v3/organization.rs @@ -4,7 +4,8 @@ use actix_web::{ }; use bytes::Bytes; use labrinth::models::{organizations::Organization, v3::projects::Project}; -use serde_json::json; +use labrinth::util::actix::TestRequestExtensions; +use serde_json::json; // TODO: extend 
other tests to do this use super::{request_data::ImageData, ApiV3}; @@ -26,6 +27,19 @@ impl ApiV3 { self.call(req).await } + pub async fn create_organization_deserialized( + &self, + organization_title: &str, + description: &str, + pat: &str, + ) -> Organization { + let resp = self + .create_organization(organization_title, description, pat) + .await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } + pub async fn get_organization(&self, id_or_title: &str, pat: &str) -> ServiceResponse { let req = TestRequest::get() .uri(&format!("/v3/organization/{id_or_title}")) @@ -147,4 +161,22 @@ impl ApiV3 { self.call(req).await } + + pub async fn follow_organization(&self, organization_id: &str, pat: &str) -> ServiceResponse { + let req = TestRequest::post() + .uri(&format!("/v3/organization/{}/follow", organization_id)) + .append_auth(pat) + .to_request(); + + self.call(req).await + } + + pub async fn unfollow_organization(&self, organization_id: &str, pat: &str) -> ServiceResponse { + let req = TestRequest::delete() + .uri(&format!("/v3/organization/{}/follow", organization_id)) + .append_auth(pat) + .to_request(); + + self.call(req).await + } } diff --git a/tests/common/api_v3/project.rs b/tests/common/api_v3/project.rs index b4365d9c..8100c36f 100644 --- a/tests/common/api_v3/project.rs +++ b/tests/common/api_v3/project.rs @@ -18,11 +18,18 @@ use serde_json::json; use crate::common::{asserts::assert_status, database::MOD_USER_PAT}; use super::{ - request_data::{ImageData, ProjectCreationRequestData}, + request_data::{get_public_project_creation_data, ImageData, ProjectCreationRequestData}, ApiV3, }; impl ApiV3 { + pub async fn add_default_org_project(&self, org_id: &str, pat: &str) -> Project { + let project_create_data = + get_public_project_creation_data("thisisaslug", None, Some(org_id)); + let (project, _) = self.add_public_project(project_create_data, pat).await; + project + } + pub async fn add_public_project( &self, creation_data: ProjectCreationRequestData, diff --git a/tests/common/api_v3/request_data.rs b/tests/common/api_v3/request_data.rs index 6091992c..50ed8a59 100644 --- a/tests/common/api_v3/request_data.rs +++ b/tests/common/api_v3/request_data.rs @@ -28,8 +28,10 @@ pub struct ImageData { pub fn get_public_project_creation_data( slug: &str, version_jar: Option, + organization_id: Option<&str>, ) -> ProjectCreationRequestData { - let json_data = get_public_project_creation_data_json(slug, version_jar.as_ref()); + let json_data = + get_public_project_creation_data_json(slug, version_jar.as_ref(), organization_id); let multipart_data = get_public_creation_data_multipart(&json_data, version_jar.as_ref()); ProjectCreationRequestData { slug: slug.to_string(), @@ -88,6 +90,7 @@ pub fn get_public_version_creation_data_json( pub fn get_public_project_creation_data_json( slug: &str, version_jar: Option<&TestFile>, + organization_id: Option<&str>, ) -> serde_json::Value { let initial_versions = if let Some(jar) = version_jar { json!([get_public_version_creation_data_json("1.2.3", jar)]) @@ -96,7 +99,7 @@ pub fn get_public_project_creation_data_json( }; let is_draft = version_jar.is_none(); - json!( + let mut j = json!( { "title": format!("Test Project {slug}"), "slug": slug, @@ -107,7 +110,11 @@ pub fn get_public_project_creation_data_json( "categories": [], "license_id": "MIT", } - ) + ); + if let Some(organization_id) = organization_id { + j["organization_id"] = json!(organization_id); + } + j } pub fn get_public_creation_data_multipart( diff --git 
a/tests/common/api_v3/user.rs b/tests/common/api_v3/user.rs new file mode 100644 index 00000000..e5a4f9ef --- /dev/null +++ b/tests/common/api_v3/user.rs @@ -0,0 +1,40 @@ +use crate::common::asserts::assert_status; +use actix_http::StatusCode; +use actix_web::{ + dev::ServiceResponse, + test::{self, TestRequest}, +}; +use labrinth::{models::feeds::FeedItem, util::actix::TestRequestExtensions}; + +use super::ApiV3; + +impl ApiV3 { + pub async fn follow_user(&self, user_id: &str, pat: &str) -> ServiceResponse { + let req = TestRequest::post() + .uri(&format!("/v3/user/{}/follow", user_id)) + .append_auth(pat) + .to_request(); + + self.call(req).await + } + + pub async fn unfollow_user(&self, user_id: &str, pat: &str) -> ServiceResponse { + let req = TestRequest::delete() + .uri(&format!("/v3/user/{}/follow", user_id)) + .append_auth(pat) + .to_request(); + + self.call(req).await + } + + pub async fn get_feed(&self, pat: &str) -> Vec { + let req = TestRequest::get() + .uri("/v3/user/feed") + .append_auth(pat) + .to_request(); + let resp = self.call(req).await; + assert_status(&resp, StatusCode::OK); + + test::read_body_json(resp).await + } +} diff --git a/tests/common/asserts.rs b/tests/common/asserts.rs index cc6e35a6..ffd72b70 100644 --- a/tests/common/asserts.rs +++ b/tests/common/asserts.rs @@ -1,5 +1,7 @@ #![allow(dead_code)] +use labrinth::models::feeds::{FeedItem, FeedItemBody}; + use crate::common::get_json_val_str; use itertools::Itertools; use labrinth::models::v3::projects::Version; @@ -22,3 +24,16 @@ pub fn assert_any_status_except( ) { assert_ne!(response.status(), status, "{:#?}", response.response()); } + +pub fn assert_feed_contains_project_created( + feed: &[FeedItem], + expected_project_id: labrinth::models::projects::ProjectId, +) { + assert!(feed.iter().any(|fi| matches!(fi.body, FeedItemBody::ProjectPublished { project_id, .. } if project_id == expected_project_id)), "{:#?}", &feed); +} +pub fn assert_feed_contains_version_created( + feed: &[FeedItem], + expected_version_id: labrinth::models::projects::VersionId, +) { + assert!(feed.iter().any(|fi| matches!(fi.body, FeedItemBody::VersionCreated { version_id, .. 
} if version_id == expected_version_id)), "{:#?}", &feed); +} diff --git a/tests/common/dummy_data.rs b/tests/common/dummy_data.rs index aef26a71..c535783e 100644 --- a/tests/common/dummy_data.rs +++ b/tests/common/dummy_data.rs @@ -20,7 +20,7 @@ use super::{api_v3::request_data::get_public_project_creation_data, environment: use super::{asserts::assert_status, database::USER_USER_ID, get_json_val_str}; -pub const DUMMY_DATA_UPDATE: i64 = 5; +pub const DUMMY_DATA_UPDATE: i64 = 6; #[allow(dead_code)] pub const DUMMY_CATEGORIES: &[&str] = &[ @@ -307,7 +307,7 @@ pub async fn add_project_alpha(test_env: &TestEnvironment) -> (Project, Version) let (project, versions) = test_env .v3 .add_public_project( - get_public_project_creation_data("alpha", Some(TestFile::DummyProjectAlpha)), + get_public_project_creation_data("alpha", Some(TestFile::DummyProjectAlpha), None), USER_USER_PAT, ) .await; diff --git a/tests/common/permissions.rs b/tests/common/permissions.rs index 1bb2e20a..4698075b 100644 --- a/tests/common/permissions.rs +++ b/tests/common/permissions.rs @@ -925,7 +925,7 @@ async fn create_dummy_project(test_env: &TestEnvironment) -> (String, String) { // Create a very simple project let slug = generate_random_name("test_project"); - let creation_data = request_data::get_public_project_creation_data(&slug, None); + let creation_data = request_data::get_public_project_creation_data(&slug, None, None); let (project, _) = api.add_public_project(creation_data, ADMIN_USER_PAT).await; let project_id = project.id.to_string(); let team_id = project.team.to_string(); diff --git a/tests/feed.rs b/tests/feed.rs new file mode 100644 index 00000000..b59d9415 --- /dev/null +++ b/tests/feed.rs @@ -0,0 +1,125 @@ +use crate::common::{ + asserts::{assert_feed_contains_project_created, assert_feed_contains_version_created}, + dummy_data::DummyProjectAlpha, +}; +use assert_matches::assert_matches; +use common::{ + database::{FRIEND_USER_PAT, USER_USER_ID, USER_USER_PAT}, + environment::with_test_environment, +}; +use labrinth::models::{feeds::FeedItemBody, ids::base62_impl::parse_base62, projects::ProjectId}; + +mod common; + +#[actix_rt::test] +async fn get_feed_before_following_user_shows_no_projects() { + with_test_environment(|env| async move { + let feed = env.v3.get_feed(FRIEND_USER_PAT).await; + + assert_eq!(feed.len(), 0); + }) + .await +} + +#[actix_rt::test] +async fn get_feed_after_following_user_shows_previously_created_public_projects() { + with_test_environment(|env| async move { + let DummyProjectAlpha { + project_id: alpha_project_id, + .. + } = env.dummy.as_ref().unwrap().project_alpha.clone(); + env.v3.follow_user(USER_USER_ID, FRIEND_USER_PAT).await; + + let feed = env.v3.get_feed(FRIEND_USER_PAT).await; + + assert_eq!(feed.len(), 1); + assert_feed_contains_project_created( + &feed, + ProjectId(parse_base62(&alpha_project_id).unwrap()), + ); + }) + .await +} + +#[actix_rt::test] +async fn get_feed_after_following_user_shows_previously_created_public_versions() { + with_test_environment(|env| async move { + let DummyProjectAlpha { + project_id: alpha_project_id, + .. 
+ } = env.dummy.as_ref().unwrap().project_alpha.clone(); + + // Add version + let v = env + .v3 + .create_default_version(&alpha_project_id, None, USER_USER_PAT) + .await; + + env.v3.follow_user(USER_USER_ID, FRIEND_USER_PAT).await; + + let feed = env.v3.get_feed(FRIEND_USER_PAT).await; + + assert_eq!(feed.len(), 2); + assert_feed_contains_project_created( + &feed, + ProjectId(parse_base62(&alpha_project_id).unwrap()), + ); + assert_feed_contains_version_created(&feed, v.id); + // Notably, this should *not* have a projectupdated from the publishing. + }) + .await +} + +#[actix_rt::test] +async fn get_feed_when_following_user_that_creates_project_as_org_only_shows_event_when_following_org( +) { + with_test_environment(|env| async move { + let org_id = env.v3.create_organization_deserialized("test", "desc", USER_USER_PAT).await.id.to_string(); + let project = env.v3.add_default_org_project(&org_id, USER_USER_PAT).await; + + env.v3.follow_user(USER_USER_ID, FRIEND_USER_PAT).await; + let feed = env.v3.get_feed(FRIEND_USER_PAT).await; + assert_eq!(feed.len(), 1); + + assert_matches!(feed[0].body, FeedItemBody::ProjectPublished { project_id, .. } if project_id != project.id); + + env.v3.follow_organization(&org_id, FRIEND_USER_PAT).await; + let feed = env.v3.get_feed(FRIEND_USER_PAT).await; + assert_eq!(feed.len(), 2); + assert_feed_contains_project_created(&feed, project.id); + }) + .await; +} + +#[actix_rt::test] +async fn get_feed_after_unfollowing_user_no_longer_shows_feed_items() { + with_test_environment(|env| async move { + env.v3.follow_user(USER_USER_ID, FRIEND_USER_PAT).await; + + env.v3.unfollow_user(USER_USER_ID, FRIEND_USER_PAT).await; + let feed = env.v3.get_feed(FRIEND_USER_PAT).await; + + assert_eq!(feed.len(), 0); + }) + .await; +} + +#[actix_rt::test] +async fn get_feed_after_unfollowing_organization_no_longer_shows_feed_items() { + with_test_environment(|env| async move { + let org_id = env + .v3 + .create_organization_deserialized("test", "desc", USER_USER_PAT) + .await + .id + .to_string(); + env.v3.add_default_org_project(&org_id, USER_USER_PAT).await; + env.v3.follow_organization(&org_id, FRIEND_USER_PAT).await; + + env.v3.unfollow_organization(&org_id, FRIEND_USER_PAT).await; + let feed = env.v3.get_feed(FRIEND_USER_PAT).await; + + assert_eq!(feed.len(), 0); + }) + .await; +} diff --git a/tests/organizations.rs b/tests/organizations.rs index 66ef7def..b51bbebd 100644 --- a/tests/organizations.rs +++ b/tests/organizations.rs @@ -7,10 +7,14 @@ use crate::common::{ use actix_web::test; use bytes::Bytes; use common::{ + api_v3::request_data::get_public_project_creation_data, database::{FRIEND_USER_ID, FRIEND_USER_PAT, USER_USER_PAT}, permissions::{PermissionsTest, PermissionsTestContext}, }; -use labrinth::models::teams::{OrganizationPermissions, ProjectPermissions}; +use labrinth::{ + models::teams::{OrganizationPermissions, ProjectPermissions}, + util::actix::AppendsMultipart, +}; use serde_json::json; mod common; @@ -289,6 +293,75 @@ async fn add_remove_organization_projects() { test_env.cleanup().await; } +#[actix_rt::test] +async fn create_project_in_organization() { + let test_env = TestEnvironment::build(None).await; + let zeta_organization_id: &str = &test_env + .dummy + .as_ref() + .unwrap() + .organization_zeta + .organization_id; + + // Create project in organization + let resp = test_env + .v2 + .add_default_org_project(zeta_organization_id, USER_USER_PAT) + .await; + + // Get project + let project = test_env + .v2 + .get_project_deserialized(&resp.id.to_string(), 
USER_USER_PAT) + .await; + + // Ensure organization id is correctly set in both returned project and + // fetched project. + assert_eq!(resp.organization.unwrap().to_string(), zeta_organization_id); + assert_eq!( + project.organization.unwrap().to_string(), + zeta_organization_id + ); + + test_env.cleanup().await; +} + +#[actix_rt::test] +async fn permissions_create_project_in_organization() { + let test_env = TestEnvironment::build(None).await; + + let zeta_organization_id = &test_env + .dummy + .as_ref() + .unwrap() + .organization_zeta + .organization_id; + let zeta_team_id = &test_env.dummy.as_ref().unwrap().organization_zeta.team_id; + + // Requires ADD_PROJECT to create project in org + let add_project = OrganizationPermissions::ADD_PROJECT; + + let req_gen = |ctx: &PermissionsTestContext| { + let multipart = get_public_project_creation_data( + &generate_random_name("randomslug"), + None, + Some(ctx.organization_id.unwrap()), + ) + .segment_data; + test::TestRequest::post() + .uri("/v2/project") + .set_multipart(multipart) + }; + PermissionsTest::new(&test_env) + .with_existing_organization(zeta_organization_id, zeta_team_id) + .with_user(FRIEND_USER_ID, FRIEND_USER_PAT, true) + .simple_organization_permissions_test(add_project, req_gen) + .await + .unwrap(); + + test_env.cleanup().await; +} + +#[actix_rt::test] +async fn permissions_patch_organization() { + let test_env = TestEnvironment::build(Some(8)).await; diff --git a/tests/search.rs b/tests/search.rs index 36483547..8123edf3 100644 --- a/tests/search.rs +++ b/tests/search.rs @@ -40,7 +40,7 @@ async fn search_projects() { TestFile::build_random_jar() }; let mut basic_project_json = - request_data::get_public_project_creation_data_json(&slug, Some(&jar)); + request_data::get_public_project_creation_data_json(&slug, Some(&jar), None); modify_json(&mut basic_project_json); let basic_project_multipart = request_data::get_public_creation_data_multipart(&basic_project_json, Some(&jar)); diff --git a/tests/user.rs b/tests/user.rs index 75a97b83..21de492e 100644 --- a/tests/user.rs +++ b/tests/user.rs @@ -26,7 +26,7 @@ pub async fn get_user_projects_after_creating_project_returns_new_project() { let (project, _) = api .add_public_project( - get_public_project_creation_data("slug", Some(TestFile::BasicMod)), + get_public_project_creation_data("slug", Some(TestFile::BasicMod), None), USER_USER_PAT, ) .await; @@ -45,7 +45,7 @@ pub async fn get_user_projects_after_deleting_project_shows_removal() { let api = test_env.v3; let (project, _) = api .add_public_project( - get_public_project_creation_data("iota", Some(TestFile::BasicMod)), + get_public_project_creation_data("iota", Some(TestFile::BasicMod), None), USER_USER_PAT, ) .await; diff --git a/tests/v2/project.rs b/tests/v2/project.rs index 609b8481..07b32181 100644 --- a/tests/v2/project.rs +++ b/tests/v2/project.rs @@ -23,10 +23,12 @@ async fn test_project_type_sanity() { let test_creation_mod = request_data::get_public_project_creation_data( "test-mod", Some(TestFile::build_random_jar()), + None, ); let test_creation_modpack = request_data::get_public_project_creation_data( "test-modpack", Some(TestFile::build_random_mrpack()), + None, ); for (mod_or_modpack, test_creation_data) in [ ("mod", test_creation_mod), diff --git a/tests/v2/search.rs b/tests/v2/search.rs index fbe39ca6..1e7beb74 100644 --- a/tests/v2/search.rs +++ b/tests/v2/search.rs @@ -37,7 +37,7 @@ async fn search_projects() { TestFile::build_random_jar() }; let mut basic_project_json = - 
request_data::get_public_project_creation_data_json(&slug, Some(&jar)); + request_data::get_public_project_creation_data_json(&slug, Some(&jar), None); modify_json(&mut basic_project_json); let basic_project_multipart =