From 50ce7961d1e905fc770385babe0ba0ae435038a0 Mon Sep 17 00:00:00 2001
From: Joseph Silva
Date: Fri, 27 Sep 2024 05:51:10 -0700
Subject: [PATCH 1/5] Apply scheduled post limit to future posts instead of past posts, and verify this in test (#5054)

* test scheduled_post_count

* fix syntax error

* fix formatting

* fix argument order

* fix user_scheduled_post_count function
---
 crates/db_schema/src/impls/post.rs | 24 ++++++++++++++++++++----
 1 file changed, 20 insertions(+), 4 deletions(-)

diff --git a/crates/db_schema/src/impls/post.rs b/crates/db_schema/src/impls/post.rs
index 075f72d23..aa5568b0e 100644
--- a/crates/db_schema/src/impls/post.rs
+++ b/crates/db_schema/src/impls/post.rs
@@ -258,9 +258,9 @@ impl Post {
     post::table
       .inner_join(person::table)
       .inner_join(community::table)
-      // find all posts which have scheduled_publish_time that is in the past
+      // find all posts which have scheduled_publish_time that is in the future
       .filter(post::scheduled_publish_time.is_not_null())
-      .filter(coalesce(post::scheduled_publish_time, now()).lt(now()))
+      .filter(coalesce(post::scheduled_publish_time, now()).gt(now()))
       // make sure the post and community are still around
       .filter(not(post::deleted.or(post::removed)))
       .filter(not(community::removed.or(community::deleted)))
@@ -414,6 +414,7 @@ mod tests {
     traits::{Crud, Likeable, Saveable},
     utils::build_db_pool_for_tests,
   };
+  use chrono::DateTime;
   use pretty_assertions::assert_eq;
   use serial_test::serial;
   use std::collections::HashSet;
@@ -456,6 +457,12 @@ mod tests {
     );
     let inserted_post2 = Post::create(pool, &new_post2).await.unwrap();
 
+    let new_scheduled_post = PostInsertForm {
+      scheduled_publish_time: Some(DateTime::from_timestamp_nanos(i64::MAX)),
+      ..PostInsertForm::new("beans".into(), inserted_person.id, inserted_community.id)
+    };
+    let inserted_scheduled_post = Post::create(pool, &new_scheduled_post).await.unwrap();
+
     let expected_post = Post {
       id: inserted_post.id,
       name: "A test post".into(),
@@ -535,6 +542,12 @@ mod tests {
       .await
       .unwrap();
 
+    // Scheduled post count
+    let scheduled_post_count = Post::user_scheduled_post_count(inserted_person.id, pool)
+      .await
+      .unwrap();
+    assert_eq!(1, scheduled_post_count);
+
     let like_removed = PostLike::remove(pool, inserted_person.id, inserted_post.id)
       .await
       .unwrap();
@@ -551,8 +564,11 @@ mod tests {
     assert_eq!(2, read_removed);
 
     let num_deleted = Post::delete(pool, inserted_post.id).await.unwrap()
-      + Post::delete(pool, inserted_post2.id).await.unwrap();
-    assert_eq!(2, num_deleted);
+      + Post::delete(pool, inserted_post2.id).await.unwrap()
+      + Post::delete(pool, inserted_scheduled_post.id)
+        .await
+        .unwrap();
+    assert_eq!(3, num_deleted);
     Community::delete(pool, inserted_community.id)
       .await
       .unwrap();

From e82f72d3c8c435ff302f5b54d974dd66b609fc50 Mon Sep 17 00:00:00 2001
From: Nutomic
Date: Fri, 27 Sep 2024 15:23:19 +0200
Subject: [PATCH 2/5] Avoid breaking changes, keep response fields as deprecated (#5058)

---
 crates/api/src/site/leave_admin.rs | 2 ++
 crates/api_common/src/site.rs      | 6 ++++++
 crates/api_crud/src/site/create.rs | 5 ++++-
 crates/api_crud/src/site/read.rs   | 2 ++
 crates/api_crud/src/site/update.rs | 5 ++++-
 5 files changed, 18 insertions(+), 2 deletions(-)

diff --git a/crates/api/src/site/leave_admin.rs b/crates/api/src/site/leave_admin.rs
index 5e1e69d49..f5ddec35b 100644
--- a/crates/api/src/site/leave_admin.rs
+++ b/crates/api/src/site/leave_admin.rs
@@ -76,5 +76,7 @@ pub async fn leave_admin(
     admin_oauth_providers: None,
     blocked_urls,
     tagline,
+    taglines: vec![],
+    custom_emojis: vec![],
   }))
 }
diff --git a/crates/api_common/src/site.rs b/crates/api_common/src/site.rs
index 1ffabb75a..d82303327 100644
--- a/crates/api_common/src/site.rs
+++ b/crates/api_common/src/site.rs
@@ -306,6 +306,8 @@ pub struct EditSite {
 /// The response for a site.
 pub struct SiteResponse {
   pub site_view: SiteView,
+  /// deprecated, use field `tagline` or /api/v3/tagline/list
+  pub taglines: Vec<()>,
 }
 
 #[skip_serializing_none]
@@ -320,6 +322,10 @@ pub struct GetSiteResponse {
   pub my_user: Option<MyUserInfo>,
   pub all_languages: Vec<Language>,
   pub discussion_languages: Vec<LanguageId>,
+  /// deprecated, use field `tagline` or /api/v3/tagline/list
+  pub taglines: Vec<()>,
+  /// deprecated, use /api/v3/custom_emoji/list
+  pub custom_emojis: Vec<()>,
   /// If the site has any taglines, a random one is included here for displaying
   pub tagline: Option<Tagline>,
   /// A list of external auth methods your site supports.
diff --git a/crates/api_crud/src/site/create.rs b/crates/api_crud/src/site/create.rs
index 28faa0aac..9b0439da8 100644
--- a/crates/api_crud/src/site/create.rs
+++ b/crates/api_crud/src/site/create.rs
@@ -139,7 +139,10 @@ pub async fn create_site(
     local_site_rate_limit_to_rate_limit_config(&site_view.local_site_rate_limit);
   context.rate_limit_cell().set_config(rate_limit_config);
 
-  Ok(Json(SiteResponse { site_view }))
+  Ok(Json(SiteResponse {
+    site_view,
+    taglines: vec![],
+  }))
 }
 
 fn validate_create_payload(local_site: &LocalSite, create_site: &CreateSite) -> LemmyResult<()> {
diff --git a/crates/api_crud/src/site/read.rs b/crates/api_crud/src/site/read.rs
index 0901b9186..20c10c415 100644
--- a/crates/api_crud/src/site/read.rs
+++ b/crates/api_crud/src/site/read.rs
@@ -59,6 +59,8 @@ pub async fn get_site(
         tagline,
         oauth_providers: Some(oauth_providers),
         admin_oauth_providers: Some(admin_oauth_providers),
+        taglines: vec![],
+        custom_emojis: vec![],
       })
     })
     .await
diff --git a/crates/api_crud/src/site/update.rs b/crates/api_crud/src/site/update.rs
index 8b1934572..495a5cc98 100644
--- a/crates/api_crud/src/site/update.rs
+++ b/crates/api_crud/src/site/update.rs
@@ -193,7 +193,10 @@ pub async fn update_site(
     local_site_rate_limit_to_rate_limit_config(&site_view.local_site_rate_limit);
   context.rate_limit_cell().set_config(rate_limit_config);
 
-  Ok(Json(SiteResponse { site_view }))
+  Ok(Json(SiteResponse {
+    site_view,
+    taglines: vec![],
+  }))
 }
 
 fn validate_update_payload(local_site: &LocalSite, edit_site: &EditSite) -> LemmyResult<()> {

From f7d881ac78862e2d0171b5e84251de4f60bedf3a Mon Sep 17 00:00:00 2001
From: Dessalines
Date: Fri, 27 Sep 2024 11:15:44 -0400
Subject: [PATCH 3/5] Adding skip_serializing_none to another OAuth API request. (#5060)

---
 crates/api_common/src/oauth_provider.rs | 1 +
 crates/db_schema/src/schema.rs          | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/crates/api_common/src/oauth_provider.rs b/crates/api_common/src/oauth_provider.rs
index 4719480cc..14847edf1 100644
--- a/crates/api_common/src/oauth_provider.rs
+++ b/crates/api_common/src/oauth_provider.rs
@@ -5,6 +5,7 @@ use serde_with::skip_serializing_none;
 use ts_rs::TS;
 use url::Url;
 
+#[skip_serializing_none]
 #[derive(Debug, Serialize, Deserialize, Clone)]
 #[cfg_attr(feature = "full", derive(TS))]
 #[cfg_attr(feature = "full", ts(export))]
diff --git a/crates/db_schema/src/schema.rs b/crates/db_schema/src/schema.rs
index ab636c7d7..9617d9954 100644
--- a/crates/db_schema/src/schema.rs
+++ b/crates/db_schema/src/schema.rs
@@ -769,7 +769,7 @@ diesel::table! {
     featured_local -> Bool,
     url_content_type -> Nullable<Text>,
     alt_text -> Nullable<Text>,
-    scheduled_publish_time -> Nullable<Timestamptz>
+    scheduled_publish_time -> Nullable<Timestamptz>,
   }
 }

From 5115ed4c0941f262bc803be10803ce641c1fb789 Mon Sep 17 00:00:00 2001
From: Nutomic
Date: Tue, 1 Oct 2024 02:21:06 +0200
Subject: [PATCH 4/5] Handle partial settings backup (fixes #4307) (#5063)

* Handle partial settings backup (fixes #4307)

* clippy
---
 crates/apub/src/api/user_settings_backup.rs | 82 ++++++++++-----------
 crates/db_schema/src/source/local_user.rs   |  3 +-
 2 files changed, 40 insertions(+), 45 deletions(-)

diff --git a/crates/apub/src/api/user_settings_backup.rs b/crates/apub/src/api/user_settings_backup.rs
index 65ab01d7a..6a524b71d 100644
--- a/crates/apub/src/api/user_settings_backup.rs
+++ b/crates/apub/src/api/user_settings_backup.rs
@@ -103,13 +103,16 @@ pub async fn import_settings(
   context: Data<LemmyContext>,
 ) -> LemmyResult<Json<SuccessResponse>> {
   let person_form = PersonUpdateForm {
-    display_name: Some(data.display_name.clone()),
-    bio: Some(data.bio.clone()),
-    matrix_user_id: Some(data.matrix_id.clone()),
+    display_name: data.display_name.clone().map(Some),
+    bio: data.bio.clone().map(Some),
+    matrix_user_id: data.matrix_id.clone().map(Some),
     bot_account: data.bot_account,
     ..Default::default()
   };
-  Person::update(&mut context.pool(), local_user_view.person.id, &person_form).await?;
+  // ignore error in case form is empty
+  Person::update(&mut context.pool(), local_user_view.person.id, &person_form)
+    .await
+    .ok();
 
   let local_user_form = LocalUserUpdateForm {
     show_nsfw: data.settings.as_ref().map(|s| s.show_nsfw),
@@ -312,8 +315,9 @@ where
 #[expect(clippy::indexing_slicing)]
 mod tests {
 
-  use crate::api::user_settings_backup::{export_settings, import_settings, UserSettingsBackup};
+  use crate::api::user_settings_backup::{export_settings, import_settings};
   use activitypub_federation::config::Data;
+  use actix_web::web::Json;
   use lemmy_api_common::context::LemmyContext;
   use lemmy_db_schema::{
     source::{
@@ -401,45 +405,6 @@ mod tests {
     Ok(())
   }
 
-  #[tokio::test]
-  #[serial]
-  async fn test_settings_partial_import() -> LemmyResult<()> {
-    let context = LemmyContext::init_test_context().await;
-
-    let export_user =
-      create_user("hanna".to_string(), Some("my bio".to_string()), &context).await?;
-
-    let community_form = CommunityInsertForm::new(
-      export_user.person.instance_id,
-      "testcom".to_string(),
-      "testcom".to_string(),
-      "pubkey".to_string(),
-    );
-    let community = Community::create(&mut context.pool(), &community_form).await?;
-    let follower_form = CommunityFollowerForm {
-      community_id: community.id,
-      person_id: export_user.person.id,
-      pending: false,
-    };
-    CommunityFollower::follow(&mut context.pool(), &follower_form).await?;
-
-    let backup = export_settings(export_user.clone(), context.reset_request_count()).await?;
-
-    let import_user = create_user("charles".to_string(), None, &context).await?;
-
-    let backup2 = UserSettingsBackup {
-      followed_communities: backup.followed_communities.clone(),
-      ..Default::default()
-    };
-    import_settings(
-      actix_web::web::Json(backup2),
-      import_user.clone(),
-      context.reset_request_count(),
-    )
-    .await?;
-    Ok(())
-  }
-
   #[tokio::test]
   #[serial]
   async fn disallow_large_backup() -> LemmyResult<()> {
@@ -475,4 +440,33 @@ mod tests {
     LocalUser::delete(&mut context.pool(), import_user.local_user.id).await?;
     Ok(())
   }
+
+  #[tokio::test]
+  #[serial]
+  async fn import_partial_backup() -> LemmyResult<()> {
+    let context = LemmyContext::init_test_context().await;
+
+    let import_user =
create_user("hanna".to_string(), Some("my bio".to_string()), &context).await?; + + let backup = + serde_json::from_str("{\"bot_account\": true, \"settings\": {\"theme\": \"my_theme\"}}")?; + import_settings( + Json(backup), + import_user.clone(), + context.reset_request_count(), + ) + .await?; + + let import_user_updated = + LocalUserView::read(&mut context.pool(), import_user.local_user.id).await?; + // mark as bot account + assert!(import_user_updated.person.bot_account); + // dont remove existing bio + assert_eq!(import_user.person.bio, import_user_updated.person.bio); + // local_user can be deserialized without id/person_id fields + assert_eq!("my_theme", import_user_updated.local_user.theme); + + Ok(()) + } } diff --git a/crates/db_schema/src/source/local_user.rs b/crates/db_schema/src/source/local_user.rs index d83fa798c..37da70908 100644 --- a/crates/db_schema/src/source/local_user.rs +++ b/crates/db_schema/src/source/local_user.rs @@ -14,11 +14,12 @@ use serde_with::skip_serializing_none; use ts_rs::TS; #[skip_serializing_none] -#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize, Default)] #[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", diesel(table_name = local_user))] #[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", ts(export))] +#[serde(default)] /// A local user. pub struct LocalUser { pub id: LocalUserId, From 44dda08b136750000fb646b9e411293da31a3025 Mon Sep 17 00:00:00 2001 From: Nutomic Date: Tue, 1 Oct 2024 02:27:14 +0200 Subject: [PATCH 5/5] Avoid stack overflow when fetching nested comments, reduce max comment depth to 50 (#5009) * Avoid stack overflow when fetching deeply nested comments * add test case * reduce comment depth, add docs * decrease * reduce max comment depth to 50 * fmt * clippy * cleanup --- Cargo.toml | 2 +- api_tests/src/comment.spec.ts | 23 +++++++++++++++++++++++ crates/api_crud/src/comment/create.rs | 3 +-- crates/apub/src/protocol/objects/note.rs | 19 ++++++++++++++----- crates/utils/src/lib.rs | 2 ++ 5 files changed, 41 insertions(+), 8 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 49a8e1d61..6b594a140 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -90,7 +90,7 @@ lemmy_db_views = { version = "=0.19.6-beta.7", path = "./crates/db_views" } lemmy_db_views_actor = { version = "=0.19.6-beta.7", path = "./crates/db_views_actor" } lemmy_db_views_moderator = { version = "=0.19.6-beta.7", path = "./crates/db_views_moderator" } lemmy_federate = { version = "=0.19.6-beta.7", path = "./crates/federate" } -activitypub_federation = { version = "0.6.0-alpha1", default-features = false, features = [ +activitypub_federation = { version = "0.6.0-alpha2", default-features = false, features = [ "actix-web", ] } diesel = "2.1.6" diff --git a/api_tests/src/comment.spec.ts b/api_tests/src/comment.spec.ts index c18469e33..5f2059e4f 100644 --- a/api_tests/src/comment.spec.ts +++ b/api_tests/src/comment.spec.ts @@ -858,3 +858,26 @@ test("Dont send a comment reply to a blocked community", async () => { blockRes = await blockCommunity(beta, newCommunityId, false); expect(blockRes.blocked).toBe(false); }); + +/// Fetching a deeply nested comment can lead to stack overflow as all parent comments are also +/// fetched recursively. Ensure that it works properly. 
+test("Fetch a deeply nested comment", async () => { + let lastComment; + for (let i = 0; i < 50; i++) { + let commentRes = await createComment( + alpha, + postOnAlphaRes.post_view.post.id, + lastComment?.comment_view.comment.id, + ); + expect(commentRes.comment_view.comment).toBeDefined(); + lastComment = commentRes; + } + + let betaComment = await resolveComment( + beta, + lastComment!.comment_view.comment, + ); + + expect(betaComment!.comment!.comment).toBeDefined(); + expect(betaComment?.comment?.post).toBeDefined(); +}); diff --git a/crates/api_crud/src/comment/create.rs b/crates/api_crud/src/comment/create.rs index 795c27b35..273ab7a5f 100644 --- a/crates/api_crud/src/comment/create.rs +++ b/crates/api_crud/src/comment/create.rs @@ -30,10 +30,9 @@ use lemmy_db_views::structs::{LocalUserView, PostView}; use lemmy_utils::{ error::{LemmyErrorExt, LemmyErrorType, LemmyResult}, utils::{mention::scrape_text_for_mentions, validation::is_valid_body_field}, + MAX_COMMENT_DEPTH_LIMIT, }; -const MAX_COMMENT_DEPTH_LIMIT: usize = 100; - #[tracing::instrument(skip(context))] pub async fn create_comment( data: Json, diff --git a/crates/apub/src/protocol/objects/note.rs b/crates/apub/src/protocol/objects/note.rs index a092cec9f..e3e204254 100644 --- a/crates/apub/src/protocol/objects/note.rs +++ b/crates/apub/src/protocol/objects/note.rs @@ -20,10 +20,9 @@ use lemmy_db_schema::{ source::{community::Community, post::Post}, traits::Crud, }; -use lemmy_utils::error::LemmyResult; +use lemmy_utils::{error::LemmyResult, LemmyErrorType, MAX_COMMENT_DEPTH_LIMIT}; use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; -use std::ops::Deref; use url::Url; #[skip_serializing_none] @@ -58,9 +57,19 @@ impl Note { &self, context: &Data, ) -> LemmyResult<(ApubPost, Option)> { - // Fetch parent comment chain in a box, otherwise it can cause a stack overflow. - let parent = Box::pin(self.in_reply_to.dereference(context).await?); - match parent.deref() { + // We use recursion here to fetch the entire comment chain up to the top-level parent. This is + // necessary because we need to know the post and parent comment in order to insert a new + // comment. However it can also lead to stack overflow when fetching many comments recursively. + // To avoid this we check the request count against max comment depth, which based on testing + // can be handled without risking stack overflow. This is not a perfect solution, because in + // some cases we have to fetch user profiles too, and reach the limit after only 25 comments + // or so. + // A cleaner solution would be converting the recursion into a loop, but that is tricky. + if context.request_count() > MAX_COMMENT_DEPTH_LIMIT as u32 { + Err(LemmyErrorType::MaxCommentDepthReached)?; + } + let parent = self.in_reply_to.dereference(context).await?; + match parent { PostOrComment::Post(p) => Ok((p.clone(), None)), PostOrComment::Comment(c) => { let post_id = c.post_id; diff --git a/crates/utils/src/lib.rs b/crates/utils/src/lib.rs index 5a5e76d2a..7f0691496 100644 --- a/crates/utils/src/lib.rs +++ b/crates/utils/src/lib.rs @@ -29,6 +29,8 @@ pub const CACHE_DURATION_FEDERATION: Duration = Duration::from_secs(60); pub const CACHE_DURATION_API: Duration = Duration::from_secs(1); +pub const MAX_COMMENT_DEPTH_LIMIT: usize = 50; + #[macro_export] macro_rules! location_info { () => {