Compare commits

...

6 Commits

Author | SHA1 | Message | Date

Dessalines | ca8458a7db | Merge remote-tracking branch 'origin/main' into remove_success_responses | 2024-09-30 20:58:40 -04:00

Nutomic | 44dda08b13 | Avoid stack overflow when fetching nested comments, reduce max comment depth to 50 (#5009) | 2024-09-30 20:27:14 -04:00
  * Avoid stack overflow when fetching deeply nested comments
  * add test case
  * reduce comment depth, add docs
  * decrease
  * reduce max comment depth to 50
  * fmt
  * clippy
  * cleanup

Nutomic | 5115ed4c09 | Handle partial settings backup (fixes #4307) (#5063) | 2024-09-30 20:21:06 -04:00
  * Handle partial settings backup (fixes #4307)
  * clippy

Dessalines | f7d881ac78 | Adding skip_serializing_none to another OAuth API request. (#5060) | 2024-09-27 11:15:44 -04:00

Nutomic | e82f72d3c8 | Avoid breaking changes, keep response fields as deprecated (#5058) | 2024-09-27 09:23:19 -04:00

Joseph Silva | 50ce7961d1 | Apply scheduled post limit to future posts instead of past posts, and verify this in test (#5054) | 2024-09-27 08:51:10 -04:00
  * test scheduled_post_count
  * fix syntax error
  * fix formatting
  * fix argument order
  * fix user_scheduled_post_count function
15 changed files with 120 additions and 61 deletions

View File

@@ -90,7 +90,7 @@ lemmy_db_views = { version = "=0.19.6-beta.7", path = "./crates/db_views" }
 lemmy_db_views_actor = { version = "=0.19.6-beta.7", path = "./crates/db_views_actor" }
 lemmy_db_views_moderator = { version = "=0.19.6-beta.7", path = "./crates/db_views_moderator" }
 lemmy_federate = { version = "=0.19.6-beta.7", path = "./crates/federate" }
-activitypub_federation = { version = "0.6.0-alpha1", default-features = false, features = [
+activitypub_federation = { version = "0.6.0-alpha2", default-features = false, features = [
   "actix-web",
 ] }
 diesel = "2.1.6"

View File

@@ -858,3 +858,26 @@ test("Dont send a comment reply to a blocked community", async () => {
   blockRes = await blockCommunity(beta, newCommunityId, false);
   expect(blockRes.blocked).toBe(false);
 });
+
+/// Fetching a deeply nested comment can lead to stack overflow as all parent comments are also
+/// fetched recursively. Ensure that it works properly.
+test("Fetch a deeply nested comment", async () => {
+  let lastComment;
+  for (let i = 0; i < 50; i++) {
+    let commentRes = await createComment(
+      alpha,
+      postOnAlphaRes.post_view.post.id,
+      lastComment?.comment_view.comment.id,
+    );
+    expect(commentRes.comment_view.comment).toBeDefined();
+    lastComment = commentRes;
+  }
+
+  let betaComment = await resolveComment(
+    beta,
+    lastComment!.comment_view.comment,
+  );
+
+  expect(betaComment!.comment!.comment).toBeDefined();
+  expect(betaComment?.comment?.post).toBeDefined();
+});

View File

@@ -76,5 +76,7 @@ pub async fn leave_admin(
     admin_oauth_providers: None,
     blocked_urls,
     tagline,
+    taglines: vec![],
+    custom_emojis: vec![],
   }))
 }

View File

@@ -5,6 +5,7 @@ use serde_with::skip_serializing_none;
 use ts_rs::TS;
 use url::Url;
 
+#[skip_serializing_none]
 #[derive(Debug, Serialize, Deserialize, Clone)]
 #[cfg_attr(feature = "full", derive(TS))]
 #[cfg_attr(feature = "full", ts(export))]
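
For context: #[skip_serializing_none] comes from the serde_with crate and rewrites every Option field so that None values are omitted from the serialized JSON instead of appearing as null. A minimal sketch of the effect, using a made-up Example struct rather than the Lemmy request type above:

    use serde::Serialize;
    use serde_with::skip_serializing_none;

    // Illustrative struct only; the attribute must appear before the
    // Serialize derive so it can rewrite the Option fields.
    #[skip_serializing_none]
    #[derive(Serialize)]
    struct Example {
        name: String,
        alt_text: Option<String>,
    }

    fn main() {
        let e = Example { name: "a".into(), alt_text: None };
        // Prints {"name":"a"}; without the attribute it would print
        // {"name":"a","alt_text":null}.
        println!("{}", serde_json::to_string(&e).unwrap());
    }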

View File

@@ -306,6 +306,8 @@ pub struct EditSite {
 /// The response for a site.
 pub struct SiteResponse {
   pub site_view: SiteView,
+  /// deprecated, use field `tagline` or /api/v3/tagline/list
+  pub taglines: Vec<()>,
 }
 
 #[skip_serializing_none]
@@ -320,6 +322,10 @@ pub struct GetSiteResponse {
   pub my_user: Option<MyUserInfo>,
   pub all_languages: Vec<Language>,
   pub discussion_languages: Vec<LanguageId>,
+  /// deprecated, use field `tagline` or /api/v3/tagline/list
+  pub taglines: Vec<()>,
+  /// deprecated, use /api/v3/custom_emoji/list
+  pub custom_emojis: Vec<()>,
   /// If the site has any taglines, a random one is included here for displaying
   pub tagline: Option<Tagline>,
   /// A list of external auth methods your site supports.
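
The Vec<()> type in these deprecated fields is what keeps the wire format stable: the unit type carries no data, and an empty vec of it still serializes as a JSON array, so older clients reading taglines or custom_emojis keep receiving []. A one-line sketch of that assumption:

    fn main() {
        // An empty vec of the unit type serializes as a plain JSON array,
        // so deprecated fields keep their old shape on the wire.
        let taglines: Vec<()> = vec![];
        assert_eq!(serde_json::to_string(&taglines).unwrap(), "[]");
    }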

View File

@@ -30,10 +30,9 @@ use lemmy_db_views::structs::{LocalUserView, PostView};
 use lemmy_utils::{
   error::{LemmyErrorExt, LemmyErrorType, LemmyResult},
   utils::{mention::scrape_text_for_mentions, validation::is_valid_body_field},
+  MAX_COMMENT_DEPTH_LIMIT,
 };
 
-const MAX_COMMENT_DEPTH_LIMIT: usize = 100;
-
 #[tracing::instrument(skip(context))]
 pub async fn create_comment(
   data: Json<CreateComment>,

View File

@@ -139,7 +139,10 @@ pub async fn create_site(
     local_site_rate_limit_to_rate_limit_config(&site_view.local_site_rate_limit);
   context.rate_limit_cell().set_config(rate_limit_config);
 
-  Ok(Json(SiteResponse { site_view }))
+  Ok(Json(SiteResponse {
+    site_view,
+    taglines: vec![],
+  }))
 }
 
 fn validate_create_payload(local_site: &LocalSite, create_site: &CreateSite) -> LemmyResult<()> {

View File

@@ -59,6 +59,8 @@ pub async fn get_site(
       tagline,
       oauth_providers: Some(oauth_providers),
       admin_oauth_providers: Some(admin_oauth_providers),
+      taglines: vec![],
+      custom_emojis: vec![],
     })
   })
   .await

View File

@@ -193,7 +193,10 @@ pub async fn update_site(
     local_site_rate_limit_to_rate_limit_config(&site_view.local_site_rate_limit);
   context.rate_limit_cell().set_config(rate_limit_config);
 
-  Ok(Json(SiteResponse { site_view }))
+  Ok(Json(SiteResponse {
+    site_view,
+    taglines: vec![],
+  }))
 }
 
 fn validate_update_payload(local_site: &LocalSite, edit_site: &EditSite) -> LemmyResult<()> {

View File

@@ -103,13 +103,16 @@ pub async fn import_settings(
   context: Data<LemmyContext>,
 ) -> LemmyResult<Json<SuccessResponse>> {
   let person_form = PersonUpdateForm {
-    display_name: Some(data.display_name.clone()),
-    bio: Some(data.bio.clone()),
-    matrix_user_id: Some(data.matrix_id.clone()),
+    display_name: data.display_name.clone().map(Some),
+    bio: data.bio.clone().map(Some),
+    matrix_user_id: data.bio.clone().map(Some),
     bot_account: data.bot_account,
     ..Default::default()
   };
-  Person::update(&mut context.pool(), local_user_view.person.id, &person_form).await?;
+  // ignore error in case form is empty
+  Person::update(&mut context.pool(), local_user_view.person.id, &person_form)
+    .await
+    .ok();
 
   let local_user_form = LocalUserUpdateForm {
     show_nsfw: data.settings.as_ref().map(|s| s.show_nsfw),
@@ -312,8 +315,9 @@ where
 
 #[expect(clippy::indexing_slicing)]
 mod tests {
-  use crate::api::user_settings_backup::{export_settings, import_settings, UserSettingsBackup};
+  use crate::api::user_settings_backup::{export_settings, import_settings};
   use activitypub_federation::config::Data;
+  use actix_web::web::Json;
   use lemmy_api_common::context::LemmyContext;
   use lemmy_db_schema::{
     source::{
@@ -401,45 +405,6 @@ mod tests {
     Ok(())
   }
 
-  #[tokio::test]
-  #[serial]
-  async fn test_settings_partial_import() -> LemmyResult<()> {
-    let context = LemmyContext::init_test_context().await;
-
-    let export_user =
-      create_user("hanna".to_string(), Some("my bio".to_string()), &context).await?;
-
-    let community_form = CommunityInsertForm::new(
-      export_user.person.instance_id,
-      "testcom".to_string(),
-      "testcom".to_string(),
-      "pubkey".to_string(),
-    );
-    let community = Community::create(&mut context.pool(), &community_form).await?;
-    let follower_form = CommunityFollowerForm {
-      community_id: community.id,
-      person_id: export_user.person.id,
-      pending: false,
-    };
-    CommunityFollower::follow(&mut context.pool(), &follower_form).await?;
-
-    let backup = export_settings(export_user.clone(), context.reset_request_count()).await?;
-
-    let import_user = create_user("charles".to_string(), None, &context).await?;
-
-    let backup2 = UserSettingsBackup {
-      followed_communities: backup.followed_communities.clone(),
-      ..Default::default()
-    };
-    import_settings(
-      actix_web::web::Json(backup2),
-      import_user.clone(),
-      context.reset_request_count(),
-    )
-    .await?;
-    Ok(())
-  }
-
   #[tokio::test]
   #[serial]
   async fn disallow_large_backup() -> LemmyResult<()> {
@@ -475,4 +440,33 @@
     LocalUser::delete(&mut context.pool(), import_user.local_user.id).await?;
     Ok(())
   }
+
+  #[tokio::test]
+  #[serial]
+  async fn import_partial_backup() -> LemmyResult<()> {
+    let context = LemmyContext::init_test_context().await;
+
+    let import_user =
+      create_user("hanna".to_string(), Some("my bio".to_string()), &context).await?;
+
+    let backup =
+      serde_json::from_str("{\"bot_account\": true, \"settings\": {\"theme\": \"my_theme\"}}")?;
+    import_settings(
+      Json(backup),
+      import_user.clone(),
+      context.reset_request_count(),
+    )
+    .await?;
+
+    let import_user_updated =
+      LocalUserView::read(&mut context.pool(), import_user.local_user.id).await?;
+
+    // mark as bot account
+    assert!(import_user_updated.person.bot_account);
+    // dont remove existing bio
+    assert_eq!(import_user.person.bio, import_user_updated.person.bio);
+    // local_user can be deserialized without id/person_id fields
+    assert_eq!("my_theme", import_user_updated.local_user.theme);
+    Ok(())
+  }
 }
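
The .map(Some) change above is what makes the import partial. Diesel-style update forms use a double Option for nullable columns: the outer Option decides whether to touch the column at all, the inner one holds the new value. A minimal sketch with a made-up FormSketch type, not the real PersonUpdateForm:

    // Double-Option convention for a nullable column in an update form:
    //   None          -> leave the column unchanged
    //   Some(None)    -> set the column to NULL
    //   Some(Some(v)) -> set the column to v
    struct FormSketch {
        bio: Option<Option<String>>,
    }

    fn main() {
        // A backup without a bio used to produce Some(None), clearing the
        // column; .map(Some) keeps an absent value as None instead.
        let absent: Option<String> = None;
        let present: Option<String> = Some("my bio".into());

        assert_eq!(absent.clone().map(Some), None::<Option<String>>);
        assert_eq!(present.map(Some), Some(Some("my bio".to_string())));

        let form = FormSketch { bio: absent.map(Some) };
        assert!(form.bio.is_none()); // the import no longer wipes the bio
    }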

View File

@@ -20,10 +20,9 @@ use lemmy_db_schema::{
   source::{community::Community, post::Post},
   traits::Crud,
 };
-use lemmy_utils::error::LemmyResult;
+use lemmy_utils::{error::LemmyResult, LemmyErrorType, MAX_COMMENT_DEPTH_LIMIT};
 use serde::{Deserialize, Serialize};
 use serde_with::skip_serializing_none;
-use std::ops::Deref;
 use url::Url;
 
 #[skip_serializing_none]
@@ -58,9 +57,19 @@ impl Note {
     &self,
     context: &Data<LemmyContext>,
   ) -> LemmyResult<(ApubPost, Option<ApubComment>)> {
-    // Fetch parent comment chain in a box, otherwise it can cause a stack overflow.
-    let parent = Box::pin(self.in_reply_to.dereference(context).await?);
-    match parent.deref() {
+    // We use recursion here to fetch the entire comment chain up to the top-level parent. This is
+    // necessary because we need to know the post and parent comment in order to insert a new
+    // comment. However it can also lead to stack overflow when fetching many comments recursively.
+    // To avoid this we check the request count against max comment depth, which based on testing
+    // can be handled without risking stack overflow. This is not a perfect solution, because in
+    // some cases we have to fetch user profiles too, and reach the limit after only 25 comments
+    // or so.
+    // A cleaner solution would be converting the recursion into a loop, but that is tricky.
+    if context.request_count() > MAX_COMMENT_DEPTH_LIMIT as u32 {
+      Err(LemmyErrorType::MaxCommentDepthReached)?;
+    }
+    let parent = self.in_reply_to.dereference(context).await?;
+    match parent {
       PostOrComment::Post(p) => Ok((p.clone(), None)),
       PostOrComment::Comment(c) => {
         let post_id = c.post_id;
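
The guard above bounds the recursion by the federation request count rather than by an explicit depth parameter, since every parent fetch runs through the same context. A rough sketch of the underlying pattern, with a toy Comment type standing in for the real ApubComment machinery:

    const MAX_COMMENT_DEPTH_LIMIT: usize = 50;

    // Toy stand-in for a federated comment whose parent may itself
    // need to be fetched.
    struct Comment {
        parent: Option<Box<Comment>>,
    }

    // Each recursion level first checks a counter (playing the role of
    // context.request_count()), so a very deep or hostile chain returns
    // an error instead of overflowing the stack.
    fn resolve_chain(comment: &Comment, depth: usize) -> Result<usize, &'static str> {
        if depth > MAX_COMMENT_DEPTH_LIMIT {
            return Err("MaxCommentDepthReached");
        }
        match &comment.parent {
            None => Ok(depth),
            Some(parent) => resolve_chain(parent, depth + 1),
        }
    }

    fn main() {
        // Build a chain of 60 comments; resolution stops at the limit.
        let mut root = Comment { parent: None };
        for _ in 0..60 {
            root = Comment { parent: Some(Box::new(root)) };
        }
        assert!(resolve_chain(&root, 0).is_err());
    }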

View File

@@ -257,9 +257,9 @@ impl Post {
     post::table
       .inner_join(person::table)
       .inner_join(community::table)
-      // find all posts which have scheduled_publish_time that is in the past
+      // find all posts which have scheduled_publish_time that is in the future
      .filter(post::scheduled_publish_time.is_not_null())
-      .filter(coalesce(post::scheduled_publish_time, now()).lt(now()))
+      .filter(coalesce(post::scheduled_publish_time, now()).gt(now()))
       // make sure the post and community are still around
       .filter(not(post::deleted.or(post::removed)))
       .filter(not(community::removed.or(community::deleted)))
@@ -404,6 +404,7 @@ mod tests {
     traits::{Crud, Likeable, Saveable},
     utils::build_db_pool_for_tests,
   };
+  use chrono::DateTime;
   use lemmy_utils::error::LemmyResult;
   use pretty_assertions::assert_eq;
   use serial_test::serial;
@@ -444,6 +445,12 @@
     );
     let inserted_post2 = Post::create(pool, &new_post2).await?;
 
+    let new_scheduled_post = PostInsertForm {
+      scheduled_publish_time: Some(DateTime::from_timestamp_nanos(i64::MAX)),
+      ..PostInsertForm::new("beans".into(), inserted_person.id, inserted_community.id)
+    };
+    let inserted_scheduled_post = Post::create(pool, &new_scheduled_post).await?;
+
     let expected_post = Post {
       id: inserted_post.id,
       name: "A test post".into(),
@@ -513,6 +520,10 @@
     };
     let updated_post = Post::update(pool, inserted_post.id, &new_post_update).await?;
 
+    // Scheduled post count
+    let scheduled_post_count = Post::user_scheduled_post_count(inserted_person.id, pool).await?;
+    assert_eq!(1, scheduled_post_count);
+
     let like_removed = PostLike::remove(pool, inserted_person.id, inserted_post.id).await?;
     assert_eq!(1, like_removed);
     let saved_removed = PostSaved::unsave(pool, &post_saved_form).await?;
@@ -526,10 +537,13 @@
       PostRead::mark_as_unread(pool, inserted_post2.id, inserted_person.id).await?;
     assert_eq!(1, read_removed_2);
 
-    let num_deleted =
-      Post::delete(pool, inserted_post.id).await? + Post::delete(pool, inserted_post2.id).await?;
-    assert_eq!(2, num_deleted);
+    let num_deleted = Post::delete(pool, inserted_post.id).await?
+      + Post::delete(pool, inserted_post2.id).await?
+      + Post::delete(pool, inserted_scheduled_post.id).await?;
+    assert_eq!(3, num_deleted);
     Community::delete(pool, inserted_community.id).await?;
     Person::delete(pool, inserted_person.id).await?;
     Instance::delete(pool, inserted_instance.id).await?;
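
The far-future timestamp in the new test leans on chrono: DateTime::from_timestamp_nanos(i64::MAX) lands in the year 2262, comfortably past now() for the gt filter. A quick check of that assumption (requires a chrono version that provides from_timestamp_nanos, as the test itself does):

    use chrono::{DateTime, Utc};

    fn main() {
        // i64::MAX nanoseconds after the Unix epoch is 2262-04-11, so a
        // post scheduled then always counts as a future scheduled post.
        let t: DateTime<Utc> = DateTime::from_timestamp_nanos(i64::MAX);
        assert!(t > Utc::now());
        println!("{t}");
    }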

View File

@@ -769,7 +769,7 @@ diesel::table! {
         featured_local -> Bool,
         url_content_type -> Nullable<Text>,
         alt_text -> Nullable<Text>,
-        scheduled_publish_time -> Nullable<Timestamptz>
+        scheduled_publish_time -> Nullable<Timestamptz>,
     }
 }

View File

@@ -14,11 +14,12 @@ use serde_with::skip_serializing_none;
 use ts_rs::TS;
 
 #[skip_serializing_none]
-#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
+#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize, Default)]
 #[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))]
 #[cfg_attr(feature = "full", diesel(table_name = local_user))]
 #[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))]
 #[cfg_attr(feature = "full", ts(export))]
+#[serde(default)]
 /// A local user.
 pub struct LocalUser {
   pub id: LocalUserId,
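
Together, the Default derive and #[serde(default)] are what allow the import_partial_backup test above to deserialize a local_user object with most fields missing: any field absent from the JSON falls back to its Default value. A minimal sketch, not the real LocalUser:

    use serde::Deserialize;

    // Hypothetical cut-down LocalUser; #[serde(default)] fills any field
    // missing from the JSON using the Default impl instead of failing.
    #[derive(Default, Deserialize)]
    #[serde(default)]
    struct LocalUserSketch {
        id: i32,
        theme: String,
        show_nsfw: bool,
    }

    fn main() {
        let u: LocalUserSketch = serde_json::from_str(r#"{"theme":"my_theme"}"#).unwrap();
        assert_eq!(u.theme, "my_theme");
        assert_eq!(u.id, 0); // filled from Default
        assert!(!u.show_nsfw); // likewise
    }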

View File

@@ -29,6 +29,8 @@ pub const CACHE_DURATION_FEDERATION: Duration = Duration::from_secs(60);
 pub const CACHE_DURATION_API: Duration = Duration::from_secs(1);
 
+pub const MAX_COMMENT_DEPTH_LIMIT: usize = 50;
+
 #[macro_export]
 macro_rules! location_info {
   () => {