Compare commits


8 Commits

Author SHA1 Message Date
Dessalines
c4e0c24408 Simpler activitypub vote check. 2024-09-30 20:32:09 -04:00
Dessalines
f583041d5f Merge remote-tracking branch 'origin/main' into reject_federated_votes 2024-09-30 20:28:28 -04:00
Nutomic
44dda08b13
Avoid stack overflow when fetching nested comments, reduce max comment depth to 50 (#5009)
* Avoid stack overflow when fetching deeply nested comments

* add test case

* reduce comment depth, add docs

* decrease

* reduce max comment depth to 50

* fmt

* clippy

* cleanup
2024-09-30 20:27:14 -04:00
Nutomic
5115ed4c09
Handle partial settings backup (fixes #4307) (#5063)
* Handle partial settings backup (fixes #4307)

* clippy
2024-09-30 20:21:06 -04:00
Dessalines
f7d881ac78
Adding skip_serializing_none to another OAuth API request. (#5060) 2024-09-27 11:15:44 -04:00
Nutomic
e82f72d3c8
Avoid breaking changes, keep response fields as deprecated (#5058) 2024-09-27 09:23:19 -04:00
Joseph Silva
50ce7961d1
Apply scheduled post limit to future posts instead of past posts, and verify this in test (#5054)
* test scheduled_post_count

* fix syntax error

* fix formatting

* fix argument order

* fix user_scheduled_post_count function
2024-09-27 08:51:10 -04:00
SleeplessOne1917
33cbd95b7e
Add skip_serialize_none to OAuth structs with option fields (#5046)
* Add skip_serialize_none to OAuth structs with option fields

* PR feedback

* Remove serde and ts export from SSO db-only structs
2024-09-26 10:24:51 +02:00
17 changed files with 132 additions and 81 deletions

View File

@@ -90,7 +90,7 @@ lemmy_db_views = { version = "=0.19.6-beta.7", path = "./crates/db_views" }
 lemmy_db_views_actor = { version = "=0.19.6-beta.7", path = "./crates/db_views_actor" }
 lemmy_db_views_moderator = { version = "=0.19.6-beta.7", path = "./crates/db_views_moderator" }
 lemmy_federate = { version = "=0.19.6-beta.7", path = "./crates/federate" }
-activitypub_federation = { version = "0.6.0-alpha1", default-features = false, features = [
+activitypub_federation = { version = "0.6.0-alpha2", default-features = false, features = [
   "actix-web",
 ] }
 diesel = "2.1.6"

View File

@@ -858,3 +858,26 @@ test("Dont send a comment reply to a blocked community", async () => {
   blockRes = await blockCommunity(beta, newCommunityId, false);
   expect(blockRes.blocked).toBe(false);
 });
+
+/// Fetching a deeply nested comment can lead to stack overflow as all parent comments are also
+/// fetched recursively. Ensure that it works properly.
+test("Fetch a deeply nested comment", async () => {
+  let lastComment;
+  for (let i = 0; i < 50; i++) {
+    let commentRes = await createComment(
+      alpha,
+      postOnAlphaRes.post_view.post.id,
+      lastComment?.comment_view.comment.id,
+    );
+    expect(commentRes.comment_view.comment).toBeDefined();
+    lastComment = commentRes;
+  }
+  let betaComment = await resolveComment(
+    beta,
+    lastComment!.comment_view.comment,
+  );
+  expect(betaComment!.comment!.comment).toBeDefined();
+  expect(betaComment?.comment?.post).toBeDefined();
+});

View File

@@ -76,5 +76,7 @@ pub async fn leave_admin(
     admin_oauth_providers: None,
     blocked_urls,
     tagline,
+    taglines: vec![],
+    custom_emojis: vec![],
   }))
 }

View File

@@ -5,6 +5,7 @@ use serde_with::skip_serializing_none;
 use ts_rs::TS;
 use url::Url;
 
+#[skip_serializing_none]
 #[derive(Debug, Serialize, Deserialize, Clone)]
 #[cfg_attr(feature = "full", derive(TS))]
 #[cfg_attr(feature = "full", ts(export))]
@@ -19,11 +20,12 @@ pub struct CreateOAuthProvider {
   pub client_id: String,
   pub client_secret: String,
   pub scopes: String,
-  pub auto_verify_email: bool,
-  pub account_linking_enabled: bool,
-  pub enabled: bool,
+  pub auto_verify_email: Option<bool>,
+  pub account_linking_enabled: Option<bool>,
+  pub enabled: Option<bool>,
 }
 
+#[skip_serializing_none]
 #[derive(Debug, Serialize, Deserialize, Clone)]
 #[cfg_attr(feature = "full", derive(TS))]
 #[cfg_attr(feature = "full", ts(export))]

View File

@@ -316,6 +316,8 @@ pub struct EditSite {
 /// The response for a site.
 pub struct SiteResponse {
   pub site_view: SiteView,
+  /// deprecated, use field `tagline` or /api/v3/tagline/list
+  pub taglines: Vec<()>,
 }
 
 #[skip_serializing_none]
@@ -330,6 +332,10 @@ pub struct GetSiteResponse {
   pub my_user: Option<MyUserInfo>,
   pub all_languages: Vec<Language>,
   pub discussion_languages: Vec<LanguageId>,
+  /// deprecated, use field `tagline` or /api/v3/tagline/list
+  pub taglines: Vec<()>,
+  /// deprecated, use /api/v3/custom_emoji/list
+  pub custom_emojis: Vec<()>,
   /// If the site has any taglines, a random one is included here for displaying
   pub tagline: Option<Tagline>,
   /// A list of external auth methods your site supports.

View File

@@ -30,10 +30,9 @@ use lemmy_db_views::structs::{LocalUserView, PostView};
 use lemmy_utils::{
   error::{LemmyErrorExt, LemmyErrorType, LemmyResult},
   utils::{mention::scrape_text_for_mentions, validation::is_valid_body_field},
+  MAX_COMMENT_DEPTH_LIMIT,
 };
 
-const MAX_COMMENT_DEPTH_LIMIT: usize = 100;
-
 #[tracing::instrument(skip(context))]
 pub async fn create_comment(
   data: Json<CreateComment>,

View File

@@ -142,7 +142,10 @@ pub async fn create_site(
     local_site_rate_limit_to_rate_limit_config(&site_view.local_site_rate_limit);
   context.rate_limit_cell().set_config(rate_limit_config);
 
-  Ok(Json(SiteResponse { site_view }))
+  Ok(Json(SiteResponse {
+    site_view,
+    taglines: vec![],
+  }))
 }
 
 fn validate_create_payload(local_site: &LocalSite, create_site: &CreateSite) -> LemmyResult<()> {

View File

@@ -59,6 +59,8 @@ pub async fn get_site(
         tagline,
         oauth_providers: Some(oauth_providers),
         admin_oauth_providers: Some(admin_oauth_providers),
+        taglines: vec![],
+        custom_emojis: vec![],
       })
     })
     .await

View File

@@ -196,7 +196,10 @@ pub async fn update_site(
     local_site_rate_limit_to_rate_limit_config(&site_view.local_site_rate_limit);
   context.rate_limit_cell().set_config(rate_limit_config);
 
-  Ok(Json(SiteResponse { site_view }))
+  Ok(Json(SiteResponse {
+    site_view,
+    taglines: vec![],
+  }))
 }
 
 fn validate_update_payload(local_site: &LocalSite, edit_site: &EditSite) -> LemmyResult<()> {

View File

@@ -79,10 +79,8 @@ impl ActivityHandler for Vote {
     };
 
     // Don't allow dislikes for either disabled, or local only votes
-    let downvote_fail = self.kind == VoteType::Dislike
-      && [FederationMode::Disable, FederationMode::Local].contains(&downvote_setting);
-    let upvote_fail = self.kind == VoteType::Like
-      && [FederationMode::Disable, FederationMode::Local].contains(&upvote_setting);
+    let downvote_fail = self.kind == VoteType::Dislike && downvote_setting != FederationMode::All;
+    let upvote_fail = self.kind == VoteType::Like && upvote_setting != FederationMode::All;
     if downvote_fail || upvote_fail {
       // If this is a rejection, undo the vote
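The simplification above relies on FederationMode having exactly three variants (All, Local, Disable), so "Disable or Local" is the same set as "not All". A minimal standalone sketch of that equivalence (variant names come from the diff; the helper function is hypothetical):

#[derive(PartialEq)]
enum FederationMode {
    All,
    Local,
    Disable,
}

// Hypothetical helper: with only three variants, checking membership in
// [Disable, Local] is equivalent to checking the value is not All.
fn vote_rejected(setting: FederationMode) -> bool {
    setting != FederationMode::All
}

fn main() {
    assert!(!vote_rejected(FederationMode::All));
    assert!(vote_rejected(FederationMode::Local));
    assert!(vote_rejected(FederationMode::Disable));
}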

View File

@@ -103,13 +103,16 @@ pub async fn import_settings(
   context: Data<LemmyContext>,
 ) -> LemmyResult<Json<SuccessResponse>> {
   let person_form = PersonUpdateForm {
-    display_name: Some(data.display_name.clone()),
-    bio: Some(data.bio.clone()),
-    matrix_user_id: Some(data.matrix_id.clone()),
+    display_name: data.display_name.clone().map(Some),
+    bio: data.bio.clone().map(Some),
+    matrix_user_id: data.matrix_id.clone().map(Some),
     bot_account: data.bot_account,
     ..Default::default()
   };
-  Person::update(&mut context.pool(), local_user_view.person.id, &person_form).await?;
+  // ignore error in case form is empty
+  Person::update(&mut context.pool(), local_user_view.person.id, &person_form)
+    .await
+    .ok();
 
   let local_user_form = LocalUserUpdateForm {
     show_nsfw: data.settings.as_ref().map(|s| s.show_nsfw),
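The .map(Some) calls above reflect the nested-Option convention assumed for diesel update forms: the outer Option means "leave the column unchanged" and the inner Option means "set or clear it", so a field missing from the backup no longer wipes the existing value. A minimal standalone sketch of that convention (the helper is illustrative, not the real PersonUpdateForm):

// Illustrative double-Option update: None = keep the current value,
// Some(None) = clear the column, Some(Some(v)) = overwrite it with v.
fn apply(current: &mut Option<String>, update: Option<Option<String>>) {
    if let Some(new_value) = update {
        *current = new_value;
    }
}

fn main() {
    let mut bio = Some("existing bio".to_string());

    // Field missing from the backup: maps to None, so nothing is overwritten.
    let from_backup: Option<String> = None;
    apply(&mut bio, from_backup.map(Some));
    assert_eq!(bio.as_deref(), Some("existing bio"));

    // Field present in the backup: overwrites the stored value.
    apply(&mut bio, Some("imported bio".to_string()).map(Some));
    assert_eq!(bio.as_deref(), Some("imported bio"));
}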
@@ -312,8 +315,9 @@ where
 #[expect(clippy::indexing_slicing)]
 mod tests {
-  use crate::api::user_settings_backup::{export_settings, import_settings, UserSettingsBackup};
+  use crate::api::user_settings_backup::{export_settings, import_settings};
   use activitypub_federation::config::Data;
+  use actix_web::web::Json;
   use lemmy_api_common::context::LemmyContext;
   use lemmy_db_schema::{
     source::{
@@ -401,45 +405,6 @@ mod tests {
     Ok(())
   }
 
-  #[tokio::test]
-  #[serial]
-  async fn test_settings_partial_import() -> LemmyResult<()> {
-    let context = LemmyContext::init_test_context().await;
-
-    let export_user =
-      create_user("hanna".to_string(), Some("my bio".to_string()), &context).await?;
-
-    let community_form = CommunityInsertForm::new(
-      export_user.person.instance_id,
-      "testcom".to_string(),
-      "testcom".to_string(),
-      "pubkey".to_string(),
-    );
-    let community = Community::create(&mut context.pool(), &community_form).await?;
-    let follower_form = CommunityFollowerForm {
-      community_id: community.id,
-      person_id: export_user.person.id,
-      pending: false,
-    };
-    CommunityFollower::follow(&mut context.pool(), &follower_form).await?;
-
-    let backup = export_settings(export_user.clone(), context.reset_request_count()).await?;
-
-    let import_user = create_user("charles".to_string(), None, &context).await?;
-
-    let backup2 = UserSettingsBackup {
-      followed_communities: backup.followed_communities.clone(),
-      ..Default::default()
-    };
-    import_settings(
-      actix_web::web::Json(backup2),
-      import_user.clone(),
-      context.reset_request_count(),
-    )
-    .await?;
-
-    Ok(())
-  }
-
   #[tokio::test]
   #[serial]
   async fn disallow_large_backup() -> LemmyResult<()> {
@@ -475,4 +440,33 @@ mod tests {
     LocalUser::delete(&mut context.pool(), import_user.local_user.id).await?;
     Ok(())
   }
+
+  #[tokio::test]
+  #[serial]
+  async fn import_partial_backup() -> LemmyResult<()> {
+    let context = LemmyContext::init_test_context().await;
+    let import_user =
+      create_user("hanna".to_string(), Some("my bio".to_string()), &context).await?;
+
+    let backup =
+      serde_json::from_str("{\"bot_account\": true, \"settings\": {\"theme\": \"my_theme\"}}")?;
+    import_settings(
+      Json(backup),
+      import_user.clone(),
+      context.reset_request_count(),
+    )
+    .await?;
+
+    let import_user_updated =
+      LocalUserView::read(&mut context.pool(), import_user.local_user.id).await?;
+    // mark as bot account
+    assert!(import_user_updated.person.bot_account);
+    // dont remove existing bio
+    assert_eq!(import_user.person.bio, import_user_updated.person.bio);
+    // local_user can be deserialized without id/person_id fields
+    assert_eq!("my_theme", import_user_updated.local_user.theme);
+    Ok(())
+  }
 }

View File

@@ -20,10 +20,9 @@ use lemmy_db_schema::{
   source::{community::Community, post::Post},
   traits::Crud,
 };
-use lemmy_utils::error::LemmyResult;
+use lemmy_utils::{error::LemmyResult, LemmyErrorType, MAX_COMMENT_DEPTH_LIMIT};
 use serde::{Deserialize, Serialize};
 use serde_with::skip_serializing_none;
-use std::ops::Deref;
 use url::Url;
 
 #[skip_serializing_none]
@@ -58,9 +57,19 @@ impl Note {
     &self,
     context: &Data<LemmyContext>,
   ) -> LemmyResult<(ApubPost, Option<ApubComment>)> {
-    // Fetch parent comment chain in a box, otherwise it can cause a stack overflow.
-    let parent = Box::pin(self.in_reply_to.dereference(context).await?);
-    match parent.deref() {
+    // We use recursion here to fetch the entire comment chain up to the top-level parent. This is
+    // necessary because we need to know the post and parent comment in order to insert a new
+    // comment. However it can also lead to stack overflow when fetching many comments recursively.
+    // To avoid this we check the request count against max comment depth, which based on testing
+    // can be handled without risking stack overflow. This is not a perfect solution, because in
+    // some cases we have to fetch user profiles too, and reach the limit after only 25 comments
+    // or so.
+    // A cleaner solution would be converting the recursion into a loop, but that is tricky.
+    if context.request_count() > MAX_COMMENT_DEPTH_LIMIT as u32 {
+      Err(LemmyErrorType::MaxCommentDepthReached)?;
+    }
+    let parent = self.in_reply_to.dereference(context).await?;
+    match parent {
       PostOrComment::Post(p) => Ok((p.clone(), None)),
       PostOrComment::Comment(c) => {
         let post_id = c.post_id;
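The comment block above describes bounding the recursive parent fetch by the request count instead of restructuring it as a loop. A minimal standalone sketch of that bounded-recursion idea, assuming an in-memory parent chain rather than federation fetches:

// Minimal sketch: the depth parameter stands in for the federation request
// count checked in the diff above.
const MAX_COMMENT_DEPTH_LIMIT: usize = 50;

fn fetch_parent_chain(parents: &[Option<usize>], current: usize, depth: usize) -> Result<usize, String> {
    if depth > MAX_COMMENT_DEPTH_LIMIT {
        return Err("MaxCommentDepthReached".to_string());
    }
    match parents[current] {
        // No parent: this is a top-level comment on the post.
        None => Ok(current),
        // Recurse one level up the chain, counting the extra fetch.
        Some(parent) => fetch_parent_chain(parents, parent, depth + 1),
    }
}

fn main() {
    // comment 3 replies to 2, which replies to 1, which replies to 0 (top level)
    let parents = [None, Some(0), Some(1), Some(2)];
    assert_eq!(fetch_parent_chain(&parents, 3, 0), Ok(0));
}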

View File

@@ -258,9 +258,9 @@ impl Post {
     post::table
       .inner_join(person::table)
       .inner_join(community::table)
-      // find all posts which have scheduled_publish_time that is in the past
+      // find all posts which have scheduled_publish_time that is in the future
      .filter(post::scheduled_publish_time.is_not_null())
-      .filter(coalesce(post::scheduled_publish_time, now()).lt(now()))
+      .filter(coalesce(post::scheduled_publish_time, now()).gt(now()))
       // make sure the post and community are still around
       .filter(not(post::deleted.or(post::removed)))
       .filter(not(community::removed.or(community::deleted)))
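The corrected filter above counts posts whose scheduled_publish_time is set and still in the future; the previous .lt(now()) matched already-published posts instead. A minimal standalone sketch of the intended predicate, assuming plain timestamps instead of diesel expressions:

// Minimal sketch of the corrected filter: a post only counts as "scheduled"
// if its scheduled_publish_time is set and still in the future.
fn is_scheduled(scheduled_publish_time: Option<i64>, now: i64) -> bool {
    match scheduled_publish_time {
        Some(t) => t > now, // .gt(now()) - previously .lt(now()), which matched past posts
        None => false,      // excluded by the is_not_null() filter
    }
}

fn main() {
    let now = 1_700_000_000;
    assert!(is_scheduled(Some(now + 3600), now)); // scheduled an hour from now
    assert!(!is_scheduled(Some(now - 3600), now)); // already published
    assert!(!is_scheduled(None, now)); // not a scheduled post
}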
@@ -414,6 +414,7 @@ mod tests {
     traits::{Crud, Likeable, Saveable},
     utils::build_db_pool_for_tests,
   };
+  use chrono::DateTime;
   use pretty_assertions::assert_eq;
   use serial_test::serial;
   use std::collections::HashSet;
@@ -456,6 +457,12 @@
     );
     let inserted_post2 = Post::create(pool, &new_post2).await.unwrap();
 
+    let new_scheduled_post = PostInsertForm {
+      scheduled_publish_time: Some(DateTime::from_timestamp_nanos(i64::MAX)),
+      ..PostInsertForm::new("beans".into(), inserted_person.id, inserted_community.id)
+    };
+    let inserted_scheduled_post = Post::create(pool, &new_scheduled_post).await.unwrap();
+
     let expected_post = Post {
       id: inserted_post.id,
       name: "A test post".into(),
@@ -535,6 +542,12 @@
       .await
       .unwrap();
 
+    // Scheduled post count
+    let scheduled_post_count = Post::user_scheduled_post_count(inserted_person.id, pool)
+      .await
+      .unwrap();
+    assert_eq!(1, scheduled_post_count);
+
     let like_removed = PostLike::remove(pool, inserted_person.id, inserted_post.id)
       .await
       .unwrap();
@@ -551,8 +564,11 @@
     assert_eq!(2, read_removed);
 
     let num_deleted = Post::delete(pool, inserted_post.id).await.unwrap()
-      + Post::delete(pool, inserted_post2.id).await.unwrap();
-    assert_eq!(2, num_deleted);
+      + Post::delete(pool, inserted_post2.id).await.unwrap()
+      + Post::delete(pool, inserted_scheduled_post.id)
+        .await
+        .unwrap();
+    assert_eq!(3, num_deleted);
     Community::delete(pool, inserted_community.id)
       .await
       .unwrap();

View File

@@ -14,11 +14,12 @@ use serde_with::skip_serializing_none;
 use ts_rs::TS;
 
 #[skip_serializing_none]
-#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
+#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize, Default)]
 #[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))]
 #[cfg_attr(feature = "full", diesel(table_name = local_user))]
 #[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))]
 #[cfg_attr(feature = "full", ts(export))]
+#[serde(default)]
 /// A local user.
 pub struct LocalUser {
   pub id: LocalUserId,
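Deriving Default and adding #[serde(default)] above is what allows a partial settings backup to deserialize into LocalUser even when fields like id or person_id are missing, as the new import_partial_backup test checks. A minimal standalone sketch of that behaviour (field names are a small illustrative subset):

use serde::Deserialize;

// Missing fields fall back to their Default values instead of failing to parse.
#[derive(Debug, Default, Deserialize, PartialEq)]
#[serde(default)]
struct LocalUserSketch {
    id: i32,
    theme: String,
    show_nsfw: bool,
}

fn main() {
    // "id" and "show_nsfw" are absent, so they take the Default values.
    let partial: LocalUserSketch = serde_json::from_str(r#"{"theme": "my_theme"}"#).unwrap();
    assert_eq!(partial.theme, "my_theme");
    assert_eq!(partial.id, 0);
    assert!(!partial.show_nsfw);
}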

View File

@@ -87,39 +87,30 @@ impl Serialize for PublicOAuthProvider {
 }
 
 #[derive(Debug, Clone)]
-#[cfg_attr(feature = "full", derive(Insertable, AsChangeset, TS))]
+#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
 #[cfg_attr(feature = "full", diesel(table_name = oauth_provider))]
-#[cfg_attr(feature = "full", ts(export))]
 pub struct OAuthProviderInsertForm {
   pub display_name: String,
-  #[cfg_attr(feature = "full", ts(type = "string"))]
   pub issuer: DbUrl,
-  #[cfg_attr(feature = "full", ts(type = "string"))]
   pub authorization_endpoint: DbUrl,
-  #[cfg_attr(feature = "full", ts(type = "string"))]
   pub token_endpoint: DbUrl,
-  #[cfg_attr(feature = "full", ts(type = "string"))]
   pub userinfo_endpoint: DbUrl,
   pub id_claim: String,
   pub client_id: String,
   pub client_secret: String,
   pub scopes: String,
-  pub auto_verify_email: bool,
-  pub account_linking_enabled: bool,
-  pub enabled: bool,
+  pub auto_verify_email: Option<bool>,
+  pub account_linking_enabled: Option<bool>,
+  pub enabled: Option<bool>,
 }
 
 #[derive(Debug, Clone)]
-#[cfg_attr(feature = "full", derive(Insertable, AsChangeset, TS))]
+#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
 #[cfg_attr(feature = "full", diesel(table_name = oauth_provider))]
-#[cfg_attr(feature = "full", ts(export))]
 pub struct OAuthProviderUpdateForm {
   pub display_name: Option<String>,
-  #[cfg_attr(feature = "full", ts(type = "string"))]
   pub authorization_endpoint: Option<DbUrl>,
-  #[cfg_attr(feature = "full", ts(type = "string"))]
   pub token_endpoint: Option<DbUrl>,
-  #[cfg_attr(feature = "full", ts(type = "string"))]
   pub userinfo_endpoint: Option<DbUrl>,
   pub id_claim: Option<String>,
   pub client_secret: Option<String>,

View File

@@ -29,6 +29,8 @@ pub const CACHE_DURATION_FEDERATION: Duration = Duration::from_secs(60);
 pub const CACHE_DURATION_API: Duration = Duration::from_secs(1);
 
+pub const MAX_COMMENT_DEPTH_LIMIT: usize = 50;
+
 #[macro_export]
 macro_rules! location_info {
   () => {

View File

@@ -14,7 +14,7 @@ CREATE TABLE oauth_provider (
     scopes text NOT NULL,
     auto_verify_email boolean DEFAULT TRUE NOT NULL,
     account_linking_enabled boolean DEFAULT FALSE NOT NULL,
-    enabled boolean DEFAULT FALSE NOT NULL,
+    enabled boolean DEFAULT TRUE NOT NULL,
     published timestamp with time zone DEFAULT now() NOT NULL,
     updated timestamp with time zone
 );