Compare commits


16 Commits

Author SHA1 Message Date
Dessalines
9ca78e1bc9
Merge ca8458a7db into 777992e829 2024-09-30 22:11:53 -04:00
renovate[bot]
777992e829
Update Rust crate reqwest to v0.12.8 (#5068)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-09-30 22:04:24 -04:00
renovate[bot]
0fcbd25ad5
Update Rust crate clap to v4.5.18 (#5066)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-09-30 21:42:11 -04:00
Dessalines
ca8458a7db Merge remote-tracking branch 'origin/main' into remove_success_responses 2024-09-30 20:58:40 -04:00
Nutomic
44dda08b13
Avoid stack overflow when fetching nested comments, reduce max comment depth to 50 (#5009)
* Avoid stack overflow when fetching deeply nested comments

* add test case

* reduce comment depth, add docs

* decrease

* reduce max comment depth to 50

* fmt

* clippy

* cleanup
2024-09-30 20:27:14 -04:00
Nutomic
5115ed4c09
Handle partial settings backup (fixes #4307) (#5063)
* Handle partial settings backup (fixes #4307)

* clippy
2024-09-30 20:21:06 -04:00
Dessalines
f7d881ac78
Adding skip_serializing_none to another OAuth API request. (#5060) 2024-09-27 11:15:44 -04:00
Nutomic
e82f72d3c8
Avoid breaking changes, keep response fields as deprecated (#5058) 2024-09-27 09:23:19 -04:00
Joseph Silva
50ce7961d1
Apply scheduled post limit to future posts instead of past posts, and verify this in test (#5054)
* test scheduled_post_count

* fix syntax error

* fix formatting

* fix argument order

* fix user_scheduled_post_count function
2024-09-27 08:51:10 -04:00
Dessalines
a5eaad7afd Merge remote-tracking branch 'origin/main' into remove_success_responses 2024-09-27 08:49:50 -04:00
SleeplessOne1917
33cbd95b7e
Add skip_serialize_none to OAuth structs with option fields (#5046)
* Add skip_serialize_none to OAuth structs with option fields

* PR feedback

* Remove serde and ts export from SSO db-only structs
2024-09-26 10:24:51 +02:00
Nutomic
f6a24e133a
Replace clippy allow annotation with expect (fixes #5012) (#5048) 2024-09-24 13:29:02 -04:00
Nutomic
61a02482ff
Cleanup remaining use of Result<bool, Error> (fixes #4862) (#5047) 2024-09-24 13:25:33 -04:00
Dessalines
0fab5bed24
Add ability to search for Community by its description (or title only). (#5044)
- This changes the post_title_only for Search to title_only, since it's
  also used in the community query now.
- Fixes #4785
2024-09-24 13:24:28 -04:00
Dessalines
622ce6c976 Fixing federation tests. 2024-09-23 15:35:37 -04:00
Dessalines
10079f8295 Removing a few SuccessResponses for PostHide and MarkPostAsRead.
- This also removes the pointless multiple post_ids. These can be done
  as individual calls on the front end anyway.
- Fixes #4755
2024-09-23 12:19:16 -04:00
82 changed files with 336 additions and 378 deletions

Cargo.lock generated
View File

@ -42,7 +42,7 @@ dependencies = [
"pin-project-lite", "pin-project-lite",
"rand", "rand",
"regex", "regex",
"reqwest 0.12.7", "reqwest 0.12.8",
"reqwest-middleware", "reqwest-middleware",
"rsa", "rsa",
"serde", "serde",
@ -839,9 +839,9 @@ dependencies = [
[[package]] [[package]]
name = "clap" name = "clap"
version = "4.5.17" version = "4.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e5a21b8495e732f1b3c364c9949b201ca7bae518c502c80256c96ad79eaf6ac" checksum = "b0956a43b323ac1afaffc053ed5c4b7c1f1800bacd1683c353aabbb752515dd3"
dependencies = [ dependencies = [
"clap_builder", "clap_builder",
"clap_derive", "clap_derive",
@ -849,9 +849,9 @@ dependencies = [
[[package]] [[package]]
name = "clap_builder" name = "clap_builder"
version = "4.5.17" version = "4.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8cf2dd12af7a047ad9d6da2b6b249759a22a7abc0f474c1dae1777afa4b21a73" checksum = "4d72166dd41634086d5803a47eb71ae740e61d84709c36f3c34110173db3961b"
dependencies = [ dependencies = [
"anstream", "anstream",
"anstyle", "anstyle",
@ -861,9 +861,9 @@ dependencies = [
[[package]] [[package]]
name = "clap_derive" name = "clap_derive"
version = "4.5.13" version = "4.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0" checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab"
dependencies = [ dependencies = [
"heck 0.5.0", "heck 0.5.0",
"proc-macro2", "proc-macro2",
@ -1989,7 +1989,7 @@ dependencies = [
"base64 0.22.1", "base64 0.22.1",
"http-signature-normalization", "http-signature-normalization",
"httpdate", "httpdate",
"reqwest 0.12.7", "reqwest 0.12.8",
"reqwest-middleware", "reqwest-middleware",
"sha2", "sha2",
"thiserror", "thiserror",
@ -2514,7 +2514,7 @@ dependencies = [
"moka", "moka",
"pretty_assertions", "pretty_assertions",
"regex", "regex",
"reqwest 0.12.7", "reqwest 0.12.8",
"reqwest-middleware", "reqwest-middleware",
"rosetta-i18n", "rosetta-i18n",
"serde", "serde",
@ -2579,7 +2579,7 @@ dependencies = [
"lemmy_utils", "lemmy_utils",
"moka", "moka",
"pretty_assertions", "pretty_assertions",
"reqwest 0.12.7", "reqwest 0.12.8",
"serde", "serde",
"serde_json", "serde_json",
"serde_with", "serde_with",
@ -2718,7 +2718,7 @@ dependencies = [
"lemmy_utils", "lemmy_utils",
"mockall", "mockall",
"moka", "moka",
"reqwest 0.12.7", "reqwest 0.12.8",
"serde_json", "serde_json",
"serial_test", "serial_test",
"test-context", "test-context",
@ -2745,7 +2745,7 @@ dependencies = [
"lemmy_db_views", "lemmy_db_views",
"lemmy_db_views_actor", "lemmy_db_views_actor",
"lemmy_utils", "lemmy_utils",
"reqwest 0.12.7", "reqwest 0.12.8",
"reqwest-middleware", "reqwest-middleware",
"rss", "rss",
"serde", "serde",
@ -2778,7 +2778,7 @@ dependencies = [
"lemmy_utils", "lemmy_utils",
"pretty_assertions", "pretty_assertions",
"prometheus", "prometheus",
"reqwest 0.12.7", "reqwest 0.12.8",
"reqwest-middleware", "reqwest-middleware",
"reqwest-tracing", "reqwest-tracing",
"rustls 0.23.13", "rustls 0.23.13",
@ -2811,7 +2811,7 @@ dependencies = [
"markdown-it", "markdown-it",
"pretty_assertions", "pretty_assertions",
"regex", "regex",
"reqwest 0.12.7", "reqwest 0.12.8",
"reqwest-middleware", "reqwest-middleware",
"rosetta-build", "rosetta-build",
"rosetta-i18n", "rosetta-i18n",
@ -2870,7 +2870,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"windows-targets 0.52.6", "windows-targets 0.48.5",
] ]
[[package]] [[package]]
@ -3966,9 +3966,9 @@ dependencies = [
[[package]] [[package]]
name = "reqwest" name = "reqwest"
version = "0.12.7" version = "0.12.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8f4955649ef5c38cc7f9e8aa41761d48fb9677197daea9984dc54f56aad5e63" checksum = "f713147fbe92361e52392c73b8c9e48c04c6625bce969ef54dc901e58e042a7b"
dependencies = [ dependencies = [
"async-compression", "async-compression",
"base64 0.22.1", "base64 0.22.1",
@ -4019,7 +4019,7 @@ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
"http 1.1.0", "http 1.1.0",
"reqwest 0.12.7", "reqwest 0.12.8",
"serde", "serde",
"thiserror", "thiserror",
"tower-service", "tower-service",
@ -4036,7 +4036,7 @@ dependencies = [
"getrandom", "getrandom",
"http 1.1.0", "http 1.1.0",
"matchit", "matchit",
"reqwest 0.12.7", "reqwest 0.12.8",
"reqwest-middleware", "reqwest-middleware",
"tracing", "tracing",
] ]
@ -5579,7 +5579,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [ dependencies = [
"windows-sys 0.59.0", "windows-sys 0.48.0",
] ]
[[package]] [[package]]

View File

@ -90,7 +90,7 @@ lemmy_db_views = { version = "=0.19.6-beta.7", path = "./crates/db_views" }
lemmy_db_views_actor = { version = "=0.19.6-beta.7", path = "./crates/db_views_actor" } lemmy_db_views_actor = { version = "=0.19.6-beta.7", path = "./crates/db_views_actor" }
lemmy_db_views_moderator = { version = "=0.19.6-beta.7", path = "./crates/db_views_moderator" } lemmy_db_views_moderator = { version = "=0.19.6-beta.7", path = "./crates/db_views_moderator" }
lemmy_federate = { version = "=0.19.6-beta.7", path = "./crates/federate" } lemmy_federate = { version = "=0.19.6-beta.7", path = "./crates/federate" }
activitypub_federation = { version = "0.6.0-alpha1", default-features = false, features = [ activitypub_federation = { version = "0.6.0-alpha2", default-features = false, features = [
"actix-web", "actix-web",
] } ] }
diesel = "2.1.6" diesel = "2.1.6"

View File

@ -27,7 +27,7 @@
"eslint": "^9.9.0", "eslint": "^9.9.0",
"eslint-plugin-prettier": "^5.1.3", "eslint-plugin-prettier": "^5.1.3",
"jest": "^29.5.0", "jest": "^29.5.0",
"lemmy-js-client": "0.20.0-alpha.11", "lemmy-js-client": "0.20.0-alpha.12",
"prettier": "^3.2.5", "prettier": "^3.2.5",
"ts-jest": "^29.1.0", "ts-jest": "^29.1.0",
"typescript": "^5.5.4", "typescript": "^5.5.4",

View File

@ -30,8 +30,8 @@ importers:
specifier: ^29.5.0 specifier: ^29.5.0
version: 29.7.0(@types/node@22.3.0) version: 29.7.0(@types/node@22.3.0)
lemmy-js-client: lemmy-js-client:
specifier: 0.20.0-alpha.11 specifier: 0.20.0-alpha.12
version: 0.20.0-alpha.11 version: 0.20.0-alpha.12
prettier: prettier:
specifier: ^3.2.5 specifier: ^3.2.5
version: 3.3.3 version: 3.3.3
@ -1175,8 +1175,8 @@ packages:
resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==}
engines: {node: '>=6'} engines: {node: '>=6'}
lemmy-js-client@0.20.0-alpha.11: lemmy-js-client@0.20.0-alpha.12:
resolution: {integrity: sha512-iRSG4xHMjPDIreQqVIoJ5JrMY71uk07G0Zbgyf068xKbib22J3+i1x/XgCTs6tiHlqTnw1Ig/KRq7p7qJoA4uw==} resolution: {integrity: sha512-+nknIpFAT25TnhObvPPCI0JhDxmTSfg3jbNm7f4RnwidRskjS8SraNFD4bXFfHf14lu61ZEiVe58+UhhRe2UdA==}
leven@3.1.0: leven@3.1.0:
resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==}
@ -3107,7 +3107,7 @@ snapshots:
kleur@3.0.3: {} kleur@3.0.3: {}
lemmy-js-client@0.20.0-alpha.11: {} lemmy-js-client@0.20.0-alpha.12: {}
leven@3.1.0: {} leven@3.1.0: {}

View File

@ -858,3 +858,26 @@ test("Dont send a comment reply to a blocked community", async () => {
blockRes = await blockCommunity(beta, newCommunityId, false); blockRes = await blockCommunity(beta, newCommunityId, false);
expect(blockRes.blocked).toBe(false); expect(blockRes.blocked).toBe(false);
}); });
/// Fetching a deeply nested comment can lead to stack overflow as all parent comments are also
/// fetched recursively. Ensure that it works properly.
test("Fetch a deeply nested comment", async () => {
let lastComment;
for (let i = 0; i < 50; i++) {
let commentRes = await createComment(
alpha,
postOnAlphaRes.post_view.post.id,
lastComment?.comment_view.comment.id,
);
expect(commentRes.comment_view.comment).toBeDefined();
lastComment = commentRes;
}
let betaComment = await resolveComment(
beta,
lastComment!.comment_view.comment,
);
expect(betaComment!.comment!.comment).toBeDefined();
expect(betaComment?.comment?.post).toBeDefined();
});

View File

@ -265,8 +265,6 @@ pub async fn local_user_view_from_jwt(
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use super::*; use super::*;

View File

@ -1,34 +1,39 @@
use actix_web::web::{Data, Json}; use actix_web::web::{Data, Json};
use lemmy_api_common::{context::LemmyContext, post::HidePost, SuccessResponse}; use lemmy_api_common::{
context::LemmyContext,
post::{HidePost, PostResponse},
};
use lemmy_db_schema::source::post::PostHide; use lemmy_db_schema::source::post::PostHide;
use lemmy_db_views::structs::LocalUserView; use lemmy_db_views::structs::{LocalUserView, PostView};
use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult, MAX_API_PARAM_ELEMENTS}; use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult};
use std::collections::HashSet;
#[tracing::instrument(skip(context))] #[tracing::instrument(skip(context))]
pub async fn hide_post( pub async fn hide_post(
data: Json<HidePost>, data: Json<HidePost>,
context: Data<LemmyContext>, context: Data<LemmyContext>,
local_user_view: LocalUserView, local_user_view: LocalUserView,
) -> LemmyResult<Json<SuccessResponse>> { ) -> LemmyResult<Json<PostResponse>> {
let post_ids = HashSet::from_iter(data.post_ids.clone());
if post_ids.len() > MAX_API_PARAM_ELEMENTS {
Err(LemmyErrorType::TooManyItems)?;
}
let person_id = local_user_view.person.id; let person_id = local_user_view.person.id;
let post_id = data.post_id;
// Mark the post as hidden / unhidden // Mark the post as hidden / unhidden
if data.hide { if data.hide {
PostHide::hide(&mut context.pool(), post_ids, person_id) PostHide::hide(&mut context.pool(), post_id, person_id)
.await .await
.with_lemmy_type(LemmyErrorType::CouldntHidePost)?; .with_lemmy_type(LemmyErrorType::CouldntHidePost)?;
} else { } else {
PostHide::unhide(&mut context.pool(), post_ids, person_id) PostHide::unhide(&mut context.pool(), post_id, person_id)
.await .await
.with_lemmy_type(LemmyErrorType::CouldntHidePost)?; .with_lemmy_type(LemmyErrorType::CouldntHidePost)?;
} }
Ok(Json(SuccessResponse::default())) let post_view = PostView::read(
&mut context.pool(),
post_id,
Some(&local_user_view.local_user),
false,
)
.await?;
Ok(Json(PostResponse { post_view }))
} }

View File

@ -1,34 +1,38 @@
use actix_web::web::{Data, Json}; use actix_web::web::{Data, Json};
use lemmy_api_common::{context::LemmyContext, post::MarkPostAsRead, SuccessResponse}; use lemmy_api_common::{
context::LemmyContext,
post::{MarkPostAsRead, PostResponse},
};
use lemmy_db_schema::source::post::PostRead; use lemmy_db_schema::source::post::PostRead;
use lemmy_db_views::structs::LocalUserView; use lemmy_db_views::structs::{LocalUserView, PostView};
use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult, MAX_API_PARAM_ELEMENTS}; use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult};
use std::collections::HashSet;
#[tracing::instrument(skip(context))] #[tracing::instrument(skip(context))]
pub async fn mark_post_as_read( pub async fn mark_post_as_read(
data: Json<MarkPostAsRead>, data: Json<MarkPostAsRead>,
context: Data<LemmyContext>, context: Data<LemmyContext>,
local_user_view: LocalUserView, local_user_view: LocalUserView,
) -> LemmyResult<Json<SuccessResponse>> { ) -> LemmyResult<Json<PostResponse>> {
let post_ids = HashSet::from_iter(data.post_ids.clone());
if post_ids.len() > MAX_API_PARAM_ELEMENTS {
Err(LemmyErrorType::TooManyItems)?;
}
let person_id = local_user_view.person.id; let person_id = local_user_view.person.id;
let post_id = data.post_id;
// Mark the post as read / unread // Mark the post as read / unread
if data.read { if data.read {
PostRead::mark_as_read(&mut context.pool(), post_ids, person_id) PostRead::mark_as_read(&mut context.pool(), post_id, person_id)
.await .await
.with_lemmy_type(LemmyErrorType::CouldntMarkPostAsRead)?; .with_lemmy_type(LemmyErrorType::CouldntMarkPostAsRead)?;
} else { } else {
PostRead::mark_as_unread(&mut context.pool(), post_ids, person_id) PostRead::mark_as_unread(&mut context.pool(), post_id, person_id)
.await .await
.with_lemmy_type(LemmyErrorType::CouldntMarkPostAsRead)?; .with_lemmy_type(LemmyErrorType::CouldntMarkPostAsRead)?;
} }
let post_view = PostView::read(
&mut context.pool(),
post_id,
Some(&local_user_view.local_user),
false,
)
.await?;
Ok(Json(SuccessResponse::default())) Ok(Json(PostResponse { post_view }))
} }

View File

@ -76,5 +76,7 @@ pub async fn leave_admin(
admin_oauth_providers: None, admin_oauth_providers: None,
blocked_urls, blocked_urls,
tagline, tagline,
taglines: vec![],
custom_emojis: vec![],
})) }))
} }

View File

@ -34,7 +34,7 @@ use lemmy_db_views::structs::LocalUserView;
use lemmy_utils::{error::LemmyResult, LemmyErrorType, CACHE_DURATION_API}; use lemmy_utils::{error::LemmyResult, LemmyErrorType, CACHE_DURATION_API};
use serial_test::serial; use serial_test::serial;
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
async fn create_test_site(context: &Data<LemmyContext>) -> LemmyResult<(Instance, LocalUserView)> { async fn create_test_site(context: &Data<LemmyContext>) -> LemmyResult<(Instance, LocalUserView)> {
let pool = &mut context.pool(); let pool = &mut context.pool();
@ -109,7 +109,7 @@ async fn signup(
Ok((local_user, application)) Ok((local_user, application))
} }
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
async fn get_application_statuses( async fn get_application_statuses(
context: &Data<LemmyContext>, context: &Data<LemmyContext>,
admin: LocalUserView, admin: LocalUserView,
@ -138,10 +138,9 @@ async fn get_application_statuses(
Ok((application_count, unread_applications, all_applications)) Ok((application_count, unread_applications, all_applications))
} }
#[allow(clippy::indexing_slicing)]
#[allow(clippy::unwrap_used)]
#[tokio::test]
#[serial] #[serial]
#[tokio::test]
#[expect(clippy::indexing_slicing)]
async fn test_application_approval() -> LemmyResult<()> { async fn test_application_approval() -> LemmyResult<()> {
let context = LemmyContext::init_test_context().await; let context = LemmyContext::init_test_context().await;
let pool = &mut context.pool(); let pool = &mut context.pool();

View File

@ -42,7 +42,7 @@ pub async fn get_sitemap(context: Data<LemmyContext>) -> LemmyResult<HttpRespons
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
pub(crate) mod tests { pub(crate) mod tests {
use crate::sitemap::generate_urlset; use crate::sitemap::generate_urlset;

View File

@ -69,8 +69,7 @@ impl Claims {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{claims::Claims, context::LemmyContext}; use crate::{claims::Claims, context::LemmyContext};

View File

@ -5,6 +5,7 @@ use serde_with::skip_serializing_none;
use ts_rs::TS; use ts_rs::TS;
use url::Url; use url::Url;
#[skip_serializing_none]
#[derive(Debug, Serialize, Deserialize, Clone)] #[derive(Debug, Serialize, Deserialize, Clone)]
#[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", derive(TS))]
#[cfg_attr(feature = "full", ts(export))] #[cfg_attr(feature = "full", ts(export))]
@ -19,11 +20,12 @@ pub struct CreateOAuthProvider {
pub client_id: String, pub client_id: String,
pub client_secret: String, pub client_secret: String,
pub scopes: String, pub scopes: String,
pub auto_verify_email: bool, pub auto_verify_email: Option<bool>,
pub account_linking_enabled: bool, pub account_linking_enabled: Option<bool>,
pub enabled: bool, pub enabled: Option<bool>,
} }
#[skip_serializing_none]
#[derive(Debug, Serialize, Deserialize, Clone)] #[derive(Debug, Serialize, Deserialize, Clone)]
#[cfg_attr(feature = "full", derive(TS))] #[cfg_attr(feature = "full", derive(TS))]
#[cfg_attr(feature = "full", ts(export))] #[cfg_attr(feature = "full", ts(export))]
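For context on the Option flags above: #[skip_serializing_none] comes from the serde_with crate and must sit before the Serialize derive. A minimal sketch with an illustrative stand-in struct (not the real CreateOAuthProvider), showing that None fields are omitted from the JSON rather than serialized as null:

use serde::Serialize;
use serde_with::skip_serializing_none;

// Illustrative stand-in for an OAuth request struct.
#[skip_serializing_none]
#[derive(Serialize)]
struct ProviderRequestSketch {
    display_name: String,
    enabled: Option<bool>,
}

fn main() {
    let json = serde_json::to_string(&ProviderRequestSketch {
        display_name: "example".into(),
        enabled: None,
    })
    .unwrap();
    // The None field is dropped entirely instead of appearing as null.
    assert_eq!(json, r#"{"display_name":"example"}"#);
}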

View File

@ -156,7 +156,7 @@ pub struct RemovePost {
#[cfg_attr(feature = "full", ts(export))] #[cfg_attr(feature = "full", ts(export))]
/// Mark a post as read. /// Mark a post as read.
pub struct MarkPostAsRead { pub struct MarkPostAsRead {
pub post_ids: Vec<PostId>, pub post_id: PostId,
pub read: bool, pub read: bool,
} }
@ -166,7 +166,7 @@ pub struct MarkPostAsRead {
#[cfg_attr(feature = "full", ts(export))] #[cfg_attr(feature = "full", ts(export))]
/// Hide a post from list views /// Hide a post from list views
pub struct HidePost { pub struct HidePost {
pub post_ids: Vec<PostId>, pub post_id: PostId,
pub hide: bool, pub hide: bool,
} }
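A hedged sketch of the reworked request shape from the caller's side (PostId is a bare stand-in newtype here; the real types live in lemmy_api_common and lemmy_db_schema). With a single post_id per request, marking several posts becomes one call per post, and each handler now returns the updated PostView in a PostResponse instead of a bare SuccessResponse:

// Illustrative only.
#[derive(Debug, Clone, Copy)]
struct PostId(i32);

#[derive(Debug)]
struct MarkPostAsRead {
    post_id: PostId,
    read: bool,
}

fn mark_many_as_read(ids: &[PostId]) -> Vec<MarkPostAsRead> {
    // One request per post, as the commit message suggests for the front end.
    ids.iter()
        .map(|&post_id| MarkPostAsRead { post_id, read: true })
        .collect()
}

fn main() {
    let requests = mark_many_as_read(&[PostId(1), PostId(2)]);
    println!("{requests:?}");
}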

View File

@ -471,8 +471,7 @@ pub async fn replace_image(
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{ use crate::{

View File

@ -78,7 +78,7 @@ pub struct Search {
pub listing_type: Option<ListingType>, pub listing_type: Option<ListingType>,
pub page: Option<i64>, pub page: Option<i64>,
pub limit: Option<i64>, pub limit: Option<i64>,
pub post_title_only: Option<bool>, pub title_only: Option<bool>,
pub post_url_only: Option<bool>, pub post_url_only: Option<bool>,
pub saved_only: Option<bool>, pub saved_only: Option<bool>,
pub liked_only: Option<bool>, pub liked_only: Option<bool>,
@ -306,6 +306,8 @@ pub struct EditSite {
/// The response for a site. /// The response for a site.
pub struct SiteResponse { pub struct SiteResponse {
pub site_view: SiteView, pub site_view: SiteView,
/// deprecated, use field `tagline` or /api/v3/tagline/list
pub taglines: Vec<()>,
} }
#[skip_serializing_none] #[skip_serializing_none]
@ -320,6 +322,10 @@ pub struct GetSiteResponse {
pub my_user: Option<MyUserInfo>, pub my_user: Option<MyUserInfo>,
pub all_languages: Vec<Language>, pub all_languages: Vec<Language>,
pub discussion_languages: Vec<LanguageId>, pub discussion_languages: Vec<LanguageId>,
/// deprecated, use field `tagline` or /api/v3/tagline/list
pub taglines: Vec<()>,
/// deprecated, use /api/v3/custom_emoji/list
pub custom_emojis: Vec<()>,
/// If the site has any taglines, a random one is included here for displaying /// If the site has any taglines, a random one is included here for displaying
pub tagline: Option<Tagline>, pub tagline: Option<Tagline>,
/// A list of external auth methods your site supports. /// A list of external auth methods your site supports.

View File

@ -59,7 +59,7 @@ use lemmy_utils::{
use moka::future::Cache; use moka::future::Cache;
use regex::{escape, Regex, RegexSet}; use regex::{escape, Regex, RegexSet};
use rosetta_i18n::{Language, LanguageId}; use rosetta_i18n::{Language, LanguageId};
use std::{collections::HashSet, sync::LazyLock}; use std::sync::LazyLock;
use tracing::warn; use tracing::warn;
use url::{ParseError, Url}; use url::{ParseError, Url};
use urlencoding::encode; use urlencoding::encode;
@ -142,7 +142,7 @@ pub async fn mark_post_as_read(
post_id: PostId, post_id: PostId,
pool: &mut DbPool<'_>, pool: &mut DbPool<'_>,
) -> LemmyResult<()> { ) -> LemmyResult<()> {
PostRead::mark_as_read(pool, HashSet::from([post_id]), person_id) PostRead::mark_as_read(pool, post_id, person_id)
.await .await
.with_lemmy_type(LemmyErrorType::CouldntMarkPostAsRead)?; .with_lemmy_type(LemmyErrorType::CouldntMarkPostAsRead)?;
Ok(()) Ok(())
@ -1067,8 +1067,7 @@ fn build_proxied_image_url(
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use super::*; use super::*;

View File

@ -30,10 +30,9 @@ use lemmy_db_views::structs::{LocalUserView, PostView};
use lemmy_utils::{ use lemmy_utils::{
error::{LemmyErrorExt, LemmyErrorType, LemmyResult}, error::{LemmyErrorExt, LemmyErrorType, LemmyResult},
utils::{mention::scrape_text_for_mentions, validation::is_valid_body_field}, utils::{mention::scrape_text_for_mentions, validation::is_valid_body_field},
MAX_COMMENT_DEPTH_LIMIT,
}; };
const MAX_COMMENT_DEPTH_LIMIT: usize = 100;
#[tracing::instrument(skip(context))] #[tracing::instrument(skip(context))]
pub async fn create_comment( pub async fn create_comment(
data: Json<CreateComment>, data: Json<CreateComment>,

View File

@ -139,7 +139,10 @@ pub async fn create_site(
local_site_rate_limit_to_rate_limit_config(&site_view.local_site_rate_limit); local_site_rate_limit_to_rate_limit_config(&site_view.local_site_rate_limit);
context.rate_limit_cell().set_config(rate_limit_config); context.rate_limit_cell().set_config(rate_limit_config);
Ok(Json(SiteResponse { site_view })) Ok(Json(SiteResponse {
site_view,
taglines: vec![],
}))
} }
fn validate_create_payload(local_site: &LocalSite, create_site: &CreateSite) -> LemmyResult<()> { fn validate_create_payload(local_site: &LocalSite, create_site: &CreateSite) -> LemmyResult<()> {
@ -189,8 +192,6 @@ fn validate_create_payload(local_site: &LocalSite, create_site: &CreateSite) ->
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::site::create::validate_create_payload; use crate::site::create::validate_create_payload;

View File

@ -48,8 +48,6 @@ fn not_zero(val: Option<i32>) -> Option<i32> {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::site::{application_question_check, not_zero, site_default_post_listing_type_check}; use crate::site::{application_question_check, not_zero, site_default_post_listing_type_check};

View File

@ -59,6 +59,8 @@ pub async fn get_site(
tagline, tagline,
oauth_providers: Some(oauth_providers), oauth_providers: Some(oauth_providers),
admin_oauth_providers: Some(admin_oauth_providers), admin_oauth_providers: Some(admin_oauth_providers),
taglines: vec![],
custom_emojis: vec![],
}) })
}) })
.await .await

View File

@ -193,7 +193,10 @@ pub async fn update_site(
local_site_rate_limit_to_rate_limit_config(&site_view.local_site_rate_limit); local_site_rate_limit_to_rate_limit_config(&site_view.local_site_rate_limit);
context.rate_limit_cell().set_config(rate_limit_config); context.rate_limit_cell().set_config(rate_limit_config);
Ok(Json(SiteResponse { site_view })) Ok(Json(SiteResponse {
site_view,
taglines: vec![],
}))
} }
fn validate_update_payload(local_site: &LocalSite, edit_site: &EditSite) -> LemmyResult<()> { fn validate_update_payload(local_site: &LocalSite, edit_site: &EditSite) -> LemmyResult<()> {
@ -241,8 +244,6 @@ fn validate_update_payload(local_site: &LocalSite, edit_site: &EditSite) -> Lemm
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::site::update::validate_update_payload; use crate::site::update::validate_update_payload;

View File

@ -107,9 +107,7 @@ pub async fn register(
check_slurs(&data.username, &slur_regex)?; check_slurs(&data.username, &slur_regex)?;
check_slurs_opt(&data.answer, &slur_regex)?; check_slurs_opt(&data.answer, &slur_regex)?;
if Person::is_username_taken(&mut context.pool(), &data.username).await? { Person::check_username_taken(&mut context.pool(), &data.username).await?;
return Err(LemmyErrorType::UsernameAlreadyExists)?;
}
if let Some(email) = &data.email { if let Some(email) = &data.email {
LocalUser::check_is_email_taken(&mut context.pool(), email).await?; LocalUser::check_is_email_taken(&mut context.pool(), email).await?;
@ -329,9 +327,7 @@ pub async fn authenticate_with_oauth(
check_slurs(username, &slur_regex)?; check_slurs(username, &slur_regex)?;
check_slurs_opt(&data.answer, &slur_regex)?; check_slurs_opt(&data.answer, &slur_regex)?;
if Person::is_username_taken(&mut context.pool(), username).await? { Person::check_username_taken(&mut context.pool(), username).await?;
return Err(LemmyErrorType::UsernameAlreadyExists)?;
}
// We have to create a person, a local_user, and an oauth_account // We have to create a person, a local_user, and an oauth_account
person = create_person( person = create_person(

View File

@ -123,7 +123,6 @@ impl InCommunity for AnnouncableActivities {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{ use crate::{

View File

@ -47,7 +47,7 @@ pub async fn search(
listing_type, listing_type,
page, page,
limit, limit,
post_title_only, title_only,
post_url_only, post_url_only,
saved_only, saved_only,
liked_only, liked_only,
@ -78,7 +78,7 @@ pub async fn search(
search_term: Some(q.clone()), search_term: Some(q.clone()),
page, page,
limit, limit,
title_only: post_title_only, title_only,
url_only: post_url_only, url_only: post_url_only,
liked_only, liked_only,
disliked_only, disliked_only,
@ -105,6 +105,7 @@ pub async fn search(
sort, sort,
listing_type, listing_type,
search_term: Some(q.clone()), search_term: Some(q.clone()),
title_only,
local_user, local_user,
is_mod_or_admin: is_admin, is_mod_or_admin: is_admin,
page, page,

View File

@ -103,13 +103,16 @@ pub async fn import_settings(
context: Data<LemmyContext>, context: Data<LemmyContext>,
) -> LemmyResult<Json<SuccessResponse>> { ) -> LemmyResult<Json<SuccessResponse>> {
let person_form = PersonUpdateForm { let person_form = PersonUpdateForm {
display_name: Some(data.display_name.clone()), display_name: data.display_name.clone().map(Some),
bio: Some(data.bio.clone()), bio: data.bio.clone().map(Some),
matrix_user_id: Some(data.matrix_id.clone()), matrix_user_id: data.bio.clone().map(Some),
bot_account: data.bot_account, bot_account: data.bot_account,
..Default::default() ..Default::default()
}; };
Person::update(&mut context.pool(), local_user_view.person.id, &person_form).await?; // ignore error in case form is empty
Person::update(&mut context.pool(), local_user_view.person.id, &person_form)
.await
.ok();
let local_user_form = LocalUserUpdateForm { let local_user_form = LocalUserUpdateForm {
show_nsfw: data.settings.as_ref().map(|s| s.show_nsfw), show_nsfw: data.settings.as_ref().map(|s| s.show_nsfw),
@ -307,12 +310,14 @@ where
}); });
Ok(failed_items.into_iter().join(",")) Ok(failed_items.into_iter().join(","))
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::indexing_slicing)] #[expect(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::api::user_settings_backup::{export_settings, import_settings, UserSettingsBackup}; use crate::api::user_settings_backup::{export_settings, import_settings};
use activitypub_federation::config::Data; use activitypub_federation::config::Data;
use actix_web::web::Json;
use lemmy_api_common::context::LemmyContext; use lemmy_api_common::context::LemmyContext;
use lemmy_db_schema::{ use lemmy_db_schema::{
source::{ source::{
@ -400,45 +405,6 @@ mod tests {
Ok(()) Ok(())
} }
#[tokio::test]
#[serial]
async fn test_settings_partial_import() -> LemmyResult<()> {
let context = LemmyContext::init_test_context().await;
let export_user =
create_user("hanna".to_string(), Some("my bio".to_string()), &context).await?;
let community_form = CommunityInsertForm::new(
export_user.person.instance_id,
"testcom".to_string(),
"testcom".to_string(),
"pubkey".to_string(),
);
let community = Community::create(&mut context.pool(), &community_form).await?;
let follower_form = CommunityFollowerForm {
community_id: community.id,
person_id: export_user.person.id,
pending: false,
};
CommunityFollower::follow(&mut context.pool(), &follower_form).await?;
let backup = export_settings(export_user.clone(), context.reset_request_count()).await?;
let import_user = create_user("charles".to_string(), None, &context).await?;
let backup2 = UserSettingsBackup {
followed_communities: backup.followed_communities.clone(),
..Default::default()
};
import_settings(
actix_web::web::Json(backup2),
import_user.clone(),
context.reset_request_count(),
)
.await?;
Ok(())
}
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn disallow_large_backup() -> LemmyResult<()> { async fn disallow_large_backup() -> LemmyResult<()> {
@ -474,4 +440,33 @@ mod tests {
LocalUser::delete(&mut context.pool(), import_user.local_user.id).await?; LocalUser::delete(&mut context.pool(), import_user.local_user.id).await?;
Ok(()) Ok(())
} }
#[tokio::test]
#[serial]
async fn import_partial_backup() -> LemmyResult<()> {
let context = LemmyContext::init_test_context().await;
let import_user =
create_user("hanna".to_string(), Some("my bio".to_string()), &context).await?;
let backup =
serde_json::from_str("{\"bot_account\": true, \"settings\": {\"theme\": \"my_theme\"}}")?;
import_settings(
Json(backup),
import_user.clone(),
context.reset_request_count(),
)
.await?;
let import_user_updated =
LocalUserView::read(&mut context.pool(), import_user.local_user.id).await?;
// mark as bot account
assert!(import_user_updated.person.bot_account);
// dont remove existing bio
assert_eq!(import_user.person.bio, import_user_updated.person.bio);
// local_user can be deserialized without id/person_id fields
assert_eq!("my_theme", import_user_updated.local_user.theme);
Ok(())
}
} }
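A small sketch of the partial-import idiom used in import_settings above, with stand-in types rather than the real diesel forms: the outer Option decides whether a column is updated at all, the inner Option is the nullable column value, so .map(Some) only touches fields that were actually present in the backup.

// Illustrative stand-in for a PersonUpdateForm-style double-Option form.
#[derive(Debug, Default)]
struct PersonUpdateSketch {
    display_name: Option<Option<String>>, // None = leave the column alone
    bio: Option<Option<String>>,          // Some(None) would clear it
}

fn from_backup(display_name: Option<String>, bio: Option<String>) -> PersonUpdateSketch {
    PersonUpdateSketch {
        display_name: display_name.map(Some),
        bio: bio.map(Some),
    }
}

fn main() {
    // A backup carrying only a display name leaves the existing bio untouched.
    let update = from_backup(Some("hanna".into()), None);
    assert!(update.display_name.is_some());
    assert!(update.bio.is_none());
    println!("{update:?}");
}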

View File

@ -98,7 +98,7 @@ impl Collection for ApubCommunityModerators {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::indexing_slicing)] #[expect(clippy::indexing_slicing)]
mod tests { mod tests {
use super::*; use super::*;

View File

@ -120,8 +120,7 @@ pub(crate) async fn get_apub_community_featured(
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
pub(crate) mod tests { pub(crate) mod tests {
use super::*; use super::*;

View File

@ -20,10 +20,9 @@ use lemmy_db_schema::{
source::{community::Community, post::Post}, source::{community::Community, post::Post},
traits::Crud, traits::Crud,
}; };
use lemmy_utils::error::LemmyResult; use lemmy_utils::{error::LemmyResult, LemmyErrorType, MAX_COMMENT_DEPTH_LIMIT};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none; use serde_with::skip_serializing_none;
use std::ops::Deref;
use url::Url; use url::Url;
#[skip_serializing_none] #[skip_serializing_none]
@ -58,9 +57,19 @@ impl Note {
&self, &self,
context: &Data<LemmyContext>, context: &Data<LemmyContext>,
) -> LemmyResult<(ApubPost, Option<ApubComment>)> { ) -> LemmyResult<(ApubPost, Option<ApubComment>)> {
// Fetch parent comment chain in a box, otherwise it can cause a stack overflow. // We use recursion here to fetch the entire comment chain up to the top-level parent. This is
let parent = Box::pin(self.in_reply_to.dereference(context).await?); // necessary because we need to know the post and parent comment in order to insert a new
match parent.deref() { // comment. However it can also lead to stack overflow when fetching many comments recursively.
// To avoid this we check the request count against max comment depth, which based on testing
// can be handled without risking stack overflow. This is not a perfect solution, because in
// some cases we have to fetch user profiles too, and reach the limit after only 25 comments
// or so.
// A cleaner solution would be converting the recursion into a loop, but that is tricky.
if context.request_count() > MAX_COMMENT_DEPTH_LIMIT as u32 {
Err(LemmyErrorType::MaxCommentDepthReached)?;
}
let parent = self.in_reply_to.dereference(context).await?;
match parent {
PostOrComment::Post(p) => Ok((p.clone(), None)), PostOrComment::Post(p) => Ok((p.clone(), None)),
PostOrComment::Comment(c) => { PostOrComment::Comment(c) => {
let post_id = c.post_id; let post_id = c.post_id;

View File

@ -75,7 +75,7 @@ impl<S: ValidGrouping<(), IsAggregate = is_aggregate::No>> ValidGrouping<()>
type IsAggregate = is_aggregate::No; type IsAggregate = is_aggregate::No;
} }
#[allow(non_camel_case_types)] #[expect(non_camel_case_types)]
#[derive(QueryId, Clone, Copy, Debug)] #[derive(QueryId, Clone, Copy, Debug)]
pub struct current_value; pub struct current_value;

View File

@ -30,8 +30,7 @@ impl CommentAggregates {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{ use crate::{

View File

@ -36,8 +36,7 @@ impl CommunityAggregates {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{ use crate::{

View File

@ -20,8 +20,7 @@ impl PersonAggregates {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{ use crate::{

View File

@ -49,8 +49,8 @@ impl PostAggregates {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{ use crate::{

View File

@ -15,8 +15,8 @@ impl SiteAggregates {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{ use crate::{

View File

@ -58,8 +58,7 @@ impl ReceivedActivity {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use super::*; use super::*;

View File

@ -392,8 +392,8 @@ async fn convert_read_languages(
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)] #[expect(clippy::indexing_slicing)]
mod tests { mod tests {
use super::*; use super::*;

View File

@ -51,8 +51,6 @@ impl CaptchaAnswer {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{ use crate::{

View File

@ -196,8 +196,7 @@ impl Saveable for CommentSaved {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{ use crate::{

View File

@ -431,7 +431,6 @@ impl ApubActor for Community {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{ use crate::{
source::{ source::{

View File

@ -48,8 +48,7 @@ impl FederationAllowList {
} }
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{ use crate::{

View File

@ -41,8 +41,8 @@ impl Language {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)] #[expect(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{source::language::Language, utils::build_db_pool_for_tests}; use crate::{source::language::Language, utils::build_db_pool_for_tests};

View File

@ -369,7 +369,6 @@ pub struct UserBackupLists {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{ use crate::{
source::{ source::{

View File

@ -465,8 +465,7 @@ impl Crud for AdminPurgeComment {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{ use crate::{

View File

@ -1,32 +1,13 @@
use crate::{ use crate::{
newtypes::{LocalUserId, OAuthProviderId}, newtypes::LocalUserId,
schema::{oauth_account, oauth_account::dsl::local_user_id}, schema::{oauth_account, oauth_account::dsl::local_user_id},
source::oauth_account::{OAuthAccount, OAuthAccountInsertForm}, source::oauth_account::{OAuthAccount, OAuthAccountInsertForm},
utils::{get_conn, DbPool}, utils::{get_conn, DbPool},
}; };
use diesel::{ use diesel::{insert_into, result::Error, ExpressionMethods, QueryDsl};
dsl::{exists, insert_into},
result::Error,
select,
ExpressionMethods,
QueryDsl,
};
use diesel_async::RunQueryDsl; use diesel_async::RunQueryDsl;
impl OAuthAccount { impl OAuthAccount {
pub async fn read(
pool: &mut DbPool<'_>,
for_oauth_provider_id: OAuthProviderId,
for_local_user_id: LocalUserId,
) -> Result<bool, Error> {
let conn = &mut get_conn(pool).await?;
select(exists(
oauth_account::table.find((for_oauth_provider_id, for_local_user_id)),
))
.get_result(conn)
.await
}
pub async fn create(pool: &mut DbPool<'_>, form: &OAuthAccountInsertForm) -> Result<Self, Error> { pub async fn create(pool: &mut DbPool<'_>, form: &OAuthAccountInsertForm) -> Result<Self, Error> {
let conn = &mut get_conn(pool).await?; let conn = &mut get_conn(pool).await?;
insert_into(oauth_account::table) insert_into(oauth_account::table)
@ -35,17 +16,6 @@ impl OAuthAccount {
.await .await
} }
pub async fn delete(
pool: &mut DbPool<'_>,
for_oauth_provider_id: OAuthProviderId,
for_local_user_id: LocalUserId,
) -> Result<usize, Error> {
let conn = &mut get_conn(pool).await?;
diesel::delete(oauth_account::table.find((for_oauth_provider_id, for_local_user_id)))
.execute(conn)
.await
}
pub async fn delete_user_accounts( pub async fn delete_user_accounts(
pool: &mut DbPool<'_>, pool: &mut DbPool<'_>,
for_local_user_id: LocalUserId, for_local_user_id: LocalUserId,

View File

@ -42,8 +42,6 @@ impl PasswordResetRequest {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{ use crate::{

View File

@ -21,6 +21,7 @@ use diesel::{
QueryDsl, QueryDsl,
}; };
use diesel_async::RunQueryDsl; use diesel_async::RunQueryDsl;
use lemmy_utils::{error::LemmyResult, LemmyErrorType};
#[async_trait] #[async_trait]
impl Crud for Person { impl Crud for Person {
@ -121,16 +122,18 @@ impl Person {
.await .await
} }
pub async fn is_username_taken(pool: &mut DbPool<'_>, username: &str) -> Result<bool, Error> { pub async fn check_username_taken(pool: &mut DbPool<'_>, username: &str) -> LemmyResult<()> {
use diesel::dsl::{exists, select}; use diesel::dsl::{exists, select};
let conn = &mut get_conn(pool).await?; let conn = &mut get_conn(pool).await?;
select(exists( select(not(exists(
person::table person::table
.filter(lower(person::name).eq(username.to_lowercase())) .filter(lower(person::name).eq(username.to_lowercase()))
.filter(person::local.eq(true)), .filter(person::local.eq(true)),
)) )))
.get_result(conn) .get_result::<bool>(conn)
.await .await?
.then_some(())
.ok_or(LemmyErrorType::UsernameAlreadyExists.into())
} }
} }
@ -232,7 +235,6 @@ impl PersonFollower {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{ use crate::{
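The check_username_taken rewrite above follows the same Result<bool, Error> cleanup as commit 61a02482ff. A minimal sketch of the conversion idiom, with a simplified error type standing in for LemmyErrorType: the query yields true when the name is free, .then_some(()) turns that into Some(()), and .ok_or(..) maps the taken case to an error.

// Illustrative only; the real code runs a diesel select(not(exists(..))) query.
#[derive(Debug, PartialEq)]
struct UsernameAlreadyExists;

fn check_username_free(name_is_free: bool) -> Result<(), UsernameAlreadyExists> {
    name_is_free.then_some(()).ok_or(UsernameAlreadyExists)
}

fn main() {
    assert_eq!(check_username_free(true), Ok(()));
    assert_eq!(check_username_free(false), Err(UsernameAlreadyExists));
}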

View File

@ -39,7 +39,6 @@ use diesel::{
TextExpressionMethods, TextExpressionMethods,
}; };
use diesel_async::RunQueryDsl; use diesel_async::RunQueryDsl;
use std::collections::HashSet;
#[async_trait] #[async_trait]
impl Crud for Post { impl Crud for Post {
@ -258,9 +257,9 @@ impl Post {
post::table post::table
.inner_join(person::table) .inner_join(person::table)
.inner_join(community::table) .inner_join(community::table)
// find all posts which have scheduled_publish_time that is in the past // find all posts which have scheduled_publish_time that is in the future
.filter(post::scheduled_publish_time.is_not_null()) .filter(post::scheduled_publish_time.is_not_null())
.filter(coalesce(post::scheduled_publish_time, now()).lt(now())) .filter(coalesce(post::scheduled_publish_time, now()).gt(now()))
// make sure the post and community are still around // make sure the post and community are still around
.filter(not(post::deleted.or(post::removed))) .filter(not(post::deleted.or(post::removed)))
.filter(not(community::removed.or(community::deleted))) .filter(not(community::removed.or(community::deleted)))
@ -322,17 +321,15 @@ impl Saveable for PostSaved {
impl PostRead { impl PostRead {
pub async fn mark_as_read( pub async fn mark_as_read(
pool: &mut DbPool<'_>, pool: &mut DbPool<'_>,
post_ids: HashSet<PostId>, post_id: PostId,
person_id: PersonId, person_id: PersonId,
) -> Result<usize, Error> { ) -> Result<usize, Error> {
let conn = &mut get_conn(pool).await?; let conn = &mut get_conn(pool).await?;
let forms = post_ids let form = PostReadForm { post_id, person_id };
.into_iter()
.map(|post_id| PostReadForm { post_id, person_id })
.collect::<Vec<PostReadForm>>();
insert_into(post_read::table) insert_into(post_read::table)
.values(forms) .values(form)
.on_conflict_do_nothing() .on_conflict_do_nothing()
.execute(conn) .execute(conn)
.await .await
@ -340,35 +337,30 @@ impl PostRead {
pub async fn mark_as_unread( pub async fn mark_as_unread(
pool: &mut DbPool<'_>, pool: &mut DbPool<'_>,
post_id_: HashSet<PostId>, post_id_: PostId,
person_id_: PersonId, person_id_: PersonId,
) -> Result<usize, Error> { ) -> Result<usize, Error> {
let conn = &mut get_conn(pool).await?; let conn = &mut get_conn(pool).await?;
diesel::delete( let read_post = post_read::table
post_read::table .filter(post_read::post_id.eq(post_id_))
.filter(post_read::post_id.eq_any(post_id_)) .filter(post_read::person_id.eq(person_id_));
.filter(post_read::person_id.eq(person_id_)),
) diesel::delete(read_post).execute(conn).await
.execute(conn)
.await
} }
} }
impl PostHide { impl PostHide {
pub async fn hide( pub async fn hide(
pool: &mut DbPool<'_>, pool: &mut DbPool<'_>,
post_ids: HashSet<PostId>, post_id: PostId,
person_id: PersonId, person_id: PersonId,
) -> Result<usize, Error> { ) -> Result<usize, Error> {
let conn = &mut get_conn(pool).await?; let conn = &mut get_conn(pool).await?;
let forms = post_ids let form = PostHideForm { post_id, person_id };
.into_iter()
.map(|post_id| PostHideForm { post_id, person_id })
.collect::<Vec<PostHideForm>>();
insert_into(post_hide::table) insert_into(post_hide::table)
.values(forms) .values(form)
.on_conflict_do_nothing() .on_conflict_do_nothing()
.execute(conn) .execute(conn)
.await .await
@ -376,23 +368,20 @@ impl PostHide {
pub async fn unhide( pub async fn unhide(
pool: &mut DbPool<'_>, pool: &mut DbPool<'_>,
post_id_: HashSet<PostId>, post_id_: PostId,
person_id_: PersonId, person_id_: PersonId,
) -> Result<usize, Error> { ) -> Result<usize, Error> {
let conn = &mut get_conn(pool).await?; let conn = &mut get_conn(pool).await?;
diesel::delete( let hidden_post = post_hide::table
post_hide::table .filter(post_hide::post_id.eq(post_id_))
.filter(post_hide::post_id.eq_any(post_id_)) .filter(post_hide::person_id.eq(person_id_));
.filter(post_hide::person_id.eq(person_id_)),
) diesel::delete(hidden_post).execute(conn).await
.execute(conn)
.await
} }
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)] #[allow(clippy::indexing_slicing)]
mod tests { mod tests {
@ -415,24 +404,23 @@ mod tests {
traits::{Crud, Likeable, Saveable}, traits::{Crud, Likeable, Saveable},
utils::build_db_pool_for_tests, utils::build_db_pool_for_tests,
}; };
use chrono::DateTime;
use lemmy_utils::error::LemmyResult;
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use serial_test::serial; use serial_test::serial;
use std::collections::HashSet;
use url::Url; use url::Url;
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn test_crud() { async fn test_crud() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests().await; let pool = &build_db_pool_for_tests().await;
let pool = &mut pool.into(); let pool = &mut pool.into();
let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()).await?;
.await
.unwrap();
let new_person = PersonInsertForm::test_form(inserted_instance.id, "jim"); let new_person = PersonInsertForm::test_form(inserted_instance.id, "jim");
let inserted_person = Person::create(pool, &new_person).await.unwrap(); let inserted_person = Person::create(pool, &new_person).await?;
let new_community = CommunityInsertForm::new( let new_community = CommunityInsertForm::new(
inserted_instance.id, inserted_instance.id,
@ -441,21 +429,27 @@ mod tests {
"pubkey".to_string(), "pubkey".to_string(),
); );
let inserted_community = Community::create(pool, &new_community).await.unwrap(); let inserted_community = Community::create(pool, &new_community).await?;
let new_post = PostInsertForm::new( let new_post = PostInsertForm::new(
"A test post".into(), "A test post".into(),
inserted_person.id, inserted_person.id,
inserted_community.id, inserted_community.id,
); );
let inserted_post = Post::create(pool, &new_post).await.unwrap(); let inserted_post = Post::create(pool, &new_post).await?;
let new_post2 = PostInsertForm::new( let new_post2 = PostInsertForm::new(
"A test post 2".into(), "A test post 2".into(),
inserted_person.id, inserted_person.id,
inserted_community.id, inserted_community.id,
); );
let inserted_post2 = Post::create(pool, &new_post2).await.unwrap(); let inserted_post2 = Post::create(pool, &new_post2).await?;
let new_scheduled_post = PostInsertForm {
scheduled_publish_time: Some(DateTime::from_timestamp_nanos(i64::MAX)),
..PostInsertForm::new("beans".into(), inserted_person.id, inserted_community.id)
};
let inserted_scheduled_post = Post::create(pool, &new_scheduled_post).await?;
let expected_post = Post { let expected_post = Post {
id: inserted_post.id, id: inserted_post.id,
@ -475,9 +469,7 @@ mod tests {
embed_description: None, embed_description: None,
embed_video_url: None, embed_video_url: None,
thumbnail_url: None, thumbnail_url: None,
ap_id: Url::parse(&format!("https://lemmy-alpha/post/{}", inserted_post.id)) ap_id: Url::parse(&format!("https://lemmy-alpha/post/{}", inserted_post.id))?.into(),
.unwrap()
.into(),
local: true, local: true,
language_id: Default::default(), language_id: Default::default(),
featured_community: false, featured_community: false,
@ -493,7 +485,7 @@ mod tests {
score: 1, score: 1,
}; };
let inserted_post_like = PostLike::like(pool, &post_like_form).await.unwrap(); let inserted_post_like = PostLike::like(pool, &post_like_form).await?;
let expected_post_like = PostLike { let expected_post_like = PostLike {
post_id: inserted_post.id, post_id: inserted_post.id,
@ -508,7 +500,7 @@ mod tests {
person_id: inserted_person.id, person_id: inserted_person.id,
}; };
let inserted_post_saved = PostSaved::save(pool, &post_saved_form).await.unwrap(); let inserted_post_saved = PostSaved::save(pool, &post_saved_form).await?;
let expected_post_saved = PostSaved { let expected_post_saved = PostSaved {
post_id: inserted_post.id, post_id: inserted_post.id,
@ -516,54 +508,51 @@ mod tests {
published: inserted_post_saved.published, published: inserted_post_saved.published,
}; };
// Post Read // Mark 2 posts as read
let marked_as_read = PostRead::mark_as_read( PostRead::mark_as_read(pool, inserted_post.id, inserted_person.id).await?;
pool, PostRead::mark_as_read(pool, inserted_post2.id, inserted_person.id).await?;
HashSet::from([inserted_post.id, inserted_post2.id]),
inserted_person.id,
)
.await
.unwrap();
assert_eq!(2, marked_as_read);
let read_post = Post::read(pool, inserted_post.id).await.unwrap(); let read_post = Post::read(pool, inserted_post.id).await?;
let new_post_update = PostUpdateForm { let new_post_update = PostUpdateForm {
name: Some("A test post".into()), name: Some("A test post".into()),
..Default::default() ..Default::default()
}; };
let updated_post = Post::update(pool, inserted_post.id, &new_post_update) let updated_post = Post::update(pool, inserted_post.id, &new_post_update).await?;
.await
.unwrap();
let like_removed = PostLike::remove(pool, inserted_person.id, inserted_post.id) // Scheduled post count
.await let scheduled_post_count = Post::user_scheduled_post_count(inserted_person.id, pool).await?;
.unwrap(); assert_eq!(1, scheduled_post_count);
let like_removed = PostLike::remove(pool, inserted_person.id, inserted_post.id).await?;
assert_eq!(1, like_removed); assert_eq!(1, like_removed);
let saved_removed = PostSaved::unsave(pool, &post_saved_form).await.unwrap(); let saved_removed = PostSaved::unsave(pool, &post_saved_form).await?;
assert_eq!(1, saved_removed); assert_eq!(1, saved_removed);
let read_removed = PostRead::mark_as_unread(
pool,
HashSet::from([inserted_post.id, inserted_post2.id]),
inserted_person.id,
)
.await
.unwrap();
assert_eq!(2, read_removed);
let num_deleted = Post::delete(pool, inserted_post.id).await.unwrap() // mark some posts as unread
+ Post::delete(pool, inserted_post2.id).await.unwrap(); let read_removed_1 =
assert_eq!(2, num_deleted); PostRead::mark_as_unread(pool, inserted_post.id, inserted_person.id).await?;
Community::delete(pool, inserted_community.id) assert_eq!(1, read_removed_1);
.await let read_removed_2 =
.unwrap(); PostRead::mark_as_unread(pool, inserted_post2.id, inserted_person.id).await?;
Person::delete(pool, inserted_person.id).await.unwrap(); assert_eq!(1, read_removed_2);
Instance::delete(pool, inserted_instance.id).await.unwrap();
let num_deleted = Post::delete(pool, inserted_post.id).await?
+ Post::delete(pool, inserted_post2.id).await?
+ Post::delete(pool, inserted_scheduled_post.id).await?;
assert_eq!(3, num_deleted);
Community::delete(pool, inserted_community.id).await?;
Person::delete(pool, inserted_person.id).await?;
Instance::delete(pool, inserted_instance.id).await?;
assert_eq!(expected_post, read_post); assert_eq!(expected_post, read_post);
assert_eq!(expected_post, inserted_post); assert_eq!(expected_post, inserted_post);
assert_eq!(expected_post, updated_post); assert_eq!(expected_post, updated_post);
assert_eq!(expected_post_like, inserted_post_like); assert_eq!(expected_post_like, inserted_post_like);
assert_eq!(expected_post_saved, inserted_post_saved); assert_eq!(expected_post_saved, inserted_post_saved);
Ok(())
} }
} }
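The .lt(now()) → .gt(now()) change near the top of this file is the scheduled-post fix from commit 50ce7961d1: a post should count toward the limit while its publish time is still in the future. A toy illustration of the corrected predicate, using chrono directly instead of the actual diesel query:

use chrono::{DateTime, Duration, Utc};

// Illustrative only: counts a post as "scheduled" when its publish time
// has not passed yet.
fn counts_as_scheduled(scheduled_publish_time: Option<DateTime<Utc>>) -> bool {
    scheduled_publish_time
        .map(|t| t > Utc::now())
        .unwrap_or(false)
}

fn main() {
    assert!(counts_as_scheduled(Some(Utc::now() + Duration::hours(1))));
    assert!(!counts_as_scheduled(Some(Utc::now() - Duration::hours(1))));
    assert!(!counts_as_scheduled(None));
}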

View File

@ -80,8 +80,7 @@ impl Reportable for PostReport {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use super::*; use super::*;

View File

@ -85,8 +85,7 @@ impl PrivateMessage {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
#[allow(clippy::indexing_slicing)]
mod tests { mod tests {
use crate::{ use crate::{

View File

@ -27,7 +27,6 @@ pub mod newtypes;
pub mod sensitive; pub mod sensitive;
#[cfg(feature = "full")] #[cfg(feature = "full")]
#[rustfmt::skip] #[rustfmt::skip]
#[allow(clippy::wildcard_imports)]
pub mod schema; pub mod schema;
#[cfg(feature = "full")] #[cfg(feature = "full")]
pub mod aliases { pub mod aliases {

View File

@ -191,13 +191,13 @@ impl Display for DbUrl {
} }
// the project doesn't compile with From // the project doesn't compile with From
#[allow(clippy::from_over_into)] #[expect(clippy::from_over_into)]
impl Into<DbUrl> for Url { impl Into<DbUrl> for Url {
fn into(self) -> DbUrl { fn into(self) -> DbUrl {
DbUrl(Box::new(self)) DbUrl(Box::new(self))
} }
} }
#[allow(clippy::from_over_into)] #[expect(clippy::from_over_into)]
impl Into<Url> for DbUrl { impl Into<Url> for DbUrl {
fn into(self) -> Url { fn into(self) -> Url {
*self.0 *self.0

View File

@ -769,7 +769,7 @@ diesel::table! {
featured_local -> Bool, featured_local -> Bool,
url_content_type -> Nullable<Text>, url_content_type -> Nullable<Text>,
alt_text -> Nullable<Text>, alt_text -> Nullable<Text>,
scheduled_publish_time -> Nullable<Timestamptz> scheduled_publish_time -> Nullable<Timestamptz>,
} }
} }

View File

@ -14,11 +14,12 @@ use serde_with::skip_serializing_none;
use ts_rs::TS; use ts_rs::TS;
#[skip_serializing_none] #[skip_serializing_none]
#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize, Default)]
#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))] #[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable, TS))]
#[cfg_attr(feature = "full", diesel(table_name = local_user))] #[cfg_attr(feature = "full", diesel(table_name = local_user))]
#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))] #[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))]
#[cfg_attr(feature = "full", ts(export))] #[cfg_attr(feature = "full", ts(export))]
#[serde(default)]
/// A local user. /// A local user.
pub struct LocalUser { pub struct LocalUser {
pub id: LocalUserId, pub id: LocalUserId,

View File

@@ -87,39 +87,30 @@ impl Serialize for PublicOAuthProvider {
 }
 #[derive(Debug, Clone)]
-#[cfg_attr(feature = "full", derive(Insertable, AsChangeset, TS))]
+#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
 #[cfg_attr(feature = "full", diesel(table_name = oauth_provider))]
-#[cfg_attr(feature = "full", ts(export))]
 pub struct OAuthProviderInsertForm {
   pub display_name: String,
-  #[cfg_attr(feature = "full", ts(type = "string"))]
   pub issuer: DbUrl,
-  #[cfg_attr(feature = "full", ts(type = "string"))]
   pub authorization_endpoint: DbUrl,
-  #[cfg_attr(feature = "full", ts(type = "string"))]
   pub token_endpoint: DbUrl,
-  #[cfg_attr(feature = "full", ts(type = "string"))]
   pub userinfo_endpoint: DbUrl,
   pub id_claim: String,
   pub client_id: String,
   pub client_secret: String,
   pub scopes: String,
-  pub auto_verify_email: bool,
-  pub account_linking_enabled: bool,
-  pub enabled: bool,
+  pub auto_verify_email: Option<bool>,
+  pub account_linking_enabled: Option<bool>,
+  pub enabled: Option<bool>,
 }
 #[derive(Debug, Clone)]
-#[cfg_attr(feature = "full", derive(Insertable, AsChangeset, TS))]
+#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
 #[cfg_attr(feature = "full", diesel(table_name = oauth_provider))]
-#[cfg_attr(feature = "full", ts(export))]
 pub struct OAuthProviderUpdateForm {
   pub display_name: Option<String>,
-  #[cfg_attr(feature = "full", ts(type = "string"))]
   pub authorization_endpoint: Option<DbUrl>,
-  #[cfg_attr(feature = "full", ts(type = "string"))]
   pub token_endpoint: Option<DbUrl>,
-  #[cfg_attr(feature = "full", ts(type = "string"))]
   pub userinfo_endpoint: Option<DbUrl>,
   pub id_claim: Option<String>,
   pub client_secret: Option<String>,
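
Why the insert form's booleans became `Option<bool>`: with Diesel's `Insertable` derive, an `Option` field is simply omitted from the generated INSERT when it is `None`, so the column's SQL DEFAULT applies (see the migration later in this diff, where `enabled` now defaults to TRUE). A compile-oriented sketch under a deliberately simplified, assumed schema (the table and fields below are illustrative, not the project's real definitions):

```rust
use diesel::prelude::*;

diesel::table! {
    oauth_provider (id) {
        id -> Int4,
        display_name -> Text,
        enabled -> Bool,
    }
}

#[derive(Insertable)]
#[diesel(table_name = oauth_provider)]
struct OAuthProviderInsertForm {
    display_name: String,
    enabled: Option<bool>,
}

fn main() {
    let form = OAuthProviderInsertForm {
        display_name: "Example IdP".into(),
        // `None` leaves `enabled` out of the INSERT, so the column DEFAULT is used.
        enabled: None,
    };
    let _statement = diesel::insert_into(oauth_provider::table).values(form);
}
```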

View File

@@ -595,7 +595,6 @@ impl<RF, LF> Queries<RF, LF> {
 }
 #[cfg(test)]
-#[allow(clippy::indexing_slicing)]
 mod tests {
   use super::*;

View File

@@ -259,8 +259,8 @@ impl CommentReportQuery {
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
+#[expect(clippy::unwrap_used)]
+#[expect(clippy::indexing_slicing)]
 mod tests {
   use crate::{

View File

@@ -422,8 +422,8 @@ impl<'a> CommentQuery<'a> {
 }
 #[cfg(test)]
-#[allow(clippy::indexing_slicing)]
-#[allow(clippy::unwrap_used)]
+#[expect(clippy::indexing_slicing)]
+#[expect(clippy::unwrap_used)]
 mod tests {
   use crate::{

View File

@@ -284,8 +284,8 @@ impl PostReportQuery {
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
+#[expect(clippy::unwrap_used)]
+#[expect(clippy::indexing_slicing)]
 mod tests {
   use crate::{

View File

@@ -394,14 +394,12 @@ fn queries<'a>() -> Queries<
       query = query.filter(post::url.eq(search_term));
     } else {
       let searcher = fuzzy_search(search_term);
+      let name_filter = post::name.ilike(searcher.clone());
+      let body_filter = post::body.ilike(searcher.clone());
       query = if options.title_only.unwrap_or_default() {
-        query.filter(post::name.ilike(searcher))
+        query.filter(name_filter)
       } else {
-        query.filter(
-          post::name
-            .ilike(searcher.clone())
-            .or(post::body.ilike(searcher)),
-        )
+        query.filter(name_filter.or(body_filter))
       }
       .filter(not(post::removed.or(post::deleted)));
     }
@@ -741,7 +739,7 @@ impl<'a> PostQuery<'a> {
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
+#[expect(clippy::unwrap_used)]
 mod tests {
   use crate::{
     post_view::{PaginationCursorData, PostQuery, PostView},
@@ -794,7 +792,7 @@ mod tests {
   use lemmy_utils::error::LemmyResult;
   use pretty_assertions::assert_eq;
   use serial_test::serial;
-  use std::{collections::HashSet, time::Duration};
+  use std::time::Duration;
   use url::Url;
   const POST_WITH_ANOTHER_TITLE: &str = "Another title";
@@ -1629,7 +1627,7 @@ mod tests {
     // Mark a post as read
     PostRead::mark_as_read(
       pool,
-      HashSet::from([data.inserted_bot_post.id]),
+      data.inserted_bot_post.id,
       data.local_user_view.person.id,
     )
     .await?;
@@ -1671,7 +1669,7 @@ mod tests {
     // Mark a post as hidden
     PostHide::hide(
       pool,
-      HashSet::from([data.inserted_bot_post.id]),
+      data.inserted_bot_post.id,
       data.local_user_view.person.id,
     )
     .await?;

View File

@@ -111,8 +111,8 @@ impl PrivateMessageReportQuery {
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
+#[expect(clippy::unwrap_used)]
+#[expect(clippy::indexing_slicing)]
 mod tests {
   use crate::private_message_report_view::PrivateMessageReportQuery;

View File

@@ -173,8 +173,8 @@ impl PrivateMessageQuery {
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
+#[expect(clippy::unwrap_used)]
+#[expect(clippy::indexing_slicing)]
 mod tests {
   use crate::{private_message_view::PrivateMessageQuery, structs::PrivateMessageView};

View File

@@ -135,8 +135,7 @@ impl RegistrationApplicationQuery {
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
+#[expect(clippy::unwrap_used)]
 mod tests {
   use crate::registration_application_view::{

View File

@@ -83,8 +83,7 @@ impl VoteView {
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
+#[expect(clippy::unwrap_used)]
 mod tests {
   use crate::structs::VoteView;

View File

@@ -303,7 +303,6 @@ impl CommentReplyQuery {
 }
 #[cfg(test)]
-#[allow(clippy::indexing_slicing)]
 mod tests {
   use crate::{comment_reply_view::CommentReplyQuery, structs::CommentReplyView};

View File

@@ -112,9 +112,14 @@ fn queries<'a>() -> Queries<
     if let Some(search_term) = options.search_term {
       let searcher = fuzzy_search(&search_term);
-      query = query
-        .filter(community::name.ilike(searcher.clone()))
-        .or_filter(community::title.ilike(searcher))
+      let name_filter = community::name.ilike(searcher.clone());
+      let title_filter = community::title.ilike(searcher.clone());
+      let description_filter = community::description.ilike(searcher.clone());
+      query = if options.title_only.unwrap_or_default() {
+        query.filter(name_filter.or(title_filter))
+      } else {
+        query.filter(name_filter.or(title_filter.or(description_filter)))
+      }
     }
     // Hide deleted and removed for non-admins or mods
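
One subtlety the rewrite above addresses: Diesel's `.or_filter(x)` ORs `x` against the entire WHERE clause built so far, whereas `.filter(a.or(b))` keeps the OR grouped inside a single parenthesized predicate that composes safely with later `.filter(...)` calls (such as the deleted/removed checks that follow). A rough sketch with a made-up table to show the difference in the generated SQL (the `community` columns below are illustrative, not the project's schema):

```rust
use diesel::prelude::*;

diesel::table! {
    community (id) {
        id -> Int4,
        name -> Text,
        title -> Text,
        nsfw -> Bool,
    }
}

fn main() {
    // Roughly: WHERE (nsfw = FALSE AND name LIKE '%x%') OR title LIKE '%x%'
    let _ungrouped = community::table
        .filter(community::nsfw.eq(false))
        .filter(community::name.like("%x%"))
        .or_filter(community::title.like("%x%"));

    // Roughly: WHERE nsfw = FALSE AND (name LIKE '%x%' OR title LIKE '%x%')
    let _grouped = community::table
        .filter(community::nsfw.eq(false))
        .filter(community::name.like("%x%").or(community::title.like("%x%")));
}
```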
@@ -229,6 +234,7 @@ pub struct CommunityQuery<'a> {
   pub sort: Option<PostSortType>,
   pub local_user: Option<&'a LocalUser>,
   pub search_term: Option<String>,
+  pub title_only: Option<bool>,
   pub is_mod_or_admin: bool,
   pub show_nsfw: bool,
   pub page: Option<i64>,
@@ -242,8 +248,7 @@ impl<'a> CommunityQuery<'a> {
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
+#[expect(clippy::unwrap_used)]
 mod tests {
   use crate::{community_view::CommunityQuery, structs::CommunityView};

View File

@@ -303,7 +303,6 @@ impl PersonMentionQuery {
 }
 #[cfg(test)]
-#[allow(clippy::indexing_slicing)]
 mod tests {
   use crate::{person_mention_view::PersonMentionQuery, structs::PersonMentionView};

View File

@@ -164,7 +164,7 @@ impl PersonQuery {
 }
 #[cfg(test)]
-#[allow(clippy::indexing_slicing)]
+#[expect(clippy::indexing_slicing)]
 mod tests {
   use super::*;

View File

@@ -222,8 +222,8 @@ impl<T: DataSource> CommunityInboxCollector<T> {
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
+#[expect(clippy::unwrap_used)]
+#[expect(clippy::indexing_slicing)]
 mod tests {
   use super::*;
   use lemmy_db_schema::{

View File

@@ -192,8 +192,8 @@ impl SendManager {
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
+#[expect(clippy::unwrap_used)]
+#[expect(clippy::indexing_slicing)]
 mod test {
   use super::*;

View File

@@ -439,8 +439,8 @@ impl InstanceWorker {
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
+#[expect(clippy::unwrap_used)]
+#[expect(clippy::indexing_slicing)]
 mod test {
   use super::*;

View File

@@ -29,6 +29,8 @@ pub const CACHE_DURATION_FEDERATION: Duration = Duration::from_secs(60);
 pub const CACHE_DURATION_API: Duration = Duration::from_secs(1);
+pub const MAX_COMMENT_DEPTH_LIMIT: usize = 50;
 #[macro_export]
 macro_rules! location_info {
   () => {
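
For context, this new constant backs the "reduce max comment depth to 50" change (#5009). Lemmy stores comment ancestry as a materialized path, so depth can be checked by counting path segments rather than recursing over replies. The helper below is only a hypothetical sketch of that kind of guard; the function name, error type, and path format handling are illustrative, not the project's actual implementation:

```rust
const MAX_COMMENT_DEPTH_LIMIT: usize = 50;

/// Rejects replies that would exceed the depth cap, based on the parent's
/// materialized path (e.g. "0.123.456" describes a chain three comments deep).
fn check_comment_depth(parent_path: &str) -> Result<(), String> {
    let depth = parent_path.split('.').count();
    if depth > MAX_COMMENT_DEPTH_LIMIT {
        Err("max comment depth reached".to_string())
    } else {
        Ok(())
    }
}

fn main() {
    assert!(check_comment_depth("0.1.2").is_ok());
    let too_deep = vec!["0"; MAX_COMMENT_DEPTH_LIMIT + 10].join(".");
    assert!(check_comment_depth(&too_deep).is_err());
}
```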

View File

@@ -221,8 +221,6 @@ fn parse_ip(addr: &str) -> Option<IpAddr> {
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
 mod tests {
   #[test]

View File

@@ -136,7 +136,6 @@ impl<K: Eq + Hash, C: MapLevel> MapLevel for Map<K, C> {
         .entry(addr_part)
         .or_insert(RateLimitedGroup::new(now, adjusted_configs));
-      #[allow(clippy::indexing_slicing)]
       let total_passes = group.check_total(action_type, now, adjusted_configs[action_type]);
       let children_pass = group.children.check(
@@ -161,7 +160,6 @@ impl<K: Eq + Hash, C: MapLevel> MapLevel for Map<K, C> {
       // Evaluated if `some_children_remaining` is false
       let total_has_refill_in_future = || {
         group.total.into_iter().any(|(action_type, bucket)| {
-          #[allow(clippy::indexing_slicing)]
           let config = configs[action_type];
           bucket.update(now, config).tokens != config.capacity
         })
@@ -214,7 +212,6 @@ impl<C: Default> RateLimitedGroup<C> {
     now: InstantSecs,
     config: BucketConfig,
   ) -> bool {
-    #[allow(clippy::indexing_slicing)] // `EnumMap` has no `get` function
     let bucket = &mut self.total[action_type];
     let new_bucket = bucket.update(now, config);
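
The removed `#[allow(clippy::indexing_slicing)]` lines here guarded lookups like `self.total[action_type]`. As the original inline comment noted, `EnumMap` has no `get` method, but indexing it by its key type is total (every variant has a slot), so unlike slice indexing it cannot panic. A minimal sketch assuming the `enum_map` crate; the `ActionType` variants below are illustrative:

```rust
use enum_map::{enum_map, Enum, EnumMap};

#[derive(Enum)]
enum ActionType {
    Message,
    Post,
}

fn main() {
    // Every variant gets an initialized slot, so indexing is infallible.
    let mut tokens: EnumMap<ActionType, u32> = enum_map! { _ => 10 };
    tokens[ActionType::Post] -= 1;
    assert_eq!(tokens[ActionType::Message], 10);
    assert_eq!(tokens[ActionType::Post], 9);
}
```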
@@ -311,8 +308,7 @@ fn split_ipv6(ip: Ipv6Addr) -> ([u8; 6], u8, u8) {
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
+#[expect(clippy::unwrap_used)]
 mod tests {
   use super::{ActionType, BucketConfig, InstantSecs, RateLimitState, RateLimitedGroup};
@@ -361,7 +357,6 @@ mod tests {
     assert!(post_passed);
   }
-  #[allow(clippy::indexing_slicing)]
   let expected_buckets = |factor: u32, tokens_consumed: u32| {
     let adjusted_configs = bucket_configs.map(|_, config| BucketConfig {
       capacity: config.capacity.saturating_mul(factor),

View File

@@ -107,8 +107,7 @@ pub fn markdown_check_for_blocked_urls(text: &str, blocklist: &RegexSet) -> Lemm
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
+#[expect(clippy::unwrap_used)]
 mod tests {
   use super::*;

View File

@@ -134,8 +134,6 @@ pub fn add(markdown_parser: &mut MarkdownIt) {
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
 mod tests {
   use crate::utils::markdown::spoiler_rule::add;

View File

@@ -34,8 +34,7 @@ pub fn scrape_text_for_mentions(text: &str) -> Vec<MentionData> {
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
+#[expect(clippy::indexing_slicing)]
 mod test {
   use crate::utils::mention::scrape_text_for_mentions;

View File

@@ -61,8 +61,7 @@ pub(crate) fn slurs_vec_to_str(slurs: &[&str]) -> String {
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
+#[expect(clippy::unwrap_used)]
 mod test {
   use crate::utils::slurs::{remove_slurs, slur_check, slurs_vec_to_str};

View File

@@ -351,7 +351,6 @@ pub fn build_url_str_without_scheme(url_str: &str) -> LemmyResult<String> {
 }
 #[cfg(test)]
-#[allow(clippy::indexing_slicing)]
 mod tests {
   use crate::{

View File

@@ -14,7 +14,7 @@ CREATE TABLE oauth_provider (
     scopes text NOT NULL,
     auto_verify_email boolean DEFAULT TRUE NOT NULL,
     account_linking_enabled boolean DEFAULT FALSE NOT NULL,
-    enabled boolean DEFAULT FALSE NOT NULL,
+    enabled boolean DEFAULT TRUE NOT NULL,
     published timestamp with time zone DEFAULT now() NOT NULL,
     updated timestamp with time zone
 );

View File

@@ -605,7 +605,6 @@ async fn build_update_instance_form(
 }
 #[cfg(test)]
-#[allow(clippy::indexing_slicing)]
 mod tests {
   use crate::scheduled_tasks::build_update_instance_form;

View File

@@ -97,8 +97,7 @@ where
 }
 #[cfg(test)]
-#[allow(clippy::unwrap_used)]
-#[allow(clippy::indexing_slicing)]
+#[expect(clippy::unwrap_used)]
 mod tests {
   use super::*;