Mirror of https://github.com/LemmyNet/lemmy, synced 2024-11-24 13:33:05 +00:00
Removing a few expects from production and test code. (#5193)

* Removing a few expects from production and test code. - Fixes #5192
* Using if let filter for admin emails.
* Fixing unused error.
* Adding expect_used = deny to clippy lints.
* Update src/lib.rs (Co-authored-by: Nutomic <me@nutomic.com>)
* Update crates/utils/src/settings/structs.rs (Co-authored-by: Nutomic <me@nutomic.com>)
* Update crates/utils/src/settings/mod.rs (Co-authored-by: Nutomic <me@nutomic.com>)
* Some more cleanup.
* Fix clippy

Co-authored-by: Nutomic <me@nutomic.com>
parent 231cce9350
commit fa4825b524
42 changed files with 252 additions and 173 deletions
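Every change in this diff follows the same pattern: panicking `.expect(...)` calls are replaced with typed errors that are propagated via `LemmyResult` and `?`, and a workspace-level `expect_used = "deny"` clippy lint (first hunk below) keeps new ones from creeping in. A minimal sketch of the rewrite, assuming the `LemmyResult` and `LemmyErrorType::EmailRequired` items from lemmy_utils; the helper function itself is hypothetical and only illustrates the pattern:

use lemmy_utils::error::{LemmyErrorType, LemmyResult};

// Hypothetical helper: an optional email that used to be unwrapped with
// `.expect("email")` is now converted into a typed error and bubbled up
// with `?` instead of panicking.
fn require_email(email: Option<String>) -> LemmyResult<String> {
  Ok(email.ok_or(LemmyErrorType::EmailRequired)?)
}

Callers then handle the failure like any other LemmyResult instead of crashing the task that hit it.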
@@ -79,6 +79,7 @@ unused_self = "deny"
 unwrap_used = "deny"
 unimplemented = "deny"
 unused_async = "deny"
+expect_used = "deny"

 [workspace.dependencies]
 lemmy_api = { version = "=0.19.6-beta.7", path = "./crates/api" }
@@ -10,7 +10,7 @@ use lemmy_db_schema::source::{
   login_token::LoginToken,
   password_reset_request::PasswordResetRequest,
 };
-use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult};
+use lemmy_utils::error::{LemmyErrorType, LemmyResult};

 #[tracing::instrument(skip(context))]
 pub async fn change_password_after_reset(
@@ -32,9 +32,7 @@ pub async fn change_password_after_reset(

   // Update the user with the new password
   let password = data.password.clone();
-  LocalUser::update_password(&mut context.pool(), local_user_id, &password)
-    .await
-    .with_lemmy_type(LemmyErrorType::CouldntUpdateUser)?;
+  LocalUser::update_password(&mut context.pool(), local_user_id, &password).await?;

   LoginToken::invalidate_all(&mut context.pool(), local_user_id).await?;

@@ -12,6 +12,7 @@ use captcha::{gen, Difficulty};
 use lemmy_api_common::{
   context::LemmyContext,
   person::{CaptchaResponse, GetCaptchaResponse},
+  LemmyErrorType,
 };
 use lemmy_db_schema::source::{
   captcha_answer::{CaptchaAnswer, CaptchaAnswerForm},
@@ -37,7 +38,9 @@ pub async fn get_captcha(context: Data<LemmyContext>) -> LemmyResult<HttpRespons

   let answer = captcha.chars_as_string();

-  let png = captcha.as_base64().expect("failed to generate captcha");
+  let png = captcha
+    .as_base64()
+    .ok_or(LemmyErrorType::CouldntCreateImageCaptcha)?;

   let wav = captcha_as_wav_base64(&captcha)?;

@@ -55,6 +55,7 @@ impl LemmyContext {
   /// Initialize a context for use in tests which blocks federation network calls.
   ///
   /// Do not use this in production code.
+  #[allow(clippy::expect_used)]
   pub async fn init_test_federation_config() -> FederationConfig<LemmyContext> {
     // call this to run migrations
     let pool = build_db_pool_for_tests();
@@ -521,7 +521,7 @@ mod tests {

     // root relative url
     let html_bytes = b"<!DOCTYPE html><html><head><meta property='og:image' content='/image.jpg'></head><body></body></html>";
-    let metadata = extract_opengraph_data(html_bytes, &url).expect("Unable to parse metadata");
+    let metadata = extract_opengraph_data(html_bytes, &url)?;
     assert_eq!(
       metadata.image,
       Some(Url::parse("https://example.com/image.jpg")?.into())
@@ -529,7 +529,7 @@ mod tests {

     // base relative url
     let html_bytes = b"<!DOCTYPE html><html><head><meta property='og:image' content='image.jpg'></head><body></body></html>";
-    let metadata = extract_opengraph_data(html_bytes, &url).expect("Unable to parse metadata");
+    let metadata = extract_opengraph_data(html_bytes, &url)?;
     assert_eq!(
       metadata.image,
       Some(Url::parse("https://example.com/one/image.jpg")?.into())
@@ -537,7 +537,7 @@ mod tests {

     // absolute url
     let html_bytes = b"<!DOCTYPE html><html><head><meta property='og:image' content='https://cdn.host.com/image.jpg'></head><body></body></html>";
-    let metadata = extract_opengraph_data(html_bytes, &url).expect("Unable to parse metadata");
+    let metadata = extract_opengraph_data(html_bytes, &url)?;
     assert_eq!(
       metadata.image,
       Some(Url::parse("https://cdn.host.com/image.jpg")?.into())
@@ -545,7 +545,7 @@ mod tests {

     // protocol relative url
     let html_bytes = b"<!DOCTYPE html><html><head><meta property='og:image' content='//example.com/image.jpg'></head><body></body></html>";
-    let metadata = extract_opengraph_data(html_bytes, &url).expect("Unable to parse metadata");
+    let metadata = extract_opengraph_data(html_bytes, &url)?;
     assert_eq!(
       metadata.image,
       Some(Url::parse("https://example.com/image.jpg")?.into())
@@ -514,6 +514,7 @@ pub struct ReadableFederationState {
   next_retry: Option<DateTime<Utc>>,
 }

+#[allow(clippy::expect_used)]
 impl From<FederationQueueState> for ReadableFederationState {
   fn from(internal_state: FederationQueueState) -> Self {
     ReadableFederationState {
@@ -442,7 +442,11 @@ pub async fn send_password_reset_email(
   // Generate a random token
   let token = uuid::Uuid::new_v4().to_string();

-  let email = &user.local_user.email.clone().expect("email");
+  let email = &user
+    .local_user
+    .email
+    .clone()
+    .ok_or(LemmyErrorType::EmailRequired)?;
   let lang = get_interface_language(user);
   let subject = &lang.password_reset_subject(&user.person.name);
   let protocol_and_hostname = settings.get_protocol_and_hostname();
@@ -492,6 +496,7 @@ pub fn get_interface_language_from_settings(user: &LocalUserView) -> Lang {
   lang_str_to_lang(&user.local_user.interface_language)
 }

+#[allow(clippy::expect_used)]
 fn lang_str_to_lang(lang: &str) -> Lang {
   let lang_id = LanguageId::new(lang);
   Lang::from_language_id(&lang_id).unwrap_or_else(|| {
@@ -518,11 +523,11 @@ pub fn local_site_rate_limit_to_rate_limit_config(
   })
 }

-pub fn local_site_to_slur_regex(local_site: &LocalSite) -> Option<Regex> {
+pub fn local_site_to_slur_regex(local_site: &LocalSite) -> Option<LemmyResult<Regex>> {
   build_slur_regex(local_site.slur_filter_regex.as_deref())
 }

-pub fn local_site_opt_to_slur_regex(local_site: &Option<LocalSite>) -> Option<Regex> {
+pub fn local_site_opt_to_slur_regex(local_site: &Option<LocalSite>) -> Option<LemmyResult<Regex>> {
   local_site
     .as_ref()
     .map(local_site_to_slur_regex)
@@ -557,7 +562,11 @@ pub async fn send_application_approved_email(
   user: &LocalUserView,
   settings: &Settings,
 ) -> LemmyResult<()> {
-  let email = &user.local_user.email.clone().expect("email");
+  let email = &user
+    .local_user
+    .email
+    .clone()
+    .ok_or(LemmyErrorType::EmailRequired)?;
   let lang = get_interface_language(user);
   let subject = lang.registration_approved_subject(&user.person.actor_id);
   let body = lang.registration_approved_body(&settings.hostname);
@@ -579,7 +588,11 @@ pub async fn send_new_applicant_email_to_admins(
   );

   for admin in &admins {
-    let email = &admin.local_user.email.clone().expect("email");
+    let email = &admin
+      .local_user
+      .email
+      .clone()
+      .ok_or(LemmyErrorType::EmailRequired)?;
     let lang = get_interface_language_from_settings(admin);
     let subject = lang.new_application_subject(&settings.hostname, applicant_username);
     let body = lang.new_application_body(applications_link);
@@ -601,11 +614,13 @@ pub async fn send_new_report_email_to_admins(
   let reports_link = &format!("{}/reports", settings.get_protocol_and_hostname(),);

   for admin in &admins {
-    let email = &admin.local_user.email.clone().expect("email");
-    let lang = get_interface_language_from_settings(admin);
-    let subject = lang.new_report_subject(&settings.hostname, reported_username, reporter_username);
-    let body = lang.new_report_body(reports_link);
-    send_email(&subject, email, &admin.person.name, &body, settings).await?;
+    if let Some(email) = &admin.local_user.email {
+      let lang = get_interface_language_from_settings(admin);
+      let subject =
+        lang.new_report_subject(&settings.hostname, reported_username, reporter_username);
+      let body = lang.new_report_body(reports_link);
+      send_email(&subject, email, &admin.person.name, &body, settings).await?;
+    }
   }
   Ok(())
 }
@@ -1030,7 +1045,7 @@ pub fn check_conflicting_like_filters(

 pub async fn process_markdown(
   text: &str,
-  slur_regex: &Option<Regex>,
+  slur_regex: &Option<LemmyResult<Regex>>,
   url_blocklist: &RegexSet,
   context: &LemmyContext,
 ) -> LemmyResult<String> {
@@ -1062,7 +1077,7 @@ pub async fn process_markdown(

 pub async fn process_markdown_opt(
   text: &Option<String>,
-  slur_regex: &Option<Regex>,
+  slur_regex: &Option<LemmyResult<Regex>>,
   url_blocklist: &RegexSet,
   context: &LemmyContext,
 ) -> LemmyResult<Option<String>> {
@@ -162,7 +162,7 @@ fn validate_create_payload(local_site: &LocalSite, create_site: &CreateSite) ->
       .slur_filter_regex
       .as_deref()
       .or(local_site.slur_filter_regex.as_deref()),
-  )?;
+  );

   site_name_length_check(&create_site.name)?;
   check_slurs(&create_site.name, &slur_regex)?;
@@ -211,7 +211,7 @@ fn validate_update_payload(local_site: &LocalSite, edit_site: &EditSite) -> Lemm
       .slur_filter_regex
       .as_deref()
       .or(local_site.slur_filter_regex.as_deref()),
-  )?;
+  );

   if let Some(name) = &edit_site.name {
     // The name doesn't need to be updated, but if provided it cannot be blanked out...
@@ -148,14 +148,15 @@ pub async fn register(
   let inserted_local_user = create_local_user(&context, language_tags, &local_user_form).await?;

   if local_site.site_setup && require_registration_application {
-    // Create the registration application
-    let form = RegistrationApplicationInsertForm {
-      local_user_id: inserted_local_user.id,
-      // We already made sure answer was not null above
-      answer: data.answer.clone().expect("must have an answer"),
-    };
+    if let Some(answer) = data.answer.clone() {
+      // Create the registration application
+      let form = RegistrationApplicationInsertForm {
+        local_user_id: inserted_local_user.id,
+        answer,
+      };

-    RegistrationApplication::create(&mut context.pool(), &form).await?;
+      RegistrationApplication::create(&mut context.pool(), &form).await?;
+    }
   }

   // Email the admins, only if email verification is not required
@@ -373,17 +374,19 @@ pub async fn authenticate_with_oauth(
       && !local_user.accepted_application
       && !local_user.admin
     {
-      // Create the registration application
-      RegistrationApplication::create(
-        &mut context.pool(),
-        &RegistrationApplicationInsertForm {
-          local_user_id: local_user.id,
-          answer: data.answer.clone().expect("must have an answer"),
-        },
-      )
-      .await?;
+      if let Some(answer) = data.answer.clone() {
+        // Create the registration application
+        RegistrationApplication::create(
+          &mut context.pool(),
+          &RegistrationApplicationInsertForm {
+            local_user_id: local_user.id,
+            answer,
+          },
+        )
+        .await?;

-      login_response.registration_created = true;
+        login_response.registration_created = true;
+      }
     }

     // Check email is verified when required
@@ -483,7 +486,7 @@ async fn send_verification_email_if_required(
       &local_user
         .email
         .clone()
-        .expect("invalid verification email"),
+        .ok_or(LemmyErrorType::EmailRequired)?,
       &mut context.pool(),
       context.settings(),
     )
@@ -130,7 +130,7 @@ impl AnnounceActivity {
       actor: c.actor.clone().into_inner(),
       other: serde_json::to_value(c.object)?
         .as_object()
-        .expect("is object")
+        .ok_or(FederationError::Unreachable)?
         .clone(),
     };
     let announce_compat = AnnounceActivity::new(announcable_page, community, context)?;
@@ -5,7 +5,7 @@ use activitypub_federation::{
 };
 use diesel::NotFound;
 use itertools::Itertools;
-use lemmy_api_common::context::LemmyContext;
+use lemmy_api_common::{context::LemmyContext, LemmyErrorType};
 use lemmy_db_schema::traits::ApubActor;
 use lemmy_db_views::structs::LocalUserView;
 use lemmy_utils::error::{LemmyError, LemmyResult};
@@ -42,7 +42,7 @@ where
   let (name, domain) = identifier
     .splitn(2, '@')
     .collect_tuple()
-    .expect("invalid query");
+    .ok_or(LemmyErrorType::InvalidUrl)?;
   let actor = DbActor::read_from_name_and_domain(&mut context.pool(), name, domain)
     .await
     .ok()
@@ -50,7 +50,8 @@ impl UrlVerifier for VerifyUrlData {
   async fn verify(&self, url: &Url) -> Result<(), ActivityPubError> {
     let local_site_data = local_site_data_cached(&mut (&self.0).into())
       .await
-      .expect("read local site data");
+      .map_err(|e| ActivityPubError::Other(format!("Cant read local site data: {e}")))?;

     use FederationError::*;
     check_apub_id_valid(url, &local_site_data).map_err(|err| match err {
       LemmyError {
@@ -176,10 +177,7 @@ pub(crate) async fn check_apub_id_valid_with_strictness(
     .domain()
     .ok_or(FederationError::UrlWithoutDomain)?
     .to_string();
-  let local_instance = context
-    .settings()
-    .get_hostname_without_port()
-    .expect("local hostname is valid");
+  let local_instance = context.settings().get_hostname_without_port()?;
   if domain == local_instance {
     return Ok(());
   }
@@ -196,10 +194,7 @@ pub(crate) async fn check_apub_id_valid_with_strictness(
       .iter()
       .map(|i| i.domain.clone())
       .collect::<Vec<String>>();
-    let local_instance = context
-      .settings()
-      .get_hostname_without_port()
-      .expect("local hostname is valid");
+    let local_instance = context.settings().get_hostname_without_port()?;
     allowed_and_local.push(local_instance);

     let domain = apub_id
@@ -372,6 +372,7 @@ async fn convert_update_languages(
 }

 /// If all languages are returned, return empty vec instead
+#[allow(clippy::expect_used)]
 async fn convert_read_languages(
   conn: &mut AsyncPgConnection,
   language_ids: Vec<LanguageId>,
@@ -510,7 +511,7 @@ mod tests {

   #[tokio::test]
   #[serial]
-  async fn test_user_languages() -> Result<(), Error> {
+  async fn test_user_languages() -> LemmyResult<()> {
     let pool = &build_db_pool_for_tests();
     let pool = &mut pool.into();

@@ -543,7 +544,7 @@ mod tests {

   #[tokio::test]
   #[serial]
-  async fn test_community_languages() -> Result<(), Error> {
+  async fn test_community_languages() -> LemmyResult<()> {
     let pool = &build_db_pool_for_tests();
     let pool = &mut pool.into();
     let (site, instance) = create_test_site(pool).await?;
@@ -57,11 +57,12 @@ mod tests {
     source::captcha_answer::{CaptchaAnswer, CaptchaAnswerForm, CheckCaptchaAnswer},
     utils::build_db_pool_for_tests,
   };
+  use lemmy_utils::error::LemmyResult;
   use serial_test::serial;

   #[tokio::test]
   #[serial]
-  async fn test_captcha_happy_path() {
+  async fn test_captcha_happy_path() -> LemmyResult<()> {
     let pool = &build_db_pool_for_tests();
     let pool = &mut pool.into();

@@ -71,8 +72,7 @@ mod tests {
         answer: "XYZ".to_string(),
       },
     )
-    .await
-    .expect("should not fail to insert captcha");
+    .await?;

     let result = CaptchaAnswer::check_captcha(
       pool,
@@ -84,11 +84,12 @@ mod tests {
     .await;

     assert!(result.is_ok());
+    Ok(())
   }

   #[tokio::test]
   #[serial]
-  async fn test_captcha_repeat_answer_fails() {
+  async fn test_captcha_repeat_answer_fails() -> LemmyResult<()> {
     let pool = &build_db_pool_for_tests();
     let pool = &mut pool.into();

@@ -98,8 +99,7 @@ mod tests {
         answer: "XYZ".to_string(),
       },
     )
-    .await
-    .expect("should not fail to insert captcha");
+    .await?;

     let _result = CaptchaAnswer::check_captcha(
       pool,
@@ -120,5 +120,7 @@ mod tests {
     .await;

     assert!(result_repeat.is_err());
+
+    Ok(())
   }
 }
@@ -26,19 +26,19 @@ use diesel::{
   QueryDsl,
 };
 use diesel_async::RunQueryDsl;
-use lemmy_utils::error::{LemmyErrorType, LemmyResult};
+use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult};

 impl LocalUser {
   pub async fn create(
     pool: &mut DbPool<'_>,
     form: &LocalUserInsertForm,
     languages: Vec<LanguageId>,
-  ) -> Result<LocalUser, Error> {
+  ) -> LemmyResult<LocalUser> {
     let conn = &mut get_conn(pool).await?;
     let mut form_with_encrypted_password = form.clone();

     if let Some(password_encrypted) = &form.password_encrypted {
-      let password_hash = hash(password_encrypted, DEFAULT_COST).expect("Couldn't hash password");
+      let password_hash = hash(password_encrypted, DEFAULT_COST)?;
       form_with_encrypted_password.password_encrypted = Some(password_hash);
     }

@@ -84,14 +84,15 @@ impl LocalUser {
     pool: &mut DbPool<'_>,
     local_user_id: LocalUserId,
     new_password: &str,
-  ) -> Result<Self, Error> {
+  ) -> LemmyResult<Self> {
     let conn = &mut get_conn(pool).await?;
-    let password_hash = hash(new_password, DEFAULT_COST).expect("Couldn't hash password");
+    let password_hash = hash(new_password, DEFAULT_COST)?;

     diesel::update(local_user::table.find(local_user_id))
       .set((local_user::password_encrypted.eq(password_hash),))
       .get_result::<Self>(conn)
       .await
+      .with_lemmy_type(LemmyErrorType::CouldntUpdateUser)
   }

   pub async fn set_all_users_email_verified(pool: &mut DbPool<'_>) -> Result<Vec<Self>, Error> {
@@ -115,9 +115,7 @@ impl Post {
       .filter(post::local.eq(true))
       .filter(post::deleted.eq(false))
       .filter(post::removed.eq(false))
-      .filter(
-        post::published.ge(Utc::now().naive_utc() - SITEMAP_DAYS.expect("TimeDelta out of bounds")),
-      )
+      .filter(post::published.ge(Utc::now().naive_utc() - SITEMAP_DAYS))
       .order(post::published.desc())
      .limit(SITEMAP_LIMIT)
       .load::<(DbUrl, chrono::DateTime<Utc>)>(conn)
@@ -47,6 +47,7 @@ pub mod tagline;
 /// This is necessary so they can be successfully deserialized from API responses, even though the
 /// value is not sent by Lemmy. Necessary for crates which rely on Rust API such as
 /// lemmy-stats-crawler.
+#[allow(clippy::expect_used)]
 fn placeholder_apub_url() -> DbUrl {
   DbUrl(Box::new(
     Url::parse("http://example.com").expect("parse placeholder url"),
@@ -68,7 +68,7 @@ use url::Url;
 const FETCH_LIMIT_DEFAULT: i64 = 10;
 pub const FETCH_LIMIT_MAX: i64 = 50;
 pub const SITEMAP_LIMIT: i64 = 50000;
-pub const SITEMAP_DAYS: Option<TimeDelta> = TimeDelta::try_days(31);
+pub const SITEMAP_DAYS: TimeDelta = TimeDelta::days(31);
 pub const RANK_DEFAULT: f64 = 0.0001;

 /// Some connection options to speed up queries
@@ -360,8 +360,8 @@ pub fn diesel_url_create(opt: Option<&str>) -> LemmyResult<Option<DbUrl>> {
 }

 /// Sets a few additional config options necessary for starting lemmy
-fn build_config_options_uri_segment(config: &str) -> String {
-  let mut url = Url::parse(config).expect("Couldn't parse postgres connection URI");
+fn build_config_options_uri_segment(config: &str) -> LemmyResult<String> {
+  let mut url = Url::parse(config)?;

   // Set `lemmy.protocol_and_hostname` so triggers can use it
   let lemmy_protocol_and_hostname_option =
@@ -377,7 +377,7 @@ fn build_config_options_uri_segment(config: &str) -> String {
     .join(" ");

   url.set_query(Some(&format!("options={options_segments}")));
-  url.into()
+  Ok(url.into())
 }

 fn establish_connection(config: &str) -> BoxFuture<ConnectionResult<AsyncPgConnection>> {
@@ -385,8 +385,11 @@ fn establish_connection(config: &str) -> BoxFuture<ConnectionResult<AsyncPgConne
   /// Use a once_lock to create the postgres connection config, since this config never changes
   static POSTGRES_CONFIG_WITH_OPTIONS: OnceLock<String> = OnceLock::new();

-  let config =
-    POSTGRES_CONFIG_WITH_OPTIONS.get_or_init(|| build_config_options_uri_segment(config));
+  let config = POSTGRES_CONFIG_WITH_OPTIONS.get_or_init(|| {
+    build_config_options_uri_segment(config)
+      .inspect_err(|e| error!("Couldn't parse postgres connection URI: {e}"))
+      .unwrap_or_default()
+  });

   // We only support TLS with sslmode=require currently
   let conn = if config.contains("sslmode=require") {
@@ -495,6 +498,7 @@ pub fn build_db_pool() -> LemmyResult<ActualDbPool> {
   Ok(pool)
 }

+#[allow(clippy::expect_used)]
 pub fn build_db_pool_for_tests() -> ActualDbPool {
   build_db_pool().expect("db pool missing")
 }
@@ -511,6 +515,7 @@ pub fn post_to_comment_sort_type(sort: PostSortType) -> CommentSortType {
   }
 }

+#[allow(clippy::expect_used)]
 static EMAIL_REGEX: LazyLock<Regex> = LazyLock::new(|| {
   Regex::new(r"^[a-zA-Z0-9.!#$%&’*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$")
     .expect("compile email regex")
@@ -20,7 +20,7 @@ use lemmy_db_schema::{
     ReadFn,
   },
 };
-use lemmy_utils::error::{LemmyError, LemmyErrorType};
+use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType, LemmyResult};
 use std::future::{ready, Ready};

 enum ReadBy<'a> {
@@ -146,7 +146,7 @@ impl LocalUserView {
     name: &str,
     bio: &str,
     admin: bool,
-  ) -> Result<Self, Error> {
+  ) -> LemmyResult<Self> {
     let instance_id = Instance::read_or_create(pool, "example.com".to_string())
       .await?
       .id;
@@ -163,7 +163,9 @@ impl LocalUserView {
     };
     let local_user = LocalUser::create(pool, &user_form, vec![]).await?;

-    LocalUserView::read(pool, local_user.id).await
+    LocalUserView::read(pool, local_user.id)
+      .await
+      .with_lemmy_type(LemmyErrorType::NotFound)
   }
 }
@@ -501,6 +501,7 @@ pub struct PostQuery<'a> {
 }

 impl<'a> PostQuery<'a> {
+  #[allow(clippy::expect_used)]
   async fn prefetch_upper_bound_for_page_before(
     &self,
     site: &Site,
@@ -286,7 +286,7 @@ mod tests {
     CommunityVisibility,
     SubscribedType,
   };
-  use lemmy_utils::error::LemmyResult;
+  use lemmy_utils::error::{LemmyErrorType, LemmyResult};
   use serial_test::serial;
   use url::Url;

@@ -495,7 +495,7 @@ mod tests {
     };
     let communities = query.list(&data.site, pool).await?;
     for (i, c) in communities.iter().enumerate().skip(1) {
-      let prev = communities.get(i - 1).expect("No previous community?");
+      let prev = communities.get(i - 1).ok_or(LemmyErrorType::NotFound)?;
       assert!(c.community.title.cmp(&prev.community.title).is_ge());
     }

@@ -505,7 +505,7 @@ mod tests {
     };
     let communities = query.list(&data.site, pool).await?;
     for (i, c) in communities.iter().enumerate().skip(1) {
-      let prev = communities.get(i - 1).expect("No previous community?");
+      let prev = communities.get(i - 1).ok_or(LemmyErrorType::NotFound)?;
       assert!(c.community.title.cmp(&prev.community.title).is_le());
     }

@@ -23,6 +23,7 @@ use std::{
 /// currently fairly high because of the current structure of storing inboxes for every person, not
 /// having a separate list of shared_inboxes, and the architecture of having every instance queue be
 /// fully separate. (see https://github.com/LemmyNet/lemmy/issues/3958)
+#[allow(clippy::expect_used)]
 static FOLLOW_ADDITIONS_RECHECK_DELAY: LazyLock<chrono::TimeDelta> = LazyLock::new(|| {
   if *LEMMY_TEST_FAST_FEDERATION {
     chrono::TimeDelta::try_seconds(1).expect("TimeDelta out of bounds")
@@ -33,6 +34,7 @@ static FOLLOW_ADDITIONS_RECHECK_DELAY: LazyLock<chrono::TimeDelta> = LazyLock::n
 /// The same as FOLLOW_ADDITIONS_RECHECK_DELAY, but triggering when the last person on an instance
 /// unfollows a specific remote community. This is expected to happen pretty rarely and updating it
 /// in a timely manner is not too important.
+#[allow(clippy::expect_used)]
 static FOLLOW_REMOVALS_RECHECK_DELAY: LazyLock<chrono::TimeDelta> =
   LazyLock::new(|| chrono::TimeDelta::try_hours(1).expect("TimeDelta out of bounds"));

@@ -472,6 +474,7 @@ mod tests {
     Ok(())
   }

+  #[allow(clippy::expect_used)]
   #[tokio::test]
   async fn test_update_communities() -> LemmyResult<()> {
     let mut collector = setup_collector();
@@ -331,7 +331,7 @@ impl InstanceWorker {
       self.state.last_successful_published_time = next.published;
     }

-    let save_state_every = chrono::Duration::from_std(SAVE_STATE_EVERY_TIME).expect("not negative");
+    let save_state_every = chrono::Duration::from_std(SAVE_STATE_EVERY_TIME)?;
     if force_write || (Utc::now() - self.last_state_insert) > save_state_every {
       self.save_and_send_state().await?;
     }
@@ -312,12 +312,16 @@ where
 }

 // TODO: remove these conversions after actix-web upgrades to http 1.0
+#[allow(clippy::expect_used)]
 fn convert_status(status: http::StatusCode) -> StatusCode {
   StatusCode::from_u16(status.as_u16()).expect("status can be converted")
 }

+#[allow(clippy::expect_used)]
 fn convert_method(method: &Method) -> http::Method {
   http::Method::from_bytes(method.as_str().as_bytes()).expect("method can be converted")
 }

 fn convert_header<'a>(name: &'a http::HeaderName, value: &'a HeaderValue) -> (&'a str, &'a [u8]) {
   (name.as_str(), value.as_bytes())
 }
@@ -3,13 +3,16 @@ use activitypub_federation::{
   fetch::webfinger::{extract_webfinger_name, Webfinger, WebfingerLink, WEBFINGER_CONTENT_TYPE},
 };
 use actix_web::{web, web::Query, HttpResponse};
-use lemmy_api_common::context::LemmyContext;
+use lemmy_api_common::{context::LemmyContext, LemmyErrorType};
 use lemmy_db_schema::{
   source::{community::Community, person::Person},
   traits::ApubActor,
   CommunityVisibility,
 };
-use lemmy_utils::{cache_header::cache_3days, error::LemmyResult};
+use lemmy_utils::{
+  cache_header::cache_3days,
+  error::{LemmyErrorExt, LemmyResult},
+};
 use serde::Deserialize;
 use std::collections::HashMap;
 use url::Url;
@@ -41,7 +44,7 @@ async fn get_webfinger_response(
   let links = if name == context.settings().hostname {
     // webfinger response for instance actor (required for mastodon authorized fetch)
     let url = Url::parse(&context.settings().get_protocol_and_hostname())?;
-    vec![webfinger_link_for_actor(Some(url), "none", &context)]
+    vec![webfinger_link_for_actor(Some(url), "none", &context)?]
   } else {
     // webfinger response for user/community
     let user_id: Option<Url> = Person::read_from_name(&mut context.pool(), name, false)
@@ -65,8 +68,8 @@ async fn get_webfinger_response(
     // Mastodon seems to prioritize the last webfinger item in case of duplicates. Put
     // community last so that it gets prioritized. For Lemmy the order doesn't matter.
     vec![
-      webfinger_link_for_actor(user_id, "Person", &context),
-      webfinger_link_for_actor(community_id, "Group", &context),
+      webfinger_link_for_actor(user_id, "Person", &context)?,
+      webfinger_link_for_actor(community_id, "Group", &context)?,
     ]
   }
   .into_iter()
@@ -94,11 +97,11 @@ fn webfinger_link_for_actor(
   url: Option<Url>,
   kind: &str,
   context: &LemmyContext,
-) -> Vec<WebfingerLink> {
+) -> LemmyResult<Vec<WebfingerLink>> {
   if let Some(url) = url {
     let type_key = "https://www.w3.org/ns/activitystreams#type"
       .parse()
-      .expect("parse url");
+      .with_lemmy_type(LemmyErrorType::InvalidUrl)?;

     let mut vec = vec![
       WebfingerLink {
@@ -128,8 +131,8 @@ fn webfinger_link_for_actor(
         ..Default::default()
       });
     }
-    vec
+    Ok(vec)
   } else {
-    vec![]
+    Ok(vec![])
   }
 }
|
@ -33,7 +33,7 @@ pub async fn send_email(
|
||||||
let email_and_port = email_config.smtp_server.split(':').collect::<Vec<&str>>();
|
let email_and_port = email_config.smtp_server.split(':').collect::<Vec<&str>>();
|
||||||
let email = *email_and_port
|
let email = *email_and_port
|
||||||
.first()
|
.first()
|
||||||
.ok_or(LemmyErrorType::MissingAnEmail)?;
|
.ok_or(LemmyErrorType::EmailRequired)?;
|
||||||
let port = email_and_port
|
let port = email_and_port
|
||||||
.get(1)
|
.get(1)
|
||||||
.ok_or(LemmyErrorType::EmailSmtpServerNeedsAPort)?
|
.ok_or(LemmyErrorType::EmailSmtpServerNeedsAPort)?
|
||||||
|
@ -45,16 +45,20 @@ pub async fn send_email(
|
||||||
// use usize::MAX as the line wrap length, since lettre handles the wrapping for us
|
// use usize::MAX as the line wrap length, since lettre handles the wrapping for us
|
||||||
let plain_text = html2text::from_read(html.as_bytes(), usize::MAX);
|
let plain_text = html2text::from_read(html.as_bytes(), usize::MAX);
|
||||||
|
|
||||||
|
let smtp_from_address = &email_config.smtp_from_address;
|
||||||
|
|
||||||
let email = Message::builder()
|
let email = Message::builder()
|
||||||
.from(
|
.from(
|
||||||
email_config
|
smtp_from_address
|
||||||
.smtp_from_address
|
|
||||||
.parse()
|
.parse()
|
||||||
.expect("email from address isn't valid"),
|
.with_lemmy_type(LemmyErrorType::InvalidEmailAddress(
|
||||||
|
smtp_from_address.into(),
|
||||||
|
))?,
|
||||||
)
|
)
|
||||||
.to(Mailbox::new(
|
.to(Mailbox::new(
|
||||||
Some(to_username.to_string()),
|
Some(to_username.to_string()),
|
||||||
Address::from_str(to_email).expect("email to address isn't valid"),
|
Address::from_str(to_email)
|
||||||
|
.with_lemmy_type(LemmyErrorType::InvalidEmailAddress(to_email.into()))?,
|
||||||
))
|
))
|
||||||
.message_id(Some(format!("<{}@{}>", Uuid::new_v4(), settings.hostname)))
|
.message_id(Some(format!("<{}@{}>", Uuid::new_v4(), settings.hostname)))
|
||||||
.subject(subject)
|
.subject(subject)
|
||||||
|
@ -62,7 +66,7 @@ pub async fn send_email(
|
||||||
plain_text,
|
plain_text,
|
||||||
html.to_string(),
|
html.to_string(),
|
||||||
))
|
))
|
||||||
.expect("email built incorrectly");
|
.with_lemmy_type(LemmyErrorType::EmailSendFailed)?;
|
||||||
|
|
||||||
// don't worry about 'dangeous'. it's just that leaving it at the default configuration
|
// don't worry about 'dangeous'. it's just that leaving it at the default configuration
|
||||||
// is bad.
|
// is bad.
|
||||||
|
|
|
@@ -73,7 +73,7 @@ pub enum LemmyErrorType {
   NoEmailSetup,
   LocalSiteNotSetup,
   EmailSmtpServerNeedsAPort,
-  MissingAnEmail,
+  InvalidEmailAddress(String),
   RateLimitError,
   InvalidName,
   InvalidDisplayName,
@@ -129,6 +129,7 @@ pub enum LemmyErrorType {
   InvalidRegex,
   CaptchaIncorrect,
   CouldntCreateAudioCaptcha,
+  CouldntCreateImageCaptcha,
   InvalidUrlScheme,
   CouldntSendWebmention,
   ContradictingFilters,
@@ -185,6 +186,7 @@ pub enum FederationError {
   CantDeleteSite,
   ObjectIsNotPublic,
   ObjectIsNotPrivate,
+  Unreachable,
 }

 cfg_if! {
@@ -29,6 +29,7 @@ pub struct RateLimitCell {
   state: Arc<Mutex<RateLimitState>>,
 }

+#[allow(clippy::expect_used)]
 impl RateLimitCell {
   pub fn new(rate_limit_config: EnumMap<ActionType, BucketConfig>) -> Self {
     let state = Arc::new(Mutex::new(RateLimitState::new(rate_limit_config)));
@@ -133,6 +134,7 @@ pub struct RateLimitedMiddleware<S> {
   service: Rc<S>,
 }

+#[allow(clippy::expect_used)]
 impl RateLimitChecker {
   /// Returns true if the request passed the rate limit, false if it failed and should be rejected.
   pub fn check(self, ip_addr: IpAddr) -> bool {
@@ -18,6 +18,7 @@ pub struct InstantSecs {
   secs: u32,
 }

+#[allow(clippy::expect_used)]
 impl InstantSecs {
   pub fn now() -> Self {
     InstantSecs {
@@ -10,6 +10,7 @@ where
 }

 #[tracing::instrument(skip_all)]
+#[allow(clippy::expect_used)]
 async fn retry_custom<F, Fut, T>(f: F) -> Result<T, reqwest_middleware::Error>
 where
   F: Fn() -> Fut,
@@ -11,19 +11,20 @@ pub fn jsonify_plain_text_errors<BODY>(
     return Ok(ErrorHandlerResponse::Response(res.map_into_left_body()));
   }
   // We're assuming that any LemmyError is already in JSON format, so we don't need to do anything
-  if maybe_error
-    .expect("http responses with 400-599 statuses should have an error object")
-    .as_error::<LemmyError>()
-    .is_some()
-  {
-    return Ok(ErrorHandlerResponse::Response(res.map_into_left_body()));
+  if let Some(maybe_error) = maybe_error {
+    if maybe_error.as_error::<LemmyError>().is_some() {
+      return Ok(ErrorHandlerResponse::Response(res.map_into_left_body()));
+    }
   }

-  let (req, res) = res.into_parts();
-  let error = res
-    .error()
-    .expect("expected an error object in the response");
-  let response = HttpResponse::build(res.status()).json(LemmyErrorType::Unknown(error.to_string()));
+  let (req, res_parts) = res.into_parts();
+  let lemmy_err_type = if let Some(error) = res_parts.error() {
+    LemmyErrorType::Unknown(error.to_string())
+  } else {
+    LemmyErrorType::Unknown("couldnt build json".into())
+  };
+
+  let response = HttpResponse::build(res_parts.status()).json(lemmy_err_type);

   let service_response = ServiceResponse::new(req, response);
   Ok(ErrorHandlerResponse::Response(
@@ -3,6 +3,7 @@ use anyhow::{anyhow, Context};
 use deser_hjson::from_str;
 use regex::Regex;
 use std::{env, fs, io::Error, sync::LazyLock};
+use url::Url;
 use urlencoding::encode;

 pub mod structs;
@@ -11,6 +12,7 @@ use structs::{DatabaseConnection, PictrsConfig, PictrsImageMode, Settings};

 static DEFAULT_CONFIG_FILE: &str = "config/config.hjson";

+#[allow(clippy::expect_used)]
 pub static SETTINGS: LazyLock<Settings> = LazyLock::new(|| {
   if env::var("LEMMY_INITIALIZE_WITH_DEFAULT_SETTINGS").is_ok() {
     println!(
@@ -23,6 +25,7 @@ pub static SETTINGS: LazyLock<Settings> = LazyLock::new(|| {
   }
 });

+#[allow(clippy::expect_used)]
 static WEBFINGER_REGEX: LazyLock<Regex> = LazyLock::new(|| {
   Regex::new(&format!(
     "^acct:([a-zA-Z0-9_]{{3,}})@{}$",
@@ -128,3 +131,9 @@ impl PictrsConfig {
     }
   }
 }
+
+#[allow(clippy::expect_used)]
+/// Necessary to avoid URL expect failures
+fn pictrs_placeholder_url() -> Url {
+  Url::parse("http://localhost:8080").expect("parse pictrs url")
+}
@@ -1,3 +1,4 @@
+use super::pictrs_placeholder_url;
 use doku::Document;
 use serde::{Deserialize, Serialize};
 use smart_default::SmartDefault;
@@ -68,7 +69,7 @@ impl Settings {
 #[serde(default, deny_unknown_fields)]
 pub struct PictrsConfig {
   /// Address where pictrs is available (for image hosting)
-  #[default(Url::parse("http://localhost:8080").expect("parse pictrs url"))]
+  #[default(pictrs_placeholder_url())]
   #[doku(example = "http://localhost:8080")]
   pub url: Url,

@@ -58,11 +58,13 @@ fn find_urls<T: NodeValue + UrlAndTitle>(src: &str) -> Vec<(usize, usize)> {
   let mut links_offsets = vec![];
   ast.walk(|node, _depth| {
     if let Some(image) = node.cast::<T>() {
-      let (_, node_offset) = node.srcmap.expect("srcmap is none").get_byte_offsets();
-      let start_offset = node_offset - image.url_len() - 1 - image.title_len();
-      let end_offset = node_offset - 1;
+      if let Some(srcmap) = node.srcmap {
+        let (_, node_offset) = srcmap.get_byte_offsets();
+        let start_offset = node_offset - image.url_len() - 1 - image.title_len();
+        let end_offset = node_offset - 1;

-      links_offsets.push((start_offset, end_offset));
+        links_offsets.push((start_offset, end_offset));
+      }
     }
   });
   links_offsets
@@ -2,6 +2,7 @@ use itertools::Itertools;
 use regex::Regex;
 use std::sync::LazyLock;

+#[allow(clippy::expect_used)]
 static MENTIONS_REGEX: LazyLock<Regex> = LazyLock::new(|| {
   Regex::new(r"@(?P<name>[\w.]+)@(?P<domain>[a-zA-Z0-9._:-]+)").expect("compile regex")
 });
@ -1,8 +1,8 @@
|
||||||
use crate::error::{LemmyErrorExt, LemmyErrorType, LemmyResult};
|
use crate::error::{LemmyErrorExt, LemmyErrorType, LemmyResult};
|
||||||
use regex::{Regex, RegexBuilder};
|
use regex::{Regex, RegexBuilder};
|
||||||
|
|
||||||
pub fn remove_slurs(test: &str, slur_regex: &Option<Regex>) -> String {
|
pub fn remove_slurs(test: &str, slur_regex: &Option<LemmyResult<Regex>>) -> String {
|
||||||
if let Some(slur_regex) = slur_regex {
|
if let Some(Ok(slur_regex)) = slur_regex {
|
||||||
slur_regex.replace_all(test, "*removed*").to_string()
|
slur_regex.replace_all(test, "*removed*").to_string()
|
||||||
} else {
|
} else {
|
||||||
test.to_string()
|
test.to_string()
|
||||||
|
@@ -11,9 +11,9 @@ pub fn remove_slurs(test: &str, slur_regex: &Option<Regex>) -> String {

 pub(crate) fn slur_check<'a>(
   test: &'a str,
-  slur_regex: &'a Option<Regex>,
+  slur_regex: &'a Option<LemmyResult<Regex>>,
 ) -> Result<(), Vec<&'a str>> {
-  if let Some(slur_regex) = slur_regex {
+  if let Some(Ok(slur_regex)) = slur_regex {
     let mut matches: Vec<&str> = slur_regex.find_iter(test).map(|mat| mat.as_str()).collect();

     // Unique
@@ -30,16 +30,16 @@ pub(crate) fn slur_check<'a>(
   }
 }

-pub fn build_slur_regex(regex_str: Option<&str>) -> Option<Regex> {
+pub fn build_slur_regex(regex_str: Option<&str>) -> Option<LemmyResult<Regex>> {
   regex_str.map(|slurs| {
     RegexBuilder::new(slurs)
       .case_insensitive(true)
       .build()
-      .expect("compile regex")
+      .with_lemmy_type(LemmyErrorType::InvalidRegex)
   })
 }

-pub fn check_slurs(text: &str, slur_regex: &Option<Regex>) -> LemmyResult<()> {
+pub fn check_slurs(text: &str, slur_regex: &Option<LemmyResult<Regex>>) -> LemmyResult<()> {
   if let Err(slurs) = slur_check(text, slur_regex) {
     Err(anyhow::anyhow!("{}", slurs_vec_to_str(&slurs))).with_lemmy_type(LemmyErrorType::Slurs)
   } else {
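`build_slur_regex` now returns `Option<LemmyResult<Regex>>` instead of panicking on a bad pattern, and callers such as `remove_slurs` and `slur_check` match on `Some(Ok(..))`. A rough illustration of the three states such a value can take, with plain `regex::Error` standing in for the crate's `LemmyResult`:

```rust
use regex::Regex;

// Illustrative only: None = no filter configured, Some(Err) = configured but the
// pattern failed to compile, Some(Ok) = a usable filter.
fn describe_filter(slur_regex: &Option<Result<Regex, regex::Error>>) -> &'static str {
  match slur_regex {
    None => "no slur filter configured",
    Some(Err(_)) => "slur filter configured but invalid",
    Some(Ok(_)) => "slur filter ready",
  }
}
```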
@@ -47,7 +47,10 @@ pub fn check_slurs(text: &str, slur_regex: &Option<Regex>) -> LemmyResult<()> {
   }
 }

-pub fn check_slurs_opt(text: &Option<String>, slur_regex: &Option<Regex>) -> LemmyResult<()> {
+pub fn check_slurs_opt(
+  text: &Option<String>,
+  slur_regex: &Option<LemmyResult<Regex>>,
+) -> LemmyResult<()> {
   match text {
     Some(t) => check_slurs(t, slur_regex),
     None => Ok(()),
@@ -64,7 +67,7 @@ pub(crate) fn slurs_vec_to_str(slurs: &[&str]) -> String {
 mod test {

   use crate::{
-    error::LemmyResult,
+    error::{LemmyErrorExt, LemmyErrorType, LemmyResult},
     utils::slurs::{remove_slurs, slur_check, slurs_vec_to_str},
   };
   use pretty_assertions::assert_eq;
@@ -72,7 +75,7 @@ mod test {

   #[test]
   fn test_slur_filter() -> LemmyResult<()> {
-    let slur_regex = Some(RegexBuilder::new(r"(fag(g|got|tard)?\b|cock\s?sucker(s|ing)?|ni((g{2,}|q)+|[gq]{2,})[e3r]+(s|z)?|mudslime?s?|kikes?|\bspi(c|k)s?\b|\bchinks?|gooks?|bitch(es|ing|y)?|whor(es?|ing)|\btr(a|@)nn?(y|ies?)|\b(b|re|r)tard(ed)?s?)").case_insensitive(true).build()?);
+    let slur_regex = Some(RegexBuilder::new(r"(fag(g|got|tard)?\b|cock\s?sucker(s|ing)?|ni((g{2,}|q)+|[gq]{2,})[e3r]+(s|z)?|mudslime?s?|kikes?|\bspi(c|k)s?\b|\bchinks?|gooks?|bitch(es|ing|y)?|whor(es?|ing)|\btr(a|@)nn?(y|ies?)|\b(b|re|r)tard(ed)?s?)").case_insensitive(true).build().with_lemmy_type(LemmyErrorType::InvalidRegex));
     let test =
       "faggot test kike tranny cocksucker retardeds. Capitalized Niggerz. This is a bunch of other safe text.";
     let slur_free = "No slurs here";
@@ -6,11 +6,13 @@ use std::sync::LazyLock;
 use url::{ParseError, Url};

 // From here: https://github.com/vector-im/element-android/blob/develop/matrix-sdk-android/src/main/java/org/matrix/android/sdk/api/MatrixPatterns.kt#L35
+#[allow(clippy::expect_used)]
 static VALID_MATRIX_ID_REGEX: LazyLock<Regex> = LazyLock::new(|| {
   Regex::new(r"^@[A-Za-z0-9\x21-\x39\x3B-\x7F]+:[A-Za-z0-9.-]+(:[0-9]{2,5})?$")
     .expect("compile regex")
 });
 // taken from https://en.wikipedia.org/wiki/UTM_parameters
+#[allow(clippy::expect_used)]
 static URL_CLEANER: LazyLock<UrlCleaner> =
   LazyLock::new(|| UrlCleaner::from_embedded_rules().expect("compile clearurls"));
 const ALLOWED_POST_URL_SCHEMES: [&str; 3] = ["http", "https", "magnet"];
@@ -88,6 +90,7 @@ pub fn is_valid_actor_name(name: &str, actor_name_max_length: usize) -> LemmyRes
 // Only allow characters from a single alphabet per username. This avoids problems with lookalike
 // characters like `o` which looks identical in Latin and Cyrillic, and can be used to imitate
 // other users. Checks for additional alphabets can be added in the same way.
+#[allow(clippy::expect_used)]
 static VALID_ACTOR_NAME_REGEX: LazyLock<Regex> = LazyLock::new(|| {
   Regex::new(r"^(?:[a-zA-Z0-9_]+|[0-9_\p{Arabic}]+|[0-9_\p{Cyrillic}]+)$").expect("compile regex")
 });
@@ -218,33 +221,32 @@ fn min_length_check(item: &str, min_length: usize, min_msg: LemmyErrorType) -> L
 }

 /// Attempts to build a regex and check it for common errors before inserting into the DB.
-pub fn build_and_check_regex(regex_str_opt: &Option<&str>) -> LemmyResult<Option<Regex>> {
-  regex_str_opt.map_or_else(
-    || Ok(None::<Regex>),
-    |regex_str| {
-      if regex_str.is_empty() {
-        // If the proposed regex is empty, return as having no regex at all; this is the same
-        // behavior that happens downstream before the write to the database.
-        return Ok(None::<Regex>);
-      }
-
-      RegexBuilder::new(regex_str)
-        .case_insensitive(true)
-        .build()
-        .with_lemmy_type(LemmyErrorType::InvalidRegex)
-        .and_then(|regex| {
-          // NOTE: It is difficult to know, in the universe of user-crafted regex, which ones
-          // may match against any string text. To keep it simple, we'll match the regex
-          // against an innocuous string - a single number - which should help catch a regex
-          // that accidentally matches against all strings.
-          if regex.is_match("1") {
-            Err(LemmyErrorType::PermissiveRegex.into())
-          } else {
-            Ok(Some(regex))
-          }
-        })
-    },
-  )
+pub fn build_and_check_regex(regex_str_opt: &Option<&str>) -> Option<LemmyResult<Regex>> {
+  if let Some(regex) = regex_str_opt {
+    if regex.is_empty() {
+      None
+    } else {
+      Some(
+        RegexBuilder::new(regex)
+          .case_insensitive(true)
+          .build()
+          .with_lemmy_type(LemmyErrorType::InvalidRegex)
+          .and_then(|regex| {
+            // NOTE: It is difficult to know, in the universe of user-crafted regex, which ones
+            // may match against any string text. To keep it simple, we'll match the regex
+            // against an innocuous string - a single number - which should help catch a regex
+            // that accidentally matches against all strings.
+            if regex.is_match("1") {
+              Err(LemmyErrorType::PermissiveRegex.into())
+            } else {
+              Ok(regex)
+            }
+          }),
+      )
+    }
+  } else {
+    None
+  }
 }

 /// Cleans a url of tracking parameters.
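The rewritten `build_and_check_regex` keeps the same checks but moves the "is a regex configured at all" question into the `Option` layer: absent or empty input yields `None`, a pattern that fails to compile yields `Some(Err(..))`, and a pattern that matches the innocuous probe string `"1"` is rejected as overly permissive. A compact stand-in with the same shape, using `String` errors instead of `LemmyResult` (illustrative, not the crate's code):

```rust
use regex::{Regex, RegexBuilder};

// Hypothetical stand-in for the rewritten helper.
fn build_checked(pattern: Option<&str>) -> Option<Result<Regex, String>> {
  // Treat an absent or empty pattern as "no regex configured".
  let pattern = pattern.filter(|p| !p.is_empty())?;
  Some(
    RegexBuilder::new(pattern)
      .case_insensitive(true)
      .build()
      .map_err(|e| format!("invalid regex: {e}"))
      .and_then(|re| {
        // A regex that matches a bare "1" almost certainly matches everything.
        if re.is_match("1") {
          Err("overly permissive regex".to_string())
        } else {
          Ok(re)
        }
      }),
  )
}
```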
@@ -565,13 +567,27 @@ Line3",

   #[test]
   fn test_valid_slur_regex() {
-    let valid_regexes = [&None, &Some(""), &Some("(foo|bar)")];
-
-    valid_regexes.iter().for_each(|regex| {
-      let result = build_and_check_regex(regex);
-
-      assert!(result.is_ok(), "Testing regex: {:?}", regex);
-    });
+    let valid_regex = Some("(foo|bar)");
+    let result = build_and_check_regex(&valid_regex);
+    assert!(
+      result.is_some_and(|x| x.is_ok()),
+      "Testing regex: {:?}",
+      valid_regex
+    );
+  }
+
+  #[test]
+  fn test_missing_slur_regex() {
+    let missing_regex = None;
+    let result = build_and_check_regex(&missing_regex);
+    assert!(result.is_none());
+  }
+
+  #[test]
+  fn test_empty_slur_regex() {
+    let empty = Some("");
+    let result = build_and_check_regex(&empty);
+    assert!(result.is_none());
   }

   #[test]
@@ -587,9 +603,9 @@ Line3",
       .for_each(|(regex_str, expected_err)| {
         let result = build_and_check_regex(regex_str);

-        assert!(result.is_err());
+        assert!(result.as_ref().is_some_and(Result::is_err));
         assert!(
-          result.is_err_and(|e| e.error_type.eq(&expected_err.clone())),
+          result.is_some_and(|x| x.is_err_and(|e| e.error_type.eq(&expected_err.clone()))),
           "Testing regex {:?}, expected error {}",
           regex_str,
           expected_err
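The updated assertions lean on the `Option`/`Result` combinators stabilized in Rust 1.70, which compose the "is it present" and "did it fail" checks without unwrapping. A quick illustration of how they behave:

```rust
fn combinator_demo() {
  let ok: Option<Result<i32, &str>> = Some(Ok(1));
  let err: Option<Result<i32, &str>> = Some(Err("boom"));

  // Borrow first when the value is needed again afterwards.
  assert!(err.as_ref().is_some_and(|x| x.is_err()));

  assert!(ok.is_some_and(|x| x.is_ok()));
  assert!(err.is_some_and(|x| x.is_err_and(|e| e == "boom")));
}
```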
@@ -29,7 +29,10 @@ use lemmy_db_schema::{
   traits::Crud,
   utils::{get_conn, DbPool},
 };
-use lemmy_utils::{error::LemmyResult, settings::structs::Settings};
+use lemmy_utils::{
+  error::{LemmyErrorExt, LemmyErrorType, LemmyResult},
+  settings::structs::Settings,
+};
 use tracing::info;
 use url::Url;

@@ -421,7 +424,7 @@ async fn initialize_local_site_2022_10_10(

   let domain = settings
     .get_hostname_without_port()
-    .expect("must have domain");
+    .with_lemmy_type(LemmyErrorType::Unknown("must have domain".into()))?;

   // Upsert this to the instance table
   let instance = Instance::read_or_create(pool, domain).await?;
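`with_lemmy_type` comes from the `LemmyErrorExt` trait imported above; instead of panicking with `expect`, it tags whatever error the underlying call produced with a domain error type and lets `?` propagate it. Roughly the shape of such an extension trait, as an approximate sketch rather than the crate's actual definition:

```rust
use std::fmt::Display;

// Approximate sketch of an error-tagging extension trait; the real LemmyErrorExt
// differs in its error type and implementation details.
trait WithErrorKind<T> {
  fn with_kind(self, kind: &'static str) -> Result<T, String>;
}

impl<T, E: Display> WithErrorKind<T> for Result<T, E> {
  fn with_kind(self, kind: &'static str) -> Result<T, String> {
    self.map_err(|e| format!("{kind}: {e}"))
  }
}
```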
@@ -37,7 +37,7 @@ use lemmy_db_schema::{source::secret::Secret, utils::build_db_pool};
 use lemmy_federate::{Opts, SendManager};
 use lemmy_routes::{feeds, images, nodeinfo, webfinger};
 use lemmy_utils::{
-  error::LemmyResult,
+  error::{LemmyErrorType, LemmyResult},
   rate_limit::RateLimitCell,
   response::jsonify_plain_text_errors,
   settings::{structs::Settings, SETTINGS},
@@ -178,7 +178,8 @@ pub async fn start_lemmy_server(args: CmdArgs) -> LemmyResult<()> {
     .set(Box::new(move |d, c| {
       Box::pin(match_outgoing_activities(d, c))
     }))
-    .expect("set function pointer");
+    .map_err(|_| LemmyErrorType::Unknown("couldnt set function pointer".into()))?;

   let request_data = federation_config.to_request_data();
   let outgoing_activities_task = tokio::task::spawn(handle_outgoing_activities(
     request_data.reset_request_count(),
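A `OnceCell`-style `set` returns the rejected value in its `Err` variant rather than an error type, so there is nothing for `?` to convert automatically; the diff maps it to `LemmyErrorType::Unknown` by hand. The same pattern with std's `OnceLock` (illustrative names, not the crate's code):

```rust
use std::sync::OnceLock;

// OnceLock::set returns Err(value) if the cell was already set, and that value is
// not an error type, so it has to be mapped before `?` can be used.
fn install_handler(cell: &OnceLock<fn(i32) -> i32>) -> Result<(), String> {
  cell
    .set(|x| x + 1)
    .map_err(|_| "couldnt set function pointer".to_string())?;
  Ok(())
}
```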
@@ -281,7 +282,7 @@ fn create_http_server(
   let prom_api_metrics = PrometheusMetricsBuilder::new("lemmy_api")
     .registry(default_registry().clone())
     .build()
-    .expect("Should always be buildable");
+    .map_err(|e| LemmyErrorType::Unknown(format!("Should always be buildable: {e}")))?;

   let context: LemmyContext = federation_config.deref().clone();
   let rate_limit_cell = federation_config.rate_limit_cell().clone();
@@ -1,6 +1,6 @@
 use clap::Parser;
 use lemmy_server::{start_lemmy_server, CmdArgs};
-use lemmy_utils::error::LemmyResult;
+use lemmy_utils::error::{LemmyErrorType, LemmyResult};
 use tracing::level_filters::LevelFilter;
 use tracing_subscriber::EnvFilter;

@@ -17,7 +17,7 @@ pub async fn main() -> LemmyResult<()> {

   rustls::crypto::ring::default_provider()
     .install_default()
-    .expect("Failed to install rustls crypto provider");
+    .map_err(|_| LemmyErrorType::Unknown("Failed to install rustls crypto provider".into()))?;

   start_lemmy_server(args).await?;
   Ok(())
@@ -169,10 +169,7 @@ async fn process_ranks_in_batches(
   where_clause: &str,
   set_clause: &str,
 ) {
-  let process_start_time: DateTime<Utc> = Utc
-    .timestamp_opt(0, 0)
-    .single()
-    .expect("0 timestamp creation");
+  let process_start_time: DateTime<Utc> = Utc.timestamp_opt(0, 0).single().unwrap_or_default();

   let update_batch_size = 1000; // Bigger batches than this tend to cause seq scans
   let mut processed_rows_count = 0;
@@ -220,10 +217,7 @@ async fn process_ranks_in_batches(
 /// Post aggregates is a special case, since it needs to join to the community_aggregates
 /// table, to get the active monthly user counts.
 async fn process_post_aggregates_ranks_in_batches(conn: &mut AsyncPgConnection) {
-  let process_start_time: DateTime<Utc> = Utc
-    .timestamp_opt(0, 0)
-    .single()
-    .expect("0 timestamp creation");
+  let process_start_time: DateTime<Utc> = Utc.timestamp_opt(0, 0).single().unwrap_or_default();

   let update_batch_size = 1000; // Bigger batches than this tend to cause seq scans
   let mut processed_rows_count = 0;
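In the two hunks above, `timestamp_opt(0, 0)` returns a `chrono::LocalResult`, `.single()` collapses it to an `Option`, and `unwrap_or_default()` falls back to `DateTime::<Utc>::default()`, which in recent chrono versions is itself the Unix epoch, so the fallback is the value the code wanted anyway and no panic path remains. Spelled out:

```rust
use chrono::{DateTime, TimeZone, Utc};

// Equivalent to the one-liner in the diff; assumes a chrono version where
// DateTime<Utc> implements Default (returning the Unix epoch).
fn batch_start_time() -> DateTime<Utc> {
  Utc.timestamp_opt(0, 0).single().unwrap_or_default()
}
```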