From e26586732bd352374baaf7818a814820f10f7110 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gon=C3=A7alo?= Date: Sat, 14 Jun 2025 10:52:47 +0100 Subject: [PATCH 1/5] Add function to clean urls from tracking query params that are often included in the shared links --- Cargo.lock | 14 ++++++++++++++ Cargo.toml | 1 + src/utils.rs | 20 +++++++++++++++++++- 3 files changed, 34 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 1d881f80..23bc9a4f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -328,6 +328,19 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" +[[package]] +name = "clearurls" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e291c00af89ac0a5b400d9ba46a682e38015ae3cd8926dbbe85b3b864d550be3" +dependencies = [ + "percent-encoding", + "regex", + "serde", + "serde_json", + "url", +] + [[package]] name = "cookie" version = "0.18.1" @@ -1368,6 +1381,7 @@ dependencies = [ "cached", "chrono", "clap", + "clearurls", "cookie", "dotenvy", "fastrand", diff --git a/Cargo.toml b/Cargo.toml index 0596bc29..80cc55da 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -56,6 +56,7 @@ htmlescape = "0.3.1" bincode = "1.3.3" base2048 = "2.0.2" revision = "0.10.0" +clearurls = "0.0.4" [dev-dependencies] diff --git a/src/utils.rs b/src/utils.rs index 984c91a2..ee2adedc 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -6,6 +6,7 @@ use crate::config::{self, get_setting}; // CRATES // use crate::{client::json, server::RequestExt}; +use clearurls::UrlCleaner; use cookie::Cookie; use hyper::{Body, Request, Response}; use libflate::deflate::{Decoder, Encoder}; @@ -23,6 +24,7 @@ use std::env; use std::io::{Read, Write}; use std::str::FromStr; use std::string::ToString; +use std::sync::Mutex; use time::{macros::format_description, Duration, OffsetDateTime}; use url::Url; @@ -269,7 +271,7 @@ impl Media { ( post_type.to_string(), Self { - url: format_url(url_val.as_str().unwrap_or_default()), + url: format_url(clean_url(url_val.as_str().unwrap_or_default()).as_str()), alt_url, // Note: in the data["is_reddit_media_domain"] path above // width and height will be 0. @@ -1075,6 +1077,22 @@ pub fn format_url(url: &str) -> String { } } +// Remove tracking query params +static URL_CLEANER: Lazy> = Lazy::new(|| Mutex::new(UrlCleaner::from_embedded_rules().expect("Failed to initialize UrlCleaner"))); + +pub fn clean_url(url: &str) -> String { + let is_external_url = match Url::parse(url) { + Ok(parsed_url) => parsed_url.domain().is_some(), + _ => false, + }; + let mut cleaned_url = url.to_owned(); + if is_external_url { + let cleaner = URL_CLEANER.lock().unwrap(); + cleaned_url = cleaner.clear_single_url_str(url).expect("Unable to clean the URL.").as_ref().to_owned(); + } + cleaned_url +} + static REGEX_BULLET: Lazy = Lazy::new(|| Regex::new(r"(?m)^- (.*)$").unwrap()); static REGEX_BULLET_CONSECUTIVE_LINES: Lazy = Lazy::new(|| Regex::new(r"\n
    ").unwrap()); From 3b1445ccc89cbd0cb0f344d7a4fb4ca5368fdafc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gon=C3=A7alo?= Date: Sun, 15 Jun 2025 13:36:29 +0100 Subject: [PATCH 2/5] Make clean urls an optional setting/config --- .env.example | 2 ++ README.md | 13 +++++++------ app.json | 3 +++ src/config.rs | 5 +++++ src/instance_info.rs | 3 +++ src/post.rs | 10 ++++++++-- src/settings.rs | 3 ++- src/utils.rs | 14 +++++++++----- templates/settings.html | 8 +++++++- 9 files changed, 46 insertions(+), 15 deletions(-) diff --git a/.env.example b/.env.example index 5e60b082..2504d138 100644 --- a/.env.example +++ b/.env.example @@ -50,3 +50,5 @@ REDLIB_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION=off REDLIB_DEFAULT_HIDE_SCORE=off # Enable fixed navbar by default REDLIB_DEFAULT_FIXED_NAVBAR=on +# Enable tracking url removal +REDLIB_DEFAULT_CLEAN_URLS=off diff --git a/README.md b/README.md index fcae126c..0de2269b 100644 --- a/README.md +++ b/README.md @@ -34,7 +34,7 @@ - [Docker](#docker) - [Docker Compose](#docker-compose) - [Docker CLI](#docker-cli) - - Podman + - Podman - Quadlets - [Binary](#binary) @@ -216,7 +216,7 @@ Stream logs from the Redlib container: ```bash docker logs -f redlib ``` -## Podman +## Podman [Podman](https://podman.io/) lets you run containerized applications in a rootless fashion. Containers are loosely isolated environments that are lightweight and contain everything needed to run the application, so there's no need to rely on what's installed on the host. @@ -225,8 +225,8 @@ Container images for Redlib are available at [quay.io](https://quay.io/repositor ### Quadlets > [!IMPORTANT] -> These instructions assume that you are on a systemd based distro with [podman](https://podman.io/). If not, follow these [instructions on podman's website](https://podman.io/docs/installation) for how to do so. -> It also assumes you have used `loginctl enable-linger ` to enable the service to start for your user without logging in. +> These instructions assume that you are on a systemd based distro with [podman](https://podman.io/). If not, follow these [instructions on podman's website](https://podman.io/docs/installation) for how to do so. +> It also assumes you have used `loginctl enable-linger ` to enable the service to start for your user without logging in. Copy the `redlib.container` and `.env.example` files to `.config/containers/systemd/` and modify any relevant values (for example, the ports Redlib should listen on, renaming the .env file and editing its values, etc.). 
@@ -244,7 +244,7 @@ systemctl --user start redlib.service ``` You can check the status of your container by using the following command: -```bash +```bash systemctl --user status redlib.service ``` @@ -441,4 +441,5 @@ Assign a default value for each user-modifiable setting by passing environment v | `HIDE_SCORE` | `["on", "off"]` | `off` | | `HIDE_SIDEBAR_AND_SUMMARY` | `["on", "off"]` | `off` | | `FIXED_NAVBAR` | `["on", "off"]` | `on` | -| `REMOVE_DEFAULT_FEEDS` | `["on", "off"]` | `off` | \ No newline at end of file +| `REMOVE_DEFAULT_FEEDS` | `["on", "off"]` | `off` | +| `CLEAN_URLS` | `["on", "off"]` | `off` | diff --git a/app.json b/app.json index 4af7cfec..d4e29753 100644 --- a/app.json +++ b/app.json @@ -79,6 +79,9 @@ }, "REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS": { "required": false + }, + "REDLIB_DEFAULT_CLEAN_URLS": { + "required": false } } } diff --git a/src/config.rs b/src/config.rs index 7b1c95cc..4261eea0 100644 --- a/src/config.rs +++ b/src/config.rs @@ -112,6 +112,9 @@ pub struct Config { #[serde(rename = "REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS")] pub(crate) default_remove_default_feeds: Option, + + #[serde(rename = "REDLIB_DEFAULT_CLEAN_URLS")] + pub(crate) default_clean_urls: Option, } impl Config { @@ -160,6 +163,7 @@ impl Config { enable_rss: parse("REDLIB_ENABLE_RSS"), full_url: parse("REDLIB_FULL_URL"), default_remove_default_feeds: parse("REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS"), + default_clean_urls: parse("REDLIB_DEFAULT_CLEAN_URLS"), } } } @@ -190,6 +194,7 @@ fn get_setting_from_config(name: &str, config: &Config) -> Option { "REDLIB_ENABLE_RSS" => config.enable_rss.clone(), "REDLIB_FULL_URL" => config.full_url.clone(), "REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS" => config.default_remove_default_feeds.clone(), + "REDLIB_DEFAULT_CLEAN_URLS" => config.default_clean_urls.clone(), _ => None, } } diff --git a/src/instance_info.rs b/src/instance_info.rs index a573953d..ae500e8d 100644 --- a/src/instance_info.rs +++ b/src/instance_info.rs @@ -151,6 +151,7 @@ impl InstanceInfo { ["Hide HLS notification", &convert(&self.config.default_hide_hls_notification)], ["Subscriptions", &convert(&self.config.default_subscriptions)], ["Filters", &convert(&self.config.default_filters)], + ["Clean URLs", &convert(&self.config.default_clean_urls)], ]) .with_header_row(["Default preferences"]), ); @@ -186,6 +187,7 @@ impl InstanceInfo { Default blur NSFW: {:?}\n Default use HLS: {:?}\n Default hide HLS notification: {:?}\n + Default clean urls: {:?}\n Default subscriptions: {:?}\n Default filters: {:?}\n", self.package_name, @@ -213,6 +215,7 @@ impl InstanceInfo { self.config.default_blur_nsfw, self.config.default_use_hls, self.config.default_hide_hls_notification, + self.config.default_clean_urls, self.config.default_subscriptions, self.config.default_filters, ) diff --git a/src/post.rs b/src/post.rs index 20b917da..36210589 100644 --- a/src/post.rs +++ b/src/post.rs @@ -6,7 +6,8 @@ use crate::config::get_setting; use crate::server::RequestExt; use crate::subreddit::{can_access_quarantine, quarantine}; use crate::utils::{ - error, format_num, get_filters, nsfw_landing, param, parse_post, rewrite_emotes, setting, template, time, val, Author, Awards, Comment, Flair, FlairPart, Post, Preferences, + clean_url, error, format_num, get_filters, nsfw_landing, param, parse_post, rewrite_emotes, setting, template, time, val, Author, Awards, Comment, Flair, FlairPart, Post, + Preferences, }; use hyper::{Body, Request, Response}; @@ -64,7 +65,12 @@ pub async fn item(req: Request) -> Result, String> { // 
Otherwise, grab the JSON output from the request Ok(response) => { // Parse the JSON into Post and Comment structs - let post = parse_post(&response[0]["data"]["children"][0]).await; + let mut post = parse_post(&response[0]["data"]["children"][0]).await; + + let clean_urls = setting(&req, "clean_urls"); + if clean_urls == "on".to_owned() { + post.media.url = clean_url(post.media.url); + } let req_url = req.uri().to_string(); // Return landing page if this post if this Reddit deems this post diff --git a/src/settings.rs b/src/settings.rs index e38b4ecc..77dcc076 100644 --- a/src/settings.rs +++ b/src/settings.rs @@ -24,7 +24,7 @@ struct SettingsTemplate { // CONSTANTS -const PREFS: [&str; 19] = [ +const PREFS: [&str; 20] = [ "theme", "front_page", "layout", @@ -44,6 +44,7 @@ const PREFS: [&str; 19] = [ "disable_visit_reddit_confirmation", "video_quality", "remove_default_feeds", + "clean_urls", ]; // FUNCTIONS diff --git a/src/utils.rs b/src/utils.rs index ee2adedc..d5033754 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -271,7 +271,7 @@ impl Media { ( post_type.to_string(), Self { - url: format_url(clean_url(url_val.as_str().unwrap_or_default()).as_str()), + url: format_url(url_val.as_str().unwrap_or_default()), alt_url, // Note: in the data["is_reddit_media_domain"] path above // width and height will be 0. @@ -672,6 +672,8 @@ pub struct Preferences { pub hide_score: String, #[revision(start = 1)] pub remove_default_feeds: String, + #[revision(start = 1)] + pub clean_urls: String, } fn serialize_vec_with_plus(vec: &[String], serializer: S) -> Result @@ -730,6 +732,7 @@ impl Preferences { hide_awards: setting(req, "hide_awards"), hide_score: setting(req, "hide_score"), remove_default_feeds: setting(req, "remove_default_feeds"), + clean_urls: setting(req, "clean_urls"), } } @@ -1080,15 +1083,15 @@ pub fn format_url(url: &str) -> String { // Remove tracking query params static URL_CLEANER: Lazy> = Lazy::new(|| Mutex::new(UrlCleaner::from_embedded_rules().expect("Failed to initialize UrlCleaner"))); -pub fn clean_url(url: &str) -> String { - let is_external_url = match Url::parse(url) { +pub fn clean_url(url: String) -> String { + let is_external_url = match Url::parse(url.as_str()) { Ok(parsed_url) => parsed_url.domain().is_some(), _ => false, }; - let mut cleaned_url = url.to_owned(); + let mut cleaned_url = url.clone(); if is_external_url { let cleaner = URL_CLEANER.lock().unwrap(); - cleaned_url = cleaner.clear_single_url_str(url).expect("Unable to clean the URL.").as_ref().to_owned(); + cleaned_url = cleaner.clear_single_url_str(&cleaned_url.as_str()).expect("Unable to clean the URL.").as_ref().to_owned(); } cleaned_url } @@ -1559,6 +1562,7 @@ mod tests { hide_awards: "off".to_owned(), hide_score: "off".to_owned(), remove_default_feeds: "off".to_owned(), + clean_urls: "off".to_owned(), }; let urlencoded = serde_urlencoded::to_string(prefs).expect("Failed to serialize Prefs"); diff --git a/templates/settings.html b/templates/settings.html index c3d8086b..c14de460 100644 --- a/templates/settings.html +++ b/templates/settings.html @@ -145,6 +145,12 @@ +
    + + + +
    @@ -214,4 +220,4 @@ -{% endblock %} \ No newline at end of file +{% endblock %} From 5065fb6e8ed78125df6404d40929faa66832a0df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gon=C3=A7alo?= Date: Sun, 6 Jul 2025 11:15:05 +0100 Subject: [PATCH 3/5] clean urls on subreddit and search pages --- src/search.rs | 6 +++++- src/subreddit.rs | 8 ++++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/search.rs b/src/search.rs index 88dcfdd8..6c106b8d 100644 --- a/src/search.rs +++ b/src/search.rs @@ -1,7 +1,7 @@ #![allow(clippy::cmp_owned)] // CRATES -use crate::utils::{self, catch_random, error, filter_posts, format_num, format_url, get_filters, param, redirect, setting, template, val, Post, Preferences}; +use crate::utils::{self, catch_random, clean_url, error, filter_posts, format_num, format_url, get_filters, param, redirect, setting, template, val, Post, Preferences}; use crate::{ client::json, server::RequestExt, @@ -131,6 +131,10 @@ pub async fn find(req: Request) -> Result, String> { } else { match Post::fetch(&path, quarantined).await { Ok((mut posts, after)) => { + let clean_urls = setting(&req, "clean_urls"); + if clean_urls == "on".to_owned() { + posts.iter_mut().for_each(|post| post.media.url = clean_url(post.media.url.clone())); + } let (_, all_posts_filtered) = filter_posts(&mut posts, &filters); let no_posts = posts.is_empty(); let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on"); diff --git a/src/subreddit.rs b/src/subreddit.rs index d631e318..c892f6a6 100644 --- a/src/subreddit.rs +++ b/src/subreddit.rs @@ -3,8 +3,8 @@ use crate::{config, utils}; // CRATES use crate::utils::{ - catch_random, error, filter_posts, format_num, format_url, get_filters, info, nsfw_landing, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, - Subreddit, + catch_random, clean_url, error, filter_posts, format_num, format_url, get_filters, info, nsfw_landing, param, redirect, rewrite_urls, setting, template, val, Post, + Preferences, Subreddit, }; use crate::{client::json, server::RequestExt, server::ResponseExt}; use cookie::Cookie; @@ -164,6 +164,10 @@ pub async fn community(req: Request) -> Result, String> { } else { match Post::fetch(&path, quarantined).await { Ok((mut posts, after)) => { + let clean_urls = setting(&req, "clean_urls"); + if clean_urls == "on".to_owned() { + posts.iter_mut().for_each(|post| post.media.url = clean_url(post.media.url.clone())); + } let (_, all_posts_filtered) = filter_posts(&mut posts, &filters); let no_posts = posts.is_empty(); let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on"); From edd1f0ed4088a1ff91aaa5c40c6e695ea06367f8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gon=C3=A7alo?= Date: Sat, 16 Aug 2025 12:04:24 +0100 Subject: [PATCH 4/5] fix tests and clippy findings --- src/utils.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/utils.rs b/src/utils.rs index d5033754..618b9228 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1091,7 +1091,7 @@ pub fn clean_url(url: String) -> String { let mut cleaned_url = url.clone(); if is_external_url { let cleaner = URL_CLEANER.lock().unwrap(); - cleaned_url = cleaner.clear_single_url_str(&cleaned_url.as_str()).expect("Unable to clean the URL.").as_ref().to_owned(); + cleaned_url = cleaner.clear_single_url_str(cleaned_url.as_str()).expect("Unable to clean the URL.").as_ref().to_owned(); } cleaned_url } @@ -1566,7 +1566,7 @@ mod tests { 
}; let urlencoded = serde_urlencoded::to_string(prefs).expect("Failed to serialize Prefs"); - assert_eq!(urlencoded, "theme=laserwave&front_page=default&layout=compact&wide=on&blur_spoiler=on&show_nsfw=off&blur_nsfw=on&hide_hls_notification=off&video_quality=best&hide_sidebar_and_summary=off&use_hls=on&autoplay_videos=on&fixed_navbar=on&disable_visit_reddit_confirmation=on&comment_sort=confidence&post_sort=top&subscriptions=memes%2Bmildlyinteresting&filters=&hide_awards=off&hide_score=off&remove_default_feeds=off"); + assert_eq!(urlencoded, "theme=laserwave&front_page=default&layout=compact&wide=on&blur_spoiler=on&show_nsfw=off&blur_nsfw=on&hide_hls_notification=off&video_quality=best&hide_sidebar_and_summary=off&use_hls=on&autoplay_videos=on&fixed_navbar=on&disable_visit_reddit_confirmation=on&comment_sort=confidence&post_sort=top&subscriptions=memes%2Bmildlyinteresting&filters=&hide_awards=off&hide_score=off&remove_default_feeds=off&clean_urls=off"); } } @@ -1653,7 +1653,7 @@ fn test_rewriting_bullet_list() { How`s your monitor by the way? Any IPS bleed whatsoever? I either got lucky or the panel is pretty good, 0 bleed for me, just the usual IPS glow. How about the pixels? I see the pixels even at one meter away, especially on Microsoft Edge's icon for example, the blue background is just blocky, don't know why.

    "#; let output = r#"

    Hi, I've bought this very same monitor and found no calibration whatsoever. I have an ICC profile that has been set up since I've installed its driver from the LG website and it works ok. I also used http://www.lagom.nl/lcd-test/ to calibrate it. After some good tinkering I've found the following settings + the color profile from the driver gets me past all the tests perfectly: -

    • Brightness 50 (still have to settle on this one, it's personal preference, it controls the backlight, not the colors)
    • Contrast 70 (which for me was the default one)
    • Picture mode Custom
    • Super resolution + Off (it looks horrible anyway)
    • Sharpness 50 (default one I think)
    • Black level High (low messes up gray colors)
    • DFC Off
    • Response Time Middle (personal preference, https://www.blurbusters.com/ show horrible overdrive with it on high)
    • Freesync doesn't matter
    • Black stabilizer 50
    • Gamma setting on 0
    • Color Temp Medium
    +
    • Brightness 50 (still have to settle on this one, it's personal preference, it controls the backlight, not the colors)
    • Contrast 70 (which for me was the default one)
    • Picture mode Custom
    • Super resolution + Off (it looks horrible anyway)
    • Sharpness 50 (default one I think)
    • Black level High (low messes up gray colors)
    • DFC Off
    • Response Time Middle (personal preference, https://www.blurbusters.com/ show horrible overdrive with it on high)
    • Freesync doesn't matter
    • Black stabilizer 50
    • Gamma setting on 0
    • Color Temp Medium
    How`s your monitor by the way? Any IPS bleed whatsoever? I either got lucky or the panel is pretty good, 0 bleed for me, just the usual IPS glow. How about the pixels? I see the pixels even at one meter away, especially on Microsoft Edge's icon for example, the blue background is just blocky, don't know why.

    "#; @@ -1676,9 +1676,9 @@ fn test_default_prefs_serialization_loop_bincode() { } static KNOWN_GOOD_CONFIGS: &[&str] = &[ - "ఴӅβØØҞÉဏႢձĬ༧ȒʯऌԔӵ୮༏", - "ਧՊΥÀÃǎƱГ۸ඣമĖฤ႙ʟาúໜϾௐɥঀĜໃહཞઠѫҲɂఙ࿔DzઉƲӟӻĻฅΜδ໖ԜǗဖငƦơ৶Ą௩ԹʛใЛʃශаΏ", - "ਧԩΥÀÃΊ౭൩ඔႠϼҭöҪƸռઇԾॐნɔາǒՍҰच௨ಖມŃЉŐདƦ๙ϩএఠȝഽйʮჯඒϰळՋ௮ສ৵ऎΦѧਹಧଟƙŃ३î༦ŌပղयƟแҜ།", + "ਧӐΥºÃΦĴгౡୡϤҚԷŽဎՐΧΣೡຽဒ೨ʛĽତ๘Ӓǹভµɾ൦ॴцৱ௬చΣҭжҭȱȾཊజĊȔ௸७ƘȂј۰ȥėǨԯၻíႽਈႴ۹ଆ", + "ਧҫടºÃǒɣυໃਣөŕǁజ८ௐɪDžઘႴ౨ඛႻຫǪၼդɍ৪Êѕ୶ʭѹŪҚຊೱѰງიŠСঌາඌĨğਜડ࿅ଠಲೱҋŇƞਭăʁझшȖǾཔ௧ந۞ສÚ", + "ਧҫടºÃǒɿဧϯljഔค๖۞ԆНȦ൨ĭ྅ҤƍตཧႯƅशञঊମਇȕමзқଽijჰଐՋບӎՓஶཕ૭ଛกήऋĜɀಱӔԩझԩîဓŒԬũլಙટщೞຝ৪༎", ]; #[test] From 3057fa22a13418ce950d189b598cf054fbbfc23c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gon=C3=A7alo?= Date: Sat, 16 Aug 2025 12:43:50 +0100 Subject: [PATCH 5/5] fix clippy errors --- src/settings.rs | 8 ++++---- src/utils.rs | 8 +++----- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/src/settings.rs b/src/settings.rs index 77dcc076..39b72b84 100644 --- a/src/settings.rs +++ b/src/settings.rs @@ -160,8 +160,8 @@ fn set_cookies_method(req: Request, remove_cookies: bool) -> Response = subscriptions.expect("Subscriptions").split('+').map(str::to_string).collect(); + if let Some(subscriptions) = subscriptions { + let sub_list: Vec = subscriptions.split('+').map(str::to_string).collect(); // Start at 0 to keep track of what number we need to start deleting old subscription cookies from let mut subscriptions_number_to_delete_from = 0; @@ -211,8 +211,8 @@ fn set_cookies_method(req: Request, remove_cookies: bool) -> Response = filters.expect("Filters").split('+').map(str::to_string).collect(); + if let Some(filters) = filters { + let filters_list: Vec = filters.split('+').map(str::to_string).collect(); // Start at 0 to keep track of what number we need to start deleting old subscription cookies from let mut filters_number_to_delete_from = 0; diff --git a/src/utils.rs b/src/utils.rs index 618b9228..c2af8e85 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1444,13 +1444,11 @@ pub async fn nsfw_landing(req: Request, req_url: String) -> Result String { let url_result = Url::parse(format!("https://libredd.it/{path}").as_str()); - if url_result.is_err() { - path.to_string() - } else { - let mut url = url_result.unwrap(); + if let Ok(mut url) = url_result { url.path_segments_mut().unwrap().pop_if_empty(); - url.path_segments().unwrap().next_back().unwrap().to_string() + } else { + path.to_string() } }