noodlemagazine

Simon committed 2025-11-29 20:08:46 +00:00
parent e924c89573
commit 39e38249b7
5 changed files with 332 additions and 1 deletion

View File

@@ -37,6 +37,7 @@ pub mod pornxp;
pub mod rule34gen;
pub mod xxdbx;
pub mod hqporner;
pub mod noodlemagazine;
// convenient alias
pub type DynProvider = Arc<dyn Provider>;
@@ -51,6 +52,7 @@ pub static ALL_PROVIDERS: Lazy<HashMap<&'static str, DynProvider>> = Lazy::new(|
m.insert("xxdbx", Arc::new(xxdbx::XxdbxProvider::new()) as DynProvider);
m.insert("hqporner", Arc::new(hqporner::HqpornerProvider::new()) as DynProvider);
m.insert("pmvhaven", Arc::new(pmvhaven::PmvhavenProvider::new()) as DynProvider);
m.insert("noodlemagazine", Arc::new(noodlemagazine::NoodlemagazineProvider::new()) as DynProvider);
// add more here as you migrate them
m
});

View File

@@ -0,0 +1,264 @@
use crate::DbPool;
use crate::api::ClientVersion;
use crate::providers::Provider;
use crate::status::*;
use crate::util::cache::VideoCache;
use crate::util::parse_abbreviated_number;
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode};
use titlecase::Titlecase;
error_chain! {
foreign_links {
Io(std::io::Error);
HttpRequest(wreq::Error);
}
}
#[derive(Debug, Clone)]
pub struct NoodlemagazineProvider {
url: String,
}
impl NoodlemagazineProvider {
pub fn new() -> Self {
NoodlemagazineProvider {
url: "https://noodlemagazine.com".to_string(),
}
}
fn build_channel(&self, clientversion: ClientVersion) -> Channel {
let _ = clientversion;
Channel {
id: "noodlemagazine".to_string(),
name: "Noodlemagazine".to_string(),
description: "The Best Search Engine of HD Videos".to_string(),
premium: false,
favicon: "https://www.google.com/s2/favicons?sz=64&domain=noodlemagazine.com"
.to_string(),
status: "active".to_string(),
categories: vec![],
options: vec![],
nsfw: true,
cacheDuration: Some(1800),
}
}
async fn get(
&self,
cache: VideoCache,
page: u8,
sort: &str,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let _ = sort;
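// The site's `p` query parameter appears to be zero-based, hence the `page - 1` below.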
let video_url = format!(
"{}/popular/recent?sort_by=views&sort_order=desc&p={}",
self.url,
page - 1
);
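// Serve cache entries younger than five minutes; keep older ones around as a fallback for the fetch below.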
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
println!("Cache hit for URL: {}", video_url);
return Ok(items.clone());
} else {
items.clone()
}
}
None => {
vec![]
}
};
let mut requester = options.requester.clone().unwrap();
let text = requester.get(&video_url).await.unwrap();
let video_items: Vec<VideoItem> = self
.get_video_items_from_html(text.clone())
.await;
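// Only overwrite the cache when parsing produced items; otherwise fall back to whatever was cached before.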
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
async fn query(
&self,
cache: VideoCache,
page: u8,
query: &str,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let search_string = query.trim().replace(" ", "%20");
let video_url = format!("{}/video/{}?p={}", self.url, search_string, page - 1);
// Check the video cache. Results younger than 5 minutes are returned directly; older entries trigger a cache check but are still returned.
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
return Ok(items.clone());
} else {
let _ = cache.check().await;
return Ok(items.clone());
}
}
None => {
vec![]
}
};
let mut requester = options.requester.clone().unwrap();
let text = requester.get(&video_url).await.unwrap();
let video_items: Vec<VideoItem> = self
.get_video_items_from_html(text.clone())
.await;
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
async fn get_video_items_from_html(
&self,
html: String,
) -> Vec<VideoItem> {
if html.is_empty() || html.contains("404 Not Found") {
return vec![];
}
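// Trim the page down to the video list: drop everything after the "Show more" button, keep the contents of the list_videos container, then split it into per-item chunks.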
let raw_videos = html.split(">Show more</div>").collect::<Vec<&str>>()[0]
.split("<div class=\"list_videos\" id=\"list_videos\">")
.collect::<Vec<&str>>()[1]
.split("<div class=\"item\">")
.collect::<Vec<&str>>()[1..]
.to_vec();
let mut video_items: Vec<VideoItem> = vec![];
for raw_video in raw_videos {
if let Ok(video_item) = self.get_video_item(raw_video.to_string()).await {
video_items.push(video_item);
}
}
// let futures = raw_videos
// .into_iter()
// .map(|el| self.get_video_item(el.to_string()));
// let results: Vec<Result<VideoItem>> = join_all(futures).await;
// let video_items: Vec<VideoItem> = results.into_iter().filter_map(Result::ok).collect();
return video_items;
}
async fn get_video_item(
&self,
video_segment: String,
) -> Result<VideoItem> {
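// Every field below is scraped with plain string splits on the item's markup; if the site's layout changes, these indexes will panic.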
let video_url: String = format!(
"{}{}",
self.url,
video_segment.split("<a href=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string()
);
let mut title = video_segment
.split("<div class=\"title\">")
.collect::<Vec<&str>>()[1]
.split("<")
.collect::<Vec<&str>>()[0]
.trim()
.to_string();
// html decode
title = decode(title.as_bytes())
.to_string()
.unwrap_or(title)
.titlecase();
let id = video_url.split("/").collect::<Vec<&str>>()[4]
.split(".")
.collect::<Vec<&str>>()[0]
.to_string();
let thumb = video_segment.split("<img ").collect::<Vec<&str>>()[1]
.split("data-src=\"")
.collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string();
let raw_duration = video_segment
.split("#clock-o\"></use></svg>")
.collect::<Vec<&str>>()[1]
.split("<")
.collect::<Vec<&str>>()[0]
.trim()
.to_string();
let duration = parse_time_to_seconds(raw_duration.as_str()).unwrap_or(0) as u32;
let views = parse_abbreviated_number(video_segment
.split("<use xlink:href=\"#eye\"></use></svg>")
.collect::<Vec<&str>>()[1]
.split("<")
.collect::<Vec<&str>>()[0]
.trim()).unwrap_or(0);
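// Link through the noodlemagazine proxy route rather than the site itself; the proxy resolves the direct stream URL when the item is played.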
let proxy_url = format!(
"https://hottub.spacemoehre.de/proxy/noodlemagazine/watch/{}",
video_url.split("/").collect::<Vec<&str>>()[4]
);
let video_item = VideoItem::new(
id,
title,
proxy_url,
"noodlemagazine".to_string(),
thumb,
duration,
)
.views(views);
return Ok(video_item);
}
}
#[async_trait]
impl Provider for NoodlemagazineProvider {
async fn get_videos(
&self,
cache: VideoCache,
pool: DbPool,
sort: String,
query: Option<String>,
page: String,
per_page: String,
options: ServerOptions,
) -> Vec<VideoItem> {
let _ = per_page;
let _ = pool;
let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
Some(q) => {
self.query(cache, page.parse::<u8>().unwrap_or(1), &q, options)
.await
}
None => {
self.get(cache, page.parse::<u8>().unwrap_or(1), &sort, options)
.await
}
};
match videos {
Ok(v) => v,
Err(e) => {
println!("Error fetching videos: {}", e);
vec![]
}
}
}
fn get_channel(&self, clientversion: ClientVersion) -> crate::status::Channel {
self.build_channel(clientversion)
}
}

View File

@@ -1,13 +1,15 @@
use ntex::web;
use crate::{proxies::sxyprn::SxyprnProxy, util::requester::Requester};
use crate::{proxies::sxyprn::SxyprnProxy, util::requester::Requester, proxies::noodlemagazine::NoodlemagazineProxy};
pub mod sxyprn;
pub mod hanimecdn;
pub mod noodlemagazine;
#[derive(Debug, Clone)]
pub enum AnyProxy {
Sxyprn(SxyprnProxy),
Noodlemagazine(NoodlemagazineProxy),
}
pub trait Proxy {
@@ -32,6 +34,12 @@ impl Proxy for AnyProxy {
requester,
).await
}
AnyProxy::Noodlemagazine(p) => {
p.get_video_url(
url,
requester,
).await
}
}
}
}

View File

@@ -0,0 +1,37 @@
use ntex::web;
use crate::util::requester::Requester;
#[derive(Debug, Clone)]
pub struct NoodlemagazineProxy {}
impl NoodlemagazineProxy {
pub fn new() -> Self {
NoodlemagazineProxy {}
}
pub async fn get_video_url(
&self,
url: String,
requester: web::types::State<Requester>,
) -> String {
let mut requester = requester.get_ref().clone();
let url = "https://noodlemagazine.com/".to_string() + &url;
let text = requester.get(&url).await.unwrap_or("".to_string());
if text.is_empty() {
return "".to_string();
}
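// The watch page embeds its playlist as a `window.playlist = {...};` assignment; take the JSON between the assignment and the next semicolon.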
let json_str = text.split("window.playlist = ")
.collect::<Vec<&str>>()[1]
.split(";")
.collect::<Vec<&str>>()[0];
let json: serde_json::Value = serde_json::from_str(json_str).unwrap();
let sources = json["sources"].as_array().unwrap();
let video_url = sources[0]["file"].as_str().unwrap().to_string();
return video_url;
}
}

View File

@@ -1,5 +1,6 @@
use ntex::web::{self, HttpRequest};
use crate::proxies::noodlemagazine::NoodlemagazineProxy;
use crate::proxies::sxyprn::SxyprnProxy;
use crate::util::requester::Requester;
use crate::proxies::*;
@@ -15,6 +16,11 @@ pub fn config(cfg: &mut web::ServiceConfig) {
.route(web::post().to(crate::proxies::hanimecdn::get_image))
.route(web::get().to(crate::proxies::hanimecdn::get_image)),
)
.service(
web::resource("/noodlemagazine/{endpoint}*")
.route(web::post().to(noodlemagazine))
.route(web::get().to(noodlemagazine)),
)
// .service(
// web::resource("/videos")
// // .route(web::get().to(videos_get))
@@ -37,9 +43,23 @@ async fn sxyprn(req: HttpRequest,
.finish())
}
async fn noodlemagazine(req: HttpRequest,
requester: web::types::State<Requester>,) -> Result<impl web::Responder, web::Error> {
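// Resolve the direct video URL for the requested endpoint and redirect the client to it.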
let proxy = get_proxy(req.uri().to_string().split("/").collect::<Vec<&str>>()[2]).unwrap();
let endpoint = req.match_info().query("endpoint").to_string();
let video_url = match proxy.get_video_url(endpoint, requester).await{
url if !url.is_empty() => url,
_ => "Error".to_string(),
};
Ok(web::HttpResponse::PermanentRedirect()
.header("Location", format!("{}", video_url))
.finish())
}
fn get_proxy(proxy: &str) -> Option<AnyProxy> {
match proxy {
"sxyprn" => Some(AnyProxy::Sxyprn(SxyprnProxy::new())),
"noodlemagazine" => Some(AnyProxy::Noodlemagazine(NoodlemagazineProxy::new())),
_ => None,
}
}