Compare commits

..

6 Commits

Author SHA1 Message Date
Simon
e3d62524be removed debug print 2026-05-06 13:50:47 +00:00
Simon
5e5786010a doodstream and lulustream in sxyprn integrated
Co-authored-by: Copilot <copilot@github.com>
2026-05-06 11:17:26 +00:00
Simon
9e8f326518 sxyprn format changes vidara 2026-05-05 19:11:26 +00:00
Simon
c4292c2ffc blowjobspro launch 2026-05-05 18:41:05 +00:00
Simon
3da09dc561 erome launch 2026-05-05 14:20:47 +00:00
Simon
d6fa364b28 youporn 2026-05-05 13:53:01 +00:00
12 changed files with 1855 additions and 141 deletions

View File

@@ -36,6 +36,11 @@ const PROVIDERS: &[ProviderDef] = &[
module: "pornhub", module: "pornhub",
ty: "PornhubProvider", ty: "PornhubProvider",
}, },
ProviderDef {
id: "youporn",
module: "youporn",
ty: "YoupornProvider",
},
ProviderDef { ProviderDef {
id: "pornhd3x", id: "pornhd3x",
module: "pornhd3x", module: "pornhd3x",
@@ -251,6 +256,16 @@ const PROVIDERS: &[ProviderDef] = &[
module: "hsex", module: "hsex",
ty: "HsexProvider", ty: "HsexProvider",
}, },
ProviderDef {
id: "blowjobspro",
module: "blowjobspro",
ty: "BlowjobsproProvider",
},
ProviderDef {
id: "erome",
module: "erome",
ty: "EromeProvider",
},
ProviderDef { ProviderDef {
id: "sextb", id: "sextb",
module: "sextb", module: "sextb",

View File

@@ -9,7 +9,9 @@ This is the current implementation inventory as of this snapshot of the repo. Us
| `all` | `meta-search` | no | no | Aggregates all compiled providers. | | `all` | `meta-search` | no | no | Aggregates all compiled providers. |
| `archivebate` | `live-cams` | no | no | Livewire-backed cam archive listings with platform/gender/profile shortcuts. | | `archivebate` | `live-cams` | no | no | Livewire-backed cam archive listings with platform/gender/profile shortcuts. |
| `beeg` | `mainstream-tube` | no | no | Basic mainstream tube pattern. | | `beeg` | `mainstream-tube` | no | no | Basic mainstream tube pattern. |
| `blowjobspro` | `mainstream-tube` | no | no | KVS-style HTML provider with async search pagination and category shortcut routing. |
| `chaturbate` | `live-cams` | no | no | Live cam channel. | | `chaturbate` | `live-cams` | no | no | Live cam channel. |
| `erome` | `amateur-homemade` | no | no | HTML album scraper with hot/new feeds, keyword search, and uploader-slug shortcuts (`uploader:<name>`). |
| `freepornvideosxxx` | `studio-network` | no | no | Studio-style scraper. | | `freepornvideosxxx` | `studio-network` | no | no | Studio-style scraper. |
| `freeuseporn` | `fetish-kink` | no | no | Fetish archive pattern. | | `freeuseporn` | `fetish-kink` | no | no | Fetish archive pattern. |
| `hanime` | `hentai-animation` | no | yes | Uses proxied CDN/thumb handling. | | `hanime` | `hentai-animation` | no | yes | Uses proxied CDN/thumb handling. |
@@ -57,6 +59,7 @@ This is the current implementation inventory as of this snapshot of the repo. Us
| `xxthots` | `onlyfans` | no | no | OnlyFans-like metadata example. | | `xxthots` | `onlyfans` | no | no | OnlyFans-like metadata example. |
| `yesporn` | `mainstream-tube` | no | no | Preview format examples. | | `yesporn` | `mainstream-tube` | no | no | Preview format examples. |
| `youjizz` | `mainstream-tube` | no | no | Mainstream tube provider. | | `youjizz` | `mainstream-tube` | no | no | Mainstream tube provider. |
| `youporn` | `mainstream-tube` | no | no | Pornhub-network HTML provider with watch-page playback URLs and tag/channel/pornstar shortcuts. |
## Proxy Routes ## Proxy Routes

View File

@@ -0,0 +1,531 @@
use crate::DbPool;
use crate::api::ClientVersion;
use crate::providers::{
Provider, report_provider_error, report_provider_error_background, requester_or_default,
};
use crate::status::*;
use crate::util::cache::VideoCache;
use crate::util::parse_abbreviated_number;
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode};
use percent_encoding::{NON_ALPHANUMERIC, utf8_percent_encode};
use regex::Regex;
use scraper::{Html, Selector};
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use std::thread;
use wreq::Version;
// Registry metadata: group and discovery tags for this provider.
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "mainstream-tube",
tags: &["tube", "kvs", "blowjob"],
};
// Scrape origin and the channel id this provider registers under.
const BASE_URL: &str = "https://blowjobs.pro";
const CHANNEL_ID: &str = "blowjobspro";
// Desktop Firefox User-Agent and Accept header values sent with every
// HTML request (see `html_headers`).
const FIREFOX_UA: &str =
"Mozilla/5.0 (X11; Linux x86_64; rv:147.0) Gecko/20100101 Firefox/147.0";
const HTML_ACCEPT: &str =
"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8";
// Provider-local Result/Error types generated by error_chain.
error_chain! {
foreign_links {
Io(std::io::Error);
}
errors {
// Scrape extraction failure (selector/regex/markup mismatch);
// carries a human-readable description of what failed.
Parse(msg: String) {
description("parse error")
display("parse error: {}", msg)
}
}
}
// Provider state. `categories`/`category_map` are filled asynchronously
// by `load_categories` and read on every request, hence Arc<RwLock<..>>.
#[derive(Debug, Clone)]
pub struct BlowjobsproProvider {
url: String,
// Display options shown in the channel UI (seeded with "All").
categories: Arc<RwLock<Vec<FilterOption>>>,
// normalized category title -> category archive URL
category_map: Arc<RwLock<HashMap<String, String>>>,
}
// Resolved browse intent for a single listing request.
#[derive(Debug, Clone)]
enum Target {
Latest,
MostViewed,
TopRated,
Search { query: String },
// `url` is the absolute category archive URL from `category_map`.
Category { url: String },
}
impl BlowjobsproProvider {
/// Construct the provider and kick off the background category fetch.
pub fn new() -> Self {
let provider = Self {
url: BASE_URL.to_string(),
// Seed with a single "All" entry so the channel is usable
// before the background category load completes.
categories: Arc::new(RwLock::new(vec![FilterOption {
id: "all".to_string(),
title: "All".to_string(),
}])),
category_map: Arc::new(RwLock::new(HashMap::new())),
};
provider.spawn_initial_load();
provider
}
/// Fetch categories on a dedicated OS thread with its own
/// single-threaded Tokio runtime so construction never blocks.
fn spawn_initial_load(&self) {
let url = self.url.clone();
let categories = Arc::clone(&self.categories);
let category_map = Arc::clone(&self.category_map);
thread::spawn(move || {
let runtime = match tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
{
Ok(runtime) => runtime,
Err(error) => {
// Runtime build failed: report and bail; the provider
// keeps working with only the seeded "All" category.
report_provider_error_background(
CHANNEL_ID,
"spawn_initial_load.runtime_build",
&error.to_string(),
);
return;
}
};
runtime.block_on(async move {
if let Err(error) = Self::load_categories(&url, categories, category_map).await {
report_provider_error_background(CHANNEL_ID, "load_categories", &error.to_string());
}
});
});
}
/// Assemble the channel descriptor, including whatever categories the
/// background load has produced so far.
fn build_channel(&self, _clientversion: ClientVersion) -> Channel {
// Snapshot under the read lock; a poisoned lock degrades to an
// empty category list rather than panicking.
let categories = self
.categories
.read()
.map(|value| value.clone())
.unwrap_or_default();
Channel {
id: CHANNEL_ID.to_string(),
name: "Blowjobs.pro".to_string(),
description: "Blowjobs.pro KVS listings with latest, most viewed, top rated, search, and category shortcuts."
.to_string(),
premium: false,
favicon: "https://www.google.com/s2/favicons?sz=64&domain=blowjobs.pro".to_string(),
status: "active".to_string(),
categories: categories.iter().map(|value| value.title.clone()).collect(),
options: vec![
// Sort ids map to Target variants in `target_from_request`.
ChannelOption {
id: "sort".to_string(),
title: "Sort".to_string(),
description: "Browse feed ordering.".to_string(),
systemImage: "list.number".to_string(),
colorName: "blue".to_string(),
options: vec![
FilterOption {
id: "new".to_string(),
title: "Latest".to_string(),
},
FilterOption {
id: "popular".to_string(),
title: "Most Viewed".to_string(),
},
FilterOption {
id: "rated".to_string(),
title: "Top Rated".to_string(),
},
],
multiSelect: false,
},
ChannelOption {
id: "categories".to_string(),
title: "Categories".to_string(),
description: "Jump directly to category archives.".to_string(),
systemImage: "square.grid.2x2".to_string(),
colorName: "orange".to_string(),
options: categories,
multiSelect: false,
},
],
nsfw: true,
cacheDuration: Some(1800),
}
}
/// Parse a CSS selector, converting the scraper error into this
/// provider's `Error` with the offending selector in the message.
fn selector(value: &str) -> Result<Selector> {
Selector::parse(value)
.map_err(|error| Error::from(format!("selector `{value}` parse failed: {error}")))
}
/// Compile a regex, converting the failure into this provider's `Error`.
fn regex(value: &str) -> Result<Regex> {
Regex::new(value).map_err(|error| Error::from(format!("regex `{value}` failed: {error}")))
}
/// Decode HTML entities; on decode failure the raw input is returned.
fn decode_html(text: &str) -> String {
decode(text.as_bytes())
.to_string()
.unwrap_or_else(|_| text.to_string())
}
/// Collapse every run of whitespace in `text` to a single space and
/// drop leading/trailing whitespace entirely.
fn collapse_whitespace(text: &str) -> String {
    let mut collapsed = String::with_capacity(text.len());
    for word in text.split_whitespace() {
        if !collapsed.is_empty() {
            collapsed.push(' ');
        }
        collapsed.push_str(word);
    }
    collapsed
}
/// Canonicalize a category title for map lookups: strip surrounding
/// whitespace and a leading run of `#`, treat `_`/`-` as spaces,
/// collapse whitespace runs, and lowercase the result (ASCII only).
fn normalize_title(title: &str) -> String {
    let cleaned = title.trim().trim_start_matches('#').replace(['_', '-'], " ");
    let mut normalized = String::with_capacity(cleaned.len());
    for word in cleaned.split_whitespace() {
        if !normalized.is_empty() {
            normalized.push(' ');
        }
        normalized.push_str(word);
    }
    normalized.make_ascii_lowercase();
    normalized
}
/// Resolve a possibly relative href against the provider base URL.
/// Returns an empty string for blank input.
fn normalize_url(&self, url: &str) -> String {
let trimmed = url.trim();
if trimmed.is_empty() {
return String::new();
}
if trimmed.starts_with("http://") || trimmed.starts_with("https://") {
return trimmed.to_string();
}
// Protocol-relative URL: keep the host, pin the scheme to https.
if trimmed.starts_with("//") {
return format!("https:{trimmed}");
}
if trimmed.starts_with('/') {
return format!("{}{}", self.url, trimmed);
}
format!("{}/{}", self.url, trimmed.trim_start_matches("./"))
}
/// Browser-like headers (UA, Accept, Referer) for every HTML request.
fn html_headers(referer: &str) -> Vec<(String, String)> {
vec![
("User-Agent".to_string(), FIREFOX_UA.to_string()),
("Accept".to_string(), HTML_ACCEPT.to_string()),
("Referer".to_string(), referer.to_string()),
]
}
/// Turn a free-text query into the site's hyphen-joined search path
/// segment, percent-encoding each whitespace-separated word.
fn build_search_path(query: &str) -> String {
query
.split_whitespace()
.map(|part| utf8_percent_encode(part, NON_ALPHANUMERIC).to_string())
.collect::<Vec<_>>()
.join("-")
}
/// Build the URL for page `page` of a KVS archive listing. Page 1 (or
/// 0) is the bare archive URL with exactly one trailing slash; later
/// pages append `/<page>/`.
fn build_archive_page_url(archive_url: &str, page: u16) -> String {
    let base = archive_url.trim_end_matches('/');
    if page <= 1 {
        format!("{base}/")
    } else {
        format!("{base}/{page}/")
    }
}
/// Produce the listing URL for a target and page number.
fn build_target_url(&self, target: &Target, page: u16) -> String {
match target {
Target::Latest => {
Self::build_archive_page_url(&format!("{}/latest-updates/", self.url), page)
}
Target::MostViewed => {
Self::build_archive_page_url(&format!("{}/most-popular/", self.url), page)
}
Target::TopRated => {
Self::build_archive_page_url(&format!("{}/top-rated/", self.url), page)
}
Target::Category { url } => Self::build_archive_page_url(url, page),
Target::Search { query } => {
// `page` is intentionally ignored here: pages > 1 of a
// search go through the async endpoint built in
// `search_page_from_async` (see `fetch_listing_html`).
let normalized = Self::build_search_path(query);
format!("{}/search/{normalized}/", self.url)
}
}
}
/// Resolve the request's sort/query/category inputs into a Target.
/// Precedence: explicit category option, then `category:` query
/// prefix, then a query that happens to name a category, then free
/// search, then the sort default.
fn target_from_request(
&self,
sort: &str,
query: Option<&str>,
category: Option<&str>,
) -> Target {
let category_value = category.unwrap_or("").trim();
if !category_value.is_empty() && !category_value.eq_ignore_ascii_case("all") {
if let Some(url) = self.resolve_category(category_value) {
return Target::Category { url };
}
}
if let Some(raw_query) = query {
let trimmed = raw_query.trim();
if !trimmed.is_empty() {
if let Some(value) = trimmed.strip_prefix("category:")
&& let Some(url) = self.resolve_category(value)
{
return Target::Category { url };
}
// A plain query matching a known category jumps straight
// to that category archive instead of searching.
if let Some(url) = self.resolve_category(trimmed) {
return Target::Category { url };
}
return Target::Search {
query: trimmed.to_string(),
};
}
}
match sort {
"popular" => Target::MostViewed,
"rated" => Target::TopRated,
_ => Target::Latest,
}
}
/// Look up a category archive URL by its normalized title.
fn resolve_category(&self, value: &str) -> Option<String> {
let normalized = Self::normalize_title(value);
if normalized.is_empty() {
return None;
}
self.category_map
.read()
.ok()
.and_then(|map| map.get(&normalized).cloned())
}
async fn fetch_html(
&self,
options: &ServerOptions,
url: &str,
referer: &str,
) -> Result<String> {
let mut requester = requester_or_default(options, CHANNEL_ID, "fetch_html");
requester
.get_with_headers(url, Self::html_headers(referer), Some(Version::HTTP_11))
.await
.map_err(|error| Error::from(format!("request failed for {url}: {error}")))
}
fn search_page_from_async(&self, query: &str, page: u16) -> Option<String> {
if page <= 1 {
return None;
}
let query_value = query.trim();
if query_value.is_empty() {
return None;
}
let from = page;
let encoded_query = utf8_percent_encode(query_value, NON_ALPHANUMERIC).to_string();
Some(format!(
"{}/search/{}/?mode=async&function=get_block&block_id=list_videos_videos_list_search_result&q={encoded_query}&category_ids=&sort_by=&from_videos%2Bfrom_albums={from}",
self.url,
Self::build_search_path(query_value)
))
}
async fn fetch_listing_html(
&self,
options: &ServerOptions,
target: &Target,
page: u16,
) -> Result<String> {
if let Target::Search { query } = target
&& page > 1
&& let Some(async_url) = self.search_page_from_async(query, page)
{
return self
.fetch_html(options, &async_url, &format!("{}/search/{}/", self.url, Self::build_search_path(query)))
.await;
}
let page_url = self.build_target_url(target, page);
self.fetch_html(options, &page_url, &self.url).await
}
/// Extract VideoItems from a listing page by splitting the raw HTML on
/// the per-video `item` container and regex-scanning each segment.
/// Segments without a video href or a title are skipped.
fn parse_listing_html(&self, html: &str) -> Result<Vec<VideoItem>> {
let href_re = Self::regex(r#"href="([^"]+/videos/\d+/[^"]*)""#)?;
let title_re = Self::regex(r#"title="([^"]+)""#)?;
// Thumbnails may be lazy-loaded (`data-original`) or plain `src`.
let thumb_re = Self::regex(r#"(?:data-original|src)="([^"]+/contents/videos_screenshots/[^"]+)""#)?;
let duration_re = Self::regex(r#"<div class="duration">\s*([^<]+)\s*</div>"#)?;
let views_re = Self::regex(r#"<div class="views">\s*([^<]+)\s*</div>"#)?;
let tag_re = Self::regex(r#"<a href="[^"]*/categories/[^"]*"[^>]*>\s*([^<]+)\s*</a>"#)?;
let id_re = Self::regex(r"/videos/(\d+)/")?;
let mut videos = Vec::new();
for segment in html.split("<div class=\"item\">").skip(1) {
let Some(href_caps) = href_re.captures(segment) else {
continue;
};
let Some(raw_url) = href_caps.get(1).map(|m| m.as_str()) else {
continue;
};
let url = self.normalize_url(raw_url);
// Numeric path id; falls back to the full URL when absent.
let id = id_re
.captures(&url)
.and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()))
.unwrap_or_else(|| url.clone());
let title = title_re
.captures(segment)
.and_then(|caps| caps.get(1).map(|m| Self::decode_html(m.as_str())))
.unwrap_or_default();
if title.is_empty() {
continue;
}
let thumb = thumb_re
.captures(segment)
.and_then(|caps| caps.get(1).map(|m| self.normalize_url(m.as_str())))
.unwrap_or_default();
// Duration text like "12:34" parsed to seconds; 0 when missing.
let duration = duration_re
.captures(segment)
.and_then(|caps| caps.get(1).map(|m| m.as_str().trim().to_string()))
.and_then(|value| parse_time_to_seconds(&value))
.unwrap_or(0)
.max(0) as u32;
// Abbreviated counters ("1.2K") expanded to plain numbers.
let views = views_re
.captures(segment)
.and_then(|caps| caps.get(1).map(|m| m.as_str().trim().to_string()))
.and_then(|value| parse_abbreviated_number(&value))
.map(|value| value as u32);
let tags = tag_re
.captures_iter(segment)
.filter_map(|caps| {
caps.get(1).map(|m| {
Self::decode_html(m.as_str())
.split_whitespace()
.collect::<Vec<_>>()
.join(" ")
.trim()
.to_string()
})
})
.filter(|value| !value.is_empty())
.collect::<Vec<_>>();
let mut item = VideoItem::new(
id,
title,
url,
CHANNEL_ID.to_string(),
thumb,
duration,
);
item.tags = Some(tags);
item.views = views;
videos.push(item);
}
Ok(videos)
}
/// Fetch `/categories/` and populate the shared option list and the
/// normalized-title -> href map. Runs once, in the background thread
/// spawned by `spawn_initial_load`.
async fn load_categories(
url: &str,
categories: Arc<RwLock<Vec<FilterOption>>>,
category_map: Arc<RwLock<HashMap<String, String>>>,
) -> Result<()> {
// Fresh requester: ServerOptions are not available in this context.
let mut requester = crate::util::requester::Requester::new();
let category_url = format!("{url}/categories/");
let html = requester
.get_with_headers(
&category_url,
Self::html_headers(url),
Some(Version::HTTP_11),
)
.await
.map_err(|error| Error::from(format!("category fetch failed: {error}")))?;
let doc = Html::parse_document(&html);
let link_selector = Self::selector("#list_categories_categories_list_items a.item-link")?;
// Always keep "All" as the first (default) option.
let mut options = vec![FilterOption {
id: "all".to_string(),
title: "All".to_string(),
}];
let mut map = HashMap::new();
for link in doc.select(&link_selector) {
let Some(href) = link.value().attr("href") else {
continue;
};
let raw_title = link
.value()
.attr("title")
.unwrap_or("")
.to_string();
let title = Self::decode_html(&raw_title).trim().to_string();
if title.is_empty() {
continue;
}
let normalized = Self::normalize_title(&title);
if normalized.is_empty() {
continue;
}
options.push(FilterOption {
id: normalized.clone(),
title: title.clone(),
});
map.insert(normalized, href.to_string());
}
// Swap both structures in under their write locks; a poisoned lock
// simply leaves the previous contents in place.
if let Ok(mut guard) = categories.write() {
*guard = options;
}
if let Ok(mut guard) = category_map.write() {
*guard = map;
}
Ok(())
}
}
#[async_trait]
impl Provider for BlowjobsproProvider {
/// Resolve the request into a Target, fetch the listing HTML, and
/// parse it. Errors are reported and surface as an empty result.
async fn get_videos(
&self,
_cache: VideoCache,
_pool: DbPool,
sort: String,
query: Option<String>,
page: String,
_per_page: String,
options: ServerOptions,
) -> Vec<VideoItem> {
// Invalid or missing page values fall back to page 1.
let page_number = page.parse::<u16>().unwrap_or(1).max(1);
let target = self.target_from_request(
sort.as_str(),
query.as_deref(),
options.categories.as_deref(),
);
let html = match self.fetch_listing_html(&options, &target, page_number).await {
Ok(html) => html,
Err(error) => {
report_provider_error(CHANNEL_ID, "get_videos.fetch_listing_html", &error.to_string())
.await;
return vec![];
}
};
match self.parse_listing_html(&html) {
Ok(videos) => videos,
Err(error) => {
report_provider_error(CHANNEL_ID, "get_videos.parse_listing_html", &error.to_string())
.await;
vec![]
}
}
}
fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
Some(self.build_channel(clientversion))
}
}

384
src/providers/erome.rs Normal file
View File

@@ -0,0 +1,384 @@
use crate::DbPool;
use crate::api::ClientVersion;
use crate::providers::{Provider, report_provider_error, requester_or_default};
use crate::status::*;
use crate::util::parse_abbreviated_number;
use crate::videos::{ServerOptions, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode};
use regex::Regex;
use scraper::{ElementRef, Html, Selector};
use url::form_urlencoded;
use wreq::Version;
// Registry metadata: group and discovery tags for this provider.
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "amateur-homemade",
tags: &["amateur", "albums", "homemade"],
};
// Scrape origin and the channel id this provider registers under.
const BASE_URL: &str = "https://www.erome.com";
const CHANNEL_ID: &str = "erome";
// Provider-local Result/Error types generated by error_chain.
error_chain! {
foreign_links {
Io(std::io::Error);
HttpRequest(wreq::Error);
}
}
// Stateless provider; only the base URL is held.
#[derive(Debug, Clone)]
pub struct EromeProvider {
url: String,
}
// Resolved browse intent for a single listing request.
#[derive(Debug, Clone)]
enum Target {
ExploreHot,
ExploreNew,
// `order_new` adds `o=new` to the search query string.
Search { query: String, order_new: bool },
// `slug` is a sanitized uploader profile slug.
UploaderPosts { slug: String },
}
impl EromeProvider {
/// Construct the provider pointed at the production EroMe origin.
pub fn new() -> Self {
Self {
url: BASE_URL.to_string(),
}
}
/// Assemble the static channel descriptor (no dynamic categories).
fn build_channel(&self, _clientversion: ClientVersion) -> Channel {
Channel {
id: CHANNEL_ID.to_string(),
name: "EroMe".to_string(),
description:
"EroMe album feed with hot/new routing, search, and uploader profile shortcuts."
.to_string(),
premium: false,
favicon: "https://www.google.com/s2/favicons?sz=64&domain=erome.com".to_string(),
status: "active".to_string(),
categories: vec![],
options: vec![
// Sort ids map to Target variants in `resolve_target`.
ChannelOption {
id: "sort".to_string(),
title: "Sort".to_string(),
description: "Browse EroMe hot or new feeds.".to_string(),
systemImage: "list.number".to_string(),
colorName: "blue".to_string(),
options: vec![
FilterOption {
id: "new".to_string(),
title: "New".to_string(),
},
FilterOption {
id: "hot".to_string(),
title: "Hot".to_string(),
},
],
multiSelect: false,
},
// The "sites" option carries a free-form uploader slug.
ChannelOption {
id: "sites".to_string(),
title: "Uploader".to_string(),
description: "Jump directly to an uploader profile slug.".to_string(),
systemImage: "person.crop.square".to_string(),
colorName: "purple".to_string(),
options: vec![FilterOption {
id: "all".to_string(),
title: "All".to_string(),
}],
multiSelect: false,
},
],
nsfw: true,
cacheDuration: Some(1800),
}
}
/// Parse a CSS selector; None on parse failure (errors are swallowed
/// here because callers treat a bad selector as "no matches").
fn selector(value: &str) -> Option<Selector> {
Selector::parse(value).ok()
}
/// Compile a regex; None on failure.
fn regex(value: &str) -> Option<Regex> {
Regex::new(value).ok()
}
/// Decode HTML entities, replace non-breaking spaces, and collapse
/// whitespace runs. Falls back to the raw input on decode failure.
fn normalize_text(value: &str) -> String {
decode(value.as_bytes())
.to_string()
.unwrap_or_else(|_| value.to_string())
.replace('\u{a0}', " ")
.split_whitespace()
.collect::<Vec<_>>()
.join(" ")
.trim()
.to_string()
}
/// Resolve a possibly relative href against the provider base URL.
/// Returns an empty string for blank input.
fn normalize_url(&self, value: &str) -> String {
let trimmed = value.trim();
if trimmed.is_empty() {
return String::new();
}
if trimmed.starts_with("http://") || trimmed.starts_with("https://") {
return trimmed.to_string();
}
// Protocol-relative URL: keep the host, pin the scheme to https.
if trimmed.starts_with("//") {
return format!("https:{trimmed}");
}
format!(
"{}/{}",
self.url.trim_end_matches('/'),
trimmed.trim_start_matches('/')
)
}
/// Pull the album GUID out of an EroMe album URL (`…/a/<guid>…`).
///
/// Scans each `/a/` occurrence and returns the first non-empty run of
/// ASCII alphanumerics that follows it, or `None` when the URL has no
/// album path. Implemented with plain string scanning instead of the
/// previous per-call `Regex::new` compile (which also silently mapped
/// a compile failure to `None`).
fn extract_album_guid_from_url(value: &str) -> Option<String> {
    for (idx, _) in value.match_indices("/a/") {
        let guid: String = value[idx + 3..]
            .chars()
            .take_while(|c| c.is_ascii_alphanumeric())
            .collect();
        if !guid.is_empty() {
            return Some(guid);
        }
    }
    None
}
/// Interpret `query` as an uploader shortcut (`uploader:<name>` or
/// `user:<name>`). Returns the sanitized, lowercased profile slug
/// (ASCII alphanumerics plus `_` and `-`), or `None` when the query is
/// not an uploader shortcut or the sanitized slug comes out empty.
fn uploader_slug_from_query(query: &str) -> Option<String> {
    let trimmed = query.trim();
    if trimmed.is_empty() {
        return None;
    }
    let lowered = trimmed.to_ascii_lowercase();
    let rest = lowered
        .strip_prefix("uploader:")
        .or_else(|| lowered.strip_prefix("user:"))?
        .trim();
    let slug: String = rest
        .trim_start_matches('@')
        .chars()
        .filter(|&c| c.is_ascii_alphanumeric() || c == '_' || c == '-')
        .collect();
    if slug.is_empty() { None } else { Some(slug) }
}
/// Resolve sort/query/site inputs into a Target. Precedence: explicit
/// uploader from the "sites" option, then an `uploader:`/`user:` query
/// shortcut, then free search, then the hot/new feed by sort.
fn resolve_target(&self, query: &str, options: &ServerOptions, sort: &str) -> Target {
if let Some(site) = options.sites.as_deref() {
let normalized = site.trim();
if !normalized.is_empty() && !normalized.eq_ignore_ascii_case("all") {
// Sanitize to the slug character set before routing.
let slug = normalized
.trim_start_matches('@')
.chars()
.filter(|c| c.is_ascii_alphanumeric() || *c == '_' || *c == '-')
.collect::<String>();
if !slug.is_empty() {
return Target::UploaderPosts { slug };
}
}
}
if !query.trim().is_empty() {
if let Some(slug) = Self::uploader_slug_from_query(query) {
return Target::UploaderPosts { slug };
}
return Target::Search {
query: query.trim().to_string(),
order_new: matches!(sort, "new" | "latest"),
};
}
if matches!(sort, "new" | "latest") {
Target::ExploreNew
} else {
Target::ExploreHot
}
}
/// Produce the listing URL for a target and (1-based) page number.
fn build_url_for_target(&self, target: &Target, page: u16) -> String {
let page = page.max(1);
match target {
Target::ExploreHot => {
// Page 1 omits the query parameter entirely.
if page == 1 {
format!("{}/explore", self.url)
} else {
format!("{}/explore?page={page}", self.url)
}
}
Target::ExploreNew => {
if page == 1 {
format!("{}/explore/new", self.url)
} else {
format!("{}/explore/new?page={page}", self.url)
}
}
Target::Search { query, order_new } => {
// form_urlencoded handles escaping of the raw query text.
let mut serializer = form_urlencoded::Serializer::new(String::new());
serializer.append_pair("q", query);
if *order_new {
serializer.append_pair("o", "new");
}
serializer.append_pair("page", &page.to_string());
format!("{}/search?{}", self.url, serializer.finish())
}
Target::UploaderPosts { slug } => {
// `t=posts` selects the uploader's posts tab.
let mut serializer = form_urlencoded::Serializer::new(String::new());
serializer.append_pair("t", "posts");
serializer.append_pair("page", &page.to_string());
format!("{}/{}?{}", self.url, slug, serializer.finish())
}
}
}
/// Normalized text of the first element matching `selector` under
/// `parent`; empty string when nothing matches.
fn text_from_selector(parent: &ElementRef<'_>, selector: &Selector) -> String {
parent
.select(selector)
.next()
.map(|node| Self::normalize_text(&node.text().collect::<Vec<_>>().join(" ")))
.unwrap_or_default()
}
/// Build a VideoItem from one album card. Returns None when the card
/// lacks an album link, a parsable GUID, or any usable title.
fn parse_video_item(&self, card: &ElementRef<'_>) -> Option<VideoItem> {
let link_selector = Self::selector("a.album-link[href]")?;
let title_selector = Self::selector("a.album-title")?;
let thumb_selector = Self::selector("img.album-thumbnail")?;
let user_selector = Self::selector("span.album-user")?;
let views_selector = Self::selector("span.album-bottom-views")?;
let link = card.select(&link_selector).next()?;
let href = link.value().attr("href")?;
let album_url = self.normalize_url(href);
let album_id = Self::extract_album_guid_from_url(&album_url)?;
// Prefer the visible title element; fall back to the link's
// `title` attribute.
let title = Self::text_from_selector(card, &title_selector);
let fallback_title = link
.value()
.attr("title")
.map(Self::normalize_text)
.unwrap_or_default();
let final_title = if !title.is_empty() { title } else { fallback_title };
if final_title.is_empty() {
return None;
}
// `src` first, then the rotating-preview attribute.
let thumb = card
.select(&thumb_selector)
.find_map(|node| {
node.value()
.attr("src")
.or_else(|| node.value().attr("data-rotate-src"))
})
.map(|value| self.normalize_url(value))
.unwrap_or_default();
let uploader = Self::text_from_selector(card, &user_selector);
// Sanitize the display name down to a URL-safe slug.
let uploader_slug = uploader
.trim()
.trim_start_matches('@')
.chars()
.filter(|c| c.is_ascii_alphanumeric() || *c == '_' || *c == '-')
.collect::<String>();
let views_text = Self::text_from_selector(card, &views_selector);
// Locale cleanup before parsing: "," as decimal mark, no spaces.
let views = parse_abbreviated_number(views_text.replace(',', ".").replace(' ', "").as_str());
let uploader_url = if uploader_slug.is_empty() {
String::new()
} else {
format!("{}/{}?t=posts", self.url, uploader_slug)
};
// Albums carry no duration, hence 0.
let mut item = VideoItem::new(
album_id,
final_title,
album_url,
CHANNEL_ID.to_string(),
thumb,
0,
);
if let Some(value) = views {
item = item.views(value);
}
if !uploader.is_empty() {
item = item.uploader(uploader);
}
if !uploader_url.is_empty() {
item = item.uploader_url(uploader_url);
}
if !uploader_slug.is_empty() {
item.uploaderId = Some(format!("{CHANNEL_ID}:{uploader_slug}"));
}
Some(item)
}
/// Parse every album card on a listing page into VideoItems.
fn parse_listing(&self, html: &str) -> Vec<VideoItem> {
let document = Html::parse_document(html);
let Some(card_selector) = Self::selector("div.album") else {
return vec![];
};
document
.select(&card_selector)
.filter_map(|card| self.parse_video_item(&card))
.collect()
}
/// Chrome-like headers (Accept, language, UA, Referer) for requests.
fn html_headers(referer: &str) -> Vec<(String, String)> {
vec![
(
"accept".to_string(),
"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8".to_string(),
),
("accept-language".to_string(), "en-US,en;q=0.8".to_string()),
(
"user-agent".to_string(),
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36".to_string(),
),
("referer".to_string(), referer.to_string()),
]
}
/// GET a page as HTML over HTTP/1.1, using `/explore` as the Referer.
async fn fetch_page(&self, options: &ServerOptions, url: &str) -> Result<String> {
let mut requester = requester_or_default(options, CHANNEL_ID, "get_videos");
requester
.get_with_headers(url, Self::html_headers(&format!("{}/explore", self.url)), Some(Version::HTTP_11))
.await
.map_err(|error| Error::from(format!("request failed for {url}: {error}")))
}
}
#[async_trait]
impl Provider for EromeProvider {
/// Resolve the request into a Target, fetch the page, and parse it.
/// Fetch errors are reported and surface as an empty result.
async fn get_videos(
&self,
_cache: crate::util::cache::VideoCache,
_pool: DbPool,
sort: String,
query: Option<String>,
page: String,
_per_page: String,
options: ServerOptions,
) -> Vec<VideoItem> {
// An empty sort argument defers to the options' sort, then "new".
let sort_value = if sort.is_empty() {
options.sort.as_deref().unwrap_or("new").to_string()
} else {
sort
};
let query_value = query.unwrap_or_default();
let page_value = page.parse::<u16>().unwrap_or(1);
let target = self.resolve_target(&query_value, &options, &sort_value);
let url = self.build_url_for_target(&target, page_value);
match self.fetch_page(&options, &url).await {
Ok(html) => self.parse_listing(&html),
Err(error) => {
report_provider_error(CHANNEL_ID, "get_videos.fetch", &error.to_string()).await;
vec![]
}
}
}
fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
Some(self.build_channel(clientversion))
}
}

View File

@@ -7,6 +7,7 @@ use crate::util::discord::format_error_chain;
use crate::util::discord::send_discord_error_report; use crate::util::discord::send_discord_error_report;
use crate::util::requester::Requester; use crate::util::requester::Requester;
use crate::util::time::parse_time_to_seconds; use crate::util::time::parse_time_to_seconds;
use crate::util::hoster_proxy::{proxy_name_for_url, rewrite_hoster_url};
use crate::videos::ServerOptions; use crate::videos::ServerOptions;
use crate::videos::VideoFormat; use crate::videos::VideoFormat;
use crate::videos::VideoItem; use crate::videos::VideoItem;
@@ -332,6 +333,13 @@ impl SxyprnProvider {
.and_then(|s| s.split("</div>").next()) .and_then(|s| s.split("</div>").next())
.ok_or_else(|| ErrorKind::Parse("failed to extract title_parts".into()))?; .ok_or_else(|| ErrorKind::Parse("failed to extract title_parts".into()))?;
let title_links: Vec<String> = video_segment
.split("href='https://")
.skip(1)
.filter_map(|part| part.split("'").next().map(|u| u.to_string()))
.collect();
let document = Html::parse_document(title_parts); let document = Html::parse_document(title_parts);
let selector = Selector::parse("*") let selector = Selector::parse("*")
.map_err(|e| ErrorKind::Parse(format!("selector parse failed: {e}")))?; .map_err(|e| ErrorKind::Parse(format!("selector parse failed: {e}")))?;
@@ -358,6 +366,30 @@ impl SxyprnProvider {
.trim() .trim()
.to_string(); .to_string();
// De-duplicate repeated titles
let words: Vec<&str> = title.split_whitespace().collect();
if words.len() > 1 {
for pattern_len in (1..=words.len() / 2).rev() {
let pattern = &words[0..pattern_len];
let mut all_match = true;
let mut idx = pattern_len;
while idx < words.len() {
let end = std::cmp::min(idx + pattern_len, words.len());
if &words[idx..end] != &pattern[0..(end - idx)] {
all_match = false;
break;
}
idx += pattern_len;
}
if all_match && words.len() % pattern_len == 0 {
title = pattern.join(" ");
break;
}
}
}
if title.to_ascii_lowercase().starts_with("new ") { if title.to_ascii_lowercase().starts_with("new ") {
title = title[4..].to_string(); title = title[4..].to_string();
} }
@@ -401,7 +433,7 @@ impl SxyprnProvider {
.nth(1) .nth(1)
.and_then(|s| s.split("data-src='").nth(1)) .and_then(|s| s.split("data-src='").nth(1))
.and_then(|s| s.split('\'').next()) .and_then(|s| s.split('\'').next())
.ok_or_else(|| ErrorKind::Parse("failed to extract thumb".into()))?; .unwrap_or("");
let thumb = format!("https:{thumb_path}"); let thumb = format!("https:{thumb_path}");
@@ -442,48 +474,75 @@ impl SxyprnProvider {
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32; let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
// stream urls - collect both lulustream and vidara.so URLs // stream urls - collect both lulustream and vidara.so URLs
let mut stream_urls = vec![format!( let mut formats = vec![];
// Add sxyprn format
let sxyprn_url = format!(
"{}/proxy/sxyprn/post/{}", "{}/proxy/sxyprn/post/{}",
options.public_url_base.as_deref().unwrap_or(""), options.public_url_base.as_deref().unwrap_or(""),
id id
)]; );
formats.push(
VideoFormat::new(sxyprn_url.clone(), "auto".to_string(), "mp4".to_string())
.format_note(
sxyprn_url
.split("/")
.nth(4)
.unwrap_or("sxyprn")
.to_string(),
),
);
// Also collect and transform vidara.so URLs to proxy format // Also collect and transform vidara.so URLs to proxy format and add as formats
let vidara_urls: Vec<String> = video_segment let vidara_urls: Vec<String> = title_links
.split("extlink_icon extlink") .iter()
.filter_map(|part| { .filter(|url| proxy_name_for_url(url).as_deref() == Some("vidara"))
part.split("href='") .map(|url| rewrite_hoster_url(options, url))
.last()
.and_then(|s| s.split('\'').next())
.map(|u| u.to_string())
})
.filter(|url| url.contains("vidara.so/v/"))
.filter_map(|url| {
url.split("/v/").last().map(|video_id| {
format!(
"{}/proxy/vidara/e/{}",
options.public_url_base.as_deref().unwrap_or(""),
video_id
)
})
})
.collect(); .collect();
stream_urls.extend(vidara_urls); for vidara_url in vidara_urls {
formats.push(
let formats: Vec<VideoFormat> = stream_urls VideoFormat::m3u8(vidara_url.clone(), "1080".to_string(), "m3u8".to_string())
.into_iter()
.map(|url| {
VideoFormat::new(url.clone(), "auto".to_string(), "mp4".to_string())
.format_note( .format_note(
url.split("/") vidara_url
.split("/")
.nth(4) .nth(4)
.or_else(|| Some(&url)) .unwrap_or("vidara")
.unwrap_or_default()
.to_string(), .to_string(),
) )
}) .format_id("vidara".to_string()),
.collect::<Vec<VideoFormat>>(); );
}
let doodstream_urls: Vec<String> = title_links
.iter()
.filter(|url| proxy_name_for_url(url).as_deref() == Some("doodstream"))
.map(|url| rewrite_hoster_url(options, url))
.collect();
for dood_url in doodstream_urls {
formats.push(
VideoFormat::m3u8(dood_url.clone(), "auto".to_string(), "m3u8".to_string())
.format_note("doodstream".to_string())
.format_id("doodstream".to_string()),
);
}
let lulustream_urls: Vec<String> = title_links
.iter()
.filter(|url| proxy_name_for_url(url).as_deref() == Some("lulustream"))
.map(|url| rewrite_hoster_url(options, url))
.collect();
for lulustream_url in lulustream_urls {
formats.push(
VideoFormat::m3u8(lulustream_url.clone(), "auto".to_string(), "m3u8".to_string())
.format_note("lulustream".to_string())
.format_id("lulustream".to_string()),
);
}
let mut video_item = VideoItem::new( let mut video_item = VideoItem::new(
id.clone(), id.clone(),
title, title,

587
src/providers/youporn.rs Normal file
View File

@@ -0,0 +1,587 @@
use crate::DbPool;
use crate::api::ClientVersion;
use crate::providers::{Provider, report_provider_error, requester_or_default};
use crate::status::*;
use crate::util::cache::VideoCache;
use crate::util::parse_abbreviated_number;
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode};
use scraper::{ElementRef, Html, Selector};
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use std::thread;
use url::{Url, form_urlencoded};
use wreq::Version;
// Channel grouping/tag metadata consumed by the provider registry.
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
    crate::providers::ProviderChannelMetadata {
        group_id: "mainstream-tube",
        tags: &["mainstream", "studio", "search"],
    };
// Site root; used to build listing/search URLs and resolve relative links.
const BASE_URL: &str = "https://www.youporn.com";
// Stable provider id; also used as the uploader-id prefix in parse_items.
const CHANNEL_ID: &str = "youporn";
// error_chain boilerplate wrapping I/O and HTTP-client errors.
error_chain! {
    foreign_links {
        Io(std::io::Error);
        HttpRequest(wreq::Error);
    }
}
/// Scraping provider for youporn.com listings.
#[derive(Debug, Clone)]
pub struct YoupornProvider {
    // Base site URL (BASE_URL at construction time).
    url: String,
    // Lowercased homepage link text -> listing Target, filled in the
    // background by spawn_initial_load and read by target_from_query.
    shortcuts: Arc<RwLock<HashMap<String, Target>>>,
}
/// A resolved listing destination derived from the user query.
#[derive(Debug, Clone)]
enum Target {
    /// Front page / newest feed.
    Latest { sort: String },
    /// Full-text search.
    Search { query: String },
    /// /porntags/<slug>/ listing.
    Tag { slug: String, sort: String },
    /// /channel/<slug>/ listing.
    Channel { slug: String, sort: String },
    /// /pornstar/<slug>/ listing.
    Pornstar { slug: String, sort: String },
    /// /amateur/<slug>/ listing.
    Amateur { slug: String, sort: String },
}
impl YoupornProvider {
/// Construct the provider and kick off the background shortcut scrape.
pub fn new() -> Self {
    let provider = Self {
        url: BASE_URL.to_string(),
        shortcuts: Arc::new(RwLock::new(HashMap::new())),
    };
    // Non-blocking: shortcuts fill in asynchronously on a worker thread.
    provider.spawn_initial_load();
    provider
}
/// One-shot background fetch of the homepage to populate the shortcut map.
/// Runs on a dedicated OS thread with its own single-threaded tokio runtime
/// so construction never blocks and no outer runtime is required.
fn spawn_initial_load(&self) {
    let shortcuts = Arc::clone(&self.shortcuts);
    let url = self.url.clone();
    thread::spawn(move || {
        // Build a private current-thread runtime; bail silently on failure.
        let rt = match tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()
        {
            Ok(v) => v,
            Err(_) => return,
        };
        rt.block_on(async move {
            let mut requester = crate::util::requester::Requester::new();
            if let Ok(html) = requester.get(&url, None).await {
                let map = Self::collect_shortcuts(&html);
                // Replace the whole map atomically under the write lock.
                if let Ok(mut guard) = shortcuts.write() {
                    *guard = map;
                }
            }
        });
    });
}
/// Static channel descriptor surfaced to clients.
/// The client version parameter is currently unused.
fn build_channel(&self, _clientversion: ClientVersion) -> Channel {
    Channel {
        id: CHANNEL_ID.to_string(),
        name: "YouPorn".to_string(),
        description: "YouPorn listings with search, tag/channel shortcuts, and watch-page playback URLs."
            .to_string(),
        premium: false,
        favicon: "https://www.google.com/s2/favicons?sz=64&domain=youporn.com".to_string(),
        status: "active".to_string(),
        categories: vec![],
        // Only one sort option is exposed; normalized_sort collapses to it.
        options: vec![ChannelOption {
            id: "sort".to_string(),
            title: "Sort".to_string(),
            description: "Latest feed ordering.".to_string(),
            systemImage: "list.number".to_string(),
            colorName: "blue".to_string(),
            options: vec![FilterOption {
                id: "new".to_string(),
                title: "Most Recent".to_string(),
            }],
            multiSelect: false,
        }],
        nsfw: true,
        // Client-side cache hint, seconds.
        cacheDuration: Some(1800),
    }
}
/// Parse a CSS selector, yielding None instead of an error on bad input.
fn selector(value: &str) -> Option<Selector> {
    match Selector::parse(value) {
        Ok(parsed) => Some(parsed),
        Err(_) => None,
    }
}
/// HTML-entity-decode a string and collapse all whitespace runs into
/// single spaces, trimming the result.
fn normalize_text(value: &str) -> String {
    // decode() falls back to the raw input when entity decoding fails.
    decode(value.as_bytes())
        .to_string()
        .unwrap_or_else(|_| value.to_string())
        .split_whitespace()
        .collect::<Vec<_>>()
        .join(" ")
        .trim()
        .to_string()
}
/// Resolve a possibly relative or scheme-less link against the provider
/// base URL. Empty input yields an empty string.
fn normalize_url(&self, value: &str) -> String {
    let candidate = value.trim();
    if candidate.is_empty() {
        String::new()
    } else if candidate.starts_with("http://") || candidate.starts_with("https://") {
        // Already absolute: pass through untouched.
        candidate.to_string()
    } else if candidate.starts_with("//") {
        // Protocol-relative: force https.
        format!("https:{candidate}")
    } else {
        // Path-only: join onto the base with exactly one slash between.
        let base = self.url.trim_end_matches('/');
        let path = candidate.trim_start_matches('/');
        format!("{base}/{path}")
    }
}
/// Collapse any requested sort onto the single supported ordering.
fn normalized_sort(_sort: &str) -> &'static str {
    "new"
}
/// Sorting currently contributes no URL path segment for any input.
fn sort_suffix(_sort: &str) -> &'static str {
    ""
}
/// Query-string suffix for pagination; page 1 (and 0) is the bare URL.
fn page_suffix(page: u8) -> String {
    match page {
        0 | 1 => String::new(),
        later => format!("?page={later}"),
    }
}
/// Browser-like request headers for HTML page fetches, including the
/// caller-supplied referer.
fn html_headers(referer: &str) -> Vec<(String, String)> {
    let pairs: [(&str, &str); 6] = [
        ("accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8"),
        ("accept-language", "en-US,en;q=0.7"),
        ("cache-control", "no-cache"),
        ("pragma", "no-cache"),
        ("user-agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36"),
        ("referer", referer),
    ];
    pairs
        .iter()
        .map(|(name, value)| (name.to_string(), value.to_string()))
        .collect()
}
/// Resolve the user's query string into a concrete listing Target.
/// Precedence: explicit "kind:slug" prefixes, then scraped homepage
/// shortcut titles, then full-text search. Empty query -> latest feed.
fn target_from_query(&self, query: &str, sort: &str) -> Target {
    let q = query.trim();
    if q.is_empty() {
        return Target::Latest {
            sort: Self::normalized_sort(sort).to_string(),
        };
    }
    let lower = q.to_ascii_lowercase();
    // Explicit prefixes like "tag:big tits" -> slug "big-tits".
    for (prefix, kind) in [
        ("tag:", "tag"),
        ("channel:", "channel"),
        ("pornstar:", "pornstar"),
        ("amateur:", "amateur"),
    ] {
        if let Some(rest) = lower.strip_prefix(prefix) {
            let slug = rest.trim().replace(' ', "-");
            if !slug.is_empty() {
                return match kind {
                    "tag" => Target::Tag {
                        slug,
                        sort: Self::normalized_sort(sort).to_string(),
                    },
                    "channel" => Target::Channel {
                        slug,
                        sort: Self::normalized_sort(sort).to_string(),
                    },
                    "pornstar" => Target::Pornstar {
                        slug,
                        sort: Self::normalized_sort(sort).to_string(),
                    },
                    _ => Target::Amateur {
                        slug,
                        sort: Self::normalized_sort(sort).to_string(),
                    },
                };
            }
        }
    }
    // Whitespace-normalized lookup against the scraped homepage shortcuts.
    let shortcut_key = lower.split_whitespace().collect::<Vec<_>>().join(" ");
    if let Ok(guard) = self.shortcuts.read()
        && let Some(target) = guard.get(&shortcut_key)
    {
        // Re-apply the caller's sort on top of the stored shortcut target.
        return match target {
            Target::Tag { slug, .. } => Target::Tag {
                slug: slug.clone(),
                sort: Self::normalized_sort(sort).to_string(),
            },
            Target::Channel { slug, .. } => Target::Channel {
                slug: slug.clone(),
                sort: Self::normalized_sort(sort).to_string(),
            },
            Target::Pornstar { slug, .. } => Target::Pornstar {
                slug: slug.clone(),
                sort: Self::normalized_sort(sort).to_string(),
            },
            Target::Amateur { slug, .. } => Target::Amateur {
                slug: slug.clone(),
                sort: Self::normalized_sort(sort).to_string(),
            },
            _ => target.clone(),
        };
    }
    // Fallback: full-text search with the trimmed original query.
    Target::Search {
        query: q.to_string(),
    }
}
/// Build the absolute listing URL for a Target and 1-based page number.
/// Note: sort_suffix currently returns "" and page_suffix is "?page=N"
/// for pages > 1, so most arms reduce to "<base>/<kind>/<slug>/[?page=N]".
fn build_url(&self, target: &Target, page: u8) -> String {
    match target {
        Target::Latest { sort } => format!(
            "{}/{}{}",
            self.url,
            Self::sort_suffix(sort),
            Self::page_suffix(page)
        ),
        Target::Search { query } => {
            // Search uses an explicit query parameter rather than a path.
            let encoded: String = form_urlencoded::byte_serialize(query.as_bytes()).collect();
            if page > 1 {
                format!("{}/search/?query={encoded}&page={page}", self.url)
            } else {
                format!("{}/search/?query={encoded}", self.url)
            }
        }
        Target::Tag { slug, sort } => format!(
            "{}/porntags/{}/{}{}",
            self.url,
            slug.trim_matches('/'),
            Self::sort_suffix(sort),
            Self::page_suffix(page)
        ),
        Target::Channel { slug, sort } => format!(
            "{}/channel/{}/{}{}",
            self.url,
            slug.trim_matches('/'),
            Self::sort_suffix(sort),
            Self::page_suffix(page)
        ),
        Target::Pornstar { slug, sort } => format!(
            "{}/pornstar/{}/{}{}",
            self.url,
            slug.trim_matches('/'),
            Self::sort_suffix(sort),
            Self::page_suffix(page)
        ),
        Target::Amateur { slug, sort } => format!(
            "{}/amateur/{}/{}{}",
            self.url,
            slug.trim_matches('/'),
            Self::sort_suffix(sort),
            Self::page_suffix(page)
        ),
    }
}
/// Scrape homepage anchors into a map of lowercase link text -> Target.
/// Only /porntags/, /channel/, /pornstar/ and /amateur/ links are kept;
/// later duplicates of the same link text overwrite earlier ones.
fn collect_shortcuts(html: &str) -> HashMap<String, Target> {
    let mut shortcuts = HashMap::new();
    let document = Html::parse_document(html);
    let Some(anchors) = Self::selector("a[href]") else {
        return shortcuts;
    };
    for anchor in document.select(&anchors) {
        let Some(href) = anchor.value().attr("href") else {
            continue;
        };
        let label =
            Self::normalize_text(&anchor.text().collect::<String>()).to_ascii_lowercase();
        if label.is_empty() {
            continue;
        }
        // Reduce absolute links to their path so prefix matching is uniform;
        // unparsable absolute URLs become "" and match nothing.
        let path = if href.starts_with("http://") || href.starts_with("https://") {
            Url::parse(href)
                .ok()
                .map(|parsed| parsed.path().to_string())
                .unwrap_or_default()
        } else {
            href.to_string()
        };
        for (prefix, kind) in [
            ("/porntags/", "tag"),
            ("/channel/", "channel"),
            ("/pornstar/", "pornstar"),
            ("/amateur/", "amateur"),
        ] {
            let Some(rest) = path.strip_prefix(prefix) else {
                continue;
            };
            let slug = rest.trim_matches('/').to_string();
            if !slug.is_empty() {
                let sort = "new".to_string();
                let target = match kind {
                    "tag" => Target::Tag { slug, sort },
                    "channel" => Target::Channel { slug, sort },
                    "pornstar" => Target::Pornstar { slug, sort },
                    _ => Target::Amateur { slug, sort },
                };
                shortcuts.insert(label.clone(), target);
            }
            // The path prefixes are mutually exclusive: first match ends
            // the scan for this anchor, mirroring the original per-prefix
            // `continue` behavior.
            break;
        }
    }
    shortcuts
}
/// Normalized inner text of an element, or "" when the node is absent.
fn text_of(node: Option<ElementRef<'_>>) -> String {
    match node {
        Some(element) => Self::normalize_text(&element.text().collect::<String>()),
        None => String::new(),
    }
}
/// Parse a listing page into VideoItems by scraping the video-box cards.
/// Cards without a /watch/ link or a resolvable id are skipped.
fn parse_items(&self, html: &str) -> Vec<VideoItem> {
    let document = Html::parse_document(html);
    let Some(card_selector) = Self::selector("article.video-box.js_video-box") else {
        return vec![];
    };
    // Per-card sub-selectors; each is Option so a bad pattern degrades
    // gracefully to "field absent" rather than failing the whole parse.
    let link_selector = Self::selector("a[data-testid='plw_video_thumbnail_link'], a.video-box-image, a.video-title-text");
    let title_selector = Self::selector("a.video-title-text");
    let thumb_selector = Self::selector("img");
    let duration_selector = Self::selector(".tm_video_duration");
    let views_selector = Self::selector("span.info-views");
    let uploader_selector = Self::selector("a.author-title-text");
    let tag_selector = Self::selector("a.bubble-porntag");
    let mut items = Vec::new();
    for card in document.select(&card_selector) {
        let link_node = link_selector
            .as_ref()
            .and_then(|s| card.select(s).next());
        let href = link_node
            .and_then(|v| v.value().attr("href"))
            .unwrap_or_default();
        // Only watch-page links represent playable videos.
        if !href.contains("/watch/") {
            continue;
        }
        // Prefer the explicit data attribute; fall back to /watch/<id>/.
        let id = card
            .value()
            .attr("data-video-id")
            .map(|v| v.to_string())
            .or_else(|| {
                href.split("/watch/")
                    .nth(1)
                    .and_then(|v| v.split('/').next())
                    .map(|v| v.to_string())
            })
            .unwrap_or_default();
        if id.is_empty() {
            continue;
        }
        // The title attribute wins over anchor text when present.
        let title = title_selector
            .as_ref()
            .and_then(|s| card.select(s).next())
            .map(|v| {
                let from_title = v.value().attr("title").unwrap_or_default();
                if from_title.is_empty() {
                    Self::normalize_text(&v.text().collect::<String>())
                } else {
                    Self::normalize_text(from_title)
                }
            })
            .unwrap_or_default();
        // Lazy-load attributes first, then the plain src.
        let thumb = thumb_selector
            .as_ref()
            .and_then(|s| card.select(s).next())
            .and_then(|v| {
                v.value()
                    .attr("data-original")
                    .or_else(|| v.value().attr("data-src"))
                    .or_else(|| v.value().attr("src"))
            })
            .map(|v| self.normalize_url(v))
            .unwrap_or_default();
        let duration_text = Self::text_of(duration_selector.as_ref().and_then(|s| card.select(s).next()));
        let duration = parse_time_to_seconds(&duration_text).unwrap_or(0) as u32;
        let view_text = views_selector
            .as_ref()
            .and_then(|s| card.select(s).next())
            .map(|v| Self::normalize_text(&v.text().collect::<String>()))
            .unwrap_or_default();
        let views = parse_abbreviated_number(&view_text).unwrap_or(0) as u32;
        // NOTE(review): assumes the second .info-views span carries the
        // percentage rating — confirm against current page markup.
        let rating = views_selector
            .as_ref()
            .and_then(|s| card.select(s).nth(1))
            .map(|v| Self::normalize_text(&v.text().collect::<String>()).replace('%', ""))
            .and_then(|v| v.parse::<f32>().ok());
        let uploader_node = uploader_selector.as_ref().and_then(|s| card.select(s).next());
        let uploader_name = uploader_node
            .as_ref()
            .map(|v| Self::normalize_text(&v.text().collect::<String>()))
            .unwrap_or_default();
        let uploader_href = uploader_node
            .and_then(|v| v.value().attr("href"))
            .map(|v| self.normalize_url(v));
        let uploader_id = card
            .value()
            .attr("data-uploader-id")
            .map(|v| format!("{CHANNEL_ID}:{v}"));
        // Hover-preview clip URL; unescape &amp; in the raw attribute value.
        let preview = link_node
            .and_then(|v| v.value().attr("data-mediabook"))
            .map(|v| v.replace("&amp;", "&"));
        let mut tags = Vec::new();
        if let Some(sel) = &tag_selector {
            for tag in card.select(sel) {
                let title = Self::normalize_text(&tag.text().collect::<String>());
                if !title.is_empty() {
                    tags.push(title);
                }
            }
        }
        // Assemble the item; optional fields are attached only when present.
        let mut item = VideoItem::new(
            id,
            title,
            self.normalize_url(href),
            CHANNEL_ID.to_string(),
            thumb,
            duration,
        )
        .views(views);
        if let Some(value) = rating {
            item = item.rating(value);
        }
        if !uploader_name.is_empty() {
            item = item.uploader(uploader_name);
        }
        if let Some(value) = uploader_href {
            item.uploaderUrl = Some(value);
        }
        if let Some(value) = uploader_id {
            item.uploaderId = Some(value);
        }
        if let Some(value) = preview {
            item = item.preview(value);
        }
        if !tags.is_empty() {
            item = item.tags(tags);
        }
        items.push(item);
    }
    items
}
}
#[async_trait]
impl Provider for YoupornProvider {
    /// Fetch one page of listings for the given query/sort.
    /// Serves a 5-minute cache; on fetch failure or an empty parse, falls
    /// back to the last cached items for the same URL (possibly empty).
    async fn get_videos(
        &self,
        cache: VideoCache,
        _db_pool: DbPool,
        sort: String,
        query: Option<String>,
        page: String,
        _per_page: String,
        options: ServerOptions,
    ) -> Vec<VideoItem> {
        let query = query.unwrap_or_default();
        // NOTE(review): page is parsed as u8, so values above 255 (or
        // non-numeric input) silently become page 1 — confirm intended.
        let page = page.parse::<u8>().unwrap_or(1);
        let target = self.target_from_query(&query, &sort);
        let video_url = self.build_url(&target, page);
        // Fresh cache hit (< 5 min): return as-is. Stale hit: keep as fallback.
        let old_items = match cache.get(&video_url) {
            Some((time, items)) if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 => {
                return items.clone();
            }
            Some((_time, items)) => items.clone(),
            None => vec![],
        };
        let mut requester = requester_or_default(&options, CHANNEL_ID, "get_videos");
        let referer = format!("{}/", self.url.trim_end_matches('/'));
        // HTTP/1.1 forced; headers mimic a desktop browser (see html_headers).
        let text = match requester
            .get_with_headers(&video_url, Self::html_headers(&referer), Some(Version::HTTP_11))
            .await
        {
            Ok(text) => text,
            Err(e) => {
                report_provider_error(
                    CHANNEL_ID,
                    "get_videos.request",
                    &format!("url={video_url}; error={e}"),
                )
                .await;
                return old_items;
            }
        };
        let items = self.parse_items(&text);
        // An empty parse is treated as a soft failure; keep serving stale data.
        if items.is_empty() {
            return old_items;
        }
        cache.remove(&video_url);
        cache.insert(video_url, items.clone());
        items
    }
    fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
        Some(self.build_channel(clientversion))
    }
}

100
src/proxies/lulustream.rs Normal file
View File

@@ -0,0 +1,100 @@
use ntex::web;
use url::Url;
use serde_json::json;
use crate::util::requester::Requester;
/// Stateless proxy that resolves lulustream.com / luluvdo.com detail pages
/// into direct video URLs by scraping the embedded player config.
#[derive(Debug, Clone)]
pub struct LulustreamProxy {}
impl LulustreamProxy {
pub fn new() -> Self {
LulustreamProxy {}
}
/// Normalize a caller-supplied endpoint into (full detail URL, video id).
/// Accepts absolute URLs, host-prefixed paths ("lulustream.com/..."), or
/// bare paths resolved against https://lulustream.com/. Returns None when
/// the input is empty or fails the is_allowed_detail_url whitelist
/// (which also rejects any non-https absolute URL).
fn normalize_detail_request(endpoint: &str) -> Option<(String, String)> {
    let endpoint = endpoint.trim().trim_start_matches('/');
    if endpoint.is_empty() {
        return None;
    }
    let detail_url = if endpoint.starts_with("http://") || endpoint.starts_with("https://") {
        endpoint.to_string()
    } else if endpoint.starts_with("lulustream.com/") || endpoint.starts_with("www.lulustream.com/") ||
        endpoint.starts_with("luluvdo.com/")
    {
        format!("https://{endpoint}")
    } else {
        format!("https://lulustream.com/{endpoint}")
    };
    if !Self::is_allowed_detail_url(&detail_url) {
        return None;
    }
    let parsed = Url::parse(&detail_url).ok()?;
    // The video id is the final path segment of the detail URL.
    let video_id = parsed.path_segments()?
        .last()
        .map(ToOwned::to_owned)?;
    Some((detail_url, video_id))
}
/// Whitelist check for detail URLs: https scheme, a known lulustream host,
/// and a /v/, /e/ or /d/ path (watch, embed and download/share link shapes).
///
/// Bug fix: "/d/" was missing from the path whitelist, so the unit tests in
/// this file (which normalize "d/<id>" share links and expect Some) failed —
/// normalize_detail_request rejected every /d/ URL.
fn is_allowed_detail_url(url: &str) -> bool {
    let Ok(parsed) = Url::parse(url) else {
        return false;
    };
    if parsed.scheme() != "https" {
        return false;
    }
    let Some(host) = parsed.host_str() else {
        return false;
    };
    let host_allowed =
        matches!(host, "lulustream.com" | "www.lulustream.com" | "luluvdo.com");
    let path = parsed.path();
    let path_allowed =
        path.starts_with("/v/") || path.starts_with("/e/") || path.starts_with("/d/");
    host_allowed && path_allowed
}
/// Fetch the lulustream detail page and scrape the first source URL out of
/// the embedded player config (`sources: [{file:"..."`).
/// Returns an empty string on any failure (invalid URL, fetch error,
/// pattern not found), logging the reason to stdout.
pub async fn get_video_url(
    &self,
    url: String,
    requester: web::types::State<Requester>,
) -> String {
    // Clone the shared requester so this call can mutate its own copy.
    let mut requester = requester.get_ref().clone();
    let Some((detail_url, video_id)) = Self::normalize_detail_request(&url) else {
        println!("LulustreamProxy: Invalid detail URL: {url}");
        return String::new();
    };
    let text = requester.get(&detail_url, None).await.unwrap_or_default();
    // Take everything between `sources: [{file:"` and the next quote.
    let video_url = text.split("sources: [{file:\"")
        .nth(1)
        .and_then(|s| s.split('"').next())
        .unwrap_or_default()
        .to_string();
    if video_url.is_empty() {
        println!("LulustreamProxy: Failed to extract video URL for video ID: {video_id}");
    }
    video_url
}
}
#[cfg(test)]
mod tests {
    use super::LulustreamProxy;
    // Absolute /d/ share links should pass through unchanged and yield the
    // trailing path segment as the video id.
    #[test]
    fn normalizes_detail_request_with_full_url() {
        let (url, video_id) =
            LulustreamProxy::normalize_detail_request("https://lulustream.com/d/s484n23k8opy")
                .expect("detail request should parse");
        assert_eq!(url, "https://lulustream.com/d/s484n23k8opy");
        assert_eq!(video_id, "s484n23k8opy");
    }
    // Bare paths are resolved against https://lulustream.com/.
    #[test]
    fn normalizes_detail_request_with_path_only() {
        let (url, video_id) = LulustreamProxy::normalize_detail_request("d/s484n23k8opy")
            .expect("detail request should parse");
        assert_eq!(url, "https://lulustream.com/d/s484n23k8opy");
        assert_eq!(video_id, "s484n23k8opy");
    }
}

View File

@@ -12,6 +12,7 @@ use crate::proxies::spankbang::SpankbangProxy;
use crate::proxies::vjav::VjavProxy; use crate::proxies::vjav::VjavProxy;
use crate::{proxies::sxyprn::SxyprnProxy, util::requester::Requester}; use crate::{proxies::sxyprn::SxyprnProxy, util::requester::Requester};
use crate::proxies::vidara::VidaraProxy; use crate::proxies::vidara::VidaraProxy;
use crate::proxies::lulustream::LulustreamProxy;
pub mod archivebate; pub mod archivebate;
pub mod doodstream; pub mod doodstream;
@@ -20,6 +21,7 @@ pub mod heavyfetish;
pub mod hqporner; pub mod hqporner;
pub mod hqpornerthumb; pub mod hqpornerthumb;
pub mod javtiful; pub mod javtiful;
pub mod lulustream;
pub mod noodlemagazine; pub mod noodlemagazine;
pub mod pimpbunny; pub mod pimpbunny;
pub mod porndish; pub mod porndish;
@@ -38,6 +40,7 @@ pub enum AnyProxy {
Doodstream(DoodstreamProxy), Doodstream(DoodstreamProxy),
Sxyprn(SxyprnProxy), Sxyprn(SxyprnProxy),
Javtiful(javtiful::JavtifulProxy), Javtiful(javtiful::JavtifulProxy),
Lulustream(LulustreamProxy),
Pornhd3x(Pornhd3xProxy), Pornhd3x(Pornhd3xProxy),
Pimpbunny(PimpbunnyProxy), Pimpbunny(PimpbunnyProxy),
Porndish(PorndishProxy), Porndish(PorndishProxy),
@@ -60,6 +63,7 @@ impl Proxy for AnyProxy {
AnyProxy::Doodstream(p) => p.get_video_url(url, requester).await, AnyProxy::Doodstream(p) => p.get_video_url(url, requester).await,
AnyProxy::Sxyprn(p) => p.get_video_url(url, requester).await, AnyProxy::Sxyprn(p) => p.get_video_url(url, requester).await,
AnyProxy::Javtiful(p) => p.get_video_url(url, requester).await, AnyProxy::Javtiful(p) => p.get_video_url(url, requester).await,
AnyProxy::Lulustream(p) => p.get_video_url(url, requester).await,
AnyProxy::Pornhd3x(p) => p.get_video_url(url, requester).await, AnyProxy::Pornhd3x(p) => p.get_video_url(url, requester).await,
AnyProxy::Pimpbunny(p) => p.get_video_url(url, requester).await, AnyProxy::Pimpbunny(p) => p.get_video_url(url, requester).await,
AnyProxy::Porndish(p) => p.get_video_url(url, requester).await, AnyProxy::Porndish(p) => p.get_video_url(url, requester).await,

View File

@@ -38,7 +38,7 @@ impl SxyprnProxy {
) -> String { ) -> String {
let mut requester = requester.get_ref().clone(); let mut requester = requester.get_ref().clone();
let url = "https://sxyprn.com/".to_string() + &url; let url = "https://sxyprn.com/".to_string() + &url;
println!("Fetching URL: {}", url); // println!("Fetching URL: {}", url);
let text = requester.get(&url, None).await.unwrap_or("".to_string()); let text = requester.get(&url, None).await.unwrap_or("".to_string());
if text.is_empty() { if text.is_empty() {
return "".to_string(); return "".to_string();
@@ -49,27 +49,27 @@ impl SxyprnProxy {
.split("\"}") .split("\"}")
.collect::<Vec<&str>>()[0] .collect::<Vec<&str>>()[0]
.replace("\\", ""); .replace("\\", "");
println!("src: {}", data_string); // println!("src: {}", data_string);
let mut tmp = data_string let mut tmp = data_string
.split("/") .split("/")
.map(|s| s.to_string()) .map(|s| s.to_string())
.collect::<Vec<String>>(); .collect::<Vec<String>>();
println!("tmp: {:?}", tmp); // println!("tmp: {:?}", tmp);
tmp[1] = format!( tmp[1] = format!(
"{}8/{}", "{}8/{}",
tmp[1], tmp[1],
boo(ssut51(tmp[6].as_str()), ssut51(tmp[7].as_str())) boo(ssut51(tmp[6].as_str()), ssut51(tmp[7].as_str()))
); );
println!("tmp[1]: {:?}", tmp[1]); // println!("tmp[1]: {:?}", tmp[1]);
//preda //preda
tmp[5] = format!( tmp[5] = format!(
"{}", "{}",
tmp[5].parse::<u32>().unwrap() - ssut51(tmp[6].as_str()) - ssut51(tmp[7].as_str()) tmp[5].parse::<u32>().unwrap() - ssut51(tmp[6].as_str()) - ssut51(tmp[7].as_str())
); );
println!("tmp: {:?}", tmp); // println!("tmp: {:?}", tmp);
let sxyprn_video_url = format!("https://sxyprn.com{}", tmp.join("/")); let sxyprn_video_url = format!("https://sxyprn.com{}", tmp.join("/"));
println!("sxyprn_video_url: {}", sxyprn_video_url); // println!("sxyprn_video_url: {}", sxyprn_video_url);
match crate::util::get_redirect_location(&sxyprn_video_url) { match crate::util::get_redirect_location(&sxyprn_video_url) {
Ok(Some(loc)) => {return format!("https:{}", loc)}, Ok(Some(loc)) => {return format!("https:{}", loc)},
Ok(None) => println!("No redirect found for {}", sxyprn_video_url), Ok(None) => println!("No redirect found for {}", sxyprn_video_url),

View File

@@ -3,25 +3,41 @@ use url::Url;
use crate::providers::{build_proxy_url, strip_url_scheme}; use crate::providers::{build_proxy_url, strip_url_scheme};
use crate::videos::ServerOptions; use crate::videos::ServerOptions;
#[allow(dead_code)]
const DOODSTREAM_HOSTS: &[&str] = &[ const DOODSTREAM_HOSTS: &[&str] = &[
"doodstream.com",
"turboplayers.xyz", "turboplayers.xyz",
"www.turboplayers.xyz",
"trailerhg.xyz", "trailerhg.xyz",
"www.trailerhg.xyz",
"streamhg.com", "streamhg.com",
"www.streamhg.com", ];
const LULUSTREAM_HOSTS: &[&str] = &[
"luluvdo.com",
"lulustream.com",
];
const VIDARA_HOSTS: &[&str] = &[
"vidara.so",
]; ];
#[allow(dead_code)] #[allow(dead_code)]
pub fn proxy_name_for_url(url: &str) -> Option<&'static str> { pub fn proxy_name_for_url(url: &str) -> Option<&'static str> {
let parsed = Url::parse(url).ok()?; let parsed = match !url.starts_with("http://") && !url.starts_with("https://"){
true => Url::parse(&format!("https://{}", url)).ok()?,
false => Url::parse(url).ok()?
};
let host = parsed.host_str()?.to_ascii_lowercase(); let host = parsed.host_str()?.to_ascii_lowercase();
if DOODSTREAM_HOSTS.contains(&host.as_str()) { if DOODSTREAM_HOSTS.contains(&host.as_str()) {
return Some("doodstream"); return Some("doodstream");
} }
if LULUSTREAM_HOSTS.contains(&host.as_str()) {
return Some("lulustream");
}
if VIDARA_HOSTS.contains(&host.as_str()) {
return Some("vidara");
}
None None
} }

View File

@@ -679,7 +679,7 @@ mod tests {
let origin = "https://shared-cookie-requester-test.invalid/"; let origin = "https://shared-cookie-requester-test.invalid/";
a.cookie_jar a.cookie_jar
.add_cookie_str("shared_cookie=1; Path=/; SameSite=Lax", origin); .add_cookie_str("shared_cookie=1; Path=/; SameSite=Lax", &url::Url::parse(origin).unwrap());
let cookie_header = b let cookie_header = b
.cookie_header_for_url("https://shared-cookie-requester-test.invalid/path") .cookie_header_for_url("https://shared-cookie-requester-test.invalid/path")

View File

@@ -282,13 +282,36 @@ impl VideoFormat {
http_headers: None, http_headers: None,
} }
} }
#[cfg(any( pub fn m3u8(url: String, quality: String, format: String) -> Self {
not(hottub_single_provider), let _ = format;
hottub_provider = "vrporn", VideoFormat {
hottub_provider = "perverzija", url,
hottub_provider = "porndish", quality,
hottub_provider = "spankbang", format: format, // Default format
))] format_id: Some("m3u8-1080".to_string()),
format_note: None,
filesize: None,
asr: None,
fps: None,
width: None,
height: None,
tbr: None,
language: None,
language_preference: None,
ext: Some("m3u8".to_string()),
vcodec: None,
acodec: None,
dynamic_range: None,
abr: None,
vbr: None,
container: None,
protocol: Some("m3u8_native".to_string()),
audio_ext: Some("none".to_string()),
video_ext: Some("m3u8".to_string()),
resolution: None,
http_headers: None,
}
}
pub fn add_http_header(&mut self, key: String, value: String) { pub fn add_http_header(&mut self, key: String, value: String) {
if self.http_headers.is_none() { if self.http_headers.is_none() {
self.http_headers = Some(HashMap::new()); self.http_headers = Some(HashMap::new());
@@ -297,14 +320,6 @@ impl VideoFormat {
headers.insert(key, value); headers.insert(key, value);
} }
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "hentaihaven",
hottub_provider = "noodlemagazine",
hottub_provider = "shooshtime",
hottub_provider = "heavyfetish",
hottub_provider = "hsex",
))]
pub fn http_header(&mut self, key: String, value: String) -> Self { pub fn http_header(&mut self, key: String, value: String) -> Self {
if self.http_headers.is_none() { if self.http_headers.is_none() {
self.http_headers = Some(HashMap::new()); self.http_headers = Some(HashMap::new());
@@ -329,86 +344,86 @@ impl VideoFormat {
self.format_note = Some(format_note); self.format_note = Some(format_note);
self self
} }
// pub fn filesize(mut self, filesize: u32) -> Self { pub fn filesize(mut self, filesize: u32) -> Self {
// self.filesize = Some(filesize); self.filesize = Some(filesize);
// self self
// } }
// pub fn asr(mut self, asr: u32) -> Self { pub fn asr(mut self, asr: u32) -> Self {
// self.asr = Some(asr); self.asr = Some(asr);
// self self
// } }
// pub fn fps(mut self, fps: u32) -> Self { pub fn fps(mut self, fps: u32) -> Self {
// self.fps = Some(fps); self.fps = Some(fps);
// self self
// } }
// pub fn width(mut self, width: u32) -> Self { pub fn width(mut self, width: u32) -> Self {
// self.width = Some(width); self.width = Some(width);
// self self
// } }
// pub fn height(mut self, height: u32) -> Self { pub fn height(mut self, height: u32) -> Self {
// self.height = Some(height); self.height = Some(height);
// self self
// } }
// pub fn tbr(mut self, tbr: u32) -> Self { pub fn tbr(mut self, tbr: u32) -> Self {
// self.tbr = Some(tbr); self.tbr = Some(tbr);
// self self
// } }
// pub fn language(mut self, language: String) -> Self { pub fn language(mut self, language: String) -> Self {
// self.language = Some(language); self.language = Some(language);
// self self
// } }
// pub fn language_preference(mut self, language_preference: u32) -> Self { pub fn language_preference(mut self, language_preference: u32) -> Self {
// self.language_preference = Some(language_preference); self.language_preference = Some(language_preference);
// self self
// } }
// pub fn ext(mut self, ext: String) -> Self { pub fn ext(mut self, ext: String) -> Self {
// self.ext = Some(ext); self.ext = Some(ext);
// self self
// } }
// pub fn vcodec(mut self, vcodec: String) -> Self { pub fn vcodec(mut self, vcodec: String) -> Self {
// self.vcodec = Some(vcodec); self.vcodec = Some(vcodec);
// self self
// } }
// pub fn acodec(mut self, acodec: String) -> Self { pub fn acodec(mut self, acodec: String) -> Self {
// self.acodec = Some(acodec); self.acodec = Some(acodec);
// self self
// } }
// pub fn dynamic_range(mut self, dynamic_range: String) -> Self { pub fn dynamic_range(mut self, dynamic_range: String) -> Self {
// self.dynamic_range = Some(dynamic_range); self.dynamic_range = Some(dynamic_range);
// self self
// } }
// pub fn abr(mut self, abr: u32) -> Self { pub fn abr(mut self, abr: u32) -> Self {
// self.abr = Some(abr); self.abr = Some(abr);
// self self
// } }
// pub fn vbr(mut self, vbr: u32) -> Self { pub fn vbr(mut self, vbr: u32) -> Self {
// self.vbr = Some(vbr); self.vbr = Some(vbr);
// self self
// } }
// pub fn container(mut self, container: String) -> Self { pub fn container(mut self, container: String) -> Self {
// self.container = Some(container); self.container = Some(container);
// self self
// } }
// pub fn protocol(mut self, protocol: String) -> Self { pub fn protocol(mut self, protocol: String) -> Self {
// self.protocol = Some(protocol); self.protocol = Some(protocol);
// self self
// } }
// pub fn audio_ext(mut self, audio_ext: String) -> Self { pub fn audio_ext(mut self, audio_ext: String) -> Self {
// self.audio_ext = Some(audio_ext); self.audio_ext = Some(audio_ext);
// self self
// } }
// pub fn video_ext(mut self, video_ext: String) -> Self { pub fn video_ext(mut self, video_ext: String) -> Self {
// self.video_ext = Some(video_ext); self.video_ext = Some(video_ext);
// self self
// } }
// pub fn resolution(mut self, resolution: String) -> Self { pub fn resolution(mut self, resolution: String) -> Self {
// self.resolution = Some(resolution); self.resolution = Some(resolution);
// self self
// } }
// pub fn http_headers(mut self, http_headers: HashMap<String, String>) -> Self { pub fn http_headers(mut self, http_headers: HashMap<String, String>) -> Self {
// self.http_headers = Some(http_headers); self.http_headers = Some(http_headers);
// self self
// } }
} }
#[derive(serde::Serialize, Debug)] #[derive(serde::Serialize, Debug)]
pub struct Videos { pub struct Videos {