This commit is contained in:
Simon
2026-03-23 11:32:22 +00:00
parent 9021521c00
commit 90ce9c684b
4 changed files with 350 additions and 14 deletions

View File

@@ -1,4 +1,5 @@
use crate::proxies::doodstream::DoodstreamProxy;
use crate::proxies::pornhd3x::Pornhd3xProxy;
use ntex::web;
use crate::proxies::pimpbunny::PimpbunnyProxy;
@@ -15,6 +16,7 @@ pub mod pimpbunny;
pub mod pimpbunnythumb;
pub mod porndish;
pub mod porndishthumb;
pub mod pornhd3x;
pub mod spankbang;
pub mod sxyprn;
@@ -23,6 +25,7 @@ pub enum AnyProxy {
Doodstream(DoodstreamProxy),
Sxyprn(SxyprnProxy),
Javtiful(javtiful::JavtifulProxy),
Pornhd3x(Pornhd3xProxy),
Pimpbunny(PimpbunnyProxy),
Porndish(PorndishProxy),
Spankbang(SpankbangProxy),
@@ -38,6 +41,7 @@ impl Proxy for AnyProxy {
AnyProxy::Doodstream(p) => p.get_video_url(url, requester).await,
AnyProxy::Sxyprn(p) => p.get_video_url(url, requester).await,
AnyProxy::Javtiful(p) => p.get_video_url(url, requester).await,
AnyProxy::Pornhd3x(p) => p.get_video_url(url, requester).await,
AnyProxy::Pimpbunny(p) => p.get_video_url(url, requester).await,
AnyProxy::Porndish(p) => p.get_video_url(url, requester).await,
AnyProxy::Spankbang(p) => p.get_video_url(url, requester).await,

243
src/proxies/pornhd3x.rs Normal file
View File

@@ -0,0 +1,243 @@
use std::sync::Arc;
use std::sync::atomic::{AtomicU32, Ordering};
use ntex::web;
use regex::Regex;
use serde_json::Value;
use url::Url;
use wreq::Version;
use crate::util::requester::Requester;
// Canonical site origin; used to absolutize site-relative media paths.
const BASE_URL: &str = "https://www.pornhd3x.tv";
// Static tokens that feed the MD5 request hash and the per-episode cookie
// name for the /ajax/get_sources endpoint (see build_source_hash /
// build_source_cookie_name below).
// NOTE(review): values appear copied from the site's player script — confirm
// they are still current upstream.
const SOURCE_SECRET: &str = "98126avrbi6m49vd7shxkn985";
const SOURCE_COOKIE_PREFIX: &str = "826avrbi6m49vd7shxkn985m";
const SOURCE_COOKIE_SUFFIX: &str = "k06twz87wwxtp3dqiicks2df";
/// Proxy that resolves pornhd3x.tv detail pages into direct video URLs.
///
/// Cloning is cheap: the only state is a shared counter behind an `Arc`.
#[derive(Debug, Clone)]
pub struct Pornhd3xProxy {
    // Monotonic counter for /ajax/get_sources calls; shared across clones so
    // the upstream `count` query parameter keeps increasing process-wide.
    source_counter: Arc<AtomicU32>,
}
impl Pornhd3xProxy {
pub fn new() -> Self {
Self {
source_counter: Arc::new(AtomicU32::new(0)),
}
}
fn normalize_detail_request(endpoint: &str) -> Option<(String, Option<String>)> {
let endpoint = endpoint.trim().trim_start_matches('/');
if endpoint.is_empty() {
return None;
}
let (detail_part, quality) = match endpoint.split_once("/__quality__/") {
Some((detail, quality)) => {
(detail, Some(quality.replace("%20", " ").trim().to_string()))
}
None => (endpoint, None),
};
let detail_url =
if detail_part.starts_with("http://") || detail_part.starts_with("https://") {
detail_part.to_string()
} else {
format!("https://{}", detail_part.trim_start_matches('/'))
};
Self::is_allowed_detail_url(&detail_url)
.then_some((detail_url, quality.filter(|value| !value.is_empty())))
}
fn is_allowed_detail_url(url: &str) -> bool {
let Some(url) = Url::parse(url).ok() else {
return false;
};
if url.scheme() != "https" {
return false;
}
let Some(host) = url.host_str() else {
return false;
};
(host == "www.pornhd3x.tv" || host == "pornhd3x.tv") && url.path().starts_with("/movies/")
}
fn normalize_url(raw: &str) -> String {
let value = raw.trim();
if value.is_empty() {
return String::new();
}
if value.starts_with("//") {
return format!("https:{value}");
}
if value.starts_with('/') {
return format!("{BASE_URL}{value}");
}
if value.starts_with("http://") {
return value.replacen("http://", "https://", 1);
}
value.to_string()
}
fn extract_episode_id(html: &str) -> Option<String> {
Regex::new(r#"(?is)(?:id=["']uuid["'][^>]*value=["']|episode-id=["'])([A-Za-z0-9]+)"#)
.ok()?
.captures(html)
.and_then(|captures| captures.get(1))
.map(|value| value.as_str().to_string())
}
fn build_source_cookie_name(episode_id: &str) -> String {
format!("{SOURCE_COOKIE_PREFIX}{episode_id}{SOURCE_COOKIE_SUFFIX}")
}
fn build_source_hash(episode_id: &str, nonce: &str) -> String {
format!(
"{:x}",
md5::compute(format!("{episode_id}{nonce}{SOURCE_SECRET}"))
)
}
fn next_source_request(&self) -> (u32, String) {
let count = self.source_counter.fetch_add(1, Ordering::Relaxed) + 1;
let nonce = format!("{:06x}", count % 0xFF_FFFF);
(count, nonce)
}
async fn fetch_sources(
&self,
requester: &mut Requester,
referer: &str,
episode_id: &str,
) -> Option<Value> {
let (count, nonce) = self.next_source_request();
let source_url = format!(
"{BASE_URL}/ajax/get_sources/{episode_id}/{hash}?count={count}&mobile=true",
hash = Self::build_source_hash(episode_id, &nonce),
);
let existing_cookie = requester.cookie_header_for_url(&source_url);
let cookie_value = format!("{}={nonce}", Self::build_source_cookie_name(episode_id));
let combined_cookie = match existing_cookie {
Some(existing) if !existing.trim().is_empty() => format!("{existing}; {cookie_value}"),
_ => cookie_value,
};
let response = requester
.get_with_headers(
&source_url,
vec![
("Cookie".to_string(), combined_cookie),
("Referer".to_string(), referer.to_string()),
("X-Requested-With".to_string(), "XMLHttpRequest".to_string()),
(
"Accept".to_string(),
"application/json, text/javascript, */*; q=0.01".to_string(),
),
],
Some(Version::HTTP_11),
)
.await
.ok()?;
serde_json::from_str::<Value>(&response).ok()
}
fn select_source_url(payload: &Value, quality: Option<&str>) -> Option<String> {
let sources = payload
.get("playlist")
.and_then(Value::as_array)
.into_iter()
.flatten()
.flat_map(|playlist| {
playlist
.get("sources")
.and_then(Value::as_array)
.into_iter()
.flatten()
})
.collect::<Vec<_>>();
if let Some(quality) = quality {
let quality = quality.trim().to_ascii_lowercase();
for source in &sources {
let label = source
.get("label")
.and_then(Value::as_str)
.unwrap_or_default()
.trim()
.to_ascii_lowercase();
if label == quality {
let file = source.get("file").and_then(Value::as_str)?;
return Some(Self::normalize_url(file));
}
}
}
for source in sources {
let Some(file) = source.get("file").and_then(Value::as_str) else {
continue;
};
let url = Self::normalize_url(file);
if !url.is_empty() {
return Some(url);
}
}
None
}
}
impl crate::proxies::Proxy for Pornhd3xProxy {
    /// Resolves a proxied endpoint into a direct video URL.
    ///
    /// Any failure along the pipeline — endpoint rejected, detail page fetch
    /// error, missing episode id, sources fetch/parse error, or no playable
    /// source — yields an empty string.
    async fn get_video_url(&self, url: String, requester: web::types::State<Requester>) -> String {
        // Run the whole pipeline in an Option-returning async block so each
        // step can bail with `?` instead of a separate early return.
        let resolved = async {
            let (detail_url, quality) = Self::normalize_detail_request(&url)?;
            let mut client = requester.get_ref().clone();
            let detail_html = client.get(&detail_url, Some(Version::HTTP_11)).await.ok()?;
            let episode_id = Self::extract_episode_id(&detail_html)?;
            let payload = self
                .fetch_sources(&mut client, &detail_url, &episode_id)
                .await?;
            Self::select_source_url(&payload, quality.as_deref())
        }
        .await;
        resolved.unwrap_or_default()
    }
}
#[cfg(test)]
mod tests {
    use super::Pornhd3xProxy;

    // The "/__quality__/<label>" suffix is split off and https:// is
    // prepended to the scheme-less host/path.
    #[test]
    fn normalizes_detail_endpoint_and_quality() {
        let parsed = Pornhd3xProxy::normalize_detail_request(
            "www.pornhd3x.tv/movies/example-video/__quality__/720p",
        );
        let (url, quality) = parsed.expect("proxy target should parse");
        assert_eq!(url, "https://www.pornhd3x.tv/movies/example-video");
        assert_eq!(quality, Some("720p".to_string()));
    }

    // The uuid input wins over the episode-id attribute that appears later.
    #[test]
    fn extracts_episode_id_from_detail_markup() {
        let html = r#"
            <input id="uuid" value="49Q27JL3HCPVNJQN">
            <a class="btn-eps" episode-id="OTHER"></a>
        "#;
        let id = Pornhd3xProxy::extract_episode_id(html);
        assert_eq!(id.as_deref(), Some("49Q27JL3HCPVNJQN"));
    }
}