shooshtime fix
This commit is contained in:
@@ -165,7 +165,10 @@ impl DoodstreamProxy {
|
||||
let token_regex = Self::regex(r"\b[0-9a-z]+\b")?;
|
||||
payload = token_regex
|
||||
.replace_all(&payload, |captures: &Captures| {
|
||||
let token = captures.get(0).map(|value| value.as_str()).unwrap_or_default();
|
||||
let token = captures
|
||||
.get(0)
|
||||
.map(|value| value.as_str())
|
||||
.unwrap_or_default();
|
||||
let Some(index) = Self::decode_base36(token) else {
|
||||
return token.to_string();
|
||||
};
|
||||
|
||||
@@ -17,6 +17,7 @@ pub mod pimpbunnythumb;
|
||||
pub mod porndish;
|
||||
pub mod porndishthumb;
|
||||
pub mod pornhd3x;
|
||||
pub mod shooshtime;
|
||||
pub mod spankbang;
|
||||
pub mod sxyprn;
|
||||
|
||||
|
||||
301
src/proxies/shooshtime.rs
Normal file
301
src/proxies/shooshtime.rs
Normal file
@@ -0,0 +1,301 @@
|
||||
use ntex::http::Response;
|
||||
use ntex::http::header::{CONTENT_LENGTH, CONTENT_RANGE, CONTENT_TYPE};
|
||||
use ntex::web::{self, HttpRequest, error};
|
||||
use regex::Regex;
|
||||
use url::Url;
|
||||
|
||||
use crate::util::requester::Requester;
|
||||
|
||||
/// Origin used to absolutize relative media URLs scraped from detail pages.
const BASE_URL: &str = "https://shooshtime.com";
|
||||
|
||||
/// One downloadable media variant scraped from a video detail page.
#[derive(Debug, Clone)]
struct SourceCandidate {
    // Absolute https URL pointing at the site's `/get_file/` endpoint.
    url: String,
    // Human-readable quality label, e.g. "480p" or "720p".
    quality: String,
}
|
||||
|
||||
/// Stateless proxy for shooshtime.com: resolves a video detail page to a
/// direct media URL and relays the media bytes back to the client.
#[derive(Debug, Clone)]
pub struct ShooshtimeProxy {}
|
||||
|
||||
impl ShooshtimeProxy {
|
||||
pub fn new() -> Self {
|
||||
Self {}
|
||||
}
|
||||
|
||||
fn normalize_detail_request(endpoint: &str) -> Option<(String, Option<String>)> {
|
||||
let endpoint = endpoint.trim().trim_start_matches('/');
|
||||
if endpoint.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let (detail_part, quality) = match endpoint.split_once("/__quality__/") {
|
||||
Some((detail, quality)) => {
|
||||
(detail, Some(quality.replace("%20", " ").trim().to_string()))
|
||||
}
|
||||
None => (endpoint, None),
|
||||
};
|
||||
|
||||
let mut detail_url =
|
||||
if detail_part.starts_with("http://") || detail_part.starts_with("https://") {
|
||||
detail_part.to_string()
|
||||
} else {
|
||||
format!("https://{}", detail_part.trim_start_matches('/'))
|
||||
};
|
||||
|
||||
if detail_url.contains("/videos/") && !detail_url.ends_with('/') {
|
||||
detail_url.push('/');
|
||||
}
|
||||
|
||||
Self::is_allowed_detail_url(&detail_url)
|
||||
.then_some((detail_url, quality.filter(|value| !value.is_empty())))
|
||||
}
|
||||
|
||||
fn is_allowed_detail_url(url: &str) -> bool {
|
||||
let Some(url) = Url::parse(url).ok() else {
|
||||
return false;
|
||||
};
|
||||
if url.scheme() != "https" {
|
||||
return false;
|
||||
}
|
||||
let Some(host) = url.host_str() else {
|
||||
return false;
|
||||
};
|
||||
(host == "shooshtime.com" || host == "www.shooshtime.com")
|
||||
&& url.path().starts_with("/videos/")
|
||||
}
|
||||
|
||||
fn is_allowed_media_url(url: &str) -> bool {
|
||||
let Some(url) = Url::parse(url).ok() else {
|
||||
return false;
|
||||
};
|
||||
if url.scheme() != "https" {
|
||||
return false;
|
||||
}
|
||||
let Some(host) = url.host_str() else {
|
||||
return false;
|
||||
};
|
||||
(host == "shooshtime.com" || host == "www.shooshtime.com")
|
||||
&& url.path().starts_with("/get_file/")
|
||||
}
|
||||
|
||||
fn normalize_url(raw: &str) -> String {
|
||||
let value = raw.trim().replace("\\/", "/");
|
||||
if value.is_empty() {
|
||||
return String::new();
|
||||
}
|
||||
if value.starts_with("//") {
|
||||
return format!("https:{value}");
|
||||
}
|
||||
if value.starts_with('/') {
|
||||
return format!("{BASE_URL}{value}");
|
||||
}
|
||||
if value.starts_with("http://") {
|
||||
return value.replacen("http://", "https://", 1);
|
||||
}
|
||||
value
|
||||
}
|
||||
|
||||
fn regex(value: &str) -> Option<Regex> {
|
||||
Regex::new(value).ok()
|
||||
}
|
||||
|
||||
fn extract_js_value(block: &str, regex: &Regex) -> Option<String> {
|
||||
regex
|
||||
.captures(block)
|
||||
.and_then(|value| value.get(1))
|
||||
.map(|value| value.as_str().replace("\\/", "/").replace("\\'", "'"))
|
||||
}
|
||||
|
||||
fn extract_sources(html: &str) -> Vec<SourceCandidate> {
|
||||
let Some(flashvars_regex) = Self::regex(r#"(?s)var\s+flashvars\s*=\s*\{(.*?)\};"#) else {
|
||||
return vec![];
|
||||
};
|
||||
let Some(flashvars) = flashvars_regex
|
||||
.captures(html)
|
||||
.and_then(|value| value.get(1))
|
||||
.map(|value| value.as_str().to_string())
|
||||
else {
|
||||
return vec![];
|
||||
};
|
||||
|
||||
let value_regex = |key: &str| Self::regex(&format!(r#"{key}:\s*'([^']*)'"#));
|
||||
let primary_url_regex = match value_regex("video_url") {
|
||||
Some(value) => value,
|
||||
None => return vec![],
|
||||
};
|
||||
let primary_quality_regex = match value_regex("video_url_text") {
|
||||
Some(value) => value,
|
||||
None => return vec![],
|
||||
};
|
||||
let alt_url_regex = match value_regex("video_alt_url") {
|
||||
Some(value) => value,
|
||||
None => return vec![],
|
||||
};
|
||||
let alt_quality_regex = match value_regex("video_alt_url_text") {
|
||||
Some(value) => value,
|
||||
None => return vec![],
|
||||
};
|
||||
|
||||
let mut sources = Vec::new();
|
||||
|
||||
if let Some(url) = Self::extract_js_value(&flashvars, &primary_url_regex) {
|
||||
let normalized = Self::normalize_url(&url);
|
||||
if !normalized.is_empty() && Self::is_allowed_media_url(&normalized) {
|
||||
sources.push(SourceCandidate {
|
||||
url: normalized,
|
||||
quality: Self::extract_js_value(&flashvars, &primary_quality_regex)
|
||||
.unwrap_or_else(|| "480p".to_string()),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(url) = Self::extract_js_value(&flashvars, &alt_url_regex) {
|
||||
let normalized = Self::normalize_url(&url);
|
||||
if !normalized.is_empty() && Self::is_allowed_media_url(&normalized) {
|
||||
sources.push(SourceCandidate {
|
||||
url: normalized,
|
||||
quality: Self::extract_js_value(&flashvars, &alt_quality_regex)
|
||||
.unwrap_or_else(|| "720p".to_string()),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
sources
|
||||
}
|
||||
|
||||
fn quality_score(label: &str) -> u32 {
|
||||
label
|
||||
.chars()
|
||||
.filter(|value| value.is_ascii_digit())
|
||||
.collect::<String>()
|
||||
.parse::<u32>()
|
||||
.unwrap_or(0)
|
||||
}
|
||||
|
||||
fn select_source_url(html: &str, quality: Option<&str>) -> Option<String> {
|
||||
let sources = Self::extract_sources(html);
|
||||
if sources.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if let Some(quality) = quality {
|
||||
let wanted = quality.trim().to_ascii_lowercase();
|
||||
if let Some(source) = sources
|
||||
.iter()
|
||||
.find(|source| source.quality.trim().to_ascii_lowercase() == wanted)
|
||||
{
|
||||
return Some(source.url.clone());
|
||||
}
|
||||
}
|
||||
|
||||
sources
|
||||
.iter()
|
||||
.max_by_key(|source| Self::quality_score(&source.quality))
|
||||
.map(|source| source.url.clone())
|
||||
}
|
||||
}
|
||||
|
||||
/// ntex handler: resolves the `endpoint` path parameter to a shooshtime.com
/// detail page, scrapes a direct media URL from it, and relays the media
/// bytes (plus the headers players need for seeking) back to the client.
///
/// Responds `400 Bad Request` when the endpoint fails validation and
/// `502 Bad Gateway` when any upstream step fails.
pub async fn serve_media(
    req: HttpRequest,
    requester: web::types::State<Requester>,
) -> Result<impl web::Responder, web::Error> {
    let endpoint = req.match_info().query("endpoint").to_string();
    // Reject anything that does not resolve to an allowed /videos/ page.
    let Some((detail_url, quality)) = ShooshtimeProxy::normalize_detail_request(&endpoint) else {
        return Ok(web::HttpResponse::BadRequest().finish());
    };

    let mut requester = requester.get_ref().clone();
    let html = match requester.get(&detail_url, None).await {
        Ok(html) => html,
        Err(_) => return Ok(web::HttpResponse::BadGateway().finish()),
    };

    let Some(source_url) = ShooshtimeProxy::select_source_url(&html, quality.as_deref()) else {
        return Ok(web::HttpResponse::BadGateway().finish());
    };

    // Send the detail page as Referer and forward the client's Range header
    // so partial/seek requests keep working end to end.
    let mut headers = vec![("Referer".to_string(), detail_url)];
    if let Some(range) = req
        .headers()
        .get("Range")
        .and_then(|value| value.to_str().ok())
    {
        headers.push(("Range".to_string(), range.to_string()));
    }

    let upstream = match requester.get_raw_with_headers(&source_url, headers).await {
        Ok(response) => response,
        Err(_) => return Ok(web::HttpResponse::BadGateway().finish()),
    };

    // NOTE(review): the upstream body is fully buffered in memory before
    // responding; confirm this is acceptable for the expected file sizes.
    let status = upstream.status();
    let upstream_headers = upstream.headers().clone();
    let bytes = upstream.bytes().await.map_err(error::ErrorBadGateway)?;

    // Mirror the playback-relevant headers from upstream onto our response.
    let mut response = Response::build(status);
    if let Some(value) = upstream_headers
        .get(CONTENT_TYPE)
        .and_then(|value| value.to_str().ok())
    {
        response.set_header(CONTENT_TYPE, value);
    }
    if let Some(value) = upstream_headers
        .get(CONTENT_LENGTH)
        .and_then(|value| value.to_str().ok())
    {
        response.set_header(CONTENT_LENGTH, value);
    }
    if let Some(value) = upstream_headers
        .get(CONTENT_RANGE)
        .and_then(|value| value.to_str().ok())
    {
        response.set_header(CONTENT_RANGE, value);
    }
    if let Some(value) = upstream_headers
        .get("Accept-Ranges")
        .and_then(|value| value.to_str().ok())
    {
        response.set_header("Accept-Ranges", value);
    }

    Ok(response.body(bytes.to_vec()))
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::ShooshtimeProxy;

    /// A bare host/path endpoint with a quality suffix becomes an absolute
    /// https detail URL (trailing slash added) plus the parsed quality.
    #[test]
    fn normalizes_detail_endpoint_and_quality() {
        let parsed = ShooshtimeProxy::normalize_detail_request(
            "shooshtime.com/videos/example/123/__quality__/720p",
        );
        let (url, quality) = parsed.expect("proxy target should parse");

        assert_eq!(url, "https://shooshtime.com/videos/example/123/");
        assert_eq!(quality.as_deref(), Some("720p"));
    }

    /// An explicitly requested quality wins; with no request, the highest
    /// advertised quality is chosen.
    #[test]
    fn selects_requested_or_best_quality() {
        let html = r#"
            <script>
                var flashvars = {
                    video_url: 'https://shooshtime.com/get_file/1/token/1/2/3.mp4/?x=1',
                    video_url_text: '480p',
                    video_alt_url: 'https://shooshtime.com/get_file/1/token/1/2/3_720p.mp4/?x=2',
                    video_alt_url_text: '720p'
                };
            </script>
        "#;

        let requested = ShooshtimeProxy::select_source_url(html, Some("480p"));
        assert_eq!(
            requested.as_deref(),
            Some("https://shooshtime.com/get_file/1/token/1/2/3.mp4/?x=1")
        );

        let best = ShooshtimeProxy::select_source_url(html, None);
        assert_eq!(
            best.as_deref(),
            Some("https://shooshtime.com/get_file/1/token/1/2/3_720p.mp4/?x=2")
        );
    }
}
|
||||
Reference in New Issue
Block a user